# File: loo/tests/testthat/test_print_plot.R --------------------------------

set.seed(1414)
LLarr <- example_loglik_array()
waic1 <- suppressWarnings(waic(LLarr))
loo1 <- suppressWarnings(loo(LLarr))
psis1 <- suppressWarnings(psis(-LLarr))
r_eff_arr <- relative_eff(exp(LLarr))
loo1_r_eff <- suppressWarnings(loo(LLarr, r_eff = r_eff_arr))

# plotting ----------------------------------------------------------------
test_that("plot methods don't error", {
  expect_silent(plot(loo1, label_points = FALSE))
  expect_silent(plot(psis1, label_points = TRUE))
  expect_silent(plot(psis1, diagnostic = "n_eff", label_points = FALSE))
  loo1$diagnostics$pareto_k[1] <- 10
  expect_silent(plot(loo1, label_points = TRUE))
  expect_output(print(loo1, plot_k = TRUE))
  expect_output(print(psis1, plot_k = TRUE))
})

test_that("plot methods throw appropriate errors/warnings", {
  expect_error(plot(waic1), regexp = "No Pareto k estimates found")
  loo1$diagnostics$pareto_k[1:5] <- Inf
  psis1$diagnostics$pareto_k[1:5] <- Inf
  expect_warning(
    plot(loo1),
    regexp = "estimates are Inf/NA/NaN and not plotted."
  )
  expect_warning(
    plot(psis1),
    regexp = "estimates are Inf/NA/NaN and not plotted."
  )
})

# printing ----------------------------------------------------------------
lldim_msg <- paste0(
  "Computed from ",
  prod(dim(LLarr)[1:2]),
  " by ",
  dim(LLarr)[3],
  " log-likelihood matrix"
)
lwdim_msg <- paste0(
  "Computed from ",
  prod(dim(LLarr)[1:2]),
  " by ",
  dim(LLarr)[3],
  " log-weights matrix"
)

test_that("print.waic output is ok", {
  expect_output(print(waic1), lldim_msg)
  expect_output(
    print(waic1),
    "p_waic estimates greater than 0.4. We recommend trying loo instead."
  )
})

test_that("print.psis_loo and print.psis output ok", {
  expect_output(print(psis1), lwdim_msg)
  expect_output(print(psis1), "Pareto k estimates are good")
  expect_output(print(loo1), lldim_msg)
  expect_output(print(loo1), "MCSE and ESS estimates assume independent draws")
  expect_output(print(loo1_r_eff), "MCSE and ESS estimates assume MCMC draws")
  expect_output(print(loo1), "Pareto k estimates are good")

  loo1$diagnostics$pareto_k <- psis1$diagnostics$pareto_k <- runif(32, 0, .49)
  expect_output(print(loo1), regexp = "Pareto k estimates are good")
  expect_output(print(psis1), regexp = "Pareto k estimates are good")

  loo1$diagnostics$pareto_k[1] <- psis1$diagnostics$pareto_k[1] <- 0.71
  expect_output(print(loo1), regexp = "Pareto k diagnostic")

  loo1$diagnostics$pareto_k[1] <- psis1$diagnostics$pareto_k[1] <- 1.1
  expect_output(print(loo1), regexp = "Pareto k diagnostic")
})

# pareto_k_[ids,values,table] ---------------------------------------------
test_that("pareto_k_values works for psis_loo and psis objects, errors for waic", {
  kpsis <- pareto_k_values(psis1)
  kloo <- pareto_k_values(loo1)
  expect_identical(kpsis, kloo)
  expect_identical(kpsis, psis1$diagnostics$pareto_k)
  expect_error(pareto_k_values(waic1), "No Pareto k estimates found")
})

test_that("pareto_k_influence_values works for psis_loo objects, errors for psis waic", {
  kloo <- pareto_k_influence_values(loo1)
  kloo2 <- pareto_k_values(loo1)
  expect_identical(kloo, kloo2)
  expect_error(
    pareto_k_influence_values(psis1),
    "No Pareto k influence estimates found"
  )
  expect_error(
    pareto_k_influence_values(waic1),
    "No Pareto k influence estimates found"
  )
})

test_that("pareto_k_ids identifies correct observations", {
  for (j in 1:5) {
    loo1$diagnostics$pareto_k <- psis1$diagnostics$pareto_k <- runif(32, .25, 1.25)
    expect_identical(
      pareto_k_ids(loo1, threshold = 0.5),
      pareto_k_ids(psis1, threshold = 0.5)
    )
    expect_identical(
      pareto_k_ids(loo1, threshold = 0.5),
      which(pareto_k_values(loo1) > 0.5)
    )
    expect_identical(
      pareto_k_ids(psis1, threshold = 0.7),
      which(pareto_k_values(psis1) > 0.7)
    )
  }
})

test_that("pareto_k_table gives correct output", {
  threshold <- ps_khat_threshold(dim(psis1)[1])
  psis1$diagnostics$pareto_k[1:10] <- runif(10, 0, threshold)
  psis1$diagnostics$pareto_k[11:20] <- runif(10, threshold + 0.01, 0.99)
  psis1$diagnostics$pareto_k[21:32] <- runif(12, 1, 10)
  k <- pareto_k_values(psis1)
  tab <- pareto_k_table(psis1)
  expect_output(print(tab), "Pareto k diagnostic values")
  expect_identical(colnames(tab), c("Count", "Proportion", "Min. n_eff"))
  expect_equal(sum(tab[, "Count"]), length(k))
  expect_equal(sum(tab[, "Proportion"]), 1)
  expect_equal(sum(k <= threshold), tab[1, 1])
  expect_equal(sum(k > threshold & k <= 1), tab[2, 1])
  expect_equal(sum(k > 1), tab[3, 1])

  # if n_eff is NULL
  psis1$diagnostics$n_eff <- NULL
  tab2 <- pareto_k_table(psis1)
  expect_output(print(tab2), "")
  expect_equal(unname(tab2[, "Min. n_eff"]), rep(NA_real_, 3))
  psis1$diagnostics$pareto_k[1:32] <- 0.4
  expect_output(
    print(pareto_k_table(psis1)),
    paste0("All Pareto k estimates are good (k < ", round(threshold, 2), ")"),
    fixed = TRUE
  )
})
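# Note (illustration only, not part of the original test file): the
# `threshold` used in the pareto_k_table test above comes from
# ps_khat_threshold(S), the sample-size-dependent cutoff for "good" Pareto k
# values. A commonly cited closed form for that rule of thumb -- stated here
# as an assumption based on the PSIS diagnostics literature, not taken from
# this file -- is sketched below; the function name is illustrative only.
khat_threshold_sketch <- function(S) {
  # the "good" k cutoff grows toward 0.7 as the number of draws S increases
  min(1 - 1 / log10(S), 0.7)
}
# e.g. khat_threshold_sketch(1000) is approximately 0.67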
n_eff"]), rep(NA_real_, 3)) psis1$diagnostics$pareto_k[1:32] <- 0.4 expect_output( print(pareto_k_table(psis1)), paste0("All Pareto k estimates are good (k < ", round(threshold, 2), ")"), fixed = TRUE ) }) # psis_neff and mcse_loo -------------------------------------------------- test_that("psis_n_eff_values extractor works", { n_eff_psis <- psis1$diagnostics$n_eff expect_type(n_eff_psis, "double") expect_identical(psis_n_eff_values(psis1), n_eff_psis) expect_identical(psis_n_eff_values(psis1), psis_n_eff_values(loo1)) psis1$diagnostics$n_eff <- NULL expect_error(psis_n_eff_values(psis1), "No PSIS ESS estimates found") }) test_that("mcse_loo extractor gives correct value", { mcse <- mcse_loo(loo1) expect_type(mcse, "double") expect_snapshot_value(mcse, style = "serialize") }) test_that("mcse_loo returns NA when it should", { loo1$diagnostics$pareto_k[1] <- 1.5 mcse <- mcse_loo(loo1) expect_equal(mcse, NA) }) test_that("mcse_loo errors if not psis_loo object", { expect_error(mcse_loo(psis1), "psis_loo") }) loo/tests/testthat/test_loo_subsampling_approximations.R0000644000176200001440000004632315064301501023564 0ustar liggesusersoptions(mc.cores = 1) generate_test_elpd_dataset <- function() { N <- 10 K <- 10 S <- 1000 a0 <- 3 b0 <- 2 p <- 0.7 y <- rbinom(N, size = K, prob = p) a <- a0 + sum(y) b <- b0 + N * K - sum(y) fake_posterior <- draws <- as.matrix(rbeta(S, a, b)) fake_data <- data.frame(y, K) rm(N, K, S, a0, b0, p, y, a, b) list(fake_posterior = fake_posterior, fake_data = fake_data) } test_elpd_loo_approximation <- function(cores) { set.seed(123) test_data <- generate_test_elpd_dataset() fake_posterior <- test_data$fake_posterior fake_data <- test_data$fake_data llfun_test <- function(data_i, draws) { dbinom(data_i$y, size = data_i$K, prob = draws, log = TRUE) } # Compute plpd approximation expect_silent( pi_vals <- loo:::elpd_loo_approximation( .llfun = llfun_test, data = fake_data, draws = fake_posterior, loo_approximation = "plpd", cores = cores ) ) # Compute it manually point <- mean(fake_posterior) llik <- dbinom(fake_data$y, size = fake_data$K, prob = point, log = TRUE) abs_lliks <- abs(llik) man_elpd_loo_approximation <- abs_lliks / sum(abs_lliks) expect_equal( abs(pi_vals) / sum(abs(pi_vals)), man_elpd_loo_approximation, tolerance = 0.00001 ) # Compute lpd approximation expect_silent( pi_vals <- loo:::elpd_loo_approximation( .llfun = llfun_test, data = fake_data, draws = fake_posterior, loo_approximation = "lpd", cores = cores ) ) # Compute it manually llik <- numeric(10) for (i in seq_along(fake_data$y)) { llik[i] <- loo:::logMeanExp(dbinom( fake_data$y[i], size = fake_data$K, prob = fake_posterior, log = TRUE )) } abs_lliks <- abs(llik) man_approx_loo_variable <- abs_lliks / sum(abs_lliks) expect_equal( abs(pi_vals) / sum(abs(pi_vals)), man_approx_loo_variable, tolerance = 0.00001 ) # Compute waic approximation expect_silent( pi_vals_waic <- loo:::elpd_loo_approximation( .llfun = llfun_test, data = fake_data, draws = fake_posterior, loo_approximation = "waic", cores = cores ) ) expect_true(all(pi_vals > pi_vals_waic)) expect_true(sum(pi_vals) - sum(pi_vals_waic) < 1) # Compute tis approximation expect_silent( pi_vals_tis <- loo:::elpd_loo_approximation( .llfun = llfun_test, data = fake_data, draws = fake_posterior, loo_approximation = "tis", loo_approximation_draws = 100, cores = cores ) ) expect_true(all(pi_vals > pi_vals_tis)) expect_true(sum(pi_vals) - sum(pi_vals_tis) < 1) } test_that("elpd_loo_approximation works as expected", { test_elpd_loo_approximation(1) }) 
test_that("elpd_loo_approximation with multiple cores", { test_elpd_loo_approximation(2) }) test_that("Test loo_approximation_draws", { set.seed(123) N <- 1000 K <- 10 S <- 1000 a0 <- 3 b0 <- 2 p <- 0.7 y <- rbinom(N, size = K, prob = p) a <- a0 + sum(y) b <- b0 + N * K - sum(y) fake_posterior <- draws <- as.matrix(rbeta(S, a, b)) fake_data <- data.frame(y, K) rm(N, K, S, a0, b0, p, y, a, b) llfun_test <- function(data_i, draws) { dbinom(data_i$y, size = data_i$K, prob = draws, log = TRUE) } expect_silent( res1 <- loo:::elpd_loo_approximation( .llfun = llfun_test, data = fake_data, draws = fake_posterior, loo_approximation = "waic", loo_approximation_draws = NULL, cores = 1 ) ) expect_silent( res2 <- loo:::elpd_loo_approximation( .llfun = llfun_test, data = fake_data, draws = fake_posterior, loo_approximation = "waic", loo_approximation_draws = 10, cores = 1 ) ) expect_silent( res3 <- loo:::elpd_loo_approximation( .llfun = llfun_test, data = fake_data, draws = fake_posterior[1:10 * 100, ], loo_approximation = "waic", loo_approximation_draws = NULL, cores = 1 ) ) expect_silent( res4 <- loo:::elpd_loo_approximation( .llfun = llfun_test, data = fake_data, draws = fake_posterior[1:10 * 100, , drop = FALSE], loo_approximation = "waic", loo_approximation_draws = NULL, cores = 1 ) ) expect_failure(expect_equal(res1, res3)) expect_equal(res2, res3) expect_silent( loo_ss1 <- loo_subsample( x = llfun_test, draws = fake_posterior, data = fake_data, observations = 100, loo_approximation = "plpd", r_eff = rep(1, nrow(fake_data)) ) ) expect_silent( loo_ss2 <- loo_subsample( x = llfun_test, draws = fake_posterior, data = fake_data, observations = 100, loo_approximation = "plpd", loo_approximation_draws = 10, r_eff = rep(1, nrow(fake_data)) ) ) expect_silent( loo_ss3 <- loo_subsample( x = llfun_test, draws = fake_posterior, data = fake_data, observations = 100, loo_approximation = "plpd", loo_approximation_draws = 31, r_eff = rep(1, nrow(fake_data)) ) ) expect_error( loo_ss4 <- loo_subsample( x = llfun_test, draws = fake_posterior, data = fake_data, observations = 100, loo_approximation = "plpd", loo_approximation_draws = 3100, r_eff = rep(1, nrow(fake_data)) ) ) expect_equal( names(loo_ss1$loo_subsampling), c( "elpd_loo_approx", "loo_approximation", "loo_approximation_draws", "estimator", ".llfun", ".llgrad", ".llhess", "data_dim", "ndraws" ) ) expect_null(loo_ss1$loo_subsampling$loo_approximation_draws) expect_equal(loo_ss2$loo_subsampling$loo_approximation_draws, 10L) expect_equal(loo_ss3$loo_subsampling$loo_approximation_draws, 31L) }) test_that("waic using delta method and gradient", { if (FALSE) { # Code to generate testdata - saved and loaded to avoid dependency of mvtnorm set.seed(123) N <- 400 beta <- c(1, 2) X_full <- matrix(rep(1, N), ncol = 1) X_full <- cbind(X_full, runif(N)) S <- 1000 y_full <- rnorm(n = N, mean = X_full %*% beta, sd = 1) X <- X_full y <- y_full Lambda_0 <- diag(length(beta)) mu_0 <- c(0, 0) b_hat <- solve(t(X) %*% X) %*% t(X) %*% y mu_n <- solve(t(X) %*% X) %*% (t(X) %*% X %*% b_hat + Lambda_0 %*% mu_0) Lambda_n <- t(X) %*% X + Lambda_0 # Uncomment row below when running. 
Commented out to remove CHECK warnings # fake_posterior <- mvtnorm::rmvnorm(n = S, mean = mu_n, sigma = solve(Lambda_n)) colnames(fake_posterior) <- c("a", "b") fake_data <- data.frame(y, X) save( fake_posterior, fake_data, file = test_path("data-for-tests/normal_reg_waic_test_example.rda") ) } else { load(file = test_path("data-for-tests/normal_reg_waic_test_example.rda")) } .llfun <- function(data_i, draws) { # data_i: ith row of fdata (fake_data[i,, drop=FALSE]) # draws: entire fake_posterior matrix dnorm( data_i$y, mean = draws[, c("a", "b")] %*% t(as.matrix(data_i[, c("X1", "X2")])), sd = 1, log = TRUE ) } .llgrad <- function(data_i, draws) { x_i <- data_i[, "X2"] gr <- cbind( data_i$y - draws[, "a"] - draws[, "b"] * x_i, (data_i$y - draws[, "a"] - draws[, "b"] * x_i) * x_i ) colnames(gr) <- c("a", "b") gr } fake_posterior <- cbind(fake_posterior, runif(nrow(fake_posterior))) expect_silent( approx_loo_waic <- loo:::elpd_loo_approximation( .llfun, data = fake_data, draws = fake_posterior, cores = 1, loo_approximation = "waic" ) ) expect_silent( approx_loo_waic_delta <- loo:::elpd_loo_approximation( .llfun, data = fake_data, draws = fake_posterior, cores = 1, loo_approximation = "waic_grad", .llgrad = .llgrad ) ) expect_silent( approx_loo_waic_delta_diag <- loo:::elpd_loo_approximation( .llfun, data = fake_data, draws = fake_posterior, cores = 1, loo_approximation = "waic_grad_marginal", .llgrad = .llgrad ) ) # Test that the approaches should not deviate too much diff_waic_delta <- mean(approx_loo_waic - approx_loo_waic_delta) diff_waic_delta_diag <- mean(approx_loo_waic - approx_loo_waic_delta_diag) expect_equal(approx_loo_waic, approx_loo_waic_delta_diag, tolerance = 0.1) expect_equal(approx_loo_waic, approx_loo_waic_delta, tolerance = 0.01) # Test usage in subsampling_loo expect_silent( loo_ss_waic <- loo_subsample( x = .llfun, data = fake_data, draws = fake_posterior, cores = 1, r_eff = rep(1, nrow(fake_data)), loo_approximation = "waic", observations = 50, llgrad = .llgrad ) ) expect_silent( loo_ss_waic_delta <- loo_subsample( x = .llfun, data = fake_data, draws = fake_posterior, cores = 1, r_eff = rep(1, nrow(fake_data)), loo_approximation = "waic_grad", observations = 50, llgrad = .llgrad ) ) expect_silent( loo_ss_waic_delta_marginal <- loo_subsample( x = .llfun, data = fake_data, draws = fake_posterior, cores = 1, r_eff = rep(1, nrow(fake_data)), loo_approximation = "waic_grad_marginal", observations = 50, llgrad = .llgrad ) ) expect_silent( loo_ss_plpd <- loo_subsample( x = .llfun, data = fake_data, draws = fake_posterior, cores = 1, r_eff = rep(1, nrow(fake_data)), loo_approximation = "plpd", observations = 50, llgrad = .llgrad ) ) expect_error( loo_ss_waic_delta <- loo_subsample( x = .llfun, data = fake_data, draws = fake_posterior, cores = 1, r_eff = rep(1, nrow(fake_data)), loo_approximation = "waic_grad", observations = 50 ) ) }) test_that("waic using delta 2nd order method", { if (FALSE) { # Code to generate testdata - saved and loaded to avoid dependency of MCMCPack set.seed(123) N <- 100 beta <- c(1, 2) X_full <- matrix(rep(1, N), ncol = 1) X_full <- cbind(X_full, runif(N)) S <- 1000 y_full <- rnorm(n = N, mean = X_full %*% beta, sd = 0.5) X <- X_full y <- y_full # Uncomment row below when running. 
Commented out to remove CHECK warnings # fake_posterior <- MCMCpack::MCMCregress(y~x, data = data.frame(y = y,x=X[,2]), thin = 10, mcmc = 10000) # Because Im lazy fake_posterior <- as.matrix(fake_posterior) fake_posterior[, "sigma2"] <- sqrt(fake_posterior[, "sigma2"]) colnames(fake_posterior) <- c("a", "b", "sigma") fake_data <- data.frame(y, X) save( fake_posterior, fake_data, file = test_path("data-for-tests/normal_reg_waic_test_example2.rda"), compression_level = 9 ) } else { load(file = test_path("data-for-tests/normal_reg_waic_test_example2.rda")) } .llfun <- function(data_i, draws) { # data_i: ith row of fdata (data_i <- fake_data[i,, drop=FALSE]) # draws: entire fake_posterior matrix dnorm( data_i$y, mean = draws[, c("a", "b")] %*% t(as.matrix(data_i[, c("X1", "X2")])), sd = draws[, c("sigma")], log = TRUE ) } .llgrad <- function(data_i, draws) { sigma <- draws[, "sigma"] sigma2 <- sigma^2 b <- draws[, "b"] a <- draws[, "a"] x_i <- unlist(data_i[, c("X1", "X2")]) e <- (data_i$y - draws[, "a"] * x_i[1] - draws[, "b"] * x_i[2]) gr <- cbind( e * x_i[1] / sigma2, e * x_i[2] / sigma2, -1 / sigma + e^2 / (sigma2 * sigma) ) colnames(gr) <- c("a", "b", "sigma") gr } .llhess <- function(data_i, draws) { hess_array <- array( 0, dim = c(ncol(draws), ncol(draws), nrow(draws)), dimnames = list(colnames(draws), colnames(draws), NULL) ) sigma <- draws[, "sigma"] sigma2 <- sigma^2 sigma3 <- sigma2 * sigma b <- draws[, "b"] a <- draws[, "a"] x_i <- unlist(data_i[, c("X1", "X2")]) e <- (data_i$y - draws[, "a"] * x_i[1] - draws[, "b"] * x_i[2]) hess_array[1, 1, ] <- -x_i[1]^2 / sigma2 hess_array[1, 2, ] <- hess_array[2, 1, ] <- -x_i[1] * x_i[2] / sigma2 hess_array[2, 2, ] <- -x_i[2]^2 / sigma2 hess_array[3, 1, ] <- hess_array[1, 3, ] <- -2 * x_i[1] * e / sigma3 hess_array[3, 2, ] <- hess_array[2, 3, ] <- -2 * x_i[2] * e / sigma3 hess_array[3, 3, ] <- 1 / sigma2 - 3 * e^2 / (sigma2^2) hess_array } #data <- fake_data fake_posterior <- cbind(fake_posterior, runif(nrow(fake_posterior))) #draws <- fake_posterior <- cbind(fake_posterior, runif(nrow(fake_posterior))) expect_silent( approx_loo_waic <- loo:::elpd_loo_approximation( .llfun, data = fake_data, draws = fake_posterior, cores = 1, loo_approximation = "waic" ) ) expect_silent( approx_loo_waic_delta <- loo:::elpd_loo_approximation( .llfun, data = fake_data, draws = fake_posterior, cores = 1, loo_approximation = "waic_grad", .llgrad = .llgrad ) ) expect_silent( approx_loo_waic_delta2 <- loo:::elpd_loo_approximation( .llfun, data = fake_data, draws = fake_posterior, cores = 1, loo_approximation = "waic_hess", .llgrad = .llgrad, .llhess = .llhess ) ) # Test that the approaches should not deviate too much expect_equal(approx_loo_waic, approx_loo_waic_delta2, tolerance = 0.01) expect_equal(approx_loo_waic, approx_loo_waic_delta, tolerance = 0.01) expect_silent( test_loo_ss_waic <- loo_subsample( x = .llfun, data = fake_data, draws = fake_posterior, cores = 1, r_eff = rep(1, nrow(fake_data)), loo_approximation = "waic", observations = 50, llgrad = .llgrad ) ) expect_error( test_loo_ss_delta2 <- loo_subsample( x = .llfun, data = fake_data, draws = fake_posterior, cores = 1, r_eff = rep(1, nrow(fake_data)), loo_approximation = "waic_hess", observations = 50, llgrad = .llgrad ) ) expect_silent( test_loo_ss_delta2 <- loo_subsample( x = .llfun, data = fake_data, draws = fake_posterior, cores = 1, r_eff = rep(1, nrow(fake_data)), loo_approximation = "waic_hess", observations = 50, llgrad = .llgrad, llhess = .llhess ) ) expect_silent( test_loo_ss_delta <- 
loo_subsample( x = .llfun, data = fake_data, draws = fake_posterior, cores = 1, r_eff = rep(1, nrow(fake_data)), loo_approximation = "waic_grad", observations = 50, llgrad = .llgrad ) ) expect_silent( test_loo_ss_point <- loo_subsample( x = .llfun, data = fake_data, draws = fake_posterior, cores = 1, r_eff = rep(1, nrow(fake_data)), loo_approximation = "plpd", observations = 50, llgrad = .llgrad ) ) }) test_that("whhest works as expected", { N <- 100 m <- 10 z <- rep(1 / N, m) y <- 1:10 m_i <- rep(1, m) expect_silent(whe <- loo:::whhest(z = z, m_i = m_i, y = y, N = N)) expect_equal(whe$y_hat_ppz, 550) man_var <- (sum((whe$y_hat_ppz - y / z)^2) / (m - 1)) / m expect_equal(whe$v_hat_y_ppz, man_var) z <- 1:10 / (sum(1:10) * 10) expect_silent(whe <- loo:::whhest(z = z, m_i = m_i, y = y, N = N)) expect_equal(whe$y_hat_ppz, 550) expect_equal(whe$v_hat_y_ppz, 0) # School book example # https://newonlinecourses.science.psu.edu/stat506/node/15/ z <- c(650 / 15650, 2840 / 15650, 3200 / 15650) y <- c(420, 1785, 2198) m_i <- c(1, 1, 1) N <- 10 expect_silent(whe <- loo:::whhest(z = z, m_i = m_i, y = y, N = N)) expect_equal(round(whe$y_hat_ppz, 2), 10232.75, tolerance = 0) expect_equal(whe$v_hat_y_ppz, 73125.74, tolerance = 0.01) # Double check that it is rounding error man_var_round <- (sum((round(y / z, 2) - 10232.75)^2)) * (1 / 2) * (1 / 3) expect_equal(man_var_round, 73125.74, tolerance = 0.001) man_var_exact <- (sum((y / z - 10232.75)^2)) * (1 / 2) * (1 / 3) expect_equal(whe$v_hat_y_ppz, man_var_exact, tolerance = 0.001) # Add test for variance estimation N <- 100 m <- 10 y <- rep(1:10, 1) true_var <- var(rep(y, 10)) * (99) z <- rep(1 / N, m) m_i <- rep(100000, m) expect_silent(whe <- loo:::whhest(z = z, m_i = m_i, y = y, N = N)) expect_equal(true_var, whe$hat_v_y_ppz, tolerance = 0.01) # Add tests for m_i N <- 100 y <- rep(1:10, 2) m <- length(y) z <- rep(1 / N, m) m_i <- rep(1, m) expect_silent(whe1 <- loo:::whhest(z = z, m_i = m_i, y = y, N = N)) y <- rep(1:10) m <- length(y) z <- rep(1 / N, m) m_i <- rep(2, m) expect_silent(whe2 <- loo:::whhest(z = z, m_i = m_i, y = y, N = N)) expect_equal(whe1$y_hat_ppz, whe2$y_hat_ppz) expect_equal(whe1$v_hat_y_ppz, whe2$v_hat_y_ppz) expect_equal(whe1$hat_v_y_ppz, whe1$hat_v_y_ppz) }) test_that("srs_diff_est works as expected", { set.seed(1234) N <- 1000 y_true <- 1:N sigma_hat_true <- sqrt(N * sum((y_true - mean(y_true))^2) / length(y_true)) y_approx <- rnorm(N, y_true, 0.1) m <- 100 sigma_hat <- y_hat <- se_y_hat <- numeric(10000) for (i in 1:10000) { y_idx <- sample(1:N, size = m) y <- y_true[y_idx] res <- loo:::srs_diff_est(y_approx, y, y_idx) y_hat[i] <- res$y_hat se_y_hat[i] <- sqrt(res$v_y_hat) sigma_hat[i] <- sqrt(res$hat_v_y) } expect_equal(mean(y_hat), sum(y_true), tolerance = 0.1) in_ki <- y_hat + 2 * se_y_hat > sum(y_true) & y_hat - 2 * se_y_hat < sum(y_true) expect_equal(mean(in_ki), 0.95, tolerance = 0.01) # Should be unbiased expect_equal(mean(sigma_hat), sigma_hat_true, tolerance = 0.1) m <- N y_idx <- sample(1:N, size = m) y <- y_true[y_idx] res <- loo:::srs_diff_est(y_approx, y, y_idx) expect_equal(res$y_hat, 500500, tolerance = 0.0001) expect_equal(res$v_y_hat, 0, tolerance = 0.0001) expect_equal(sqrt(res$hat_v_y), sigma_hat_true, tolerance = 0.1) }) test_that("srs_est works as expected", { set.seed(1234) # Cochran 1976 example Table 2.2 y <- c( rep(42, 23), rep(41, 4), 36, 32, 29, 27, 27, 23, 19, 16, 16, 15, 15, 14, 11, 10, 9, 7, 6, 6, 6, 5, 5, 4, 3 ) expect_equal(sum(y), 1471) approx_loo <- rep(0L, 676) expect_equal(sum(y^2), 54497) res 
<- loo:::srs_est(y = y, approx_loo) expect_equal(res$y_hat, 19888, tolerance = 0.0001) expect_equal(res$v_y_hat, 676^2 * 229 * (1 - 0.074) / 50, tolerance = 0.0001) expect_equal(res$hat_v_y, 676 * var(y), tolerance = 0.0001) # Simulation example set.seed(1234) N <- 1000 y_true <- 1:N sigma_hat_true <- sqrt(N * sum((y_true - mean(y_true))^2) / length(y_true)) m <- 100 y_hat <- se_y_hat <- sigma_hat <- numeric(10000) for (i in 1:10000) { y_idx <- sample(1:N, size = m) y <- y_true[y_idx] res <- loo:::srs_est(y = y, y_approx = y_true) y_hat[i] <- res$y_hat se_y_hat[i] <- sqrt(res$v_y_hat) sigma_hat[i] <- sqrt(res$hat_v_y) } expect_equal(mean(y_hat), sum(y_true), tolerance = 0.1) in_ki <- y_hat + 2 * se_y_hat > sum(y_true) & y_hat - 2 * se_y_hat < sum(y_true) expect_equal(mean(in_ki), 0.95, tolerance = 0.01) # Should be unbiased expect_equal(mean(sigma_hat), sigma_hat_true, tolerance = 0.1) m <- N y_idx <- sample(1:N, size = m) y <- y_true[y_idx] res <- loo:::srs_est(y, y_true) expect_equal(res$y_hat, 500500, tolerance = 0.0001) expect_equal(res$v_y_hat, 0, tolerance = 0.0001) }) loo/tests/testthat/test_psislw.R0000644000176200001440000000377615064301501016560 0ustar liggesusersSW <- suppressWarnings set.seed(123) x <- matrix(rnorm(5000), 100, 50) expect_deprecated <- function(object) { testthat::expect_warning(object, "deprecated", ignore.case = TRUE) } test_that("psislw throws deprecation warning", { expect_deprecated(psislw(x[, 1])) }) test_that("psislw handles special cases, throws appropriate errors/warnings", { expect_snapshot(psis <- psislw(x[, 1], wcp = 0.01)) expect_true(is.infinite(psis$pareto_k)) expect_error( expect_deprecated(psislw(wcp = 0.2)), regexp = "'lw' or 'llfun' and 'llargs' must be specified" ) }) test_that("psislw returns expected results", { psis <- SW(psislw(x[, 1])) lw <- psis$lw_smooth expect_equal(length(psis), 2L) expect_equal(nrow(lw), nrow(x)) expect_equal(lw[1], -5.6655489517740527106) expect_equal(lw[50], -5.188442371693668953) expect_equal(range(lw), c(-7.4142421808626526314, -2.6902215137943321643)) expect_equal(psis$pareto_k, 0.17364505906017813075) }) test_that("psislw function and matrix methods return same result", { set.seed(024) # fake data and posterior draws N <- 50 K <- 10 S <- 100 a0 <- 3 b0 <- 2 p <- rbeta(1, a0, b0) y <- rbinom(N, size = K, prob = p) a <- a0 + sum(y) b <- b0 + N * K - sum(y) draws <- rbeta(S, a, b) data <- data.frame(y, K) llfun <- function(i, data, draws) { dbinom(data$y, size = data$K, prob = draws, log = TRUE) } psislw_with_fn <- SW(psislw(llfun = llfun, llargs = nlist(data, draws, N, S))) # Check that we get same answer if using log-likelihood matrix ll <- sapply(1:N, function(i) llfun(i, data[i, , drop = FALSE], draws)) psislw_with_mat <- SW(psislw(-ll)) expect_equal(psislw_with_fn, psislw_with_mat) }) test_that("psislw_warnings helper works properly", { k <- c(0, 0.1, 0.55, 0.75) expect_silent(psislw_warnings(k[1:2])) expect_warning( psislw_warnings(k[1:3]), "Some Pareto k diagnostic values are slightly high" ) expect_warning( psislw_warnings(k), "Some Pareto k diagnostic values are too high" ) }) loo/tests/testthat/test_loo_subsampling.R0000644000176200001440000010173415064301501020425 0ustar liggesusersoptions(mc.cores = 1) test_that("overall loo_subampling works as expected (compared with loo) for diff_est", { set.seed(123) N <- 1000 K <- 10 S <- 1000 a0 <- 3 b0 <- 2 p <- 0.7 y <- rbinom(N, size = K, prob = p) a <- a0 + sum(y) b <- b0 + N * K - sum(y) fake_posterior <- as.matrix(rbeta(S, a, b)) fake_data <- data.frame(y, 
K) rm(N, K, S, a0, b0, p, y, a, b) llfun_test <- function(data_i, draws) { # each time called internally within loo the arguments will be equal to: # data_i: ith row of fdata (fake_data[i,, drop=FALSE]) # draws: entire fake_posterior matrix dbinom(data_i$y, size = data_i$K, prob = draws, log = TRUE) } expect_silent( true_loo <- loo( llfun_test, draws = fake_posterior, data = fake_data, r_eff = rep(1, nrow(fake_data)) ) ) expect_s3_class(true_loo, "psis_loo") expect_silent( loo_ss <- loo_subsample( x = llfun_test, draws = fake_posterior, data = fake_data, observations = 500, loo_approximation = "plpd", r_eff = rep(1, nrow(fake_data)) ) ) expect_s3_class(loo_ss, "psis_loo_ss") # Check consistency expect_equal( loo_ss$pointwise[, "elpd_loo_approx"], loo_ss$loo_subsampling$elpd_loo_approx[loo_ss$pointwise[, "idx"]], ignore_attr = TRUE ) # Expect values z <- 2 expect_lte( loo_ss$estimates["elpd_loo", "Estimate"] - z * loo_ss$estimates["elpd_loo", "subsampling SE"], true_loo$estimates["elpd_loo", "Estimate"] ) expect_gte( loo_ss$estimates["elpd_loo", "Estimate"] + z * loo_ss$estimates["elpd_loo", "subsampling SE"], true_loo$estimates["elpd_loo", "Estimate"] ) expect_lte( loo_ss$estimates["p_loo", "Estimate"] - z * loo_ss$estimates["p_loo", "subsampling SE"], true_loo$estimates["p_loo", "Estimate"] ) expect_gte( loo_ss$estimates["p_loo", "Estimate"] + z * loo_ss$estimates["p_loo", "subsampling SE"], true_loo$estimates["p_loo", "Estimate"] ) expect_lte( loo_ss$estimates["looic", "Estimate"] - z * loo_ss$estimates["looic", "subsampling SE"], true_loo$estimates["looic", "Estimate"] ) expect_gte( loo_ss$estimates["looic", "Estimate"] + z * loo_ss$estimates["looic", "subsampling SE"], true_loo$estimates["looic", "Estimate"] ) expect_failure(expect_equal( true_loo$estimates["elpd_loo", "Estimate"], loo_ss$estimates["elpd_loo", "Estimate"], tolerance = 0.00000001 )) expect_failure(expect_equal( true_loo$estimates["p_loo", "Estimate"], loo_ss$estimates["p_loo", "Estimate"], tolerance = 0.00000001 )) expect_failure(expect_equal( true_loo$estimates["looic", "Estimate"], loo_ss$estimates["looic", "Estimate"], tolerance = 0.00000001 )) # Test that observations works as expected expect_message( loo_ss2 <- loo_subsample( x = llfun_test, draws = fake_posterior, data = fake_data, observations = obs_idx(loo_ss), loo_approximation = "plpd", r_eff = rep(1, nrow(fake_data)) ) ) expect_equal(loo_ss2$estimates, loo_ss$estimates, tolerance = 0.00000001) expect_silent( loo_ss2 <- loo_subsample( x = llfun_test, draws = fake_posterior, data = fake_data, observations = loo_ss, loo_approximation = "plpd", r_eff = rep(1, nrow(fake_data)) ) ) expect_equal(loo_ss2$estimates, loo_ss$estimates, tolerance = 0.00000001) # Test lpd expect_silent( loo_ss_lpd <- loo_subsample( x = llfun_test, draws = fake_posterior, data = fake_data, observations = 500, loo_approximation = "lpd", r_eff = rep(1, nrow(fake_data)) ) ) expect_s3_class(loo_ss_lpd, "psis_loo_ss") z <- 2 expect_lte( loo_ss_lpd$estimates["elpd_loo", "Estimate"] - z * loo_ss_lpd$estimates["elpd_loo", "subsampling SE"], true_loo$estimates["elpd_loo", "Estimate"] ) expect_gte( loo_ss_lpd$estimates["elpd_loo", "Estimate"] + z * loo_ss_lpd$estimates["elpd_loo", "subsampling SE"], true_loo$estimates["elpd_loo", "Estimate"] ) expect_lte( loo_ss_lpd$estimates["p_loo", "Estimate"] - z * loo_ss_lpd$estimates["p_loo", "subsampling SE"], true_loo$estimates["p_loo", "Estimate"] ) expect_gte( loo_ss_lpd$estimates["p_loo", "Estimate"] + z * loo_ss_lpd$estimates["p_loo", "subsampling SE"], 
true_loo$estimates["p_loo", "Estimate"] ) expect_lte( loo_ss_lpd$estimates["looic", "Estimate"] - z * loo_ss_lpd$estimates["looic", "subsampling SE"], true_loo$estimates["looic", "Estimate"] ) expect_gte( loo_ss_lpd$estimates["looic", "Estimate"] + z * loo_ss_lpd$estimates["looic", "subsampling SE"], true_loo$estimates["looic", "Estimate"] ) expect_failure(expect_equal( true_loo$estimates["elpd_loo", "Estimate"], loo_ss_lpd$estimates["elpd_loo", "Estimate"], tolerance = 0.00000001 )) expect_failure(expect_equal( true_loo$estimates["p_loo", "Estimate"], loo_ss_lpd$estimates["p_loo", "Estimate"], tolerance = 0.00000001 )) expect_failure(expect_equal( true_loo$estimates["looic", "Estimate"], loo_ss_lpd$estimates["looic", "Estimate"], tolerance = 0.00000001 )) expect_silent( loo_ss_lpd10 <- loo_subsample( x = llfun_test, draws = fake_posterior, data = fake_data, observations = 500, loo_approximation = "lpd", loo_approximation_draws = 10, r_eff = rep(1, nrow(fake_data)) ) ) expect_s3_class(loo_ss_lpd10, "psis_loo_ss") z <- 2 expect_lte( loo_ss_lpd10$estimates["elpd_loo", "Estimate"] - z * loo_ss_lpd10$estimates["elpd_loo", "subsampling SE"], true_loo$estimates["elpd_loo", "Estimate"] ) expect_gte( loo_ss_lpd10$estimates["elpd_loo", "Estimate"] + z * loo_ss_lpd10$estimates["elpd_loo", "subsampling SE"], true_loo$estimates["elpd_loo", "Estimate"] ) expect_lte( loo_ss_lpd10$estimates["p_loo", "Estimate"] - z * loo_ss_lpd10$estimates["p_loo", "subsampling SE"], true_loo$estimates["p_loo", "Estimate"] ) expect_gte( loo_ss_lpd10$estimates["p_loo", "Estimate"] + z * loo_ss_lpd10$estimates["p_loo", "subsampling SE"], true_loo$estimates["p_loo", "Estimate"] ) expect_lte( loo_ss_lpd10$estimates["looic", "Estimate"] - z * loo_ss_lpd10$estimates["looic", "subsampling SE"], true_loo$estimates["looic", "Estimate"] ) expect_gte( loo_ss_lpd10$estimates["looic", "Estimate"] + z * loo_ss_lpd10$estimates["looic", "subsampling SE"], true_loo$estimates["looic", "Estimate"] ) expect_failure(expect_equal( true_loo$estimates["elpd_loo", "Estimate"], loo_ss_lpd10$estimates["elpd_loo", "Estimate"], tolerance = 0.00000001 )) expect_failure(expect_equal( true_loo$estimates["p_loo", "Estimate"], loo_ss_lpd10$estimates["p_loo", "Estimate"], tolerance = 0.00000001 )) expect_failure(expect_equal( true_loo$estimates["looic", "Estimate"], loo_ss_lpd10$estimates["looic", "Estimate"], tolerance = 0.00000001 )) # Test conversion of objects expect_silent(true_loo_2 <- loo:::as.psis_loo.psis_loo(true_loo)) expect_silent(true_loo_ss <- loo:::as.psis_loo_ss.psis_loo(true_loo)) expect_s3_class(true_loo_ss, "psis_loo_ss") expect_silent(true_loo_conv <- loo:::as.psis_loo.psis_loo_ss(true_loo_ss)) expect_failure(expect_s3_class(true_loo_conv, "psis_loo_ss")) expect_equal(true_loo_conv, true_loo) expect_error(loo:::as.psis_loo.psis_loo_ss(loo_ss)) }) test_that("loo with subsampling of all observations works as ordinary loo.", { set.seed(123) N <- 1000 K <- 10 S <- 1000 a0 <- 3 b0 <- 2 p <- 0.7 y <- rbinom(N, size = K, prob = p) a <- a0 + sum(y) b <- b0 + N * K - sum(y) fake_posterior <- as.matrix(rbeta(S, a, b)) fake_data <- data.frame(y, K) rm(N, K, S, a0, b0, p, y, a, b) llfun_test <- function(data_i, draws) { dbinom(data_i$y, size = data_i$K, prob = draws, log = TRUE) } expect_silent( true_loo <- loo( llfun_test, draws = fake_posterior, data = fake_data, r_eff = rep(1, nrow(fake_data)) ) ) expect_s3_class(true_loo, "psis_loo") expect_silent( loo_ss <- loo_subsample( x = llfun_test, draws = fake_posterior, data = fake_data, observations 
= 1000, loo_approximation = "plpd", r_eff = rep(1, nrow(fake_data)) ) ) expect_s3_class(loo_ss, "psis_loo_ss") expect_error( loo_ss <- loo_subsample( x = llfun_test, draws = fake_posterior, data = fake_data, observations = 1001, loo_approximation = "plpd", r_eff = rep(1, nrow(fake_data)) ) ) expect_equal( true_loo$estimates["elpd_loo", "Estimate"], loo_ss$estimates["elpd_loo", "Estimate"], tolerance = 0.00000001 ) expect_equal( true_loo$estimates["p_loo", "Estimate"], loo_ss$estimates["p_loo", "Estimate"], tolerance = 0.00000001 ) expect_equal( true_loo$estimates["looic", "Estimate"], loo_ss$estimates["looic", "Estimate"], tolerance = 0.00000001 ) expect_equal(dim(true_loo), dim(loo_ss)) expect_equal(true_loo$diagnostics, loo_ss$diagnostics) expect_equal(max(loo_ss$pointwise[, "m_i"]), 1) }) test_that("overall loo_subsample works with diff_srs as expected (compared with loo)", { set.seed(123) N <- 1000 K <- 10 S <- 1000 a0 <- 3 b0 <- 2 p <- 0.7 y <- rbinom(N, size = K, prob = p) a <- a0 + sum(y) b <- b0 + N * K - sum(y) fake_posterior <- as.matrix(rbeta(S, a, b)) fake_data <- data.frame(y, K) rm(N, K, S, a0, b0, p, y, a, b) llfun_test <- function(data_i, draws) { dbinom(data_i$y, size = data_i$K, prob = draws, log = TRUE) } expect_silent( true_loo <- loo( x = llfun_test, draws = fake_posterior, data = fake_data, r_eff = rep(1, nrow(fake_data)) ) ) expect_silent( loo_ss <- loo_subsample( x = llfun_test, draws = fake_posterior, data = fake_data, observations = 200, loo_approximation = "plpd", estimator = "diff_srs", r_eff = rep(1, nrow(fake_data)) ) ) expect_equal( true_loo$estimates[1, 1], loo_ss$estimates[1, 1], tolerance = 0.1 ) }) test_that("Test the srs estimator with 'none' approximation", { set.seed(123) N <- 1000 K <- 10 S <- 1000 a0 <- 3 b0 <- 2 p <- 0.7 y <- rbinom(N, size = K, prob = p) a <- a0 + sum(y) b <- b0 + N * K - sum(y) fake_posterior <- as.matrix(rbeta(S, a, b)) fake_data <- data.frame(y, K) rm(N, K, S, a0, b0, p, y, a, b) llfun_test <- function(data_i, draws) { dbinom(data_i$y, size = data_i$K, prob = draws, log = TRUE) } expect_silent( true_loo <- loo( llfun_test, draws = fake_posterior, data = fake_data, r_eff = rep(1, nrow(fake_data)) ) ) expect_s3_class(true_loo, "psis_loo") expect_silent( loo_ss <- loo_subsample( x = llfun_test, draws = fake_posterior, data = fake_data, observations = 200, loo_approximation = "none", estimator = "srs", r_eff = rep(1, nrow(fake_data)) ) ) expect_s3_class(loo_ss, "psis_loo_ss") expect_error( loo_ss <- loo_subsample( x = llfun_test, draws = fake_posterior, data = fake_data, observations = 1100, loo_approximation = "none", estimator = "srs", r_eff = rep(1, nrow(fake_data)) ) ) expect_equal(length(obs_idx(loo_ss)), nobs(loo_ss)) # Check consistency expect_equal( loo_ss$pointwise[, "elpd_loo_approx"], loo_ss$loo_subsampling$elpd_loo_approx[loo_ss$pointwise[, "idx"]], ignore_attr = TRUE ) # Expect values z <- 2 expect_lte( loo_ss$estimates["elpd_loo", "Estimate"] - z * loo_ss$estimates["elpd_loo", "subsampling SE"], true_loo$estimates["elpd_loo", "Estimate"] ) expect_gte( loo_ss$estimates["elpd_loo", "Estimate"] + z * loo_ss$estimates["elpd_loo", "subsampling SE"], true_loo$estimates["elpd_loo", "Estimate"] ) expect_lte( loo_ss$estimates["p_loo", "Estimate"] - z * loo_ss$estimates["p_loo", "subsampling SE"], true_loo$estimates["p_loo", "Estimate"] ) expect_gte( loo_ss$estimates["p_loo", "Estimate"] + z * loo_ss$estimates["p_loo", "subsampling SE"], true_loo$estimates["p_loo", "Estimate"] ) expect_lte( loo_ss$estimates["looic", "Estimate"] 
- z * loo_ss$estimates["looic", "subsampling SE"], true_loo$estimates["looic", "Estimate"] ) expect_gte( loo_ss$estimates["looic", "Estimate"] + z * loo_ss$estimates["looic", "subsampling SE"], true_loo$estimates["looic", "Estimate"] ) expect_failure(expect_equal( true_loo$estimates["elpd_loo", "Estimate"], loo_ss$estimates["elpd_loo", "Estimate"], tolerance = 0.00000001 )) expect_failure(expect_equal( true_loo$estimates["p_loo", "Estimate"], loo_ss$estimates["p_loo", "Estimate"], tolerance = 0.00000001 )) expect_failure(expect_equal( true_loo$estimates["looic", "Estimate"], loo_ss$estimates["looic", "Estimate"], tolerance = 0.00000001 )) }) test_that("Test the Hansen-Hurwitz estimator", { set.seed(123) N <- 1000 K <- 10 S <- 1000 a0 <- 3 b0 <- 2 p <- 0.7 y <- rbinom(N, size = K, prob = p) a <- a0 + sum(y) b <- b0 + N * K - sum(y) fake_posterior <- as.matrix(rbeta(S, a, b)) fake_data <- data.frame(y, K) rm(N, K, S, a0, b0, p, y, a, b) llfun_test <- function(data_i, draws) { dbinom(data_i$y, size = data_i$K, prob = draws, log = TRUE) } expect_silent( true_loo <- loo( llfun_test, draws = fake_posterior, data = fake_data, r_eff = rep(1, nrow(fake_data)) ) ) expect_s3_class(true_loo, "psis_loo") expect_silent( loo_ss <- loo_subsample( x = llfun_test, draws = fake_posterior, data = fake_data, observations = 300, loo_approximation = "plpd", estimator = "hh_pps", r_eff = rep(1, nrow(fake_data)) ) ) expect_s3_class(loo_ss, "psis_loo_ss") expect_silent( loo_ss_max <- loo_subsample( x = llfun_test, draws = fake_posterior, data = fake_data, observations = 1100, loo_approximation = "plpd", estimator = "hh_pps", r_eff = rep(1, nrow(fake_data)) ) ) expect_s3_class(loo_ss_max, "psis_loo_ss") expect_silent( loo_ss_max2 <- update( loo_ss, draws = fake_posterior, data = fake_data, observations = 1100, r_eff = rep(1, nrow(fake_data)) ) ) expect_equal(nobs(loo_ss_max2), 1100) expect_gt(max(loo_ss_max2$pointwise[, "m_i"]), 1) expect_error( loo_ss_max2 <- update( loo_ss_max2, draws = fake_posterior, data = fake_data, observations = 300, r_eff = rep(1, nrow(fake_data)) ) ) expect_silent( loo_ss_max3 <- update( loo_ss, draws = fake_posterior, data = fake_data, observations = 1500, r_eff = rep(1, nrow(fake_data)) ) ) expect_silent( loo_ss2 <- update( loo_ss, draws = fake_posterior, data = fake_data, observations = loo_ss, r_eff = rep(1, nrow(fake_data)) ) ) expect_error( loo_ss2 <- update( loo_ss, draws = fake_posterior, data = fake_data, observations = loo_ss, loo_approximation = "lpd", r_eff = rep(1, nrow(fake_data)) ) ) expect_equal(loo_ss$estimates, loo_ss2$estimates) expect_equal(length(obs_idx(loo_ss_max)), length(obs_idx(loo_ss_max2))) expect_equal(length(obs_idx(loo_ss_max)), nobs(loo_ss_max)) # Check consistency expect_equal( loo_ss$pointwise[, "elpd_loo_approx"], loo_ss$loo_subsampling$elpd_loo_approx[loo_ss$pointwise[, "idx"]], ignore_attr = TRUE ) # Check consistency expect_equal( loo_ss_max$pointwise[, "elpd_loo_approx"], loo_ss_max$loo_subsampling$elpd_loo_approx[loo_ss_max$pointwise[, "idx"]], ignore_attr = TRUE ) # Expect values z <- 2 expect_lte( loo_ss$estimates["elpd_loo", "Estimate"] - z * loo_ss$estimates["elpd_loo", "subsampling SE"], true_loo$estimates["elpd_loo", "Estimate"] ) expect_gte( loo_ss$estimates["elpd_loo", "Estimate"] + z * loo_ss$estimates["elpd_loo", "subsampling SE"], true_loo$estimates["elpd_loo", "Estimate"] ) expect_lte( loo_ss$estimates["p_loo", "Estimate"] - z * loo_ss$estimates["p_loo", "subsampling SE"], true_loo$estimates["p_loo", "Estimate"] ) expect_gte( 
loo_ss$estimates["p_loo", "Estimate"] + z * loo_ss$estimates["p_loo", "subsampling SE"], true_loo$estimates["p_loo", "Estimate"] ) expect_lte( loo_ss$estimates["looic", "Estimate"] - z * loo_ss$estimates["looic", "subsampling SE"], true_loo$estimates["looic", "Estimate"] ) expect_gte( loo_ss$estimates["looic", "Estimate"] + z * loo_ss$estimates["looic", "subsampling SE"], true_loo$estimates["looic", "Estimate"] ) expect_failure(expect_equal( true_loo$estimates["elpd_loo", "Estimate"], loo_ss$estimates["elpd_loo", "Estimate"], tolerance = 0.00000001 )) expect_failure(expect_equal( true_loo$estimates["p_loo", "Estimate"], loo_ss$estimates["p_loo", "Estimate"], tolerance = 0.00000001 )) expect_failure(expect_equal( true_loo$estimates["looic", "Estimate"], loo_ss$estimates["looic", "Estimate"], tolerance = 0.00000001 )) expect_lte( loo_ss_max$estimates["elpd_loo", "Estimate"] - z * loo_ss_max$estimates["elpd_loo", "subsampling SE"], true_loo$estimates["elpd_loo", "Estimate"] ) expect_gte( loo_ss_max$estimates["elpd_loo", "Estimate"] + z * loo_ss_max$estimates["elpd_loo", "subsampling SE"], true_loo$estimates["elpd_loo", "Estimate"] ) }) test_that("update.psis_loo_ss works as expected (compared with loo)", { set.seed(123) N <- 1000 K <- 10 S <- 1000 a0 <- 3 b0 <- 2 p <- 0.7 y <- rbinom(N, size = K, prob = p) a <- a0 + sum(y) b <- b0 + N * K - sum(y) fake_posterior <- as.matrix(rbeta(S, a, b)) fake_data <- data.frame(y, K) rm(N, K, S, a0, b0, p, y, a, b) llfun_test <- function(data_i, draws) { dbinom(data_i$y, size = data_i$K, prob = draws, log = TRUE) } expect_silent( true_loo <- loo( llfun_test, draws = fake_posterior, data = fake_data, r_eff = rep(1, nrow(fake_data)) ) ) expect_s3_class(true_loo, "psis_loo") expect_silent( loo_ss <- loo_subsample( x = llfun_test, draws = fake_posterior, data = fake_data, observations = 500, loo_approximation = "plpd", r_eff = rep(1, nrow(fake_data)) ) ) expect_s3_class(loo_ss, "psis_loo_ss") # Check error when draws and data dimensions differ expect_error( loo_ss2 <- update( object = loo_ss, draws = cbind(fake_posterior, 1), data = fake_data, observations = 600, r_eff = rep(1, nrow(fake_data)) ) ) expect_error( loo_ss2 <- update( object = loo_ss, draws = fake_posterior, data = fake_data[-1, ], observations = 600, r_eff = rep(1, nrow(fake_data)) ) ) # Add tests for adding observations expect_silent( loo_ss2 <- update( object = loo_ss, draws = fake_posterior, data = fake_data, observations = 600, r_eff = rep(1, nrow(fake_data)) ) ) expect_equal(dim(loo_ss2)[2] - dim(loo_ss)[2], expected = 100) expect_equal(dim(loo_ss2)[2], expected = dim(loo_ss2$pointwise)[1]) expect_length(loo_ss2$diagnostics$pareto_k, 600) expect_length(loo_ss2$diagnostics$n_eff, 600) for (i in 1:nrow(loo_ss2$estimates)) { expect_lt( loo_ss2$estimates[i, "subsampling SE"], loo_ss$estimates[i, "subsampling SE"] ) } expect_silent( loo_ss2b <- update( object = loo_ss, draws = fake_posterior, data = fake_data ) ) expect_equal(loo_ss2b$estimates, loo_ss$estimates) expect_equal(loo_ss2b$pointwise, loo_ss$pointwise) expect_equal(loo_ss2b$diagnostics$pareto_k, loo_ss$diagnostics$pareto_k) expect_equal(loo_ss2b$diagnostics$n_eff, loo_ss$diagnostics$n_eff) expect_silent( loo_ss3 <- update( object = loo_ss2, draws = fake_posterior, data = fake_data, observations = loo_ss ) ) expect_equal(loo_ss3$estimates, loo_ss$estimates) expect_equal(loo_ss3$pointwise, loo_ss$pointwise) expect_equal(loo_ss3$diagnostics$pareto_k, loo_ss$diagnostics$pareto_k) expect_equal(loo_ss3$diagnostics$n_eff, 
loo_ss$diagnostics$n_eff) expect_silent( loo_ss4 <- update( object = loo_ss, draws = fake_posterior, data = fake_data, observations = 1000, r_eff = rep(1, nrow(fake_data)) ) ) expect_equal(loo_ss4$estimates[, 1], true_loo$estimates[, 1]) expect_equal( loo_ss4$estimates[, 2], true_loo$estimates[, 2], tolerance = 0.001 ) expect_silent( loo_ss5 <- loo_subsample( x = llfun_test, draws = fake_posterior, data = fake_data, observations = 1000, loo_approximation = "plpd", r_eff = rep(1, nrow(fake_data)) ) ) ss4_order <- order(loo_ss4$pointwise[, "idx"]) expect_equal( loo_ss4$pointwise[ss4_order, c(1, 3, 4)], loo_ss5$pointwise[, c(1, 3, 4)] ) expect_equal( loo_ss4$diagnostics$pareto_k[ss4_order], loo_ss5$diagnostics$pareto_k ) expect_equal(loo_ss4$diagnostics$n_eff[ss4_order], loo_ss5$diagnostics$n_eff) expect_equal( loo_ss4$pointwise[ss4_order, c(1, 3, 4)], true_loo$pointwise[, c(1, 3, 4)] ) expect_equal( loo_ss4$diagnostics$pareto_k[ss4_order], true_loo$diagnostics$pareto_k ) expect_equal(loo_ss4$diagnostics$n_eff[ss4_order], true_loo$diagnostics$n_eff) expect_error( loo_ss_min <- update( object = loo_ss, draws = fake_posterior, data = fake_data, observations = 50, r_eff = rep(1, nrow(fake_data)) ) ) expect_silent(true_loo_ss <- loo:::as.psis_loo_ss.psis_loo(true_loo)) expect_silent( loo_ss_subset0 <- update( true_loo_ss, observations = loo_ss, r_eff = rep(1, nrow(fake_data)) ) ) expect_true(identical(obs_idx(loo_ss_subset0), obs_idx(loo_ss))) expect_silent( loo_ss_subset1 <- update( object = loo_ss, observations = loo_ss, r_eff = rep(1, nrow(fake_data)) ) ) expect_message( loo_ss_subset2 <- update( object = loo_ss, observations = obs_idx(loo_ss)[1:10], r_eff = rep(1, nrow(fake_data)) ) ) expect_equal(nobs(loo_ss_subset2), 10) expect_silent(true_loo_ss <- loo:::as.psis_loo_ss.psis_loo(true_loo)) set.seed(4711) expect_silent( loo_ss2 <- update( object = loo_ss, draws = fake_posterior, data = fake_data, observations = 600, r_eff = rep(1, nrow(fake_data)) ) ) expect_silent( loo_ss2_subset0 <- update( object = true_loo_ss, observations = loo_ss2, r_eff = rep(1, nrow(fake_data)) ) ) expect_true(setequal(obs_idx(loo_ss2), obs_idx(loo_ss2_subset0))) expect_true(identical(obs_idx(loo_ss2), obs_idx(loo_ss2_subset0))) expect_true(identical(loo_ss2$diagnostic, loo_ss2_subset0$diagnostic)) # Add tests for changing approx variable expect_silent( loo_ss_lpd <- update( object = loo_ss, draws = fake_posterior, data = fake_data, loo_approximation = "lpd", r_eff = rep(1, nrow(fake_data)) ) ) expect_failure(expect_equal( loo_ss_lpd$loo_subsampling$elpd_loo_approx, loo_ss$loo_subsampling$elpd_loo_approx )) expect_equal(dim(loo_ss_lpd)[2], dim(loo_ss)[2]) expect_equal(dim(loo_ss_lpd)[2], dim(loo_ss_lpd$pointwise)[1]) expect_length(loo_ss_lpd$diagnostics$pareto_k, 500) expect_length(loo_ss_lpd$diagnostics$n_eff, 500) expect_failure(expect_equal( loo_ss_lpd$estimates[1, "subsampling SE"], loo_ss$estimates[1, "subsampling SE"] )) expect_failure(expect_equal( loo_ss_lpd$estimates[3, "subsampling SE"], loo_ss$estimates[3, "subsampling SE"] )) }) test_that("loo_compare_subsample", { skip_on_cran() # to get under cran check time limit set.seed(123) N <- 1000 x1 <- rnorm(N) x2 <- rnorm(N) x3 <- rnorm(N) sigma <- 2 y <- rnorm(N, 1 + 2 * x1 - 2 * x2 - 1 * x3, sd = sigma) X <- cbind("x0" = rep(1, N), x1, x2, x3) # Generate samples from posterior samples_blin <- function(X, y, sigma, draws = 1000) { XtX <- t(X) %*% X b_hat <- solve(XtX) %*% (t(X) %*% y) Lambda_n = XtX + diag(ncol(X)) mu_n <- solve(Lambda_n) %*% (XtX %*% b_hat + 
diag(ncol(X)) %*% rep(0, ncol(X))) L <- t(chol(sigma^2 * solve(Lambda_n))) draws_mat <- matrix(0, ncol = ncol(X), nrow = draws) for (i in 1:draws) { z <- rnorm(length(mu_n)) draws_mat[i, ] <- L %*% z + mu_n } draws_mat } fake_posterior1 <- samples_blin(X[, 1:2], y, sigma, draws = 1000) fake_posterior2 <- samples_blin(X[, 1:3], y, sigma, draws = 1000) fake_posterior3 <- samples_blin(X, y, sigma, draws = 1000) fake_data1 <- data.frame(y, X[, 1:2]) fake_data2 <- data.frame(y, X[, 1:3]) fake_data3 <- data.frame(y, X) llfun_test <- function(data_i, draws) { dnorm( x = data_i$y, mean = draws %*% t(data_i[, -1, drop = FALSE]), sd = sigma, log = TRUE ) } expect_silent( l1 <- loo( llfun_test, data = fake_data1, draws = fake_posterior1, r_eff = rep(1, N) ) ) expect_silent( l2 <- loo( llfun_test, data = fake_data2, draws = fake_posterior2, r_eff = rep(1, N) ) ) expect_silent( l3 <- loo( llfun_test, data = fake_data3, draws = fake_posterior3, r_eff = rep(1, N) ) ) expect_silent( lss1 <- loo_subsample( llfun_test, data = fake_data1, draws = fake_posterior1, observations = 100, r_eff = rep(1, N) ) ) expect_silent( lss2 <- loo_subsample( llfun_test, data = fake_data2, draws = fake_posterior2, observations = 100, r_eff = rep(1, N) ) ) expect_silent( lss3 <- loo_subsample( llfun_test, data = fake_data3, draws = fake_posterior3, observations = 100, r_eff = rep(1, N) ) ) expect_silent( lss2o1 <- loo_subsample( llfun_test, data = fake_data2, draws = fake_posterior2, observations = lss1, r_eff = rep(1, N) ) ) expect_silent( lss3o1 <- loo_subsample( llfun_test, data = fake_data3, draws = fake_posterior3, observations = lss1, r_eff = rep(1, N) ) ) expect_silent( lss2hh <- loo_subsample( llfun_test, data = fake_data2, draws = fake_posterior2, observations = 100, estimator = "hh_pps", r_eff = rep(1, N) ) ) expect_snapshot( lcss <- loo:::loo_compare.psis_loo_ss_list(x = list(lss1, lss2, lss3)) ) expect_warning( lcss2 <- loo:::loo_compare.psis_loo_ss_list(x = list(lss1, lss2, lss3o1)) ) expect_silent( lcsso <- loo:::loo_compare.psis_loo_ss_list(x = list(lss1, lss2o1, lss3o1)) ) expect_warning( lcssohh <- loo:::loo_compare.psis_loo_ss_list( x = list(lss1, lss2hh, lss3o1) ) ) expect_message( lcssf1 <- loo:::loo_compare.psis_loo_ss_list( x = list(loo:::as.psis_loo_ss.psis_loo(l1), lss2o1, lss3o1) ) ) expect_message( lcssf2 <- loo:::loo_compare.psis_loo_ss_list( x = list( loo:::as.psis_loo_ss.psis_loo(l1), lss2o1, loo:::as.psis_loo_ss.psis_loo(l3) ) ) ) expect_equal(lcss[, 1], lcsso[, 1], tolerance = 1) expect_equal(lcss2[, 1], lcsso[, 1], tolerance = 1) expect_equal(lcssohh[, 1], lcsso[, 1], tolerance = 1) expect_equal(lcssf1[, 1], lcsso[, 1], tolerance = 1) expect_equal(lcssf2[, 1], lcsso[, 1], tolerance = 1) expect_gt(lcss[, 2][2], lcsso[, 2][2]) expect_gt(lcss[, 2][3], lcsso[, 2][3]) expect_gt(lcss2[, 2][2], lcsso[, 2][2]) expect_equal(lcss2[, 2][3], lcsso[, 2][3]) expect_gt(lcssohh[, 2][2], lcsso[, 2][2]) expect_equal(lcssohh[, 2][3], lcsso[, 2][3]) expect_silent( lcss2m <- loo:::loo_compare.psis_loo_ss_list(x = list(lss2o1, lss3o1)) ) expect_equal(unname(lcss2m[,]), unname(lcsso[1:2, ])) expect_snapshot(lcssapi <- loo_compare(lss1, lss2, lss3)) expect_equal(lcssapi, lcss) expect_warning(lcssohhapi <- loo_compare(lss1, lss2hh, lss3o1)) expect_equal(lcssohhapi, lcssohh) expect_silent(lcss2mapi <- loo_compare(lss2o1, lss3o1)) expect_equal(lcss2mapi, lcss2m) }) test_that("Test 'tis' and 'sis'", { skip_on_cran() set.seed(123) N <- 1000 K <- 10 S <- 1000 a0 <- 3 b0 <- 2 p <- 0.7 y <- rbinom(N, size = K, prob = p) a <- 
a0 + sum(y) b <- b0 + N * K - sum(y) fake_posterior <- draws <- as.matrix(rbeta(S, a, b)) fake_data <- data.frame(y, K) rm(N, K, S, a0, b0, p, y, a, b) llfun_test <- function(data_i, draws) { dbinom(data_i$y, size = data_i$K, prob = draws, log = TRUE) } expect_silent( loo_ss_full <- loo_subsample( x = llfun_test, draws = fake_posterior, data = fake_data, observations = 1000, loo_approximation = "plpd", r_eff = rep(1, nrow(fake_data)) ) ) expect_silent( loo_ss_plpd <- loo_subsample( x = llfun_test, draws = fake_posterior, data = fake_data, observations = 100, loo_approximation = "plpd", r_eff = rep(1, nrow(fake_data)) ) ) expect_silent( loo_ss_tis_S1000 <- loo_subsample( x = llfun_test, draws = fake_posterior, data = fake_data, observations = 100, loo_approximation = "tis", r_eff = rep(1, nrow(fake_data)) ) ) expect_silent( loo_ss_tis_S100 <- loo_subsample( x = llfun_test, draws = fake_posterior, data = fake_data, observations = 100, loo_approximation = "tis", loo_approximation_draws = 100, r_eff = rep(1, nrow(fake_data)) ) ) expect_silent( loo_ss_tis_S10 <- loo_subsample( x = llfun_test, draws = fake_posterior, data = fake_data, observations = 100, loo_approximation = "tis", loo_approximation_draws = 10, r_eff = rep(1, nrow(fake_data)) ) ) expect_silent( loo_ss_sis_S1000 <- loo_subsample( x = llfun_test, draws = fake_posterior, data = fake_data, observations = 100, loo_approximation = "sis", r_eff = rep(1, nrow(fake_data)) ) ) expect_silent( loo_ss_sis_S100 <- loo_subsample( x = llfun_test, draws = fake_posterior, data = fake_data, observations = 100, loo_approximation = "sis", loo_approximation_draws = 100, r_eff = rep(1, nrow(fake_data)) ) ) expect_silent( loo_ss_sis_S10 <- loo_subsample( x = llfun_test, draws = fake_posterior, data = fake_data, observations = 100, loo_approximation = "sis", loo_approximation_draws = 10, r_eff = rep(1, nrow(fake_data)) ) ) SEs <- 4 expect_gt( loo_ss_tis_S1000$estimates["elpd_loo", "Estimate"] + SEs * loo_ss_tis_S1000$estimates["elpd_loo", "subsampling SE"], loo_ss_full$estimates["elpd_loo", "Estimate"] ) expect_lt( loo_ss_tis_S1000$estimates["elpd_loo", "Estimate"] - SEs * loo_ss_tis_S1000$estimates["elpd_loo", "subsampling SE"], loo_ss_full$estimates["elpd_loo", "Estimate"] ) expect_gt( loo_ss_tis_S100$estimates["elpd_loo", "Estimate"] + SEs * loo_ss_tis_S100$estimates["elpd_loo", "subsampling SE"], loo_ss_full$estimates["elpd_loo", "Estimate"] ) expect_lt( loo_ss_tis_S100$estimates["elpd_loo", "Estimate"] - SEs * loo_ss_tis_S100$estimates["elpd_loo", "subsampling SE"], loo_ss_full$estimates["elpd_loo", "Estimate"] ) expect_gt( loo_ss_tis_S10$estimates["elpd_loo", "Estimate"] + SEs * loo_ss_tis_S10$estimates["elpd_loo", "subsampling SE"], loo_ss_full$estimates["elpd_loo", "Estimate"] ) expect_lt( loo_ss_tis_S10$estimates["elpd_loo", "Estimate"] - SEs * loo_ss_tis_S10$estimates["elpd_loo", "subsampling SE"], loo_ss_full$estimates["elpd_loo", "Estimate"] ) expect_gt( loo_ss_sis_S1000$estimates["elpd_loo", "Estimate"] + SEs * loo_ss_sis_S1000$estimates["elpd_loo", "subsampling SE"], loo_ss_full$estimates["elpd_loo", "Estimate"] ) expect_lt( loo_ss_sis_S1000$estimates["elpd_loo", "Estimate"] - SEs * loo_ss_sis_S1000$estimates["elpd_loo", "subsampling SE"], loo_ss_full$estimates["elpd_loo", "Estimate"] ) expect_gt( loo_ss_sis_S100$estimates["elpd_loo", "Estimate"] + SEs * loo_ss_sis_S100$estimates["elpd_loo", "subsampling SE"], loo_ss_full$estimates["elpd_loo", "Estimate"] ) expect_lt( loo_ss_sis_S100$estimates["elpd_loo", "Estimate"] - SEs * 
loo_ss_sis_S100$estimates["elpd_loo", "subsampling SE"], loo_ss_full$estimates["elpd_loo", "Estimate"] ) expect_gt( loo_ss_sis_S10$estimates["elpd_loo", "Estimate"] + SEs * loo_ss_sis_S10$estimates["elpd_loo", "subsampling SE"], loo_ss_full$estimates["elpd_loo", "Estimate"] ) expect_lt( loo_ss_sis_S10$estimates["elpd_loo", "Estimate"] - SEs * loo_ss_sis_S10$estimates["elpd_loo", "subsampling SE"], loo_ss_full$estimates["elpd_loo", "Estimate"] ) }) loo/tests/testthat/test_pointwise.R0000644000176200001440000000174415064301501017251 0ustar liggesusersloo1 <- suppressWarnings(loo(example_loglik_matrix())) test_that("pointwise throws the right errors", { expect_error( pointwise(loo1, "xxx"), "'xxx' not found", fixed = TRUE ) expect_error( pointwise(loo1, c("elpd_loo", "p_loo")), "length(estimate) == 1 is not TRUE", fixed = TRUE ) expect_error( pointwise(loo1, 1), "is.character(estimate) is not TRUE", fixed = TRUE ) loo1$pointwise <- NULL expect_error( pointwise(loo1, "xxx"), "No pointwise estimates found" ) }) test_that("pointwise returns correct estimate", { expect_equal(pointwise(loo1, "elpd_loo"), loo1$pointwise[, "elpd_loo"]) expect_equal( pointwise(loo1, "mcse_elpd_loo"), loo1$pointwise[, "mcse_elpd_loo"] ) expect_equal(pointwise(loo1, "p_loo"), loo1$pointwise[, "p_loo"]) expect_equal(pointwise(loo1, "looic"), loo1$pointwise[, "looic"]) expect_equal( pointwise(loo1, "influence_pareto_k"), loo1$pointwise[, "influence_pareto_k"] ) }) loo/tests/testthat/test_loo_subsampling_cases.R0000644000176200001440000003714315064301501021605 0ustar liggesusersoptions(mc.cores = 1) test_that("Test loo_subsampling and loo_approx with radon data", { skip_on_cran() # avoid going over time limit for tests load(test_path("data-for-tests/test_radon_laplace_loo.rda")) # Rename to spot variable leaking errors llfun_test <- llfun log_p_test <- log_p log_g_test <- log_q draws_test <- draws data_test <- data rm(llfun, log_p, log_q, draws, data) set.seed(134) expect_silent( full_loo <- loo( llfun_test, draws = draws_test, data = data_test, r_eff = rep(1, nrow(data_test)) ) ) expect_s3_class(full_loo, "psis_loo") set.seed(134) expect_silent( loo_ss <- loo_subsample( x = llfun_test, draws = draws_test, data = data_test, observations = 200, loo_approximation = "plpd", r_eff = rep(1, nrow(data_test)) ) ) expect_s3_class(loo_ss, "psis_loo_ss") set.seed(134) expect_silent( loo_ap_ss <- loo_subsample( x = llfun_test, draws = draws_test, data = data_test, log_p = log_p_test, log_g = log_g_test, observations = 200, loo_approximation = "plpd", r_eff = rep(1, nrow(data_test)) ) ) expect_s3_class(loo_ap_ss, "psis_loo_ss") expect_s3_class(loo_ap_ss, "psis_loo_ap") expect_silent( loo_ap_ss_full <- loo_subsample( x = llfun_test, log_p = log_p_test, log_g = log_g_test, draws = draws_test, data = data_test, observations = NULL, loo_approximation = "plpd", r_eff = rep(1, nrow(data_test)) ) ) expect_failure(expect_s3_class(loo_ap_ss_full, "psis_loo_ss")) expect_s3_class(loo_ap_ss_full, "psis_loo_ap") # Expect similar results z <- 2 expect_lte( loo_ss$estimates["elpd_loo", "Estimate"] - z * loo_ss$estimates["elpd_loo", "subsampling SE"], full_loo$estimates["elpd_loo", "Estimate"] ) expect_gte( loo_ss$estimates["elpd_loo", "Estimate"] + z * loo_ss$estimates["elpd_loo", "subsampling SE"], full_loo$estimates["elpd_loo", "Estimate"] ) expect_lte( loo_ss$estimates["p_loo", "Estimate"] - z * loo_ss$estimates["p_loo", "subsampling SE"], full_loo$estimates["p_loo", "Estimate"] ) expect_gte( loo_ss$estimates["p_loo", "Estimate"] + z * 
loo_ss$estimates["p_loo", "subsampling SE"], full_loo$estimates["p_loo", "Estimate"] ) expect_lte( loo_ss$estimates["looic", "Estimate"] - z * loo_ss$estimates["looic", "subsampling SE"], full_loo$estimates["looic", "Estimate"] ) expect_gte( loo_ss$estimates["looic", "Estimate"] + z * loo_ss$estimates["looic", "subsampling SE"], full_loo$estimates["looic", "Estimate"] ) expect_failure(expect_equal( full_loo$estimates["elpd_loo", "Estimate"], loo_ss$estimates["elpd_loo", "Estimate"], tolerance = 0.00000001 )) expect_failure(expect_equal( full_loo$estimates["p_loo", "Estimate"], loo_ss$estimates["p_loo", "Estimate"], tolerance = 0.00000001 )) expect_failure(expect_equal( full_loo$estimates["looic", "Estimate"], loo_ss$estimates["looic", "Estimate"], tolerance = 0.00000001 )) z <- 2 expect_lte( loo_ap_ss$estimates["elpd_loo", "Estimate"] - z * loo_ap_ss$estimates["elpd_loo", "subsampling SE"], loo_ap_ss_full$estimates["elpd_loo", "Estimate"] ) expect_gte( loo_ap_ss$estimates["elpd_loo", "Estimate"] + z * loo_ap_ss$estimates["elpd_loo", "subsampling SE"], loo_ap_ss_full$estimates["elpd_loo", "Estimate"] ) expect_lte( loo_ap_ss$estimates["p_loo", "Estimate"] - z * loo_ap_ss$estimates["p_loo", "subsampling SE"], loo_ap_ss_full$estimates["p_loo", "Estimate"] ) expect_gte( loo_ap_ss$estimates["p_loo", "Estimate"] + z * loo_ap_ss$estimates["p_loo", "subsampling SE"], loo_ap_ss_full$estimates["p_loo", "Estimate"] ) expect_lte( loo_ap_ss$estimates["looic", "Estimate"] - z * loo_ap_ss$estimates["looic", "subsampling SE"], loo_ap_ss_full$estimates["looic", "Estimate"] ) expect_gte( loo_ap_ss$estimates["looic", "Estimate"] + z * loo_ap_ss$estimates["looic", "subsampling SE"], loo_ap_ss_full$estimates["looic", "Estimate"] ) expect_failure(expect_equal( loo_ap_ss_full$estimates["elpd_loo", "Estimate"], loo_ap_ss$estimates["elpd_loo", "Estimate"], tolerance = 0.00000001 )) expect_failure(expect_equal( loo_ap_ss_full$estimates["p_loo", "Estimate"], loo_ap_ss$estimates["p_loo", "Estimate"], tolerance = 0.00000001 )) expect_failure(expect_equal( loo_ap_ss_full$estimates["looic", "Estimate"], loo_ap_ss$estimates["looic", "Estimate"], tolerance = 0.00000001 )) # Correct printout expect_failure(expect_output( print(full_loo), "Posterior approximation correction used\\." )) expect_failure(expect_output( print(full_loo), "subsampled log-likelihood\nvalues" )) expect_failure(expect_output( print(loo_ss), "Posterior approximation correction used\\." )) expect_output(print(loo_ss), "subsampled log-likelihood\nvalues") expect_output(print(loo_ap_ss), "Posterior approximation correction used\\.") expect_output(print(loo_ap_ss), "subsampled log-likelihood\nvalues") expect_output( print(loo_ap_ss_full), "Posterior approximation correction used\\." 
) expect_failure(expect_output( print(loo_ap_ss_full), "subsampled log-likelihood\nvalues" )) # Test conversion of objects expect_silent(loo_ap_full <- loo:::as.psis_loo.psis_loo(loo_ap_ss_full)) expect_s3_class(loo_ap_full, "psis_loo_ap") expect_silent(loo_ap_full_ss <- loo:::as.psis_loo_ss.psis_loo(loo_ap_full)) expect_s3_class(loo_ap_full_ss, "psis_loo_ss") expect_s3_class(loo_ap_full_ss, "psis_loo_ap") expect_silent(loo_ap_full2 <- loo:::as.psis_loo.psis_loo_ss(loo_ap_full_ss)) expect_s3_class(loo_ap_full2, "psis_loo_ap") expect_failure(expect_s3_class(loo_ap_full2, "psis_loo_ss")) expect_equal(loo_ap_full2, loo_ap_full) # Test update set.seed(4712) expect_silent( loo_ss2 <- update( loo_ss, draws = draws_test, data = data_test, observations = 1000, r_eff = rep(1, nrow(data_test)) ) ) expect_gt(dim(loo_ss2)[2], dim(loo_ss)[2]) expect_gt(dim(loo_ss2$pointwise)[1], dim(loo_ss$pointwise)[1]) expect_equal(nobs(loo_ss), 200) expect_equal(nobs(loo_ss2), 1000) for (i in 1:nrow(loo_ss2$estimates)) { expect_lt( loo_ss2$estimates[i, "subsampling SE"], loo_ss$estimates[i, "subsampling SE"] ) } set.seed(4712) expect_silent( loo_ap_ss2 <- update( object = loo_ap_ss, draws = draws_test, data = data_test, observations = 2000 ) ) expect_gt(dim(loo_ap_ss2)[2], dim(loo_ap_ss)[2]) expect_gt(dim(loo_ap_ss2$pointwise)[1], dim(loo_ap_ss$pointwise)[1]) expect_equal(nobs(loo_ap_ss), 200) expect_equal(nobs(loo_ap_ss2), 2000) for (i in 1:nrow(loo_ap_ss2$estimates)) { expect_lt( loo_ap_ss2$estimates[i, "subsampling SE"], loo_ap_ss$estimates[i, "subsampling SE"] ) } expect_equal(round(full_loo$estimates), round(loo_ap_ss_full$estimates)) expect_failure(expect_equal(full_loo$estimates, loo_ap_ss_full$estimates)) expect_equal(dim(full_loo), dim(loo_ap_ss_full)) expect_s3_class(loo_ap_ss_full, "psis_loo_ap") }) test_that("Test the vignette", { skip_on_cran() # NOTE # If any of these test fails, the vignette probably needs to be updated if (FALSE) { # Generate vignette test case library("rstan") stan_code <- " data { int N; // number of data points int P; // number of predictors (including intercept) matrix[N,P] X; // predictors (including 1s for intercept) int y[N]; // binary outcome } parameters { vector[P] beta; } model { beta ~ normal(0, 1); y ~ bernoulli_logit(X * beta); } " # logistic <- function(x) {1 / (1 + exp(-x))} # logit <- function(x) {log(x) - log(1-x)} llfun_logistic <- function(data_i, draws) { x_i <- as.matrix(data_i[, which(grepl(colnames(data_i), pattern = "X")), drop = FALSE ]) y_pred <- draws %*% t(x_i) dbinom(x = data_i$y, size = 1, prob = 1 / (1 + exp(-y_pred)), log = TRUE) } # Prepare data url <- "http://stat.columbia.edu/~gelman/arm/examples/arsenic/wells.dat" wells <- read.table(url) wells$dist100 <- with(wells, dist / 100) X <- model.matrix(~ dist100 + arsenic, wells) standata <- list(y = wells$switch, X = X, N = nrow(X), P = ncol(X)) # Fit model set.seed(4711) fit_1 <- stan(model_code = stan_code, data = standata, seed = 4711) print(fit_1, pars = "beta") parameter_draws <- extract(fit_1)$beta stan_df <- as.data.frame(standata) loo_i(1, llfun_logistic, data = stan_df, draws = parameter_draws) sm <- stan_model(model_code = stan_code) set.seed(4711) fit_laplace <- optimizing(sm, data = standata, draws = 2000, seed = 42) parameter_draws_laplace <- fit_laplace$theta_tilde log_p <- fit_laplace$log_p # The log density of the posterior log_g <- fit_laplace$log_g # The log density of the approximation # For comparisons standata$X[, "arsenic"] <- log(standata$X[, "arsenic"]) stan_df2 <- 
as.data.frame(standata) set.seed(4711) fit_2 <- stan(fit = fit_1, data = standata, seed = 4711) parameter_draws_2 <- extract(fit_2)$beta save( llfun_logistic, stan_df, stan_df2, parameter_draws, parameter_draws_laplace, parameter_draws_2, log_p, log_g, file = test_path("data-for-tests/loo_subsample_vignette.rda"), compression_level = 9 ) } else { load(test_path("data-for-tests/loo_subsample_vignette.rda")) } set.seed(4711) expect_no_warning( looss_1 <- loo_subsample( llfun_logistic, draws = parameter_draws, data = stan_df, observations = 100 ) ) expect_output( print(looss_1), "Computed from 4000 by 100 subsampled log-likelihood" ) expect_output(print(looss_1), "values from 3020 total observations.") expect_output( print(looss_1), "MCSE and ESS estimates assume independent draws" ) expect_output(print(looss_1), "elpd_loo -1968.5 15.6 0.3") expect_output(print(looss_1), "p_loo 3.1 0.1 0.4") expect_s3_class(looss_1, c("psis_loo_ss", "psis_loo", "loo")) set.seed(4711) expect_no_warning( looss_1b <- update( looss_1, draws = parameter_draws, data = stan_df, observations = 200 ) ) expect_output( print(looss_1b), "Computed from 4000 by 200 subsampled log-likelihood" ) expect_output(print(looss_1b), "values from 3020 total observations.") expect_output( print(looss_1b), "MCSE and ESS estimates assume independent draws" ) expect_output(print(looss_1b), "elpd_loo -1968.3 15.6 0.2") expect_output(print(looss_1b), "p_loo 3.2 0.1 0.4") expect_s3_class(looss_1b, c("psis_loo_ss", "psis_loo", "loo")) set.seed(4711) expect_no_warning( looss_2 <- loo_subsample( x = llfun_logistic, draws = parameter_draws, data = stan_df, observations = 100, estimator = "hh_pps", loo_approximation = "lpd", loo_approximation_draws = 100 ) ) expect_output( print(looss_2), "Computed from 4000 by 100 subsampled log-likelihood" ) expect_output(print(looss_2), "values from 3020 total observations.") expect_output( print(looss_2), "MCSE and ESS estimates assume independent draws" ) # Currently failing # expect_output(print(looss_2), "elpd_loo -1968.9 15.4 0.5") # expect_output(print(looss_2), "p_loo 3.5 0.2 0.5") expect_s3_class(looss_2, c("psis_loo_ss", "psis_loo", "loo")) set.seed(4711) expect_no_warning( aploo_1 <- loo_approximate_posterior( llfun_logistic, draws = parameter_draws_laplace, data = stan_df, log_p = log_p, log_g = log_g ) ) expect_output( print(aploo_1), "Computed from 2000 by 3020 log-likelihood matrix" ) expect_output( print(aploo_1), "MCSE and ESS estimates assume independent draws" ) expect_output(print(aploo_1), "elpd_loo -1968.4 15.6") expect_output(print(aploo_1), "p_loo 3.2 0.2") expect_output(print(aploo_1), "Posterior approximation correction used.") expect_output(print(aploo_1), "All Pareto k estimates are good") expect_equal(length(pareto_k_ids(aploo_1, threshold = 0.5)), 31) expect_s3_class(aploo_1, c("psis_loo_ap", "psis_loo", "loo")) set.seed(4711) expect_no_warning( looapss_1 <- loo_subsample( llfun_logistic, draws = parameter_draws_laplace, data = stan_df, log_p = log_p, log_g = log_g, observations = 100 ) ) expect_output( print(looapss_1), "Computed from 2000 by 100 subsampled log-likelihood" ) expect_output( print(looapss_1), "MCSE and ESS estimates assume independent draws" ) expect_output(print(looapss_1), "values from 3020 total observations.") expect_output(print(looapss_1), "elpd_loo -1968.2 15.6 0.4") expect_output(print(looapss_1), "p_loo 2.9 0.1 0.5") expect_output(print(looapss_1), "All Pareto k estimates are good") expect_equal(length(pareto_k_ids(looapss_1, threshold = 0.5)), 3) # Loo 
compare set.seed(4711) expect_no_warning( looss_1 <- loo_subsample( llfun_logistic, draws = parameter_draws, data = stan_df, observations = 100 ) ) set.seed(4712) expect_no_warning( looss_2 <- loo_subsample( x = llfun_logistic, draws = parameter_draws_2, data = stan_df2, observations = 100 ) ) expect_output( print(looss_2), "Computed from 4000 by 100 subsampled log-likelihood" ) expect_output( print(looss_2), "MCSE and ESS estimates assume independent draws" ) expect_output(print(looss_2), "values from 3020 total observations.") expect_output(print(looss_2), "elpd_loo -1952.0 16.2 0.2") expect_output(print(looss_2), "p_loo 2.6 0.1 0.3") expect_warning( comp <- loo_compare(looss_1, looss_2), "Different subsamples in 'model2' and 'model1'. Naive diff SE is used." ) expect_output(print(comp), "model1 16.5 22.5 0.4") set.seed(4712) expect_no_warning( looss_2_m <- loo_subsample( x = llfun_logistic, draws = parameter_draws_2, data = stan_df2, observations = looss_1 ) ) expect_message( looss_2_m <- suppressWarnings(loo_subsample( x = llfun_logistic, draws = parameter_draws_2, data = stan_df2, observations = obs_idx(looss_1) )), "Simple random sampling with replacement assumed." ) expect_silent(comp <- loo_compare(looss_1, looss_2_m)) expect_output(print(comp), "model1 16.1 4.4 0.1") set.seed(4712) expect_no_warning( looss_1 <- update( looss_1, draws = parameter_draws, data = stan_df, observations = 200 ) ) expect_no_warning( looss_2_m <- update( looss_2_m, draws = parameter_draws_2, data = stan_df2, observations = looss_1 ) ) expect_silent(comp2 <- loo_compare(looss_1, looss_2_m)) expect_output(print(comp2), "model1 16.3 4.4 0.1") expect_no_warning( looss_2_full <- loo( x = llfun_logistic, draws = parameter_draws_2, data = stan_df2 ) ) expect_message( comp3 <- loo_compare(x = list(looss_1, looss_2_full)), "Estimated elpd_diff using observations included in loo calculations for all models." 
) expect_output(print(comp3), "model1 16.5 4.4 0.3") }) loo/tests/testthat/test_psis_approximate_posterior.R0000644000176200001440000002147115064301501022724 0ustar liggesusersload(test_path("data-for-tests/test_data_psis_approximate_posterior.rda")) test_that("Laplace approximation, independent posterior", { log_p <- test_data_psis_approximate_posterior$laplace_independent$log_p log_g <- test_data_psis_approximate_posterior$laplace_independent$log_q ll <- test_data_psis_approximate_posterior$laplace_independent$log_liks expect_silent( psis_lap <- psis_approximate_posterior( log_p = log_p, log_g = log_g, cores = 1, save_psis = FALSE ) ) expect_s3_class(psis_lap, "psis") expect_lt(pareto_k_values(psis_lap), 0.7) expect_silent( psis_lap_ll <- psis_approximate_posterior( log_p = log_p, log_g = log_g, log_liks = ll, cores = 1, save_psis = FALSE ) ) expect_s3_class(psis_lap_ll, "loo") expect_true(all(pareto_k_values(psis_lap_ll) < 0.7)) }) test_that("Laplace approximation, correlated posterior", { log_p <- test_data_psis_approximate_posterior$laplace_correlated$log_p log_g <- test_data_psis_approximate_posterior$laplace_correlated$log_q ll <- test_data_psis_approximate_posterior$laplace_correlated$log_liks expect_silent( psis_lap <- psis_approximate_posterior( log_p = log_p, log_g = log_g, cores = 1, save_psis = FALSE ) ) expect_s3_class(psis_lap, "psis") expect_lt(pareto_k_values(psis_lap), 0.7) expect_silent( psis_lap_ll <- psis_approximate_posterior( log_p = log_p, log_g = log_g, log_liks = ll, cores = 1, save_psis = FALSE ) ) expect_s3_class(psis_lap_ll, "loo") expect_true(all(pareto_k_values(psis_lap_ll) < 0.7)) }) test_that("Laplace approximation, normal model", { log_p <- test_data_psis_approximate_posterior$laplace_normal$log_p log_g <- test_data_psis_approximate_posterior$laplace_normal$log_q ll <- test_data_psis_approximate_posterior$laplace_normal$log_liks expect_no_warning( psis_lap <- psis_approximate_posterior( log_p = log_p, log_g = log_g, cores = 1, save_psis = FALSE ) ) expect_s3_class(psis_lap, "psis") expect_gt(pareto_k_values(psis_lap), 0.5) expect_warning( psis_lap_ll <- psis_approximate_posterior( log_p = log_p, log_g = log_g, log_liks = ll, cores = 1, save_psis = FALSE ) ) expect_s3_class(psis_lap_ll, "loo") expect_true(all(pareto_k_values(psis_lap_ll) > 0.5)) }) test_that("ADVI fullrank approximation, independent posterior", { log_p <- test_data_psis_approximate_posterior$fullrank_independent$log_p log_g <- test_data_psis_approximate_posterior$fullrank_independent$log_q ll <- test_data_psis_approximate_posterior$fullrank_independent$log_liks expect_silent( psis_advi <- psis_approximate_posterior( log_p = log_p, log_g = log_g, cores = 1, save_psis = FALSE ) ) expect_s3_class(psis_advi, "psis") expect_lt(pareto_k_values(psis_advi), 0.7) expect_silent( psis_advi_ll <- psis_approximate_posterior( log_p = log_p, log_g = log_g, log_liks = ll, cores = 1, save_psis = FALSE ) ) expect_s3_class(psis_advi_ll, "loo") expect_true(all(pareto_k_values(psis_advi_ll) < 0.7)) }) test_that("ADVI fullrank approximation, correlated posterior", { log_p <- test_data_psis_approximate_posterior$fullrank_correlated$log_p log_g <- test_data_psis_approximate_posterior$fullrank_correlated$log_q ll <- test_data_psis_approximate_posterior$fullrank_correlated$log_liks expect_silent( psis_advi <- psis_approximate_posterior( log_p = log_p, log_g = log_g, cores = 1, save_psis = FALSE ) ) expect_s3_class(psis_advi, "psis") expect_lt(pareto_k_values(psis_advi), 0.7) expect_silent( psis_advi_ll <- 
psis_approximate_posterior( log_p = log_p, log_g = log_g, log_liks = ll, cores = 1, save_psis = FALSE ) ) expect_s3_class(psis_advi_ll, "loo") expect_true(all(pareto_k_values(psis_advi_ll) < 0.7)) }) test_that("ADVI fullrank approximation, correlated posterior", { log_p <- test_data_psis_approximate_posterior$fullrank_normal$log_p log_g <- test_data_psis_approximate_posterior$fullrank_normal$log_q ll <- test_data_psis_approximate_posterior$fullrank_normal$log_liks expect_warning( psis_advi <- psis_approximate_posterior( log_p = log_p, log_g = log_g, cores = 1, save_psis = FALSE ) ) expect_s3_class(psis_advi, "psis") expect_gt(pareto_k_values(psis_advi), 0.7) expect_warning( psis_advi_ll <- psis_approximate_posterior( log_p = log_p, log_g = log_g, log_liks = ll, cores = 1, save_psis = FALSE ) ) expect_s3_class(psis_advi_ll, "loo") expect_true(all(pareto_k_values(psis_advi_ll) > 0.7)) }) test_that("ADVI meanfield approximation, independent posterior", { log_p <- test_data_psis_approximate_posterior$meanfield_independent$log_p log_g <- test_data_psis_approximate_posterior$meanfield_independent$log_q ll <- test_data_psis_approximate_posterior$meanfield_independent$log_liks expect_silent( psis_advi <- psis_approximate_posterior( log_p = log_p, log_g = log_g, cores = 1, save_psis = FALSE ) ) expect_s3_class(psis_advi, "psis") expect_lt(pareto_k_values(psis_advi), 0.7) expect_silent( psis_advi_ll <- psis_approximate_posterior( log_p = log_p, log_g = log_g, log_liks = ll, cores = 1, save_psis = FALSE ) ) expect_s3_class(psis_advi_ll, "loo") expect_true(all(pareto_k_values(psis_advi_ll) < 0.7)) }) test_that("ADVI meanfield approximation, correlated posterior", { log_p <- test_data_psis_approximate_posterior$meanfield_correlated$log_p log_g <- test_data_psis_approximate_posterior$meanfield_correlated$log_q ll <- test_data_psis_approximate_posterior$meanfield_correlated$log_liks expect_warning( psis_advi <- psis_approximate_posterior( log_p = log_p, log_g = log_g, cores = 1, save_psis = FALSE ) ) expect_s3_class(psis_advi, "psis") expect_gt(pareto_k_values(psis_advi), 0.7) expect_warning( psis_advi_ll <- psis_approximate_posterior( log_p = log_p, log_g = log_g, log_liks = ll, cores = 1, save_psis = FALSE ) ) expect_s3_class(psis_advi_ll, "loo") expect_true(all(pareto_k_values(psis_advi_ll) > 0.5)) expect_true(any(pareto_k_values(psis_advi_ll) > 0.7)) }) test_that("ADVI meanfield approximation, normal model", { log_p <- test_data_psis_approximate_posterior$meanfield_normal$log_p log_g <- test_data_psis_approximate_posterior$meanfield_normal$log_q ll <- test_data_psis_approximate_posterior$meanfield_normal$log_liks expect_warning( psis_advi <- psis_approximate_posterior( log_p = log_p, log_g = log_g, cores = 1, save_psis = FALSE ) ) expect_s3_class(psis_advi, "psis") expect_gt(pareto_k_values(psis_advi), 0.7) expect_warning( psis_advi_ll <- psis_approximate_posterior( log_p = log_p, log_g = log_g, log_liks = ll, cores = 1, save_psis = FALSE ) ) expect_s3_class(psis_advi_ll, "loo") expect_true(all(pareto_k_values(psis_advi_ll) > 0.7)) }) test_that("ADVI meanfield approximation, normal model", { log_p <- test_data_psis_approximate_posterior$meanfield_normal$log_p log_g <- test_data_psis_approximate_posterior$meanfield_normal$log_q ll <- test_data_psis_approximate_posterior$meanfield_normal$log_liks expect_warning( psis_advi <- psis_approximate_posterior( log_p = log_p, log_g = log_g, cores = 1, save_psis = FALSE ) ) expect_s3_class(psis_advi, "psis") expect_gt(pareto_k_values(psis_advi), 0.7) 
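# With log_liks supplied as well, the call below should still warn, return a 'loo' object, and give Pareto k estimates above 0.7 for every observation.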
expect_warning( psis_advi_ll <- psis_approximate_posterior( log_p = log_p, log_g = log_g, log_liks = ll, cores = 1, save_psis = FALSE ) ) expect_s3_class(psis_advi_ll, "loo") expect_true(all(pareto_k_values(psis_advi_ll) > 0.7)) }) test_that("Deprecation of log_q argument", { log_p <- test_data_psis_approximate_posterior$laplace_independent$log_p log_g <- test_data_psis_approximate_posterior$laplace_independent$log_q ll <- test_data_psis_approximate_posterior$laplace_independent$log_liks expect_warning( psis_lap <- loo:::psis_approximate_posterior( log_p = log_p, log_q = log_g, cores = 1, save_psis = FALSE ), regexp = "argument log_q has been changed to log_g" ) expect_s3_class(psis_lap, "psis") expect_lt(pareto_k_values(psis_lap), 0.7) }) loo/tests/testthat/test_loo_approximate_posterior.R0000644000176200001440000001075715064301501022544 0ustar liggesusers# Create test data # Checked by Mans M and Paul B 24th of June 2019 set.seed(123) N <- 50 K <- 10 S <- 1000 a0 <- 1 b0 <- 1 p <- 0.5 y <- rbinom(N, size = K, prob = p) fake_data <- data.frame(y, K) # The log posterior log_post <- function(p, y, a0, b0, K) { log_lik <- sum(dbinom(x = y, size = K, prob = p, log = TRUE)) # the log likelihood log_post <- log_lik + dbeta(x = p, shape1 = a0, shape2 = b0, log = TRUE) # the log prior log_post } it <- optim( par = 0.5, fn = log_post, control = list(fnscale = -1), hessian = TRUE, y = y, a0 = a0, b0 = b0, K = K, lower = 0.01, upper = 0.99, method = "Brent" ) lap_params <- c(mu = it$par, sd = sqrt(solve(-it$hessian))) a <- a0 + sum(y) b <- b0 + N * K - sum(y) fake_true_posterior <- as.matrix(rbeta(S, a, b)) fake_laplace_posterior <- as.matrix(rnorm( n = S, mean = lap_params["mu"], sd = lap_params["sd"] )) # mean(fake_laplace_posterior); sd(fake_laplace_posterior) p_draws <- as.vector(fake_laplace_posterior) log_p <- numeric(S) for (s in 1:S) { log_p[s] <- log_post(p_draws[s], y = y, a0 = a0, b0 = b0, K = K) } log_g <- as.vector(dnorm( as.vector(fake_laplace_posterior), mean = lap_params["mu"], sd = lap_params["sd"], log = TRUE )) llfun <- function(data_i, draws) { dbinom(data_i$y, size = data_i$K, prob = draws, log = TRUE) } ll <- matrix(0, nrow = S, ncol = N) for (j in 1:N) { ll[, j] <- llfun( data_i = fake_data[j, , drop = FALSE], draws = fake_laplace_posterior ) } test_that("loo_approximate_posterior.array works as loo_approximate_posterior.matrix", { # Create array with two "chains" log_p_mat <- matrix(log_p, nrow = (S / 2), ncol = 2) log_g_mat <- matrix(log_g, nrow = (S / 2), ncol = 2) ll_array <- array(0, dim = c((S / 2), 2, ncol(ll))) ll_array[, 1, ] <- ll[1:(S / 2), ] ll_array[, 2, ] <- ll[(S / 2 + 1):S, ] # Assert that they are ok expect_equal(ll_array[1:2, 1, 1:2], ll[1:2, 1:2], ignore_attr = TRUE) expect_equal( ll_array[1:2, 2, 1:2], ll[(S / 2 + 1):((S / 2) + 2), 1:2], ignore_attr = TRUE ) # Compute aploo expect_silent( aploo1 <- loo_approximate_posterior.matrix( x = ll, log_p = log_p, log_g = log_g ) ) expect_silent( aploo2 <- loo_approximate_posterior.array( x = ll_array, log_p = log_p_mat, log_g = log_g_mat ) ) expect_silent(aploo1b <- loo.matrix(x = ll, r_eff = rep(1, N))) # Check equivalence expect_equal(aploo1$estimates, aploo2$estimates) expect_equal(class(aploo1), class(aploo2)) expect_failure(expect_equal(aploo1b$estimates, aploo2$estimates)) expect_failure(expect_equal(class(aploo1), class(aploo1b))) # Should fail with matrix expect_error( aploo2 <- loo_approximate_posterior.matrix( x = ll, log_p = as.matrix(log_p), log_g = log_g ) ) expect_error( aploo2 <- 
loo_approximate_posterior.matrix( x = ll, log_p = as.matrix(log_p), log_g = as.matrix(log_g) ) ) # Expect log_p and log_g be stored in the approximate_posterior in the same way expect_length(aploo1$approximate_posterior$log_p, nrow(ll)) expect_length(aploo1$approximate_posterior$log_g, nrow(ll)) expect_equal( aploo1$approximate_posterior$log_p, aploo2$approximate_posterior$log_p ) expect_equal( aploo1$approximate_posterior$log_g, aploo2$approximate_posterior$log_g ) }) test_that("loo_approximate_posterior.function works as loo_approximate_posterior.matrix", { # Compute aploo expect_silent( aploo1 <- loo_approximate_posterior.matrix( x = ll, log_p = log_p, log_g = log_g ) ) expect_silent(aploo1b <- loo.matrix(x = ll, r_eff = rep(1, N))) expect_silent( aploo2 <- loo_approximate_posterior.function( x = llfun, log_p = log_p, log_g = log_g, data = fake_data, draws = fake_laplace_posterior ) ) # Check equivalence expect_equal(aploo1$estimates, aploo2$estimates) expect_equal(class(aploo1), class(aploo2)) expect_failure(expect_equal(aploo1b$estimates, aploo2$estimates)) # Check equivalence # Expect log_p and log_g be stored in the approximate_posterior in the same way expect_length( aploo2$approximate_posterior$log_p, nrow(fake_laplace_posterior) ) expect_length( aploo2$approximate_posterior$log_g, nrow(fake_laplace_posterior) ) expect_equal( aploo1$approximate_posterior$log_p, aploo2$approximate_posterior$log_p ) expect_equal( aploo1$approximate_posterior$log_g, aploo2$approximate_posterior$log_g ) }) loo/tests/testthat/test_extract_log_lik.R0000644000176200001440000000044515064301501020377 0ustar liggesuserstest_that("extract_log_lik throws appropriate errors", { x1 <- rnorm(100) expect_error(extract_log_lik(x1), regexp = "Not a stanfit object") x2 <- structure(x1, class = "stanfit") expect_error(extract_log_lik(x2)) # not an S4 object OR no applicable method (depending on R version) }) loo/tests/testthat/test_crps.R0000644000176200001440000000412115027034070016172 0ustar liggesusersset.seed(123456789) n <- 10 S <- 100 y <- rnorm(n) x1 <- matrix(rnorm(n * S), nrow = S) x2 <- matrix(rnorm(n * S), nrow = S) ll <- matrix(rnorm(n * S) * 0.1 - 1, nrow = S) with_seed <- function(seed, code) { code <- substitute(code) orig.seed <- .Random.seed on.exit(.Random.seed <<- orig.seed) set.seed(seed) eval.parent(code) } test_that("crps computation is correct", { expect_equal(.crps_fun(2.0, 1.0), 0.0) expect_equal(.crps_fun(1.0, 2.0), -1.5) expect_equal(.crps_fun(pi, pi^2), 0.5 * pi - pi^2) expect_equal(.crps_fun(1.0, 0.0, scale = TRUE), 0.0) expect_equal(.crps_fun(1.0, 2.0, scale = TRUE), -2.0) expect_equal(.crps_fun(pi, pi^2, scale = TRUE), -pi^2/pi - 0.5 * log(pi)) }) test_that("crps matches snapshots", { expect_snapshot_value(with_seed(1, crps(x1, x2, y)), style = "serialize") expect_snapshot_value(with_seed(1, scrps(x1, x2, y)), style = "serialize") expect_snapshot_value(with_seed(1, loo_crps(x1, x2, y, ll)), style = "serialize") expect_snapshot_value(with_seed(1, loo_scrps(x1, x2, y, ll)), style = "serialize") }) test_that("input validation throws correct errors", { expect_error(validate_crps_input(as.character(x1), x2, y), "is.numeric(x) is not TRUE", fixed = TRUE) expect_error(validate_crps_input(x1, as.character(x2), y), "is.numeric(x2) is not TRUE", fixed = TRUE) expect_error(validate_crps_input(x1, x2, c('a', 'b')), "is.numeric(y) is not TRUE", fixed = TRUE) expect_error(validate_crps_input(x1, t(x2), y), "identical(dim(x), dim(x2)) is not TRUE", fixed = TRUE) expect_error(validate_crps_input(x1, 
x2, c(1, 2)), "ncol(x) == length(y) is not TRUE", fixed = TRUE) expect_error(validate_crps_input(x1, x2, y, t(ll)), "ifelse(is.null(log_lik), TRUE, identical(dim(log_lik), dim(x))) is not TRUE", fixed = TRUE) }) test_that("methods for single data point don't error", { expect_silent(crps(x1[,1], x2[,1], y[1])) expect_silent(scrps(x1[,1], x2[,1], y[1])) }) loo/tests/testthat/test_model_weighting.R0000644000176200001440000000753515064301501020401 0ustar liggesusers# generate fake data set.seed(123) y <- rnorm(50, 0, 1) sd_sim1 <- abs(rnorm(500, 1.5, 0.1)) sd_sim2 <- abs(rnorm(500, 1.2, 0.1)) sd_sim3 <- abs(rnorm(500, 1, 0.05)) log_lik1 <- log_lik2 <- log_lik3 <- matrix(NA, 500, 50) for (s in 1:500) { log_lik1[s, ] <- dnorm(y, -1, sd_sim1[s], log = T) log_lik2[s, ] <- dnorm(y, 0.7, sd_sim2[s], log = T) log_lik3[s, ] <- dnorm(y, 1, sd_sim3[s], log = T) } ll_list <- list(log_lik1, log_lik2, log_lik3) r_eff_list <- list(rep(0.9, 50), rep(0.9, 50), rep(0.9, 50)) loo_list <- lapply(1:length(ll_list), function(j) { loo(ll_list[[j]], r_eff = r_eff_list[[j]]) }) tol <- 0.01 # absolute tolerance of weights test_that("loo_model_weights throws correct errors and warnings", { expect_error( loo_model_weights(log_lik1), "list of matrices or a list of 'psis_loo' objects" ) expect_error(loo_model_weights(list(log_lik1)), "At least two models") expect_error(loo_model_weights(list(loo_list[[1]])), "At least two models") expect_error( loo_model_weights(list(log_lik1), method = "pseudobma"), "At least two models" ) expect_error( loo_model_weights(list(log_lik1, log_lik2[-1, ])), "same dimensions" ) expect_error( loo_model_weights(list(log_lik1, log_lik2, log_lik3[, -1])), "same dimensions" ) loo_list2 <- loo_list attr(loo_list2[[3]], "dims") <- c(10, 10) expect_error(loo_model_weights(loo_list2), "same dimensions") expect_error( loo_model_weights(ll_list, r_eff_list = r_eff_list[-1]), "one component for each model" ) r_eff_list[[3]] <- rep(0.9, 51) expect_error( loo_model_weights(ll_list, r_eff_list = r_eff_list), "same length as the number of columns" ) expect_error( loo_model_weights(list(loo_list[[1]], 2)), "List elements must all be 'psis_loo' objects or log-likelihood matrices", fixed = TRUE ) expect_no_warning(loo_model_weights(ll_list)) }) test_that("loo_model_weights (stacking and pseudo-BMA) gives expected result", { w1 <- loo_model_weights(ll_list, method = "stacking", r_eff_list = r_eff_list) expect_type(w1, "double") expect_s3_class(w1, "stacking_weights") expect_length(w1, 3) expect_named(w1, paste0("model", c(1:3))) expect_snapshot_value(as.numeric(w1), style = "serialize") expect_output(print(w1), "Method: stacking") w1_b <- loo_model_weights(loo_list) expect_identical(w1, w1_b) w2 <- loo_model_weights( ll_list, r_eff_list = r_eff_list, method = "pseudobma", BB = TRUE ) expect_type(w2, "double") expect_s3_class(w2, "pseudobma_bb_weights") expect_length(w2, 3) expect_named(w2, paste0("model", c(1:3))) expect_snapshot_value(as.numeric(w2), style = "serialize") expect_output(print(w2), "Method: pseudo-BMA+") w3 <- loo_model_weights( ll_list, r_eff_list = r_eff_list, method = "pseudobma", BB = FALSE ) expect_type(w3, "double") expect_length(w3, 3) expect_named(w3, paste0("model", c(1:3))) expect_equal( as.numeric(w3), c(5.365279e-05, 9.999436e-01, 2.707028e-06), tolerance = tol ) expect_output(print(w3), "Method: pseudo-BMA") w3_b <- loo_model_weights(loo_list, method = "pseudobma", BB = FALSE) expect_identical(w3, w3_b) }) test_that("stacking_weights and pseudobma_weights throw correct errors", { xx 
<- cbind(rnorm(10)) expect_error(stacking_weights(xx), "two models are required") expect_error(pseudobma_weights(xx), "two models are required") }) test_that("loo_model_weights uses correct names for list of loo objects", { loo1 <- loo_list[[1]] loo2 <- loo_list[[2]] loo3 <- loo_list[[3]] expect_named( loo_model_weights(list(loo1, loo2, loo3)), c("model1", "model2", "model3") ) expect_named( loo_model_weights(list("a" = loo1, loo2, "c" = loo3)), c("a", "model2", "c") ) expect_named( loo_model_weights(list(`a` = loo1, `b` = loo2, `c` = loo3)), c("a", "b", "c") ) }) loo/tests/testthat/test_E_loo.R0000644000176200001440000001501215064301501016256 0ustar liggesusersLLarr <- example_loglik_array() LLmat <- example_loglik_matrix() LLvec <- LLmat[, 1] chain_id <- rep(1:2, each = dim(LLarr)[1]) r_eff_mat <- relative_eff(exp(LLmat), chain_id) r_eff_vec <- relative_eff(exp(LLvec), chain_id = chain_id) psis_mat <- psis(-LLmat, r_eff = r_eff_mat, cores = 2) psis_vec <- psis(-LLvec, r_eff = r_eff_vec) set.seed(123) x <- matrix(rnorm(length(LLmat)), nrow = nrow(LLmat), ncol = ncol(LLmat)) log_rats <- -LLmat # matrix method E_test_mean <- E_loo(x, psis_mat, type = "mean", log_ratios = log_rats) E_test_var <- E_loo(x, psis_mat, type = "var", log_ratios = log_rats) E_test_sd <- E_loo(x, psis_mat, type = "sd", log_ratios = log_rats) E_test_quant <- E_loo( x, psis_mat, type = "quantile", probs = 0.5, log_ratios = log_rats ) E_test_quant2 <- E_loo( x, psis_mat, type = "quantile", probs = c(0.1, 0.9), log_ratios = log_rats ) # vector method E_test_mean_vec <- E_loo( x[, 1], psis_vec, type = "mean", log_ratios = log_rats[, 1] ) E_test_var_vec <- E_loo( x[, 1], psis_vec, type = "var", log_ratios = log_rats[, 1] ) E_test_sd_vec <- E_loo( x[, 1], psis_vec, type = "sd", log_ratios = log_rats[, 1] ) E_test_quant_vec <- E_loo( x[, 1], psis_vec, type = "quant", probs = 0.5, log_ratios = log_rats[, 1] ) E_test_quant_vec2 <- E_loo( x[, 1], psis_vec, type = "quant", probs = c(0.1, 0.5, 0.9), log_ratios = log_rats[, 1] ) # E_loo_khat khat <- loo:::E_loo_khat.matrix(x, psis_mat, log_rats) test_that("E_loo return types correct for matrix method", { expect_type(E_test_mean, "list") expect_named(E_test_mean, c("value", "pareto_k")) expect_length(E_test_mean, 2) expect_length(E_test_mean$value, ncol(x)) expect_length(E_test_mean$pareto_k, ncol(x)) expect_type(E_test_var, "list") expect_named(E_test_var, c("value", "pareto_k")) expect_length(E_test_var, 2) expect_length(E_test_var$value, ncol(x)) expect_length(E_test_var$pareto_k, ncol(x)) expect_type(E_test_sd, "list") expect_named(E_test_sd, c("value", "pareto_k")) expect_length(E_test_sd, 2) expect_length(E_test_sd$value, ncol(x)) expect_length(E_test_sd$pareto_k, ncol(x)) expect_type(E_test_quant, "list") expect_named(E_test_quant, c("value", "pareto_k")) expect_length(E_test_quant, 2) expect_length(E_test_quant$value, ncol(x)) expect_length(E_test_quant$pareto_k, ncol(x)) expect_type(E_test_quant2, "list") expect_named(E_test_quant2, c("value", "pareto_k")) expect_length(E_test_quant2, 2) expect_equal(dim(E_test_quant2$value), c(2, ncol(x))) expect_length(E_test_quant2$pareto_k, ncol(x)) }) test_that("E_loo return types correct for default/vector method", { expect_type(E_test_mean_vec, "list") expect_named(E_test_mean_vec, c("value", "pareto_k")) expect_length(E_test_mean_vec, 2) expect_length(E_test_mean_vec$value, 1) expect_length(E_test_mean_vec$pareto_k, 1) expect_type(E_test_var_vec, "list") expect_named(E_test_var_vec, c("value", "pareto_k")) 
expect_length(E_test_var_vec, 2) expect_length(E_test_var_vec$value, 1) expect_length(E_test_var_vec$pareto_k, 1) expect_type(E_test_sd_vec, "list") expect_named(E_test_sd_vec, c("value", "pareto_k")) expect_length(E_test_sd_vec, 2) expect_length(E_test_sd_vec$value, 1) expect_length(E_test_sd_vec$pareto_k, 1) expect_type(E_test_quant_vec, "list") expect_named(E_test_quant_vec, c("value", "pareto_k")) expect_length(E_test_quant_vec, 2) expect_length(E_test_quant_vec$value, 1) expect_length(E_test_quant_vec$pareto_k, 1) expect_type(E_test_quant_vec2, "list") expect_named(E_test_quant_vec2, c("value", "pareto_k")) expect_length(E_test_quant_vec2, 2) expect_length(E_test_quant_vec2$value, 3) expect_length(E_test_quant_vec2$pareto_k, 1) }) test_that("E_loo.default equal to snapshots", { expect_snapshot_value(E_test_mean_vec, style = "serialize") expect_snapshot_value(E_test_var_vec, style = "serialize") expect_snapshot_value(E_test_sd_vec, style = "serialize") expect_snapshot_value(E_test_quant_vec, style = "serialize") expect_snapshot_value(E_test_quant_vec2, style = "serialize") }) test_that("E_loo.matrix equal to snapshots", { expect_snapshot_value(E_test_mean, style = "serialize") expect_snapshot_value(E_test_var, style = "serialize") expect_snapshot_value(E_test_sd, style = "serialize") expect_snapshot_value(E_test_quant, style = "serialize") expect_snapshot_value(E_test_quant2, style = "serialize") }) test_that("E_loo throws correct errors and warnings", { # warnings expect_no_warning(E_loo.matrix(x, psis_mat)) # no warnings if x is constant, binary, NA, NaN, Inf expect_no_warning(E_loo.matrix(x * 0, psis_mat)) expect_no_warning(E_loo.matrix(0 + (x > 0), psis_mat)) expect_no_warning(E_loo.matrix(x + NA, psis_mat)) expect_no_warning(E_loo.matrix(x * NaN, psis_mat)) expect_no_warning(E_loo.matrix(x * Inf, psis_mat)) expect_no_warning(E_test <- E_loo.default(x[, 1], psis_vec)) expect_length(E_test$pareto_k, 1) # errors expect_error(E_loo(x, 1), "is.psis") expect_error( E_loo(x, psis_mat, type = "quantile", probs = 2), "all(probs > 0 & probs < 1) is not TRUE", fixed = TRUE ) expect_error( E_loo(rep("a", nrow(x)), psis_vec), "is.numeric(x) is not TRUE", fixed = TRUE ) expect_error( E_loo(1:10, psis_vec), "length(x) == dim(psis_object)[1] is not TRUE", fixed = TRUE ) expect_error( E_loo(cbind(1:10, 1:10), psis_mat), "identical(dim(x), dim(psis_object)) is not TRUE", fixed = TRUE ) }) test_that("weighted quantiles work", { .wquant_rapprox <- function(x, w, probs) { stopifnot(all(probs > 0 & probs < 1)) ord <- order(x) d <- x[ord] ww <- w[ord] p <- cumsum(ww) / sum(ww) stats::approx(p, d, probs, rule = 2)$y } .wquant_sim <- function(x, w, probs, n_sims) { xx <- sample(x, size = n_sims, replace = TRUE, prob = w / sum(w)) quantile(xx, probs, names = FALSE) } set.seed(123) pr <- seq(0.025, 0.975, 0.025) x1 <- rnorm(100) w1 <- rlnorm(100) expect_equal( .wquant(x1, w1, pr), .wquant_rapprox(x1, w1, pr) ) x1 <- rnorm(1e4) w1 <- rlnorm(1e4) # expect_equal( # .wquant(x1, w1, pr), # .wquant_sim(x1, w1, pr, n_sim = 5e6), # tol = 0.005 # ) expect_equal( .wquant(x1, rep(1, length(x1)), pr), quantile(x1, probs = pr, names = FALSE) ) }) test_that("weighted variance works", { x <- rnorm(100) w <- rep(0.01, 100) expect_equal(.wvar(x, w), var(x)) expect_equal(.wsd(x, w), sqrt(.wvar(x, w))) w <- c(rep(0.1, 10), rep(0, 90)) expect_equal(.wvar(x, w), var(x[w > 0])) }) loo/tests/testthat/test_gpdfit.R0000644000176200001440000000133015064301501016474 0ustar liggesuserstest_that("gpdfit returns correct result", { 
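# gpdfit() fits a generalized Pareto distribution to the tail of x; the snapshots below cover the fit without the weakly informative prior adjustment (wip = FALSE), with it, and with the default grid size.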
set.seed(123) x <- rexp(100) gpdfit_val_old <- unlist(gpdfit(x, wip = FALSE, min_grid_pts = 80)) expect_snapshot_value(gpdfit_val_old, style = "serialize") gpdfit_val_wip <- unlist(gpdfit(x, wip = TRUE, min_grid_pts = 80)) expect_snapshot_value(gpdfit_val_wip, style = "serialize") gpdfit_val_wip_default_grid <- unlist(gpdfit(x, wip = TRUE)) expect_snapshot_value(gpdfit_val_wip_default_grid, style = "serialize") }) test_that("qgpd returns the correct result ", { probs <- seq(from = 0, to = 1, by = 0.25) q1 <- qgpd(probs, k = 1, sigma = 1) expect_equal(q1, c(0, 1 / 3, 1, 3, Inf)) q2 <- qgpd(probs, k = 1, sigma = 0) expect_true(all(is.nan(q2))) }) loo/tests/testthat/test_deprecated_extractors.R0000644000176200001440000001231515064301501021602 0ustar liggesusersoptions(mc.cores = 1) set.seed(123) LLarr <- example_loglik_array() r_eff_arr <- relative_eff(exp(LLarr)) loo1 <- suppressWarnings(loo(LLarr, r_eff = r_eff_arr)) waic1 <- suppressWarnings(waic(LLarr)) test_that("extracting estimates by name is deprecated for loo objects", { # $ method expect_snapshot(loo1$elpd_loo) expect_equal( suppressWarnings(loo1$elpd_loo), loo1$estimates["elpd_loo", "Estimate"] ) expect_snapshot(loo1$se_elpd_loo) expect_equal( suppressWarnings(loo1$se_elpd_loo), loo1$estimates["elpd_loo", "SE"] ) expect_snapshot(loo1$p_loo) expect_equal( suppressWarnings(loo1$p_loo), loo1$estimates["p_loo", "Estimate"] ) expect_snapshot(loo1$se_p_loo) expect_equal( suppressWarnings(loo1$se_p_loo), loo1$estimates["p_loo", "SE"] ) expect_snapshot(loo1$looic) expect_equal( suppressWarnings(loo1$looic), loo1$estimates["looic", "Estimate"] ) expect_snapshot(loo1$se_looic) expect_equal( suppressWarnings(loo1$se_looic), loo1$estimates["looic", "SE"] ) # [ method expect_snapshot(loo1["elpd_loo"]) expect_equal( suppressWarnings(loo1["elpd_loo"][[1]]), loo1$estimates["elpd_loo", "Estimate"] ) expect_snapshot(loo1["se_elpd_loo"]) expect_equal( suppressWarnings(loo1["se_elpd_loo"][[1]]), loo1$estimates["elpd_loo", "SE"] ) expect_snapshot(loo1["p_loo"]) expect_equal( suppressWarnings(loo1["p_loo"][[1]]), loo1$estimates["p_loo", "Estimate"] ) expect_snapshot(loo1["se_p_loo"]) expect_equal( suppressWarnings(loo1["se_p_loo"][[1]]), loo1$estimates["p_loo", "SE"] ) expect_snapshot(loo1["looic"]) expect_equal( suppressWarnings(loo1["looic"][[1]]), loo1$estimates["looic", "Estimate"] ) expect_snapshot(loo1["se_looic"]) expect_equal( suppressWarnings(loo1["se_looic"][[1]]), loo1$estimates["looic", "SE"] ) # [[ method expect_snapshot(loo1[["elpd_loo"]]) expect_equal( suppressWarnings(loo1[["elpd_loo"]]), loo1$estimates["elpd_loo", "Estimate"] ) expect_snapshot(loo1[["se_elpd_loo"]]) expect_equal( suppressWarnings(loo1[["se_elpd_loo"]]), loo1$estimates["elpd_loo", "SE"] ) expect_snapshot(loo1[["p_loo"]]) expect_equal( suppressWarnings(loo1[["p_loo"]]), loo1$estimates["p_loo", "Estimate"] ) expect_snapshot(loo1[["se_p_loo"]]) expect_equal( suppressWarnings(loo1[["se_p_loo"]]), loo1$estimates["p_loo", "SE"] ) expect_snapshot(loo1[["looic"]]) expect_equal( suppressWarnings(loo1[["looic"]]), loo1$estimates["looic", "Estimate"] ) expect_snapshot(loo1[["se_looic"]]) expect_equal( suppressWarnings(loo1[["se_looic"]]), loo1$estimates["looic", "SE"] ) }) test_that("extracting estimates by name is deprecated for waic objects", { expect_snapshot(waic1$elpd_waic) expect_equal( suppressWarnings(waic1$elpd_waic), waic1$estimates["elpd_waic", "Estimate"] ) expect_snapshot(waic1$se_elpd_waic) expect_equal( suppressWarnings(waic1$se_elpd_waic), 
waic1$estimates["elpd_waic", "SE"] ) expect_snapshot(waic1$p_waic) expect_equal( suppressWarnings(waic1$p_waic), waic1$estimates["p_waic", "Estimate"] ) expect_snapshot(waic1$se_p_waic) expect_equal( suppressWarnings(waic1$se_p_waic), waic1$estimates["p_waic", "SE"] ) expect_snapshot(waic1$waic) expect_equal( suppressWarnings(waic1$waic), waic1$estimates["waic", "Estimate"] ) expect_snapshot(waic1$se_waic) expect_equal( suppressWarnings(waic1$se_waic), waic1$estimates["waic", "SE"] ) # [ method expect_snapshot(waic1["elpd_waic"]) expect_equal( suppressWarnings(waic1["elpd_waic"][[1]]), waic1$estimates["elpd_waic", "Estimate"] ) expect_snapshot(waic1["se_elpd_waic"]) expect_equal( suppressWarnings(waic1["se_elpd_waic"][[1]]), waic1$estimates["elpd_waic", "SE"] ) expect_snapshot(waic1["p_waic"]) expect_equal( suppressWarnings(waic1["p_waic"][[1]]), waic1$estimates["p_waic", "Estimate"] ) expect_snapshot(waic1["se_p_waic"]) expect_equal( suppressWarnings(waic1["se_p_waic"][[1]]), waic1$estimates["p_waic", "SE"] ) expect_snapshot(waic1["waic"]) expect_equal( suppressWarnings(waic1["waic"][[1]]), waic1$estimates["waic", "Estimate"] ) expect_snapshot(waic1["se_waic"]) expect_equal( suppressWarnings(waic1["se_waic"][[1]]), waic1$estimates["waic", "SE"] ) # [[ method expect_snapshot(waic1[["elpd_waic"]]) expect_equal( suppressWarnings(waic1[["elpd_waic"]]), waic1$estimates["elpd_waic", "Estimate"] ) expect_snapshot(waic1[["se_elpd_waic"]]) expect_equal( suppressWarnings(waic1[["se_elpd_waic"]]), waic1$estimates["elpd_waic", "SE"] ) expect_snapshot(waic1[["p_waic"]]) expect_equal( suppressWarnings(waic1[["p_waic"]]), waic1$estimates["p_waic", "Estimate"] ) expect_snapshot(waic1[["se_p_waic"]]) expect_equal( suppressWarnings(waic1[["se_p_waic"]]), waic1$estimates["p_waic", "SE"] ) expect_snapshot(waic1[["waic"]]) expect_equal( suppressWarnings(waic1[["waic"]]), waic1$estimates["waic", "Estimate"] ) expect_snapshot(waic1[["se_waic"]]) expect_equal( suppressWarnings(waic1[["se_waic"]]), waic1$estimates["waic", "SE"] ) }) loo/tests/testthat/test_loo_predictive_metric.R0000644000176200001440000001315215027034070021601 0ustar liggesusersLL <- example_loglik_matrix() chain_id <- rep(1:2, each = dim(LL)[1] / 2) r_eff <- relative_eff(exp(LL), chain_id) psis_obj <- psis(-LL, r_eff = r_eff, cores = 2) set.seed(123) x <- matrix(rnorm(length(LL)), nrow = nrow(LL), ncol = ncol(LL)) x_prob <- 1 / (1 + exp(-x)) y <- rnorm(ncol(LL)) y_binary <- rbinom(ncol(LL), 1, 0.5) mae_mean <- loo_predictive_metric(x, y, LL, metric = 'mae', r_eff = r_eff) mae_quant <- loo_predictive_metric(x, y, LL, metric = 'mae', r_eff = r_eff, type = 'quantile', probs = 0.9) rmse_mean <- loo_predictive_metric(x, y, LL, metric = 'rmse', r_eff = r_eff) rmse_quant <- loo_predictive_metric(x, y, LL, metric = 'rmse', r_eff = r_eff, type = 'quantile', probs = 0.9) mse_mean <- loo_predictive_metric(x, y, LL, metric = 'mse', r_eff = r_eff) mse_quant <- loo_predictive_metric(x, y, LL, metric = 'mse', r_eff = r_eff, type = 'quantile', probs = 0.9) acc_mean <- loo_predictive_metric(x_prob, y_binary, LL, metric = 'acc', r_eff = r_eff) acc_quant <- loo_predictive_metric(x_prob, y_binary, LL, metric = 'acc', r_eff = r_eff, type = 'quantile', probs = 0.9) bacc_mean <- loo_predictive_metric(x_prob, y_binary, LL, metric = 'balanced_acc', r_eff = r_eff) bacc_quant <- loo_predictive_metric(x_prob, y_binary, LL, metric = 'balanced_acc', r_eff = r_eff, type = 'quantile', probs = 0.9) test_that('loo_predictive_metric stops with incorrect inputs', { 
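# Each call below should error: a non-numeric x (no matching method), a non-numeric y, an x whose columns do not match length(y), and an x whose dimensions differ from log_lik.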
expect_error(loo_predictive_metric(as.character(x), y, LL, r_eff = r_eff), 'no applicable method', fixed = TRUE) expect_error(loo_predictive_metric(x, as.character(y), LL, r_eff = r_eff), 'is.numeric(y) is not TRUE', fixed = TRUE) x_invalid <- matrix(rnorm(9), nrow = 3) expect_error(loo_predictive_metric(x_invalid, y, LL, r_eff = r_eff), 'identical(ncol(x), length(y)) is not TRUE', fixed = TRUE) x_invalid <- matrix(rnorm(64), nrow = 2) expect_error(loo_predictive_metric(x_invalid, y, LL, r_eff = r_eff), 'identical(dim(x), dim(log_lik)) is not TRUE', fixed = TRUE) }) test_that('loo_predictive_metric return types are correct', { # MAE expect_type(mae_mean, 'list') expect_type(mae_quant, 'list') expect_named(mae_mean, c('estimate', 'se')) expect_named(mae_quant, c('estimate', 'se')) # RMSE expect_type(rmse_mean, 'list') expect_type(rmse_quant, 'list') expect_named(rmse_mean, c('estimate', 'se')) expect_named(rmse_quant, c('estimate', 'se')) # MSE expect_type(mse_mean, 'list') expect_type(mse_quant, 'list') expect_named(mse_mean, c('estimate', 'se')) expect_named(mse_quant, c('estimate', 'se')) # Accuracy expect_type(acc_mean, 'list') expect_type(acc_quant, 'list') expect_named(acc_mean, c('estimate', 'se')) expect_named(acc_quant, c('estimate', 'se')) # Balanced accuracy expect_type(bacc_mean, 'list') expect_type(bacc_quant, 'list') expect_named(bacc_mean, c('estimate', 'se')) expect_named(bacc_quant, c('estimate', 'se')) }) test_that('loo_predictive_metric is equal to snapshot', { expect_snapshot_value(mae_mean, style = "serialize") expect_snapshot_value(mae_quant, style = "serialize") expect_snapshot_value(rmse_mean, style = "serialize") expect_snapshot_value(rmse_quant, style = "serialize") expect_snapshot_value(mse_mean, style = "serialize") expect_snapshot_value(mse_quant, style = "serialize") expect_snapshot_value(acc_mean, style = "serialize") expect_snapshot_value(acc_quant, style = "serialize") expect_snapshot_value(bacc_mean, style = "serialize") expect_snapshot_value(bacc_quant, style = "serialize") }) test_that('MAE computation is correct', { expect_equal( .mae(rep(0.5, 5), rep(1, 5))$estimate, 0.5) expect_equal( .mae(rep(0.5, 5), rep(1, 5))$se, 0.0) expect_error( .mae(rep(0.5, 5), rep(1, 3)), 'length(y) == length(yhat) is not TRUE', fixed = TRUE) }) test_that('MSE computation is correct', { expect_equal( .mse(rep(0.5, 5), rep(1, 5))$estimate, 0.25) expect_equal( .mse(rep(0.5, 5), rep(1, 5))$se, 0.0) expect_error( .mse(rep(0.5, 5), rep(1, 3)), 'length(y) == length(yhat) is not TRUE', fixed = TRUE) }) test_that('RMSE computation is correct', { expect_equal( .rmse(rep(0.5, 5), rep(1, 5))$estimate, sqrt(0.25)) expect_equal( .mse(rep(0.5, 5), rep(1, 5))$se, 0.0) expect_error( .mse(rep(0.5, 5), rep(1, 3)), 'length(y) == length(yhat) is not TRUE', fixed = TRUE) }) test_that('Accuracy computation is correct', { expect_equal( .accuracy(c(0, 0, 0, 1, 1, 1), c(0.2, 0.2, 0.2, 0.7, 0.7, 0.7))$estimate, 1.0 ) expect_error( .accuracy(c(1, 0), c(0.5)), 'length(y) == length(yhat) is not TRUE', fixed = TRUE) expect_error( .accuracy(c(2, 1), c(0.5, 0.5)), 'all(y <= 1 & y >= 0) is not TRUE', fixed = TRUE ) expect_error( .accuracy(c(1, 0), c(1.1, 0.5)), 'all(yhat <= 1 & yhat >= 0) is not TRUE', fixed = TRUE ) }) test_that('Balanced accuracy computation is correct', { expect_equal( .balanced_accuracy(c(0, 0, 1, 1, 1, 1), c(0.9, 0.9, 0.9, 0.9, 0.9, 0.9))$estimate, 0.5 ) expect_error( .balanced_accuracy(c(1, 0), c(0.5)), 'length(y) == length(yhat) is not TRUE', fixed = TRUE) expect_error( 
.balanced_accuracy(c(2, 1), c(0.5, 0.5)), 'all(y <= 1 & y >= 0) is not TRUE', fixed = TRUE ) expect_error( .balanced_accuracy(c(1, 0), c(1.1, 0.5)), 'all(yhat <= 1 & yhat >= 0) is not TRUE', fixed = TRUE ) }) loo/tests/testthat/test_kfold_helpers.R0000644000176200001440000000766115064301501020055 0ustar liggesusersset.seed(14014) test_that("kfold_split_random works", { fold_rand <- kfold_split_random(10, 100) expect_length(fold_rand, 100) expect_equal(sort(unique(fold_rand)), 1:10) expect_equal(sum(fold_rand == 2), sum(fold_rand == 9)) }) test_that("kfold_split_stratified works", { y <- rep(c(0, 1), times = c(10, 190)) fold_strat <- kfold_split_stratified(5, y) expect_true(all(table(fold_strat) == 40)) y <- rep(c(1, 2, 3), times = c(15, 33, 42)) fold_strat <- kfold_split_stratified(7, y) expect_equal(range(table(fold_strat)), c(12, 13)) y <- mtcars$cyl fold_strat <- kfold_split_stratified(10, y) expect_equal(range(table(fold_strat)), c(3, 4)) # test when a group has 1 observation # https://github.com/stan-dev/loo/issues/277 y <- rep(c(1, 2, 3), times = c(20, 40, 1)) expect_silent(fold_strat <- kfold_split_stratified(5, y)) # used to be a warning before fixing issue #277 tab <- table(fold_strat, y) expect_equal(tab[1, ], c("1" = 4, "2" = 8, "3" = 1)) for (i in 2:nrow(tab)) { expect_equal(tab[i, ], c("1" = 4, "2" = 8, "3" = 0)) } }) test_that("kfold_split_grouped works", { grp <- gl(n = 50, k = 15, labels = state.name) fold_group <- kfold_split_grouped(x = grp) expect_true(all(table(fold_group) == 75)) expect_equal(sum(table(fold_group)), length(grp)) fold_group <- kfold_split_grouped(K = 9, x = grp) expect_false(all(table(fold_group) == 75)) expect_equal(sum(table(fold_group)), length(grp)) grp <- gl(n = 50, k = 4, labels = state.name) grp[grp == "Montana"] <- "Utah" fold_group <- kfold_split_grouped(K = 10, x = grp) expect_equal(sum(table(fold_group)), length(grp) - 4) grp <- rep(c("A", "B"), each = 20) fold_group <- kfold_split_grouped(K = 2, x = grp) expect_equal(fold_group, as.integer(as.factor(grp))) }) test_that("kfold helpers throw correct errors", { expect_error(kfold_split_random(10), "!is.null(N) is not TRUE", fixed = TRUE) expect_error( kfold_split_random(10.5, 100), "K == as.integer(K) is not TRUE", fixed = TRUE ) expect_error( kfold_split_random(10, 100.5), "N == as.integer(N) is not TRUE", fixed = TRUE ) expect_error( kfold_split_random(K = c(1, 1), N = 100), "length(K) == 1 is not TRUE", fixed = TRUE ) expect_error( kfold_split_random(N = c(100, 100)), "length(N) == 1 is not TRUE", fixed = TRUE ) expect_error( kfold_split_random(K = 5, N = 4), "K <= N is not TRUE", fixed = TRUE ) expect_error( kfold_split_random(K = 1, N = 4), "K > 1 is not TRUE", fixed = TRUE ) y <- sample(c(0, 1), size = 200, replace = TRUE, prob = c(0.05, 0.95)) expect_error( kfold_split_stratified(10), "!is.null(x) is not TRUE", fixed = TRUE ) expect_error( kfold_split_stratified(10.5, y), "K == as.integer(K) is not TRUE", fixed = TRUE ) expect_error( kfold_split_stratified(K = c(1, 1), y), "length(K) == 1 is not TRUE", fixed = TRUE ) expect_error( kfold_split_stratified(K = 201, y), "K <= length(x) is not TRUE", fixed = TRUE ) expect_error( kfold_split_stratified(K = 1, y), "K > 1 is not TRUE", fixed = TRUE ) grp <- gl(n = 50, k = 15) expect_error(kfold_split_grouped(10), "!is.null(x) is not TRUE", fixed = TRUE) expect_error( kfold_split_grouped(3, c(1, 1, 1)), "'K' must not be bigger than the number of levels/groups in 'x'", fixed = TRUE ) expect_error( kfold_split_grouped(10.5, grp), "K == 
as.integer(K) is not TRUE", fixed = TRUE ) expect_error( kfold_split_grouped(K = c(1, 1), grp), "length(K) == 1 is not TRUE", fixed = TRUE ) expect_error( kfold_split_grouped(K = 1, grp), "K > 1 is not TRUE", fixed = TRUE ) }) test_that("print_dims.kfold works", { xx <- structure(list(), K = 17, class = c("kfold", "loo")) expect_output(print_dims(xx), "Based on 17-fold cross-validation") attr(xx, "K") <- NULL expect_silent(print_dims(xx)) }) loo/tests/testthat/test_psis.R0000644000176200001440000001153115074562565016225 0ustar liggesusersoptions(mc.cores = 1) options(loo.cores = NULL) set.seed(123) LLarr <- example_loglik_array() LLmat <- example_loglik_matrix() LLvec <- LLmat[, 1] chain_id <- rep(1:2, each = dim(LLarr)[1]) r_eff_arr <- relative_eff(exp(LLarr)) r_eff_vec <- relative_eff(exp(LLvec), chain_id = chain_id) psis1 <- psis(log_ratios = -LLarr, r_eff = r_eff_arr) test_that("psis results haven't changed", { expect_snapshot_value(psis1, style = "serialize") }) test_that("psis returns object with correct structure", { expect_true(is.psis(psis1)) expect_false(is.loo(psis1)) expect_false(is.psis_loo(psis1)) expect_named(psis1, c("log_weights", "diagnostics")) expect_named(psis1$diagnostics, c("pareto_k", "n_eff", "r_eff")) expect_equal(dim(psis1), dim(LLmat)) expect_length(psis1$diagnostics$pareto_k, dim(psis1)[2]) expect_length(psis1$diagnostics$n_eff, dim(psis1)[2]) }) test_that("psis methods give same results", { psis2 <- suppressWarnings(psis(-LLmat, r_eff = r_eff_arr)) expect_identical(psis1, psis2) psisvec <- suppressWarnings(psis(-LLvec, r_eff = r_eff_vec)) psismat <- suppressWarnings(psis(-LLmat[, 1], r_eff = r_eff_vec)) expect_identical(psisvec, psismat) }) test_that("psis throws correct errors and warnings", { # r_eff default no warnings expect_no_warning(psis(-LLarr)) expect_no_warning(psis(-LLmat)) expect_no_warning(psis(-LLmat[, 1])) # r_eff=NULL no warnings expect_silent(psis(-LLarr, r_eff = NULL)) expect_silent(psis(-LLmat, r_eff = NULL)) expect_silent(psis(-LLmat[, 1], r_eff = NULL)) # r_eff=NA disables warnings expect_silent(psis(-LLarr, r_eff = NA)) expect_silent(psis(-LLmat, r_eff = NA)) expect_silent(psis(-LLmat[, 1], r_eff = NA)) # r_eff default and r_eff=NA give same answer expect_equal( suppressWarnings(psis(-LLarr)), psis(-LLarr, r_eff = NA) ) # r_eff=NULL and r_eff=NA give same answer expect_equal( suppressWarnings(psis(-LLarr, r_eff = NULL)), psis(-LLarr, r_eff = NA) ) # r_eff scalar is fine expect_silent(psis(-LLarr, r_eff = r_eff_arr[1])) # r_eff non-scalar wrong length is error expect_error(psis(-LLarr, r_eff = r_eff_arr[-1]), "one value per observation") # r_eff has some NA values which are replaced with 1 r_eff_arr[2] <- NA expect_snapshot(psis(-LLarr, r_eff = r_eff_arr)) # tail length warnings expect_snapshot(psis(-LLarr[1:5, , ])) # no NAs or non-finite values allowed LLmat[1, 1] <- NA expect_error(psis(-LLmat), "NAs not allowed in input") LLmat[1, 1] <- 1 LLmat[10, 2] <- -Inf expect_error(psis(-LLmat), "All input values must be finite or -Inf") # log ratio of -Inf is allowed LLmat[10, 2] <- Inf expect_no_error(psis(-LLmat)) # no lists allowed expect_error( expect_warning(psis(as.list(-LLvec))), "List not allowed as input" ) # if array, must be 3-D array dim(LLarr) <- c(2, 250, 2, 32) expect_error( psis(-LLarr), "length(dim(log_ratios)) == 3 is not TRUE", fixed = TRUE ) }) test_that("throw_tail_length_warnings gives correct output", { expect_silent(throw_tail_length_warnings(10)) expect_equal(throw_tail_length_warnings(10), 10) 
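# A single short tail should warn; with several short columns the warning lists which columns are skipped and truncates long lists ("more not printed").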
expect_warning(throw_tail_length_warnings(1), "Not enough tail samples") expect_warning( throw_tail_length_warnings(c(1, 10, 2)), "Skipping the following columns: 1, 3" ) expect_warning(throw_tail_length_warnings(rep(1, 21)), "11 more not printed") }) test_that("weights method returns correct output", { # default arguments expect_identical(weights(psis1), weights(psis1, normalize = TRUE, log = TRUE)) # unnormalized log-weights same as in psis object expect_equal(psis1$log_weights, weights(psis1, normalize = FALSE)) # normalized weights sum to 1 expect_equal( colSums(weights(psis1, normalize = TRUE, log = FALSE)), rep(1, ncol(psis1$log_weights)) ) }) test_that("psis_n_eff methods works properly", { w <- weights(psis1, normalize = TRUE, log = FALSE) expect_equal(psis_n_eff.default(w[, 1], r_eff = 1), 1 / sum(w[, 1]^2)) expect_equal(psis_n_eff.default(w[, 1], r_eff = 2), 2 / sum(w[, 1]^2)) expect_equal( psis_n_eff.default(w[, 1], r_eff = 2), psis_n_eff.matrix(w, r_eff = rep(2, ncol(w)))[1] ) expect_no_warning(psis_n_eff.default(w[, 1])) expect_no_warning(psis_n_eff.matrix(w)) }) test_that("do_psis_i throws warning if all tail values the same", { xx <- c(1, 2, 3, 4, 4, 4, 4, 4, 4, 4, 4) expect_warning( val <- do_psis_i(xx, tail_len_i = 6), "all tail values are the same" ) expect_equal(val$pareto_k, Inf) }) test_that("psis_smooth_tail returns original tail values if k is infinite", { # skip on M1 Mac until we figure out why this test fails only on M1 Mac skip_if(Sys.info()[["sysname"]] == "Darwin" && R.version$arch == "aarch64") xx <- c(1, 2, 3, 4, 4, 4, 4, 4, 4, 4, 4) val <- suppressWarnings(psis_smooth_tail(xx, 3)) expect_equal(val$tail, xx) expect_equal(val$k, Inf) }) loo/tests/testthat/test_relative_eff.R0000644000176200001440000000311315064301501017653 0ustar liggesusersoptions(mc.cores = 1) set.seed(123) LLarr <- example_loglik_array() LLmat <- example_loglik_matrix() test_that("relative_eff results haven't changed", { expect_snapshot_value(relative_eff(exp(LLarr)), style = "serialize") }) test_that("relative_eff is equal to ESS / S", { dims <- dim(LLarr) ess <- r_eff <- rep(NA, dims[3]) for (j in 1:dims[3]) { r_eff[j] <- relative_eff(LLarr[,, 1, drop = FALSE]) ess[j] <- posterior::ess_mean(LLarr[,, 1]) } S <- prod(dim(LLarr)[1:2]) expect_equal(r_eff, ess / S) }) test_that("relative_eff array and matrix methods return identical output", { r_eff_arr <- relative_eff(exp(LLarr)) r_eff_mat <- relative_eff(exp(LLmat), chain_id = rep(1:2, each = nrow(LLarr))) expect_identical(r_eff_arr, r_eff_mat) }) test_that("relative_eff matrix and function methods return identical output", { source(test_path("data-for-tests/function_method_stuff.R")) chain <- rep(1, nrow(draws)) r_eff_mat <- relative_eff(llmat_from_fn, chain_id = chain) r_eff_fn <- relative_eff( llfun, chain_id = chain, data = data, draws = draws, cores = 1 ) expect_identical(r_eff_mat, r_eff_fn) }) test_that("relative_eff with multiple cores runs", { skip_on_cran() source(test_path("data-for-tests/function_method_stuff.R")) dim(llmat_from_fn) <- c(nrow(llmat_from_fn), 1, ncol(llmat_from_fn)) r_eff_arr <- relative_eff(llmat_from_fn, cores = 2) r_eff_fn <- relative_eff( llfun, chain_id = rep(1, nrow(draws)), data = data, draws = draws, cores = 2 ) expect_identical(r_eff_arr, r_eff_fn) }) loo/tests/testthat/test_tisis.R0000644000176200001440000001357615074562565016415 0ustar liggesusersoptions(mc.cores = 1) options(loo.cores = NULL) set.seed(123) LLarr <- example_loglik_array() LLmat <- example_loglik_matrix() LLvec <- LLmat[, 1] chain_id <- 
  rep(1:2, each = dim(LLarr)[1])
r_eff_arr <- relative_eff(exp(LLarr))
r_eff_vec <- relative_eff(exp(LLvec), chain_id = chain_id)

psis1 <- psis(log_ratios = -LLarr, r_eff = r_eff_arr)
tis1 <- tis(log_ratios = -LLarr, r_eff = r_eff_arr)
is1 <- sis(log_ratios = -LLarr, r_eff = r_eff_arr)

test_that("tis and sis run", {
  LLvec[1] <- -10
  expect_silent(tis1 <- tis(log_ratios = -LLvec, r_eff = r_eff_vec))
  expect_silent(is1 <- sis(log_ratios = -LLvec, r_eff = r_eff_vec))
  expect_failure(expect_equal(tis1$log_weights, is1$log_weights))
  expect_failure(expect_equal(tis1$log_weights, psis1$log_weights))
})

test_that("tis() and sis() return objects with correct structure for tis/sis", {
  expect_false(is.psis(tis1))
  expect_false(is.psis(is1))
  expect_true(is.tis(tis1))
  expect_false(is.tis(is1))
  expect_false(is.sis(tis1))
  expect_true(is.sis(is1))
  expect_false(is.loo(tis1))
  expect_false(is.loo(is1))
  expect_false(is.psis_loo(tis1))
  expect_false(is.psis_loo(is1))
  expect_named(tis1, c("log_weights", "diagnostics"))
  expect_named(is1, c("log_weights", "diagnostics"))
  expect_named(tis1$diagnostics, c("pareto_k", "n_eff", "r_eff"))
  expect_named(is1$diagnostics, c("pareto_k", "n_eff", "r_eff"))
  expect_equal(dim(tis1), dim(LLmat))
  expect_equal(dim(is1), dim(LLmat))
  expect_length(tis1$diagnostics$pareto_k, dim(psis1)[2])
  expect_length(is1$diagnostics$pareto_k, dim(psis1)[2])
  expect_length(tis1$diagnostics$n_eff, dim(psis1)[2])
  expect_length(is1$diagnostics$n_eff, dim(psis1)[2])
  expect_equal(attr(psis1, "method")[1], "psis")
  expect_equal(attr(tis1, "method")[1], "tis")
  expect_equal(attr(is1, "method")[1], "sis")
})

test_that("tis and sis methods give same results", {
  tis2 <- suppressWarnings(tis(-LLmat, r_eff = r_eff_arr))
  expect_identical(tis1, tis2)
  tisvec <- suppressWarnings(tis(-LLvec, r_eff = r_eff_vec))
  tismat <- suppressWarnings(tis(-LLmat[, 1], r_eff = r_eff_vec))
  expect_identical(tisvec, tismat)
  is2 <- suppressWarnings(sis(-LLmat, r_eff = r_eff_arr))
  expect_identical(is1, is2)
  isvec <- suppressWarnings(sis(-LLvec, r_eff = r_eff_vec))
  ismat <- suppressWarnings(sis(-LLmat[, 1], r_eff = r_eff_vec))
  expect_identical(isvec, ismat)
})

test_that("tis throws correct errors and warnings", {
  # r_eff default no warnings
  expect_silent(tis(-LLarr))
  expect_silent(tis(-LLmat))
  expect_silent(tis(-LLmat[, 1]))

  # r_eff=NULL no warnings
  expect_silent(tis(-LLarr, r_eff = NULL))
  expect_silent(tis(-LLmat, r_eff = NULL))
  expect_silent(tis(-LLmat[, 1], r_eff = NULL))

  # r_eff=NA no warnings
  expect_silent(tis(-LLarr, r_eff = NA))
  expect_silent(tis(-LLmat, r_eff = NA))
  expect_silent(tis(-LLmat[, 1], r_eff = NA))

  # r_eff default and r_eff=NA give same answer
  expect_equal(
    suppressWarnings(tis(-LLarr)),
    tis(-LLarr, r_eff = NA)
  )

  # r_eff=NULL and r_eff=NA give same answer
  expect_equal(
    suppressWarnings(tis(-LLarr, r_eff = NULL)),
    tis(-LLarr, r_eff = NA)
  )

  # r_eff scalar is fine
  expect_silent(tis(-LLarr, r_eff = r_eff_arr[1]))

  # r_eff wrong length is error
  expect_error(tis(-LLarr, r_eff = r_eff_arr[-1]), "one value per observation")

  # r_eff has some NA values which are replaced with 1
  r_eff_arr[2] <- NA
  expect_snapshot(psis(-LLarr, r_eff = r_eff_arr))

  # no NAs or non-finite values allowed
  LLmat[1, 1] <- NA
  expect_error(tis(-LLmat), "NAs not allowed in input")

  LLmat[1, 1] <- 1
  LLmat[10, 2] <- -Inf
  expect_error(tis(-LLmat), "All input values must be finite or -Inf")

  LLmat[10, 2] <- Inf
  expect_no_error(tis(-LLmat))

  # no lists allowed
  expect_error(expect_warning(
    tis(as.list(-LLvec)),
    "List not allowed as input"
  ))

  # if array, must be 3-D array
dim(LLarr) <- c(2, 250, 2, 32) expect_error( tis(-LLarr), "length(dim(log_ratios)) == 3 is not TRUE", fixed = TRUE ) }) test_that("explict test of values for 'sis' and 'tis'", { lw <- 1:16 expect_silent(tis_true <- tis(log_ratios = lw, r_eff = NA)) expect_equal( as.vector(weights(tis_true, log = TRUE, normalize = FALSE)), c( -14.0723, -13.0723, -12.0723, -11.0723, -10.0723, -9.0723, -8.0723, -7.0723, -6.0723, -5.0723, -4.0723, -3.0723, -2.0723, -1.0723, -0.0723, 0. ) + 15.07238, tolerance = 0.001 ) expect_silent(is_true <- sis(log_ratios = lw, r_eff = NA)) expect_equal( as.vector(weights(is_true, log = TRUE, normalize = FALSE)), lw, tolerance = 0.00001 ) lw <- c( 0.7609420, 1.3894140, 0.4158346, 2.5307927, 4.3379119, 2.4159240, 2.2462172, 0.8057697, 0.9333107, 1.5599302 ) expect_silent(tis_true <- tis(log_ratios = lw, r_eff = NA)) expect_equal( as.vector(weights(tis_true, log = TRUE, normalize = FALSE)), c( -2.931, -2.303, -3.276, -1.161, 0, -1.276, -1.446, -2.886, -2.759, -2.132 ) + 3.692668, tolerance = 0.001 ) expect_silent(is_true <- sis(log_ratios = lw, r_eff = NA)) expect_equal( as.vector(weights(is_true, log = TRUE, normalize = FALSE)), lw, tolerance = 0.00001 ) }) test_that("tis_loo and sis_loo are returned", { LLmat <- example_loglik_matrix() loo_psis <- suppressWarnings(loo(LLmat, r_eff = NA, is_method = "psis")) loo_tis <- suppressWarnings(loo(LLmat, r_eff = NA, is_method = "tis")) loo_sis <- suppressWarnings(loo(LLmat, r_eff = NA, is_method = "sis")) expect_s3_class(loo_tis, "tis_loo") expect_s3_class(loo_sis, "sis_loo") expect_s3_class(loo_tis, "importance_sampling_loo") expect_s3_class(loo_sis, "importance_sampling_loo") expect_output(print(loo_tis), regexp = "tis_loo") expect_output(print(loo_sis), regexp = "sis_loo") }) loo/tests/testthat/_snaps/0000755000176200001440000000000015100712211015316 5ustar liggesusersloo/tests/testthat/_snaps/compare.md0000644000176200001440000000510215100712211017264 0ustar liggesusers# loo_compare returns expected results (2 models) WAoAAAACAAQFAAACAwAAAAMOAAAAEAAAAAAAAAAAwBA6U1+cRe4AAAAAAAAAAD+2ake0LxMB wFTh8N3JQljAVeWWE8MGuUARCD2zEXBfQBEalRIN2T9ACijAYdW5U0AmZ5XrANCKP/H9Zexy 814/8ZtgnG1nx0Bk4fDdyUJYQGXllhPDBrlAIQg9sxFwX0AhGpUSDdk/AAAEAgAAAAEABAAJ AAAAA2RpbQAAAA0AAAACAAAAAgAAAAgAAAQCAAAAAQAEAAkAAAAIZGltbmFtZXMAAAATAAAA AgAAABAAAAACAAQACQAAAAZtb2RlbDEABAAJAAAABm1vZGVsMgAAABAAAAAIAAQACQAAAAll bHBkX2RpZmYABAAJAAAAB3NlX2RpZmYABAAJAAAACWVscGRfd2FpYwAEAAkAAAAMc2VfZWxw ZF93YWljAAQACQAAAAZwX3dhaWMABAAJAAAACXNlX3Bfd2FpYwAEAAkAAAAEd2FpYwAEAAkA AAAHc2Vfd2FpYwAABAIAAAABAAQACQAAAAVjbGFzcwAAABAAAAADAAQACQAAAAtjb21wYXJl LmxvbwAEAAkAAAAGbWF0cml4AAQACQAAAAVhcnJheQAAAP4= # loo_compare returns expected result (3 models) WAoAAAACAAQFAAACAwAAAAMOAAAAGAAAAAAAAAAAwBA6U1+cRe7AMA3KkbYEGAAAAAAAAAAA P7ZqR7QvEwE/y6/t4TTtXsBU4fDdyUJYwFXllhPDBrnAWOVjgjbDYkARCD2zEXBfQBEalRIN 2T9AEPIF3GigE0AKKMBh1blTQCZnlesA0IpAQcjYUhrdCj/x/WXscvNeP/GbYJxtZ8c/8YDQ kmfJX0Bk4fDdyUJYQGXllhPDBrlAaOVjgjbDYkAhCD2zEXBfQCEalRIN2T9AIPIF3GigEwAA BAIAAAABAAQACQAAAANkaW0AAAANAAAAAgAAAAMAAAAIAAAEAgAAAAEABAAJAAAACGRpbW5h bWVzAAAAEwAAAAIAAAAQAAAAAwAEAAkAAAAGbW9kZWwxAAQACQAAAAZtb2RlbDIABAAJAAAA Bm1vZGVsMwAAABAAAAAIAAQACQAAAAllbHBkX2RpZmYABAAJAAAAB3NlX2RpZmYABAAJAAAA CWVscGRfd2FpYwAEAAkAAAAMc2VfZWxwZF93YWljAAQACQAAAAZwX3dhaWMABAAJAAAACXNl X3Bfd2FpYwAEAAkAAAAEd2FpYwAEAAkAAAAHc2Vfd2FpYwAABAIAAAABAAQACQAAAAVjbGFz cwAAABAAAAADAAQACQAAAAtjb21wYXJlLmxvbwAEAAkAAAAGbWF0cml4AAQACQAAAAVhcnJh eQAAAP4= # compare returns expected result (2 models) Code comp1 Output elpd_diff se 0.0 0.0 --- Code comp2 Output elpd_diff se 
-4.1 0.1 # compare returns expected result (3 models) WAoAAAACAAQFAAACAwAAAAMOAAAAGAAAAAAAAAAAwBA6U1+cRe7AMA3KkbYEGAAAAAAAAAAA P7ZqR7QvEwE/y6/t4TTtXsBU4fDdyUJYwFXllhPDBrnAWOVjgjbDYkARCD2zEXBfQBEalRIN 2T9AEPIF3GigE0AKKMBh1blTQCZnlesA0IpAQcjYUhrdCj/x/WXscvNeP/GbYJxtZ8c/8YDQ kmfJX0Bk4fDdyUJYQGXllhPDBrlAaOVjgjbDYkAhCD2zEXBfQCEalRIN2T9AIPIF3GigEwAA BAIAAAABAAQACQAAAANkaW0AAAANAAAAAgAAAAMAAAAIAAAEAgAAAAEABAAJAAAACGRpbW5h bWVzAAAAEwAAAAIAAAAQAAAAAwAEAAkAAAACdzEABAAJAAAAAncyAAQACQAAAAJ3MwAAABAA AAAIAAQACQAAAAllbHBkX2RpZmYABAAJAAAAB3NlX2RpZmYABAAJAAAACWVscGRfd2FpYwAE AAkAAAAMc2VfZWxwZF93YWljAAQACQAAAAZwX3dhaWMABAAJAAAACXNlX3Bfd2FpYwAEAAkA AAAEd2FpYwAEAAkAAAAHc2Vfd2FpYwAABAIAAAABAAQACQAAAAVjbGFzcwAAABAAAAAEAAQA CQAAAAtjb21wYXJlLmxvbwAEAAkAAAAGbWF0cml4AAQACQAAAAVhcnJheQAEAAkAAAAPb2xk X2NvbXBhcmUubG9vAAAA/g== loo/tests/testthat/_snaps/gpdfit.md0000644000176200001440000000070015027034070017122 0ustar liggesusers# gpdfit returns correct result WAoAAAACAAQFAAACAwAAAAIOAAAAAj+cD4qKVTgaP/BK8xJCK3sAAAQCAAAAAQAEAAkAAAAF bmFtZXMAAAAQAAAAAgAEAAkAAAABawAEAAkAAAAFc2lnbWEAAAD+ --- WAoAAAACAAQFAAACAwAAAAIOAAAAAj+yA4g2tkbvP/BK8xJCK3sAAAQCAAAAAQAEAAkAAAAF bmFtZXMAAAAQAAAAAgAEAAkAAAABawAEAAkAAAAFc2lnbWEAAAD+ --- WAoAAAACAAQFAAACAwAAAAIOAAAAAj+yA5BUlFrHP/BK8oexSVIAAAQCAAAAAQAEAAkAAAAF bmFtZXMAAAAQAAAAAgAEAAkAAAABawAEAAkAAAAFc2lnbWEAAAD+ loo/tests/testthat/_snaps/deprecated_extractors.md0000644000176200001440000002322715027034070022234 0ustar liggesusers# extracting estimates by name is deprecated for loo objects Code loo1$elpd_loo Condition Warning: Accessing elpd_loo using '$' is deprecated and will be removed in a future release. Please extract the elpd_loo estimate from the 'estimates' component instead. Output [1] -83.58926 --- Code loo1$se_elpd_loo Condition Warning: Accessing se_elpd_loo using '$' is deprecated and will be removed in a future release. Please extract the se_elpd_loo estimate from the 'estimates' component instead. Output [1] 4.283835 --- Code loo1$p_loo Condition Warning: Accessing p_loo using '$' is deprecated and will be removed in a future release. Please extract the p_loo estimate from the 'estimates' component instead. Output [1] 3.328834 --- Code loo1$se_p_loo Condition Warning: Accessing se_p_loo using '$' is deprecated and will be removed in a future release. Please extract the se_p_loo estimate from the 'estimates' component instead. Output [1] 1.152103 --- Code loo1$looic Condition Warning: Accessing looic using '$' is deprecated and will be removed in a future release. Please extract the looic estimate from the 'estimates' component instead. Output [1] 167.1785 --- Code loo1$se_looic Condition Warning: Accessing se_looic using '$' is deprecated and will be removed in a future release. Please extract the se_looic estimate from the 'estimates' component instead. Output [1] 8.567671 --- Code loo1["elpd_loo"] Condition Warning: Accessing elpd_loo using '[' is deprecated and will be removed in a future release. Please extract the elpd_loo estimate from the 'estimates' component instead. Output $elpd_loo [1] -83.58926 --- Code loo1["se_elpd_loo"] Condition Warning: Accessing se_elpd_loo using '[' is deprecated and will be removed in a future release. Please extract the se_elpd_loo estimate from the 'estimates' component instead. Output $se_elpd_loo [1] 4.283835 --- Code loo1["p_loo"] Condition Warning: Accessing p_loo using '[' is deprecated and will be removed in a future release. Please extract the p_loo estimate from the 'estimates' component instead. 
Output $p_loo [1] 3.328834 --- Code loo1["se_p_loo"] Condition Warning: Accessing se_p_loo using '[' is deprecated and will be removed in a future release. Please extract the se_p_loo estimate from the 'estimates' component instead. Output $se_p_loo [1] 1.152103 --- Code loo1["looic"] Condition Warning: Accessing looic using '[' is deprecated and will be removed in a future release. Please extract the looic estimate from the 'estimates' component instead. Output $looic [1] 167.1785 --- Code loo1["se_looic"] Condition Warning: Accessing se_looic using '[' is deprecated and will be removed in a future release. Please extract the se_looic estimate from the 'estimates' component instead. Output $se_looic [1] 8.567671 --- Code loo1[["elpd_loo"]] Condition Warning: Accessing elpd_loo using '[[' is deprecated and will be removed in a future release. Please extract the elpd_loo estimate from the 'estimates' component instead. Output [1] -83.58926 --- Code loo1[["se_elpd_loo"]] Condition Warning: Accessing se_elpd_loo using '[[' is deprecated and will be removed in a future release. Please extract the se_elpd_loo estimate from the 'estimates' component instead. Output [1] 4.283835 --- Code loo1[["p_loo"]] Condition Warning: Accessing p_loo using '[[' is deprecated and will be removed in a future release. Please extract the p_loo estimate from the 'estimates' component instead. Output [1] 3.328834 --- Code loo1[["se_p_loo"]] Condition Warning: Accessing se_p_loo using '[[' is deprecated and will be removed in a future release. Please extract the se_p_loo estimate from the 'estimates' component instead. Output [1] 1.152103 --- Code loo1[["looic"]] Condition Warning: Accessing looic using '[[' is deprecated and will be removed in a future release. Please extract the looic estimate from the 'estimates' component instead. Output [1] 167.1785 --- Code loo1[["se_looic"]] Condition Warning: Accessing se_looic using '[[' is deprecated and will be removed in a future release. Please extract the se_looic estimate from the 'estimates' component instead. Output [1] 8.567671 # extracting estimates by name is deprecated for waic objects Code waic1$elpd_waic Condition Warning: Accessing elpd_waic using '$' is deprecated and will be removed in a future release. Please extract the elpd_waic estimate from the 'estimates' component instead. Output [1] -83.53033 --- Code waic1$se_elpd_waic Condition Warning: Accessing se_elpd_waic using '$' is deprecated and will be removed in a future release. Please extract the se_elpd_waic estimate from the 'estimates' component instead. Output [1] 4.258048 --- Code waic1$p_waic Condition Warning: Accessing p_waic using '$' is deprecated and will be removed in a future release. Please extract the p_waic estimate from the 'estimates' component instead. Output [1] 3.269898 --- Code waic1$se_p_waic Condition Warning: Accessing se_p_waic using '$' is deprecated and will be removed in a future release. Please extract the se_p_waic estimate from the 'estimates' component instead. Output [1] 1.124365 --- Code waic1$waic Condition Warning: Accessing waic using '$' is deprecated and will be removed in a future release. Please extract the waic estimate from the 'estimates' component instead. Output [1] 167.0607 --- Code waic1$se_waic Condition Warning: Accessing se_waic using '$' is deprecated and will be removed in a future release. Please extract the se_waic estimate from the 'estimates' component instead. 
Output [1] 8.516096 --- Code waic1["elpd_waic"] Condition Warning: Accessing elpd_waic using '[' is deprecated and will be removed in a future release. Please extract the elpd_waic estimate from the 'estimates' component instead. Output $elpd_waic [1] -83.53033 --- Code waic1["se_elpd_waic"] Condition Warning: Accessing se_elpd_waic using '[' is deprecated and will be removed in a future release. Please extract the se_elpd_waic estimate from the 'estimates' component instead. Output $se_elpd_waic [1] 4.258048 --- Code waic1["p_waic"] Condition Warning: Accessing p_waic using '[' is deprecated and will be removed in a future release. Please extract the p_waic estimate from the 'estimates' component instead. Output $p_waic [1] 3.269898 --- Code waic1["se_p_waic"] Condition Warning: Accessing se_p_waic using '[' is deprecated and will be removed in a future release. Please extract the se_p_waic estimate from the 'estimates' component instead. Output $se_p_waic [1] 1.124365 --- Code waic1["waic"] Condition Warning: Accessing waic using '[' is deprecated and will be removed in a future release. Please extract the waic estimate from the 'estimates' component instead. Output $waic [1] 167.0607 --- Code waic1["se_waic"] Condition Warning: Accessing se_waic using '[' is deprecated and will be removed in a future release. Please extract the se_waic estimate from the 'estimates' component instead. Output $se_waic [1] 8.516096 --- Code waic1[["elpd_waic"]] Condition Warning: Accessing elpd_waic using '[[' is deprecated and will be removed in a future release. Please extract the elpd_waic estimate from the 'estimates' component instead. Output [1] -83.53033 --- Code waic1[["se_elpd_waic"]] Condition Warning: Accessing se_elpd_waic using '[[' is deprecated and will be removed in a future release. Please extract the se_elpd_waic estimate from the 'estimates' component instead. Output [1] 4.258048 --- Code waic1[["p_waic"]] Condition Warning: Accessing p_waic using '[[' is deprecated and will be removed in a future release. Please extract the p_waic estimate from the 'estimates' component instead. Output [1] 3.269898 --- Code waic1[["se_p_waic"]] Condition Warning: Accessing se_p_waic using '[[' is deprecated and will be removed in a future release. Please extract the se_p_waic estimate from the 'estimates' component instead. Output [1] 1.124365 --- Code waic1[["waic"]] Condition Warning: Accessing waic using '[[' is deprecated and will be removed in a future release. Please extract the waic estimate from the 'estimates' component instead. Output [1] 167.0607 --- Code waic1[["se_waic"]] Condition Warning: Accessing se_waic using '[[' is deprecated and will be removed in a future release. Please extract the se_waic estimate from the 'estimates' component instead. Output [1] 8.516096 loo/tests/testthat/_snaps/loo_moment_matching.md0000644000176200001440000045563415027034070021713 0ustar liggesusers# loo_moment_match.default warnings work Code loo_moment_match(x, loo_manual, post_draws_test, log_lik_i_test, unconstrain_pars_test, log_prob_upars_test, log_lik_i_upars_test, max_iters = 1, k_thres = 0.5, split = TRUE, cov = TRUE, cores = 1) Condition Warning: The maximum number of moment matching iterations ('max_iters' argument) was reached. Increasing the value may improve accuracy. Warning: Some Pareto k diagnostic values are too high. See help('pareto-k-diagnostic') for details. Output Computed from 4000 by 30 log-likelihood matrix. 
Estimate SE elpd_loo -74.0 18.8 p_loo 11.7 11.1 looic 148.0 37.6 ------ MCSE of elpd_loo is NA. MCSE and ESS estimates assume independent draws (r_eff=1). Pareto k diagnostic values: Count Pct. Min. ESS (-Inf, 0.7] (good) 29 96.7% 3758 (0.7, 1] (bad) 1 3.3% (1, Inf) (very bad) 0 0.0% See help('pareto-k-diagnostic') for details. # loo_moment_match.default works WAoAAAACAAQFAAACAwAAAAMTAAAACgAAAg4AAAAGwFJQKLlLGUdAJeeelRE5/EBiUCi5SxlH QDIH7c01qv5AJJ/C+xNRK0BCB+3NNar+AAAEAgAAAAEABAAJAAAAA2RpbQAAAA0AAAACAAAA AwAAAAIAAAQCAAAAAQAEAAkAAAAIZGltbmFtZXMAAAATAAAAAgAAABAAAAADAAQACQAAAAhl bHBkX2xvbwAEAAkAAAAFcF9sb28ABAAJAAAABWxvb2ljAAAAEAAAAAIABAAJAAAACEVzdGlt YXRlAAQACQAAAAJTRQAAAP4AAAIOAAAAlsAz2SCKL/OPv/uoQkpWQUi//t6gM8RRaL/7ME6e 7mn4v/slBOxuIMC//9jJ8p3XcL/7L6Fev0U4wAA2/FFJKpi//ScFgWbouL/8Pocg7R74v/0p qq1wTGC/+x8Kh2qjyL/7JFoKTeIwv/soK3EUBUC//KBEQBPCGMAAKtiBefwUv/s4omTXOVjA A0qKEEuTHL/7hlp0sBHYv/xVS8d60Ki//yJy272SKL/7oVoD6+JIv/7iRaQt0cC//VaEo30D QL/85OxxwadowAHqd1LUWNy/+9Uw/+PdqL/7IYmDCMJQv/+TLUcS2KC//UsYUnCS0D/ADGFe xkQGP2Gfcpi2J8Y/Y+tB1l78/T9hmK3v3QgHP2GaKR7p1QA/ZQkD2NQUNz9hp3Fk+oG7P2Wd 0KgLpYI/Yjujzb+UVT9hx0KNqoPZP2J1vTdzw+s/YaMJLQv8lD9hpM5D/++/P2GZy1TTq7Y/ YfB+8HlL/j9lqO+CzesuP2GpxXMbu9A/cGE1wkkFgT9hvBsU728qP2HQSdm5/Yo/Y/zERK8U MT9hni0KCSNZP2O2kFJN6R4/Ylo+R+/P/j9iFB/WgCCUP2s9ryP5flA/YdKJGMivnD9hmtGw ba2EP2SA235CBJ8/YotQDpXP+EAkqqbkVm+LP5IXf6UkPQA/lrxpW1EOAD+SDmssqA4AP5IS 8YLzxAA/mQEBdV99AD+SOdWCpaAAP5otz1Wr6wA/k0U02BITAD+SYx8kcE4AP5PhNpO18QA/ ki0P3eyAAD+SMjlDn5oAP5IR1Ro8TgA/krLjpKmXAD+aTlBMoagAP5JAN/f7OgA/qwiqhKr0 AD+SbOl4OJEAP5J03K3aTgA/lsJL+aFkAD+SFPC7p4gAP5Y04S7O5wA/k4CJvuH6AD+S+DEh 3hkAP6Omxr93f4A/kp2i9JwgAD+SFOjEYZoAP5fSMVoWZAA/lAsiPEe5AEBD2SCKL/OPQAuo QkpWQUhADt6gM8RRaEALME6e7mn4QAslBOxuIMBAD9jJ8p3XcEALL6Fev0U4QBA2/FFJKphA DScFgWbouEAMPocg7R74QA0pqq1wTGBACx8Kh2qjyEALJFoKTeIwQAsoK3EUBUBADKBEQBPC GEAQKtiBefwUQAs4omTXOVhAE0qKEEuTHEALhlp0sBHYQAxVS8d60KhADyJy272SKEALoVoD 6+JIQA7iRaQt0cBADVaEo30DQEAM5OxxwadoQBHqd1LUWNxAC9Uw/+PdqEALIYmDCMJQQA+T LUcS2KBADUsYUnCS0D/2XcffnmqVv5oq/ZSIVXI/rc6o9F9nBr9RT6Q+S52Qv3AJF5hupNw/ teKv62UxfL+O7vQo7zgIP8HB+bh4VvU/p+nRG8MPFz+j+bKjZAlIv6dNik1Iv/W/pGhy2+ci f7+iYyxrlH4Ov4shzHMAyAw/r8r08fJQBz+9B+WoAPW6v6EcmkpM9v8/sa5n3E19AL+gl0c6 bbNbP5Z/I4ovlu4/xB68LmfXyr+KgBHuAEaAP7sysVWUlHQ/tAdcziRrAj+tf7e/7a72P6bM edRbec6/pOE4IxDMN79Twd0WCzSwP8WPgi1BERa/kp3azvQypgAABAIAAAH/AAAADQAAAAIA AAAeAAAABQAABAIAAAL/AAAAEwAAAAIAAAD+AAAAEAAAAAUABAAJAAAACGVscGRfbG9vAAQA CQAAAA1tY3NlX2VscGRfbG9vAAQACQAAAAVwX2xvbwAEAAkAAAAFbG9vaWMABAAJAAAAEmlu Zmx1ZW5jZV9wYXJldG9fawAAAP4AAAITAAAAAwAAAA4AAAAeP+fdHDQ/QUa/mir9lIhVcj+t zqj0X2cGv1FPpD5LnZC/cAkXmG6k3D+14q/rZTF8v47u9CjvOAg/wcH5uHhW9T+n6dEbww8X P6P5sqNkCUi/p02KTUi/9b+kaHLb5yJ/v6JjLGuUfg6/iyHMcwDIDD+vyvTx8lAHP70H5agA 9bq/oRyaSkz2/z+xrmfcTX0Av6CXRzpts1s/ln8jii+W7j/EHrwuZ9fKv4qAEe4ARoA/uzKx VZSUdD+0B1zOJGsCP61/t7/trvY/psx51Ft5zr+k4TgjEMw3v1PB3RYLNLA/xY+CLUERFr+S ndrO9DKmAAAADgAAAB5ARQcvr5nxRUCurmCYTUMgQK6GZIiNpldArq7KVLrefECurq6LsT47 QK5xjnyab6BArq3SHuGPKUCuZrTzmGSJQK6kDS9Qm5FArqvKHptSZ0CuoDIHBkg3QK6uB1vl FFtArq3uNy14MkCurq95Y/PBQK6pD98CEa1ArmWF3fh0P0CuraLKskjiQK1cwbtJXeNArqxg HKDKmkCuqx85Cm9rQK6FgdGWcINArq598JLLRECuil5ipQqpQK6iHFktv3tArqazEBI7tkCt 65ndUr6PQK6q4cDxAOlArq6k0f++pUCufAQQaRkWQK6e1Q/LzlsAAAAOAAAAHj/wAAAAAAAA P/AAAAAAAAA/8AAAAAAAAD/wAAAAAAAAP/AAAAAAAAA/8AAAAAAAAD/wAAAAAAAAP/AAAAAA AAA/8AAAAAAAAD/wAAAAAAAAP/AAAAAAAAA/8AAAAAAAAD/wAAAAAAAAP/AAAAAAAAA/8AAA AAAAAD/wAAAAAAAAP/AAAAAAAAA/8AAAAAAAAD/wAAAAAAAAP/AAAAAAAAA/8AAAAAAAAD/w AAAAAAAAP/AAAAAAAAA/8AAAAAAAAD/wAAAAAAAAP/AAAAAAAAA/8AAAAAAAAD/wAAAAAAAA 
P/AAAAAAAAA/8AAAAAAAAAAABAIAAAABAAQACQAAAAVuYW1lcwAAABAAAAADAAQACQAAAAhw YXJldG9fawAEAAkAAAAFbl9lZmYABAAJAAAABXJfZWZmAAAA/gAAAP4AAAAOAAAAAcBSUCi5 SxlHAAAADgAAAAFAJeeelRE5/AAAAA4AAAABQGJQKLlLGUcAAAAOAAAAAUAyB+3NNar+AAAA DgAAAAFAJJ/C+xNRKwAAAA4AAAABQEIH7c01qv4AAAQCAAAD/wAAABAAAAAKAAQACQAAAAll c3RpbWF0ZXMABAAJAAAACXBvaW50d2lzZQAEAAkAAAALZGlhZ25vc3RpY3MABAAJAAAAC3Bz aXNfb2JqZWN0AAQACQAAAAhlbHBkX2xvbwAEAAkAAAAFcF9sb28ABAAJAAAABWxvb2ljAAQA CQAAAAtzZV9lbHBkX2xvbwAEAAkAAAAIc2VfcF9sb28ABAAJAAAACHNlX2xvb2ljAAAEAgAA AAEABAAJAAAABGRpbXMAAAANAAAAAgAAD6AAAAAeAAAEAgAAAAEABAAJAAAABWNsYXNzAAAA EAAAAAMABAAJAAAACHBzaXNfbG9vAAQACQAAABdpbXBvcnRhbmNlX3NhbXBsaW5nX2xvbwAE AAkAAAADbG9vAAAA/g== --- WAoAAAACAAQFAAACAwAAAAMTAAAACgAAAg4AAAAGwFJ5ESZa2ndAJy7h/Y9DekBieREmWtp3 QDKrWN/9QglAJecFy/N4C0BCq1jf/UIJAAAEAgAAAAEABAAJAAAAA2RpbQAAAA0AAAACAAAA AwAAAAIAAAQCAAAAAQAEAAkAAAAIZGltbmFtZXMAAAATAAAAAgAAABAAAAADAAQACQAAAAhl bHBkX2xvbwAEAAkAAAAFcF9sb28ABAAJAAAABWxvb2ljAAAAEAAAAAIABAAJAAAACEVzdGlt YXRlAAQACQAAAAJTRQAAAP4AAAIOAAAAlsA0fMI+bvhOv/uoQkpWQUi//t6gM8RRaL/7ME6e 7mn4v/slBOxuIMC//9jJ8p3XcL/7L6Fev0U4wAA2/FFJKpi//ScFgWbouL/8Pocg7R74v/0p qq1wTGC/+x8Kh2qjyL/7JFoKTeIwv/soK3EUBUC//KBEQBPCGMAAKtiBefwUv/s4omTXOVjA A0qKEEuTHL/7hlp0sBHYv/xVS8d60Ki//yJy272SKL/7oVoD6+JIv/7iRaQt0cC//VaEo30D QL/85OxxwadowAHqd1LUWNy/+9Uw/+PdqL/7IYmDCMJQv/+TLUcS2KC//UsYUnCS0D+TxTjn PtpbP2Gfcpi2J8Y/Y+tB1l78/T9hmK3v3QgHP2GaKR7p1QA/ZQkD2NQUNz9hp3Fk+oG7P2Wd 0KgLpYI/Yjujzb+UVT9hx0KNqoPZP2J1vTdzw+s/YaMJLQv8lD9hpM5D/++/P2GZy1TTq7Y/ YfB+8HlL/j9lqO+CzesuP2GpxXMbu9A/cGE1wkkFgT9hvBsU728qP2HQSdm5/Yo/Y/zERK8U MT9hni0KCSNZP2O2kFJN6R4/Ylo+R+/P/j9iFB/WgCCUP2s9ryP5flA/YdKJGMivnD9hmtGw ba2EP2SA235CBJ8/YotQDpXP+EAl8epM1HkJP5IXf6UkPQA/lrxpW1EOAD+SDmssqA4AP5IS 8YLzxAA/mQEBdV99AD+SOdWCpaAAP5otz1Wr6wA/k0U02BITAD+SYx8kcE4AP5PhNpO18QA/ ki0P3eyAAD+SMjlDn5oAP5IR1Ro8TgA/krLjpKmXAD+aTlBMoagAP5JAN/f7OgA/qwiqhKr0 AD+SbOl4OJEAP5J03K3aTgA/lsJL+aFkAD+SFPC7p4gAP5Y04S7O5wA/k4CJvuH6AD+S+DEh 3hkAP6Omxr93f4A/kp2i9JwgAD+SFOjEYZoAP5fSMVoWZAA/lAsiPEe5AEBEfMI+bvhOQAuo QkpWQUhADt6gM8RRaEALME6e7mn4QAslBOxuIMBAD9jJ8p3XcEALL6Fev0U4QBA2/FFJKphA DScFgWbouEAMPocg7R74QA0pqq1wTGBACx8Kh2qjyEALJFoKTeIwQAsoK3EUBUBADKBEQBPC GEAQKtiBefwUQAs4omTXOVhAE0qKEEuTHEALhlp0sBHYQAxVS8d60KhADyJy272SKEALoVoD 6+JIQA7iRaQt0cBADVaEo30DQEAM5OxxwadoQBHqd1LUWNxAC9Uw/+PdqEALIYmDCMJQQA+T LUcS2KBADUsYUnCS0D/2XcffnmqVv5oq/ZSIVXI/rc6o9F9nBr9RT6Q+S52Qv3AJF5hupNw/ teKv62UxfL+O7vQo7zgIP8HB+bh4VvU/p+nRG8MPFz+j+bKjZAlIv6dNik1Iv/W/pGhy2+ci f7+iYyxrlH4Ov4shzHMAyAw/r8r08fJQBz+9B+WoAPW6v6EcmkpM9v8/sa5n3E19AL+gl0c6 bbNbP5Z/I4ovlu4/xB68LmfXyr+KgBHuAEaAP7sysVWUlHQ/tAdcziRrAj+tf7e/7a72P6bM edRbec6/pOE4IxDMN79Twd0WCzSwP8WPgi1BERa/kp3azvQypgAABAIAAAH/AAAADQAAAAIA AAAeAAAABQAABAIAAAL/AAAAEwAAAAIAAAD+AAAAEAAAAAUABAAJAAAACGVscGRfbG9vAAQA CQAAAA1tY3NlX2VscGRfbG9vAAQACQAAAAVwX2xvbwAEAAkAAAAFbG9vaWMABAAJAAAAEmlu Zmx1ZW5jZV9wYXJldG9fawAAAP4AAAITAAAAAwAAAA4AAAAeP7Z3M/Z+3oW/mir9lIhVcj+t zqj0X2cGv1FPpD5LnZC/cAkXmG6k3D+14q/rZTF8v47u9CjvOAg/wcH5uHhW9T+n6dEbww8X P6P5sqNkCUi/p02KTUi/9b+kaHLb5yJ/v6JjLGuUfg6/iyHMcwDIDD+vyvTx8lAHP70H5agA 9bq/oRyaSkz2/z+xrmfcTX0Av6CXRzpts1s/ln8jii+W7j/EHrwuZ9fKv4qAEe4ARoA/uzKx VZSUdD+0B1zOJGsCP61/t7/trvY/psx51Ft5zr+k4TgjEMw3v1PB3RYLNLA/xY+CLUERFr+S ndrO9DKmAAAADgAAAB5AFp345qujjUCurmCYTUMgQK6GZIiNpldArq7KVLrefECurq6LsT47 QK5xjnyab6BArq3SHuGPKUCuZrTzmGSJQK6kDS9Qm5FArqvKHptSZ0CuoDIHBkg3QK6uB1vl FFtArq3uNy14MkCurq95Y/PBQK6pD98CEa1ArmWF3fh0P0CuraLKskjiQK1cwbtJXeNArqxg HKDKmkCuqx85Cm9rQK6FgdGWcINArq598JLLRECuil5ipQqpQK6iHFktv3tArqazEBI7tkCt 65ndUr6PQK6q4cDxAOlArq6k0f++pUCufAQQaRkWQK6e1Q/LzlsAAAAOAAAAHj/wAAAAAAAA 
P/AAAAAAAAA/8AAAAAAAAD/wAAAAAAAAP/AAAAAAAAA/8AAAAAAAAD/wAAAAAAAAP/AAAAAA AAA/8AAAAAAAAD/wAAAAAAAAP/AAAAAAAAA/8AAAAAAAAD/wAAAAAAAAP/AAAAAAAAA/8AAA AAAAAD/wAAAAAAAAP/AAAAAAAAA/8AAAAAAAAD/wAAAAAAAAP/AAAAAAAAA/8AAAAAAAAD/w AAAAAAAAP/AAAAAAAAA/8AAAAAAAAD/wAAAAAAAAP/AAAAAAAAA/8AAAAAAAAD/wAAAAAAAA P/AAAAAAAAA/8AAAAAAAAAAABAIAAAABAAQACQAAAAVuYW1lcwAAABAAAAADAAQACQAAAAhw YXJldG9fawAEAAkAAAAFbl9lZmYABAAJAAAABXJfZWZmAAAA/gAAAP4AAAAOAAAAAcBSeREm Wtp3AAAADgAAAAFAJy7h/Y9DegAAAA4AAAABQGJ5ESZa2ncAAAAOAAAAAUAyq1jf/UIJAAAA DgAAAAFAJecFy/N4CwAAAA4AAAABQEKrWN/9QgkAAAQCAAAD/wAAABAAAAAKAAQACQAAAAll c3RpbWF0ZXMABAAJAAAACXBvaW50d2lzZQAEAAkAAAALZGlhZ25vc3RpY3MABAAJAAAAC3Bz aXNfb2JqZWN0AAQACQAAAAhlbHBkX2xvbwAEAAkAAAAFcF9sb28ABAAJAAAABWxvb2ljAAQA CQAAAAtzZV9lbHBkX2xvbwAEAAkAAAAIc2VfcF9sb28ABAAJAAAACHNlX2xvb2ljAAAEAgAA AAEABAAJAAAABGRpbXMAAAANAAAAAgAAD6AAAAAeAAAEAgAAAAEABAAJAAAABWNsYXNzAAAA EAAAAAMABAAJAAAACHBzaXNfbG9vAAQACQAAABdpbXBvcnRhbmNlX3NhbXBsaW5nX2xvbwAE AAkAAAADbG9vAAAA/g== --- WAoAAAACAAQFAAACAwAAAAMTAAAACgAAAg4AAAAGwFKt/dMmDJVAKNZHY+jUZEBirf3TJgyV QDN+yjVXeO1AJ45qhpBmBkBDfso1V3jtAAAEAgAAAAEABAAJAAAAA2RpbQAAAA0AAAACAAAA AwAAAAIAAAQCAAAAAQAEAAkAAAAIZGltbmFtZXMAAAATAAAAAgAAABAAAAADAAQACQAAAAhl bHBkX2xvbwAEAAkAAAAFcF9sb28ABAAJAAAABWxvb2ljAAAAEAAAAAIABAAJAAAACEVzdGlt YXRlAAQACQAAAAJTRQAAAP4AAAIOAAAAlsA1UHTxm8DDv/uoQkpWQUi//t6gM8RRaL/7ME6e 7mn4v/slBOxuIMC//9jJ8p3XcL/7L6Fev0U4wAA2/FFJKpi//ScFgWbouL/8Pocg7R74v/0p qq1wTGC/+x8Kh2qjyL/7JFoKTeIwv/soK3EUBUC//KBEQBPCGMAAKtiBefwUv/s4omTXOVjA A0qKEEuTHL/7hlp0sBHYv/xVS8d60Ki//yJy272SKL/7oVoD6+JIv/7iRaQt0cC//VaEo30D QL/85OxxwadowAHqd1LUWNy/+9Uw/+PdqL/7IYmDCMJQv/+TLUcS2KC//UsYUnCS0D+cqILP qzBKP2Gfcpi2J8Y/Y+tB1l78/T9hmK3v3QgHP2GaKR7p1QA/ZQkD2NQUNz9hp3Fk+oG7P2Wd 0KgLpYI/Yjujzb+UVT9hx0KNqoPZP2J1vTdzw+s/YaMJLQv8lD9hpM5D/++/P2GZy1TTq7Y/ YfB+8HlL/j9lqO+CzesuP2GpxXMbu9A/cGE1wkkFgT9hvBsU728qP2HQSdm5/Yo/Y/zERK8U MT9hni0KCSNZP2O2kFJN6R4/Ylo+R+/P/j9iFB/WgCCUP2s9ryP5flA/YdKJGMivnD9hmtGw ba2EP2SA235CBJ8/YotQDpXP+EAnmU+zLgnzP5IXf6UkPQA/lrxpW1EOAD+SDmssqA4AP5IS 8YLzxAA/mQEBdV99AD+SOdWCpaAAP5otz1Wr6wA/k0U02BITAD+SYx8kcE4AP5PhNpO18QA/ ki0P3eyAAD+SMjlDn5oAP5IR1Ro8TgA/krLjpKmXAD+aTlBMoagAP5JAN/f7OgA/qwiqhKr0 AD+SbOl4OJEAP5J03K3aTgA/lsJL+aFkAD+SFPC7p4gAP5Y04S7O5wA/k4CJvuH6AD+S+DEh 3hkAP6Omxr93f4A/kp2i9JwgAD+SFOjEYZoAP5fSMVoWZAA/lAsiPEe5AEBFUHTxm8DDQAuo QkpWQUhADt6gM8RRaEALME6e7mn4QAslBOxuIMBAD9jJ8p3XcEALL6Fev0U4QBA2/FFJKphA DScFgWbouEAMPocg7R74QA0pqq1wTGBACx8Kh2qjyEALJFoKTeIwQAsoK3EUBUBADKBEQBPC GEAQKtiBefwUQAs4omTXOVhAE0qKEEuTHEALhlp0sBHYQAxVS8d60KhADyJy272SKEALoVoD 6+JIQA7iRaQt0cBADVaEo30DQEAM5OxxwadoQBHqd1LUWNxAC9Uw/+PdqEALIYmDCMJQQA+T LUcS2KBADUsYUnCS0D/2XcffnmqVv5oq/ZSIVXI/rc6o9F9nBr9RT6Q+S52Qv3AJF5hupNw/ teKv62UxfL+O7vQo7zgIP8HB+bh4VvU/p+nRG8MPFz+j+bKjZAlIv6dNik1Iv/W/pGhy2+ci f7+iYyxrlH4Ov4shzHMAyAw/r8r08fJQBz+9B+WoAPW6v6EcmkpM9v8/sa5n3E19AL+gl0c6 bbNbP5Z/I4ovlu4/xB68LmfXyr+KgBHuAEaAP7sysVWUlHQ/tAdcziRrAj+tf7e/7a72P6bM edRbec6/pOE4IxDMN79Twd0WCzSwP8WPgi1BERa/kp3azvQypgAABAIAAAH/AAAADQAAAAIA AAAeAAAABQAABAIAAAL/AAAAEwAAAAIAAAD+AAAAEAAAAAUABAAJAAAACGVscGRfbG9vAAQA CQAAAA1tY3NlX2VscGRfbG9vAAQACQAAAAVwX2xvbwAEAAkAAAAFbG9vaWMABAAJAAAAEmlu Zmx1ZW5jZV9wYXJldG9fawAAAP4AAAITAAAAAwAAAA4AAAAeP7Z3M/Z+3oW/mir9lIhVcj+t zqj0X2cGv1FPpD5LnZC/cAkXmG6k3D+14q/rZTF8v47u9CjvOAg/wcH5uHhW9T+n6dEbww8X P6P5sqNkCUi/p02KTUi/9b+kaHLb5yJ/v6JjLGuUfg6/iyHMcwDIDD+vyvTx8lAHP70H5agA 9bq/oRyaSkz2/z+xrmfcTX0Av6CXRzpts1s/ln8jii+W7j/EHrwuZ9fKv4qAEe4ARoA/uzKx VZSUdD+0B1zOJGsCP61/t7/trvY/psx51Ft5zr+k4TgjEMw3v1PB3RYLNLA/xY+CLUERFr+S ndrO9DKmAAAADgAAAB5Ak/Dwg4seSECurmCYTUMgQK6GZIiNpldArq7KVLrefECurq6LsT47 
QK5xjnyab6BArq3SHuGPKUCuZrTzmGSJQK6kDS9Qm5FArqvKHptSZ0CuoDIHBkg3QK6uB1vl FFtArq3uNy14MkCurq95Y/PBQK6pD98CEa1ArmWF3fh0P0CuraLKskjiQK1cwbtJXeNArqxg HKDKmkCuqx85Cm9rQK6FgdGWcINArq598JLLRECuil5ipQqpQK6iHFktv3tArqazEBI7tkCt 65ndUr6PQK6q4cDxAOlArq6k0f++pUCufAQQaRkWQK6e1Q/LzlsAAAAOAAAAHj/wAAAAAAAA P/AAAAAAAAA/8AAAAAAAAD/wAAAAAAAAP/AAAAAAAAA/8AAAAAAAAD/wAAAAAAAAP/AAAAAA AAA/8AAAAAAAAD/wAAAAAAAAP/AAAAAAAAA/8AAAAAAAAD/wAAAAAAAAP/AAAAAAAAA/8AAA AAAAAD/wAAAAAAAAP/AAAAAAAAA/8AAAAAAAAD/wAAAAAAAAP/AAAAAAAAA/8AAAAAAAAD/w AAAAAAAAP/AAAAAAAAA/8AAAAAAAAD/wAAAAAAAAP/AAAAAAAAA/8AAAAAAAAD/wAAAAAAAA P/AAAAAAAAA/8AAAAAAAAAAABAIAAAABAAQACQAAAAVuYW1lcwAAABAAAAADAAQACQAAAAhw YXJldG9fawAEAAkAAAAFbl9lZmYABAAJAAAABXJfZWZmAAAA/gAAAP4AAAAOAAAAAcBSrf3T JgyVAAAADgAAAAFAKNZHY+jUZAAAAA4AAAABQGKt/dMmDJUAAAAOAAAAAUAzfso1V3jtAAAA DgAAAAFAJ45qhpBmBgAAAA4AAAABQEN+yjVXeO0AAAQCAAAD/wAAABAAAAAKAAQACQAAAAll c3RpbWF0ZXMABAAJAAAACXBvaW50d2lzZQAEAAkAAAALZGlhZ25vc3RpY3MABAAJAAAAC3Bz aXNfb2JqZWN0AAQACQAAAAhlbHBkX2xvbwAEAAkAAAAFcF9sb28ABAAJAAAABWxvb2ljAAQA CQAAAAtzZV9lbHBkX2xvbwAEAAkAAAAIc2VfcF9sb28ABAAJAAAACHNlX2xvb2ljAAAEAgAA AAEABAAJAAAABGRpbXMAAAANAAAAAgAAD6AAAAAeAAAEAgAAAAEABAAJAAAABWNsYXNzAAAA EAAAAAMABAAJAAAACHBzaXNfbG9vAAQACQAAABdpbXBvcnRhbmNlX3NhbXBsaW5nX2xvbwAE AAkAAAADbG9vAAAA/g== # variance and covariance transformations work WAoAAAACAAQFAAACAwAAAAMTAAAACgAAAg4AAAAGwFKLcIr2U6lAJ7mJXVQ1mEBii3CK9lOp QDMBFEV6xQ1AJnj9Bj3oV0BDARRFesUNAAAEAgAAAAEABAAJAAAAA2RpbQAAAA0AAAACAAAA AwAAAAIAAAQCAAAAAQAEAAkAAAAIZGltbmFtZXMAAAATAAAAAgAAABAAAAADAAQACQAAAAhl bHBkX2xvbwAEAAkAAAAFcF9sb28ABAAJAAAABWxvb2ljAAAAEAAAAAIABAAJAAAACEVzdGlt YXRlAAQACQAAAAJTRQAAAP4AAAIOAAAAlsA00hUKopC/v/uiIMLHLuy//ssyhve2vL/7Jr8K SbUAv/sa1cItPhC//8POqw85FL/7JoUvY15YwAA7D8sD/c6//SUayysMjL/8O2Quh638v/0W mUgAlIC/+xKZXXIQsL/7F40nuPLAv/seLsVhXcC//J23rmyzmMAAIARDWDzAv/srA2aD7MjA A1R95vJwGL/7djZ68aYYv/xSR/sGCki//yevfroaBL/7mxSAeStQv/7m3aUwM2i//VTP19jb WL/84rONukOUwAHx9fU/2Fa/+8QvunAEmL/7Fx4TRHXUv/+ZdSn1rdS//Tf3NN2ngD+W4GjR aryTP5P9k5w+sG4/ayNFdQtzdT+UNR65X0TEP5QzA295mvs/bFTXaJrIOz+U3CtVdiGyP5RR Ot5s8NI/aT33KWiSfT9pDE2N/1sTP5S2SpsPGnU/lHsSPZvdgj+UhiM2HUpuP5QsiYrA7e4/ aRVFdDks8D9tDHOUss4tP5Sd+c05JPQ/lYiPg3JzFD+UxOlt8wDqP2kMYnIOue0/lBcHGRWx ez+T/tJ6LYPNP5QNq0mErcY/aVODDQzbTz9pJSlA+yZNP5UHY/IZmJA/lNKlqN7G3T+UO0Qc JW5GP5QpKO2aIqQ/lK/I/7EmaEAmg6Qr+gHKP5I5hg6AAwA/lTqBKHTFAD+SIcNl86MAP5If omEDgQA/lvi9WmUhAD+THZI/2f4AP5mPg19ObgA/ktItKsZkAD+SjsiR3esAP5KkmoaHBgA/ khZKaRj3AD+SFGHT2/EAP5Ig3ZVNGAA/kpwrkaAHAD+YCFgsLCIAP5IPS9yzsgA/qlFcWiEQ gD+RznOE6nMAP5KPYOKjMwA/lmRIfQIhAD+SN2h/dggAP5Xg9zNtrgA/ku5c/6I+AD+SsXhR Iu0AP6MUf55iiAA/kd0sR7uFAD+SH4L0gRUAP5dfNWv4+gA/kshKi4uJAEBE0hUKopC/QAui IMLHLuxADssyhve2vEALJr8KSbUAQAsa1cItPhBAD8POqw85FEALJoUvY15YQBA7D8sD/c5A DSUayysMjEAMO2Quh638QA0WmUgAlIBACxKZXXIQsEALF40nuPLAQAseLsVhXcBADJ23rmyz mEAQIARDWDzAQAsrA2aD7MhAE1R95vJwGEALdjZ68aYYQAxSR/sGCkhADyevfroaBEALmxSA eStQQA7m3aUwM2hADVTP19jbWEAM4rONukOUQBHx9fU/2FZAC8QvunAEmEALFx4TRHXUQA+Z dSn1rdRADTf3NN2ngD/5Fjgy7IjZP7bHI1nATQa/p51R7PmwGD+sHH7uZvEqP6/qoVFau3K/ qA5Hs2wKJj+VYzdWUCsiP7mzNB+LFFy/vdDQDBpFx7+VFeEkxRxeP7F7m5KVODU/pVACJbjF Jj+jdMHsaW1mP65HVhP0fJm/s2b06ISqEb+kQJCWKzfqP6COIa9BZSQ/zvjmbLoMiD9TIc+d D8Ugv5o1IjVvHMo/s1oN2T4FLD+3OGyWxNwrP7EIIW+h0fm/uyMfiLLHcb+0y/cCrI9DP8sJ wPl6FQA/o6KWFvbG6j+0vX2EvGQYP7O2T6sntQo/qCNltsfvAgAABAIAAAH/AAAADQAAAAIA AAAeAAAABQAABAIAAAL/AAAAEwAAAAIAAAD+AAAAEAAAAAUABAAJAAAACGVscGRfbG9vAAQA CQAAAA1tY3NlX2VscGRfbG9vAAQACQAAAAVwX2xvbwAEAAkAAAAFbG9vaWMABAAJAAAAEmlu Zmx1ZW5jZV9wYXJldG9fawAAAP4AAAITAAAAAwAAAA4AAAAev+M6GYBVASq/vGuV8SI2X7+n 
nVHs+bAYv6TW5Y0JFQu/kWww4vFX5L+oDkezbAomP5VjN1ZQKyI/lNht5+qt4r+90NAMGkXH v5UV4STFHF6/oHuKtB6KiL9jFZ1qwuogP3lA+cNvwvi/nu9VS24Wgr+zZvTohKoRv6RAkJYr N+o/myfZhL4yPT+5EKUE8xZgv4E3FDJaGYy/mjUiNW8cyr8zDtdI8zuAv7tWWF8xxVc/hs8u D/wJuL+7Ix+Issdxv7TL9wKsj0M/sGq450lfB7+1y0lRJZXnv6J/sQ9joN0/n+DuBpUSPj9T FE1IlvogAAAADgAAAB5AA4tpeQZcIkCdJ778gk1wQJ6TSnvdXnBAnD6Pn+iHVUCcPOYri38u QJ6D/vF10JpAmzztybeSzECefRV6RttJQJ6p/nbR4JNAnqxBiqhjm0Cb0Jv6ZpKMQJtvTMQm Lw9Am1O/s5Mn+0CcUUc/BUohQJ6ryQO6wvNAnnp5yf77kUCbHdP+dPZgQJxwFCHevvhAmt1+ OmGOAECerENzxOK5QJ6kbKmimTZAnSAFYnwMUUCepBjV4PmdQJ6pF6rZX3pAnqslCMgZy0Cd UOL9AXOaQJr484ME1q1AnCOQo84P50CendeyiBCCQJvx7is3eLQAAAAOAAAAHj/wAAAAAAAA P/AAAAAAAAA/8AAAAAAAAD/wAAAAAAAAP/AAAAAAAAA/8AAAAAAAAD/wAAAAAAAAP/AAAAAA AAA/8AAAAAAAAD/wAAAAAAAAP/AAAAAAAAA/8AAAAAAAAD/wAAAAAAAAP/AAAAAAAAA/8AAA AAAAAD/wAAAAAAAAP/AAAAAAAAA/8AAAAAAAAD/wAAAAAAAAP/AAAAAAAAA/8AAAAAAAAD/w AAAAAAAAP/AAAAAAAAA/8AAAAAAAAD/wAAAAAAAAP/AAAAAAAAA/8AAAAAAAAD/wAAAAAAAA P/AAAAAAAAA/8AAAAAAAAAAABAIAAAABAAQACQAAAAVuYW1lcwAAABAAAAADAAQACQAAAAhw YXJldG9fawAEAAkAAAAFbl9lZmYABAAJAAAABXJfZWZmAAAA/gAAAP4AAAAOAAAAAcBSi3CK 9lOpAAAADgAAAAFAJ7mJXVQ1mAAAAA4AAAABQGKLcIr2U6kAAAAOAAAAAUAzARRFesUNAAAA DgAAAAFAJnj9Bj3oVwAAAA4AAAABQEMBFEV6xQ0AAAQCAAAD/wAAABAAAAAKAAQACQAAAAll c3RpbWF0ZXMABAAJAAAACXBvaW50d2lzZQAEAAkAAAALZGlhZ25vc3RpY3MABAAJAAAAC3Bz aXNfb2JqZWN0AAQACQAAAAhlbHBkX2xvbwAEAAkAAAAFcF9sb28ABAAJAAAABWxvb2ljAAQA CQAAAAtzZV9lbHBkX2xvbwAEAAkAAAAIc2VfcF9sb28ABAAJAAAACHNlX2xvb2ljAAAEAgAA AAEABAAJAAAABGRpbXMAAAANAAAAAgAAB9AAAAAeAAAEAgAAAAEABAAJAAAABWNsYXNzAAAA EAAAAAMABAAJAAAACHBzaXNfbG9vAAQACQAAABdpbXBvcnRhbmNlX3NhbXBsaW5nX2xvbwAE AAkAAAADbG9vAAAA/g== # loo_moment_match_split works WAoAAAACAAQFAAACAwAAAAITAAAABAAAAA4AAA+gwCSZfNDtrTLAJQg4rmPuN8AlzHES4hut wCIGBetDhYLAI0awRsFdk8AjINcUGsscwCXMUMW16IzAI/L2nOOyq8AqdhFsysKGwCYWOgEw BijAI3cQGt4njsAgq4sZEnDQwChE2LymruHAJNOCW9qoWMAmI8sb5ckXwCGzOpKcK2zAJv7I b1OkycAmN8XjSDXswCSq47v/hGrAKkuVjS2+qsArvf0xkquowCcn6arojHLAJVSkdZTYNMAi 8ZtFHnYcwCfz3i1lkVTAGruuA85pIMApgBCZyu8iwCUkWt9LdRvAIzGhBL5XTcAl9Pty6V89 wBqIpxRnqFjAFgbuAJyecMAmya15QZBDwCc2ql7TNUfAIp0rBV9IRMAhsA13A9yQwCapB7rS uL7AJ0PI+WyTfsAnx+KQwqBMwCRSVuhxWM3AI8tC38n9fcAl97qfkuyWwCdQWbnatcvAI8pe OXmmyMAZmkxX53LYwCOZ5Tc5mXjAJjwBmcXHFsAk20Jx4OgdwCgui+gwshjAJE7PfC0Gf8An ZVYIRUlpwBuEbwPsBzDAJqL/tJDae8AYoDbV73zowCfVnRKjGNzAI/OcDmz1PsAdVgKQKmbw wCVxSq5R/DfAHU0HcNNxBMAnaW9kiy0zwCOGcok3LVDAJBKkL7QL6MAiYpU7q08QwCTHX32M /JjAJ2cdmc2M0sAj28o1eHjbwCWJik13j/HAIjdWtmTwWMAkSvcYglIWwCSh2lNthc3AKEde wh6wH8Agvy/nPanSwCHuyIm2S7jAJXIQ3UCUf8Ah1ssyi808wCNqIDtFybLAHHTmbfpl8MAi YlLVr9/GwCZzHZCcdrbAImQaC/EEPMAl8+ZWWIMIwCC0WMI0YFDAIehpg2r7gMAd4fdRXfwE wCXjHaCcWd/AJWlkJvQIGcAhpT0vbj1SwCpDNVYyoYvAI1lRmeI5WcAlj8TPjDwowCN68hoz oM/AJdsSfkyfcsAm/RXopWr1wCtg9wqh6qjAKVYMy7eUZsAij8BC1RLYwBRfEnD1NzTAKGna 8nd6scAtiVLMf6c/wCTvDr+czUvAKWpvU1q2nMAj500KMo8cwCJ70kpAuVzAKB5ZVU2ZnsAl GFdW3GWgwCGLXQGVL2bAKOEIyYEzg8AS0yQ2jPuowCHOzNTHqZTAIljvriP2AMAphMZc6DM0 wCO0YV1Au4bAGUv1GpcOaMApOYvNs+iwwCNQyFNwPj/AJtQH7+BllsApp654oevZwCe9vYJo orLAI6u0gnKjHsAlxLkAvL2dwCjDXVbJ8KTAI2paDwqxQMAsgLLdDbriwCZn3BJvAIrAJxvC /2/D0MAhDwVNC7I+wCTnU+UNFefAIn25rJ2ydsAidWUsBIGywCDSDRHnBALAJrX+IyYg9cAY QQ4yJrcswCSLb2Cu8EPAKMNAwtx97sAqlNfdwfRawBm4ckNPiOTAH0IhKSJ/EMAnph7seVdi wCgCKZ/zn4rAJSMSEdwiFMAmSvKrLKC+wBeBF/LkuZTAJfPQIf0kjsAkEMaKN5cCwCdpTR8o fdHAJ6QoKHcntsAgLABDShSswCOFxcXUV8HAFC7XtYEGYMAlDWZrU91vwCNpP5QNKc/AJPMI yHI0FcAmkZXjW4DewCKeZOo9s6jAJiikV4Dr+cAovKmPSmcNwCgJwX4v0YHAIpY5Y6rTmMAo FelCsGomwCSQYoY5++7AJhQoo16efcAlMIN0+sxnwCILfNky+bLAFzm5Gkm1eMAjqAvlb6QB 
wCjS6jd28pXAIiHL9EPDssAkCiEgSGN3wCgoiBrQNyfAJxydTbIgWcAprxlOZ6HGwCqEGGv7 iZLAKuJklh/46sAm0vUTlmYfwCkyPE1TGanAJgaPt3LALMAcm/bC5LP4wCf6FJ/GGb3AJafK RPIEa8Aljse0Q93GwCEFAPf1eZLAJwPox0FLRMAlHs6OL9snwCD/Yhs8zxzAGHr+8gu24MAb +ySMytAYwCZoFI1VQ23AIPJg0i5xDMAh6VIhSSTOwCgUCFSWfjbAHdDTCJY78MAiGgqEKLaW wCk2/3dJ/1jAImOXcBnIxMAkM1Li2HJVwCY2Bf4iQ0rAIUdduOKdTsAjQCE0MH7TwCpZ1UCP 4AbAJ6g+Vfo2xsAmA1lP/ka5wCD04BVZsU7AH7cnqfLj4MAn+mGbTBjewCUaFlLPiKnAIvkG LEWc2MAdKMu1vtfMwCCvNFesHhDAJhh6Bwaq9cAldL7yn/CzwCopcMbG1GbAKcEY3BvwAMAj uli8I+qwwCb83GYYg97AJieTQnpO3cAmTsFnavpzwCSvCoe1Cj7AKx+lbYII7sAjPGBmpPxH wCNTWP0STGbALBxmZLNHCcAjtHCEeJ6lwCq2OKwWc6LAInh9cI1NKMAmSiY2+XtHwCahAzV7 APfAJZMQoP/2pcAg/VbJd6FgwCU9+K6+1s3AIe+3ZVig8MApiKIdeM6WwCk4w7a1ebDAI2mJ 8U9SR8AlEa62OFekwCFHGtAY02zAKvfGtTLgz8Aif2DPpS/WwCdgS00zLPDAKVXcNx70HcAk eTN33AdOwBkJ5DtEHUjAJZvbwJjQW8AmxmbPVAxswCFJqy7i1FLAIeXisi/lasAf6qFfxayk wCPhh2xmYc3AIsBkM4emFMAju6xOQRU4wCovGsQSEmHAJ9F0HeRNM8Am8eCFBYuUwC4FirQm HETAIPQDsdicMMAmw8L+f2SjwCM2OQ6TD9/AI5rDKSNK08AnPV4RTZBPwBI/brq7H1jAI7Gk FJRu18AinxnnCQmcwCHtvWcFuOLAIhGGzRCy+sAlYq566uQgwCBYuBEnECDAJCS6B2aKRcAo GsI8Bkm5wCtxr8KWGDLAJeHSQckEBcAq7VNVn50VwClIrCuQBUbAKL/b1gxkx8AlJDsgTsAN wCqPDUMmntvAIwQoX+PmXMAlfk35D5bgwCkrMvuKFc7AIz1GIeI7LcAiYe+D6xNUwCGUggRY zqDAKCeShKz+gcAjLql/oy1QwCQ7LHQfEgXAJJovqbWyWMAmk9cFICMawCcCMdyGT4nAI3SH Eu5IDsAkcQ9vv6B6wCc9ZTXkaeLAJOk7BuzapsAqfR1Qp4L7wBMVv0wxnAjAJd/JPfsRScAq SvYs+JCbwB0ECnmNVoDAKEd5iaZ1y8AkDBIo/taHwCSRzALxE0TAJiE8D8vvUsAkTv4ovDa/ wCcYYFaNUnzAJFJWqS5qIMAjcLIEckZxwCJwHfvoMY7AJKlcfB4PgMAhGsA+v13QwChAnqe6 xq7AIskIPDJ8FMAi7syKqKokwCGdM+nrTH7AJ/7CmIJvesAjVghWpvzpwCMfU8WCK0LAJqhm coYZdsArQVDJ6VlewCnNcuYQTnDAIG02fnWZ+MAmhD+4YS//wCPzUgIWK4jAKCVcb+2GXMAi cPV0LNc0wC5MNQzOfvfAJLHiMKBOAcAgqQX+qngmwChHBV8zmmnAJAV4/QuhocAmIyDxdfht wCFX6M9yVizAH56o0F4sDMAR7LxXEwO0wCWi9XbXQkjAImqZ9XS4PsAlqbCu9L2owCGyfTGw J37AGq8TkZ1N/MAluH6syzi7wCxsEMj1P6/AI0spn3JhacAlJnY5TRv5wB/SAx4rK+jAJ9U3 r9w0jsAbeUBymI/EwCVMdTYknUjAIV8lUMHxPsAcwloqbhDwwCan2sTJWV7AKcbxDIyhO8Al ze/Qnu+4wCl1uziDQArAJbqzk0RDu8AqMhCWpH/xwCUQs9Ulf1TAIkM1BuVnzsAcQwuS58Ww wCRAFcy9BJbAID2p8hXvosAifrZJZSfywCKxLiH2LvrAJYs/e2+FZ8Aue1hMSHaZwCLWmyh6 E+7AJPYBuLG9FsAiLhv+EehMwCpp7xX7nk3AKbp1DbEArcAnx2G+UwGewCQ976M+2TjAIvEo yiRLasAoM48ofO+JwCHr4nz60MDAJGwEqlr0H8ArSyheIE5QwCcc4IyOyJbALT2pCgTrRsAj W8Z9wWoEwCbUC2Tz72vAJUjOn5IQ5cAjDZVkUXAUwCS7OllAgCDAJc2CuQnFhsAliBY5JfFT wCj7maBnZdTAKfvCwqaLs8AizXhYfFE+wCRKM0kp8yHAGh0/XBr6QMAciIT5Px4kwCbmoSVc wKjAImYjI7TLyMAnYy16jzRkwCVU9brHYdvALPluD1u8vsAprOuCC5EvwCGOn88YAjbAIS9p t5foSMAi0Pjk1DSOwCQrtl7cwkjAJIppTRaYSsAlKBr2HfjjwChdjbI7WlbAIpPsweUyTsAh ilc/3mqswCirwbFE9iPAI7mp/Hth8cAiclEkC3XawCiOIR49/g3AJiSz00QfAMAlnID7ofgK wCBju8sIeATAIw/3VN2prMAiSX6qlzXwwCIFsb5XFfDAJhDBXxKvVcAk28t5wXkUwChryiru jgHAKkY1eN0oyMAjBsbqozdMwCXjyv2KgijAIFkUlDZsbsAhtWEf6l0qwCRX8px28LTAJ9RC h1jh1MAie5y0ThfUwCr5RODN1JjAJYJZc/7TrcAnpe/XBLcuwCWZ5HuZZTPAGjjSSzY3vMAk TiwfKa1lwCPlTfPxujLAJq98q+p6h8AjQupl4tRiwCMsZa7M6nfAJiz3ddJs8MAlWH9gztgk wCT8JrwDSWvAJy0PNBxXQ8AvjpvC4DDQwCDW+MnVk8bAKrEMEVh3FsAiDD5a3Oj+wCZy+GhG swHAIaoXDyiPOsAm2hct0KhiwBxrBeAoDdDAIM2j4HTacsAi1YEXD2cQwBbTVSdrjnjAKSTD 1mj4jsAjkNCKt4hqwCYPlvYAZxTAIrTJc6YfGMAjZCUz4enWwB0WfFtzo+jAJV1Lv32dHcAe qilThSTMwCrkso3JfhDAHw0XfU8QHMAfqjVrtPS0wCjm5TSn4KvAH9E+th+DSMAo3T5zWj4m wCPoph+lOefAJeF9T4A/3sAoBM243nUTwCVbJWI2tgbAMJ5UV3BWmsAquPByiUKHwCXT6g6t 76/AJCl33lQrXsAqI3ltlfd6wB8b/gJC9ejAIG1b90VulsAk/9kMxF+fwCTRlSnjLm/AJCul iIcjr8AlQhcsRTgBwCNBTbOOdW3AJawdOadECsAiq7njbZlewCeoij7RYwfAJEoPYCyTNcAf lkO7uddYwChb+B0ibb/AIXFU66KRlsAt3sLfi4bewB8YaFjKXHDAJQUb3YK4XcAoM3d6217Q 
wCojwNbo4OrAKPRd1QUBM8AjCSC281pSwCPuk2q6hjjAIwfDAiL89sAgulqGH+9GwBUM05/I ICjAJmQZthPKaMAkwwJjA5XWwCZB8FlPWpnAJ8jaswcVr8Ajv1ESd53twCQHjP8d1wXAJm4w qN4hp8AkNTPyer/MwCaK0wVoDCnAGsgvOrx6RMAk5Lo/LTirwCfUPpoxrVPAJfSUdWWmU8Ai A+GhpYt0wCesGes/FxvAJcfbyXOX3sAn8rn3JeM2wCQtyqWrqZvAIRXnPdFOasAc6BxCnBMg wCNIsE7VFiDAIxR9UuV2tsAhaiw8pqXewCibXICTUy/AJkONM0JdgsAmFaBUfoPhwCGuoUTp Uz7AJOdRiXr/CsAoKNcqZ4rwwCWQC9XazHPAI9GbTnQ5r8Aolny6OI3uwCBoX/IytNTAJsIf E8owMsAjRaMaqJvEwCO51nyTTTHAIX0eex3r0MAlFpJp3x/awCeQnRGDNO7AIHXDRa9+SMAg OYhk/LZewCJJ/Gbf/1zAJ0nMHsrUrMAg40wm7cvKwCYEFCaZBQvAIkoadaxLaMAiCe7qxdhw wCrXMNow5qnAKHAl1Xxk6sAiZwKAGJ/SwCQURzrhUrHAKJGTEYDsPsAoTmeaX3hcwCArOjhI kZ7AKuJKbF4/KcAoZIMwu3f7wCChElNYOsLAI8xtOxCwK8AmBkMNcu1jwAyoDXUVdmDAIwWN o0yZhsAqe8U5/hpZwBhUqgFEafDAIk++uq20GsAljSBpCPIjwCtEFRBiTX7AI0ABIMSmC8Aj UVPWDty0wCpOQFIKuHLAH0Co9a3D1MAmRE0lUJt/wCLmgY2AC0bAIw5g8jZOtMAlZt1s5W6i wCfhcr6sj9jALFnU0r83Z8AmqdJfF1JMwCD4jf0E6jjAI+z224x4UMAsgFVmUriUwCdLtKdY 8OHAIbD+95fNmMAlxLrqkSw0wCig7tLTaEfAKe0zfGP/E8AlTcQQrMWawCfBFFjnG+7AJHnW A6uJ/sAdeaPJ1uJAwCTfE/FfxDzAIyHhD2o8ssAldBKZnPMowCeGMFgFGPrAIca9SHBNhsAm PKjFFkD2wCEm2SRYnkLALDgbZ1W7s8AXrmxal3A8wCmq9Lr1J+rAIoqX/RCxFMAkRte7Apan wCZyiX1EB4vAIZhmKrlGMMAiGofsXgqKwCjxQGJALvPAI8tvnw3m9cAfp/iQ8ce0wCKg0W1F b5jAKzlJQ7u1vsAkLes/vN1UwCQYNr9+a5rAJjzHn8gWZMAnO5UW+RfnwCan8kqAWQvALAdY eocLsMAlP0Gz/51gwChWhvNQwbjAHDjt41iIIMAkIBbr/NvpwCFlBorZSwDAJ3E+AcTKz8An JCkYLFRUwCAr44xFp+DAH4agb2vQPMAgDGqlkToIwCljnbf4OLjAJkotjiWYzsAkdSGNAxVE wCB6deIpTPzAIiaoJEIfVMAsZ/6Bn+PVwCNTWROKAt/AH/kvFy1MMMAncys63XxzwCVEzFFu WbLAIzF9wDgon8Akmy375A5UwCO96bqvEKHAJ8NOWOz6CsAVNDqHEzncwCYzx/cH1r7AJVF/ cBoYBMAdcMaD5Qp4wCii9QfOUzDAImngolyRnsAe6GcmYMAgwCSyWJskFlPALCuWGrwBmsAb 24ETdeQMwCjLe6W8D1/AI80hxn8U1sAqJb7ImUffwCZOBJvIHVzAJrSFHwSphsAlzKu4OSq1 wCWso8q2ax7AKKRIRDYRk8Aks1sLGw7IwCckIKYDqKTALQZCCSJQ7cAj56kna944wCEeE01F FajAHpae0GSqXMAYBFMFmjfQwBa4RNSrJjzAHu+ytaJHLMAbu1Sn1BRUwCI060m1zDrAHfuC i+PykMAc8XTbk21EwCYeQxXCo5PALh8qT+at9MAmPepsXsp6wCYeAeifoMLAIs7ysatzhsAm PldXo5D0wCLVSnu0K87AJIFkeWpL6MAn0O/S14mYwCGNiYVT1jjAKuOKFuBEaMAkXA1KjOOW wCOmusGZyxzAIwAnRax5gMAn2P+kkyQWwCI00XbUUWDAJyfQSCZ66MAj1xEEfy8FwCthTVl+ +AnAHmvt2XdQJMAmGQBJpxBIwCWPMqbyZ4XAJltg9W4yTMAliTILtpp+wBDCYcld7UzAKItM k66A6sAc+sQ1yMF0wCE5yZuxgkLAInXsgj1Z0MAnJnrc910BwCczlDPpZdPAI7DVeTtBfcAb BU2GPKDAwCalPMMN1vDAIUKJ4lFVFsAcYRmGtzhkwCAZmPst1EjAI/vbYe3XNsAjFxqQb8BW wCcF6y+cE5jAJY7mGivSSMAq1hNzfOBdwCd5AXPMA9vAJS/Lvn7nI8AgwYgcXmKWwCZ3CRrS PDzAJtnqUuGRdsAkz0S/qvfawCdYvIFV2/jAIWKsbGes3MAmcFycSB5ZwCJw9BJTclLAJ2GM n3UPKsAl4jMiav/iwCGwet2+qQLAIhhyz7spFMAo4J415SgowCNOrH6XrqDAIsX1LwE/UsAi +5WDPcEkwCk5sFGQMxbAJDTSHbBu48AqRwNXLzzEwCE0H3dgfbrAIzXGxVbcrcAX72eKFYmQ wCJ52aCAqYLAJaTT94bX9sAio6+MRlv4wCSzpN0UFC3AIvTATHLYkMAg16D48XTYwCPKPV1L OMzAJtaptj7z+MAq/RQe78U3wCOod3hiLfXAJcnmUvkQuMAoYYsyQPkEwCQQrvSc9UDAJX6g IcB9R8AZK1Plth78wClAgOSXgILAJv7SR0+tKsAfyixShnYowCkWbD8fOtPAIrPGytl9xMAi UbxP7FV8wCaaBPoxIJfAIq0OdDBAUMAmz7cuyONuwCXVTSOW1GrAI1nziWtoM8Am/ZJAO9C/ wCs/OcUMC5HAJ7mPLkwTeMAiwey5C0YKwCl7GZM0n2bAI02xQ8HVJ8Aqz+0XABDBwCIZCx/8 sOrAJcF3CYNYLMAWJnm+3pDowCGs3Nl6TxLAJ2NwV8wniMAmedgT7HmFwCpiWBq66FvAJZTg 6rK5OsAjVVbFGiYtwCL3Qq2W+RzAJ6puQQUxTsAhB3Tc4gZwwB6zosyGIyDAJisXdcLTcMAj XpifRb9KwCBsslG9CQ7AI7J3u7GhAMAjavgrRKbawB9aECA6WqDAHuHoGzE/KMAkiylpq6Ai wCEg0OkJ04rAKWir0qifAsAg0K+n4gv2wBWilTAtGEzAKIsXOBuxbMAnpjI/ZNpXwCUTOviv wj/AHKWfWuCkUMAqfWR+Q4WowCjDXtCOJaPAJn7Y/dANpcAlgFoXJe3bwCqwwdwSiazAIJbf bCkiMsAlorfA+ZE5wCUXNPV4kQrAJxYZUXfbNcAi/VapgkHKwCOl1mjIDqzAJH2AAVbWiMAm uHQSD+UGwCh1Sru15mbAIC5Jd/DXfsAd8wRtTNBcwCjsX8wC3v7ALL4qxhKVa8Al+jEm71Nr 
wCcRJcRZYw/AIUKcAMrUFsAlTYgsfg3GwCV55z9Sx3XAKXvwELCkZ8AnMC1xaknswCY9gim8 WJ3AJ0otrHAXy8AtCddzawqAwCcAiMbLv0vAIYzAiN1aKMAj+GwfZ9fEwCSGJ5oIpMHALDvT HT1ilcAkhhuYKCOxwCk8viI+LiPAKcfK4oDPmMArIckmslJzwCRY4fRwnLHAJG+/Dh18I8Ah kj9XTvnowCjvNWGjLXDAJ3EusJQHSsAe6Z6BhP3gwCQ302/SpKHAJKf9rLwrmcAiwd+O1n1+ wC2FQ3xkGPzAKoETkyJyLcAoSlwxYlxkwCdABFpCPcLAKNNcaTKmscAjj+FLZjFbwCbNJuqR rXHAJV9WFPb2MsAhOcic6yc0wCIqxSDpb57AJd7RsQJi6MAjZJl70kz/wC3KK7T+6W3AI6g/ 1lbJTcAY55kqnED8wCGADdguv9LAIiS0oSCqRMAmlx8yL+XRwCUK6zrpf+zAKTww+5sTPsAp sPYPcBVrwCIdfJFqzazAEEdn3N++XMAn/eePEa5NwCWoBogo2kvAH5kub3RzGMAdgnnqPGi4 wCa+bwmEraTAHoUZWO3Z2MAmoVMz14QDwCfmPc6TZPTAIyRrZqfzNsAmYzVWRwGzwCXmlGMR 3ZTAKMzu0JhC/cAg9Z8kohCWwCBmNVwtP1zAJEgviOBS3sAn9wgsArurwB2uQZA175DAJbet 2TsVVsAkKi1eS00ZwCBU/LpvH6bAIxGsnH8eBsAj8pht+oAGwCXvBbB5iLTAImb7J/ZQcMAn JU5r7MJvwCakHxASAGvAKaCZU877fcAjtXjR4VCZwCfFsFjNT+/AIFJAwjgUhsAmVMPSzb5E wCbqEy9fJNTAJUp0xPbwjsAhHXZLZFUwwCcrT3xh9ePAKcp+K+Bo28An8uqfK+MQwCRTA045 /zrAIY8/aukL5sAjkoHyuRZlwCKoPG6JoOLAIWW7K16HBMApLy+tbum3wCZjV6Bljw7ALXLj t1vLZ8AwPWGIZ0TawBiNvUxOx1DAJj5tq+e9dcAkHzC+7HogwDBUOezBZyzAFVpCFfQmcMAp FhDj8bROwCFd8VdbI4bAI5rjyYnLm8AfBLfch1a4wCcLjn2SlfrAIyAzdWufisAhcBzPP0G2 wCoBk6GGlZrAKa5HnJt9d8AgTh4MyL6ewCTgNcCz937AJfCS3XCP3cAmBAHFDvdhwCQpXIHu +0PAJZJkRi3aXcAnybGOEM2mwCEFN6bu8HDAHi4HYep+CMAnSJvN06u7wCjPEimoUJDALG4v 4+3O+cAhHz04chScwCi2BHEeaHbAGalz2S0lBMAnjHk1EngswChcxW7/qbjAJtPogOvW4MAp oEqjTATnwCvkiK3qMGbAJb4TF5LtP8AlFC2tMDOPwCA4+ylxhCjAJo2/zD2lzsAjN8xs5eEc wCTndKdwgzPAIIF3v3SbKsAm0BQJnr6AwCGFIaRUmWjAIFFDDyeC+sAjzywwVA9OwCCcScFl WUDAICPAf9MPvMAioAunnFRCwCS1sbVsqWPAION33aJg+MAn3gPIzzKdwCOC+CUSjEPAJLs2 shNJYcAg/ixbelDOwCPkfPTGqHvAJelhrACrp8ArtGlwV84RwCENHu4AI+jAJVG/EDrQb8Ac uNDgOPIUwCYALwLIL3DAIFbZ5m7Z9MApYLciEnS1wCF+dPsDa1DAKDjb4ThQp8AoOrBmutlp wCQt5nxP2ujAH5VyyNxVRMAi4ZKkAr6gwCRo9g3bn9rAIpbntLkA/MAhtXtjcSvuwCf0ymti WybAJbNNkkXJNMAiDiocxYfSwCx4pnEK42XAI+6GxDwWbsAq14d66t7fwCb0uQTkeUvAIlfS HHNUOsAfCxoJ/5MwwCSxycmn5RDAKCXEmbACDMAesCS0H9cYwCaMayMiubTAJRRCv4QnJ8An rCKifc9wwCQuJgPPBoXAIf6gn4htGMAoJqAzjGnwwCBiKTYXM57AJIqnoqq/zsAf5YH5Eq4w wCkvW6ajqLzAI7rVW2ytmsAjZeA55rQAwCscieHp+7jAJR4k18kt48AqRVAYj3euwCP0rBGT nGHAJUNT4JbQHMAnf/ZAlcxBwAq+7P80xADAI5hT0WzNMcAeFNxb7afAwCGmX6SaTfjAKL3r 3eyr/8AiUYQj7PegwCUtAWN3EzLAJrpYToY3e8Aj9C3UIVuhwCRHOPjUQdnAKTARUxY10MAi x8sN7eyEwCqR2+3mhqrAInA9BHw8XMAlxSComn0kwCAz9ganNNDAKCV+yY3E48AeDG5qJK3k wBS5c5BIIiDAJiO4yKh1G8AhgGVt6GV0wCflmzzjPwnAGTu9ivzGSMAhY07JN0OIwCKiYiUu H9jAHAWTPceVNMAhO0BGK200wCc5xfn3ThTAJCK8R/PxUcAeh8DKk68swCawAoTh2gjAF5f4 em4qbMAlxCi887xmwCCOV5jYf3rAI39KPx89e8Ap03aFKL18wCNEAAH8xYXAIzkrr9H0fMAg kJ0urggywCOHyRj9IPnAJhdr6mvcuMAsp1XD/J4GwCnD7pUhnC7AJl2x4w2R6sAoTp26t45A wCXkySTHOFDAIQ/6i7juqMAln5xnGYeZwCPwuBh/9jfAIXSzYnyqNMAj+yVZ1SgNwCQBjwP3 08TALUldGwYU7sAkB8oO732WwCPaN6slT8vAKJEtkMcDoMAkchEu5R2jwBs0oGUuMbDAJNef okGqtMAwX+3FLf4KwCrOk5lu1PjAJd4eC1ZYEMAjVD4Bjzc4wCfzUqHpnPjAJW17ZeCNCsAp Q7P7ZcTlwCYaFaZTC7TAHUQEh7EfuMAjPK/gq2B+wCDCkyuDj+LAKMaYV/gZq8AjllK7/fsb wCNts+0zY0PAKkHmgiWSz8Aoo2jvZ283wCXV2J4tmTPAIW+BWEp5hsAmejVtkneDwCSlDseQ HnHAIamO7JyI7MAlNh9QBZ2WwCSSSlPT3TbAIP7NQbvJcMAlk2BTRcEmwCgB5cGksrTAF2nF /mRuVMAk+4eVziRpwCIuVRBZJCDAKKVIOm3I7cAj88EY4a4iwCGCorRxZJTAMMBa2sJloMAq 3XYQjNdIwCQX/BxKZPTAJQGuxyfgDMAfh2w3frUMwCSDOH6WknvAJU5vVcBvbsAbj4vazn9Q wCsC+QaiZF/AKT5L95ColcAmAF4Aqle0wCLtw587jTTAJggb+Bvw+MAk7asuBzKYwCcs1Tgt 5NPAIVwseyKbiMAlg7JHSg/TwCZ41RGYHc/AIQVDANt7wMAgP69Ab+WwwCACnBnz48LAKRsQ ed+emsAm+zsCXAuuwCSc7Y7Nr7bAJAU95FBo0cAlo52irws7wCbEgW+vmjfAI0cNzuZRLcAl jhMFoswPwCK6BW1/FE7AKF8PulmAFMAmO1b2z5JowCazN6L3MsvAIK03yalZ7sAnD9kU9GJd 
wCCiMuZ6QyTAKdfSJSybisAiTal4VWi6wCJ15ibZby7AIXLJsK76vsAmRaQDrWMvwCNwiKE2 vA/AJfVMi3Yx9cAYZ/lcrBe0wB2ljV9fwYzAIV4h4CxwsMAp4NK6KXGUwBtirLZCo/TAJ5CW EfdYfcAj5MWhWKWIwB8DcDGma4jAIf4e+BsWrMAXxHgOTs6QwCSepK1M+BDAHh1FGwcIFMAk Sif7JPeLwCP/KWMvhe/AJTcojIHd9sAbEUJZ837owCVbE92v1MbAKi9V+gKGpMAjj2AJyHAa wCSZbS95ZF3AJHYc0Tbn4cAgbL/fN7S2wCZbRuqUjf7AJeXlfiBdo8AiaznOo8hGwCUzVs8j AjTAH9NhJNAIgMAT/FEr6WscwCT2e23OO3XAIMQ7XZStFMAnYHFgE7KswCd+2b2vKwDAJEkg hAVAasAaRmuoY3PgwCaKWo395mLAJmXuMuPXnsAePrX9TO3IwCWbi/iNcabAJbHLBSkmW8Ah tmw6oB9WwCPxllr89tHAIg4mp6MJmMApr+EYgJIFwCZD0KjqIbzAJYTOFx0gbcAaASbGJfVY wCJ34HzpsubAJMoCb65aQMAqnnH4kg8mwCRBjm5JonLAI3AQYCFMDsAj0Kw7YwxRwCi2B5Gd aijAKdhXx6i8o8AffnHLmX9YwBZFKf2ONbDAGYr6nRDG2MAigfAWfxRCwCgUljYARLfAJif6 kk+DkcAkEdY0uCw3wCYtBLc4jyDAJKR8rkGh3cAlrBY3F89lwCJZDxUyqwzAGnuh2oGAQMAm s0CyzT6VwCd9fG1nRGDAHR+oRrEYOMAlmxeP3ORKwCGkHVnk2JTAJtkUVQX0wcAmhF/g9D92 wCTZLPF4i7PAJxJDINO4d8AqzMMHK/54wCNqbr6t+grAJ43bLMzqfcAnWV/5AvwWwBjWHPyi l8DALqHif9naZMAkFYNCJz9ywCazfSETt+3AIpL1mUbvKMAg7MV9++RcwCFB+Hiy8ODAIljj Pwt/osAfSfKt0qhgwCdTL3jAmuLAIxRvb5dB/MAlVshNpNoCwCHwUWBofUTAJCxU2AGiY8As 4tPxVz2fwCDi7jD84WTAH1jtbTtT2MAjFTu4dCVIwB3IN/CSeMzAIsqRfXSEfMAjUiOszdAa wCc2mj7vSBjAJhR9tym0TsAnb5RWmNW8wCE7oxhR7JjAHbbvUFzSEMAlKwUM2zTRwCdzCNIK YHrAIO7ZPwUizsAitxgzGHxSwCR4hfVNspnAIv9lb4sfysAYsm3bF034wC5dYf5wIYDAI95f 8NSOm8Aa7SDnecoIwCU9AH81WDDAJkorFAda/cAm1Yol7OwxwCfzN5wNVH/AJL5koZu7lsAl sRdjURguwCa23JZMOxLAIOjAINNwdMAor5b0PpuxwCg5gWiZKi/AHK89VQdewMAlvVKOzdk/ wCe46pJdUPfAH0WW4/ANqMAgHuYEGW9uwCbU+B72V8jAImsGqeeqjsAia5utkjGCwCiR9Vmk 2FvALUzCSlqyBsAl/GqyYx/awCHndAcrs8DALA7nmWKmNcAmIeRqKEMfwCowJc2tNZ7AIoIa hheREMAj9wSjHTtJwB/loQa4z3jAKFD8Un+ZLMAkLL5cRuGnwCkCE3hBBOjAJpguWqlWIcAi wVFV43j+wCbAA0ZMt8zAIkfJFK058sAieTew9fkSwCYWU/KTOY7AJemkZqgio8AmfqjLtR3c wCLp61lEKqLAGvlCiJA93MAlnMOVlJ7VwCavaqbmBpzAKDyDECtWHsAouUeMxDLIwCS2EeGH ltbAJTv9BGkYzcAfsOrNcrAEwCOs/mJMmZHAJQrIjtj0LMAn/elGX3+CwDD79rCv7qvAJr+C N3T788Anll5Y5I44wCEk/1gkbHjAJOl7P3CayMAl82DDteWIwCk1SgsYSNHAJmzt67FoBcAl 9KlDnjBGwCVkw5QT26bAJjd+vMhkh8Aid8aydCXwwCeUanNrpm/AJK748YPba8Aim+eHvWi4 wCRh3SATDMHAJvHqNV53DMAl2gu5ED23wCHZkK+koXrAJ801aIl+DsAjlgpueiKZwCAncp8R SOrALBoM8HBlSMAef486kQtQwCcgZEjWaNPAJt7xPifGh8AjNibzYscCwCTlcrNNHUbAJJaq c0UvJ8AnDXug8h1cwCTs965eAKjAGmFB+eFWkMAimESjNqgkwCgmzofs1Z/AIr4aKQRKqMAh PCUZ76sywCMn4lQfWirAKppkVxdC1sAinKhScjm0wCg8g8bK2T3AK2mH2/xo9cAgXgzoSQyw wCSJiFN5O4bAJY7tJLTrqsAlJ/rWmyUSwChkqUq3djbAKiJC4aq7YMAiWKoehazKwBPHQNzc 4UzAJ4RakCEsysAk5USKwtxgwCTJXHjFWlDAJhGAH/HUR8Ag6k/nmz9mwCOIIz2ey/zAHC7D Tlky4MAjBU82v5x2wCEjhZ/ZOXrAJlvJ/YodpsAlIqBSsRh2wCx8pJIh4GXAIt1NTQnAKMAm ZRq2vZZUwBcHpLxnSgzAGuDoJfKa/MAit2OSBtxOwCN2uqD0AgvAFX8DdFnf0MAhq0LLWjew wClH6zx+9F/AI9yN/zQHNsAqVKoi27NjwCbEEjdT4yfAJJZi4keMmcAlsbwgEiH1wCUIQh1e MUDAJh35Eu08JcAcfrtlEiF0wCStShPCIA/AJASsacr19sAl4YJHnB0owCBM84W+MfzAHV72 B6HYVMAj5SBxomj1wCMjserQYdzAJ3ekeuRMF8AisuhgDrscwCQlaC5KYrvAI4/1DcYJMsAi tA6fbHEmwCVaeIhVxjvAJmRmVGxXd8AkKncMFW6HwCVzrA/FGjXAIZ/EEAbt9sApU1S593QD wCbRE++Qz4bAJ4NJEkk68cAilnPKMvZmwClsPx6JIArAH1Bp7H93jMAm+B7w+RS5wCYa44PW 6DDAIvfh19AFgMAeiCMG7odcwCaJkwnc6ZHAJZxjPwZPIMAkWzaAsm9SwB5PUacIECzAHlN1 sQ/ayMAoD4F6ljinwB7EsBiBnbDAIKi7Ls6e2MApxzWJIqWgwBtAQQzixfDAKk0DiuAwW8Am QSoragKTwCT6rqWf5YDADi+Qol+CQMAo801ssV72wCZKh2HaRfjAJb/E9hVLy8AX2h/L0uJg wCN5nVAfZaTAJC3cwGtMgMAtgEwA3izHwCZyjG4XoyTAKUUsil7q68AlWn1ovIQ0wCGAObfb uajAKcuZxcTLAcAd6oC3wxK8wCcdVNsxAl7AIU0QQ8T85sAY+Nr6epPowCTBdXyMD+XAKCGK IwVlscAmW6jTH/2RwBSNNuHKcejAK7SY0XwnY8AZa9O147qUwCWQOUFGAyXAJZGUF4gm2sAb V0bQFSNAwCoVA8krFCjAKv6SXnBXJ8AphSb+KQ+gwCqifaH3JYjAIrYA+kSr6sAiW+upJo8q 
wCU7UuiFRlTAJa1Mkb3JIsAiTIAjqc9UwCS8qzLJeeHAJ7M++dTIE8AhlfjHNrTswCTKkJHm Z+HAL2/sY6zNyMAuZ0EOE+6OwCkhUmBGDUDAJZcUo65K2sAmor1SbrVUwCPVMUcwmXLAI1aV r7PR8cAmuwTl5nBLwCWEcjJ9r9rAJwTYEz5IZsAqr8VHEhk6wC6NyXVq7G3AKQs6sJBkicAj dy7k11edwCaC2Ch184LAJ3/NhLgwVMAmH794oybRwCOjzMSSrlvAJ6OBoBYSisAji5Es3CSX wCDbhbFbtp7AIqW+EZ1tfsAhJDz0SoD+wB2c0qDV0PzAIYd7CieV+MAloHGHV0rSwBorGR97 u5jAKaiBfndfY8AhHj513O7mwCTGHP8S32PAKOzhRk2aXsAmuK9RR1sZwCQrhCaqaaHAJOvj NbxvAMAlx23deMiOwCNmcHHwNIzAJJKq4w/JBMAhvPxIj4JMwCOt7pk55f3AI3rXlgZwLcAt HhUX8/QwwCeoR0eMfD7AE1RU+KDu9MAkZKcv6MYjwCohuQ45j0LAGyjrWEpbLMAuG2a8p7Ua wB5HBiTumAjAEZK4ojVuXMAhLTHoBG1wwCag5f9kHg/AJKduzjQ7RsAXIPLqoDwYwCgNpwFj bhLAIVUKNSTWosAqc8t99YYRwCbTjdyG7o3AIyJ3khvqSMAb5hu9O0R4wChHDgrE2yDAJb5r rMww3cAnKJAeCoVXwCE0TdlTfCrAIj838ul+TsAa1JfDPNYQwCT2sLcxA7/AI3jOH1ySYcAn i4vdqPIuwChYEH6rYNfAJ75P4hVC6MAlun1g2QCUwCKcVJdBQ2LAIawErda6aMAlSdyG1VLD wB+QeO52fnzAI4Gm6hyaMcAh00i2wz+QwCTcR4L4llPAIcuOW8V0csAlcFZMRjm9wCKj9p1Y CQ7AJJ1JQjhka8Ajp1iueXZowCRT7XUIbQbAJdPuoSLOLcAiWTJiKjcqwCNawHOuSnnAIjGa /W/X0MAkwOXexjg9wCOON975hKXAJJqaCdZ8AMAmuRjTk8ZowCKwcqwta1bAJyN5riF9+cAc kkNdRVXcwCjQRWDT70fAJyD6kXk8jMAiVfCNDN32wCG30E6us9zAKDf4pBnEo8AisuUadWR8 wCrUAey5wQfAIdZQNZBf8sAnGw0KovXpwC1yAgvn0lDAKTX2Lq49EMAh5GNMO1uQwCPa1hDK x3/AJf4Bu9gGI8AltcYFkop/wBvGHdN3hQTAJTajCwXWqcAmXyExL975wCLrz+rqGVLAF1H9 hguO6MAlDKoro6wDwCbAao6/x+nAIdLAUOqejsAlw3obflhdwCSJS9ASu5vAHFchK6eWLMAq T7QibjeBwCAVDQYOPZbAIdvztWB3MMAsYp4aJ8t+wCOWVM/K7JjAFmMLVSPf1MAjae/iRzrb wCBudpAXIebAIq75bA4v7MAmbcQ/H9tZwCtTHjLVpUbAIysa7QizI8Akb9658hbLwCL3GjVU tWLAHb+WvGxOzMAnbLdHpWbfwCKq4+wIEt7AIMZIoa4IGsAW7cfgcre8wCHCUsQNZD7AKFtQ WLIb08AmSFESIasQwCd7GkJvkv7AJaNTWhpZe8Ape2pCsy4JwCynp2B2PVLAJsE/YCXmwcAi bRMJwOD+wCi+E82mqoDAJFmLgDQ7acAmK2ZReXPvwCUXIjr+c4bAJOvPd42SpsAZGranW3c4 wCsy/sF6kgzAJV/xOL9buMAh97sxe9p4wCcQGHA7dG7AJasrZ8N8dcAlxbqHEtSJwCQoCt1K k6bAHCSLl9F5AMAl6pcEcBT6wCCM+jZShozAJnA2zZ/F0sAh/nqhrRsWwCTdr/T5mp3AKc39 VPLhVsAhs5maYQh0wCJ5joMBdtzAIdafv3iEzsAlztdP6RkZwCCOnJVX7FbAJYa5UQIE5sAm WGOJ5V2fwCb4O+JusSnAInE9vjlhlMAleL2cxS98wCIjlLd5fkDAJbvo60GcjsAhrBcotvcA wCtQyFs2plDAIpxUsYnKwMAeNmEZUxVgwCPf07jQHDzAJPsHU6qh8MAkFHdf0qEqwCPJn7rk YqTAI4k9d8w1HMAnrRQRN6YWwCU/DCUjqCC/+BVXxjIL0MAbHSF5upFYwCNVP3tu2YjAKFRt M2J8/8AmRFzOToibwCm3tFYVxGTAJRi4BPnzvcAoWefMRbpuwCEH/aNotzDAKMTy/4P38MAh V0tX+o7KwCci/ym7rgfAIwILwCgo+sAnE2SoHYuuwCZ58lGAjC7AIrmDM0Qo6sAnByfjsq2j wCIPEzj3E6TAJdkjaseIJMAiIRhtDP0wwB06+bJG/hjAIQUKEAicosAg2bkMXC7YwCaPfcq6 cELAIeHA+vQAbMAkU5+/05D3wCFiIrtH8mzAKG5L7raJCcAg/G6PzUaCwCSWhJb6chrAI4wn MVazcMAm0XO8lhUewCZM1Zmw/svAJmmUVP37/sAhyCpJF9l+wCU22Fuf13rAK0f4swwtP8Ah 2lquOB4IwBl7fXqLDBzAJkdXMBBkVcAkaHzvZccxwCP+S91TgKfAI7mYWmGXmcAoTCaObIDt wCdwEPHJo1LAKemSVZi+ssAleGG/qkWHwCMUz1obFNzAKGiC/Vl5BsAqg0CHu2XYwCdrSgkX wfLAJvm5B1+v+MAmkpVUpBRjwCIw9eI3DVLAKNb8Ab8xxMAnuR0SEyZ1wCU3mmFKO5rAJWgO dFfhMMAdZ+H4mMbIwCb1HlEVDRnAJZsfMBf148Aq5vaaeKnPwCzq9nKCX0DAIdv5643sssAj NsFkJjjmwCEtMfvI0qrAJDJTLRy0+sAi4OpCiM/kwCg4C1NaZVLAJytEpD05x8AjTdH8+zFF wCS4UI0k8b3AI4IwnEsPF8Ajg/UZdFa9wCK400hJ0czALAuwTqFvgsAle0eHlli9wBTj8TOE X6zAJHm/ahBYkMArD4XmIQNdwCT11P4k+i7AJhQQRncDTcAkZ8IZJsT3wCOkBbzHXwHAJ6jA CbLrk8Akd5sNx4WFwCLN+azYEj7AIXkEeIAo8MAkqQYM9jJQwCbK1Sx1J9DAJZtY9i6ub8Ar kUm/TbqOwCbCWc0r/CbAJ4hc7kve58Alpi07J1bIwCkGNf5bPHrAJNXhVrdTesAlZebPgL+m wBzVTq9oIHjAIE10k36HRMAnaGNuf9/LwCI8i5XONkrAHlR43IhvTMArew/wHRJZwB6KaNFD NcDAJGLR8VsIs8Al5QX8CNb6wB/+f9ISQJzAKGEYdDCh6cAj4kwdxGknwCyw3yZggdvAI7hB FccJzMAkckEXxeHfwCousRV3TGzAJ0Lm9w1qKcAl2oTk/IqiwCDbDtmswdbAJWO/4sgDS8Ah kA0U83+kwCcK6ti7DjLAI8+LlAzVYsApUviSwhqwwCG53jKod3LAI69q/R5oc8AkD7BtmTgd 
wDAXwSJ9k1XALT3NijURFsAyOE1paWEtwC3CwQQu31zAM6agWjwxGcAnFwepcAzGwDZ5KSGw zizAKgIEt6GsBcAymAdSs4UwwDE+iW6YYvbALvJbxY/oOMAzKIbgQtM/wDJjOzT15W3AKruh Wp7R6sAxj0fLw0CxwDN+jSJh1fDAMQKTYhGmg8AoMlTUPH+OwDFLQpLLjjrAMUJoKdF3K8Av SCj8qAo4wC2t4LTVfqTALpmNrJ1TU8AnRRpe6TqjwDBfyKH4TEjAK+gId38DH8A0z9739Ptz wDDma2vdMD/AM1elfTiKt8AtO70X5oa6wCzL1LN0fD/AM+pWifvWcMA0ivWaG4gWwDRagPaO 2G/AKmFXcQ3XNMAvBhf2LUHLwDD3hqvO6Q/ANB+Uj2uTIsAy5Gry3NjiwCbuaXvM5PTAMeMc dUhf6cA0YHXcB6c4wC0yNivLVyTAMG/QgijelMAyHScUSQRwwDDywa+RX7XAMZ8niJX84MAs 3zpioC8QwDaJeVp+E3XALwF6cdllksAwWdZgaD2YwDVTVoqOavTAKxiRj1rAb8AyflgJ7U/+ wDTA/Y7lbQnAMMEIDmibUMAnQWO+MruUwDUiTxWqDrPAKyOmPLvrZMAxjYYEywUCwCln46Xv vr/ALwaMAX7lwcAuL0bhOH7owC4boiINOxTAMBHv5kOjhcArFbugOUM9wDDaweUkuR7ALVTZ y1X708AmRecXe0j3wDF2RAYnlCPAM5DGe/EVIMA0oUiqN9SWwDbeak0ocnnAN21HBsZUgcA0 0tOdJyNKwDWztdIeORrAMtv/SopAWsA0uJUcVUlwwDU8c21MwFTAL3h4LTIZ98AlHG1FDzI7 wC9CbhQPV+bALzOBjqp7xcAyRmfuUwm+wC335q2INPXAMZ6xaBUVIsAxDH9yf68WwCxmWEj/ BkTAMrORN5va7MAoo2JsJx8JwDEmtqAX8VDAMIXJlSi7rMAxsbQf5785wCzB4Fe7o9LAMlnv qT75xsAtFEe2Vn6ewDGYnHQH1MzAKAxcfPtWbcA0tfcNsUWkwC7YU17R9oLALqijJSkt4cAv FKBf6pEuwDAX4jkbrkXAON8JaoHUwcArYtjDpYOQwDUnB1ogZ97AM5KflcqfNMAyqqla9J6V wC1M56pPcILALVHECQHTtcAxsayT4tTcwDX+J2xKsRzALoH+9UELX8AzY6ZI/HxywDWAwGgg 61fANGST/2xfxcAxFU0YfDSqwDHe3NqU61TALfwPIPyy/8AwJu14mrRPwCi/N+xmV8fALRLW TZgXrMAwdFs4zMGswDPIc8O6WjDALtTKtlTJuMAuSwl5v1RywDBVUQgdSRTALYaNWHriOsAz WPRPMeRWwC7qk1YQMBPAMhEdhwGMUMAtTEI+oCovwC+zwZZork7AM0A3RnaTCcAyM9GPpSOx wCtEM4DAzTLAMbWvdENACcAyEU9FAe/ZwDIV/k4HNVXAKkyKRL/vMsAxS6TgyZspwClXndVw TuPAM5HxAB4Y0sAyBt3AYwgVwDbStapYQCTAMh48ch0h4sAwIO4NZ/crwDH3kQkDUt7AMNNf VHZle8AyOwJyrxK5wDOzfDyihq/AMX97vPg3l8AuGE2MxflqwCiFcAbp4R3AMbX1dkzuusAv IBGTZRXlwCvsHypKOWXAMRMHWZYPVcAwPRnRaeKwwDUEJsCvtxjAKqZnB9HW8cAuD20qM1pO wDR4Nbgb6ovAKwI44zdTzMAx8zEDLle9wDJ1SHoWSnbALoIXJzNK98Ayf1XUtVGAwC1YREny taHALkjaOKoOwsAx1u0NwlTLwC3kzNAiDJDAKEpEbwAd9sAs8VxLBV1wwDGfeJ5NyEzAKm7Y GclBacAx2f1FftohwCirPSLhV7XAMuIB8xmspcAvrjZMm5ISwDd6F5nZtazAMxn9IQIB+MAt c8kbKBeTwC7mfHcR7KLAKU0SaaL+48AwH1/Qqc1gwDIBJtn11DHAMe2pfVaPXMAs10oZ8qmD wDLrdo657pDANIR9UL6/lsAvYVis3J0BwDHvM/IMuazANCiiPDdrWcAwWe7ePb/swDG5d0CM T/jANGxSzXVdOcA01h12L34DwDD/iG8CJ2nAM2ryHnlgs8Aqa9s44bcOwDPPq69H2tLAN73M RPg4/MArb2ysMzr9wCztu+/O5ITAMJClpnKe8cA1loHxJer/wCk2SCSUfW/AKxnVRrIFpcAu 1+Awnx9qwC/CLgbDZTbAKOuBQXFZpMAzlsgJ+q7TwDAIdRFnlj/AMIH1e2XGzMAt3eMa6LeH wDI+tDmc8DDAMGMRqje8ocAww6S2RSxOwC3cQNvOd4jAK9boiXbXE8AzpC4IGzbXwDUxvKpp +wvAKzSMOLMsPMAmirwRIdBywC+pO0PSNgvALfYMem1YqsAzDLpef46lwDBXdq5zyKzAMENO ClXchMAqeV+l1FKgwC0HTJ690X7ALy7Nf4ygrsAtl84ZaRy2wCY6ws5f/jbALebJgM601MAx 97hxx9f+wDF2+WmbKj3AMQf/eyu498AnImNwx3q+wDC6+wvtxNbAKsrEm3ogrsApt8Vu4F0+ wChEjYAc2ALAMS9OKQiGN8Aw2i4izYARwDNYgwDRGI3AKzKAc+jHzMAs3lNP3OPFwDSNLG9E e0HAMUXtBQClY8AwkgHUDqRSwDJkTC1lScHAJbl8Bn7QocApFt40TIk0wCwefIQGLErALQU9 8jQTq8ArAeXFibl8wDGURwBgz0fALgwrNWJ6LMAv877iu85gwDOYKTWjAA3AMuUTs/ilHMAv 4AcdqUXawDFjSxgKi7DAJXYwgpvSOMAxtXkVVXOjwDaa5AMDz/LAM2AerRP2WcAy5HUkXUay wC5l+NwzS3vAMIZJt9aeR8AqzTMp6GGkwCnbE9H+RB3AMseSV0SH3MA5Y2jcriRdwCwqq5eg r3rAL5J8JxgItMA0lFh0PI9MwDVTR+IdjnnALnJ58vKsgsA02w/3VVS7wC5qmcm+uPrALFoB v2+4e8Ax731wN68HwC5RY/6PG3TAL7xsvMhj+MArWWOJ8nZJwDPF80oeCdzANC8tUpgFjcAx O99KHUoJwCxcgNLj7irANTm8/HLCF8AwCyHsCVVDwDC4tMTmC8jANAAwCgkT2MAyGCpeMTTY wDFogljOAGnAL7nTvuW2mcAyjr7JRdndwC19Uw9koo7ALjIhzkTCIcAqSehwa7C+wDFr2COR UfTALBWa8DiT5MA0LxL3aPGKwC3tm1UNL2XALWq0Ri+B8MAwIGgR0nHzwDN40Nrgi5fALahB nIDJ78ApyzMn3D+awCx60qe1xfLAMQ+tGQePo8AzER5QfNFxwDCh+tVpLxTAMnmmmxU7E8Az bjm/ocBfwCrgMAez0XvALm16xCMF7cAlx+quNccXwCKywi0BkBHANrudcOKOqcAu0zvIypGz 
wDAkNB/Jj+LAIoWEQNZEScA28V5hnR54wCsBy30rMV3AM0AvI4+wgsAxqWq01V+qwDTIy+nV QrLALedpUTV+88AyKkn6VR22wDNvpskr+g3AKc8FzfMg08AqEE+4Mx3SwDM4vJ3qFXfAMMIh 43qdaMAvXZ52KqqawC+jr/utw9/AMPJRjYiH1cAwBgznx5vKwCvcDC/V3ufAMycRX/bK/cA0 fgcFdVgpwC1h5j2/nRvAKnUcEUaTlMAm8iof/qKswDOMR/fa2zjAK0fOXjKbWMA1/IKqEse1 wC0Wnrwur9jAK+hDM/g6UcAtrx7lfzmIwCnFsIiBy4fAJ3/K9G2b5sAvyefiHd+RwDCGe1Oi oqPANCa78G0pJMAuLDQHMng6wDDByWSdPsXAL7pkbJLZGMAzfVU/qQFWwC4Eptt2vd/AMwoH mFAHWcA0MnQOIGn7wDGZzyiLMg/AM/mlBuFxr8Azvu9znCeewDJbAPoAWmLAMNtxyYzSJMAz yVokXNUrwCxUOr92QzTAMJGwwEzaTMAwp3GbwZFkwDHvB92jKjTAMR7WuY10zsAvpSZJ3uhq wCfDjSi4HzzAM7ZY7PBt/cAv8Xq43u7JwDV4wPeVA6PAL6n/xwZLhMA0OihAaspHwCqQ5BA0 vCzAMuQCZ9mPKcAr9EVclsnJwCwQRw0w38PAMSBVEfm8wcA0SpXEy/NwwDGYC4RHFyjAMCZU afXvWsAxrz7V+HjSwDM+VubQ4n/ALJsZ2oHPQsAwEHe7H5TnwDHxjLLS8C3AJtCNW+/HKsAx PHEouJJCwCjBcZPbbprALde+Nu21/8AywQ5Yk9zwwDKto4xH27DAMOcXJsFBZ8AsJtk8d19I wDRCEA38+rnALnrN8L0GLMAwQ8NtZnmawCyK1dnmJFDAMVOZgQ66oMAy5TPMxhUywCvFEAeo iwzAM6ufIY9uGsAxA5fLtaFYwDRnD5kORf/AKstcNo32yMAxhOolzbjRwDCUYS8Jo/LAKE9m BS7xwMAvvgaCOeUnwClmM9l/aFjAMXy5UgqXdcAvzE4/Q8nwwCzVGohHiwTAOUxsJxPsQMAx mwKJhmsewDSEw536GfHAMwuZviCGwsAre095qWM8wDKhs/IkqVnAMEPED/8zmcAuKhsRLuAQ wDDfCnDR1R/AMR4eDKyn8cAqtjZamgCAwDJn9AzSRoTAKP9pzJDNUcAybZBe7p4bwC/5OpUi nV/AND40VssakcAsU7nWTcwMwDUoL+gaHHDAN/Hj+bOl2cAvDxp8J6tkwDLg9s/mYw3ALHTW c1dgLMA2bHK0azylwDMnto1VRLrAMj9tuo8mysA1kXHCscPBwDOXXdrxG93ALaMWizfAHcAx GaBuMM3vwDTls7566YPALlBQMyYF0cA29L+AX/rfwDAAQClvjzTANA3fAyWJjsAxtpCSI+WU wCn24490GcLAMf4A0u7j8sAxV4l0+af3wDNbEZxDkGnAMdT3fQep7sAvFTtOW6PGwCaXLOci o6zAKeBo80Zh5cAu01Qh3KMLwCukmijzmqDALzLyESxKI8AzHqras2AhwDAcBglSPR3AMYRi dYVjJcAy8UpETMQYwDF0WKYLt7rAMVJLY94jbMAl+8ulKP5/wDDjjhr0yjbAMVEqJ7fMQ8Ar W+mddatXwDCzDNmSGMTANeogV4jqL8AwyTtgyU/dwCJtn2kOWnzAKNBjdaX4CsAv3yh2YfgH wDG1/ozM0NnALJv+XdAGfsAwTMzLqFW9wCpilFL3zZPALyq9xek2vsA1a4yRvX3IwDE01bYE hCrAM9F2F71Ml8Aqsz0qu0ZzwDHNr4FmUn7AMR62IjEuusApdvGNc18uwCuZW7IQF6rAL2ee CjbZtMAzCD5fqEc4wC7jEcuIiRLAMNWuANj4EcAzRme/Z7GRwDBFFbUvXUrAMQJO1KOvW8Az bMT+HCUWwC907OWiL/LALIQtzY+h1MA1nQbZXxLzwDCGCfkaXKzAMsy448k51MArk5WXM2uG wDGCyHfqU+nAMttQdZi/m8AhqM/5ayXewCi7Ug45WYTAMSXV4mgS1sAwKnPfSwUCwDO4oS5J CvrAMO109O6dW8AwVVQw4TFSwDWpBim4EtbAKGjFPbgwRMAqnqlKRWOMwC5aLnyGX1HAMkU/ B71IWMAvnpfFmFj2wDC1IykABffALacnJQxx0cAzC1x8UfQfwC+0onkM9Y/ALuhDDxD0a8Az e3PV0WG3wDPgVSeM0/PAM4SwU/99X8Aq93dTyTjNwC20djlJIXPAMEV9AO/LbMAxdKrEA6FO wDAJxchCSJfALQ2uuJxPSMAxveqZat5twC8DWh+1BJLAMSV82fDgbMArPTkqjjNPwC9Kf9Kv UNfALnJeJwK/McAz9r3BvXJYwC3K1bHiMODAM7hXpyfm08ApzcxL795nwDK6TsB5k07AMnN9 yp3pycAzXdLmEMGIwC5lpIskMRfAMRyjd/I72sAvvAD+zQ1YwDbLMUKKmc/ANMZth6Wrb8Az VwqPU6HEwCnuXDJGd8XANcnGSg5hBsAtLkdwmEdLwDFv7x/UEy7ANFujqrekWsAzARcK1Bdc wDX0A9ZAyzjAMKyeJClw4cA1JAbnfp2ZwDE7XWnIrfLAMXjfpLSp7MAwb1vRTj9PwDVtLBgI /krAMBv5eVb0/MApk07dQp0HwDEt9nqv5NfAMJt/2fW7ycAwdBjUQeZAwDQgxSGyBoXALwPV idl1YcAvvXilP1W4wDKrOPz2vm/AMGBGVzlddsA0febJOhjMwDhMtGSkaFjAMFVMnXPiuMAz vsiVJjcswCzwhRwV5ofALEkYNMyKG8AxFstHzSgAwDU/NrwR7BvALZ6vFBpFt8Au79vQwtqn wDUceYzb1ALAMBRv8tWnesAwA4Te0GnHwDM1mCvgVLrAMYAWz3ruv8Ayubs/NGc0wCoIorEh LS7ALzw3SZNAjMAwExP76GATwDY3BNQAOH3AMdakZgmcTsAw09wM6MD0wCjcraqvwA/AMQP4 RmhBE8Ax66pnRuBQwDFYBl+GnH7AKz7n76QqtcAp3mHMUvibwDOtMl8oMibAN2fVzOxI5cA2 SNQLy+RgwDIfFK0ZmFXALF8BQEUtZsAvVfdamGcQwDFh+JUe7VjALu6wX6hSrsAw1fKkbdjD wC78EwRPl1nAMadE+/WimMA0jPCB6Bz3wC5qnpzPbbTALREWUACTZMA1XHMMaytIwDAn62Ai YtDAM0s2hURlpsAt4jpe5INcwC6/jvSJDGLAMJdmlzeURsAt8kto4INcwCjBWy5Jm2DAMYLK FI4iKMAs0rbwbEI8wC0+w7YTfYzANp9cD3ZYK8Akldm2UngQwDCp0f3lVtDALVz/yNrQhMAy k7/QRSuGwDM83D3KrZvAM0xxOyX0c8AyvLbDij8iwDMfHVHq+3LALS0tYiqvacAyM+lx+nqW 
wC/8K+XCANjAMpJbbzxaM8AxS5Tg/vCTwCZxj2UU+iDAM8cCD4oWzsA0Vl2rodc0wDHkPRSP iJnANPSofcKhzMAyUw2mJjgQwDH5UZZ0G9rALZgPHZvlHsAvJLYEuVQawC1jtD0kJYXAM4So nYEu9cA02GBHLv3JwDCDAXH34qHALVBYt8C5i8Azdth+IH+ywDIigTv0Pi3AMRQbMejc/sAy RAdUeEQcwDaZsLTygkjAJNzsB6pkwcAxkcIeXSYFwDYkNitiuo3AL9rHi/YmfsAu/uKImV38 wC4UfI7bAiXALFspsYNu/sAwrs5o885GwC+PvEBcqxfALoPRnBUDn8Az0XrbD5j5wCuIz/eW tc7AK5007nKrcsA07tSzqwH3wC/SrZqyiEzALPPTrO9fqMA0teEv+iMWwDPH6raRyxbALbdY fU8j6cAys2/vRLf9wDJoCT3L0OTAK7iryyFSyMAl+YpmPxKtwC+tNwXY6GzAMt7foAzo68An VEcw+b58wC9cIjqdqKnAKYXi8MxQeMAyaEGHHTZvwDGAD+XihwLANDX9ivwdfcAsELy5qmEr wDDhKwiN4YDAKxra5yMCUsAutchrZpwAwDJlO5wd7jbALkOP7AUPzsAyfavob07CwDIkDgXn W6/AL3bRyjO9kMAvibev6iPdwC7XQu284DjAMlDhcXZKQsA2IJVGqcbUwC/+rOP7njbALit9 njQQsMArpTl397t+wCtztLclzkvAMKrVK82lKsAv8e9n58rkwDSPDLJi/W/AMZ1a8bhozMAw kI2i65MJwCxskW/wRJrAIS9jmMjsNMAucq+GkxPcwCyBjCTjh8LAM6fIIa+aZMAwtpU1fZmZ wC8f39aVy8fAKrycdxLrTcAup3fAzZILwC5xJliPif7AMFSkCZ0zH8AvEZl9GM0AwDJPK6gc yzPALQauLinWOsAw2a28cHGhwDItoN2VDNXAMR8P6ruepsAuIFXhv5eiwC+bIcPHkqrAMnza eW2ajMAswwcEALrqwDGCyWPmLonAM967c2OWxsAnUDVmx5ERwDTnRsoTE0nALbq6cvh8asAu Dni2OmI5wDIRsSg30hbAMLXLXOvtrMAwf24Q4cziwC2yS2xJchrAMG+IcS8QyMA2UVMTIHjY wDKNrggSVMXALBqR1X1bOsAx97rNQP0gwDNt53e4irDAMfnLmvda3MAo7/Pucu1kwDKIJUrE Q1vAK5++b1ka58An1XI4u/fGwDPjXLRwMS7AMP5FRijfQ8AwJjPVsEGdwC/ArTs9rprAK9hr 3CYDlsApWK7Sf+qtwDHbIErh9rLAOFHYcRsXRcAsbokf3hu7wDB5n7IInyvAMH+o5M7mNsAv jBOIdnzJwDO8D6OxeFbAMYihbuV778A1lLgWv/rSwDI5xPKH3DDAM0m54nmo4cAvFxWFfEN4 wDB5xTlH9iTAJsYVUfPWRsAyDBnolFxYwC607o93JVXANy0za3IDesA2JPiN5oMYwDJ0M7ng z5XAMdKxYTO3RMA3P22cJGW/wDMj1ix4AzXAKnpFVw7xUsAxhcu6Y/PmwClnBU8KPMjALiPn tn/iYsAw9SaTdQP2wDALIl67iY3AMJ8IfH5bTsAvbGXbDXfpwDWmy/W6XYLAMOOCOfq1QsAx bFXVht1FwC/dXPGbL+rANEBYfsks3sA1YRd/HF+CwDGCNpoQ6RTAMgU9qc50OMAtPnEWb872 wDIUnPGxcVbAMTNrheYiRMAxy6UDr4VBwDFZRY/PZFPAMBcGOyV4WMAu+m7EAvIVwDCt7xZ2 5QnAMDzPSV8328Ay2m6EUur7wCp7aH0GHcLALlgZ1MehysAs6JpICMxZwDJw/m5vuN/AKoGg xTm/W8A0SlVMHHUewC4LQ8pPD1TAL3gbE1p3EMAySOBEN5hawDTzobKXIy7ALqVGEjpg/cAw I/8yii1BwDDDfAZXEMzANQQKhm4K3cA0/wJD2zt4wCvzuoeCDxbANN/h3AoCw8A0AIdu4zAt wCmyvkktOr3ANQ1tdpxFO8ApaxZoXlgLwC5wPYYcgYDAMJ6NOfxnWMA5w5rS4f1hwCsZApyu ru3ALudlUtqHwMAwCrd76lk7wDZ/2JcWPd7AMeQtZBhVKcAxQHJnUI8XwCXJdXxmccLALoMc z8N6ssAqajP2GtUcwC7jLdfM1s7AM2U6zhKfYsAqAAcJnuZEwDU4WceKYMvALUihcn3BEsAz hQm/CqWYwDZw+YrmHerAMNn6hGxTlMAsWWPWfKSIwC7GQJUmmhjAN/i4YBmzJMAnmpvBmzXq wDZo0ZFnxljALxqE3Ptlt8AvhzEgTLJ+wDWWRCvJI3DAKXRzotRVRMAobuz0UWBRwCo7kOHd IUrAKNNmjZb2vsAxfqAiRv0BwDI3jabFXKDAMEy4zw2tVMAwErE0Xb29wDLNrf+/naDAMFme 3Vq1osAs/Qbv22G+wDNVqh1CAUbAMKjpIQ7SXsAjvKf/M/5qwCTQmFRmQW/AKuAJmgXo88Av 5ziHIpZ8wC2TI6qtnm/AMYt91ZPGJsAxjKEeJxlkwC3MMFX4pJTAMCwSpcrerMAtd2HuaAJQ wCjAVvHN6R7AJKV5IQMShsArETW5QyQ+wDF6qb+EF0PALtgBU1FVacAtBWe4BADuwC9IMkqY jj7AMao3RJLtgMAszLbJjojywDGLP/6x7+vAM2nw/S8GE8AyJ51gwpNLwDOY75CLmljANQnM oYnMh8AzXREEhodPwC+4oFsEwlrANUMWBKcbzMAqPkwGU7FowDOeqbuJkK/AMJV6/DRAyMAr IGYQZLT7wC4aPBIz0JnAMUmZhQUmBMAva9hRjWeUwC/0sLU7uzzAMet1HlPLD8AwgmWc34vZ wDM4Od8Bh3fAMZj9gvVoPMAx4CdxOD/7wCYjuH+ZxXPALQ1LFafRzsA4Xx2N0yarwDEJ/SRi HRbAKZXkR1PYBcA1topIU9E2wCUjF4td2uLANNhtnX56CsA4npkqH58VwDOg7STeDOXALgaO 70P2mMAw7QnCRntewDdCwFjRjunALHS6KABMPMAzAZUpBdjXwClAS9EBVOrALfNVlhOFT8Ax 5f6x5MPCwDWaSMcyr4LALCNZVbU+usAulcLpCqqNwC2ijdqe+U3AM4E03L0SEsAyngZmGRV4 wDYrfDOf1nzAMKMv1fNXhMAxxuzOtzDXwCyONhLMMT3ALAG7G4xrOsAs461VbbJxwC+6b2P0 nlnAMjLEsM5dTMAy88flOKFOwDADQA1YI5TANFH4mNtu5MAxQj9FfnsBwDGkwnMCIqDAMAhy mByyIcAy+5XzWCCiwDAkV1gnJwzAMoLVWJrVi8Aw5686nvM3wDGJrg0ZjgXAMLQAKMHp7sAv 36Z0QpfUwDHoZR7SaX7AMeZDEuKD3cAymz+Kl/t0wDDcMnlceWXAMWU443sYwcAw7wUMvU21 
wC5D9uhQ36rAMhSLuqN7EMAtwx7tklzOwDNVbfJj+iLAKySJBtxiJMAtXsThlx0GwDKnLutY rNTAMy6iN6hBB8AsCBRoeNjRwDIxaRuFElvAKM2BKA1DksAzFtV8F7NewC3mfUm0OTXAJdi5 7qZwWMAqSuIPEJRYwDLQ5yGOBFPAMZb0N1nMHMAvJwxKMxQdwDASFOB2n+jANdDdq3oDb8Aw emYfayS6wC2mWBarOU7AMkXIDBjvP8A2qijwXwrawDCLtrohnJTALmnsJu2p2cAy8acQLeGm wC/G0HQ1K/HAMQY7hsuChsA1L/uFxfuUwCluTl3FJ9fANF8oo/jGksAzF9Ruuey3wCb1oaqc TdrAMTL92AlKy8A3bx5ahOXiwDHw4ZFDhx3AMlZO7vaZOMAyaPenis/RwC5EN/BmUjTAKBxD FCgRxMAx9gWOM6ybwDEZEE2jUebAMgJBr9/TFMA1RoAT0M3OwCyTX20naTbAMiOMYECVwcAy clMBd0mywDdS+AYEMqrAMyYIMLihGMAr3kU67XuewC4zDbd2rInALM7zp2Hn1cAwFNEOyJfG wCpt/CCmXG3AJqam1TEi9MAuBSyfy/LjwDKxcX2H6T3AKtelb0SfHMAxJwXpV7vKwC9bQFWi 9MPAMIdSaPizs8Aws6GEBVdzwDWP9QVczVbAKD64mvpMnsAwKgn2Gx4ewDKNDsihb/DALYuE heB/v8AwEKAjrdUswC8VWGRAmErAMU+Swc6y8cA1ggACEKv3wC68PVvnj3bAM07dRulPpMAu 9i59lYBFwDJJ8D1HcZfAMKMCl5LlEcAp4lREoeGFwDM0787ccAjAMneVZgqG9sAyJV8tqlLY wC9gZdW5kLzAM+SO3Tw9RsAwK8r+o+5JwC8QmqJEABnALg97TqmuRsAybhtc38K9wDA33Y50 r9vAMdYuQf+2YMAwC8uPQGOTwDLF8rUv/WnAKAAi9WHqKMAyhAInIDa2wDQnKMGwSvPAMX+q hv90nMAwpOsMAl93wDEnLWt5DHLAMN0uL57KGMAxvvUVlWJBwCx3XzzrbQjAMHDWbHp1+MA8 zk6PB2D3wDX+dALaV+nAMgDrXMPB/sAr+bCz/4DMwC8tFhamo8HAKinGPZKPu8AwQgJqd4k4 wCwFYwVAuJTAM0JwxsBPfMArP/MYDm2CwDJ4y4Joy2zALcXzR+THl8Axz8QzGPZswC2ozfzi l83ALs35a4+wXsAxvqILmBBTwC4FsyGRfajAMsJTxspB2sAv25AXr66xwDJvySOeNRTANQFs 0lZWd8AzI0x50exxwDPP6dYrT7HALnmfm4oRHcAzGisDs//GwDEtEH/WW4TAM2TXYFyLGcAr 6yU2CaeYwDO6uWvJgTjAMDLc8qpHUsAxy9TWTHFuwC5TSSTD+2bALwghSqfzjsAu+XEINPgg wDLF70/8o9jAME025UDrYsAoPCoq6vp3wDMFgPPBmlfANeQxyiEBqMAu5/lvdtpbwDEIHP/9 IAXAMXbQXyZVt8AxrRpW+4mtwCwWP2PK5e/ALVEIx6uJesAp6Akidj0KwDBFJaDPGgzAMg+F i98jNsAr8qp1PML5wCjWNXrcq07ALWn2zUR4iMAtSBNt5YNFwC67tC9Tc9PAMt3bG2o1b8Ar HLZbErTzwCy9vUk+ZEnAMBoC2uVzAMAwR3cus2ZiwDVOb1X5JfHALebhktPFScAwCa1Sz3Cg wCi1YbhtEsbAJmlTjE/5MMAzIfbmxWO1wDIX6N/SV2nAM6H7upzaPMAwmN8DU/EiwDF43kHt +KPAK+Ki5UceX8Atb/EoTmx5wDHDeKpefyvAMHSV92ZFKMAx1PMQu+6uwDFgVdwy1mzAMmW7 bZ3GRMAnXi02put7wDAaTRQ7l2fAN8RfoVhNosAwfaejWd0hwChn4snZ57LAMGD9zHYwzcAt +1T23Hs6wDDskZ8hsTvAMacebX04AcAsjK8LlhDrwDEP5xHgUJHAMlxj2I/bFsAy2ggDfa0X wC/V4sAXPlbALXgjxgXk+sAwE01jy2yFwCe9IVlwjfLALa3x+id/3sAtBqRW+ysfwC8lmxfB 19fAKxQynNbOA8Awgv0JGs2KwDAzJmgzGMHANW7/9iwXg8A0KuGNFnKBwC1ViopITyfAMsFv kvgZQcA0LgJTcvKxwCfowlVUeI3ANOHgQbf6YMAxKO+0u0xPwC/J8YCmDtbANFFMBxTCysAr bjTJivBiwDDtTxkvpl7AJqJmEBaKaMAxiNRpTWyGwDC+k5oTLjnAKWGzETozJMAtTUOLJ/u0 wC9AALSTKhHAMv85nv6OJsAv7iEHMPDXwDMUZMG0uzzALfkMc6sF7sAxJy8vpgmlwCp98vLN 53DAMzrFx0eMv8Axfp41wm1uwDDUCOKMkvzAMx2UP6VYs8As+AL601EEwCm7Xgz5bdzAMaKj i0KWEcAzR9UflMOdwDSbKMGSQBjAK96XoLGJMcAzRKGcBd7ewC/jweWjqHzAMepwZJ2a/MA0 ViB7QEDZwDCnbmaxRsDAMK1rxy1rTMAzsQf2FVRCwDC5TNmWj+/AMAEhEbng0sArP7F1Yh05 wDAv3c08kk/ALvsMiIS99sAuB8cuo9o8wDSVoQAOgd7AM0F6xDuMvsAt4Lboau9NwCxoszL2 BujAMe8iXzk058AxxmPyh4A8wCYqH1wUjj7ALHAaS2KN4MAsFEUiyhD7wDQTu4qDVivAMpgT BCKkpcAukCdx7LPYwC4J+WzYBQ/ANc/Y9c8ZgMAu0J1YDIaswDXIOvl5q9vANHyN4KAR8cAy hGFr4zcYwDGryEr3i+bANjnSM8RG18Ay2wQQ68AwwDBJNoORDL/AMWS9B41VDsAoEdnY9kBA wDBIL7S01KHAMl/drQ71JcA08Y7DFVrbwDLcFjW4KMrAKhzxHT6WfMAv80kwKakkwDHgrIuq vCfAKMbb234mV8AushFpYJlqwDFjvVC884vAMoLAdJEE28AouhLBnLBUwDDYWqt4BAnAMYEg Y4npzsAuh5xvCTmUwCtC/KTtal3AMGlUL3G+z8AwLfXu4v9SwDZef6EPV6bANROshTRqBcAw 5yD82RwkwC5fenVruWfAKag23ocvPMAuAHJ05e4/wDFvUS0NBgXANZr+9HT9o8AsVrt39zS2 wCuf0qY7IJ3AMeKgIjlvWsAptbgeACoKwCmzU1PsZSTALD3qwdXNesAxmfA/f1FkwC5u++n5 wsvAMIaGLirkoMAp25v49cV+wCtuiTEiZs7AMNZH9tixtsAxOIbukO5gwCo8mYtDjcjAKoC8 DSOJbMAwPyFnIwFKwDmTUG9Rv7LAL/KmIMeaZMAvsln1cG1uwDDQLm02CC/AMxWX0g1WmMAl 4pmsf3rJwCk7KWYHwxbALRx4CNb2vMA0gbPpWPYfwDAm1MmBsdrAJiyHLSITkMA0v6pT4s4R 
wC8bDPUG48PAMRATpdmE2sAm/c8fr8O5wDJZ1wLnI2PANeomj6d258AwfKbt/oE9wDT2KEop 2MrAMFNCvqn5MsAx4ynAFTjAwDCEKstK/wLAMg+aJMWQJcAvcHW/31uVwC4hhxKKkAzAMuBR 0QMfGsArWkntBf5vwDGKBtOYlevAMqHooi25mcA0pVAFa9rDwDK1nsgV1KbALdbRvfOwcMA2 eIDz5CAgwCtFncm6UbbAK4oFF84u3sAwmARe4oUGwC5jpPFXpqLANYuLigCdt8AxGEAc1+dw wDAx3mKDAXrALinfAjCrcMA3eCnhq7HvwDYnFo21BMjAMP/2vCo5hcAzL3ZsSjQzwCrvJraM ohPAMI0LzAgoM8AvjWNP7OZpwC9EXGrXxHDAM9laZNpVtMAzJaYEgX2mwDA0maf6S43AKue5 eUOtzcAvM1IQIyNqwDGqmJZrxWbAKtNjlw5xaMAybBgFuUI5wDAa5NiL6b7AN0aaCmWC9cA3 YaIxy11ewCul/hYlQTPANju7EekhTcAxX2zFwC4AwDJnbXAUUOHALqQnm3J4nMAywJqdntuJ wDHOIpYFJHzALVnQdUfB78AygHyIvHFRwDMzJxdNewjAM2Ht+AuRDMA0nxF8OlOrwC3Dxk6k uY3AL2rEgd0DTMAuT4n+qIyAwDeOteKD7HrAKV68XqrG/MAscBRfJkNawDP335a8Oo7ALVxo WNN24sAlWqtd/164wDBF8tdl8ibAMEdTIUoy2sAzeVWmqIAtwCcE9S2w0yDANXrADQoYvcAl RDe10uJTwDF/dOY9Ue/AL/McFklb5cAxpL1BF0U2wDMnG4j9PLbAMqheC/eJi8AyXQ1cI0zB wCS4vcmAqk/AMhkM5um9ScAuY3YQW8LswC9asWw4rNjAMf1DvNEXs8AqDvJKQkMnwC4Ke97L 1+TAM/xUJ8rBxMAwwN8aJXjnwDUZ0jJcqpjAMYkpc7rbLsAx5V3OWMSPwDIU/akry1jAM0kU YKkv5sAwLMmtcfmzwDNN54n/pvPAMUxR94XlB8Aq9MGC0clKwDLDvlYkXx/ALCrj85hnUsA1 Fwg3G4UKwC52e4CML7bALwXu1VVwEMAv8XIrlGGAwDCm703eeeLAMo0+/oxqdsAxex+dGMc+ wDDJgSg6uPnAL3LCc4hAysArUPzS5loqwC7j2nJxr1PALbwUdJldXMApvGGaKZCBwDV894Iy DBjAKKaM2X+NcMAzNjwzQ4ezwCWreGJzcNnAKWCjreSF28Au68T4JJP2wC8qg8jBnUzAI1xn cYiY88AhBGVg6FZIwC+H71P6ijDAKOh8gqsap8AlLgYdLrjwwB3dw/DtOQ7AKtmnQ54L9MAv fpNrO52bwCatbUlDNbrAJF5ggKN218AsmYMb9+6hwCdcgE6c4F/AMPzc00d/68AYC6vbwQVq wCaFfFI879vAHy0dUDK83cAnvz/70TLswC+YNeeRMbDAIPpQ5VUdIMAn66LxbmuCwCsguhkL BFzAJvq7j3r/FMAnUhforzURwCalyxREqSrAIf9ldEdD5sAh8rclMkVWwCX55zhUOCPAMWtX uNfSWMAtvr1PNUQuwCZtQoVD0ULAIrl7whpsgsAhYCQHs88uwCpW1ZoMZ0jAJTE2IRQfWcAo ezg4BKodwCO0AZ0SFczAIfDAcX9x8sApKHvBinPUwCnmgFS5IobAKbpnisdI6cAhjPPGuWKm wCS8u4BQ7drAKMqm/eEbN8AnUP8Ms3UmwCkIkLeEcsjAKCkB6Rm/F8Ar+Uunn0eAwCZgQEJM CTDAKWoA5trLCsAlltgAKlWBwCN0bbz4MN3AICzW+28+vMAl1VnRz4DqwChuu2UPDhPAJcg1 IIOuvMAi5Xj0Jk/IwCX25hEDIRPAIOEr/BxE68AjjgHgjss6wCIl0OQ0w/bAIlGPqtFDe8Aj O7hQBNt5wCLC/sP0irjAIXQ2B4lS+cAsU92NyUdiwC1nXgWAxzDAKldJH/OmisAvk1z38b4a wB652nl6OMDAMxqfmEdg4MAovlVRyJgpwCacpeGXcwPAJecer7XxncAiTNp0Ua34wCl5GaQQ 8zjAIz2jrAiPTcAm7NZyBSIOwCX4CkYM26TAJBTjnw5nuMAqWRuJqOqWwCmXhZeKoKvAF4f3 z8xBFMAlwuZzsQEewCiLKF8v3wDAJFjweeVIOMAkgZ9+pnjQwCVe6Yczy/jAJprILRH6s8An rGyQnWvHwCH4kR1wd7LALihXXxZ6DsAoN0lnEvrtwC6icaIuvnvAImBqiBGEIcAi9s14PIts wCOKo73m7DnAGdmUxRtlpsAtYlLZklHowCJ9mnNQomXAJuGSohKCj8AgcmBQX1NHwDGoBO8M KDnALFzjgZ7X38An9C0C7tW3wCj6WmtFAP/ALOAtvYvTV8Aq5+LBfaZywDDBk25KlIbAKjgG rl9/2cAmz7g15YnXwCi6r5zEA23AK3eHwyniQMAssIc3SouTwCWKizlNQpLAMZf1x8zfzsAl 1nG3CFLIwCir8z7YMYrAKwK0MbNs8MAwwDl3bK3LwCf2tU5Jcw/AJ4HQDHxvysAspCfTP44+ wCsbWzgMSvbALRW1EGa1/8Ark2wCmmpOwCgXjdtX+5nALhQutxIGeMAnAFp8mDsYwDIaQA4L ZnfAL9xgL1BWzcAjQZ/WTuAhwCWeZqp9jrXAJ3PXk2YOysAoOC7OKYNqwCPwp+a3Id3AJp+i OW3kh8AlJZYSSs+SwCbYh/PNVRrAI3oCvC8XjMAvbOQXAyuEwCX0e3SahnTAJQV1/5oQysAm l3c4tNQMwCrMeApoV6fAJOF1hjE1zMAxnbf0opD6wDHASVl+xFnALBTcoPf/S8ApVGn5J1UR wC02PQoxUmvAJR5CwG635sAqIeLFbCcmwCIt9mfTMq7AKQVcnelr6sAnxNbgssnmwCZF0SEY YN7AIbEgq9IilsAfdfxXHX+SwC0Lng6UzsPAKgscfKsP/sApcD4zxJ4wwCNWDb+eBCLAJe6p 0DBufMAutucusDYswCbl7tpfDHTALCVmGGXeQMAnyGCe6yuxwC4vt8XGh3DAIt/hOkwPqsAj 3pVrCLNgwCPYSkr6CwLAI1n4J9kITsAh424rJRQawCXaoL4xJJbAJR1mwO6mU8A181RYpxNl wCTPFBiuiwTAIcQQw+KFrsAxxPXHyScdwCeyZvaUtrTAJyjQfUQQUsAfj8+dH7RAwCstNtN8 37LANGZfzj03tcAp/Xc7a790wCDoYxS5QjXAJjkHsG7BpMAmlooy6iIgwCuwjaKN7G3AKhGU ktVbNMArM1acTFUbwCIuWWjdmh7AIoKFw9NeAcAp4L8saiY8wCYgm8bziMjALfhB88Fk38Ai /xWBbvarwDGdo4VCsSHAJB5OcuLkL8AnB8gMh/wMwClx1pDI7bTAKF/T9ivw1MAljc9Ke4zw 
wCMlsWSKkInAHrp5Im605sAnliZm2ldQwClC7RNOLlbAMTalMHeK6MAoazVaykk4wCTJtIKa /h7AI5kdkYw6w8AdnyYqS9gVwCaywnXSM77ALL5UngLOCsAevZ/s7ftOwCUUKCyeEKDAJuSc XJw2LMAmfzrHru7NwCR1NHrAvWrAJaqxYEiz08AqPOks3tkhwCg019dkgHjAKiHODXtJSsAw DIXVKB8OwCSBOiV+/lDAKO/5HlGnscAg/52lHxEywCSnHAivnirAJ9128dys2cAtG44DTpVA wCPup00ExtTAGQwozRNN5MAzjHOGuDk2wCcWR0kY+2DAMQlYwCwkN8AmWsj0AzSYwCuwe49t JIDAIppzMRKm/sAjKOCbr5CcwCtdq7QZaLrAJOI3JSUSXsAmNj/pvsxowCMcWVJnXVTAJexi MuOYTcAhwi7zbGsywCeEaPBgmN/AIopLERNucMAo8Gt/McMAwDDhw2OJ5lbAL2300yiH3cAV zr6iDtOkwCYxHD7SCZDAMgfTq1f3UsAfj1R3U9g0wC+6JNteAhLAJo2uwGKZX8Ar+IIMlxZv wCBXlO24NlPAKh2ZOGCfHMAmmH7Ek5FWwCYhtJES7AvAKSfn9XYpr8ApYj1E9dShwC8r3Uvy ibnAMCEmG6hN3MAjzUPrNbnhwCxyAcmrkoPAJowwV0iVfcAqI+EU76WYwCY6Gp9pOcTAL6ZL etkNt8AhjwJQA2dswCRb+/a4++HAJ1CwqLWcJ8AwMWxZepSGwCjndVgMq2rAKTJvmz46vcAq /4kU85YOwCnH+AHLfJrAMpzYdo/dq8AgjGiTnheuwCa+rsU9BJ3AKeBXUy5cGMAjImE9ka1j wCTpjT+TkvbAJGc4ppaeaMAmNWJnrbqGwCEzc+82ymLAKfCtjXBuP8AlORuoRfXdwCt5l6M3 KUDALanxhxDarsAlChds15b+wDcV3om8NpHAMhJcLxmXssAlJ5QgdB2HwCyi3B3+45TAMIu9 HL3HS8AgJkF7OxLGwCawWXVfa73AJ4lVcW/4HMAtcqfjW0kowB5dVRjc6dPALBcJS/l1wsAi DT6wkXwUwC+p7KUO9cPAJecoKnuM8MAuMAtKlfNwwCtvPAFZa/XAMRpw7/YSYsAmbLH5YhPy wCH0hzTZofnALCRah+0t5MAhaoXvN/kQwCotJY+A+EzAJLR+aLOA4sAkCU8XTXT7wCtdvj3R dp/AJmZHa1GAyMAxMH6bTrOEwCfh/COdVA3AJQsLMm/Ls8Ap6HWX2aAEwC6Uf4tYarPAJ5pw IRUG7sAmQjjdiBEPwDBVYceYNg3AIqf4yapFosAqCfSMS52RwB6HUEaVPSXAKOIgdXeGHMAi fuV4tVfYwCnbt8jD9U7AK6qzjG0o2MAkpGWVBhnywCgKmHXchw7AJpzC0KoYn8AttXYnjQSc wChjgxCBFkTAIAssN2KvQ8Al7Lh/3onewCd9CaJBPzLANAj2HgMCbsAtMo5b/u4awCd1S0NC VgLAJVmwpA+jLsAhlApXpLG6wCYEpvz9P7/AI4ZIGWShC8AzsK2+wlZzwCkLq1Fp/hDAHpn3 evyQzsApvuaOZJ0mwCa4EyFrf0LAKeqFKqpFosAmBtY0QGfhwCTt8orIDEbAJ+AXixNqi8Ai bCix721VwCJf9dkwoN/AJtDB0HJKacAsePoii4PswC4VP+hTJGzAMAqDkweSOMAnh1tjjlmO wCPS97+mX8LALVuRLBqUxcAlWN9G+ZmxwCtqGCToQE/ALpSX78AMc8AiQzPoU1lgwDFYIk8+ yHPAI3Xv2UlCGsAvqEOGfA41wCrHg/RLyrrAJ64mbxkcrcAsixMwnL2DwCVZaQNpKWbALMcx tbfDTsAp99IFPT9cwCZld/qJqJLAJk1doxF+RMAgmTvAjw88wCY3T7l/IJDALirLDAsfIMAm mEusYyPzwCnccK3bCL/AIgA7TImAuMAlvh6Uhyl0wC8VA1Oh3zbALji+hQOp2MAg2jS9TjPI wB5A0g6XnHzAJkzWTGxJqsAq9qiL77TgwDR1KltNz27AMNvBxYtpQ8AqUXAm4/xdwCuKAcaI jbbAHL8J7XUZyMAmo8GnjHtRwC0pe4YZqwTAJDSAWTYxAMAju2X7sZBQwC3jSGqDOcTAJk+D 46HhTsAmIo9L6/YbwCRipN22f9rAHn6DUvnaAsAmt/Jqf0tewCOf1VX3hwHAHYLwjNN2j8Aj FalGCxokwCI8q9AMUZHAIC2WlS8UrcAhBWW4nWVLwCXuPk2mAZrALcl2w9bOQsAmPTkc1NA4 wCPOAbXoUs/AIhba0brFeMAlHiAyDYkDwCPyZsNiIojAIlHzYBd7bMAnkV1YZJ+RwCSrXDbT OYzAJpeuNiEKtsAxSSc4XpouwCgymE2FwdTAJpc9HdWcz8AlTlA966/ywCJENtPJHBvAJ2YU c1IQ2cAn7IjliKymwCbkcnw64sXAHOCLoCBHPcAmLGkKUM0owCrbHVPsrKDALkQB7NLnU8Aj V49S6drQwCASxNYDgOjAKO9UiLC0esAiCFvWx3vcwCtOIY/y/rXAHLbcU2fBmcAs86TXXlI8 wCD7QCluRljAKZsq2HPt3cAw5OvsplUGwCSp/n+ZtiTAND8C5XC3McApxI2H9qBRwCqGCu89 wmfAMFYQUdIIjsAn2UK7bDgbwCbsQ+7zK5TAMNLVplrHxMAXKZtJ7GISwCGfxjXvf93AJ3ES pqwiGsAlrFYtA4wgwCZxJIa7VuvAI/FlU96a78AtHymlqT+ZwCa781SpjwvAKEMlzdST3MAk ZZCE4HakwCDcYeicxvjALHXC0xqjtcAlR+vtZI5SwCplwQlXJ4LAI39nAfoEcMAnX3CcKweS wCk/B1VZMtLAKLvhysNP1sAzRK9ZVyp5wCyoHOIy/RbAKAyknbUd0sAyERYIMfGywCijCBCB jPnAKhwEyOaSSsAuLWKIwpSawCevTk7dAtnAKxnr9xdN5MAkxZcEHzKYwCyEu6r9Y//AJpe1 QrVYLcAlGw7f3WFPwCMsmRVQR9DAMhlNypZIasAkA4Pn4+mVwCW6lYrudqPAJAzYu0vmXsAp bI3b3OuQwDYH1UR5ttDAJxS8CPp/RsArzSCzZi0twCp/a0DQuQvAJJqDXIt7ssAnPg96bFP0 wCxl+4Hr/HjAJ5QyEHVqfsAgbt9JCrLbwBsBN9ttZ/fAJUckROwZfMAlJCtjXpfowCTKBRwN R2bAKecatMFRo8AmTDeozKc+wCQO65sl6FTAJGX8+eLRnsAnxE2EQJiswCnie8h0xRvAKJmN zislW8Ak9S0Meu5gwCY+Vbe6eMbAKH+zXrxSRMAsr+gKaDpuwCh7A8KJTS/AIaQwEMldCcAj uyPvYZLvwCScr1rZKsrAI3eZFwxlgMAsq1UT0+JEwCiYeYDIc+fAKvg+LNTrmcAjI36eFJcs 
wC/DJPmnAITALKcD2E9tL8Ao8lD28lgZwCCBbAWtngLAJBIZwQGXCcAjhSmuXxxqwCuRnOug uvLAJXnhlGXlccAnZivfZ18NwCOaDY0+27XAJzGL/uHYAcAoJBcR/MHowDBPiSvl3qfAL887 YgAm5sAp0gngsYt8wCM2g4XgIrbAKghUSFttisAopuxtZikXwCx/6XCZoivAKPd6o0NF08Ag rsFtl2u+wC1BR7FkdhvAJjtjw+unI8Amsc3ipWQ7wCXxKp374FTAKU28wxARSMAtZ3zrA6Bg wCKLL8C7r5jAJrwcJyfgicArB+OO8f2ZwCNjsQjWMT3ALE6Q6tFyRMApgTXqfS1+wC27+bB9 xgfALXjl1Ot9r8AvAEH+AlsywCZ1i1uQ35TAKJt0/qwmpMAsbIO3SMHpwC82bAIjkZHAIH7h IwyI3MAmEdffFnTJwC0MWDBDrnDAKNVtQEt61sAjXyOjrC7awC9iADvz9dzAJ1w3L7XCu8Ap iiu52s7wwCX4OCYCN7/AKJtjoXf+qMArTXjYUCETwCqYbVbASQXAKQKVYnIX2MAsIZgCAyce wC28hZnkQB/AKVYEGe9vssAl/nDeB467wCdwXIaOfXDAJ7kL/LbevsAt0WP2HqSNwCnzoGfH AJbAJEQS/Fn73cAq3RxDbKmZwCYtLhfyXSXAKYb9lN3nuMAnD/6fhZC0wCKM4cGDytDAJP6d YLAfDcAqOo1KbctgwC9dszdYQ5TAJbCIc99aUsAjm0KEGdjmwCim3EA63q3AI+CG8V4wH8Al HvBKUxbwwCknac12thfAIpBDeJjBC8AtI0AXY0dmwCuJdlhUphjAIoa75SiFvcApq6PrHRsE wCXpmEkxKObAI8CWQwJq2sAonzABrrXqwCl8DfO6VQjAJnpIryuEEMAvqnWB6sk5wCrp9mh4 FpDAK4WPip1aNcAhTizFt84+wCEo2Is5jJrAH/zFxhQ7McArBn1sbHu8wCmcdgjWWDTAJkGN bpcu9MAiwmN61SL7wCPhk/47TJHAIiJ/RcETLsAl/9Kk4PtEwCFbUoR+fFXAIF+S1K1LOMAk MGFR6fLcwCnc0B9QbLXAKvn3aLBX3sAicEAJXSvKwChppZbR8yLAMAor18R/qsAlhTYYOb8O wCd8p3JVV+LAKo+j19fj4MApn5EBKPNGwCNGbK0IBHjAJxYVMyh/ZcAkvRHrubshwCO2Jvm7 aTLAJPDuIANfEsAwB9TQuE+LwDCu/lBRhA7AIiLbZP6+v8Ak9M1HwjOvwCUbPbKjjy3AIirp nyNFgsAj0xfBsONOwCiSvSOW61LAKvK0j/qXk8Ak7ADDd3RKwDID4RuqdHfAJwvXkSx53MAi MYgOPFDewC+FQkiteOnALQVBR1WArsAoZz3JOb/0wDC9jtK+/d/AIXWjP7I2q8AhFOuKdrjB wCeQgRzYPknAKXcXa08dDMArvWyekFxGwCv79P42yNfAIq16iY5rbcAmJ5oT/VsrwC+OFpy3 9QbAL7WaY5K/ycAkt2n+KfyCwCh4eG39RQDAHrszyxfGbsAmiYs0TTt8wCGg78BaoiPAJYSC P3yJkMAnq2jgHmMKwCWCeQqV+XDALcuM2TcE5cAZj7ksqd+GwCc8YCh2YVfAKizbDqpHWcAk meZAhfI4wCQhxL9co5rAJ95TgLpt8cAr+MKxZ5gDwCnjLEQbHLzAL4tlIBzGP8AjK5Ne88Is wDK0p7VYYvLAIJerwCeW1cAijoAiefHIwCNlImELyNXAJOz5FMgUoMAuNfblsYxiwCx/oK2u 7TfAIb5z/6OnZsAnmDInG94YwDFmA17+67PAJajoy8S1JMAvTbaN2ycdwCaxlZXIPmvAGL93 BnYRFMAqbqftnawTwBmlZ1r/jILAI55irGfEFcAr/JOTD6IcwCffoamPMSLAL+SR9rEOosAl cLTDeskPwCFP/p2BuW/AJgUHWqh+QsAcZnis/w8VwCPS2/diES7AI+djk8ItTsAiqfW9rxe3 wCXB1wGitu7AIFBIY+b6XMAyZBmtPEn3wCa6K3vneorAKx0wUM12nsAqGGsQ/MxzwC2nTnsr pQzAImpo/K+ghsAvRQehmn1bwCdKyfBAN/vAJtpgORLWzMArFar2smOPwC/2cNV2vmTAMQRS KqHG1MAsZX1p04tewBs8UE4vf6nAK2JtHa5Y9MApBlNsti6FwCE1FmknnJ7AIfidDJJqSsAk fl911C4MwDg3blpX2gDAJOpjNzD/l8AjtkHtHoy1wCl2+dvSH8TAJ6SwvNICEsAsVYJpPGb+ wCJMSm9LwUbAMGLPx48c78Am8PIT80zNwCVP7ARWeBbAJrTVnz6kTMAqvBAmfYmhwC5+zVWN uaTAJ1Y8YwHbjcAk+irfyzmdwCUf+oAEahDAMNP6K/GzK8Ahh3OLnkjKwCxavQWLeKrAJPQ2 94CboMApEY4lFhTcwCGUwi9H5inAJgWMzOE0oMAyuiH+Lr2LwCeSYdzufe7AIs3UdP+WW8Ak 89nYJXPowCXGrKx1lX7AMfSE022M+sAkaAaGfPR7wCpRhE5u5o/AKiWln02Y7sAoYG+Lo7Hs wCf8HjMSnHzAMkOAGUnxfsAtrMzkmlPqwCgwjSfKwmjAJPObxvoru8Al6ofvKKoawC5201aM LorAJaTCuJlua8AtvoVLf5GuwCjXZyA6O4nALG0QEGeNTcAh6wd1jST9wCtZFwRbLezAMAbl rSqV6MApKtoR4pgHwCW2zhKBYrvAJpxZfV71acAkg+gy8j80wCg8F84QTmDALfAwMh2IMMAm Syl9thDvwC1utj3Dg87ALTRaFmdqKsAmFY1gol5ZwCy+Nn5Vma7AK5jQLcisDsAanrFfLvrA wCMLPd50CcrAJjRmDz7CxcAnR6noAYAgwC13wE5xTUvAI1in8ttfD8Ano5ORdZqcwC26UY2y McfAKDVr681CgMAoSMu085JMwBhsSGL9JxHAKIgYnbeE48AnrMShlFWNwCiTDlzVClHAHra2 xEC2JMAtduOtPsa6wCY0K2fqUu7AIr6V5GR3ZsAkmCiuCd86wCWxiiThVBPAL8FEEXD9rcAj 55IUSPYywCFkzOBYkirAI5xq0TM9hMAvJbom5RcQwCom9J9rr7LAJ+k/KZXZZcAp6vXViSAy wCAj4UrAdpjAJqu21pbNS8Am6wRMJxY/wCsg7wCyhgvALLiWx0sfdcAwZ2IKsRRWwCkvnARK nXrAJQ2Iwjv+UMAtwN85//qQwCMgn3ZkgJvAJOoA6Z/FAMAkNUJaxzzCwCA1LRJrTFPAJyGq wXKhxsAlVhWbGQ+DwCNoooK6KvTAItDRwbVNTcAnTRvz5oytwCipvIVgF0zAKRVJ6GScZsAo WpbewSWIwCtJNSww9DnAJdkNPJDCksAt010oyQrKwCdRqZd38N7AJ2nBpma4rsAnwLAao4Bl 
wCct5icJYk7AIUJqhr8uS8Am+alirM05wCMyQdI0zmnAJ1lrZUUijsAgAhQ5kSECwClFxZVb +N3ALPChZcDVYcAmccZ3hNq9wBwY8bU4F+vAJA/w77Rg0sArk7is6DsqwCO8EO5vtFnAIKim DSpibsAw1qug884ZwCorFjDKdg3AMjrhDyukXsAmkSIvI3EiwCaBFxBvdhPAI57b553yasAk VptoypdrwCU+a1E9MyDALZw5HHZrd8Ak5BQhvMbCwCywY6tC91rAKM0XIP5kJ8AnnEBzUoHh wB+fwBbczJvAHfyyFcYDXMAmJixKhijtwCdTqMNeyrjAKWxp1uTPxMAsOMk+ewFWwC6T+5Aj dKPAIUJxyscNGMArAPaDEPFOwCoa285U7GTAJYgIEIksvsAn0o6BbIbkwC1sIu6Y5kPAIaK6 E4FZyMAwGY2sSZLkwCdWiLj/BebALynZr85WEsAf0BNmpgrCwCtkW1UtuObAJ6i/WAwuPMAn JJDpPXbjwCtSXfu27tbAJ40f24QV/8AoljbKkhtcwB5YagKjYR7AJjSK3ibbmcAi+jkMN6lV wChMG4XQvMzAI0NlNtSWSsAoT+PvMBBwwCcUYnvXA/DALOvHvMIBxMAjRPmW/d2ywDJUtAd3 I4fAIrKJ9VWVEcAcAml5aJN6wCgU22UQoDfAIImA0Ul9RsAndgQ4UmPswBhrW/Q4K4PAITab KFNdFMAnHkQAOZYOwCiHhN3dgC3AKD+Pfn/FGMAm32x8dtLlwCXgSN3fSWbAKREgXgHhjsAq v6+3mDoqwCgz5KO6wk/ALjEuG2ovEsAj8dfoNsHNwCtvUecJ0u7AIWFEJ8Ey9MApX7Hl15I0 wCU4YX24iYDAJiuiUFJmMsAoUqZ4hX3UwCWLuvEU6EXAIzRglkbK5MAtVWdh3OGkwCmQw1SX vNbAL/dgqVhsjsAnsawuDuq+wCsF6U0VQDXAIPKR2NhJC8AnvNizla5+wCevt0HJ0uTAIQMl YAPleMAnYYT8JfhDwDF8Wy9t/WfALRHl3hcfhcAr1EAeEJmBwCHAYy1WDqTAJQo+ywgT0cAh Ye1+AgxowCP7n7hcYMjAI4yWLXMt9cAhYzgOsb5zwCbn8R3cWlfAJR+5urxny8AoSx97YnfS wCSDZXw7gzfALRdjNzVVScAplGTzVjk1wBwTFriOQDrAMRJCFUL91sAzumeKf8X3wCU+4ssF lh/AJi1WnO9bOMAitN90xOG2wCy+Fs/5F+DAJXQZYiGOicAnVedaDO3pwDFLmP+hxILAJ+Hb 2c4BscAvKvFuLWcAwCobShAYcpzAKD/VOlrDDsAjQ1pgdShjwB8R7iz6cPTAHOjWHwq2qMAl 2R0fO1lfwCsNOIZR587AJrZWpxsYnsAs8t+oxSjWwCDJ/9Keb2jAI7Eyhol/tMAqSV521C6c wCy34LflJwXAHjL2FUoM3sAsnKzK2wdpwCi9LzToXw7AJMA5sJksjMAmGYRiZ4iCwCGsgkr9 V4fAKJ5jvzV2SsAiXidy7LR5wCKBOW1SRaPAJylGruS9YsAnW75NJEiRwCrO1NX2llLAJScf zstc3sArgW885fzKwCkLMxcOeTPAKWnBXURi0sAoSOIrRHGYwB2TvNLawyzAKgPkFX0l3sAo IFYKou/XwCWzxxQt683AIz2rDYivtsAkarvjp4/6wDFeLjuOrunALBnymVeSXMAiAAqcftzc wCm4HP8eQKLAIvt3fM93IMAqiuthq38xwBlmjtdT+jbAKM0b7hl9uMAi6uCHiJ6dwCByux5O OInAI2tYdyDbWMAgqNCXzayUwCIBFTR4wPbAKDcH5IllEsAr+LZQfgASwDDY3pMA/5TALlp3 KZHKWsAoVd17n/ofwCkQfJSY5lLAIdmOL/OWBMAuhzaJ7S15wCwxgDlXXR3AIhFwNKAAh8Aj zDytPxiJwCuAiLJccZ3AK4DcWO8IMcAq+LaktARswCiWnEJ+Rj/AKBomnhkKIMAjTIrBC8VG wCqk+SgMZADAMOPqxxU6wcAo6lXkGVUlwCUHGzqwxA7AMlBbVwV788Anxp0XvrnIwCl/WD9q YX/AK6ETHmtX0MAl283PS9vJwCABhIYkPzHAJgQx6dLTZMAmukEwBAPuwCJmGgDXbEDAKzZE rpIuNsArdYI2DJhCwC/eL+bYbKHAIin9+3omj8AgjaRZL1nkwCb3PpFU4MjAIq7PIX9RC8As hfEzqtYowCpZ3dtf2XXAKrX2+6h0zMAoYIsOb7LWwDKtyN4st/7AIafYUeUaosAmz/r178ch wConeUWIIh3AL0JPbFpyXcAvTaKeyzqmwCMBS0L0eX7AJsHfEwXP/MAmOf4gG56lwCL4/1x9 0TDAMOApawzg18Ag7X+UXryVwC82ZdbmAtTAGlc+5mDV68Ak1bc/YBGRwB6nZuYaJIHAIsOF DQSn4MAiqY2NsiuYwCQLH6ghWjjAK1bV8UygqMArrmTJtUTXwDM7b8p0/fPAIsRCKSI6OcAk n/0COc2BwCEKePs/4QPAL6HCsyPBzMAyx/geutHdwC0TTTPYkIHAKjBAnQLVRMApyc2nzjXs wCcNiLXv7+DALo5jIsSJ/sAy2/oPMMZcwCG9sDCsVePAHxAb81A+aMAn/HrblXCywCO5ZeDl RPLAIyF/25AdjcAlYXU4A26TwCXWd5BiqRrAM7bbl4XBM8AgkFYt51ZVwCLgT9fDrYrAKxMf cs9bqMAkFfLIqL/JwCFEinRTgEHAMSngIDHplcAlFL6uo+khwCwcDPfCFbjAJ6N06xuBBcAw 3nuFCOyHwCVYeTjbR0XAIe6jtu3CRMAmSHGu+1TLwCWGsSmRBSzAI+WUBd82nsAtoIsGdcC8 wCMTPhnhxFTALtNGk0FKzMAw/+hz8LAGwCscXbk/0aHALtbK6it9/sAez91HPyR2wCzsQEIP +3/ALuAHMyjc7sAk4UVd85NKwCU5XeSjJrTAHvaALgam4MAkql9cxAm6wCc9FrtMYwXAGRWT 2k8zjMAxVKdRKB4awCYJdgm7OHvAK3o8uglStsAhTv5ciuFdwCUuZSb08ZfAJoDVlal24sAq plMHkyudwCRNcBCnoqvAMZLzS1ByoMArXM9180+VwCB/6prufxnAJQeuVcG6vMA0Lq+4nzVd wCMGeg76GbzALdw2KGeTfsAnBs9aJwquwCnmUYOTecPAK6mNg9OFqMAlcVVwb4WXwCuV2Xhv ZXbAIRD00Qt9XMAla4Du73yWwCTkwRLkeufAJ3kMOLuaX8AohPujzDbiwDhjYIOwCIDALlf9 Mbuw98AmcvUxCX6bwB8TOddY1HjAISVjZpM7MsArxDeBnyyywCScYlOEu3PAJrmjTiiH4sAn qTyTag6OwCIftVARN0jAI5PN8nWhucAuOvtNpSr3wCU1XHSgEvHAJhOkz6v9/sAqTS4Ddo6L 
wCl0ihe1TKzAIx8orS6T08Ap6zo5XYjywDGXYLWW3o/AK/AE8yeWcMAl6uFZWlE+wCR74CqQ RkfAJPQhvY6Xo8Aqm4FmHcJAwCPVxROWWILAJYv8WlyV0MAnGvKmiJrVwCk4ZEFcN9DAJt2w XB1Pi8An6r6BifPvwCe1wEU536bAJFcOfaZ21MAlxkzWXkfcwCpCXLrvOYvAI9/hJwPUIcAn UBZIBrZvwCFjA2LXnWzAH4DKtNDgf8Ak4WFyzDOgwCOkdGe+ERzAJQUBG4I4+sAkFp77fxtY wCGeMhJY/LLAJVRfimt18sAkGDyXg40pwDTmmzMBChrAJvm4un4/i8ArwOyaaNawwCizjwGb p9TAHsRlO51daMAnROcroRoDwClLe1yDlj7AKn+w92KqHsAq1E9PQ4ViwCewoeuEsjLALTun HQ+EQsAizec1vAk0wCUnnStWQH7AJ4AVT8Hg/cAtISgMDlOpwCqGZhKZk87AIuS8YU9q58Al TCNSFm8bwCXgrDyCXkzAJeR9LfTKg8AjLpObO7GuwCl8qkqBZevAJwNhMOm8TsAofbdZwdCa wCvjH6bc7FfAJD5k5voEuMAq9GBvAgatwCLUg8NmfWLAI00IZkS5KsAvhnJ7FXKhwDAt+uVH TrLALKs8BD9/AMAn7zz8A3PjwCiJix0F6EbAGQ02XBCa7MAkkRiqFd9fwCqT/SXI6dnAK4jU XA0aPcAo5XQyGrc2wCLHKLoSm4LAGWURTPkcmsAjqiLkseC2wCmPProticvAKjrM5CI3PsAm 28WTpo+uwCpfQsiQIQLAIjLWTwtCUcAndvbcVxEhwCg1MYb4RSjALohCe7AWX8AfsyWRU9sQ wCZCYDVG6R7AIbnedNAPPcAm7nxhM12CwCWikfA65GDALLrWLBn298AoikydnRT9wC7ttJZE 42PAJg+o4BVzA8Aj/DTsv7hmwCYVjJGC/DzAJvq6ewObO8Akrnmz7PAxwCMqs18L1rLAIzIa RUe84MAnnVGaFnOTwCsYf33of8HALsGMEj+mNMAopJLJYAYRwClYNIIicfnAJsGGo4yJh8Al r20XWVEcwC5d1Mjbiu7AJLVSaSNdtcAlcUoyhlJwwB0MLBESkDTAMNzDkE5AmsAkPFHt8YXm wDDvrK/sdW/AKDhQGxF8SMApZAOvdleNwCds/C5oqz7AJPZxHjTSucAjPt1cdasCwCK6hK4O srnAKMlOlqouOcAn19GiJ7BTwCxMJ0B+tWLAMKeOv16OyMApk63GeAZOwBzJACzlGKvALEuV +J1WocArzotrvhoxwCgR8riPmBLAJC0+4Lk6F8Am66gkEoMpwCMaKLhaLwfAKNOEkY0bxsAw eCaZODlkwCLb6r9wW6zAIMGEIDHOqMAxVOayKUl4wCrar0iKoLPAI18f4AKqDcAtY0cjBha8 wCtOi4KGqnHAKVjRLQkzyMArl4NSTx4RwCS/Wgtvy/LAMENfrH0ZPcAn/cjhBVPGwChgs/Gs /PDAIHUFJDV2wsAmdIzI1q6rwCfefDjo35TAIOBwWlvFksAsLb/osby/wCox7fUK987AJOS7 mAbnF8Aj683Dj45bwCQzXrioe6LALu0IgXQduMAl0JNMLKlKwCpgwTmfW0XAI/5l15wX4MAo 9VDGusLWwCnLdr0P5KfAMjyVo9+bxMAwkvVEJFqhwC7QcxNp5RzAJa64pegeAMAuntz6TUI8 wCSalkImSEvALTPuSeCYN8AmIVDRS+/XwDKq7mj2u6PAKW89+XKUqMApibl9xEcswCU3nDda wZ3ALbL/XdVs+cAh51xdXIwvwCYjun060qvALA+aNJt9osAnz6bwvbDywCV2k/Lj8HXALP3U MpF6EMApowV5PMlrwCteMgxfWa/AKpidTH2N28AloIhTpdAjwCgS+L+XDjvAKhfQLpFbt8Ap PotZlfSKwCvcGTZupALAKVJH3KozcMArB8LJZuStwCT1aIvg+pPAJhsZ4s1tesAj1A4lj65g wCgbqnphhwTAJyUqaFFpMcArgmHQhfxdwCkX+0pUFJPANMUyeLCB8cAoCZlOte3KwCWt+orb a4jAMfhGkzfYrsAmOidYNsxewCgDNHthos7AIqqP1du6l8AkoWBSDLA2wCdCCjJmWBHAIeDH LeTm/cAkrji6Gc19wC9EfiHP2JXALrj9yUaqQMAbCANAUysAwCJnYtmpzJfAKCn9Hd5QD8Ah Ea+YkFeBwCMOKZLntBrAKQvGysBjHcAjMye3wxI4wCF9z/7yQQrAJrrsVTrUkMAraeRnYhLC wCveD8ZQbWPAKFaSU41cMMAu9lqkTlApwCdUmy2fpVfALJS5SEmuWsApV/LJ26cywCH6iorM x+/AKhP7LeI/+MAl17jtMEoawCRkzRDFwTHAJWgpFTXtksAt8en7U0QEwC70VsCu34jAJxUJ PenAGcAntLlYXSRSwCZuQtN5KdbALS0MbiXr48Anqp2770p6wCX4hVutIu7ALmAOi8bejMAm 4UpEy4rGwCitx/wHTRDAJqi0N5FcsMArk+30fvGewCY/q2Mso83AJVaYhUdEsMAjpiOYwTsu wCbWC47OJ4/ALYlLlO58AcAiDnaw0a/twDSACHiVLM3AJeyl5eAlVMAfVXUnBxR0wCGGuP3v fDDAI1TJmxHGu8An57Zi+CXUwCAjdBbcNofAJxSQZJeypsAwoPnZQbkYwCGQSTRjVJXAIdYF 4Uu48MAot2LH7e01wCCL+0Ko1sLAK8/xuod5/cAyDThgt0WQwCG8iu6b/UnAJRbHeLlNjsAw IjZerjXowCdya6uVeHXAJ360Xwxuh8AsslnTVEyBwCicZz/6PdrAKCOk+w1bmsAhUX/9O44T wCey8scwCMHALhavZu3LYsAiHUHM78I6wCyMhSNPECbAH7lqVVsRTMAkozCyi6uAwCeW0YkT fiTAKR0FOh7l+MArZhO15G50wCWQtSqT+STAI0aFARNl5MAis9bexXoBwDIHJRcOot/AIZwQ B3SVd8AjEdVhb5gDwCG1nSXNNbfAJSUOdwvp1sAhEam/1gxHwCPO6JUOx9vAH1SWDiBmgcAi ARBdMayvwCwLaVwcwvTAJ1pu8KFbB8At8UywNqxDwCuvwz+2LtHAK8tT7zimksAmMtrtKuUC wCRJPGXDGEbAIinbXdYZRMAowkvHOFH0wC3NhphV8aTALH0I3PTXB8AkKhklxFHGwCjlKW1P RtbAKuRVXGtAxsAmOTDJ8UtGwCnl8DJG99LAIuaAT8JRisAmG6g9gzzFwDD3XpTtTFTAKzMQ o+xtbsAkoRp22idMwClA3LXV8oLAKRSNSXS168Ao2PktoYGlwCZL6woa2YDAJi7OuodN3MAu vcOry6jswChDqbbFZqTAJIe4PKh42cAqigzNzcnqwCa5KxvvvJjAKPwIHHCOVMAuj/seQW7I 
wC52V0YYIVTAJsnzEoCoqsAhtRwLBbdCwB8MSZhT7AjAKyF1YlC4DMAg6xAB8v3cwC6BKxd0 I9LAJrci5TwH4sAiM5sF8Pi7wB1io/yBIsXAL8/ILlLm/cAm9WH9yIvfwCobjKgpu07AI7Fh 7MvxasAjGUvtf/buwCKlYVbBFBXAITvQoztSpsAm/PzBOignwCni2gpwxqDAJc1A1ZUH68Ap ik+eVT6kwDBKeg4MtE/AI1dPVVBedsAgZLYeEYa3wCw/WjIKyvnALxbbzexTV8AvlCuZwO2k wDA6h2aQfMHAKiaFqN9twsAow7xD/crCwCn5hmeRBenAIuUg+iFKvcAngZ5vwiMuwCQQUHZj I9jAKZIySomQLMAemnCBwZQGwCd84da3UATAMPSu1nC8PsAju9y1CQevwCu4bulOwCLAKinZ U23fKsAqmjhia4VJwCGZgohHKpvAILIdqjB+ocAjpF9G6CbOwCg4pOziYCrAJ66HIRytB8Aq iLZbiWY7wCN63fho57PAJACaBJrU5sAqPRmEekg0wCbxFnV4J2/ALUpvKm2yucAgVbj0w1Pc wCwxckwveuzAJfFgIlMB+8Ak1K+wSJVIwCJQxMZw0qvALnqpLcph7sApCyspqpEYwCa8DKkG Sj/AMj5F/syRWMAk9KeNl6OGwCzEvXKdXvvAJYZbQq5PJ8AyYufDjxFHwCOH8sQ5zvjAHve+ oQiXkcArzIZcq9DewDN6awwXPy/AKSUfVMG9jsAqpZONHcI5wCE9CPj7B5XAKNQf3+QMXsAl nFnOZGLfwC3DdFkSXPfAKYbODqlP/sAiS4jaeY8uwCkTQ+X66kzAI4HXhim0jcAjxnPfHk6j wB7YTN+c8EXAJ3gsWdrzfsAj1V7mL/cEwCC9sig7/aHAIU7HdLBtd8Aio22ZwkGGwCUdiAzK 1czAJ58UGK9/9cAoXZQAzjwOwCP8CoE6YNLAIlRQ88fzpcAlO+w++R9owCzB4wCeb6rAJES1 lryc4sAqpoXcLuCwwCmhOE2ehSjAJqSPEjFpvsAnzHkpBhV1wC0UefLjCkPAK4QRH45Rm8At gNapl9aRwCWoQqAkJsDALU4CxGMz58AnWUT7YoxewCYlvcueH9zAJj/oTwJPr8AoChNMV47X wCyKIh2ECXfAJsJzaNpXAsAlZrm71xF1wCIy2a66dBLALHT+WpN1VsAofsNKSMevwDA4oK0b Ar/AIZa/262STsAi3w161XvdwCqvIIczueLAKXXSm9A56MAw8QbZWBn8wDBNjuxJ4hrAIJyz 8g8hLcAn/jA08dCGwCKsAuyyUd7AKM02iaGp7MApbZgJjOxwwCCI3CEBIbzAKje6uHYHF8As lIUqdlaHwCi0h9CUAf3AJhf8n9DHzsAh/+Q1arK3wCIlekh0ujHAIMdoTNP7BsAiSVbQbrr6 wCOhoyDXsdvALdxDEsVT/sAjzjocagDJwCIBQXTEz7XAJa5Mqdk+xsAj3QDC4eeVwCvQtHVX F7bAJ8rVBTjGosAuAHPd8hS6wDHHFto1M7/AKRlBTHcFkcAsggtoWL76wC4W3Lzs99DALQur e5kNVcAzsckbi8OjwCmx2EVVpivAJ5u4E87Ho8AqHktMSO/BwCw3ULmM9fTAK2+0nMvJLsAq 7oBYX1aXwChKXsy+OeHAKrx69jBZ8MAe0nGc22ycwCg2yVAHYJrAJEH8exlhlMAxbniEhsIN wCYwLb8wD7bAIt5AkiDGvMAmk5JeeEsswCGeYjKg+kvAKinv2mS628AqPENnvTQFwCg6K0iJ My3ALVLIVHW1W8AjLiik3M8OwCSJs/BHdHXAKOilFwx2h8AkrrRCPp9wwCNh2A6oK6/ALQ1H kaHkKcAsOA/UaYIGwDDDQdMDiFLAJzVd7YsDqcAq1JObAi/owCIq6NKrMC7AKcnH3BVcDMAg Lq6n4UVywCEj6J8nDBDAJcsFrSXODsAnItK4jPk2wCROfZ4wyBrAJAFOGI0HwsAnkih7wlvl wCaatjc88evAMy0J0ySrm8Aq1XMhqAlywCctII9xY+zAKfN2lU5EP8AnWjdLJSsQwCL7PvWW kwzAMCiUvgVnKsAkM65MGnIqwCp32o6Nkz3AJiEQ0GaW1cAuBLZHA+GQwCZ0xXsYfE/AJOY7 cIu7iMAg2iC5GCGCwC50y5Ki49DAJAvkSAkJWcAg0BhfV/VgwCXIRyFHA3LAJCZyxdccusAo SA6avWKCwCL3HRmFpynAKRIgxNtzdsAjuQ9J0z/xwCR7N0skyuXAKZskPmGPtMAvbHpRncs7 wCItCje3WSfAIyvujMPjEsArMnAgnS2swCN6l7Cf/orAK8N4vRoVX8Al7sv+4pn2wCvVh0hE iErALaJufp2wRcAixxiKOYbcwCsboFxkekLAJznpj+U3xMAxRkfN6wnuwC8URiQr/7LAKfFp 2snOecAoerEcWVdPwCkWIvX+uWvAJB7KVWYFuMAkGLzB/iGmwCWLGN4a1b7AKq0Dk3d678Av qm0KPx+DwCKQj3TR44TAJGmtHp65ZMAwEwsmICZswC5mj246KTPAJ0mTlB2NGsAktLrW2ljT wC2b0pyvZZzAJIajshSetcAlTeUXn+68wCPlXnCWWozAMODf/sjccMAa9HaKgHSwwCwOtn9x WSLAHC4657XZcMAnwlAK4kuUwCXh3hlEM6HAKXYwiEyKD8AlCJNVBGupwC4zoTN0w+7AMOmG hrA3AsAiv7ctcmuywC+Ee39nmdLAKFgzyPhPgMAo9hUhuGhnwCXPb1OwtWzAJXR5EXVofcAr dsmEiduewCjI5vQiDsHAJ7erMOWa1MAl62lzG3ZDwCEzkTss/yjAJ+2Sq7x0TsAsJPkLZesQ wChUm9HH10nAId1Zsnm1/MAnL9XPaEcmwCA8cC9oTarAKlZUErgMFMAqJXcu1qDWwCqrXiqT 6pDAK1p+ghIF9sAtpeku7i/WwCbsCBQw5rDAJTs/IG1fCsAu286E9ibcwCYslvieDbDAJNoE iISpwcAn6h3wUO0RwB8Zt5iHBY7ALEantWz2qcAmaYINF2nBwB4uc2LVF9TAKh6B4YeUIMAr D2xZcb4NwCLxOwg4Do/AJ84nvoxY1sAxFIYimCniwDB/0pcrJGLAK/RfB8P3xMAkfeYnxi7u wC2AmYjJcfvAKKtiK6Kvn8AkuJN9pVbVwCWrZsCL5U7AKKUU9scXi8AkA/p0AzF3wCXWJz06 +ErAJu6Unmq3HMAmVIT3adrGwCOTmFEcLu7AKvvTCt1XscAtTQHFV3ZkwCWzFXAi0P7AJzsV p45lSsAr7xJ1kQ/awDFfG2viTF3AI2oFflkgxMAnO6tI+zs0wB8UqLSCrgLALfoHYfWJjsAt K/xtQ9tCwC+ml64+cwbAI3DoSE2u+MAnLak3Nj/+wCZpBwsjW1vAJ6TfyvQ/GsAwKB5R0j86 
wCCqSDizlUHAJdZj0xGnLMAkQ1sk2bvrwCVlf8SUlsbAJh9TVc/vQcAvX6PX1yyiwCqE2kxf DCXALqSvrJ9Uz8AgTcRDlUiywCqaL9bP3mnAL2+RBdP1+sAjvSIMjRgOwCkt7l+IUh/ALv4M zDdaRsApO2Z03PF8wCbJPe4VqYjAIWXMxSoaNMAmBFpChK7hwCsDB+CXbYvAJ2Mm41d0g8Ak 0lrUD/RmwCIVBNOMKtvAMLQQx8EmqsAjpXhAbxnSwCLwufLvasDAIfm8iY9WFMAm93pWW8NE wCb6uZU4itbAJ2MAqxus+sAtHJYavommwDIS9DyhnhPAJeqsgiyHPsAwaLsQ+6/qwCYMyPcK S/XAIu/dIIyFxMAjXL2JEnmRwCbW+gPIa/3ALcmVYKJKQMAtV5TkkTS+wCp17kBZNFTAKDIU VknQJ8AoOIb3o/R6wCSokKmFuYzAKrybA9ofq8AkooXJwdkuwCp0A8RZ7+/AKCunHigpJMAk Crfh5CEAwCMBh02e5M7AJlCt3YIpF8An8JSWJR1ZwCQCnKlZNz7AIUybpgZ+V8AgLpd1Ir0X wCLOedTpgXPAL5CY9JUFc8An+JszvDlDwC0fk16k8ozAH2KKJtELwsAsdn1cYnzNwCOuRJ2f nvjAJ2y7reeinsAhIiy2pNJFwCoft8uvZLbAK2wgNmhxPcAsxexv/xIfwDB1jS5kLUzAKbBQ OMuVpMAq/Bc6bbqYwCV/oo6GQyvAJOLPyV77rsApS9PzkoMkwCfsVdK6XorAHZN3pl6s+sAq JK/BEGjBwCgJN4Nz6YjAIjvII+4XwsAp1ceVrYaEwCey3TGxBrLALYb2k/pw1MArrd5VTgeV wDIqQpq1R3fAIXLzrjmM0sAn3wAzOZDGwCNn4qQX46jAJon2PkMH2MAnpP4LXmLwwDF0Ebxt ieTAKESfUISNecAiNe/0EnAPwCek6WKc2YHAKduNgBeANsAxMqADiN0UwCnIiQD009XAI0RW 18PJuMAhtUxBZs2mwC4n8TTNPZrAJ9X6zraHSMAsjnxJqtzBwCtCUGZ2ZQbAKi51lBAY6MAk m/Evvo8owCTv/VFkAWnAJGfNbrMoKcAjDKp2Y2oewB9oME70E8DAI6x28ELH5sAnaL7rOu9x wCtda2YlkOrAJt6wwhxoOMAn5PSZgQouwCmQwvGbPNHAKLgYfkX92cAmM59XRq0dwCS2us1C vajAKUcQLLxsqcAqHBjGlMOGwC3EwdXzL83AKQ3u7GkzXcAypWnY2N7XwCPv/rT7CRrAJ512 zUyyZ8AukZm2zh8kwCsCCknqp8PAKObSMj/tGMApwYK6rzlpwCQ0cmyj687AIpcmbsoeF8Ai zBLA4ZOXwCkrcQLbmMrAJ+N2sam1bMAlpitNW4SowCvzLoodnprAJHqUuEpR5MAjaINSwNbj wCOfAWE2U5rAK45lY8dJ9cAkuMAfsY1ZwCqftarQk5fAIZigZC4NCcAonNys7gjVwCvbILkX 1qHAMkFd+IMlVMAlmhwBr+u2wCrsfq/IqmLAMCZhaI8Ej8Azru6dRrUKwCKk8XdB6XrAJTC6 h4rGnMAmaq0Zsj9BwCRTGur9Wn7AKY3wcEFYGMAl6SqJoPaCwCdT3hgPXebAKUcD0VViXMAy XmUM58tmwCi+3+DPBgnAJYPIAtRdEMAjqNuY33MdwDPaX7VI+vTAKZAnYmqK0cAk9J5BvIFf wC6tMmYaL2bAI5UGBra55MAtm7iKuVWkwC7LKTJRFiHAGXU3OUNcIMAoWQyni1xKwCUUyPHo Y3/AJ1uvQftfW8Am1zbR0XdmwCiGEvlWsO/AL1pZUMU8EAAAAA4AAAABP+71T2guFhoAAAQC AAAAAQAEAAkAAAAFbmFtZXMAAAAQAAAABAAEAAkAAAADbHdpAAQACQAAAARsd2ZpAAQACQAA AAhsb2dfbGlraQAEAAkAAAAHcl9lZmZfaQAAAP4= loo/tests/testthat/_snaps/psislw.md0000644000176200001440000000070215027034070017170 0ustar liggesusers# psislw handles special cases, throws appropriate errors/warnings Code psis <- psislw(x[, 1], wcp = 0.01) Condition Warning in `psislw()`: 'psislw' is deprecated. Use 'psis' instead. See help("Deprecated") Warning: All tail values are the same. Weights are truncated but not smoothed. Warning: Some Pareto k diagnostic values are too high. See help('pareto-k-diagnostic') for details. 
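The snapshot above records the deprecation path away from psislw(). For comparison only (this sketch is not part of the package sources, and the object names are illustrative), the equivalent workflow through the current API uses psis() together with relative_eff(), both exported by loo:

```r
# Minimal sketch of the non-deprecated API that the warning above points to:
# psis() in place of psislw(). example_loglik_array(), relative_eff(), psis(),
# pareto_k_values(), and weights() are exported by loo; object names below are
# illustrative only.
library(loo)

LLarr <- example_loglik_array()           # iterations x chains x observations
r_eff <- relative_eff(exp(LLarr))         # relative ESS of exp(log-lik), per observation
psis_fit <- psis(-LLarr, r_eff = r_eff)   # log importance ratios = negative log-likelihood

pareto_k_values(psis_fit)                 # Pareto k diagnostics, as exercised in the tests
lw <- weights(psis_fit)                   # smoothed log weights (normalized by default)
```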
loo/tests/testthat/_snaps/psis.md0000644000176200001440000132102215074562565016650 0ustar liggesusers
# psis results haven't changed

[base64-serialized snapshot value omitted (reference psis results)]
ZCxAAqdlFNGhmUAC7kLpFptHP//fFybW5Ww///hrjD2tLUAAVHvRMSC4P//Wo34+K4Q//6zw dvkR00ACPpqLM7xsQAIx+UAfS/0//eqVTHf/LEAAhiUbZAErQAFJe0He21lAAJAL91fP6EAA knU2JusZQACeZLhr2LZAADgY8qZnHEAAYZnS3sIlQABYa94yVbJAAwePz7a0Xj//i/laFVua P/+LCu9jeUZAAdr3Wg6HAD//r8QEMgT6QAGth5ck87JAAB3HD48Wp0AAweuH1NgAQACUbTho 1OZAAPxRk4Q3TEAAIhIialgqQABOhBAL92JAAI8znIiegUAA+PCk+TVwQACuS18Sd5lAARHC ET5KmkAA5NHIZ/coQAGCmLo2231AAkNlK/uLmEABmh2WtjV4QAEJotDbJelAAN0x4+2F8D// TXWPCe/2QAHOqOsmqQBAALONu/+/g0ABFeeFXL14P//ZibWUw5ZAAJ3Ur2BXBEAATWRG/mSE QADHLS1YeM5AAc/2ZX0/mEAABhSEG+AHQACNHTItk1pAAwwntHKuTT//t9jQHdCUQAD0pgrd 5bhAAodSQPQ24EAA9iHvp2WUP/4FppgIVKVAARRTVvGNs0AAiEoT5mycQAFUHxeucNRAAFnu 7FA95kABWBlkDqlAQAC+QK+PihRAAJ+XCGZ4OkAA7raDVI+9QAGlwODuZrRAAcV0+h1B/EAC HDrtXtLMQALmkP9HKsRAAGftRI3GxkABBXMd6mmWQAF+keA68sZAAddU3irMmUACvcHragQ+ QABfg5WrDchAAWHHJWfeFEACmK9LVq9+QAHp3KPBMWJAAZ7vVJHb9EAB+bwChsTPQAQ2GP0t vJdAAEsn5BU2NkAEjgiiekRRQAOiUQj+vTBAB6NrsKzskEAA9z1vNA/oP/5LKoGFmTJAACks 52TCtEADU5c3xxIDQAHXpVfbCktAApnE9E+JiEACh7J2PE74QAJ1X59+iXRAAeYVz1vRbEAB oLsJNUtqQAKeoEE9K49AA1ZMacX9kkAB5mmSBkFxQAKjJytb1XJAAfPOuunvSEACUEQks5ZC QAODFxSaiHpAA4ACqIEfTEAA9ozLIu/oQAFB8MO1xlhAA+fpjdtjw0ADR2upS4xmQAUPjXk/ OxVAAYAKvdwryEAA1cupOEsIQAErkemQYTJAAkoxCVbGDEADzmR/80a2QAFaGq3yZqVAAz4J E+crdUADsGP4cpGaP//oypVrMBVABqmLE+xV5UADCpaABBD+QAOJDZOLBO1AA+xuWpXW4kAE VA8U724CQALycqqunclAAov1G10bZEAC4qJmWLfdQALSTZUhyD5AAzpirNFoDEAC/JOrSjOQ QAJxpFHAkTxAAYadehJAZ0AB+naiLE00QAISOeN3vhBAAa00AuWa+kABae4uVMmMQAIyqMdz V+lAACbiySOBtkAAtvc1ZBoWQAQAeQPaGxJAAbn4jnPzIEACMpq6zba5QAI5PPqwPk9AA4BB 7Z/VnkADThe86yJKQAGKBtA+DHxAAHDtPVNf1kAA2L7Q4KknQAAJnFKDfItABHEcuOZv5kAF AfjymCvKQASLCdacYBNAAl7kBEHkskADaB3ERzgcQAECnOqzLAQ///fJEzKZlEAAy+qs2H/G QAMWIrA3iepAAkIvMW05ZUACo1jc5/4gQAV+/BmvvglABPT83CpttkABXVKJaJ0sQAT5QM8w pvJAAQZuRp0Y2kADVR+dshWFQAQGPhzaH+1AA9sAGUWxl0ADNIrZfcRnQAOPuzzieUhABIXW /fXikEACckf9DAUkQACs831VO7RAAXbjRYwGGkAAf/Apw/LmQAET78jGTQZABC4D4sdsBEAF u1H3JO7UQACrMRfzytJAAU3RjjZu0kAEFJE0A8eUQAEbbhYXqHRAANEb/1litkAE6IvslNDQ QAHFR+AYrydAAYM+ixMIcEADC0EotlUIQAGNB11VXxRAArxldYdXgEADb+OsC9dWQANLB9Xj wrdAA2uI1EaC3kAC6BcjFYhqP/84pPoWMXFABE6XSTAfNEAAkG/iREAeQAGMHfBpBdZABFfs ErWgXUADFMnZBNTZQALTEZnAirBAAV7e9U9nS0ABj201pcTvQAI2kA66tS1AAXm6yR0DT0AD PjhovNdpQACm2AA5EiBAAKmSrbpK6UAB1mHI3D2bQAB6mvxIRs4//3N7ZWMzP0ABAD97efCG QAJzV5vnVJVABKcd43nrlkACelKcUC30QAFxbYxU2OpAAR7iTzEzTkABaSYEBBy8QAJCgHEG GHxAArJoVPnNx0ABTKFQCM0MQALB9s27sl1AA331u0Wx2EAC3qG6Rl6pQAMPOB+5ZVpAAbJg jOEI8UACQF0Hg+66QAEORJvRPjxAAdrc5i0E9UACdd405LqxQAGYtjBx8hBAA6OF/cPu10AD S2KT/pw4QAKjqSN/AzpAAsx7/s0zcUABXGGLKuW6QANF4w+adFNAA0yc2u2SzkAG7fTa+JbV QAM8WlrwLVNAAhmsTn3ZcEADkf9nMJrSQAErdiWkCA5AAw/rmq1+HkADMZTPq4tcQAGrP71U 5WxAAQw6Le/klkAEBCJTlYH+QAJuAkiCBNJAAD/SqNWOBkAFPIOmXR9GQAOX4qJD5PZAAO6m iUk4T0AF6Ngo1tviQAIf9ZRt69pAA0gISiuqnkAEcHeaEk0GQAQtsSCF8MJAAw54bK3UKEAB EGcpaQyyQAI+pkFEZz1AAhqcwo9xgkAB5J9Kd8hiQAR6+jUlgHFAArEqKTFIa0AAUAzAGq8L QAHEBTyjdglAArp9DTN9CUAEXgg5L+gmQACFkWSV33JABH2tuzIfeEAA6f6FSX5aQAOBCvTS YfhABDUMNgbqaUAFMejcwVuwQAH9F+q0rSxAAg8yUvoZIkACPT6riKVeQAH9dWj8+Y9ABMYO yaoJGkACGQomUWSkQAXD2mg0TQpABNTmC8zEW0AAMAtrYKtVQAOzGoLDS1lAA5fd+c0S+EAC 8EtoSqC0QABJP9KrxPBAAaZFiSvFHkABO4LJ+xH8QAIrPI3hjiJAAm1dFgmYLkACp+ZtM6dC QAEXpzh1uHNAAnpJAA4e5UADhKkDB+1hQAPUHmbiGHJAAQroVVKnEEABplf3N7BoQAZzNRvk e6ZAAJLs7rM9hz//lQCk/EvdQAHsUMRwaJxAAgcKaExHLEAFUvUgwp4+QABam9ne1Rc//+U6 WGNKV0AF/VvBrHPNP/8jGc2nVx9AAB0GLAF7xkADfYxlB/KgQAJpAQ2y5aFABD7kDbOG8kAC BXCHnaP4QALiwD/ueEZAAh4iIwBS90ACQMn+rqcUQAH1yoC6W4xAAgHwZuTNp0ACaQZYs1XY QAIvU7HIen9AAcyxvy+Y1EACOMN9p58kQAPnE6Ps6RpAAxIjtn2gkEABoixLSB8LQAHNDHO5 
1txAAZO2Gi7jwEADlgpY25s8QAQk5RKsJh5ABXhEcjZKLD//I3m8Gvy5P/7ol9UDUDw//1Xr /MXOCj/+fYi74SDmQAYTg4Gb2FRAARfJHNvK8EADu/yhTt3bQACJZ4NmtwBABRj3T433BkAA lSMbDZnCQAFpB3XHiBBABK25cuOtNUAD5ApItwuCQAC4fB6vuaNAAsN8b1eLPEABQ5K5t7Mt QAFtMV7DGK5AAtEJ2NIswkACdEPs12fHQAL+wd7yv2dAAy7g1L4vDkADZGEq+Di+QALPI212 Z05AAl7VTcQwVkACDYPn5CdXQAAqgVqu3l4//0HnLHnPSEABfziSiFHmQAQNLnpstDJAA5uj EtZhaUACnI+EOMyrQADR1O3Bs+BAA2SPwV9DQEABALMYap3iQARaaB1LErhAACuQX9vlKkAD zqgxpJKiQAObdMSWE3xAAoF6C+lgcUACsSRT4FOeQALIK0Tx4KNAAiz0Rl7blEAAmwt7/YTK QAF4kJqj1WxABcynNZLeYEADWItyb2iyQAAbIRA6rOpABIZyOeG7W0AFXt2uL+v2QAY4f5rH gztABMJ1jWOXNkAAhWWgLeb4QAAkKlnLtrJABOSDJxVW/EAEtHqxzz93QAVrSVuYH8lAAoKh 9DhhNEADJ8EfWsjqQAKilA79x+5AA9qVBOYXIkACp6kTuE1bQAGHeUcOo9ZAAws64CGQbUAD caIKSVkEQAU3Ke3p20FAAkaNYq9HakABh/J0NXeMQANNaNQNykVAAwcVcLMK6kACfXaHQG0I QAEb0wiXPzpAALO9gJwO2EAA6IjH6WCtQALtSA4NEEpAAhK7hESxJEABwJeArjoiQAJj8hBR 2vxABGnEXGbdI0AAE3V8wf7qQAJuhqV3Vo9AAgWH9oq/0kADBDExOwBkQAEt+1ItvY5AAap4 BbvmIEACndmwo+HBQANHOT5uEBxAAfQXS3Fl3UAA5mVwG/zsQAORIe/vwlJAASp5T7nPOkAD 5bOtw8dkQAJBkcbBQMNAAkVY/Z4mokADMA3N2gIiQAIfBuezqgpAAl6H85Ppr0ADK4kIp4qb QAFIlb9G3fhAAi09wZ2SMkACeK0PKutKQAJ7dr8Xog1AAfjOSeReDkACNA6DSiuEQAHoleIH IyhAA9QbFBYBxkADK6k3msaUQAKSh4ox54pAAFTWgGjsykADsxNaS0kpQANsW5GCnfxAAgQB eBQkO0AC5gXfl5JOQACSQkKV4T5AAhWYLUYXcEADcPu3YtlIQAEe7JTtnMtAAAXNedeesUAD HWvNBQ0aQAGpu/jvdk9ABFyPLPXqv0AH3vub//OjQAIFXe8Fs4ZAAZ/lsQ8WpEADUvJYgAci QAEWsOSK+rBAAaMNRtYyAkAByTIqePe0QAOOOYeeCfhAASvqFEBU9EAC5t0uVjFaQAAuF+ze pMVAA45GReUcmUABBkOYMKoiQALGVuVnfEVAAu6ytHC4R0AAt+rXYKOhQAPlHO8Ul5VAAG4T Q2fd1UAEVuku2+gFQAQRlnvl8D5AAu/HO2IzfEACN0dTyfBAQALAnOjQISZAADEOUAarVEAA URNkhVyJQAAyLcyV0oRAA0LPDE6/dkABXdA+6QlDQAFr9hX6xxBAAg87W4gAw0ABXjNNlYKo QAA3dNAyg/ZABJEPAzXD30ADFyV3a+9GQAQcXAW4N7dAABfR65dwzUAAvyfzqH/7P/+PATHd AphAAtnCl7N4s0ACMo0MPMFCQAOQyWs9+StAASbPQ71lpEAC0YWDng5KQALYNa1cjw9AATFw zKSNyUABsSH4LV0yQAM8r9wR6NxAAmtH72HM4kACDGNXQoP0QAE+RhjPdLpAAySNqqNhZEAA qL8Oi15yQAKwJ3VWo5NAAsacmxnlekAB2u5UGcpyQARHws3nZghAA6HqZvTyjUACm6sxk/ji QAI5u5DD/rJAAPkSEur0x0ADe1wYly2cQAB0TcTuKs5AAHHRaQ90S0ACGv0k99rcQAHhi0Zi +wpAAaEfeY1S2UABFHhK0ww0QABYnFjtlQlAAHA+p52uA0AD1ePAAnRgQALpcGCkI4JABFyz 5rPz/0AAQDV1hegqP/+nD8WwvSxABtUfURDWYz/9WkCKuQZnQAJw4wJA6Z5AAlRoexCujUAC HgK2xwgcQAJwbuXJr0BAAvsJOSjd9UACULxX654eQAJF3rS8HXhAA132cHHy9kAB43OJ6HwV QAZjS0Uvs+9AA8hc32AzrUAGK5uUgmZRQAMiMyI/3BBAAhuOc+Z2tEACUKOVMgD0QANzTDe2 P8xAAfT2PUIaGEADD0A9WlOSQATNYveyLN5ABACGZLdV9kABn5JDvmt2QAQf/oXkY5RAAQhq 6O858EADecnYp7A7QANnVsmPP6RAA6mxOe1ulkACqzmCiZHrQAJ5lRccOU1AANOoLRkVnkAC OOCpTnBZQAI0ULLtBpBAAnKj8NH8HEACxk/kEiRjQAPpG6Xq5DJAAcDD+ItSzkADUJ+QhglE QABnhYfAdOBAAJG6A7Z3KEABUhIvUTvQQAQkfRJDCPZAAdwGk7N3r0ADE4QSPlO/QAOZBYIM EuA//mWpoXhtS0ABycQgnSmHQAF7LUCW/PhAAkoUHTenkEAD3aNJvx8KQAOnlo+kk4dAAlVX muKOzkACIoKBG+eKQALZfNToKqZAAUlnil5zukAFcbQj4YyhQAcJZ1B/II1AA4lrnc2OdEAE ccZpsTAJQAJSO7mLxOJAAIfBRfXMHkABuDI4lOiIQAJ7LL9D819AAxFGA4RO30ABkIi0d6Lz QAKHf/u+SYpAAWtrfMz+L0ACKx7E2jRkQARAL+AYsZBAA3jhxCMMdEADFCsPd1PsQANoNe/b z7VAAUI4DZiXjUAAHENPviHKQAHk3OEsSOlAAgZvLgbfiEADU9GplB+6QAN3qXAueBhAAjsl tewniEAACgkdlJTkQASxFUB20UxABQr0y35KkEABNwPyDIZfQALCsmq2+TRABygaRJIz/kAF jOl0roZGQATcmojEsxJAAeXxLgD4zEABtZ3ryLPoQAEEYkIUARRAAI6roMQQTEAAsTRgd//5 QAE02nLe2WVAAeJa3nmlekAFZQJmUJuVQATsotsaOYRABUH2+3PawUAAuXKR8NrgQAPIDR89 7itAAbIpSUN6nEAC37Pmn1UdQAHDy+divu1AAqi/PV1K0EAEvucvMidOQAiaqqyLPkZAAX7J 4+NuZkAGRgc8MMChQALAP7SmirRAA2eEQgMxlEADeF5rHFQSQAHa92PkKoxAAwTE+VneBkAC 6g1q4MiZQAA7+UVFewZABChmRidILj//7KiJ6EeeP/5oWVhHqS9ACJqqrIs+RkAE4Igl94l8 QATwyFu2Y5pAAtzdnOHiiUAA6EFtnXPKQAI0ENU217pAArkdanftIkACJKvtCReNQAG7ZNc5 
PKdAAn96kLq6RkADIUjx+EgyQAGIwkdQbP1AAsN3nvEaeEABoPKnfwFtQAa+b1TVxC9AAxAA bV6PUkAEJ+vJVv60QAEqABiRR8ZAA4W1y7jq1kACSeveizpyQAJxC/7JaaJAApIvNICxKEAB 8MtjsUXqQAQfxIpaUPxAAWQ0TTyRnEAEcC63AAmyQAKC0hcT3wRABJ13lC0UZ0AGCDbM/1X4 QAL5YWm7zUBAAS9fynvuxEADPgqT7v7HQAXy6X+ILcVAAmERZz57IUABTMycdA0GQAFht3vf v+ZAA4NCWvqfGkAD01Y1yT+DQABtASJ9AWFAB3NYhHmqJ0AA5HL7zaj+QASDvUZNRB5ABCBN JX6QQUAEKyOnMiprP/63nifF03BABYXdELabmEAA7qsiLfk3QAKf7QD7hxBAA+XfBtSgvkAD Txhl4tocQANKMTQreERABLfqAVORYEAA5w3zkFOmP//P3XRiV9tAALBI4sgjLEAAcyKTIT5+ QASUHSFJ68dAACBcWYTLrEAAmr1mLf1cP/+ipXdKzKJAAkEddqz2P0AAdvVVyTkXQAMrSzq4 ODhAAk4zeS30X0ABtnIhmZ8nQAGVx5DddGlAAYBf4C4TuEAC1s9E7IbSQAE8dRXhRYBAAS7F WkvoiEADGIzj697jQAJEqRKYJWpAAkS3SCe1TkADDaEx3jSLQAIJx+GpZThAA1s7CwyH5UAC KNTYDZkcQAFTYRvXlKlABFT1Ivf/BEACI3M3E+9+QAEcWCehWJ5AA9X0LRtAU0AAz3IPVf88 QAEaIcHhJV5AAmhoT9gYBEACyU0jd8b8QAJmnGWgVaFAAvN8V2WMpEADBDawxXISQAC0BHD1 M5ZAA9MEaQAIQkAEIVo6ep4ZQALrnKOyoe1AA68wCt1S8EABA7u1n9+3QAFNayzAtyJAAy6k XWxJrEAEOdXCQhxdQADIQ8T0RS1AAvE9y4WlpUABJBmspbkkQAISCD+5VJ9AARhkff3SE0AE iY08MV2+QARa6cxlrM5AAJcXS7c4t0AEFAm38J/pQAIQbSvoP5tAAiUwn2ZZ+EABuIl7mO/4 QASaUT3whDJAAiYM2NqxTkACOmX8yGjyQAHYDIeKghNAAZAvllnGOkACF0w7E6xzQAJxadki SklAAQ58FKaFDkADctJQYYdTQAOZMVYirRRAA3lWVGMcM0ABLVl9ZkbkQAFGyhgkHLBAA5vi 7LtaZEAEEeq6nuNOQADB5l5qdjVAAV/jx1YWekADHfSuetJIQAJDX3nNCipAAkNYbKMqAUAA eOXgfx8/QAFpLsJUySZAA6TuesPUgEAA2G4ymQLAQADLAMZbWzZAArDejKApnkAFoyr4VMOu QAVHhPFILvRABh9L8GouWkADWDeIqticQAG2lxriHvpAAke6UrPzukADA9+55xvKQAQvrooy enpAAUx9ZemInUADdejB1QGEQAIPsgc30bBABCYPrAVswEABOcg046HeQAQ0Hi0gmchABLtj a4nKs0ACvVpre+oFQAHfqx9sUMtAAiuvnj/THkABoQMnaiuSQAF6VQB7fkxAAvvDx+n9bUAE o922lyTCQAGcljvjilFAA9KYxtZ8skADh+3zedLoQALd9o+RKMJAAZzV3cHaokADQnKNwISv QAJpZQy9dJRAAsjZN26H00ACUQnAO7OMQAPCJnInmFZAAsM60IE2AkAB/WZttPdQQAJ9dlw0 x0RABSKwxn0iikADY/V8zgfMQALU8uFfNTVAAcmADowwrUACI090WWcbQAISwy19DUpAAm11 Y2tdc0ACf+1OZTtLQADDi5eorLZABlREMPBYkEADyr1oS3V2QAPCPte5L6hAAb560Fh4T0AB SZ4tUjlwQAGHLAv7VBJAAwU6PvMG5kADIIbgqrwsQALCso/yjjRAAnXze2Ysx0ACesdv4w2O QAFucQnQVL1AAFEIi70WDEAAFqenyP1+QAWUI44fc09AAcPvgrLXgEABHBu0TiRZQAOl+ixz TrtAA2aEwV0PikADegy1QiPYQAQbRoOISPxAAuaFrygr5EADUYipuYQwQAMmmMX1hLRAAXAA T6YQu0AB3hG9YE++QALRADlWzIRAAhbxSl7+sEACfeIMZ3ItQAFpHct6M89AAvEOoGTh0kAB ClRDTgYoQATJsyompxpABR3Jv6z+BEAAJSRxUjEiQAUGbiQuOBZAAZ4Cxv6nKEAB2qbIpUfq QALCD73v0cpAAfpD5rbL0kACYGZQB/3aQANATEL28Z1AAXwg9f+hDUAAzwPh0AznQAJBKUFe PxVAAM6D3oHz0UAEbLaUvKZcQASXMyZ9VPZAAVaXEQHIc0AEoKZXNbiuQAEwbXwKlLNAAzVe cpDUC0ABItvuvLVsQAQmF7S52YBAA6fmJ8PJrUADbaxDybXfQAKAxM+9oRBAAuCqNgTRIkAC lsj7r0CeQAJ7f+Q9XE5AAVmD/vQ2a0AFTS6b7G7qQATRHn4BgZBAA4zAzYbNtEAD2x9pv0cY QAH21gucog8//147GPNnM0ACdYETuwVVQAXVvPYVlrpAA7utWjwiP0ADShZF306nQAIftIny Rw1AAGWbQrBbMUADphwBEyq3QADAO0bP4wZAA/5LKryxJUAB0hblccmYQALwJnJin2lAA4WW FqzmHEAAt9bSqzBPQARp53HE/PxABRQ4xWGsukAE2Lny+P6EQAG/z+a8M4ZABDnD3OscNkAD SLu/k9fEQAJv5Advp6tAAr7SrffeAkABJml5oxMxQAF8+UoIx6RAAjK+dLHnXkAILaaUXv5t QAAX31BX8fBABd8gvFdZtkAEXZepyjZAQAOR/N9w1YtAA1A/vGW19kABLgsqVjY0QAJr5v4q iptAAxODXgYimEABkgVwGQAgQADsA5VZwtlABCwGp6fux0ACmyhRpmjgQAJZgUIQa35AALGX BMqU9kAAubM6OfEcQAH9UtgIuyJAAbOsqhkBKkACYf6CdZjwQAHrQhqRmKJAAzhi3XaHcz// m/JcOTVcQACVm7adP55AAT43vn2c5EAE/ZSsyxYLQAIuqJ41Je1AAb8oVpESWkABvyhWkRJa QAN0Njxy14NAAhzBESxt2EABqCnFddg0QAM6my5W/RJAA5G7X43+Y0ADERXZxbPcQAOP4YsP zMxAAwNoobl56UAAL+Jqn00qQAJJUrieQeRAAaEQXyA89kACjs4KUiFgQAMZoPBJDXVAAyPS w3m8QEADPKbToa7uQAaEHzIRzPhAASKSp/lbUEAFLL+LMqW7QAC3lzCgf2dAAurbFpyNh0AC +sbOoFw0QAFbrluwUclAAq8N1W3PSkADhtxX9zZsQAB7FTOGivVABSetHjSZtkACWNAlDLTi QAL8gTEMaz9AAgfiAk8Q6EAB/xl0dU94QAORpH/dA4dAASxccBx0kkABtnUVEL1EQAGR6MWS 
6jxAA5COQTNXuUAAiyUrW4OrQAJb9ueGaAxAAl2XM5o67kAB7ukCGu+OQAI4GK1odhZABBuo plA18kAEdxK/hDiqQABcjyVaUQJAAjpfbK6oLUAC2izdxEncQAdK9pm7frhABbMJuAbZNUAA 8sv7R38kQAMhh0kVG3lAA0h8npgdEEABRpLR26RsQAHYFB8jRa9AAkxxh8iE6EABB0C1ElBF QADsGYwX2NJAA7SmQbvpOkAC0E9Q03trQAJpJSwpWR5ABGBF859IvkACyKy/hOvAQAJ2N37d MtlAAiInYN7YqEADfvaGQa4YQALqsps7yFNAAHr9nd8FcUABfKOMAJG6QAFejjBNb4ZAAUMW umieekABZyY4XU5iQAOtyNpr4y9AA22AsCJYSEABEMW1kcBOQAIQaBA/S3pAAbl9SlFxbkAC MG1kXuwSQAKzt5VIV4ZAAYr002MqoUABnlFHC+6OQAFep5s+iyhAAfzz4fLu0kADDETY0y+w QADdSe+MqeJAAGMMQ+KhsUAEOqAze+/CQAHzzKaUr8JAAkDerg5fLkAB9WAzqzlGQAFsz7Ow DJZAAsCE5iiAT0AC3elF52YMQAGfFBo2sz1AARgYONBteEADP8M68XHoQAN4xTs41l5AAex8 d57ki0AB2oB0odEEQAFmjDYm/NRABHt37VHLJEAFqv3f8VP6QAIK6aSy7c1AAajhhTLEq0AD PBlWCI5QQABPel5qztFABDSEJkOnMUAAy8Z823BKQAGzlK8+jBRAAaYD9gbci0ACQABiRswL QAHqC9tMXwRAAis1PAALZkACPwvn7JDcQANr613W2AZAAWa22rfOBEAFm43VcZYQQAAn1I/a IhxABEFZxPfm1kAGli095+Z0QAFUygqCmCg//fnNpg0nMEADG+43/CetQAEt/5z4m/hAAih1 jXjDWEACrxdg0IU+QAIEHTVzZ+RABKpnELVlCEACG1Yn2IyqQAGwFlE+7UZAA0ISwQX6vkAD kVXrbMpqQAPoeN1tHmNABVjZt/jnPUAB5cKnWo4nQALEnOzCYrVABEDDF1WOMUACv8OVCiYB QANcR1qVZnBAAFkqZ1DZxkAADwI7SRsWP/8Fh5MwhQVAAf3KH0QfNkAB68vZIMtgQAHi6rg/ cldAASz9fqgAEEAAl0sk4kBUQAGSl3QYSBVAAQLhfimjfEAAjYCNcdaAQAPkKBskVytAAjdH /85z6EAAY1Cw86/OQADQlTPlBfpAAfNU4wNFfkAAE+cDA+rsP/7y4kAtTOFAAmvk5MM+R0AC Lr1huPwxQAGyrj7PAzpAAiYQryZB1kACjBrsBBqGP/9tRsD+bI0///p3oeQTPkACH6simm33 QAEShNLSfR5AAsy6Z6yOjkAC7ZL5LY8XP/4EPSfGmlZAAbBuDIZaBkAAoWWf0FMSQACFhvMv zD8//o0osXpN/EABnZmqjARFQAF13oPMXTpAAVMsKycVS0AAW+zGuhF+QAD1vyP2BXNAAFzc yg/PDEAC5Nsmsk5MQAMu5PhwhERAAYeEsVYIAkAA37cWTJLcQAGfC6fffRpAAS/AetsOJD// lQmkIHQqQADbbQYBR2pAAE2Y1GqRVEAEIxjmbcF7P/+KnraTz8w//3dhCZVHtEADjf5gz62g QANxMe0r+QJAA8wDUahC+D//xd5e5wl9P/8XWGCSShhAAZy5c4LDtEABWlR3d4ouQAF7IxtX sFpAAYsb/grdHkACARiCKKUiQAFgDUkVco9AAIZnQ6L6qkAAMTdaneT7QAHAFttMD+xAAa1R drdGnD//e0sehyd4QAATMXNBAMFAAWjeC9yG5kACq6xFPmJWQAFk4zjq93w//4Tg/wD/6EAC EOTbA1acQAASt7yvwupAABmUnQGXzkAAzkDCJV+wQAGzgagzG/JAARJrdXP/wkAAjxWiLdRv QABuhhmBGcdAAWzoIPEwxkABQmzfUqrOQAG4rYGTc8JAAT08yD3i5T/9L+sIE2v2QAL9zILM 7kc//+fpmTyTpkABQUUkmAxcQAIMWMoKidtAAjtItaCf7EAET9KjJmJHQADPXKZ/gXM//1Id z6I+RkAAp1ET85oDQAEnvrGGrppAAH0POH53hkABmw4HrRuGQAGrWPPGrA5AAcDUpG7EQkAA 9o4zbYn5QACcwXjtcktAAenfp1ooyEACYhMswBteQAE09SKOy+hAAqRWM0rv3kACqwrgj5dE P//uKEcatnVAA8SwTj1xbkADda8eBM09QAITqjhddi5AABHh0SoV/0ABEWks9CD+QADbw/t/ ABJAArFBWap5LkABTRO1dxDQQAEduJZSdpRAAM58Rt/Hw0AB55dltW2AQAEioAiyDOBAAKpB 7by47EABacOGNTaAQAGtvba4eZpAAWfGShfVk0ACxFukBnH+QAJLd4ycSWFAAEda6ADB5EAC L1ChlMP6QAAamDFxyqZAASimdWEuIUABbi+6cZ8SQAFpTFRnm8hAAm6mIShipEAB7qgqVfaV QAE70KqWWlBAAZQDW9XA3D/+lVafhhinQAN/A3P3FNNAAh7zNwjCokACNM+w+mYIQADwd2A9 MJJAAsVFiTFjo0ABT0UC5cJMQABcRoh+dqhAAUgqKS2IdEACAf4tDDI6QAJMl4lkJVBAAcT+ l8HdmUAAwc0jFv8UQAEMoT+f8AFAA0J9aG9mOkADqjZk7NhGQAP2f5unkVJAAhVlIjBP+D// eZ1OwyGHQAJC1kHMcYpAAXfX2p/NJkACUsxFQaUGP//2VpE41UJAAZ5ZhmKOaj/+7i1d2Q0i P/yi48+g4xlAAyXNmBCOCj//KmqWI2yuQAGZKfEq5ls//qmCc5rQpEACgjBRs13EQAGSFLkb FzBAAkqviUBGGEABnerqyAmCQAAn1AuFOCxAAyjJOGFeaUABPXTdeNeNQAI8HPPt3W9AAeTk XpL7TEAA5Rz5GglaP/27HUzneKZAADEhFl8NhEAA65lJsz1xP/+W/fCk0rBAAaDBy7FUXkAA dZ64pFUAQADZlOh08OJAA72r2n5U5kABOi8a3do/QAE9efVfLZhAAF+pXw9q0EACcc1Ze8N+ QAFRaKSBDZxAAddpgglSwkABJRryHyHUQAJNfXwZ2WtAAT/SFHZBMkAA9VSZF99UQAHLSa7X mHBAAaBXn4E/xkADEh3vuydlQAKI7749UMdAAPugmySSwkAAyzbyodYGQADyOkw312pAAFZL RdDxsD/9MCLunhtYQAK6A1xgZIhAAJ6BnuL9ZEAAYVMstxGkQAHhQlXXPQZAA4PeGk67MkAD NTRJsm0xQAJKKQQ2voxAAPvUsYdc8EAAvymCCwmGQAHEI5X7HhNAALWFoVrJNkAE1olFnqDy QATz0jluWoRAA2iJxifH90ABt4tyGWKeP/6xNUnIPhRAAHpEK+FCDUADiNuuXz+fQANYXiZu 
xABAAQugK7XSoEACemUPk7p0QANkXBPTbihAAp/zYah6REACkEdAmdC8QABXeMG8TsRAAP6b GWIvgkAA3y9rLHwCQALnADPNzqJAAJBMNCAz60ABkvXdmkbTQAIWAkHlkPZAAQoqmDZ+GUAA 2oqEet34QADBh+H+IKBAAZDnAzLauEABuexSWpYpQABhkIcB/jRAASZg6DC0iEABHhTd/G4P QAHlOKtzhvVAAKIrA2M4VUACRdAjUyjJQADVUkt2FMpAAOjJ2wZwAkAA6HyuFLMMQADS2fyu FuxAAMIWShbaIEAA16dDL8w9QAD7p02o5IxAAYfMm1SEfkACT+BhZ0LcQALHHGh6jSRABDCV mUzRHEAEAIU7cCYbQAE2Xggx4IxAAtp1FT3r40AA/l8aDYJmQAC+7RNdC8FAAXMucz31nUAB sOaBEWLvQAAT5WGPegVAATrIWtL4FEAAMWOEplOAQAAwj5CWVcY//++IDm30MEAC9q862A+h QAJ/REjUiQRAAMmF8YrUzkAB1FmpVFUKQAFuvuoeb8BAAXu1os+gwkAAgDIG9lBLQAB7F9uL RbRAAU4TpkeixEACgPq2p7f0QAB4OXjLTVlAAGWWW6f3gkAAX4q6zCqCQAKgwbRr0+xAAZKT xNd4bEACSgKA9F0rQAIASI0S3k1AAWZq/bNhiUACXqmdEb+aP/+0l7cXv0pAAjMlUkdYaEAA WvW+s4WMQAIO9HdIjqVAAfKMxSvv+EABaqrk5A/KP//hFuVfS55AAArXSqi3h0AAiQDDHyWQ QAHn7yU6U/FAAYvPxWgBEkAAft3tr/hOQAGMJityus5AAVtEcEy0XEABKVnzswpYQADWpkWk xp9AAM/7R4UYvkAA7KCAASgkQAG2Y33vRxBAAlMjlHqmykAAM7hjLiP4QAAGNkpf78xAAboG zJgSY0ABiYnb3Ix1QAG1C3PoZ9BAAZwMLkOlZkAAx3voCRXUQAGNyFBmnMRAAJ8cz+Z6c0AA ecogdQSqQADX871GjTlAADpE1THLTEABbBM/2uNcQADhm+ZgQLhAARfcRFbxqkABb3D5IO0w QAIBGTSkzKNAAcFBrtykfkACqxn8dXfiQAIlGb8vguRAAU0N38y15kABsonJg6GQQAKpIthL S8Y//1kX+oMG1EACdsldqQ9kQASQgwjg2Z1AAn9hk+PpC0AB7GmprOVyQAF0uENbMHpAAIY+ 7vSGAkAArzmx7N3SQAJxgOw/4Fs//9SwpielAEACCpVWx+DiQAFkwfi4CmJAAg/eD/zXOUAA Pf6ZFonVQAFj7cXeAhVAAV1u1Z27xkAAwNxE2xc8P/+zytGVOC5AAnEnQh8URUAAtbixwpZY QADE3iDqjzhAASKkLkgyjEAB/eogeKnaQAKbV3zCwNpAApRagd5/YEAC8hPvguNJQAC8q4Y2 Ca5AAcby7UtgXUAAnjoMMkZaQACkJD6UBvZAAVcAYeoNkkAAAYRq6QefQAHHUABakTBABHeL T45TfEACUo71czxWQALCJOXdTpRAANoihrD2S0ABHUW6eS2YQAKEPf39KC8//zsR8LoasT// zxDcSa/+QAA7ZoP5UVBAAhuXm+se2EAAYVqIZSAgP//KF4WlMHFAAj8EXgYgj0AB+e5uTmqj QABUUP+4lpBAAkBkPfbAv0AAyNt4JSWVQABWp/tJs5ZAAAzPR1NNJUABv0DG2x9IP/+XR1ui QJFAALOlo5MbF0ACPaKfYAyVQADkz1tEYHdAAWnGA+tCjEAAZ1DighztQAASKzkoUtNAAYI5 eemFHkABvvBQPaaKQAAXEvenqGc//lWLIqW1aEAEFrMd3CyTQAIVryozgSpAAO2K9occmEAB Lyb+0s2+QAFpR5/JAqpAAmk09JE71EADDNabJ77AP/8cwgZNKfxAAFVxOLIKcEACzqJ6MjeY QACAjWDNvBBAAMNheWM3+kABmgIFfY8RQAEG4MIGWL1AAm6yd4aGukAAkBrRgH/3QABwvq0k 63hAAS9DRg5tCEAAqn8V6vkrQAEy6s/xGbJAAlTbz5na20ABJpp0p9QSQAF1/o9SR95AAEZr NNROAkAArB3gws6qQAImI8a3n0pAADR2iKQjn0AAKdORb/QGQADxvol/8E1AAXAGFwgDdEAC gZREC/CQQAH7EZDPVWZAAlvMjsnCgEAAb0FSr3UsQAJoc8CP97pAAEe0g6jdhkAA///91xc1 QAClf4aw7npAAXhwxcapKkAARh0crfZRQAIR3CbetUhAAvReNa3tzkACHLmim1WFQAF6LgxB nWFAASD5/crok0ABOwoS+rlGQAB1jb7upx9AAeriW+pCJEABJFSvGz3MQAFGK7FwUiNAAQal y0hvb0ABHlDCSQkvQAEFHM4/3OZAAZHAeFfoAEACniUWe9CgQAJrhE+7+YBAA1SJudI4WEAC FKDxtfcAQAGbTFUPcTtAASubnEvPukAByCVPr25GQAE57bwiC4Q//5GH1ieYwUACBJWpeugC QAKsP8sL5aRAAuCiDn7QSUACJDyxy1UQQAJXSfGezMBAAHZZd7r2B0ABJc6nNBnZP//sGizv 9oQ//s9NGjEmskAAfQBzaVb0P/+FBDNEP1hAAfowMLJhrEAABIV1hK0TQAG8SLzycKk//+9U YtImmEACUaCb0AjQQAMXiqEj7HxAA2BGUDxR8UAAjBPtCN23QAKrfsw1gEBAAeLj8dYH30AB oDuoN7iUQAF1ZHWn4ptAAeWcM6P9ekACQPZvnAFfQAC6r6VD4RxAAODfKlHI+EAAbwEYDK91 QAItoq85nEBAAbnkG3KLUkACfzQfhqZMQAIxxjwlCuZAAM1M1U/EGkADekm72yigP/8+xRlw 7qRAAfpeedOBPEACcVoz3ibkQABOPnqbUNFAAL14e2AOIkABEbf4bvKxQAErjkAZOkpAAR0j NxWKbkACUZBsloy+QAE9khLFskhAAKgEQGvbMkAAjjvQ3yUaQABH5TIyypk//9Jt7IZo8kAD CkBIneHSP//fY4p6QPxAAlSEG87lTD/+Bhl5m3UAP/7FsQhOzeJAAHscWRy9OkAAus80OHr+ QAGQmkINaBZAAmVZs5JRhUADAq/AM5Y1QATz0jluWoQ//5Xl2QqvlUAAQksRyNLQQAC1cGCd qrBAANRpR+q1eEABdeoLLnDsQAD4w2xsGlhAA9OsrdTb3j/+3nkfRrJQQAG83hu+BOpAAVeX IjlppEABTk4lRzSzQAH3pew/vHRAAV5DC55s3kABiUQUglYCQAEVfHGhPJlAAeWxfUWBKj/8 JJGBC9fYP/6h+FXXEd4//1pnyYf1c0ADk0iaBvuAQANJgFDgh09AAqkILPYMXEAB+CqiHi4w QAJtbTQm/eBAALErcggbrEAA28pDYHOmQAAHb0umXx4///pVuiXuiT//16GDCMkpQAIZ/l96 
hspAASRIFyy+gj//linj0a8yP/2p+iFScOdAAu/QPjQaGz/+navwulZwQAEfRDS9DytAAJOR +rg5TUAEP2OognEpQADAV0+r0lBAAfpa8I4zfkADTRuh5J0eQABz2NlBI81AANuTnpxe4kAB 6rWS/HnBQAISx0DJZ+BAAL6KdconwUABfq4LF008QAF/ehVFYdpAAxTPeQzjxUABj36iiKvg QAIUQkX6hx1AAPhfSRl5Rj/944LO1cHoQAGAZWjWmV5AAWWAAVv0GkAA+8wUp2Q8QAFsjNSN AdtAAZmM/LzCnEADB7KF64rAP/6UTeAYtLFAAkaHk20ne0AAJyEKP3GOQALAlA/xtDxAAISn cNp190AA9QckYvLZQAEfY3hguRhAAPieZbl1lEACGJXuDv8jQADyWJ3HoHpAAO4BtwGn/EAC Zf1gD5aUQAGSHV5rVV1AAkGUZFR0r0ABX09PhB+2QACodEai4xNAAGFhw5Jaij/+7jlRI56Y QABLw2UfMEpAAVF1SXc/NEACJdl4pnAuQAEmcaxmoPJAAMXBjAYvEkABKcw9PL+XQAHJOXLJ 8xZAAZsMRWUl+EAAbhT2bVvbQAQLOAEL7ulAA+0RNjV7WUACAeWpECD7QAAT4GlVlOhAAIbz HcP6KkAAZbEAs4PNQAIPblLiSlFAAP4F7qlLskACWx+y955oQAFAgQTLjp5AAN90jonvmkAA TxdwU8SZQAA2Wf8gQYlAAWdCYQerskABaT5dpoC6QAIEX/aNaAJAAUCKDFF1rUABHlBs+pUk QAExUpLWEXpAATVWsqHBZkABqI9lEmdKQAI3MHjo8ghAAuK7z9KthkAAQ4G3XBMQQABfACHL 2V9AAgsdHX0MeEAB3kMpOcJ+QAGDkyhMeGNAAdQ4D7ukGEABbtT/juOoQAFzX3leUDlAALYu rM/XCj/+8vYxX43IQAKMQ+tV8TJAAhe79howS0ADMgX0IXwQQAMdHwBm7JBAAae2+5p4ykAA wIm2eYMoQAI7hCu55/lAAhwdBxdqGj//VJ82WmzTP/2emOxJpXBAAR02DovM80ABQARj4LjA QAKY/nJxC3JAAm3tMlpajEACf0SRBZvkQAAxfiFfcJhAAa7V+JsMIEABoqpq6Nn6QADQvi/5 jTpAATo2V2jyTEACBdBgpkt8QAJSS4Guo+BAArsn2gJXxkAB6EtSxDW2QAFFb8hFoDJAAmmh lghCN0AAWFcjavXwQAMPdbzRI5lAACdM1Py0RkABlwGQZaJfQAEO5G5+IcpAAAoBvQS5gEAA yfPhSGfNQAHEwRXx4jJAAbAeN06sJUAA18BLRzIcQAF/9/u9+0JAAFUUC5/Ee0ABinIKLP1S QAIr8N7bwtNAAFyXCokG+EACEUXXcPaQP//b/4KZfOtAAIx2FAxiu0ABwhBLtDyaQALI93Sj zwhAAAJlB27sAT/9uYLbrAcIQAEa+rslMkRAAJbNzcnQdkACR2Yg+riEQAGcL414PQRAA0X2 eX4VOUABjm04kkJsQAHjcnm45jxAAUfYapunP0ABtKWBDe3NP/++ymrEHK1AAW/rLoansUAB LxYwwkNlP//Mwi/FIBRAAcUkyIc5XkAADnqucqKuQAEPaSQCgvI//97Hj7oFIEABkthMi6NL P//b2d1Fz54//1Af0U00Vj/+fbPbpdWbQAMaT7PIqE0//wbtqBC+SEACCBXDRVFbQADs5ZFt uNg//5RLLh/phkAB4H8esKSQQACfTG+to6xAAgl37J3JSj//vfWxxQXNQAB/QZGMH7pAA1DJ K5bnzT//vttwH8UEQALUdioV1C9AAYg6/p7zXEABV0j62YOkQAFcax6PH/JAAWR3AwKaAUAB +mdeG1xnQAC8MC580FJAAQDpGw3d3EABwJyxFGgiQADY7Z+qsy1AANGVSU+S8kAA2QZK+GVm QALejcMibwo//3u68AxDUD/9yiSJnneIP/0Qbw/uKxtAA6QwcygYXUACjsgeeNruQAHecXpR sLxAAetH9gilJ0ABN98C8ZvCQANs0J6G1NdAAwUtFngaOkABbK8cEhksQADPprjzY5xAAW2s w5H6akABQJK14lrQQAFIeKmGWuNAAtZw3dm81UAAAGm7JCguQAOeXoj1KyNAA5i9Aeg9aD/+ 3WfJblxcQAEv2P1Js6A//tu2KyShMUAApEx5AH+OQAAqfPUow05AAnfUcNMnGkAB6OcbNKrQ QAFZ6y3nPpNAAnnURpe2fkAB/TybngdGQAG3c3SV7ZNAAGt+1WUSg0ACCM32+U5cQAED6Haz X1NAAUXf1l0T/0ABaR+6udcFQAGPJQFJMetAAO0CTbuepEABlUwe/ueMQAC3F6xl0A1AAFh6 G/mrskABjqQy7tkuQAEcgc9wfARAADGIYesxYkACj+kPF5ryQAKZUNe6e9ZAABZ5hHUikkAB EYEJd5GlQAMr0N9762JAAZ+0DxdVokACl/o42/27P/+6rpLf9p5AAq3CPGyfPEAAdxkw3qhj QADL4GH70v1AAFWKWibE8kABoYJZwVX2QACr9FgAmKpAAPNt2YQbOEABBN/Olh9nQAE7O7b9 L2hAAZg7WA36jUAA5uck3rXSQABSkpbD+GpAAOGExxtYFEAAdGb4DRAvP/+/zrdbDwBAAtx+ zzhDKkAAfhXJCB/LQAHCs5nNh41AAPd6GOBPPEAAlk0wDWOuQACvRVkrmHJAAjz8uNABvUAC gVix9BL+P/6yrqIIgFI//xJccterQEAA/t6zd8zGQAM7uwEweFNAAtCPEPIFakACeaOfUjfu QAIzwKj7KYpAAjWEjItYrEADIt2Y5Xh7QAItHuBLF/4//M8Q14M6sEAErwEbwA4gQARiUvam agBAAmLuT7B5RUAA7BOOQw6xQAGr4Y5IefhAATpYVDY6e0ADH/jaS0hKQAE+7r5fbSBAAB+W cVeohEAB7Astx2euQACUDaEDDI1AAaCOuYbqPUAA3USY4Xi+QALrW/lWcfdAAzhwdwIlDD/+ e5thIl8uQAH/0/qVmOBAAb79jxBIZkAAsGMkGbs9QAIyYF9uubhAAadxuR/94EABnTlsT4gc QAFaKdf6q9xAAbD4EPgMlkAAiDXc6xWmQAE7BA08Q5NAAPB3cvx+DEABOYiNMhCFQAASj08I LnpAAOIFyoxjlEAA/zyMCsluQAEU6wg2ZoJAAdFRKSgs2z//vYJX98dxP/5To5+W2XJAAi4z ISvj2kAB2hZJVEYpQAHTz3GJfbxAAn6/bWYv8EAANc4+ZMZiQADMG2dAJQdAAU9xh2xfGUAC DcvmLl9LQALYcHiof2xAAhnmIzGDDkABU4gDO7iYQAJ0vp6/ailAAhhSfK+nWD//EbUVTCKU P/8RtRVMIpRAAvkHKsfc50AA3o+JTFcCQAFZy/SZBNlAAZuwoiOHMUAB1nK8RzzoQAGa8/55 
8WZAATgpsVfRZkABB39rlHcGP//Oz+yY0HJAAcd7zWukjEAAPNNQhRtaP/2iweIi0dZAAlya eG43oEAA6j9yvcZzQAG81ID0YDZAAPWoBIHR0EAB0MbfHHiFQACUHxZmR8pAAqcP9Dc1XUAB IQ+WGkTGQAEHxmzqKX1AANHrUXTEvD/+7XE5A1ZsQAMAOktyoWZAAkJxxms9UD/+hxsKJBSX P/33PX8ePhRAAU1b7iihYkADPxR1MW5iQAApvQN8xRJAAM6/KZd+/UABMCGRkMFQQAIwztVP eGQ//98BSEcyDEAB7+F1t71vQAA5SdyWiGJAAjjobpSfsEAB1Jri0aDYQAAujVDbWGRAAmxQ rOPMEkAAlSKhXjw4QADOeuoWFBNAAn+BtNZDhEABvGVw4SHUQABju7zMpn4//tNrce5UKkAB WWiVgmccQAITsvrGu9RAATmxZDancEABj2esFHyUQAG+AYgO8bRAAGrDpFX4VkABc8fAePVk QAFNhNo6vVM//1ic7hnq2z/+arktm+9iQAFORkicfkpAAWKGzglZbkABixeivqkgQAB529A8 28xAAfnMrE3Kd0ABeMmG0fCJQAITZOLQl/9AAhmXA0oBnkAB2Dov69lyQADn18sh89ZAAF/F 5TJg0EAAF0+0yF4pQAFryW4WFahAAUVF5IXx40ABY8VSALkqQAESQTwtiDBAAUwNrTvBWkAC 6SsY/riKQAAaUNh1mxpAASdmfPkem0AA35QESoYGQADKnhaOv7VAAWtRok8KMkAAmpXnog7g QAPbtWtPtyFAAP0+e7Wbb0AB5Tr/1AM+QACvHK+NCBM//6bIfZPVCkACytbCijJVQAADGqQz 0J9AAh7LyDTgeEAAvQJFhxFcQAEvXDNlOX9AAUZd2gm0cEAA1Y05ZoLsQADDS8ZlRA9AATMV oAgOPUABbmPcpvICQAG4HOSVEfxAAluJ/LCd40AAP5nJJx3SQACQZpiKxrJAAqiWCzi9wUAC QPI1STrZQAE4NkwQ3oRAAIPsSTKcpEABHGjrrT/mQAOwdHK9twFAAgl+QM58k0ABEfCg0dod QABMuPvjyWdAAHZzo4W8rUABeE27HbemQALAupiHbWo//XuagY7BlEABggiphBbUQAHAHEGo It5AAe3TygrqbkAAEI1qOhAEQAAPvqXti/FAAvtmMv35iEABtFpwfYoNQAHY3vVWUuhAA7bv NBGT8kAC0oBDebsyP/5zicxXtnBAAn8QtifCyj//aD7KB4o/QACEfLkuH/JAAi6zEHawI0AB nx5K/QksQADsgOVhhc1AAWWx1lAj+kABr4ERJMu9QABELikaUJ1AAXt6UlKp7EAAz3Hkqk09 QAIdQqF/4lVAAoKlRJRvQD/9s7ZlgacPP/5D9rDEVzA//uwTCybZ6kAAh2O2yk+SQABrAgVD 57pAAMrdNs6+TkABeUDr571kP/+VNfe+0shAAXkwKzegSEABWTEFiE5BQAI68GcL3btAAXYH t7uiJT/+hN7fRoKSP/4RQEjpd0BAAGG+DPB6rkAArj+JuiK9P/+yAzmZ0fs//tfSKe5rGkAB V1hl/tJ2QAIN9vtDvvpAAa+NwqYBkUACIyQ0CBIlQAKTR2Qw7c8//QrBthnNSj/+PUw9VUEq QAFEGT+zdNhAAHk0T3vDGkACQ5ecbmQcQAJRYwTrWdQ//NxT2XMxmkAAvFX0hIQoQAAb5yex i+Q//+W5bXIqaj/+1QV+AUZTQAEN1T+ShU5AADnoldo9nkAAMmzk4Nw2P/5v7zuk/F9AANVE 9gBrFj/87F3mv7LkQAK2TIL8KxRAA0Xpac7TYD//u9PKjZbsQADd5LKfkY9AAaQjt3u2ekAB kV/DyyR0P/+/QfKe5E5AAYw46vlLPEAAvvJMFKQFQAJi7+q/dFFAAQebtIcjnEABPpcNt1Pm QAI/NxZqmU5AAgMKTTF3fEACQWQIn5b9QADF6PztNGQ//4ySGjMsPEAAgv6EQBOFQACDb2f5 WplAALbNfpZVbEABSy0GML2SP/+u5KT0cxA//6VJV3MzjEAAV8BfxSU5P//rxBdF+xhAAIfa twzQ50AAVW5VPOaFQAATc3AzIdZAAF73/n+MU0AAFOiSFbXZQABr1WatuqE//2zHLnCnQj/8 bRlfLeouQAGbd/AoyHBAABuOaEaPY0AACYdRXnk4P/908uuQ5z9AANkiHYM1OD//ZckTGIXe P/5Dyg7WCFQ//qir3e3kVEAA2npzT1QzQAC3lEMBfwRAAT+xMH3xDkABQ9+DbBo5P/5jLUl+ JTJAAVGJbGa1/kAAcySvOupoQAA7fLLIuRBAAWF9y23V5kACDEyDgCH4QAPGsWztUo9AAE8h 4ifxEj/+YQS9IFG7QAA0iApsEaRAAG0k/ax2oD/+Xxg1XB5kQADXnOLtxiBAADMHa7N1AkAA hkrJOr06QABh4/6jVHNAAKxpkaAp+kAAC2FX74R5QAERlGRq00ZAAU4qFi/SO0AA+MA9s/LV QADPdFsrMl5AAFRLlwrg1EACBsA/0GtUQAGr57jnhMBAAZUvXQmvbz/+EmAXAc4OQACl55Bc 90pAAKUAU20fzEAB6o4w0hh5QABJYyClYnBAAEaxyYFtxj/8p1VhDgaAQAFPlGNGVN4//shU wtxoNj//7SjgqMaCQAEFwBYUeXlAAcwA1cvjBkABNCT7fC5kQAFKldkLslFAAScxtb0zwj// zhxZU6q/QADev17fuu0///MP8RiHqkAABCJ3TpN2QAAlaMDBqFBAAKQ9rKc9KUAAFunXBwl7 P/2dtdUd07w//uforVflokABbNyL19xGP/36HOhIFEVAAjjWVleEi0AAaP8FM9dfQAAmXkVQ 5Y5AAGKaAGOohUABcvA83spmQADTtx+tyV0//tmdv6NSbEAAzZiLCNEAQAIJAiaM2qtAAXGl 1u+s00ABSCcM++wOP/8t5eWxjyhAAJzBEVlAE0ABSoOHK0SkQAHZ7ML2gIBAAnCiL7y1ZEAB DA2ifLluP/7Npm/2jSpAAdy9juUSdEABFAzP/UUHQAGxMDQFEyw//QQBSJ7u5kABUvuRIswZ P/8Vb7iJobs//PaqgrSb4UACyMRqbwiiP/9VX1BPexpAAUHoUy3NMj/9aALUN0x7QAHNsE9N bYtAAJSMNWi0RkABEsFGexPdQACHzsfWybs//+psqm/n5UACNsI/+PTCP//FiKDlK8NAAhTF tQ10z0ABSfhqqk8BP/53qLS3ZcY//JcHwnwe7j/+RoE8HaZlQAF1E4mf2U9AABHGpZabE0AB 2NwCHXSGP/8FKRwbFGZAADgNUT88/0ACTwOSaZurQAExcd6+PUdAALD4wOUPrkAAPRrcDciU QAG2Y22JkWY//zLc13La+kABG3y2YI5PQAEE2zn83rhAAnxti6Ouhz/+eH0HKCcqQAFhnSRm 
U3Q//zI/KllgRUACLqrOMlkfQAKeIKE8tf9AAokZ7IeC9D//c+NqQVpSP/+IrifBXthAAKN+ XDztCj/+/F2tTQKWP/ycPYReIj5AAcZKpEFKXD//7vgreLZMP/9xeoVdRUZAABV5oU2aMEAC 0umnCbEYQAK6tpdNITpAAbGv5oZl4T/+gXl9Z/n8QAAfR+gKOiZAAGwxse6pe0AAMugfAJkE QAMMBCaZVLFAAzpH0PLCa0ABNZ5JjDIFQAFIWFiFk0c//w6AJMy3+kAAiue80GDQQAE9nMO3 /ohAAY5GulXuCEABg4Hj4881QACZoibmS1JAAF0aJuSGYkABb3mIHR+TQAFrpIaxc3ZAAFJT OddFED//DsJXOm8sP/52MWAOSlJAAsPwLfp69j/+BNbln0DYP/8sDwACjbpAAYrr3//vEEAA 00UFfZjBQACOuu5QHE5AADpCM+drYkABK6+Hwf8WQACnKfOSazk//7qSL48ETkAAAvs1i/TH QACgsPdzryhAAFMDdqRSwEABHM+WX724QAEV/1InP1lAAF0J1gJd7UAA+Jz9yzy2QAB2wIC0 c9U//6PUsV1Xbj//TQ6OFDN4QACSefqt+GxAAHXJyAOuGEABY7HpxoK+QAKF16cwAYc//4wO b1ULqUAAcjbRUORNQACZWtGOiC0//ZTjDKiIPEADFBHYW5WGP/+EANk16LtAAN5GU4IDCz// gG8y6/GmQAGE1sABDow//tYUD7DSLkAAQmuWnTFgQACC/veXiDdAAAv2wmCOzj/+TBm0xyyI QAIb0wGWmTVAAKYC3OvAdT/+hN2o8OJ4QAFnVeqRPoxAAFW210KOhEAAkDVDY5cNP/+edrJb m6I//lwpiFbdUkAAH6wxwokuQAF4VYRreTxAACr75ICJ6j/+UhD2FIYNP/2DJOaCvs1AAMKU hS2VOkABWSLoTaV/QAH7WZiVT6ZAATOshUgTzEAAYgQJj2WwQAFli2bd4lY//6J7InhiPkAB varmI0BmP/7VpC8Q2u5AAmhJp2w2kkACZZeTIIjkQAHI6CoGOCU//cEZ3lCRfj//kmJlW+2g QAB+952p3SJAAEKq1/cPXEAAzpGt11LWQABcrEMKFXlAANIDzoXEFz/+/06sldaVQAE03vnW kyRAAVMwYZkKsUACc4HTR6ynQAD63JY6XJJAABZScwj8cT//8h6ifcT6QAGAcdoR1rRAAU6N 0Sos3UACsgF3bztKQAHx4OAB50tAAdbHfK5RoUAB3J1PwUsCQAAlYi8RuXRAAPAUONMULD// yNJpRuz6QAB2fpLaukNAAIA9+ekJNT//1hluV9HCQACUoE6O5Pg//7MmZtV860AAUjJsNXXq QACFZuZgTiNAAWAWw11lMUAAdrMjPFJAQAC+3JIFUplAAN7hZsHWJ0ABAzdp0fgkQAC0WNUM y51AAVI14iZrYD/+/9QnvncSQAIFeOVx5NZAAU1iTDro+EACKNFGOYqxQAFvhG3ko3FAASFq Yzzk+T//MDYxHzgYP/95MaQsOONAAcC48A3okT/+LEIWi2eSQAGSr6wRUC9AADF+MvsUVEAB tUOeZN4pQAA4L4TE6MJAAaRKNM1oXUAAv9VNxCQdP//2t8Wh8vA//p6/jz4yB0ABqMRdvZv0 QABwJcrg8jZAAGe51ljFaUAAAOo4ycV+QAGEEgh3pxNAAhhEN9KeE0ACEVbUfy3mQAHZEQam YT4//ru3ZUccdEABMAkC7IcWQAA3a0srMpNAALGTJOdv9EAAgxxdChw/P/2tHjj5GxBAAV6B Z8aw0kADuNLBHCkVQAGtpI28ntRAAcublg0CaD/+lJSjHFtEQABtNoQNlZpAAjCoZ53VHz/9 EqHEEGu1P/w14FyOKyo//5rPQReEqEABWiTDLOaeP/+VkLQjnRJAAD5I3tl1YEABHH3DJ93p QAAvZ8WcddZAAK4Vf9xWWUAAcBIb0YLyP//0UY5ZjUM//kbad27Utj//76y9kjyOQABo7zk1 bJc//sz0FrEX7j/9iAnznEdAQAHgenWMVp8//nRpmHdRPkABKSzXrKZhQAD7Per0ZiE//q9K CAZPskAAtcegigIEP/9GOUqQr1xAAJIuyFy4XD//L/ao8Y5YQALjjfPhKuBAARvYitbunEAA zZnh5+nUP/91sb3K4jo//6VyDElZqkAATAYOegVIQAJdvn6Z1io//WQ5BBWz3D//VcPXT9VU QAGiHNADl9FAAAV8Fg55kD//QgniI3r8QAE1VtIRuFBAAKFAlKkaqEACWLLP/D+sP/01R6GY dF0//QA9riQf0j/9I4iIKaApQABVOHh2rcFAAJ+ikC8DTEAB4umrLuVTP/9OvxV3fB1AALSA FfQqUz//Lo5fSIb9QAAfI/Aa+ppAASOoO84ZLD//jHz1Q9rgQAA9/iHVrBZAAIYFhFFfuUAA giVkFPXQQAI9EJltZ01AAJ9whcS4QUABpvXEcLa6P/+j49O43xRAAIapyNdR+T//CR6iW3s6 P/83jJJoKA4//xIiV8L8ZkABMl2RBrsQP/6HoHBYYoxAAfy1/Qixl0AAh08EvyyUQABktnfg vyZAAUtWwxLCWT//EuT1yNvSP//G/m8AcFk//yf3ja3IjEAAGWV85b5TP/9/bAqrIhRAANeG 7HbS1kAAed2dmdlSQADmYXKR/yw//xzjOcHYVD//RiK3WHykQALdy9Z9yZg//e9hjS7kxEAC NLP+GrkEQAExdiYnrq5AAMuMf7/vQUAAY/EffoDnQAEA/aFgpUpAABCrEebGOT//rThBaTAY QAFdHjlHBhRAARo6d7lXgUADcW1N7i7WQAJ/ghsLrYNAAls0FjrM5D//vlU8qE4wQAA7Fbhg m9g//j/kLuhBsj/8tqOuGoLpQABZK+as4cY//9ChlGARcEABwhUzc7MOQABEd8meSTtAAL3i Pa9K2D//8w3tOuG1QAC++3kvrL1AAqnBRiMuD0AC9lgM+e+aQAE3Vb1RNQBAAkyr90W18EAB eywNQzKSQAAzmXJmYvZAANw9w4ZTwkAB221izw1fQAHuXkZm1Tg//vnALo5sFkAAToJDHGVQ QAAZTjvX8o9AAZEWe0wrjEAAZG9Sgg/+QACam34qUgRAAUP7eyFPSz//Z/9PLiyAQAHv6jY4 HNQ///qR1Br0ukABvfitmKogP/6A/76zPxI//lQYKNvsj0AAISAJxmbZQAAd6x0BSL1AAVMI 1DhmVUABjNmberyQQAEjgLE+7IFAALbq8v5gTkAAoL5Tw5tNP/+8cWu4FDBAAJSEJAECekAA 9pkJtw+QQAKlyOol0WFAAEEScAzBC0ABZEpJlSibP/uer0WabRg//dQJAdqN3D//lgYATbIu QAATFmnZYXFAAKVI1jLL+EAB0b/eaFMOQAF1OuXkEYtAAqHpIVcwFD/+wQZT6tE4P/3V3v+6 
RBRAABRI5frrsz///EE7wBrhQACYLUFfVl0//x8uSNyaR0ACHaC3gZDlP/71D3H5wsFAAibo IBzgJkAAkxQLLfsaQADihVWC1jw//9yAcY3PtUACEwxaZpYbQAJIE2Pcfk0//qBcdWHxqkAB pD8Sc3lDP/2/p9dp2IA//6gFw9wIfEAAPrQ/k4LrQAIBODuwOOhAAc0jMGrR1kAA1bZkIF/w P//wG4gb9VxAAGUtFW3rEj/+6XXLllgHQAB2fBItcW5AABAfZ1W0UEAARXFQsnmhQABJE/sc OOJAAIT4tIOQEEAAtLwAAUCSP/3uAB+2uII//YrnyAOGDkABkv0iMxCHP/7CEyiM43hAAVGK ttbkhEABHpbXdJ1fQAIAOJZshapAAaD53w+nekAA1ruSyT3FQAKP0QHDkzVAAAePfupmykAA ZwMxtnTMQAFaeAE73mZAAWhVXEt3jj/+jFOLNoR8QAFEv8npj2g//ttm2Ggahj//e4UUk2pQ QAKt050JRdhAAiFJOIrAHkAA57+02OL3P/3KtniV8S1AACIcaiaKVkAAdjanNpNgQAB1Qt3p mjRAAL8KNtHc6kAA3RXoSHfMQAIIMbb66+Q//ocqpCdoJEAA3Jt9fU8aQACAgbrujsZAAVHF A/BIREAAjqUPCgW/QACsEBz5pxpAANEarF+X7j/+4gEUBXrUQAFNShWcDzBAAFD/0HZaL0AA WV1QzOCIQAGpJPqPhCNAAN7mWwVZQEABs3BquGt6QADqSA0ucwxAADnm84lSWEAAA/Or4JpK P/5rK2GNSy9AAGF2UtpQhkAAyAPhhZCIQACwG5v5Dk5AAKqPLmyTzkABHQ437EpqQACO/rdn V99AAJFgXOZqT0AA1oFcaXPWP/7N/ixAM41AA5t0OaZTyUABFNpCpvwSQAK/Qbuze8FAAELs B3SDC0AAfBXpy9iiQAAlv/D5Dc1AAgIupB4uzj/9hKODRj1lQAKabjl6XUJAAGB2NrPIWD// x2K749q9QACUfjhd4dNAADyuhIa2NEABMXSd9dghQAEQixLoKiNAAKsxNqOwhT/+6ZLCkRsK P/++Ct9BtI0//6kGRVVEDkABdtyvm0uAP/9wTJfPEfxAAHETeji6XkAAHmAH52c8P/9t+tZt OEg//XD1ofkSLkABjVwuv+x9QAIPpRBIHClAAYA68SGpb0AA/UGeTpvyQAB5AfM3wLNAAOMa enUddz//lYyJzt+jP/2ik0+wgnRAAcsifyKRC0ABXPoFLGZSQAIWgvgah7tAAxyiU4bcukAB uBxheE0HP/9WqdRW+mFAAeA9GhNILEABJkdkETDEQADf9DEYhD8//d844FLGGj//iSyDRuZy QAG48wIY8GxAAMb03sL2MEABB46p2XjkQAEIsd5ZQaM//5duuMWUdkAAzEo7aCsAQADhj3Ca pIZAAHwIimyh5j//RtxRa6QEQAGKXmUt8v5AAgPtq6MYukABZM7nliwGQADM/ODDEEZAAOqV vFiQQ0AApZaYYwRMQADNANo0RthAAu/Nwr995T/9WSCQj4N8QAFLMdQqqVY//9lbjX1Psz/+ /K6uJvKYP/55braEkTxAAaKODaGzl0ABe4AmxFkMP/81fsdLITRAAS6QFBQZCkAAOFAw7C+l QABSq0z3y5FAAV7iE3sD2EAA7QWDziWsQADmDu3dGwo//7B+NjF6dj//mDXMG8+OQAB9SEW+ OTFAAVUmTmeZV0AAoFBATn51P/xwa61uU7JAAO7rAPKMDD//El71SOTrQAGUMMuNnulAAGBQ 3ur2Y0ABdT3sMY6gQADi+6i1VlVAAezXdbGxfD/+41m4j3OZQACZBN1PAac//vhf6lwcrEAA xAAuqlbOQADUJvgkoSI//Tsv2H7VukAA3xG4pLcjP/5DV99K0iZAABMgBpXjhD/+3fpt7Vzc QAC36gIRizo//w5LaoRN8j/+9h8hL4C2P/9PJCK5u4JAAoxsyfuMCj/9MttztOJyQAFY53yd 7p9AABw5qkmH4T//P/NAMNIeQAB5fmyEgzRAAKctBe40+kABJO9c5RNJQAA0pFNNg+5AAHF0 26jFukADL6CmgO9qP/4RShRop3hAAmsGhWLPgj//4p/SJHISQAAnvfopmdFAAOZmKJ5PW0AA 7IOUfb6VQAFxy/H6jXhAADr+41+X8kAAczdKIXP7QAE3dvC4bVtAANcO+/feJEAAZfalTyoh QAA51ffLIS5AApbQ0RrluT/9WZuOHtSOP/2QKBGtsOY//XkDlpWFgUACzcE1dOFdQAH1bqK3 vwdAAahDP1TbYkACCqWXkXf9QAEdSZes8bpAAiyyf4vwy0AB/UtPYAsmQAAi4Pxfjt5AAJEM FLJ/ukAAiCldRqMjQABxpMejz5hAAIhSGdHCnEAAwk4nKWJKQACjQGTJcUhAAv0zZpwyHkAC 6YyltYp5P/0ARQiJeLlAAE2hAuz0zj/8+Fyn3ebZQACvDHDZSbxAAKzljEcYeEABZGD4LrHQ QADCOliY+iVAAMLj5KBxK0ABQ7K8WmzrQABRKBfvRSVAAAYA4VwDAUAAU0B7lAYrQAE/66eU F7ZAAAfWr4FlOEAAvmK1nr99QAElFeU/EmFAABSvwdxwyUAATANtOvruQAIAq9EKRXlAAF+k gQIG2T//2N/yaejwQAB2cITEkh9AABm9PNREGkAAmV2gUcWuQAEdU5+YuC5AAXE1OJTVGEAA PZutyN4DQAD4SANo+8xAATNWqoa830ABcl+McYc2QAHvIgZJ3mY//2LOgLrhC0ACH3LB5UTQ QAE2QFbLZXtAAKVNLKkOnj//GWa74ZVWQAEK1RbZtVo//+CM1wf5FUAARxl5bmDuP/+u310L u2M//87eGsydBEABA2QqyT5cP/9UlLVt+bZAAGya6kBkJkAAQuv7MykyQABaX236B80//1n+ turWeEABLUtXnt3bQABnkEZsY1RAAKVsmFHB30AA0JeIEwT/QAByCYUJ3+NAAQXSgku5a0AB 6GTJ6vhYQAHoNO4uS+4//7BfBN1z1z//YmPQCYjsP/957os0wRBAA4R3FqjvLUAC2EE4s2L7 QAJWOmNDSEBAAkXSABjZM0ABg7Kfb278QAAb4ut5CqpAAiUDzcnEjz/9fV5rL2q5QAPGsWzt Uo9AA1K+zDJeiEACMqtsVYtVQACu02ewE7BAAgTFCQOQtD//0rUGm2sOQAMlyY80Ji9AAM1a TKFVHD/+1aqu9IR+QAFGAk3ayIo//9RNm2ZdPEABAGsOaOqkQAEaXqeSvW5AAnlnATwV0kAB eFBorVhIQABIUOO4V6dAASw/bIy5zEAAuKP6Ip4XQACd/TwFQVRAAOPqfWmTVUAAZIrY4HLl QACGUAjWU+tAAd0NYp+AdT//8Oan5eF+QACGLQO/rXJAAIaNAED2pD//4fQLBP6yQABE1F+p 
no9AABhHyuSbCUAAvaMBeckhQABvYEoQbz9AADRHyY/s6UAAXqxqDCicP/8/JT3ZDGc//Zb+ r6JgaEABgaMFpQMsP/+t+IIpN8RAADymh+ABDEABcOp/8Db6QABB6MLsI8RAAJpxzOT0tkAA IY/w9LlGQAGFdgeIY1RAACGr0WvLMkAAKnTKhq35P/9cLN305QpAAlPKiuQ/PEABu8bFYSHg P/36WIbRJsw//fpYhtEmzEACYFJVX6pqQAC2Igd4RyRAAA2dVPBlBkABGQrNqovVQAFM/OIa 8iZAAiq/XE5I7kAAdcLeXQSlQAEJL1t3gjY//rB6hJoTfUAAQYWYUb5lQAA6OAgurww//o7Y HXZA7kACSlv6EHfYQADaPrmaappAAa31U/iQ2UACGgmJH2XoQABrPCwXyQVAAJy0FSNV20AA ufj70SfeQABS+ayisndAAGYafVJpaEAABo9OvYC6P/6wVbeonFRAAm3Oj/jC0EAAccf/DVbw QACBnEWPY5g//t5pwkTsWT//kwb0wjt2QAF8kYMPJUhAASUgRrNTXkABSzJVSRAmP//KmosK 2SRAAJdqdBnulD/9qKTBRlnAQAH6kEFY7PQ//n7wXh9mHkABW8ocZvsAQADAoW7tiko//9fm h76+MkABlI6nDOJ+QADiv52zgvlAAaUqOoaQfj//wi4LH0FYQAEhz+y8Dwg//2UT8a6rgj// aUiTdCQkQAEzGCgEBLhAAFlxEFajkkABwoPZVjofQAGhmPqEtx5AAD/jpSfP4kAAFXv6XYkY QACqVze3t0pAAFqwgtadrD/85TcrzQ3rP/5n8+rxo8RAAOE6/TZcXEAAbBR7tuvMQADRt1GF 7TtAAONM6XCb7kAAyoc4LbqWQADPEZp8jcpAAnZt77iEQ0ADYRiLUk0cQAAHNeDsL5A//3/v 8x/1e0AAEV9z0lzDP/8k9IYc93A//6KFFye0+EABvIPCABaIQAHH2zLI9Do//fkomOCQ0kAA SOeM/S+UQAF0Vm4KBthAAA/gBY2KVkAAM9iyhg3SQABldE6NoyQ//9kmNnq9XEAARGhIDVwg P//gNptjrIRAAwRpFyNwyT//SzMJSKFbP//U58ZNmDdAAPpokMFgkz/+hQRBIlppQAG15FRB wic//y6zvCxKOUAAyL9dUYDtQAANpmUlJA5AAItRNalRjz//+QNnoHAyQAAksuLJTiU//+9g ib1qckAAc7sdnotgQAB7Oq9HIDZAAO5ueNA0a0AA9d/jbgXGQABvtM4/d8RAAPoz9czQnEAB nCl/EEceQAEL0EIkiERAAGdDdY0OPj//FbATRatSQAEEueKzmFlAAT6goQefQ0ABB5rGRKuc P/95HVUlhO5AAB9t73vX5j//w2YA02pgQACMziZMdcxAAc3plrBGVD/9uX7o8v0zQABxdhhw L51AAf7YWXVaFj//6lMO+5TjP//HWF1UpjdAANm0Fxl5KEABO6Ct2kzSP/4YfGH/J8pAANvj odrJ5kABXhCalqSmQAF+0mH5/Gg//yFAP4jQy0ABXEFJqoHyP/74Wvnos3pAADLxdikHkEAA 8O3kTDucQAE3USJRlfpAASOXWXSfR0ABfA84cf5zQAHzCmK1P9E///UIbNBEvkAAsK9BjJLx QACfBal7wc9AAaFC+Gd5IEACgC93wSNZP/4A4ym1bzI//tEODmYbAEAABEfG7M7bQABj4VyT qeZAAEwA2DC5kEAAysnpqeQVQAHpdBhVmRtAACEME894jkABupqbBc+6QAHT1IuXiPBAAyiM r6PmUkABH8JL/UGrP/30e6c/TNw//nn+r74UWEAAn3suzUJWQACfesUV8PBAAES+x03vBD// 9/hUHgdcQAFG/L4XCyJAAmd5Gol1vUACGSCFLOFeQAJtVEvrq+FAAtkDMMraTT/9fwmJSq6a P/60Yp5e68JAAViQH99gRkAArrEEt/BQQAJR0C+J7vRAAli+uiwz5T/+Ac/RM78gQADdUAX9 P9RAAFCpnVXX/kAALOU1cJrIP//7xBnfkkNAAUwi4lh1tkAAT7RlbxjwQABKiGy5tWU//quJ 1y3YjkABJns/JDi0P/zdD0gz/vRAAuT/zfNg+0ADk/mHnmHwP//iy8u28XZAASpK9maLhUAB /DzcGbAGQAIMyhG7k0BAAGMoGK6r2kACJa7/kRtiQAFYpjlfNCxAAg7Us0bJNUAB9TX+oZhS QAJDH7v/+GZAAfqRsAD3xkABs6LIc0ecQAHvzaE46odAAZrjmL6FwkAAbzzLLjNqQACFDEIr Nv5AAKkcYCmA4EAA5nPsrCt4QAGXt0+hj1Q//1KMAkCiqz//oi+ZZFqDQACtKC9wUANAAGgh xb3KQ0AAecIAMaE8QAA9BCT3ZOZAALFHYo+APEAA8ElbsG2IQAARwnHKWlxAACNpDCJziT// S+YW5YxiP/zx+ffF4alAAbFmUe8tB0AAimk4HKyOQABxuPV0R1M//5rHVcFW7kAA2p1wtgzG P/9+uDl9nvA//qGdfs9pwj//F41t6o4nQAEAi2JI6hpAAO7aebUoZ0ABb5uj3eo1QAGSJvlu dVxAAApmHr7tc0ABIWJpj3npQAEP5ibNN4ZAAGFd4XoQjEABcB9x2pqmQAJBj/mn+h9AA6Sh gj53eUAAiEygLDOEP/8bZRQKzQZAAGlqmAOpuEAAk5OlkWfZP/6xtN3GYgpAAQUDPJGrBkAA PZVpPpCLQACSZiK3rQxAAIVbjdmihEABCsDylnCBP//MMA3H6o1AAQyA0S/9hkABrHwdYSHu QADUnlOhzJpAAKTwbpXGBUAA5OhCZotCQAHIjOZgaSlAAXIFKo+4oEABuEfP3c8GP/6JhoF7 Z+5AAOVyI+EUeEAA9Avdv2NCQAHmZAVofU9AAESlC/GUwEAAXbglCLFLP/x5CfrJNqdAAVuu IZshpj/+uI3atmZXQAA48/oHBplAAS+Jrpfv6kACKz8mHEtrQAF+/Z0lnndAATP2gFtmmEAB KjMAeTX/QABFF1eA0UFAANLOd2bItkAAZnqzrX6bQAArMLNWfRpAADx86JDtXEAAy+q1z1/6 P/+HLB2HTdg//MfqXRiZkT/+t80JsgTpQAG9kGOhwlE//tpshUqvpEACC2mwj+1WQAA3cAg/ p7g//7hQlB9o9kAArQptifo8QAFJPEXmG4FAAQkp/DQNgj//U3dH5YdMQAEDKJaiPZpAAl4g OHO/XUABcWZ6M6RSQAFwJ8LKpwA//13ddUJEmkAA4XcTTwQvQAEUTLLjbxpAAb1gzcmfC0AC WoVKsjJjQAEY+KZ6IWY//5aj7CQ3/EACAyF+VlZ7QAFPgdSQZ8BAAcgnH5b/9D/9TCFydr6O QAGMxYosXwdAAB5wxGhGPT/+YiAmRVlKQAMAUfKD/R5AAEW1jbaN30ABdJEElqkiP/5hSKpH 
ePpAAdbhZt5FGEAAjrpoyfDJQAEKERAYfcpAAKM/+Jxa5EAAU9058KuiQAIlacHJeSNAAAPu g7yhkkACPbcpAH0AQAFdwoF0EMA//nDrEppgDz/9Xaw7UxIUP/6gNg8s+x5AAgUWoy2eO0AA omBBBdM8QAI5k4RjgkU//0LsLPW/qUAAhLMmRH/qQAI3pJLfUVJAAZZ2v8/ovUAA7cJRlrQm QACXK/RZt55AAcRZUEgs4j//LbCMqe0UQAFCLUdWDNNAAVxVnzW4cEACuc97FsrxP/47LF74 vThAAeA4VPoxZD/+peo9GbJqQAKic7QftEpAAqf7hIgHH0ACrbVKZMPAP/9+Pw81qE4//8El GN+KTEAA9Yrcs5FeP/9wHb6GPJo//cA1jRVxHEABzGMn8RouQAAOxooZt6w//78HEF0/nEAA DxQhrYpWQALc504l1BJAArz8Q0d120AByhmDVUwVP/6KXX9gBXZAAGQ2DglhpEAAaLNraC+A QAB6jVv693pAAqrR6sR94kAC9qNwtDWhQADP0/TgpxlAAX4cNyy+HUAAJnWaczkzQAD5BQae 0TRAANVqmNOVqEABPV2Seg3rQAH9M2Qf2i1AAHtLU1U7pT//wbrHYxdPQAFgz5keSiNAAV7N 7fVKlkAAslv5CM3/P/89drDiTnA//qZ5+ELhJkAC7Y6MFwaTP/5pL0ei1AY//v2Y3k3uZEAB oxh7IRjQQAEpBoVUrj5AAM8aFKN9xEAAgUGK39qKQAFhxKr/cqxAAKizPxhmukAALF5mFv4B QAAOBhWoVcpAAOMGcwXD/EAAHNueK+0CQAG3CX/H8M9AAQzMOeQ9jkAApESsIgH5QAFbR0qx l9NAALP2iSAazj//7IxFkk3PP/+C72rnai1AAPM00//afkAApGtvIWpTQAGlDaigpPFAAsOI mI0t8D/+sAsph+x8P/+cKpUwhBRAAAHfBOiWZj/9btSiITbUQANX/eQOvqI//7NZ0uNTPEAB S2Zt1IqjP/9r0X6M9BhAAbjOLNIeDj//pMxnPNOMQABkdEnSRANAAQooK1W5VkAAb2ziWeZm P/8FpZWofH5AAhTnC3g3dkAAZn9fMB9jP/6FFgrn+N5AAZdcEf1CaEAAVPFbJDZRQACV/BYX xF5AAADWSYwAVT/+Vyp+RR9ZQAAUz5PuE9BAAW4gUfM4dkAAi5HsIyq8P/7LQR6Xgsw//ePY 621UnUAAd2S8D7EcQAGSmjwvm35AAiZD6FfvdEABQY43FUQ7QACMiGBw1sJAAU6fjzeB4EAA ZwyLZODmQAHSbYSUtSA//32JyQaYQ0ACyl4NP7tIQALg5adslCNAAkfj2NJH8z/+HcThf8fA QAAyBS08qodAAPHACqx2YkAANj0cm+fjQAD+ky6eEOxAAKoyKPQ+CEAA47DbMd0UP/7u+AE2 4hJAAYwv5IHWPUAB1s4J2ruiQANsoWqBjZlAAVahKhKKc0AAECc96U3xP/9q6ASW+X5AAmHO QLROp0ACMF3uBqG4QANOzWtlsE9AAnNjjCaxM0ACNgvt8L4EQAJQHwWjpnlAAExgdhCkk0AB FYZEmzjMQAAmBPIYCgVAAORUSBOOQEAAxBnXOZXEQAAzo8r99JNAAK4YUY8XbD//9/tH5FkO QABqqkeWdkdAAIxP2HU4z0ABgCoy5rJNQAB73o5uS4JAAJP3rvXiOkAA8wrEaltPQAFHPcNr H2lAAL6ty+H0bkABOJ5CpNUOQAAASznPpoBAAheIl8J/h0AAqMgNUUEBQAJcUGwDJxZAAaYW 8Qm59kABXi7RnvnWP/+lNIm0B04//9Fxdb3Q4kAB1E4ZAW9bP/6ftwOYPQZAAcvS8cpbeEAA STlhpMGZQAHfOcHWLINAAMECE3pxnkACDf7FzSOUQADuvelOUahAACqlx78cuD//RufA/FoY QAG+/RrXvqpAAMkZ+s6nbEAArmOHDcQ9QAAZ26/QQPJAAbf/khXBGkACPPZIpgKQQAI1AqVt NxlAAde5+Gtf5j/+vDlHHDIlQAFhO0XqKNpAAHgYd5TEH0ABH5sWTrLLQACa1Zk/YIY//kIU +ZyBmkABhhq8hwSUQAOFY6R1uH1AAdRxmeBhMkABuomojCV6P/6vuII+jzZAAJ2RLoqBvUAC SyNNbb6aP/2280eMd+o//JA/nPdB2kAAHkUq7TCsQAGAlgfcBCg///kJqpfFZUAAwwOO6SFw QAES4A93lMU//+mJBG6XX0ABPJ2wThbsQAA9Qm/iY+9AAC9yWX2Zaj/+ilzItRp0QABnvc2/ BU5AAGgAiUJtZD//ls0hSiHJP/2Jii9hjxJAAgcwiWO+5j/+aQ2KJinyQAFwBZi4gzBAAZ2U o5oY9j//byP6TAxaQAC6OSImstg//uZAU/d7ukABLJghpQumQABOrQLVKl5AAtGECfujVEAB HzzkHrAKQAEpkZ+80ZA//6r+8sXQkz//tKPankDZQAAs8rlxo55AAmWQSXU4Yj/+I4mUsTt0 P//2oH3g2F9AAZEWzRVd5EAAYJzSBcEPP/+3h9qVAsxAAVkUbw+66kAA36FCbfqCQAKdGj3B uNQ//UHfnseQNj/8/DT86N0gP/y/F7WkrR5AAKSfLyIS9kAA1iqQYefdQAIAeBlXke4//0lf 0hZQNEAAzJCS0ORSP/+pa0pI9ExAAHOHICRbPkABMCPA2+QeQAAWngA1JEZAAMIi4ZUYR0AA 0TMPqdF4QACoKaBGRVxAAmlnSdmaJUAAtSOObZSEQAG4SNLohtJAABDk9McuXEAANAJhetz6 P/9WLp6ZFE4//veNMV607T//NV0i3k66QAF9kAcLCUM//weNvdw1skACPnp8wfu+QAAp/7kL aatAAFxK92uW3kABn+M44pwWP/7ib15KLsA//8MSGgy3+D//q+vuxgw0QAAGymIH23c//6/c RDGgZUABBT8sEHOcQACsyzOiYWJAASbpL/TBFj//USaTC8gCP/8+O46eSMhAAyILok4gnD/9 Oqie1No4QAIIbj9QrWpAATsfes+ef0AA5xwqK/JkQACFqnTOg+FAAQ5yQAWUykAAEbAWDrut QABmV498OtxAAW0MFHdNe0AA4eP5PX84QAQYCheN5XhAAuk3eT66MkACmn3fOKQFQAAbGM2F yi1AAFadBX/BLD/+xpEB2LJeP/0zYhRrpW5AAMePduCcNEAAeTv+BuUoQAHuqIVzr/lAAM6y yWaRnUAA0StsWTkqQABvOtaqfEJAAKfm7yfS6kACsKYpCqyMQAMQLlXow9BAAdla6c69rUAC cVhunr6xQAGxT0bOR4xAADZpB02uKkABC4gIr8WzQAI1R7Tq3hpAAijS8UMQFT/+/ZWOlhc1 QAB32LsVTBNAAHwlKs7V+kABo5tT2BmqQACVuZVz+X5AAJBoSDJDJ0ABaO8FKi2aP/9Z6kde 
7FJAAcl6cqoaw0AApOfwExcdQAHz9nUs0Vo//bFm3kZGjD/+oqCq4/ZBQABqF+COl5lAADVf hWxmgEABuJ+2s5lMQAINyqrDsKBAAQz1tnX2fUAA8dMr32E8QAEKYj3WoihAACj4nbi5VkAB E6GJSrzaQAHCGZBGYIZAArOlFNFp3EAA0QKjgncdQAFmfD+u3r0//Iaru9If3T/+zwF9ZGro QAAGLChypJZAAEJ7l7se5kAAwut0I7WdQAHya8XYLEpAAVgVlZtYVUACRkmac5BYP/9tIona zOI//dpUCRbRoEAARlrvJDcnQAAWlaHJcUlAAMMYOgG1+D//ZwI4psFWQAHXzsyIIi5AABjo 3iVNAUACkuXSZ6aeQACj0NmOEMZAASPOrSbfET//qZd+lRGEQAKfwWNb7gRAAsA5tfDmEj/+ cWuthqycQAHjXkOBIEo//7Epkeq2lEAAhCdFOjy4QADq0qi0W6VAAd4ZjBYSn0ABtbZk6hco QACiZGxp4xQ//5ME4eYaEkAAKVDtqVybP/8kXi108I1AAMf/NZYWvUAAfa/Ne9SQQADN6UbK NM9AANlNSIL39UAAes5LnfAkQADlnAsKpVI//pQcV0jJ1D/+29YvWRnsQAF4DiRa16RAAAA9 e0gl80ABtly1l53QQAGk8hbVhg1AAapniMcnkUACSYG4/mPJQADBS4W8/mhAAolIOX8BmEAA TpeAt8j3QAC0C8ZmCRtAAXmd1wGHikABfQNm6emKP/7MUW4f4DxAAX6MAU1PJj/+rE39q4qc P/6gnko/a5hAA3hijVusikACY6y1/lrUQAE2LwA3TKw//xLLyXmkMEAAKSnpfMfVQACJG/M8 xURAAK2rQ4jWJkAA6TW0i7dzQAEG8QrwF4xAAfi0uA5HOj//rxBKTl/qQADD7pXsluRAARgY MWHJPkABMl1WepS3QADn+8sjc29AAPY7JiOEikABCwfd274fP/7g0aaMkGtAAUuWj24tokAA hwlP+57pQACR8NdDAo5AAblb2mbAU0ABCD8pZMoFQAG/5VXelQhAATZfcLx65UAAcPvLecxy QABcW06Dh0Q//0U6pAbe+UAAx4wLMTiKQAD04lxhgKVAAJ85R3wYckAA3KI/eCInQAGRYgqC G2NAAMEQ8F5CBkAAl4hEoNncQAEFmzV8a9I//vVc0aYUPEADuCCo/UXDQACpHGbpVDRAA0Y4 XomeZ0AA5ByYWIxgQADaMzxT8L5AAHeZ7TN6DkACRLLr+OsiP/18THofvmZAAvIHGn2jYEAA lYl6y/kIP//xcZbT/fxAARjW5VFIvEAAstPDVW5aQAF2mrtScMtAATua+e0LeUAAse3zq0pQ P/72ZbgdAmM///mSYdjneT//3H05vuU6QAHgoRC3ny4//0gV74wa6kAAV7UKCWYaP/98nnyu c6tAAAcOoYQWwj/9o5WmEIU8QAGvcm82uYZAAoJqEAUrwkAB6PJ4l5R6QAEeyRuaqvJAAJZi FqeGQkABDKpJJwpiP///+OHheBo//oqpvAUbeEABzkXheVHSQAF21s1W7pxAAga/JEIHJUAD YeP94MzOQAIfxL2pgI4//2LzkaYCD0ACHeug72fNQAFE9/f+lpJAAcc0VhSxVj//XPL5udSG P/+f+ZlmEa5AAjkviVRyKkAAoLwOgBjDQAEE6qZwHiZAANAyHZB5zkAAJAc5iLziQADaz/fJ +NpAAPRl5+to7kAAxgy9fo0wP/9ES+TtC45AAaVwdqI0kUACKOj/fo7fQAEpqVf03cxAAKxg nCuBA0ABSfIlLNImQABub4PqOWJAAWT+55M3YEADG9VjrpRQP/2mivxhS2xAAYkWXhBQ/0AA CjLOyYgJP/+w2Ml68XI//oLgQeVCVkAB3UqiAeNvQAGyFQ9dGOI//4K2/6CZvEABYC+8TOYL QACr3/GbhgFAAEoqFhSfwEABfyPwtg2uQAGGNMlumoRAANhNn93KDEAAWHq+AuZMQAAJA13l rlRAAHhHYRP9uEABInaUbok9QAFUle/v/Zc//Zpyls8WjkABO/syAGGyP/8oPpZbF05AAZoN caIbyEAAaoBSxccOQAE/4pkmmadAAPabz+FkbUACOU/Kvs4OP/6zkcV0Jo5AAKhTxWpoTD// uU8RoR05QADsiPkXDyBAARzsvv0mbj/9wwBQYiE+QAEAOS3fQQs//pP9pbxyuEAAQ8iNlbMP P//FebT+YLNAAMZ2DUmLZT//nLmpLJTzP//Fje3l6MBAAE2SnyUAPkACl+vo9dgVP/3kff3N 8/5AAXdje3l/EUAAOPLVXdkYQAAMMuGYS/ZAAG0pHgJyT0ABEWPOtUAYQAE0QCNl+c5AAMnk JJy3okAA8IHk4ihmQAM+Ke7/mRY//mcoOzb9DEAClWQBHLGOP/+bi9AIejBAACZCdxzbQkAB LVxlC57cQAE1gOB6+VxAAZNdosD9nkAAZ0iQJlmQQAC44kIGEapAAVHTYicoOkABN98sUNXM QACpx0yRXVZAAH0zCyMUukACtrKlC5muP/3OKdR6D2w//tTVMOdEdD//Gwf/leyfQALG6b8J 8iRAAg5N1fbjmEAB9+7Oe08HQAJvU2ZhUJRAAWEWlRCs1EAB3FkxyG+PQAHgPm3SXsxAAB22 WpoKNUAA7LA7BA8eQACgE3/V2CxAAI82ICYus0AAqT/spzk0QAB8ikK1c+5AAUYccBbPiEAD BWojcsqAQAL7ZhuUAa4//bY30oIKmkAAemYokg7oP/2zQailLnFAAR14kZMGBUABTssPo0YU QAFNQUxOUyJAALJoHBwEekAA8P0A8xkEQAFGGSJdcoJAAGLS/8gwlEAAEPe9ELquQACoWxzs urZAAWCoH+4oW0AAM0QxxccNQADk+x6Oyg5AAWPuwPu3Yj//upDOddhvQABssdmr+gBAAob2 Na++bUAAqBq08MHuQAA6C8uSTvdAAIdlTWvCoEAAMiyUovC6QAE0CYTPALpAAO2+TRFlrkAB VhF0avObQADU/Z8rTVxAAVGAm7j2ekABCywXQkI3QAGpaChxM55AAd4vacWPjEAAS8wwiF6U QAIVaCGBbDxAAe8km/kMrEABDLC05k/5P/+CGzgh//ZAATyAvSQzqEAAI4lS/C4+QABsxlcp zQ8//9aF+27CMj//8mBS+DeSQAExadBbQAg//5Ttg/7iiEAA3gWLFAv4QABg7TG/5X5AANT/ 6KeUMkAAD8ZwHKNyQAEGWzRRUTZAAM0IBZrobEAAuo+vhO4wQAEchuR7aE9AAMxYzvysQUAB iWT3Pw5wQAIhPOdiCR5AAf9dz10b+EAAmBcPuwpsQABP53BWdQA//5sLEWpjK0AD7jVmqPGc QAMV4vdk9XVAApBxC9ic40ACjgVgb+6hQAGnH64uVN4//0Xi6vkG6kACa1r5QalcP/8Ks/Oh 
RV9AA8/JnAjlh0ADL2C0b6GLQAJ5qwhJ2PpAATD/Zks/MEACe9Anq/PnP//7TgIUpZBAAzaR TjJyYkABFCi0ALi3P/9VGkemdrRAAVX0DU3jv0AAQSayckTyQAENeA+qZTFAAYcTgFltukAC d4ytRHeIQAEqrDmRaZxAAS/adp+9z0ABLILtV0qEQAC4UoQabSZAAQQnbUyPJEAA5TDdt8II QABjspmpdtZAAIbsN9oE70ACVvyg/vuuP//8SHw6J5FAAN6ht0EgqkAAlUHegiXEP//rsACE 3bJAAEj5U+3a/kAAqSShEuR8QAEcww59FQhAAJ/wokKxfEAAXgCNLxEpQABaTlonXF5AAAIF BJHD0T/+n/ZjrYzcQAGeSLe7nnw//0nDchMI4kAAMUR4mNo4QAFu9OREFg5AAMntjlAb4EAA 96NKseRyQAAl86gInhlAAaLAS/+Aij//rNkUBTsMP///lYwh+bI//zFmbfQBqkAChKw8DAJ5 QAH5CBT8S5c//t1Ce3yVLj/+3UJ7fJUuQAJ1dOgX9zRAARfzWSCjNUAAC/WudSvIQAE/jc9z FwNAAWi8xBQRiUACpTGZ274hQACI/91t8zpAAWuhdbm8wT//qK/APSRCQAAVG+tcb/JAAL3U qpXQ9EAAG0iZ4wO4QAKLoobpdSRAATgQCh9jGEAB+2uFErMvQALN5niaiBxAAGT3FmtInkAA /FT5a5JNQACMVwodp59AAGymKbasxkAAkNl6cqtoQABA622cIzw//8POJQo/CEACffw78TW+ QABc9dRjmZ9AAXatJuONuEAAPLR7CiL1P/89622n2YNAASI0z6Q5cUAB9WOVupl1QAHXtQkF Y5A//+Qgg4VTE0AAaBiPVLZ6P/4fb922gZBAAkzIqSielT//F5+xE9dMQAFlkvYIzGBAALsQ /eELIkAAWaY4AR4sQAGiL+NfKexAAWFxLJP05EACTnHfz+gqP/7uliFoHpRAAUwzZramhz// 1wJA2NiOQABFKYMx+rlAAWxjBEa1IkAANiZVTDqDQAJThXMWJktAAfzubUk3FEAALc61Wb4+ QAB3rgosaJVAAMe+UvOsqUAAhjbUTgpQP/1341i4RVA//4/VNWqdTUABGk50KhndQAB5QMX0 MWRAANbvwxa0ZUABdwMVyB/yQAC2N2C3BghAAPnfmja/NkAC1Tff3lGEQAQYCheN5Xg//92r 9Y1Ngj//sO7yTUoCQAB9lYPLMO4///B1F71WRD//csE88B4uQAI8pcVxd7VAAkADlIzG8D/9 pE5DGlBSQABZf4C7EpRAAU1qKlxKwEAAjosJYbGiQABC9FgLDYZAALVE7tO6oUAAHR2d1wyf QABUHazjcyhAADBAfv8xOkADCrJBV/eAP/9zxlgtT98//4qmDzl0yEABdRYzjnZuP/9Ib9JE EaRAAahuHkMNUj//+wQnf6qlQAC8qWCYo0VAAD4mg1KaykAAsxHQ2y+KP//8Pj2Ia5lAAGw0 DrhNqEAAF7TGhyZdQACJsJR/pApAAJEUcQItO0ABD5mbwhxCQADjJqEkaclAAOqbfxK6nkAB fCm91zjgQAGZI1XjDqpAAQS8MQ/lA0AAeW/NOWkcP/+pgJZqBYFAAVKIVMltGkAA19m1X3N5 QAEUq5AzUVI//35JSdX1iEAAjJAYsEHtQAAtu8j0DVZAAKAk6DkaV0ABz2wQSVmqP/8k39Bz fcJAAIauaQB2I0ACX/TLRewDP/+3ex0C8pxAAEUwb2dsyEABiRF9g/6NQAEByZaeMaU//ez/ y1bsIEAA0amObqgaQADkLo+N3hVAAVX1DY9tCUAASCQUfItEQAFQyiFKU6I//8XROndiqkAA kDByoYQkQADmFNnAFwhAAWcAJMIAtEABlfQo12cYQAHW2GvrkSRAAlU+5pCtIUAAY/EKZ8ib QADFtajswEZAAOpdEbCRkEAByTAOqXjeQAVmzmLBAsBAB2xbY9ZRLUAIHfuBMhWnQAp+Ir31 9J9ABEWcurLWPkAEd0w87P2GQAVFNRVK1vxACOiE4S6OYEAIpwrOTLZEQAcidXcrx2BACVmK o090tEAPANPyYxOOQAN0XyAwVKpABIisNrhepkAHzyACKPYyQAcbbWQ/k4ZABNx60RYXk0AI /YWE7FtjQAqaFi6j0oRABLxQy94lWkAIpT44DMR6QAkbMSHmxvxACCJc70mN+EAH3dv+srA6 QAgLhf6dy1RAB40dsYHADkAF9792P94IQAbz9UdphX5ABbxDCd/Q2kAFeIjt4JusQAutdjRR JHRABnzWMY2erEAGxTfPUE46QAcRBGNXK0VACnNwi+5IgUAHcSmUyiOHQAZAX1nRpLlABjyD j9/0OkAGjvCiiG3yQAe6CFF0QCJABilK3IDe6EAHJ3mQd2B4QAgL0By0HjhABpYv5Zi6EUAH TAvfr7a0QAgqgbIO17lACWyrt5ZT0kAJwgPPqksdQAqOsHC02yJACrHTmhVDcEADBsrOKxgE QA5KZHbJqMdADxuBMiYkKEADdLy+UgYAQAMUalc5daRAA0hsvnkmCUAM4p92mKv6QAuiZeEF ci5ABVNTbvoozkAGiJm8qyxGQAbtWH1G2CxAB9Fx4IhYTEAEkIbQ6t/GQAWjnV153rdAB+Nd biLl5kAJVJIPlZl6QATSPYRcIJxABIxAMfSkTkAK3tSXs6WaQApLFowBYuNABVYomsE1ckAD tlJJ8JugQAU1YPD9mM1ACPTSHqquKUAFrXyTL4ulQAjLOUxpsrRACJemSbOm+EAF96YQvWqM QAUVUVZEe+JABgbzMvEHBEAHn8xN3+o9QAebi8I+ePFABk0iFtkpXEAHA0z52awsQAa7rBmU NQZAB31i4qyfFUANWKG0A60HQAQgUemzx7pACqXK/8XAHkAGtth2dTPUQAWLVVolKKJABtaL owxL60AFs20Oc5h2QAb43ueTF4BACJkkOWLMhEAGsCqe9XLsQAZtXUPdpshABzzZkLbkZUAG 2S+iThSaQAYGJ/1nUhdABd2FlEqKukAGGwG1NAZKQAgKL2QSYZhABJbA4hMtlUAFVkq6iE4G QAg3s3njGDZABHrEyIY9eEAEZaONVgh6QAoY/05HrNZAA90rUZFaF0AD7YzfbpKcQAZLBXoc bXtAB8RnqeoHh0AHOmuomt85QAexQ37h/hhABRP1UQn1kkAE8eqHJ9wQQAXgFpeFQoBABj+f vmpvZEAFVEMstrroQAWm2CE3mXVAB5yZ3ON45UAGRyVS/H1+QAh1yj9uz75AB6RIFJaC/UAE 2zq1mnZWQAWCW1NCBzJACIOk+HtJnEAFCai1orAFQAjsvAFCv+9ABuKhBA+EnEAGWqBG3gGO QAaQJdHLsVtAA3mzlVDA7EAD2ebzrZhoQAURwnKPfHhAB/HhnEJxnkAJULatSiC+QARVGPfS 
R4ZAAEf934uvqT//0f8uS5nmQAMLcdLkaGw//1FU7TIezD//8RG6/jTzQADX/46s9fU//mpR XM9830ABwBAiz9W0P/8XQlhRa9NAANGfuaM4rkAACif4p2u2QACJx3jRSoJAAA1XcS0ZtkAA KIzq9cDNQAACHStIGChAAIOnoR51aUAAfu6bo1XzQADtuC4nBBtAAP2VJnHK+EAARd1FVodM QADBdl3F571AAZ61C2jc2UABElsfzB/QQAB5LdIugAQ//yW3GFu58EAA6+LrMjmEQAFu+d9c 37FAAQixidKFdz//oNY7VZ7DQAAVQP8Ll3o//7K+QSjVjEAAktteh/g4QAHPlgEXcQc//Wss 0hPoRkAAcqf8j/3hQAHVHZi1q9w//+cI9X5lMD//iRVE+shAQACKVWfkdMZAAVbVxhPBRT/9 94osokZ6QAEFFqR1pCtAAd+/QaARbkABpjHKCKyEP/8AyrwKuvhAAWbkdfiyGD/+tP2963SA QAAqt1ObxXBAAPwESGmrSUABL4jdq0grQAEPY5BftfJAAWXN6NgQ1kABy2tv6sUYP//oMqyk ST1AALkNKoZAxEAAh70qGTJEQAGeIrJr32dAAwa99W6SNUAAx8bbgjvxQAFPNCBP76JAAp6U QZNeKkAAu8gmFeS6QAC+iu5p8rRAAYHKPqHV10ADonDUkUiIQAKxAV1hS9pAAr6wz5QpvUAD 3pHY4XNcQAZp63jGOxlAASqnqN1Lcj//dq6ir5LdQAFGZMlTIg9AAdrk+KWvBkABMaqxiKHx QAJUILbOq2BAAqKzK1EX5UABsLHyQfpVQAT3oIWUGCNABM+3ZxptpkAEL7E+M1eWQARHtl5B sFhAAHWYsMCiVEAA0v8wDkwOQAJbucbAyqVAAgWCN+Bf7kAC4foDyoz1QALM/9A+NMZAAkJs Bc+5fUACRMOC70S4QAFbk07nEXRAAWdgVUnaCEACI3sEddhUQAMaKIXqLw5AAapnkfSnDEAB jOIfsQR0QAB9zMhsQMpAAoZM4x4dBD/+9Xc75UaGQAQaRBo9SX5ABSmd35ENdkABvYzlpPBo QAIKRSxHpNZAA6tDE81Y+kAEICfagCUEQAJ8oKsp6EVABGdFe//gIUAEBu12a3Z4QAHCiehX Z2RABjfi7pcDK0AHEqaNcnzUQAHVsdLhzJZAAX2ypQn04UAButY+66RkQAXusFs2jpZAA5/I stRc/EABQDcwJHlfQAH0q6yF4iBAAnOZiECmIEADWSWNu0aSQABUAjSU8eZAAN/8Iyy1dkAC F014VN6gQALPmVsLK0pAAP+pItNhuUAAqsn561+CQANEnEQGS6VAA11da/hXY0AA4KiBifQg QABi3x4oMmxAAHynFMV1ZkAA2MAJwaCHQAI3pW32L9pAAiHaehS+e0ACB7icUalOQAC+yG0b LaJAAVdrx23Ps0AA6M9khwKIQAFYFZB6BARAAWMVhzBfk0AB9RgFaIz4QAJPq0s7PgxAAp+/ +oeu+EACmx78PFcNQANEsszJsRpAAWyTtqvMVEADRx2+Lch5QAHkc23PXmpAAg0iyiB0zEAD MgvPOjz6QAPlHx7mm51AAbvfTYpPBUABRieMD9+OQAFap62MRPpAAa9ocr46XEABCSyMzZTs QAKI7Uqzl8pAAZeRY1jFxEABty9MHqtwQAFNEK/knhBAAkPbw/VKYEAAgk6vwTRIQAHu2qL8 kohAAyrL0IIE3kABV2GEe0rlQAEsPrBCyQRAAvMPaiTZK0AB2JMmGXs/QAGa5zo9kjpAAqTg CRV8TUAA/Lprvuv4QAJEix+uMvhAAmFK1TANKEACTJKyIFBOQAC+S5L30g5AATU9IE3xvj// C+hSv8yyQAHQAIeCeSZAAGiSO9tWjkAB9rrVfCv6QAH3cUJ6ZXdAA/yq6E1e50ADA7f3uwLp QAHZwNoMzB5AAhBDNR5YGkACOh4Kv9M4QAGEv7NmFZ9AAlFY5E+DUUAB1pWOunswQAGtu6NM VThAAhMTolVlBD//z9ESa19+P/1OW4hSdTJAABqewnHX3kADe9Le3dULQAE8aCcefxZAAjVG EDykYkAAgC4CtrhsP//3QaC0HThAApj5lz4QqkABdVChlQy4QAJG4dPouwJAAWxo0z70q0AC PaLIrsGmQAPOFivSEUxAAfackXFiXUACat962tkEQADV7DwUTCpAAnNjFu/vbEABdxF2nfdA QAKKB/pEXMRAAw+cQ50UjkACG6xOm9eYQAGRYBKjfmZAAu7PekHBGEACp1TOxW38QAKb/zY6 GjxAADJ17Er9/kACuBLoM4M6QAKfmVhI9eBAAanJwmRO8EAD97rR6lU8QAMzwBc8CWJAAneO aUO3DkACAC/byqXgQAJvVc10zfBAAQVAjBczbUABumrFYMf0QAIFipmSr8ZAAeQ2zxT86kAC vdqnmnexQAHKEwuNlmhAAuFeOcNwYkAB/9O2vUeqQABAb/6yRRRAAEU1+qdKxkAAsok2X5+E QARrl/PSM6FAAszKQLLaCEADwLUQ/h1pQACgQwgjc+dAApmgnhyrF0AC6UCALIWnQAO+3hMw GU9AAn7NF07/KkAB/L6gRREkQAKA9kxpyo5AAMZKEXt0EkACljME6oquQAMVHkpNBztABBdi qto0NT//1rA8lvBTQAO99DSqmeI//y2stxG8t0AE5rCTC9sEQAMVk3SafypAA3o3FC2sm0AA mXXSkJA2QAD8rtIGaGZAArqAVrk8ukABQUvjxMTHQAE+7aj4ZIJAAqFLvxnSh0AAl3vouBTc QACuN+9Kx3tAAVN1mJx79kADSZl7MLJFQANsRy24QMpAApoRTL9GuEAAkteFcETTQAIjQx1j PBRAAUbjEwIy30ACHWTBBTOyQAJcOOo8kBxAApaBVTGpX0AAluzi6CVaQALPxGTFwRZAAsb7 ln+2zkACwCszSZyuQACX91HLfEFAAR0pELymlEADadscGAZSQAFKDmfKG7I//3nG5LOBEkAB 72aJa3npQAHrhjCcnNdAAgeROeTYRkABJzSeLuZrQAEI6LRGMppAA0gleCWTvEABfK/fVH29 QAB59AIn5xVAAlUGD9kKQkAC8NycbZKSQAHmQUtxLFxAAivicGSyRkACls9uTwpNQAFeEdt+ q69AAehL2Q0GdEAA85nnPR63QAJ+uyS2kqFAAEAx6z3jjkAEl2owBFuPQAGx3rv9eB9AAkFF pQmoVkAC8t5iAC/OQAH5hpc5G15AAUSNP3CZpEAA6nml+HlUQAMrZSnS3IZAAaJJumGxUEAC vAQTQBQMQAPUG8wMboY//uVv0amXyz/+r//6HkzwP/84nL2qxKRAACEMEmaJ4EAEFIvGPKpD QAEiaWBtmqFAAxLkDZOUDUAAtkqzbGkHQAJ8vN7Cq1pAAnfP9wDGdkABttQsESB5QAL/gcZE 
1XZAAhR23SP8fEAB8RpHnlyRQAKVIvE3TWVAAI2USxbXM0AAOWSiHT3qQAK9iMVlMMZAAPmE kTd+IEABMnPAUrEQQAEozrerIAg//6cKqEoyRUAAluC+uCF6QAHuQ+w1rnJAApQ6aul2ZEAB jXFPsxq3QAEnhDbo9hBAAHgqITTClkAClDH+PhkMQAMQaowOuqpAAf+0yQP4F0ACQFMxaghw QAGG0i57UlFAA6QDAW8lxEACV2VtqcfRQAJGUaTuDuJABIe1i3TTrEAFAKtm4ksuQATInPOE hQxAAGY0YgNNB0ACLExNtIg3QAM5iROVEIxAAS1/gTAYqUACilp9HQsYQAGyAdJlbTxAAY9Y /d05L0AAp5XLCrvbQALbA3f41OBAA5uYrcHJ8UAGrOqr9VTBQAKpk/tn/IxAASLqKxMCSkAA HMrs03FoQAW5TGYz60xABaMqEc2XpUAFjz+qgMvcQAUUFzmqJIdABARP/oSdCEAEdIfTTmyU QAEvwQ71wbJAAh7MZs4lkkAB4rXX3lK6QALZu7FHDYRAAgxKOxlkUkABTXg6KEq5QAGnXKzN NjJAAUnLSKR+zEABNSWNxeFHQAErjZq2eupAAoQppMIWskABgvMv85JGQAEmhf+lJI5AAnqj YY7jVEACqn88dowdQAGRpM/j+2lAAa4yf/87BEACcdwCjWxwQAKRd94L4pJAAAQ+0HKmOkAD 2DomF3E8QAMqnm5Z+7BAAqJ5DMtJZEABksnTRBskQAFRBvy7509AAqoepfiqPkAAptezRd+V QANsZHl5zQ1AAaNpXr/rTkACxkFbEcHWQAPCkHZ0PeZAA8ZUzhcOi0ACMeikC/vyQAF0mWG2 C+BAAUsDSBPvvEACx596hOLjQAKdiabGFrZAAg+DoalL4kABVni5h9L2QAMlFrhLjCJAA3a4 FXxoykADaX4SQjl0QAKns+JiPZJAAC4pUcgta0AC0bdePGegQAGoui3sIXZAAw1SlrKP5kAB f4Skg0qKQAFxDzTAdoJAAl7mJD2eB0ADpt2rdlFpQAMsMRNqfoNAAg0PPFMYfUAArSug9pET QAHxhg7SF5hAAyfqcTWo2EABE8hnZHVwQABhfy2c5JtAAbAlsoCWskAC721VYFWwQAD2bpMU F5hAAjHqD1UcEUABqGpDMMaaQAAnJenXg5RAA52WESIYhEAAkk/FcS3NQAG2n7bxEXZAAJXE /6FUT0ACS+u/Z69+QAFTpwy+13VAAbmvAv890EAAHI4dy9vHQALlICYqleZAADBzAZmBqEAC 6OtS0O0gQARe5j3moPhAAjEhAAjzLEABMi5r3l7WQAAG28efjIdAA3wMPvJVZ0ADKIwJdQRK QAM0hbw2n3hAAdJtoFoSNkAC76J7s5skQAGCAvs987JAATDe49CbBEABLIxAuCYSQALftYL0 4P1AAWQqB/LQoEAB+XC4God6QAIRTX31ybpAAox0QTzmMUAB/wj9eWI/QAIFgbQcOspAAiZj x+4UH0ADobQNome2QAAX+xuFLH4//20huVB0UD/+fzT8kHrSQAI3HeFWzDVAAjhGwCm6EkAC wAi+gSDZQACU7bATQu9AAaOx0/I7LkABON6meaM3QAKXAdLSeV9AAirZIzUU50ABoTZ99+No QAM5+yHObPpAAooF6tflnUACHAJp71/LQAOUjy7CRjpAAliqNDEXIEACf60eQhXeQAF3YIhz HW5AACPaCkt5KkAAkkAY8NXSP/+TyvMxwoJAAISNcAHEMEADM9vMGP28QAFtSneBJ41AA4ti 9QNEWEAALEspdmQ1QAGIZOiKGthAA4ptjdgznEAABZ9c2REKQACzkCDk/w5AAboZHRfbp0AB AaAiHKkwQAFc7tWD4ChAAe+e2HStskAB2Ddi7+pSQAI0ru2JYBxAAUa7nU9ERkABCIozXmbE QAPr5Z/XXHk//sCdG2QjC0ACRwFXHdM0QAH76gLKjcRAAfBTvUQnCkABkuKhVL3UQAG6gWeo JhBAAMfIhTOOtUADGGH7JtI6QAIMh79CcpRAAPtO0dV0TEAFQcJxAgspQASSClxEpgdAAz2+ KjgkWEABZHrjw0HeQAFkQZDo+lBAAQZIzN99BUAAHbjLethMQAMOp/EgU3ZAAvhOPy+ZdkAC pkBxAGq6QAL2n5qj15ZAAeGFvgu8SEACSgKuRwzuQAFY5dgVoaRAA3RvcxFm0kAD4LrP/uvT QARjCpMn6u1AA7bxLoqiAkADCT9BP8AoQAFLy1fGEihAAlMRAZfJQ0AEOaIdNA3eQAOn7F4H LmJAADjVkMW/ikABW+P+yltXQAKlKikk25RAAkw+jMylskADFRPd6XvhQAHtXNlHFHBAAxFr lIEd7kAAILPjYJ94QAIgjWilsFZAA3v2sqZavEADAifxW5m7P/5atKH1VghAALKHImvkIEAC RlRsjvSiQAEuXopCVy5AA13Wlj9kX0AEKVTFASWlQAFshknVMvNAAmjjnXzC6EAC/L84G7zJ QAIbyv2ZLspAAsdoRWa7gEAEMvN2bUSXQAN4JCyUOLpAAvXsQ2HsDEACEIP2WIcyQAFLjEdX gthAAgrPr2Pmi0ABcFMocoP+QAFh9MYoQFVAAfwK1M31gEADAIEzyUAhQAHtEjE523xAAg9K LsvPPkABMW2I8oWBP/9Vy4/CDFVAAW2uJvCfhEAA5aOulxF2QAJKzUUdgrhAAYi8GQM9H0AB 0jc9CHvxQAMcVeOrwulABIzOxAp1wkABTMQkqp3YQAKtA3hguRZAANoKl3C9qUAEop7Ef91H QAS7ICDP1f4//+fjT++z6EADScAXfzZKQAMtcEA4anlAAvCA6LnmKkADYq7S8yq8QAJFCCWF SwVAAmsZGFmZQUAA9+N/lIAUQABSMHyJvjpAAHxA3z4SfEAA7Ha4p33SQAKlp8F2iapAAgB6 moFthkACv0CORop0QALWJSjSfMJAAXeC/M4vYUAB3eprsx+EQAFOXkii3W5AAnDhYgCwy0AB 7kvzxgTPQALayF6RlqdAAydHpxGzykAC9IF4xnv8QAGGNnJEXgVABFL3wXFh10ABGUJwJMuz QALXP6yMAaJAAahfqIIKFkACgmGyfx4OQAJZXKirFZVAAkCEEnM2HkABGoKD0VB6QAJ8rHs5 NB5AAF9RKDzP+T//Op3923CqQAVdPpLBYltAA1+IrQ02mEACWN9vtUZ0QAJ3KeYLfadAAUVn 75PqmEABcnCmiHpcQAH2Di91uy5AAiNl9NWY20ACYoLGILiqQAJmRxM72iJAAmrLXZZ0P0AB SHoyZFa6QAPKK8vOV3xAAZ5fBy+wBkAB8TfB/GXRQAJnQGaElpJAAg3l+30zcEAAbJGjeQkM QAG1Jzvd7kBAAeJjX3FuYUAB7HRYZTHkQAKFAfvjGxZAAk/yaJ+IUEACNogQruPgQANBTmrM 
5jBAAWs48M2JRUACJ6q9D/yFQAFe/rj3zDlAAgsqZ5sGh0ACBemMvpRCQAFWCmz5klBAAfIi D6dl5kAC/viCRmIqQAIHZTNomhBAAZJ5we8ankACmdW/kroSQABNlHywsepABCx8fOGajkAA llqqvnheQASc8ELXB/JABFrXkD23j0ACR1UjKk5YQAHT6sxLj/VAA1l476I5zEAAT6AkTE8U QAQBuznsN/pAAmM5I5hbqkAA4FG0qFdGQAMuKVopp+xAArhf5x3mG0ACyDv6bM6iQAH5/2ES CTJAAeBblUTSRUAA9zoxUCLNQAF9w6ECB8JAAXlcqXfasUADblWGviJaQACpgs6aiSpAAScG Xo7e+j//1uC/KH9bQAHJDMCmijBAAGiTaUGyhUACkTckxfC6QATu+mEqsgNABFbdrFjkBUAC bcYzswA+QAHR2/x77ORAAhljv44PWkABpo8rzO7+QAHuwK5GTK1AAlJ33Im85EACdTVxfZiA QAKSE81UYeFABTVSZqjv7EAEcAMGxnfcQABb7m977vpAA9AS3pZ7pEACuuDxSdVyQAV9Jatb fwRAAzkkxQHBc0AA7b4y5ARYQAQ2Q5h/0/RAATbv0cZx6kACEKw1OfDkQADcvaCCbsdAAeIR rmELmEABqDK8HlMxQAGzP+Pyl/hAAjp0ocbzDkAAtPUZV/jKQAJRoxgNokVAAuV9KvEgnEAB IH71QfOEQADVGcaTtsdAA+Lp280Fj0AAqaR9S0geQAP1Tl63knhAA/8u74EKyEAAg3l0MqLy QALXR30yWrxAAWI0COn2VkABzw7zeCXJQABY6wbipSBAAswi/HPVXkACnDRF/fxGQAFrDWxA p8FAAkidVKgktEAC/aqPK3hfQADmTS01hsZAAs5nEV+1fEADxHBY2xvlQAFbuDbhJCZAAtBJ ENTIokABn2NZShvSQAE6bNuuXgdAAT9RmUmcykAEfeN4vWhnQAIY84MrA/ZAAqAufj2MdkAA SBnFCmtUQAIW0wVwgsNAAXwX6crAH0ABi2aia67bQAGcr984g7xAA30BxQ5o8EAAJpnJLf7H QAG9pBu3JhZAAazo7bI+EEACGNdKPOx8QAK+/U+XB51AARLAl7bgo0ACXGeOcOfgQABcneCT ur1AAf+J86JFnkACpI30p0JoQAGGXScZ5+BAAOV4wg2CukABaSXGcdyCQAJ5HRQyax9AAzK/ j6f0GEABGPH0LM1VQAKFCNdBchBAASCkunNVOUABy3ICG376QAEpR5ifQIRAAt7ONqMbUkAC GbqEOTEjQAMTxydb5axAA8weg1RLr0AEDvxwTHVCQAAtCusQQbJAA9YoPVH3IkAANy3aLlUq QADZLfuMQqVAAwUeqhzPj0ADKNVF/cUEQAJ9bu59FkpAATuiPFMATkACgIgF0RjXQAH+k2yf JLBAAt4zsdAh1EACEeNoFRKDQAI5pPxhqKpAA9IUtbS9uEAAezmYa34CQAJamN+u9VFAA1xi teeKpkADCco64iKUQALw0UYV1OZAA/CLu7lsqkAEgrz6VzXuQAJXfJukja5AAa56WUEXLUAC FDEuw0eVQADYUa4bTFhAAt2sh7wPbkABoIKzkmDqQAGO4LnCEP5AAa7bsC9rMEAArBj2mGYd QANOSvdqwGNAA1I+suLU60ADS30vCeTeQAEMSpimu9NAAf8RyVhKvkABIiG7RxblQAMWlmew 4HZABBG/IFhiYUABl+5d7/FNQAEktBJI0nxAAiKvSFGtDUACQr7nDe9wQAJFCNO8wHhAAbAV OKnoykAB0UsVvn0EQAKkYRB9Yy9AAcBWCUdVikABwvVlUXY0QAKZGflnliA//+RMd64HvkAB MVAlf1SGQATeu7bgBQpAAgf2MLQpnEABvCjVVGoAQAGb8Y2W7+RAAUWRY+g3pEAD51q9EThJ QAEVnI2ugZxAAZ+lBtvwOkAD8ulmhgx+QAMBUTR+w9BAAbv3Y23b9kACgwCvMlm+QAH54tb7 P3NABAxDcDbljkACRK+07DTOQAXSNSyzaTpAA0YE4DoLCkABDU7NEA8pQAKjcFz5IbRAAXW6 kxkYckABYVofnlUIQAELMOiDLsJAAUMt5EdRFEACc44YIRvBQAFIw2gGUVpAApLpN3eIcEAB EFFPyu8BQAOJaJWOBqNAAZn3LFvQzEABf1Bgb4tMQAKZZ1AsRW5AAf8lTYHu+EACWcA65Ezu QAJXMAKO1zJAA7pt5rXRGEADkkvbdUsBQALSKa0UindAA/ou72s7rUADL+C/OkMrQADtO8NC s5JABKh4Fivw+0AECZPdCykyQAOyyBfvtDpAA9pRo9X9JEAC7Ic0uWCbP/8j9T5uTwtABEtk ztRx0kACXOxPhGe0QAQjKtURM1NAA6tfSXju2kAEeSeL2bN8QAUKJJ8wG3hABECMUVUOMkAB ViPvqCkSQAQ9D4OSza1AAtQ2jtDDLEABFyomOXUkQAHv8e2T97tAAp6ehD75fEABhf53dy2+ QAMFF7aMtmJAArS/dj6cwEABEIP0NTMOQASuftBwUSJAAaUaUDJomEABV7nlvScaQALwdQB0 tnpAAelkKlYmJEABOLLwQHYIQAE3OaUbAuBAA2uZpD+lMkABe3llZwUsQAIAunVPMTpAAR0j 2G6IYkAAjQRC0XC5QADc0HwbP9FAA8g97K4gbkADBq5w2kcHQAG71QKO++xAAb92jT6ldkAB T6CXpNLWQAGgyC6ANqFAAdgWrCB6WkACo1/KYHkMQAAd4v7KDGhAAR7J1Uoeo0ACI7ND8cAB QAOOMBjteOhAAtvcE9fCYkAA9rLk+KehQAJx8VsLvZBAAFl1JeCX4kAAkHtFdJUEQABLNt7U +PhAAyhb5lF7dkADluDThciDQAHfioEWQvJAAd+KgRZC8kAC/wFChvoKQAMlLGqb3hpAANmc fRRPLUACLLtwdpIIQAIjOWOLQoVABU8HB4GCkkABN+TUd5pQQAM52ex4iM5AAvythMHPoUAA Urbt12HOQAN2cDgFn3ZAA9xu1PeXAkAD7jUyUZB5QALrViKFVqZAA3ROaUsrVEAFbI6DVK01 QAFBbNjH3oxAAlAiMi7cskABEMimPyM4QAFKj4/nX8lAAZ9ZtowYGEAB8AlA4eZ7QAJnnHi5 eYJAAvagvttdCUABU1XoKaSOQAUekABZRa5AA+mc20qpaT//ve1QAR9qQADpwjJpEQ1ABg/w rqLIVkAEJjmuPP2MQAEOIm7n3MJAAKf2+iFwpEAAzfIsfqeIQAO9ImyZYa5AAcnbJd2FP0AC JeVsaU8eQAFOBadzeV5AAp6qB3x1mkACfkRj+qkyQANUZN5MtkhABNcVQs7ZCD/+48nL+6Y4 QAKE8yUSfc1AAT2a4cy5PkAB/UlkRhIwQAIxuEbehr9AAMJZgCVM5kAEtLXKN++TQAOl3res 
uNVAANs3Dr9qDkAClsJ0nFbEQAHMDdw4tyFAAiHZc5itJEAA5hgbYYxMQAI7rJHWu/ZAAl2H kMYEKEABRsqDX6iMQAFBMeg9Cj1ABB0wY81SqUABG+/fHrAZQAI3X4/X37NABMHBQUruq0AH xhH657IGQADEfmvATbRAAQmG83zAskAC7b3e8OPHQAJWVJeNmw9AAFjbn2Z2IkAETyUKzNVc QAREGRNePX4//uCpFGiYpUABRZmtqUCRQAGcQIsZjJBAAwIsYElo9EABDalPPvUGQAKpqYLA 5+pAAZUwSgiMkkABaTtCFmoYQAHGoTQoVltAA5drKaczd0ABCZMl2bvOQAB4sL2qI9BAA1IT T6nZIUABxFzNqP62QAIno8bSnqRAAibvHWIoN0ABcTIxf1UlQAFw8mcUzIdAAbtpvRSBnkAA 2R+ZqZK+QAJnnILHHERAAQM8ew0E2EABQ53GJVUoQAGUOd1envhAAkG6FVa1mkABhadvK9mP QAK+OHNBrWpAAy6YCRUv4kACSodgyCV8QAG63d169mRAATLDafWilEACJUiysETjQAKRnOos ++5AAKfEVez76kACDWsJA4KMQACh/T00TD1AArbe3f9TLUAB9N8FUwBRQAGL2JxGNNJAAoVE t4rzNkACpDWq+YcUQAGtCYsxt6BAA4DHqgaa10AA0OHogbocQAHVqeE8Hz5AA7aBF46u4kAB L3xcyIZAQACeAH0AeTxAATnptNhvOkAAfvhM5ULaQAGH0zI9oAhAA3/uJfUksUAB4hEJdcC6 QAF+QktjLCpAAogAOSlGKUABjJlHHZ/JQAJwdQbWJlhAA3xOpTcYBEADUyVOG6euQAOS//CA dLJAAprseJRLoUABlhL7ctcwQAIjPaM1WXJAAtrDZgGYJkACqK60T/aHP/6mcBMATDc//zEU mgnLUD/+dxqPSo9OQAQPzJMjoV5AAx4kmhevXEACEZQwBE/cQAEs4uGTBAZAAC+AqBIZmUAC g+GnM7p6QADuhXESTtdAAHJuoyOzTkABjr+9FvvKP/9xlBVcq2Y//i4rLF9TIkABly5CK/vZ QAKITdy4QUY//8SsHjfWiT/+WW7TwsU7QANjr0MX2g5ABNF4L2XmwEAD5SauWaCVQAJqCcrU kg5AAof84xpux0AAbZ7qHuFxQAEhtsJdiKlAAWq7CdVGiEAA6mN3Fs6wQAM5PXxkDK5AA5JR aa/+Nz/8cLrIXCt6QAC7wo2pbNhAArOl0XZc2kABwJcCVWh4QABRIHK23z5AATv4kDcV2kAA SrZ+UBTlQAB+wtQkzpNAAeM5/eopbUABZGamnEMyQAJyNGk7aJNAAqqBu1nre0ADhFPI7ELJ QAAWVk7/P5dABFko91t3skABcFPFnO8+QAErzx2w5nJAABJ+H7fm1EAA4q5aSSYsQABUwskz 165ACLvAsg5otkABV6mFaF8KQAIjMcV6T9ZACSccXm4LB0AKcFyXd32iQAaHCIt7jDdAAmWq 1LwxpD/+65zacV/PQAJG80dTMl5AAL918O3TPkAAsjHUgy5PQAE9iG06HxJAANQz08UeT0AA iWca9cN4QAFoYkwlQvY//4U84zqTiEADU7ct2oIaQAQ6On2UhOg//z9Sn/4Kkz//3pJfQzQ4 QAHNwMUrY+1AAgr4DyfrUEABSha4eLjgP/z0uGgLrcZABIFOUnRA8EABXsCYZLDRQAFBXrJ0 3rlAAlO7OvFjxkAEkfOrfOiIQACX+vRApFo//kfl7hi1Vj//B/dnOP/8QAIfTOZ6mGRAAOiu I+3xQEABctUItz+UQALZqw9xnDA//XLVuk68HkACOnrMhJzBQAAt3joWKuZAAEP29w8LBkAD UGs48on/QAKHuNyxQg5AA+LodcjWR0ABozlAtdtqQADd/3d1uepAA3eQj16OWEABcaY7p70f P/8xcWjWm05AANSl3jsLD0AAMXyoBBpsQACnm+YX2hlAA+gxJt8iXkAChs+BZKJkQAGFblm0 GNtAASNDj0Dd60ABWalpOzHGQAGG5D4XsSRAATAnl34VX0AAXrRXkbFsQALP2nex/4VAAngH +Rf1sEACG1+9dFtJP//xPV3c3j5AAQpclR+A7EABKcZSwNoMQAP0uzhCMVRABV5ZIJneS0AC uaRDNsm4P//oVfAgB9dABStBcJSVOUAApLOdMAwWQAAOgp8QJs5AA3zeWlHYmEABm1a12xPC QAEiju+ynjhAAWqfLch/tEABTFjvvyJyP//m5SZQf4pAAZk4QjXSJEAANtS4VKJeQAADpcOQ lOVAACoN7bM9s0AA3eCTKywBQAN8ry6ccBhABB51liAKrkACFwUJiL2WQAFUQ5Vx5VxAAT/h lu98sUADvUHT3urGQAPTqM5Nj1VABOd8+HZ4SkAAUrMdcFSyQARnjYPzvLJAAUY0THnpSj// ks1vGdMqQAFQ9fTi+eVAAdios7Z1aEADQ96hyPloQAH6zNGF8CxAASuH8Gl/9EAAqh8C9e9O QAFOtuOpRFZAAhRnhDl6KkACvYYWK1/zQAEvPQvofjRAAKq5M8QZuEACRFhR2WdwQAEx3XaD eLBAAi93GcZQ20AAiPq8zKbeQAGxCEveIWM//s5YGAyepEAAvfFvnSdUQALHX3ZmUR4//pp/ Krm7tkACI7i/ClzhP/0xLJ3kSHJAAvV+zN6iDEAEyni1iTF6QAG4Iutzfx5AAImfiQlIXkAB I1Va2MRRQAJbP/exfPM//9nCJbbdxEAEMf944glPQAN9x3UM5RtAASmD5vWVNkADu0NN1yNg QAEQAhUxFaJAASIq+GaUskAAGDxt2QstQAGcHE4+STtAAt8Hx3OMdkAAOPNS/AMcQAJbM8SU kS9AAUMbDL5BcEAArLXrmF2QQAGtVFjJsCZAAk8M49Zr8EAAD5s4cro0QAEZs6XzrcpAAN2v sZjmokACRZyb1ZacQAGJCM0Vp/hAAPrNilEGIEAEjEuwKNLgQAH/J00TnY1ABEcPltL2BEAD fW/hg6NUQAJep26Db5tAAXTqXFKc5UAAisKGgXKpQABxr/BVM59AAFHKRr/CwEAB1i10DMag QAbcNyQJcnRABRAY+fSJ30AACqi4gJM4QAO/XQDGK7JAAx2uHRmGjkACS4GK0uqGP//b/kk3 E/xAACge7hhLH0ABKKm8yi/dQABlph0VD9ZABT68nwRlVEAFtm34wVaWQAWBUZQxIQZAAU2+ NFZ1fD/+mSNJLdfxQACnsVIIoUNABWmZyhe7kkAFSO/d1iMCQAHrLS7xuHFAAJYr5EKzgkAC rRCpsJ1LQAIjv5vEorxAAkiwGRLuYEACKv59twz5P/9jA0AJM5o//p6HWc2CXEAF9TODsqXE P/4bCP4fwjRAAD1XLOF8yEADBZh+GCfaQACvyqEJsYNAAmvHwbzkSkAAV8sikLuQQAF4MBWN 
wgpAAk7oPUfqhEAANIIz8bjAQAHtap+Bbw5AAJ1W5hYtnUAGbl1IgNLVQAF+L/gLbrRAAfPR wHNHZEAAf1QBM4aOQAEQCQk5ocJAAVuXb/Jd8EAAqjqoo9oaQAEwNcfrAYpAAJFfsyQnnEAC eGUTVO/8QAIqKg13tBZAAtffn+KD90ACN9/BGmsoQAPrRQhK5vhAA2XvPOQKej/+LcGNnpx6 QAPNpqw4UrBAAGan8qoJsUAAwrgjVR/zQACGhkSionhAA7Y0yRNXpj/+zBeKhlUHQACKrHRs VlhAAIOeUyJ6WkAA4fZAfr7vP/4mBQKFeUZAAuDbbgZ0+0AD+z0Bj8DdQAHaLlpyoV9AAZv/ P2bNAEADMa+GyP8+QAOr1a2euNZAAgc6TzTFvEAF5G2dv3CuQASdmznMdzFAAsxe+bZITUAA CnLRR2lmP/5McjmGko4//YcBevcSmEAFjdanc82RQAJbUGW+6DNAAmziD056DEACVv28sRqN QABt0ZXIO8tABhlOkycqBkAAEKTXI5A9QARd4C5AxSA//xF7x5EA+kACJ9AzxZzfQAJTS3JK yVtAAkwgSHjnwUAB0iro5ymoP//89y6YBcRAAEU1ka/VH0AAf8qnMVlmQADKfFHPFw5AA56+ Y4cysEADcbkKRi18P//VwTxrpA5AAcKjQBjspEABFotAOgn0QAEojmzbVXxAAc2h7NPldEAA UWQ3CkqtQAGMz/PLKKRAAIwRNsdkmkAAZTzgcwxdQAHbQrJgO0BAArYDsEyRTEABoqiQcgGn QAImY4fpt7RAA1axW7flvkABqLeGsWNdQAAF7T1guPxAAFI/pBsJ0EABfNIPDe9qQANkMZaT HdZAAaT1fwAxYkAAmz5lPLeUQAM82vbV1zdAA693EDUgCkABqygTKJIeQADIDGOK8SJAAPn9 HbjFQ0ABPmzlJEmkQAEs3VNcgNpAAd15dBm2VEACSkVxx79aP/64LG+YD7ZABHv3FgUJDUAE /zYqJtV2QAJez4eI66FAAXKhPY1gJkABZ//SBCi3P/+bZlh/ZOxAAH8D+igycEACFQMOqt3q QAIDggkbqiBAAcSoPh9If0AAS/nq9ACTQAJnZiOXsAZAAJb3qOy2tEABUV4MZznrQAEQyE4i 2PBAAQR3tbzzYEAAzeZnIHxtQAGtMh1tIj9AAF3kGDRjhEABKehZeCvfQACSvSb98cJAAX2x UcnP90ACHuxql9OcQAIRyc6Ul2RAAdX0iByyekADMJLwQmEgQAEol+kmFVdAAgjVftKegEAA fZks9E9gQAIeYFpRrZQ//bnfr7FATkACrA1Gy7ptQAOR3w4Ps7VAAb/cwe/mKkAECMOV0QUC QAARnVdDtI5AANZ9SmedRkACe3AgKaZ2P/4ZuMlHoAA//Y1HICzE/0AAzpmo7MHKQAFyWJQG g8lAA54BF1OXT0ACzoAeAzJHQAJaiICjELZABTTZQDVva0AANzr1vU0UQALfTOLmNpdAAE6u 38zTfEABCqM4esxvQABTA1H6SG5AAQEh9YlNjkAAEgjb0vxoP//4pW+cWcZAAn4Ztcb+mEAB azcxvk3VQAEjc4fiz+hAAK3lJPBJNz/+gxaarDACQAVTd6uu6CdAAdGLWOIwYkAAGucAHv9y P/5Pj8yY23xAAw0nmhMGPEACLddzd7woQACrfZ8+JJQ//3UypaRCED//17BgTF4HQAAtMVIe EWJAA0iMU3Xbqj/+CPJPfsrsP/9gyRd24aRAAlYBM6SmEj//00ko4HPFP/9iUqcFHNBABBq3 qxpI/EABWAFEEMCtQAJ7XtQIZwA//3tKnb1qLEABFR9HH2PkQAE4HMZf+rFAALLo0iRzj0AA 2gVaCWaAQALOvI3dncFAAVMGZ+ReCEACYjh+cPuuQAE0zc5ldOdAABB3Ur1DFEABScffH6r6 QADugEJ6FV0//9WgfUPkSEAAecS4iJYeQACF44tufwJAAi5ATvJSoEABC2QtOewqQAI3xzyQ SZBAAQ1OvN+5HkAHIUjRBhcEQARP9lWHBINACGRuXzTc8UAC22QHTz2tQAEawYCswzo//vTj GG9g+EAB5kbZjmfvQANGZ3cv+WZAAFjem6kIO0ABVBqEBu5kQAPxidnZzO1AAgFVwxqWuj// T8fv3ehKQABIzD0FKYs//6eSmkTQ8EACtHDAbxHAQAGHhu8Y/7ZAAnT6hC8N7j//PL4YpBlN P/83gTlE84BAA/f23ECHfj/+jbQ1VQpEQANRIafb7hhAAhhYj6PJukABYIpQdtTEQAGZE7OA sxxAAv7dW0h2AEAC/aKrLxj3P/76oJEK76ZAA09K9/ZrgEAE3/hnkNUSQALwUdnIviVAAkKD HkHdfUAEcZM1GBAeQAGHx5vtB8pAAW88ELfzkEAAgMd7o8eAQAJIfta/+4NAACceswIoPz// J75Bd8iqQAOqVko+SBpAABsCjuaV7EABD+J5u77GQACIp2qLnJRAATEeOu7S7UAC4VOkcCqg QALxTYArkShAAKp6SFZ8GUACUm3oqfaTQAFwh41i8FFAAIRu62lSGUABEVUWsgZYQAKDu7Rg 7M5AAgAlqrP5eEAEJhrHsuBEQAM4aN/7yok//+QVDCXtdkADBYskIndMQAL2rRHRJ1pAAQJj XLnqnEAB46CkJLS8QAahZ53SM91AAjHcJ33mFT/+7dU2mjoKQAIY4jX9XxdAAWCZJqqhBUAA 1pfk+Z62QAAVWoN0S/JAAdwuJRzhVkABJyJKx90cQAEcIVlDmKVAA6VXkzbrwEAAyqwZXxEM QABuKFjMyLc//6tnnPpePUABHvqN7MujQAAWdcmwgeJAAuSN5EKukkAAKgFDMWSjQAJD4bwk RXQ/++W3PBvt/D/9ubLOKZ1VQADhKsaDuPZAAefhss5BbkAA0/aZpjwGQAHaG6yKWDRAAX0N EUQ2OEACQoPRX07jQAHQk486Cd1ABdRqEIpQfkABzzy7uoeSQAOPQyHkZdlAAJV6VMytoD// H4rVdJdOQAONd23BeKQ//jf0Z+rHzEACB4ptp+5CQAPS6NbS9vFAANvfEONw7D///nWP3abY QAFoN0cqCnVAAYyueYUya0AClziBoP1cQAGOAzu3wAc//cgq0vBh2D//uzABSaTaP/+hD9Y1 nVJAAg8TeCKYTkABuglYZwCfQAIf1eojiSVAAbzodatLpkACQK8CcHwkQABmfXzUAS5AAFwa zMMw6EABxZ963xOZQACNHYdxp+BAAIloPq8yBkAAk+XmiHyaQAKJOgNKisg//xOgjXMuij/9 PiHFh6EcQAIYs5nnmxg//gxDGcqRfUABjpxhW6YeQALUM8D4zeJAAx/fKEz0RkAA1fJid4C0 QATDofazEQdABAxCCjNZL0ABck4xZBG4QABVPUHRJKpAAjWFmHYyckACb9EEa0g4P/7aayI6 
tnxAAnT27J0qgkAAENPgsquOQAAlzlZ83tVAAaXZdS+w8kACr2+0/ArkQAJq6tIMABw//Xhf AZ7qGkAAiOvBcpswQAHP31bqm/hAAS76Yx5RYEABEnc6OyyBQADj+YksR2JAAtX6iztcmz/+ cXXLhzzeQAIy45533vpAABfA0NUNAUACPIkvdA0KQAPTPAVq1RFAAQfQChs6OEACidmrZ+rY QAFatx9r7V5ABKOc9V5QxEAA3AJ8ZGqRQADtxbTEIqxAAhz4+q076kAA/d7qH4GiQASXuH+t u4FAATRUMJ2sikADOYBPL3cKQAA6ZjT6wDpAAUwER1z/ckACwlNUJZxxQAHEbyCkj55AAVcn kOPgqkAB4kQfGZPuQAHbjZRo/W1AAQ78Ka/HLkAA+eOSIZnZQADZjCFIWHJABFSF4eGBhEAD f8RMkg8KQAFtSgzkbGpAAkB1vUr+MEABaJDyql0cQAGIVBJqc+lAAau1+KH9EkACRRSQrvlG P/4Dyyyf2IhAArxT+rE9xEAAakCfgOyeQAKNstfjQbFAAE/Bf3JDy0AALCwfor4+QAFrJi24 slFAA90tsa+CQ0AAqMEJyaiUP/8Hkd4+bDg///M9KN1MLj//vDI/PSQiQAFij7BnRhpAAB9T woyI90AAqNuqPf76QAGEel2FEfZAACYTCKtqXT//Zpp6svtEQAJCbM+M9tRAAy3dPyuPfEAC tKzS3mJjQAD90GzJdNxAAL0Odl7YtUABuc0CQwn0P//5eRYHxPY//X74TQI0Z0ADSeo+oolm QAGaEsXdwiJAAkrJ3MdesEAES3nAb1goQAKjEMmhEsFABNiiIhNgaUACQ4AIBDNnQAFTiBmK HxQ///s5Q2vgVj/86rLqNlC4QAC+V+7LmHlAAUNzNXlCX0ABFl12PpPYQAEAmoxfhIJABlc0 91jDlEAAWQZjCdfoQAImtCizXjhAAr6in/yKbUAA9xkBMTwjQACRW2ZF2DFAA1hXYxOmHEAD I9/si/UKQAfcNe0F09JAB6U1ln6XbkACYOsTnOZSQAOC/a7lAR5AAFOw+Zwg8UADCjDR1PF3 P/9x4lwBtTNAAWaUyv5P+kAAVXS4J6P6P/+PSklL97xAARzveFzxL0ACvmpoEV/7QALApM+8 A6A//2zGVVf330ACuZP+Gp22P//5ax+gLcJAAwsgudTFqUABZa7JiaXmQABfgAI3lF5AAvHu HdAWzD//Jzs764SCQABFcpt71oZAAbbgTtaI60AEsAItg9VSQACGX/TUEfk//BjAys/T5kAB RbgACHSEQAUHimWRTzpAA+5ihzLKAEAAxDbNsxxHQAHKLlqnEz5AA7jVuHXhXkAB8K4dat7j QAGWgjNjs89AAOHWdwQAAEAAWtG+T23zQAE0wzNTNzJAAMNB1lcLvz/9wP26JAqAQADeckXK j7FAA2mCjfh4kEAAEDMYDp3HP/68vSKpJahAAp31vVwE0kAEKgLGrtq9QAISLTLgWIxAAIO4 GApkaUAC/cWR0/QHP/68c67iM5pAAYf+a5baf0ACq+PWlwIPQADcTmQZyP5AAYajVcpM0kAA m/kKJ3gNQAGaPvIjCcE//+UmGvsfU0AA6Df8MA/AQANE4xQfoANABPcZiAN9VkACg8rw6QL6 QARirCWtG51AAs/v6vUYmkAA6/p7ecJ0QAEX/TJBLmxAAhby4zYzKkAELfmKo8ubQABi56+i PShAA4OC35aenEAA4UPr4NY9QAD+3cWFceBAADmDz7N9i0AChq8OnVd1QAECP6pIi0E//VxL TUNpUj/8YEb8Qoi8QASpvt1lxnBAAiOzBhvVVUACNF6J30AeQAKok+eKQLVAAw+vw/UpZkAK gc9JvBquQARshPcO9dFAAk5pX1SaZkAAaaf6/X++QAF0UAkGPuZAAbhb8aPoXEABrDPbVoJe QAJpW/FFShBAAKWuv1r0HkAEBVDs9DO6QAQB6ckMmVA//qmLmhFeYkAAWwTkyjS9P/44Qo0M NMlAAHORLb59vEAALwuFv8vdQAR2uPzIelxAA9H6IgDWZEABQvBnXNIRQAFAZugYKcpAAVfJ jlXd7EAAKLhO8euFQALuPa2JF6RAAT6wYd+B8EAAI+vXRhNSQALVUzkuoTNAAaDOt1zVvkAK gc9JvBquQAQTY3w/wrRAAkLbBWpoP0ABPCd+NlrBQADvcD//3Y1AAOjmx/3KFEABQrl1oq3+ QAAxYkFZf4xABLzyTySN5UAEFwcT2hikQACBeTMLyXxAANqH9MCPmUABCZXsmcxSQAM2Hw6x zzhACbE/8sezvkABIvqoRj4DQAa9tO9D5RxAAyYqQamIQEAAjtEQbe/wQAF1cqcXa1BAAQfg gqSEQkABAiUXqBXwQAKCPlIodWdAAPf7xjmPkkAAO0Itd5k7QAEpyCeA2jY//+nWX+iOtkAA 4hsGr0mfQATvMe6bB0NAAIvdy0q+v0ABlnz9+ovuQAGMYj6tQVRAAHtSs2KgUEAAq5RJjYgY QAHUBC05YtJAAP7e/gOHOEAAlrcYJ1U8QAHqmmZnV41AAkM5PWE03T/+sJRi4YdEP/6Ld4QT SfFAAOpk5r2ytkADSMJGggf6QAMEzAlXMENAAkYkML93GkACF2dBPpqnQAGEAHckdlxAAq3w VPcWLEACqiJLZ4xPP/7YnOwXRGNAA7m6m52eXkADSepe3VTsQAPQCmyIq11ABcUZa9HtJEAB sj55KrdiQAAkTw9Z1jhAAvNvw3U/nUAAuo/RawAdQAFBzR6h3IhAA5i+c7Hrn0AAAOReZaD8 QAV1P7luJRFAAX1zlZFM3EAGQWZWJxziQAWoW3tJtv4//qoOofxdCUAEPnBzZNdGQALq02eC Z35AAHAom/JyTEAA6uFbQqLmQAF8h9UXXapAAo5FHk2hEUADMQthONZyP//y0JwVmXRAAw4H 47cvzEAFmtb+JqvUQAUh8TP//HpABIa/jiC7ZkAAZISkP8paQACO0Vy4M5BAAcsYK4OcnEAA hVOCHwoZQAC/x/zzuKNAAS9yMkuGXj/+Lh7WhwnDQAGoDUIrqE5AAmkJZgUTYkAAqJcDDlxk QAHgGYyfi8xAADAgbaQt+kAAc1GDlpcBQAIZi5kreSxAAnqHxnfkCkAALbCtLLs+QAFrJsay mpRAAlCjt0mL00ADu7T+nINhQAHkBwdt4os//h3rp6r8wD/+HeunqvzAQAM+3Kgpm6pAAIyH h25IqkAB9T33N0w2QAI5075NmHRAAwyhCOOwfkACbm1bP2jcQAPjpOlvfBZAANKwtLp3oj// dJTFByfyQAYGzTgbqaZAACocFVPI8j/9YAGUJKHIQAIurH/OBCNAAMhxjWJ4LkABiNRqxnpC QADsEBA/Nd1AARanfJ5hCEACAVbSOTcOQAEjiBgfYTpAApeYp0uXo0AB6Po27TngQAAY5Ebq 
4lU//n6rIg+/iUADbXvSccpOQABpsfCm0oI//qT3FPwZCD/93DCi73iqQAdIysxncmFACBsl HhGGnkACg3BO5HrkQADQNQP7ZABAAMLMTvGVMkAEQrdSq2t0P//9bpte0utAAcnFzYl3XD/+ bDqO742cQAIVXQ3jvpRAAwowIFLEnz//eo8l38+AQAHN1XWd+vtAAJxarhxeGEAA6ogoYY2K QAUY5Nqz/U5AAT+8NfmaAEABXE6ALjtOQAIGtHSniftABDYU+rRApUABzZJSdT9pQAGwoCeW aDBAAWVR6j4LzUAB2Vio7RKKP//gcWf72KpAAYhvlUcJqEAAWHK/fPfmP/4anJVryYQ//u4M x4Q+ZkABSdsU2jGJQAJeymZxCglABizOQiKAPkAAlCF5XQD2QAQiQSwoAxBAARGdtle8zEAC YHpsC4NGQAb9RK38pKtAAK0hXSuDgEAA1+D51HaoP//013/frAQ//uJ4z9JhNEACsODEzAaI QAFS4SLp5PBAAXkLgvOn3EADioXRr+amQAHCCWx4SmdAAqlR/uN2zT//j3Oa2FfAQALWorSu U8xAAE+wEHYFNEAAXx4vvQdBQADfdNsBaiZAAFdT4ogb1UAC+ScxeSCgQAA63JV+t6lAAPuH ZKg4P0AAwQ93NRooP/8B6kH+YHpAAodyu52pMz//LWwjyNEkQAGQTn1EifxAAX7aJXiqJEAB 3GJDmriQQAHKFsH1o3NAABo4jCawVUAC6E/V+Ew8QAOVhGFwddFAAVfYARskc0ABEgzL6sks QAGdnz9BXV9AAK3r6hPQjEABKdFOgLyTQAHsO81+07dAAauIILrCwkADcWAuc7FoP/8s7bwR OWxAAc6uiO+G4EADt3tOMPrqQAFFpAGwraZAAegNEjLiED//35an5NKIQAARIxDjXoZAAblQ wEtpCkACDOqB8QJ7P/1Ot3rEuLxAAMm1c/I14EACbXKalUs1QAAqSkfS1GhAAUEnPzaHpEAA dDPGTCwBQALUCAVDTGY//fyu1j/wd0AEtmgzM8bRQAd0eFrnE29AA/6N5eZXDj/+D4F1M0Hx QAJA0fBXsKhAAYW2W2JTqEAAGfnvrTboQAHlsKLVRMJAAfB+Wq6JpUAA0Y3Z+7qmQAFrh3sd mvJAAiSGMTVvdD//nvmgoiMgQAJzL57ArI5AAfcEGt/9NUAByJUHjb7vQAM4PYcUiIJAAMEB eX3UqEABC070fx5+QAAVskFlBrpABxXBJUTXikAFuN1Ca/X6QAPAGcvd8XFAAcg8aHOiVj// edZgES2sQAQhWcARNK1AASwy5ByhOkAAg+P8r41mQAKLCfCzPFFAAUgLy7KoYj//pIqrKh32 QAN3jgtgj9hABJe7kEyKmEAA6mMoeIyTP//8Szt8BmhABWYgJTG/iEAD1mkfXF+gQALniteW fahAAgLVyr96TkACYfbMrBIeQANAcxfEuLBAA9p8zLcxukACDaJvAk3aQAIV82hTw+xABHlO nTQbYkAE+cRwb/LvP/2ky2eyceZAAP2dRsOY30AFfOD63nZmQAQdR9yLwURAAqZuWIuvoEAA /xddPs/9QADgGk6uDA1AAXZZFOCzOUAE6RJcxJ+VQALKWTJO4QVABnqJF0wjN0ACoVjlLVpV QAMqBI4d6R0//7l2lTpTkkAHnFCC3e3mQAGSc87QgWxAATNkRE7IL0ABi9NG3AO6QAD4h/bh QA9AAD47u1w2iEALw5iv2PCZQABFna8HMuZAAMbmehLqiEAMiCdpR9SMQA8GYe6drtpACjrm UJblDkABDnca5HSWP/6qSZEE9RpABFVt+6T5AEABnU7jwAzYQADrh2zYzMFAATXqZ/idJkAC lWgilJrsQAH/NiGSlCRAA0/S2ApJXD///Rgot51eQAXxj6q1pdRAB1VShdKEPEAASRpjuuAS QABrMinilsdAA/C2MyLn+0AEJY5/MpXmQANvWOsOxKo//5VEkgyghkAHHVjeazEZQAOhuVyL TBZAA3Hes6+O9EAFHp/s7Q2KQAeJxh0e/wVAAjw7dpkP/j//S60xoMVcQACAHh2eGxJAA9wJ Q30vHEABw2mW8Hh8QAI8ryFFAypABPEQuoouY0AAIjsPIvZTQAOhw15D931AAVjgpKDmIkAA xvnyEf/RQAVJrocJzYBAA5DezGdqtkAEf/iUktgeQAOX8Xt0St5AA3yo6Am9X0AGmVMJJCNQ QAMUN24esPBAAOOUf47c9kABAG5YIi6fQACMOGt/beRAAVZnvrUiHkAHJQkahO4VQAT9zXmr 0wdAA4oEn+h5mUABsK6U9IUJQAJJc2ONRI1AArOlad280kACQxY4EvwgQAHMU8e4IbJABBKk k+BHgkADxkOMVZvSQAMnrgXTwPhAAeHfd1bCbEACL2gfU3NKQAKGt4W1op5ABdcmkUaqGEAJ ZBUvV3+8QAVHo1id1xZAApPZhFwJMEAIDdvnVzTMQAKkq3ZpH+JAAN14UjftpkAF7cTM+ivr QAGQnFfmhOJAAagjLPPm4UACF/HRWSk+QAHxZETkmVhAAOFQS0hMPUAC4LonqcO8QAGJ07Ye oOZAAFeEM3drDUAAm8SSYd8EQAG2U/RZMsRABqeWdpfsiEAIxlMylKuaQAT4EqkIachAAVOa Wbi0EkAEcC43S7EpQAVTAcLtXJFABs6cUUrS/0AIqEILxuQHQAB8Bs+x8tVABtwxHXVYFEAC aHql+AVYQAEH9zUoBHFAAoS7TLq6tEACGDm7bYx3QAUjDkJnvhFAAzsQMcaOvEADXruXwEh6 QAFh5qb2ZYJAAgdBxzWnT0ABy0R2LJWBQAJvSwBjjeVAAdHnPGXOhkAC+IInoHjfQAMwFLrk hpNAAfCnLokAskADKMZCsSw4QANlUxQhH+1AAreqsAKePkAAi1AHfQ2oQASM0e5natVAAtK8 62ZBJj/+/gFiuaf2QAOVgrvAx9w//tLJvh/fsEAEYViLlXqwQAgCnGCWqShAAuVGXAofS0AA 4903e2rEQAM/C/424NVAAvbF4yPz9D///eT2p9+UQAYkj/XGnYxABaWfigmHNkADrVqHcYR2 QAkY0BvdvtBAA6w3tuE3LEAA312E7fAWQAF9PB+Zpl5AAiV+eW+sWEAGMYO2Zi+MQAA1TqHW 1YxAAje3EdPjykAA9cdJcFKSQAEocD/R1vJAA+cqOlzk4kADWj2tamEkQAFTB/YCWWdAAWXZ 43mrz0ABSSzrroILQAKzyjekCsZABExJCjB0TEABmeHTXC0OQAh9zFUJc8pAAaDNYPUvqEAF 1AQNB2sSQATgKbpoLm5ABSrcJ5oTeEADsWT4kDCDQAEEiyRO0chAAkR8mgdLL0ADuAkHtuCz QAJHnW8yq8FADNuUlmr0P0AJ0c8ZEpg2QABaH3e5e2VABNaRqF2/3kAD4uTCd2S2QANdPpZT 
lmRAAXnjq91iGkAA2iEjmAIAQAKFEgPSqwZAAWO0NcElHUAHPLNVPRCAQAeAtiqm6KpACLca jOem00AB1FwUMvkEQABqfglU+J9AAdKnySWwYkAIcEjijr5GQAgZUf+1epFAA4HpY8LyLkAB LJaOwWISQAVjOtiwv3JAA07/qFyh6kADkXlN+gDEQASym3M41IxAAI/S3o7vxUAADAfCITEY QAglAMq6skY//k467STidEABuiuMlj1GQAS5YWTQ1bJAARJdgWjNXUAEuInhEr32QAE0FsUe 9TRAAmon1ILIEkAEQ1MwWxLeQAF0LqJMAaJABDcmCte3lkABKgpfvAjhQArfUt8ZOMhAAMZ0 Hyy5AkADS3jNHSi0QAFhv1DQOD1AAhju7owL8EAC+ai2sbbLQAJFGN26JHxAA1kPrK7mcEAA afmfum2qQATMUdZo2YJAA5cJ1PkCkkAD7FAllmHsQAUcUjBgtWpAB2ZO8PNZL0AGbN783+9s QABdLVmaNhBABRgIzk+p3UAB0FIlZbPdQAGc8V3ZU05AAg+/7MoCLEAF89ozdNRYP/6+hynL 16ZAAX5/HQ3PKEABzUnuHtGKQAK62OY4Mfw//yQ30HL6DkAD/jUdvgipQAbqFPCy4ZxABMXT G27GskACZQijrRifQAXpmoPLAm5ABnWJeW5xMkAEqD2w9YoiQAp3U9drZZZACD0RopINNkAE X6boSJ9XQACTb8QGQG4//zS4XXkALD/+mZG45L8WQAk9RGeVGChAA+rQlYKzQEADZBPtpPC6 QAPczD9BMaRAAHL5lAxkYEAJUFxW6Tv+P/8BHj1wA0xABq3r0zjwiD/+wSGK+gUSQAIEbOxZ MzFAAfWQuEAohEABoaNlftZtQAUsJlwCTN1AAUECrsY1Z0AAXtkBro3dQAFqbiSB/CBAAPfY 495/10AG4xjsO9/0QAXvRU+BbolAASm41lv67UADK3ur5skhQAIX2s4uPGZAALNPuFnCG0AD fSK3jZs4QAE8VhCRdS5AA7pfkpSJ/UAAMMx1M5HDQAAFFKDxycRAAbfASnqJwEAB9PwFAYr2 QAGbyRR6ayJAAaJBVRGcdUAGZAZ/QMaeQAL4GBcdBvpAAOZzWhXOAkABEuHeTECIQAM01s1f BqJABtVdD3gyyEADQKhx6WEPQAIfi9ub8xhABgqd8KPhfEAGhE8+ELB1QAKAHMiljUZAAbtN WGR6a0AB7J3BpzAcQADnqsciRt5AAgvyz9Gev0ADfpgpjaFmQAOtzHmIBedAAD76rDnhFkAG oVGzbEfyQAhJeJ9pHU9AAhbvRq5prkABbaIFSS/LQAJdYxB0d9pAAM4rHVrK6EACD0c5i1x6 QALiwp6fTCtABVJ1P++CSkABhURNHXVOQAD3ZbGoyjlAA5uEOw91QD//9wG97AxoQAG4H+/v GDBAAgnw4xjmCkACx05ggCCGQAM2o4uEbZFAAgDcWBHC5EABAZ9ZzX2hQAK5JvfiX3lAAd7b g9Wb/kABkkypZdr2QAIYO9Hg9DJAAhd8u37PikACGabXWIwbQAbBUxMTOhZAAVoQRILBSEAE YIetmM5SQAEhGahXgPZABBv+BUTgtT//O8dgCMHUQARY8QkgnbxABAEIMxNu30ABraeJe8W7 QAX+EtMfj+xAAbN0TW00qUAB9g0rc7QqQANN0W+5qTJAALSDsGNQrEAAnHDamyJDQAK4Jt1q N3ZAAVpKgYfkWUAHNLWqJXVmQAYX4YdnsBZAA/JFOM+aHkAJKsNdG6ZGQACDXO8gQmZABVwZ n+YpOEABaSRX1aFuQAOcE51DKmtAAc6iqFWSM0ACPqax09FuQAHbDHG4+zVAAiIAxVpnHkAD mTv8MktCQAQalp7tZfZAAblOcsw5JUAAVw7PIyGoP/8IFliOSxBACPZycZ9ZkkAEZ5wHnGlu QADhVuRqJIhAADO3rJbVa0ADkax5qFJQQAOpRS5xA2pAATeZUx/kwUAADRtdCF8RQACkhK7b kfVAAHUdC+p1/0AEcyWFnaZZQAB2lJRFJdJAAHPiADKzzEADecywM+PWQAAj+KzTzyQ//14M OXIpQkAGnjVwEAHiQALKyI/t3VBAAy58WgNu4UAB3Z+2EfjiQARHB7IRdLRABHmOMKWpiEAB 72JxHDNMQAHCfJ1TUQBABBvXlc7tWEADi1Jy+CqaQARgohEt0f5AA4mGxA+NHkAAJo+KpHSA QAHuedgRpzlAAvg8yXQ7NEAAJbfL/xOyQAEDtr7M4alAAPM7qLFoWEACdEQUfUTBQACq/u2b J8BAAz/DdtGAPkADDpUys31EQAuOWiD9nU5ACFYiNR6diEAOQ24dmMrLQAYdwZlhIThAATFZ 1HZ+MUAAi9KCI2fJQAJWhpalILxABgThZSzTl0AAk1RSZ+i2QAEiUXNebOxAB7k7iz8TPkAE cF7+jzdGQAB0c055qlBAAS0lj2khRkAAfto6bKI4QATbKb85qJpAAznb75lzj0AEf6p79y5O QABE/z0+UlhAAAyMS4bs20AFjDglbo4iQADZv3km+8BABK/TXmyn9UADcrl8hDX8QAKPw1/E zINAA15KfdWS+UAFFW5GreZoQAXUq8srvbM//5ia9t+jMkAFTBvk7kfaQAfDL8hY7NpAA2wm ApOyaEACGBvjlCxpQAZ9OMBfCTpAA9IFt4sZnkADOG+kztN4QALKvIUSWu5ABnjM+Oz9z0AA R31zMoYLQABL3gdUDUFABanDIhf1rkABLFwQj7v4QAH6CksYpzpAAkUjw7CmikACRmCRNymW QAOEoazJ55xAAytV5oG0sEAAi9ia8MzsQAI7Nig1lqRAAcaIiYvyQkABgdYKSdZKQAHlJ5mG s3xAAe3GMIGl5kAB2uAJ7ekMQAfsuRiVVxJABhWsCgi8WUAASnDlPbqwQASwe1qw2NtAAega flNv/EAAnnTv3XegQAFq+F28rgpADD8aoi1m4UADAE2E8hBOP/+njlpewm5AAw5n5Vi2gkAE PGgTxlL5QANAVCXMU7ZAAJFACtz9dkAECD+pB0F+QAHVagB15UBAASXkblQTLEAF8Zq6lvyq QAF8A5PqSfxAAPwJz3YYz0AAYUhyt5QYQALkBBXAjTZAASUGrKrUWEADjPLHOLKmQAFo4R2C 5qdAA4789nkqqz/+nifwDGIUP/+9oqx+16hAAsK8f4JK2kAENtDO5imKQAGWkKcCSdJAAkEu qY1Me0ACDkcizg2yQALU8Z1YfqhABN3POiAb/kAKmALXQAPuQAQPjFqH67dABsfudbR+skAA 2bing9qWP/+1XZYn98BABS6Fpaf/PD/+uu264hWwQAGzZ5KSoPJABrRSIyym8kABXd/VY5Vk QADFhpkb32RAAXddZ5cSMUABsnHudjgoQAXgQTkIZf1AAdbmzUVah0ABUsQZv2S7QAGasBP2 
XCBAAOUG7+JRFkACpH7J8cbCQAHHcoT+jPZAA96JkIXLUEAD+gcj/gR+QAR1ijVppURAAifB Na6JnkAAu0y1bTjiQARZk/mn2sRAAiluEqoL+kACMbwont5gQAEw9lpZtjRABLWF8yY6HUAB MTnipPqGP//NSbd2dLZAAyDSpiFmBz//f0tIkIb0QALHrOGzpuFABYFF7QKJ9kAErl8DET/Y QAGSzpeaGtBAB9eZVT2KKEAFc16ifdzLQAOVuAp0GTJAAM2idMG2/EADi2ykTpavQAPcrOt3 47hAAFAsHfQ9VkAEKDnSCq7+QAGS819pr5xAAcOLeCOq6kAClEo2RkNPQAPnGb6m4PpABIO6 mBl6zT//6N3sQKjqQAGhFt359MZAA6KNdEwHokACo2exl6sRQAIKRUYpGhRAAV0aTL5yHEAD +9xcG2CCQACCDLlhB8FAA+oDTGkd0kAAGJnKzxcbQAOaBJkTLJRAByzSe566JkACMpIkaqzy QASqO+co3KxAA8ucjtKYkUAHXcGOGGMeQAIntC3MYzJAAkQd6yaWDkADCtJ81tswQAGqFQHa drRABwbY0Z0IcUAA19AD55ucQAY38ahYbiBAAVxdhzsQxEAES5E/dhJ5QAWoAPpN2B5AA1Zz M+R7ukACmRfSXLPYQAOqJxlM0WJAA64Lf53lt0ACQ4TOK3Y2QAICLMY/c2hAAOneHNtZOkAI YxFFSpvkQAO7uWLRJhNAAsYcVov1MkADJidfhFSNQABiBrbWSddAA3ZrQEOb/kAD6+BZhu1u QAMwkp/FJZ5AAC4bmB5QbkADqp/3XRpCQABsoxNq3C5ABVaT/PWSBkABJaRq/2vmQAEoI8l5 mwxAAl4ejtXWCkAGeuRLaosMQAD34DDC4qpAADTGsBo7WEAAr7w6kKVEQABqDBXi3x9AAknj J9U4hkABVZYQWkKIQAGQouva3NZAA6jVApnzaEABhw+ST0IwQAGpfQ3cgS5AA3Yog6dX9EAC Z+FgmjcrQAHlqMg009xAAU/qjWjEZkABok1OU2J0QAMnIsLlNtxAAOzZRK5VTD//Lya1nEp0 QATovN+jeNZAAlsFDbMfrkAC9J4WVRG+QAOoflvhG9JAAessw6KgykAJB2JVhwbtQAHgkj3U qJpAASXArQ0mqj//VYoFMnXyP/2lmxGDRPBAAm99CNHASEABSpc2GjH2QAIWjuLnOkpAASw8 DTrAlEAKWFjSexvVQAHjxDWoeZBAA+W1Y+NyMEAEzGwkjzj+QAJGolpA0mlAAkBYJSdvjEAF P8ISrKl+QASW5q6DKnhACzBlFu4pdUAL/h9vEyCXQAF/pOoyk2BABioq6rkls0AAWNQMvHWG QAOZ/CshoP9AAcYaa7atWEACI5/k+OZOQAF7JFW0Jp5AAQ9wxIHUoEADmDfWtqbMQARYXYhe xGRABHWmSIMWEkAAeafYrtcFQAShjDQ1tA1AADjMjBc+uUAFr2qxCnnNQAFwH3odL9BAAQSk 6zd+UUAFD5LjHHpUQAAOxuSBaYNAAZg9yOAuVkADbRar10n4QAd3yFAuCLs///GiEBeFiD/9 yCZI9JhmQAJrK3RmYZRACboxe+7zMUAGDg972REGQAHO/ckJPpZAAsfJSVpgrUAGT6j6XBMq QAKqp2ZiSG5ABC3qiPo4yUABxYM02+7YQAJJ1dRDL8JAAkqMZIsdMEABOs5FF7heQAAfld4c AXtAASNP4zw0cEAHbvuOXlsDQABit/+5gyA//q9+q411PEAEuG2nP6IuQAgw6ooL4pBABUyk ZGuvSUAC9X3KMpUGQAPSraWI5rBAAVH52keWo0ACOcHoafMcQAVaDPJe5VpAAyb4jWvoLkAD H2YDnztAQAGEzpVtWS5AAppwsIKSsUAA/RurcUcIQABHbBWmxNxAA890aXlV7kAJ6mWyK6Kq QAJiTNaF9BdAB/eRaJ0pVUAFedscEi34QADXuQWGYSRAANYVDd96mEADQOsVEl6TQAeS+TKr zt9AALmsucfTuEAFwftgrqpIQAHTQ/Sa7bZAAmVADTSPZUAA0F6fmZMGQAKz6R5AHCNABBSD VkVbTj//9433H2MCP/01qAgp9tRABkUUjNVbgkACw0nE0oduQAGwzjKB0QZAAg9Yho5PmUAF VBtf2SCGQBAUJXbfDZZABnsiXE2jPkAEstBVYknQQAC85Cn4HZpAAvYhbh4o3EADhJUzrDjF QANdKgKH0txABHDwc/KweUACRRMpzH9QQAUomKw3a3ZABSgWEipToEABYBD1b/lsQADxUtms gzpAAPNp5aaEoEAA/9lLsBRdQAAlap+M42pABw5BUxP5tkAGgJAccIWyQAJtDdct0mpAAYWs pnnYFkAAq8SkNjrAQAAFojwiDilABdoWLS/U2kABegKId630QADfXY6EaeBABRyxRw15wkAC z//aYVoYQBAUJXbfDZZAB00BAXSjvEABs7DxQiZeQALlUXyEm8RAAtDyGD/WwkACABQXP6gB QAMJV2L2pzRAAB9hyf1+BEAHpc0yi3V+QAZIfPFvd/w//8xRxWKMfkABbiU0acFvQAErTpbd WuBABTyN6GwjR0ANsYjkqnOyQAAEldDsxt5ACrqY8wDAJEAB4BmgLeVGQAB3XMBscRBAA/e2 58D/TUABbKBPNgNjQALRiUc/sTZABPrfbz+6n0ACxTqPK76gQAFGppEik6VAAeQAXqTmWEAB AKFVonp9QAJ2Fb+gCFpACJnFA/BmSUAAGoJa9iYyQARM60QmtFpAAozKBfqsJkABhUYzeWM8 QAEX8PZCWmtAA5Hq4s8ZQkACe1OJLyR2QADVjgHn0ZxAAdt8le8pPkADFVdvRpnoP/6DopAl QIw//0Tee/SJbEACxq/S8joIQAOzhiowZhNAA8QDeJHwUEACqEM0HWBPQAJ6rALhwxtAAZ9K Nu50yEAFlO+qV4NoQAImXBNaNeVAAgtWHTcU4kAD2Pb0SX3uQAOLKbYB+1dAAwl9dFAcOEAE JxegGFnoQAGlYrPUsUpAAQ6ffdvVbUADIIc+/GclQADmg13guElAA8vqqSYZPEAFz2+oXn++ P/+sOO3IEfZACNXxBi54CkAC72xXeRahQAjl+XlcqwZACIufH01y3z/+rE3I/LQwQAa6ycMz 7CpABSstLKmTREAA+/qpdqMuQAFgpmtrMHxAAyCQShJXDEAEw+9jliU2QAVnGx1qS4pAABRe t3/gBEAF6iGtyaLlQAmjecVNrT1ACXh5/cqFgkAH4hGjq/awP/+agyV7r+BAAONCZ+D2tkAD teMyQDoLQAGKt0Nt48pAAcqeRCX+xkADqS5TSFdbQAC684ZMj6ZAAkizSPs6hkAFJ5f7HF3a QAHHEV68FbJAAs+fHy/+8j//0mmUy7iaQADlDqElEZ1ABGFkwufuqkAD25KKYar/QAEyz1kJ 
X/RAA0V28fYK/EAFHS9156SEQAVcsKhGQfRAAaiNv+HOtEAAM9oC/V7jQAAz2gL9XuNABF+W 6naiTEAAp3icoa4GQAQ3c1N1pBBAA9LyNWe+zEAE+GjzlQrGQAHRc4NWD89ABv+HEB8IokAB Qf2Tomb1P/5h1uB+Bb5ACh7XDaLf3z//0pabHrnIP/2MWKN5f1xAAinpBcXQ/kABhBrPI2HE QAHpNlVtjoZAATA1QxZ8vUACZXr0Ivx+QAQuuEtJmYlAAkSNTHPGrEAFCWXwg04IQAPq/dma lY5AANAGxnRvXEAAU+1gfzVoQASozRPJAVRAAOghdy+X2T/+5/Wu4LZCP/20sHoy155ADTza B0I/g0ALXYKkZk1HQAFCx5ZfU0xAANbemU3qpkACVc9ct3ymQAdEzDMdRNJAAifrEoXS4EAC GwMg/ZVGP/9lHOObHGBAA0a52RfePkAFVCI5noZ0QAA20RrdCUxAAoBya1eWPkABZvwG6vJC QADZxGx6E59ACY2W/Zq4vUAB5lKV5on8QAOuolSCQTFABVkQY/JfGUAG+EuDEB/0QAO5wtdY oXBAAT1aJAF6bEABpL89aS6aQAPeC4j/pBpAAF9ZyCshFEAC+/C24FfUQACgenK9aqBAAMwo pa3qVEABLCEQ0qO3QAJk7uYeYllABJLQsi/xyEAKBArYsIeAQABgLQu2sqdABvElo94jtEAB 8+KsTjE5QAINB72/qwhABmpXsZuJlEACEaZBlkzQQAKlaQmfkIw//+NANiZD9D//sPpC5rps QAWTa1VUop5AAUPLlwdBHkABVrSf8n4KQAevcHSqwQJAA7M1FQVY5EAEMxVixJujQAAUCIAN 7YJABYA7n2PUNkAAf2mxWdp3QAGanIailcJAAiY+sH8J+kABlHpjUaqIQAMg3Tnkc1NAAZox WEq7BEACwRj9bSfIQAGZVb8vC8xAAMuDlbHExkADvCk0RJtuQAB+BFLSCHxAAuXyHYUvnEAD jNYZM5dQQAOyI/dd7gZABAE542qhdkAAQhOrzxDEQAXaImf0agZABnSnt9mwykACzt9OQoPJ QAG+GE8lsSxAAtS/+90OlUACHtRasS1CQAKfcNrSc/xAAri1nuV5HEAC1Nt7jybpQAZFOnKK Sxo//yTUY7hFMkADXLE+cNTZQAYryDXiOAhAAg3gVrvpGEAEa6XHdZDYQAB3RMGlNFdAARch XYbqlkADaAhqIH/FQALAmcawKEI///5RDt3qREABxfvm33H4QAO4cORaG1ZAASGvQanOekAD iEmONyAWQAHHaEDNg8lABK5CfbKeBj/+LtnDE9ewQAfNTo8sE+RACwZ5cQ/2V0AGMcHU4Y66 P/6a9zn//EpAA5E81wyEY0AEYoiv8VrMQACyqo4u/cBAA1TcirY5hkADQDS1s0nmQAFIz2q/ EQZAAiqKO8+oBkADQU7oysD0QABFf/WK3qhABH1wLj3EVkAD+J7xTL4xQAJyDFhLZfhACm8e daqrIkARQTeTJ9K4QBD9U1hpf5hADlsRVeyoEUAU8zn6sfNEQBPjyRFmFHBAENS9zpAwD0AK yz4OSQreQAaUGvSK4d5AEBuZbN7/jkAJBqGwSY8mQAiW+WZK5SpADhVfdL1Q6kAR3n9qsZw+ QA4vMQz2FRxAEU3GXB99lUASD8SWmTgnQA6aGhcjAoZADj+6l7ILCkARsX7ukfCNQAJrmNsf qF5AAocTjyCW2kAFMsmAZ8dgQAZjwbwXorBAFY4yYVUx60AUk2aw7j2MQAsyUDZSqupADqqD NYNRMkAOR+bee2JgQA87uNpKCHZADvlOLXfzJ0AJZCdTCNOuQBQAs4Nnp55AEuo9tu/5XkAT KnvIfayVQAZy768pURJAC7+YY9QpDkAN9CcKo/43QBWGZ81V0YNAD3mXt5LdEkAZcK298Q8e QAbtk5B3kNRABU5jAW1RJEAHglFAnBxwQBSqSxEMc5pACC/a7rSGNkAH0nEngZWuQBB/R22M 51JACEkYXLpcYEAIAFqCI82KQBbp8d7na3VAA/P2hiLoCEACz65Wi8uGQBbdFMOXUSZAGM3Q UVL5T0AVfrJI4L3eQAKI2KZVn9ZACPvymQo9gkASBDZ89AJUQA0AQjzyS+RACTdBDZqBxEAH ehnf1STyQBHUAOHZp8BAELWXD7gJY0ARWK0nkfs0QAo2Zkyjh+5AE7s+5wkJ8kAVtnXymm26 QAyZF/OkclhACz4kal2i8EASgaxo2V1bQBLCbKaBHWhAEtqJKHi1VEARk2pspmFDQBKdIRbq F05AEmxql71iqkASSFLW4acXQBRyPDL3zHJAFH0kE5uyTkARP8BTEvexQAzB49dxxbdADxoF KbYBOEAQxmjW0EjKQAyxE07LrbdAC/L8itTaikARa4TEA7tNQBE0YZ3BDLdAECEZKgZ6FUAO xODKZt1EQAtKIlXxM4hAEV42wtz4a0AM3iitixpnQAq34E0eRZ5AEZOnFxsgl0AUIW8HYFmh QBTs4dS3XYBAELBkr05dCEAQjqnLK6zoQAjRvMVWeXJACn+sqm33gkAMD8ViKPk1QBT5n4+C qMxAEqZiai8+okASawfnRo1KQAr2kkzngSxADJ2WZ4WwckAO9tW4rYqCQA6M18cOzUdAEBpz STKWGkAPRpTDOG7iQA+epgoskURADUAPQeumqUASYbI/SgfAQA5Si/DzdhpAD3fSUoTlGkAR HEbOExEpQBcSIgzkVt1AE2vL8Dc8BkAVrjnrotaAQBQgzVvXGGxAEp0Y4IrIpEANNbUb53CI QBKiQAsHVCxABzoAAEaAzkAKfAeJhs0MQAurTPv3cd5AC2aZTMBpbkAN8/mV+OpwQA9miezD /HhAEADGG6TkJkAKfXCtaLF2QAsI833fZBJADMIAEUh7/kAWSTgyc2O8QBt2fAekyYZAFWCl H+n0hUAHj6/lkqksQBXp95vr/FdAEHDBKI6XPEAVQ8QV3MM4QBdq6tuhZtNACTOkyfiRhEAT JLIflle0QA4kG44xMnhAEDEmj9078kAOiWOzi1MgQAia6XyJCG5AESf9qbD4RkAOhQQUC1oI QBLcLA3pecpADAP+HRuohEAMMTJ/yapYQAWMYjTbQjJABT6jqyjqQEALXeTlIuouQBMTaFwj XoNADGTw8yzKdkALy/2WEja2QAzchOKZXSJAFcc4DsEgNkANKb7iA+MCQA/L4c67qgxAF/ji sXXVQkAHWGt05x3QQAqseKq7qcJAD4faXrMYpkAPaXpUHpaRQA91Y64UYBpAFb7KcaP9GkAO n6ZyGa0QQAodmBrXSO5AEiCtuFc6fEAKyOhJ3URCQAkwn2wg++JAESKQmcw3+0AR4DzYTlI7 QBQy3FUEP7lAHQ/mVgNAekAUSeesMgIYQAaNteexD6JAEC3qTFsFokAKMyKadkcWQBYX9iS/ 
cQ5ACGq8o6tyEkAGU6odzG9AQAY2BZUEz6ZACqqp16D7wkASRO0or701QA1P5KK+z15AEEw4 Q2apiUAJVuIfmoToQAoUwMSb3w5ACWorQDo/fkAVJ/Wt18fkQAr33Gx9SSxAGLIMwHsd20AF nSa7kVP2QBAIoWL8UmBADpR2CvwqLEAUmRAyyjjYQBLQhUX6s55ACq7yltW/YkARwOMi6Rhi QBcuYcLf7ZVACeYxatkK0kAakGPhOduNQBhlyau9R2ZACm9RQFHaMEANY6P8p8ymQAt0O6wN 1IZADWK/gkncCkARWhomJNMTQAxlpWo4GwhAEBcgj1fZ+UAN744NakvBQBHDq7/m7SFAEb72 frb9BkAWU4J+9pPBQAp4vcMzwXxAD8OVhePQCUAOgcgsVjQQQBYrPpzY3FVAFLAeu5h5cEAP 2GwCWRw6QAuyae9mPeZAFTy3qNr7QUAOa+KjljG8QA8Vf99aA3hAEwYfFFBpOkAOP+ge/Ut2 QA32OHJIBPJAEesHc+7Z90AKqgK0yHSWQBEQ0A8VixZAEIEEEysiOEAKCvNCgpfuQBJbvtCN GhZADS7GO7tTqkAM1vL46tnMQBGp0bOuaIJAD71zXvF1XEAS5OzaWnuKQAsVkPoQldZAGcFU Dtv2ykAElhld0qeOQA+VrFJ89dZADSqwNuyxK0ANZDSmFo6WQBCNFoTqDsRAEPgwBQiUekAS r+A/m38FQAdrAd+DFEBAEo+i7Kg75kAPP4ixb3J3QAzNpw5EabhAFfLtgulb1EAXnHJfDj5V QBZokZZZT5pAERlFcvFBxUAN1MzvL3SyQBCDD25ZmpBADIcJzRhjz0ARBVZ4DUDWQBHYX9qg hvRACXnhvQY/BkANwST0FtIGQA86JrDVzrBAEVwaf8cc80AMtnMWxuSXQA29BW+tUppAFQAS 1fKuAkAVdxFUhnaUQAvWAzCiPm5AFAQ7CEFW2kAUDM0mKKhmQBPIbnM5YB5AGtLnhvVe+kAW rADh/gOWQBBb7uGLZ8lAC4J5KSMqKkAMZteY6xqGQA2cEKn8FUxAFsQWEtA+sEAQBzlZFJlO QAyQiJTHsRJAECdr4oSt6EAIXcOCzq/WQBUhKk7JVrRABniEO7N/zEASHW3ZoVV5QAipmSLC bZpABpDpz6ftEkAFf3rphbYqQARF+RN289RAFn5NpMYJaEAP6Rl431YnQAjtzIAKfcJADdaG RH6U9EAI4bXo4C/CQBUGlBLE26RAEwHJk/gP+kAQWDxM6rm4QA80gUw6nMRADP4NeygHMkAF 8bK3HLWcQBBsvAesuPBADfy/omjaCkATLnqJDTiCQAbKHvjDZh5ABuFJynlOKkAGuNY+QU+K QAPPELqRCE5AB0lp8kUrUkAE4btT7LuIQBTH3VMF3EZADxxnRRMkCkANrZTENh/jQAxjUR0+ pDJAEMrnQF1enEAV2GFdTJGyQBCWw5m1aElAELVO4WXo90AUFi8zEDaWQBQvbXTi0nZADC2a dm/9OkANstezgLGkQA3Y1mXM6RFABd7ou2Fe1EAMg6Smw3AaQBCpwFNHn3xAD+Rg9JPf20AO Zo0jUUByQBGkj+68bzlAFvcRHg1A7kAFt4YkwPeQQAdmralrgX5ADOsNlcpSWEAOyUA/TGgo QBDi424ezCdAC+XGl9jQrkAWPxTx4KvBQAYZJgP6bpBADER19BEb5kAN/94+/jWNQAWrgokR qShACZ6Kvw9m6EANWNOCstXwQBFD5r75hNxAE3ZPJ84oUkAJOv0MGk9qQAvEUEIB1ORAEGc9 3Yb2gEAQBcqUvXhgQAfnji5RaAxABxXrgGjEvkAHXJL3geSeQAjb1X9Qw6pAFtB3B7v3z0AI tIzd7msQQBKq14SurNBAC4+oCLHKPEARtNoJYEgfQA6pP4i7vXZAEGfKIhoKlUAJxXjB4Xiu QAb6aIQtZLhAEYddZ+BtbkARhs3lCGMSQA55Z8GI2rxAC8FQPcOCjEASJD2+9c6iQBM4mk64 6xpAEbkIeSczOEAHASHZ7zcSQBZd9SmmQuBAFRpuXnYrCkAQgvwwBLPqQBd6/4HhuFlACfLq Be59+kATtaDk5W/yQA7ZvkHOsSxAFDhHnZ49JUAQbylnwfj0QA9485bM7rdAEasrtSuLT0AT jIZnVC+rQA1n62fx9c1AFLX9ZXhXcEAK20ecwzOaQAagApRXDJ5ACx0kSBZoykAV/AEHee7u QBRs1hbhh0hADJe6JBytJkAPe5x5n4QuQAo6SRYWP5JAEB9YRQU+MUAK8F/VmWrKQArVrbTr kZZADRRwUiwy4EAKJscs6OceQA3k/XFFGZ5AEWRzBPi5RkANWmhN/SLCQA4trojEqMxACgCg fIhZHkAJGgO8KsB2QBLz0IKc1EpAEACLZiLKAkAK6lNV3rH6QBOfV9L7dhBAFrfvdJutiEAX S+5kACckQA8afFaqNC5ADRKeJuJBrkAOjWjor9baQBMFDSVicBNAEZ9lcamTQ0ATEuV2av+I QAkYultDIBhAC1pKJ8sD/kASFjNKWmgLQAn5Kq4/r7RACx/DBellekAKie0WwM3qQAivF4ll PwhABdmxQZzy7kANPkikVFkOQBIAlpRC2oNAGh9b6n8qTkAX5UI+kMiYQB4FYA168qdAFgUz B5UnB0AIKcmhmizAQA+nR6yAhPBACaFmox+u/EAU4FjTjVe4QAmhCu/6RihABqGdk/TuIEAX Wzw/gC9CQBN4alK1hn5ADax4FJgaiUAN6+NAHtPGQAzLW1xUnvBAEeQ7njEOGkAQyrd/tkL/ QBF86kDPtEdADPSMmuTnM0ALwdOz3taKQA9amTMxcHxAElOqe/SebkAPhmu9zLGEQA9j6wZc 0iRADpaI1mKd50ARGp+duABeQBHTe/jcGvlAFIKmJA2ehkAK8zMBPFoSQBFnexatxG1AFMHc fdrUfkAJduLKfEdSQAZnzkpJ5fZAERluY++XFEASwv/Cmb8jQBFCmmnVBTRAE1SvtwD8zEAZ l5TdBCGEQAkwU03w47JADQX8n932Y0ARJpokhlztQA5eCo4GMaxADShsCrYOxkARHCrCtgAp QA51BxVnluJACrD+JSNnOkAIWzwdMS5IQAeL5vAOS75ABrGZGYBq0kAJUcN0tRJAQA42PSET QlFADHI78Ktd4UAEYbrzhkfqQAaEmBJMZrBAF63dn6+8zkAUKOJkGJeyQAqTKKjd+ipAEG5j uSbpukACR3sJSmxeQAW7GIYhBmBABOu7BoSfbkAbHpZMeU0hQAxG2JXSa4NACzwIXnSxnkAM k+ym5RHuQBY1F3t3qVJAE7xd1poIakALOyvcQlPSQBJu6JNFVjBAC0Ky19SWDkAH5zjTV7kC QBKjq3cld1lAC8w9STlakEALKdJ0WYEWQAvhzSSRqe5AENRohHI4J0AN+/PgmrzsQArKvcY3 
FPpAD1cT6S2AuEAPABRt7k9iQBI+rrVy+EBAEAWNBXfFMEARsHEdSK0LQBK6CWf8XD5ADFB4 IzYcHkAJkFMCR7BMQAr3cSjyC2hACxdgfbXzDEAVNbwEdt3XQBxiKe4t+yZAEo3zRi0LBEAV ngjNhBVqQAmOQkOM2/hACuIlFHReXEAQyK/R9DfFQAtMcWAB3VhABb+KPFk91EAUObGA2ypu QAqkD8we2OJADXxGzFFJGkAH0daTbim+QAgpszcpzD5AFnNZO8EXi0AI9XrKNG4eQBRkzc7A fB5AEVch09WnREAOqKVPogDoQArgt5Y2pLRAB6puFpBTMkARVYGSfEArQBMv4hz6O7RAEvjU K2b6TUARrdnzzElRQApJAhZh+TZAE00XkwWgbkAQmlgDDZyoQBC6vHydi3VAC8msa/hyvkAR /+lVdgBJQBGzYNia9kNAEQgERVMAtEANqlmpdNfEQA3utccODZxADhtRZyWkAkAS6KBk8NHQ QBC4DV+iemBAC48/QdzqikAVDSOMyfLmQA+ck81CNPhAEjTeXUmYXkAK3wYZJhl+QA8B1tpJ qJ5AD5DWhTtrgUAOtbA+fgydQBBvTM2j10lAET72QMjJgEAR/QiJPtGsQAvqB14AGLRADbir cyyotkARlukZtG1mQBC6s2g0tVNADtwWtPViOEARSFXYqXXvQBAef1CV9EtADVbmUNPMwEAK fVBV5aDyQA4Ph/A+QNRAEINDxhJvy0ARDQDPmDWgQAisQnQsM+JAD9J0ohAcbkAU5pbcsBh8 QA5klV6ZkRBAEcgf9gmWeEATzJ8jJiTBQBOMRl47QyRAD4VL+6XC6kAPtFzSX0tNQAyraYIK 5iBAC5jhscTNukAScIoTuKaWQAXu4m8xtYxAFHereraHjEAO6GknSGoSQBUu0M4NwcxAE/Vu QGgwzkAQUVLH24gIQA94OZ+20IBAEO7aIIfEL0AQsuuHAaw2QA7KaC6+f7JADhFGM2esMEAI Oec57NhCQBh+JCy14pRACG4MjajFmkAQcdI4cubkQAui6QMNKQZAA8P/LbX6IkARUoT69PiX QBJpDLroUbxADDJU+zXkO0AQwt3PSKfEQAv5c7l0UwxACFODCjGgXEAUUUSPu0HnQAzIKzrG i8ZADeN4gQdxZEAMzHuD/8thQBMXYFSmitxACcxK9WyVQkANbTtVc8YxQAzccUOBr5BAC/0p 3UTmokAMVP5lrFXOQBAldD8IOl9ADbK5G5ct5kATPyCdQ6hmQBBT/q4hVxJAEvEvHF8Lh0AO ILira5x5QAOHbxBy42BAA31LG//JikAJjnKJZvEoQA086wRKQ2lAD8Y/cMTeFEAOKugTNqUs QA9Mfljte1tAEFUSucl12UALxwMm0c0SQAsu1rzqlQpAA+ozpNUu9kAD25cQUVOOQBg32r6k 1zxABUDKeC29zkAGoJur475IQAeMGHa64jpADR6FsfxeoEARWbiPFQvSQAfBl42spVBADheo VZENiUAIvTH7eBrsQBcEdXU764hAELXyUqKHDEAQ7wN9cmsnQBG+wtt8YqtAD1uK9KfaNEAR emu2VlQ8QBENGXL6bQNAD0AP4NN/ykAXv8kfOpvLQBkJnG8NvY9AA1FNVuqZLEAUJfylI8kU QAh9WU15vkBACh7pkQiadEATMWqyQwCIQAubrh0iDmZAD0PbuD+N5EAQOGWWk1DGQBQMPq39 PK5AEAsXmhLVk0AQZIneRIOQQA17K2qazipAEQ+elvjHHEAJ7Kcc7qMQQBPcwD0FBQxAB77M 998JUkALkqgLS90UQBIgqM2LDUxAC8AKqRrx2EAQI9NUsvGrQBEUL6kYFTRAE/rt0wtn2kAG B2R5a/4KQBBjzTUonYdADgr9wrC1KkAYl4eSMdeVQBHRE947GctADkcudNkiF0ANvoMlCCkj QBM+QzIcaWdACyuj3PDP2kAUZ3kB1sfiQA0pOWTDtp5AEgES99+YnUAN+2vylDOoQAp3B5PI KJJAEO4ZBr/10UAJWfx7BnVSQBgNKUdFbbNACmM8ZwxArkAJiI9WqX20QBH6ow6xgmdAF9I9 At2dqkAVlhKJBhaeQBNMJ7wPoQhAC91v90nDPkAS5AkCd0J8QAtux0sITrhAE9YpSlqmV0AS 2yrw6eZJQBDLbJk46zZADQWsmd9w+UANXzxNpPzTQA5boWRkizdABUM42isMQkAKCdn5cl48 QBpVA+CDuo9ABm/8WJ0/vkAXIBpGpxXaQBP2zJ4Es4xAB3Km5N1dkkAGeSLUyFBWQA360s8B 8UpAFc+/drBuwEAKGW/2imz4QBIbJnh41pVADQehP26NWEAP8/wFWSxBQAuy6Nvy6pxACBeE nIgrXEAWDoRrKuimQBEPS38aF1VADgAejS5Y4kAQOhT4RuiIQArF+ulfHfxABMDKAAOlNEAE VM61wHnWQBH+VIKch9BAGOrzWxUOUkARyi1Ew7z0QBM5D0w9krlACfXb+IqwhkAQVWXhhvyL QBErtFf+tRZAENBh0td2ykASXa50Xi5MQBB3mZhtcxpADdXts2Y7N0AN47wLrZaUQBNApS4/ 4eJAC6AEhS+O3EAStV0pOtOkQAsX5Lmy6zhACGOZgDcS2kATKCVb9iZmQBPVpVCHIKRADmY5 lZIxkkAJKYaIaGIoQARiDj0YwRhAB9WjgWPDFEAUGvKVKIKxQAjjU/IvH9ZADL4RLxJwn0AS UvYQxGCXQA4gs8i1vvdAHgVgDXryp0AVb4R1ViJgQAS1BePytZZAELFj3Jd+GEARjwn4Z2QP QA5shJaStWRAEVHqwyqvTUAIOu/BtpGMQBSNx0nor6RAEnmTrlqF9UAFzJUDRthAQAr0nT34 /vZACIXPGM5nfkARRGn+/brBQBc8/GbiEQxAA8obbnyRyEAVWVHIcXiTQAI9uCBDLP5AB7ix OpVYxEAToWutvGLcQAnoISsIa+pAEWrCjxyN20ATGolYxgCIQBGTSz2sA1ZADsZrBi6IekAL x6G7NdDMQA8FT8d9QXhAEGgk33dZvEAWlMNbbFcaQAZE9Wv4p1xAE/aDW12VdUAN0ejfZZYe QA31HEUYICxACm6MANcHbkAQrlyCkmnbQBAoDH9Ib8pACVMqDJYCxkAG/SmJS22cQAvozHmU RZZACVpVZFgckkALjKGcG5DyQBHTuhKp/5RACTl9/SnI8EALC6yc4jQqQAk9CEs1W4JACUQB ebIOPkAICrdwJaVaQBXhHn3yASpABJ/UtjEaAkAUu+dAAco8QAffEfqfD+RACKDtqFNxgEAD O4Of0jIWQADWq0C0c1pABydrbxm3DkAOA2MtlPF8QAgGb6mHWw5ACOHg1pZYWkATx/SPNkC9 QBIRtUE3oEVAB/cHaN1B2EAVaAs0p9W0QA9wW1Y+UKhAEvXCd6a36EAVUhDDTOoWQAnGo91L 
J5JAEyxmZWX3rEASfoHhtRHoQAse+waelJJACoUVWi+7EkAQ5guVd1w0QBJ1FN58nqpAEWfn 0b4BDkAJUqbG8zeeQBPPIarCCrRAFqBIMgrhgkAXi3/uuVJzQBYhiTITWTVABfY1gxBaAEAJ 11qHqwXAQBFuSYJOM6BADirM2/ITGkAOZP/0ozI6QBNc19A7GWdAEd1DsugZ70AK+lzMOD2+ QBTN6fTuVaRADwnS166ENkAM8uVrYixQQAdWrc1TrZ5ACpF17Wb4UEAS18dqFfsxQA8qp5Mr CBRAD0NqtqG0iEAR5poohXBEQBTaJ3wCz1pAEAbjujZHh0AGIUVT6zOGQBBMJJ5FsF9AEEwk nkWwX0ANlpH3/lF6QAiulF7spWxAEt1Be4Fg1EAQSlO4nXmdQBErGcxOEw5ABHb6BavtgEAU nsP65U9WQAorX09ceZ5ABvvYS3SLZkAYIiIf59xYQAd1B/GS9kJAC1RgTvG3vkAHHmRA6/ia QAvjfTNPxoRACW8mRzmETkAJFGh29kTAQA/nVj8d191AEelo4JQcdkAO+6jK8iTwQBMRuUaB 7NdAEblr6TJMQEAMoJsApbrTQA+7z8ZwzzZADjGwEOnmzUALN5/1CAG6QApUMXzEHmJACiee kfcJRkAb33sRrDU8QBhOYkK7wWtAAqheZhCY7kAIEPDMej8WQBDsn6EC4dhAFUrhqRncSEAT Rv0GRbWSQAj9lRhPOexADHPUNukGf0AOYASczyZbQBKgFCWuWv1AC4Zp2vprqkALaKMnIIj0 QAw9PiUmT2RAB5TbYshnRkAZ7mN554HrQAtIQq/RKWpAEvuXeOr9b0AVphW9H6cYQBNNeMln l01AEfUjUQp7R0AFbgrJvwzqQAjMMwYHO2xAEitDS0XjAEALET94r1f2QBAXxZuxaGBACdzJ 1gaTZEASgBQXfyHEQBGwTrI0GrxADe2jkGwNZEASaRDbNZOTQBaJcHIEVMpAB0DGQJb9REAU KA56pMgsQAzRFKts5UpABaNuOi1T4kACFrMxGLmqQBCA4YJ8MZ1AEZ6qv3TBvUAIns9OcaBG QAuTio4hy7JAFRPBjPXj5kAHUEX9H8z4QAbgtALv/rRAGUw+fjCe7kARuS1d+kWUQBBvwZZ9 biVACoBc0rfM6EAT0TmmU4w/QAlXRMPtrFhAD6cToFB/kEAPsPeVkDF+QA+eU5Ml3qRACABN D/AOWkAQfGmHzo62QBHEpmlOZcRADGxP22rH4kAQcOcf+7ukQA5+yCL/PApADkJ6gCmQDEAP ymvUqpwTQBIIHY+8ZDRAES5wt+pqW0ASwow8bJRPQAlprjVUSm5AFKSCNcM4DEAUOUADa88I QBA/TnJoCkJAC5LGIYIpcEAPBy+9bfCzQBAK+PxAwgtAD6Ji39JKCkAMFGeTFMjmQA6KalCs B99AFCbSknRhokAJOMAU+NWKQBAVR5EqsOJAE86rU2c9qEAMN8QYQp7mQBPFdApx/ZJAC5Ig G69YvkAOfMJQzP/0QBDXGyYhxAtAC2fTcn14CkARKkUXhuAGQA3Ri5jEiWRADhJc8lti9EAO lqsu+czWQBKhS9Cps7VADz7X3soM5UARkwA22e1eQArEppQILxpAFNQCmnYCaEAZKfjierGh QBJwgPvD3wFAC4E5FtSEFkAPRpi146FsQBSIMdNikXJAC8vxyTf0nkAQHlpPbCNnQA7ZEh89 sQpACkzGjprG7EALfo1kaC7qQA0kTs75lZNAC10IwCRTtkARxpns4dLuQBF+ChmLpuZACxB+ yKEluEARF3UW4kQ7QBhZZMs6DqxAFZp3yAdlR0ATp1TtU+/AQBJhLfXHwoZAExazBujl2EAS H3aXypzAQA2MmEDnwqpAF66EmCNgP0AMP7piD4vaQA870youvqpACdShU6MIBkAXdXl/HwJw QB0aDWM5nk9AGKlAWAL2MEAQc5laNdXMQBJw0v1BkoRAEoypne696kAT2p1+FxvbQBDpUIbA e8dAFAhFKC7n+kAT9rvy07V1QBEAmxo8ww5ADzB3/IQWekAWI/Nic9AVQBN8cuP3qxlAEkIf 5wdrbUAR1TkfNcY8QA4gYHqLFEZADiM0Xcb6EkAZBGyM06EEQBQ/IFPZHNZAEA4E0DmK2kAR EcS1AIkeQBD1W3C7DB5AE6wVj6rCVEAVf1aFARYKQBRyRRJcpeZAE6qzjt2DS0APBgWDLhwA QBc4+IbmnSVAD88n9hlmmUAOrYrj4hviQBnzCx0HwkRACe4kfDvnyEAQGLkfMve0QA83cAyV a2hAERTGrP+pMEAN6gaudVHQQBFeTF3EwgpAEI3UgVq+cEASTJODEo6UQBKA0+/pD/BADo6c IduQqkAPYdrVFlvgQBDXW3WlnatAFHze3SiR9kAUZzbdEKRQQBIYImm3FbZAEwlp0F8gnUAT voOrMw1/QBHEMehCyxlAF+MWsHiWC0AV69wJ4pAeQA+0paUj4wtAE+eiytVSN0AR0rBzJt/6 QBH5n0qPNSZAEZeMAjXj6kARPpYG+fmWQBOTNIFtfjJAFr4snkeslUAVommgx9fVQBsBUyes bqlAC/+XNiC9lEAO0RFX18SqQA+5Droyc9JAEo8/Lh2udUAPWrayJJdWQBVAN4QtttRAGSEi or+/iEAWbBm/tXbLQBA1TGGS2TlAEcCS1i32QEAQwn4G9P2nQAsLD7gWRsZAEsZ9fVGYPEAU DvqWzLdBQA8CN2V5B4xAFOgQ3/F3nEAO6/CoGhwuQAzaIcK3zPZADsYsInQQzkAQq/QbX3i0 QBJgp0Z5lfBADongV7LPnkARlaRNkdFPQBbmhhQ0nrZAE59W316wSEAW5ab4GqSyQBSywW4p X8BADqYGoRPKoEAL50iZkf1+QBXmCLOw+W1AFFymXGSk7kANz4uBcyjiQBTMDzyt4TZAFfRg Dk0DuEAPG86XKMXgQBOtJJfo+mRAFBDWPEETikAP03rvSUFzQBVtjvLmnDlAEQYO4I80RUAQ IctBLcssQA6ys62yKdZAEEEiDNq8F0AQ/PMMPDp1QBvfQMfUgPNADJCvakjDVkAWzECLmCha QBPnqGSZei9ADRQwCyVILkAQBe2qKGLaQBCPdsitNSBAFGxN/GejD0ATamNvSAD9QBLxZWUA 8wRAE2biOoWW9kAReuBJsBuIQBYwNUaT3WZAFiLPJ+7kuEASnBtO1WqWQBZ6idYwTe5AGyZG qD9IUkAVod+DV0V+QBE2j03axQJAEdYk8GW1FEARJTMGR4uRQBOETjPvfndAFJdueEkQh0AT 5u3pXmqWQBFEj3d6XvxAEOTSbuqpCEAVKEkcDf24QBDQKIp7b1tADocFK8z+FkAQJSnFl0Lk QA/9+xoWghhAE/RXHHzYsEAR563i2vn+QBZgse7YuBBAFyqFtvxVrEAVxUZlnPESQBM3SXje 
yQ1ABLU1Vf3wHUAFbBWO5TuMQAQAPds0zo1ABzjUaOV/1EAHW7tL/924QASUwYgxNFNABEB0 t8JDqkAD8+ee98ukQAdbw4BCb+RABeiXXHstXEACG7YAGMi2QATmy9Y2tiJAA/MKO9plmkAH kkOmpCiBQAZm21a35YhABIWMd9z13kAEwvd1DaN0QAnv0oi3wVZABQCD+yX1q0AGesJWX0yc QAQ9ZeVQaU5ABij3zVWYbkAIxzOI6YpXQAMBjxBDt+hABYVwHmUiCkADT0XPoa+dQAFkSMGL GX1AAyotDAdcu0AKT0iHqoaUQAQXCncdo4dABh/A0j9SHkAHEWFic8rkQAQG+MoyUKhABVUG TcMXKkAHoAWFXFPMQAbJKgyjiLhABq/yft076UAHkiUFKdvsQARbAj4vNDJABZvigf4590AF mtpxg6yoQARsoO+sBzpAAtwJ73VpckACXyukKm9KQAEkFt+xitxABXm2mr9NQ0AFGy/rqKX3 QAQZ+f1hErFAAhDVa27b00ABWuCNV/FrQAMlvXHRtGlAAcJv5bTPTEAAjaiZ96XaQAWghrQV hCVABPbGzWfKBEACSaVULoDCQALt96VsCVxABIvZ3phcKkABvdHrGcRAQAEZmAS6ZaFABNVW ty3mZUACHA52GeVJQAG6zDgbPOlAAn3FaXE0fkAC+O8QnthcQAM4SG05BaJAAyByD+oDdEAD ZCqWTCwAQALLstCZ3/pABE+UZlKg9EAElxiGTV+fQADyPRs6T3NAAsKXc//vzUADZimX4ED2 QAMYAcZWbypAAZQM/wJbrkACPNQ9vKmOQALsevhn+kBAAxaqYi8E1kAD0cWSeWddQAKOMX7f 1P9ABSop62Qd0UADZg8Wg+6oQANlWZj/rPZAAlj4eyflnUADST3nRyLyQAJW98wpQzFAAcqB Il678kABkOj37VBgQAFmz8eBN5pAAQXIO6VZ1kAISZVEb6UwQAACy8M4Zxc//7a0kK3I10AH YP7mTe7SQAgecln2yZBABxupMTTtWUAAFooDtlQzQACIjAXCVzNABCdj2WXZgkAC7AnQc7vi QAKF0JebPUJAAkNTA9HR+0AEtNeNZR5XQAOySBmIn8JAAoa/WRERkUABXkyUK5PAQATG6xOY i8xABUGsP5Z4eUAA/aMhLlc6QAEsGpwKTbRABCoBEHJfUkAFuuUmCa/9QARWHm9fzFlAAmSE lp1lWUAEYiTTeDuqQAIvP/0CuR5AAkFGh0uDOkAD+85duQX7QAS9v2jfbz9AA4TTph3sUEAC Qfp6cE74QAJnrspi/EpAA3HwYiV0iEACthOPVIDuQAMEkSQNRvxAAv9xI5tdcUAAZiMy6/xv QAUXhhMI+gFAAT440l6v7EACpBKkaPTcQAQ2pKnJC4JAA3MkYpsyckAFNcEAqB3wQAMF0PB3 4GBAAoJsXtUuKUADjnpefSYYQAM/NCD5hkpAAsQs1D9NmEAClJCTzRq9QALzalscaW9AAzyQ ZLV+UkAD88ORzLpiQAKm0ldAgmRABMCR/44OsEADqF9pmbbGQAJdYrG37pxABIqjTMfsN0AE hMNatP2gQAF06C+OTLdABbV5sMdymEAFYz16OqadQAOA4JUOYABAAsHVediwFEACrhpg0HjG QAKD2DH6wSdABM+c0JSS2kAFIpwQpiBuQAPqD8DoNVpABKlt+CbGS0AErNh5U6T9QAQIjSx6 KCxAAkfqJWuq00ADu4CUgubiQAJDHVP2AupAAng3GGIM+EAEIKp+SIIWQAOfmsj716lAAekC WWR4NEAEIIf2VfeqQAHcI+n2KQ9AAnT0r4VJx0ACzHkww+xCQALtHD2TkepABlQuBZS0o0AH vLo4q9+dQATZ5udm0vdAAkhS83PlTEACa7o2KHfjQAWRhXQXX/VABdc2e7c6REAGnkPCQjei QAH+z25BK7ZABcX4ZXzGCkAC56kpJPYoQAJ1mzBm00JAAuxxXYtsDkACuD/lhYa1QAR8zhbc JzFAA16jJ3iRkEADk2bRrLJuQAJn60vFmD5ABK9ICxoi5kADv6vwrYgZQAP8iPQjLkhAA2ke 8bGCt0ACRw3VGd5AQAOH2Aa6XnxAAr3soqVSrEADu2fsSba0QAOix5uXbtlAAv274V76akAB Lqd0JQyeQAGrzTJIEMZAA7NzwrWV9EAAyAokyuXKQANBD3pgW7ZAAU3936XZ8kAEQfdyU30s QAT2OtRAqBlABBjQiL4T2EAC0F6jcTGPQAJd/LK2NO9ABEnnVw5pM0ACWvbaFi8MQAP7s/is kIZABCdujUkW+kAENa/cZEtOQAOIS3J5zOBAA1dSGenwKUABZlaDIA4jQAFkYRPEUuJAAnJq r+aJhkAD9hB6zICBQAHTcMhoWLJABAxkjmad9UAByoWLei2+QAJzC4bN6tpAAop98v1pL0AD 7xRK9SD8QAONRWipdJZAAtwTt1qYgkACKsIdkq7qQAL95OoSbOlABOSnOOGC6kAB0rz4U8nt QAaAiW3cnGpAAfL44XnTbkAEvAN3MlDsQAPIojjpU/dABEJLKQiy1kADi+GLGYzDQAIeJAJd ZwJAAslI4WTH6kACEwjDeNlWQAPEo6KgUw5ABPgTDPEmUEAEO5htydzaQAMki+BrgZRABNOp udT3ykAETPvu5rpKQAPBAYFMwX5AA4j98Ut5ZEACO/0mD2gGQAPZ48S787VAAli0sY+H2kAH DJaK/APxQAcrh1IGdUZAB04QV53zIEAC0xl/qJUAQAEF/U94DddAAe5SWXh3NEAHdUP6YeIh QAbKo0GgDn5AAjLfqcbmDUAD48ebk4piQAb+OiU5L/xABF7npc1k0kAEZdI124zLQAKQFAm6 0/pAAuEV7M17+EACvn1hya0+QAS2xm2ncNRAAffYoYSx3UAEDSiRlAXVQAP6Oaz1CCBAAhl5 A4QPAkADGVs4ustkQAJOuwX8DElAAvO2hxcPakAEKUNYozZJQAJCEGkUQE5AA/NEUavWAkAC X6G1hwO0QAaUDlIluG5AAP9qeg1juUAENQ9Qs5T6QAJbB1z4L/ZAAjNh5ceHbkAC2x55NIJK QAMbhS5dl/JAA4MJxJ2BkkABokUfJ8SYQANpUE+cmiZAAwtmxbKi/EADRGwnO7TDQAazsLb5 6EVACN5ypMZkGEAIfgXdBf/aQAPA1QFfSHRAA9B9SReyPEADNwNs62kGQAHyY1TrUh1AA+Fu fpZBK0ADpTs9a0l3QAFEzthWZQpAAvBdZOZRcEABmBQD9TH4QAI167XzlpxAAZvgfSQJQEAE g/yZDUSeQAYQUHeohn1ABFtUi3VwzUADFetCqC4FQAR9WXLTa0lABHXADjLgqEADVfqpJqzS QAVwks72KlZABR7h3QUmskAEiYITXMmCQAHIVFDJ3ChAAgjtq1RF3EACOY4aHRqHQAaozSRi 
UBRAAzeNo2o7XUADiNSE4e+LQAPuS0KgOZJAAltYZX5VxEAFqtv6P0VoQACZFJ2yenBABGW6 E+/JNEABcj40VRa6QAJ0/IAIRnhAAj0CC1uTykABrtpygtzCQAOmbYiTq9FAAdwBEbvl6kAB ee69Rfd+QAOwE6INt7tAAogmAI1CwkADJfoKkVkWQAQqAb/68qZAA5596KIdNUACl/jkZzk1 QAHUIOD+VNFAAMRFWRKhcUACh758GstoQAOHXJZyuJ5ABYfTjAhcZkAAeyKaffu4QABaScNw +HJAAebjTSaOIkABvWWYeDYPQAJJ/lF1kuhAAe83rVTC3EADyTyflfPUQANL+5qn7v1AAk5t q/4hPkABxIK5I/35QALLc03e9IhAAz5C38G+XEADhBI3QC7cQAMasqlCwiRABALxT2hHnkAE cYmYROkqQANanwteCOJAA3tKrdOV7EAEaVMMzehLQAKfSky6oIhAAqH5ez8QEUAD2MNcwKpM QASldTU540hAAVc6QgfZ/0AEhpFeDvEcQAjecqTGZBhAAuaQG3AWhkACoksVqIDtQALVBmdq 2JlAAmjZuyLMwUAC8BrerXyKQAO8c8epeJpAA2n+ckTX5kACjwXnEg9RQALuc5l1PrdAA4Ml aojOi0AA2Zx0nX3IQAItHfBv6vRAAukw2XItQEADB1Y/atPMQAKUsBIjpStAA2R8/n54JEAC A/WMLj00QAKoAGQ0qlJAAzFTJ8rWv0ACwnmiEob8QAMz7N0h2oxAAzeftk8CQ0ADz5Zg3Mbw QASwYNEdT3FAArExrVrpHkADBfEwcVgYQAHNC7KCYL5AA7UzNAAt/UAB9reaR59TQAOY2+8k h7FABV7kI0JenUAC8qdKVKeOQAUQSgwSwylAA20kEzXVOEAC18iK4I9QQAOtJs83Jg9AAkkL cIM8+kADBBycld6nQAJ9l+MneZJAAsUOuqIYr0ADtt9QzPVVQAIq8H2fC1JABF3trixnLEAG S/VgGDK4QAE//ViE8bBABYMQ+WHoAEACn7uEcU7uQAOGPkn5d9ZAAd9K7VVZL0ADu6GmM9zk QAIRpi2QFuRAA+hKfJJavEADnMxJkiyBQARYbwyZS0BAAok9bJBCBEAA8jtgzw1GQAF7uTQO 7ohABNryfWklWkAFPal6DkcgQAEyqXODj3xAALdTloOqUkAE8IsxjwR3QAQQG7kdyzNAAhCn KqEulkACjttxsHbsQAMcjKemhRlAA5G6x7pGEkAEkjRKxPBuQAH6+hRFCy5AAfxWuLNBYEAE fy+qgaFIQAGvXIGsUNJAAeVd6rdCrkAD+Z+dFC96QALRoDYvINZAA15uByXH10ADy9JrTeRS QASJ2wJZ4HRABaWojz76ykACXX2/BMKeQAK2ekEM+FhAA93EYZosj0AEGZSyrP8MQAPEqlpQ EXFAAvZBMnwvPkABvYwdmGPIQAN2viNozSxAAotgWPtvm0ABN/XbyI1UQAIzrt62wH9AArCD LqnqOUADRZcPBgw2QAJ41Fbdd7dAA9Z/Hwq4ykAC1R7URMPGQAeLGbMu4N9ABCSHhG27NUAG bhfm6HeYQAQzw/92hH5AAkj9R9pllEACTpTS9XvBQALsWirmDtJABncwXgVGckADOETfkXsu QAIW+ELr/dxABVqWhqyeL0AEP7kyraFiQAIyNSwgsnJAA7nUI9JdK0ACzMC7T+ZEQAN5nekG jl1AAxIDn/ZjGEADGn08STYmQAK31H+VdrhAAxLuzuRvokADpdZL9vv4QAVWVF1F1txABTmw dvmId0AD6xvW1kRLQANZ89BSJ8lAA2HY7MGavUAEIVREKRfCQARwZG81QXpAAP607toZMkAE L8S6CcWWQAYlhjQiv3lAAzj9jmdkLEAChavuxM74QAPgfJnBtyhAAwPjg9nTq0ADdDxvhCIW QALNLuWHD9NAA8bXNKD21UABffkovUQ+QAEi95IExM1AA82ty6HctEABa9g5baK4QANU4ie8 d9BAAeAkTVCVBEAEJ5X4Kb40QAQZ/56zYS9ABA+DTwv7XEABGmVAY5vwQAMw7C5m4+xAAtQ1 5Q+EZEADdLmHcd2wQALhcsQQXVpAAirlZ4zpzkACycsotZidQATWOTnBE0pAA7KZgWDde0AB p3xbDbzZQAQUx4f9YapAAY3SB2g7IEAC2m7rsVluQAKDvB8rNYJABdFgpDKEx0AE0lAIUbdq QADB8j56KOZAAzjQzJWyGEAGXJoFvZmUQANZ8OVs8khAAhTcLguFXEADrw0C3tsQQAIuISK3 /sZAAbY7Bzhg5kAFBi/D0WPcQAKU2cu1VuBAAc9pq0R2IkACA09HYL9OQAHf292DMbBAAL6r UT2PAEAEFF9CsqoIQAFk75zbLtRABB1fPlunGEABxrlC2nRbQAFgTo2AZZ1AAta6lgqm9UAD Sl/pwuEmQAMLfp6fscFAA1vjmShUXEAEPc7rlU0NQAYC1iTkjTJAAs401N/P/kAFdR4P2frG QAM31ga8ZrZABCJI2opcTEACkLgJ+KX4QAJeCnFCRiVABgmDTp/8WEAAurU43hRiQAIY6dDR qlxABEdm/gdBR0ACeHhS5QgEQAO3P6KfYbFAAcvG8lSAEkAB8koghgKGQAUK0x4ItvxAAsFK ucrdhkAAXBYgv8a0QAEINdkGcUhAAPfunv0B7kAEpTzXruirQAPzr1Cow+tABTHa4y1UckAF DOR5Ov3QQAV+XMltPjtAAzwjgozUmEACBNHoR3JqQAJN7ZNESHRAAZ2Fpy9qgkABhT0aQQ+q QAOMuDSVywhAA2EvHl11SUACQw67to4aQADuTisX6oFABJNaSIe6ZEAA3zVSyY6eQAJbo6UT FaRAAiQxQJHhJkAGQ+1sqRUwQAFg5/EM4exABU3xU/mU4EAFE+Qq3hs4QALKDQSxErtAAhqY PCRNUkADmHmB09A4QAPZ/+JedXpAArXlXYe5SEADO165tHtXQAQKhoAnUU9ABelIvskBEkAB w4ZHhy6AQANCV3Va98pAAuARgSe2gkAA9h1wqCu2QANsXCXPcx5AA7NWkJsPuUAC3e+xPk4E QAL797fxMfVAAsuue2HcoUAEsRJrs1xyQAE0Ajn5TDhABJ21vUbHCEAA/MzXERm/QAS9LiFh fmZAAvq7IUDRFEAChtbJXeh0QAM6pV4ymwZABCREVbnahEAE25dgBN7WQALLLMbcHQhAAseq o8eLfUADz0HMDgbiQALntMkk3ctABJfDHOQBCkAB8Ng3LpoXQAN2lXOmPvhAAhog8lhEsEAC ZPkqLOQlQAKXZ5cWXTpAAzlP19HAIEAEIPyFddu/QAMukytvOQ5AAjYIWzq8MkAC5Vk6S8/P QAOMsY8kOUBAAn+Vgb6DNkAEjUaDNA+6QASRQuszDUJABhc/ARlTfkACcHldIyDaQAB0v3Ay 
EMRAAm1MtWXwUEACq7yZPNF4QAMoWWnBZwFAA25DxaSI1kADMu0VnvQtQAI0nhcjJehAA/f/ 1fmf4EABhB5YaW9gQAGqsZoYv+FAArg9dg0N8EADyr5czH9CQAMmkgnUQJRAAwhV6XYANEAC xJTdBtWyQAK6MYOzNdJAAj56OxFaKkAD2nMpQW2EQAP2lV35N91ABh5QZBn7eEACRK7ENy/h QANdBttagllAA5NTeYlHO0ACBe/KCvwuQAG1yRekpMpAAuUNxLDwU0ADDlOKkQidQANCFwzd AxdAAoBbSOJse0ABbj231An5QAR3wyUX+0dAA2lXnw2wZEAEYiwPtX3iQAMrQ6KA9XRAAeFw zu9HnUAE23pnoNa6QAKZxhHACR5AArjlF5emFEAAIsxUifLqQABX2D2tWqpAA5RVOIRccEAB zS2IqcR3QARhFf1pv8ZAA1p0KwfMzEAGZTxEPKdPQAI4yfz63WpAA9z0zc0fJ0AD9gDKTdPi QAKGhhrqPdhAA8bhryKcW0AEAnbbmgsgQAPblgw+2aZABuNes+SyCEAGPBO3gjJ7QAF30nLj MhpABd0j3q/6RkABGDhfKwtyQAPjPB5KDURAAy1T9rGigUACy2hFDxY2QAMEiZJ7nMxAAgnR SMwokkAECSTqYsO7QANVHgQpt+hAA2GrbJFxCkACmNIj26CcQANteammIsZAAWz+t2zQF0AE mzip7PZ5QALwcXIgfRZAAUqyWlR9JEAEpHxhsYxcQAFBKISHmeRAAo/7jOQAXEAEGOxuOmQC QAX8R69g1+9AAIukRarCdEABCot7hkQmQAKQsXXEJJxABNXiPvFOqEAEjoClZXfgQANrLnWK uWhABPQZtPmCC0AEMXQRNglDQALb8t7Iu3FABK+bCKVFuUADU/6tjwRCQAI0dgn8O45AAxLS 7NkQx0ACUhJjbNtzQAJNy5CnbhZAAtIkWAdwYkAEC37LGFTYQAJVQtvz5J9AAR6wFaZql0AE AezDPWhIQAOAkL4DtEhAApJTyjiQC0ABXIscppwWQARN7peP3m9AAlAZAbA6BkADTJnC3mC8 QAPRVZ4hCv5AAitb0eH8wkAEKh67lMqtQAHvZNFWyHRAA50Xsv8wlEABP6e21FTWQAEIdAcT 4fpABCafc6ev+UAEXyHrmjqGQANPYvOVt7xABbAhf6m5IEAEbNL7DXfEQAIiJWvCbexAAgiJ 2tIXiUADhD5fYZLcQAPYkp5iaPhAAiqHs2kK4EAECAzUbouiQAImsq+qgohAAqLqgBXIIkAC PWYWjBUYQAOIqedXAH1AA1DO8MmbyEABA+f/kie3QAAp4pO+njxABWei1X6m/kADqxa2F8bX QAIz1u9jP89AAirPenphEUADMsE4DdljQAf5ozONS0pABYyk5vKJH0AEWpVDpqW0QAHlUpZd XXhAA3pC3vs0zEADfKMeYEbrQANnqFC+BNxABcuhiOzKCkABU1LJr4HcQAT97MkiCBFABP+S gipTNkACVBg/h6A0QAKZd4rlvTRAAjO8jsUktkABwsJ13/9DQADplqPG1H1ABUnP8hHk7EAE 7kWSLtQzQAMECcXA4kxAA3SUl0J9jkACLcxBEYwOQAKRbnpAyQpAAuRjFzm4akAC+n+yqO6q QAKcTf/N57VAA6F/7mlmnkAC5QT7EYV6QAc8SjbRStJABAj5ShOhUkAB3LuEGKdUQAKoYJ/7 THtAAo3D5jLK8kADXE1dfPgWQAN4DLm+jchAAPZTmy5axEAF74IYElyeQAVFuP8aoSpAALMN LWsTAEACLjh7WuTfQAP0CRLDIdFAA3oioAmo2kAGNGX8UwCqQABJfQ5b4WNABcBkf4Xs80AA mHXVwyMBQAGaryAJSbVAAzH+Aa/7u0ACt20DBVfeQAL47L3lVVpAA5fGgv8flEADgFOPK9QQ QAMm97887ZBAAurSwqhS0UAC3HqXV8vCQAH8JD1B1a1ABEgRl+nhtkABHzKKsDPqQAJ7UIFO 1CxABJHOEIv3JkAB+VQl3sOAQAL+TcxCZnhAAsNcsd6AVkACUe1NMVR5QAGBPQqj4ehAAtYo tJD20EADxDM8PDLIQABPKrR8LHNAAMz/1qKZ2EADkXbMaLoCQAPFU1qd9xBAA6dR7V0UY0AD Q/b8cmCmQALzZ1EwK3pAAwETu0fZ7EAG8IH/1ze1QAJyekzM3mxAAQPjYMD9+0AFLf3sTBLW QATvGBNLelBAAmyquHBKNkAAtloGRhE+QAIrjRsBJwhAAwqGmErxYUADrwpAnEOUQAI1f36q jftAAv3eA5dkmkAEQEv3o2sSQAGGY5Gtx8hABLM+NNzLqkACOH06eUSeQAVSHWJuLEpABr71 d1HPVUAABDH+9tWeQASxHxWMYzRABF/5Xu1/jEAB2i2+MXTgQANrvZgInIRAA/K8nrON7EAE Q+PRKg6wQAKPPIN+y8BAAsqw1zjTlkAC2iAi4ufQQASwv9Ig6NhABOTr8lWqLkAEuF/ck/1/ QAC51xq7TbRAAeoTHz6gMkADMdOJFHeIQALVD6Fh2FFAA6nz8z4fWUACZQSG3ON2QAGI6Tjx wcpAA19ix+Dx4kAFbBSB0V3hQAPJWNUdlShABAOaG66sj0AA/v0Sx12cQAHvRSlqduRABBs/ 2oWsSEADue8v4a4CQATRgJIs+7NABMq/U5ykGEAEyrgDCSMAQAPza1uu11JAApnEtvNu8EAB mTHQVWQ7QAGZMdBVZDtABGtKv7/oZkABx5O99V6JQAQxJE/sDr9AA3hyHYfLpkAD5dYt5pi1 QAHWYcemsmRABECPTykd90ACBf0ssTv9QADGFnJ9oMFABiziIYdZMkABC9+ISL9VQAANknnd ExNAAuqc7rF4MEACIAV9y+/SQAKZUZQ4GfZAAT484PG7fEAD3noBZ9m4QAKEypUHUDJABJPD og0SrEAD0zGwTyxzQANUHArI0UJAAl+/VhGMsEABRR7I3B0rQASEaXrWneJAA5zl04Psfj// 6wsMq0leQAAWn09RcdJABoonOfE6/EAH2W3j8mGYQABVF3qfLvBAAXEAkl/KS0ADhaoYf3TN QAXjKafu7a9AAt+UB43f5kACsNooii/8QAHXaVsk5flAA+jIYrtFDkAEguXAL8doQAGDiJF/ pzxAA7CJaeMA5EABrnEyA+T2QAE9yY28txlAB6LK4X/+u0AC/WvMjUUuQAMNRfYZmJFAAfIz pnw8DEADmfM+nFqiQATAOo5gQpZAAZPtBQhLykACVB+TORjoQARwMl0SAmxAAbIAtd6OxUAD ZnnyD2IwQAJ5238UxCZAAni426d3TkABWWt6VcnMQALacBTuzERAA/uONFOeDkAE/tlnkB+o QAEeFGEAWd5ABSZesWBfTUAC9V7eTfrnQAJlOGgkQcJAAh7+cBfVGEAEHwRP6uNwQANl/uT5 
JthAAV2j722COEABgyWMNSbqQATrfUx1U9VAAcnBlTbvSEAB4q3UbY5DQAX11rc0cttAA780 pMXHNEAFDn/cSx/ZQAFC0rPWQXhABBork7YiPEAB8BBiuBVCQAK/jMahd+pAA2qIyFjbdEAC gQYWUVLIQAR2HTLhgjJAAzpws8I9wEAEkBTPR8b/QAHJee0CkMZAAecIcIEfnUAEgxjup7OM QAG4sQ1hpFBABCBgsJvuBEADJ7ATM2WTQANZ3y+f+xRABBavF+I1NkAB86jmbCiRQAPXIpCr j8hABCE8h8P2IEADeds5AVQZQAMO8vuGb4BABD/bY6v8akABzPL7P94qQAHatlZ1C+RABAnd 3TQX0kAEDDQzJksyQAQsU2yvailAAaxXSZIKEkACu8dfLPgqQAbWwvg7IcRAA3uI2GApJkAE KxilFnOJQAGYY7Ocz9xAAjY6yUrKSkADpxsLm9YKQAQCdbj+Xa5AAMofjx5TkkADOfiAE38m QALX1s+b+9BAA9l8FzgiBkACXwCyFQOrQAEi1hA8LRpABZZ1ohWlYkADBGzrOG4RQAUJPYBX Ht9ACMGjTnq4nkAFm3Xke9KjQAB1nfI0biRABFxB+ZhX+kACdKfug093QAHc9TLQb5hABDoD q2tevkADO9e6WbP5QAHaDrK1obZAAm8ETCiMU0ACs/RW7H0KQAGUesJ9DzJAA9euZAp2zUAC z5DhQz2bQANFOzqQqPhAAyA4iEGDTj/+3R0x9Po/P/585cDeR5I//k5+SlO4uUAAbbyfjvQU QABrzbVoy1BAAOlQkmMv4kABNubVTDYwQAFDIlEMhyBAAaJ9+ZAXukAA8a50kQ+JQAHBd52U wohAA9ghSxCFv0ABhPSUyxLgP//o85XbA8hAAGNMA2SGyEAAwY7cWbHsP/9VM/Mx6nc//jkJ WysgAUABVXoUKw0oQAQoaw8GW0JAAyUGOcOQdUACYV4o9exsQAJ5/Tlgoww//Qpzqh7flz/+ PeEQz0PKQAG4cHXt/gNAAHx46GkmjEACPyl9lPFiQAJMr/EzyzA//QuGMVp+xkABpiAssIHs QABUqmjrd4s///9F3d5Hhj//Lgqjlpm9QAH0tPPjO8hAAS09RRxyqEAAtB54OiOlP/5w/vJD MjZAAMv4DB/uKT/83lp/T1VxQALxejH6Vz5AA1wehFSRH0ACu8h+aFHbQAIyW63W3CxAAXhI BoXstEABHtpi53raP/9xTwNxnVtAAPU47N40nUAANOMrYHA3QAHK7E5QF6s//5gVqsY5zD// khOIw5g0QAHaNZPJz1xAAYMjIO6Y7kAB0mYSbMvmQABYVB99U64//stqpusZzkAAi8uczxtc QADM1rVDWOpAAWKMl9aFO0ABfjZDxMNPQADZz1QUh95AAG0bxcq970AAafp50RD4P//fvmP2 ikJAAHzUCQZrw0AAPVS3KtAYP/9Fp4RyahA//+MO3VHTPkAAMUnclzrAQAEwz9M90MlAAAiP v/ATHj/9z4QpM5hsQAHk5Md5bVRAAGq1rZ7AsEAAOx9sJKxoP/9zt1NJNHhAAO2SJLe6hkAA C/CVXNZAQABeLuT2GyY//5QddAOx3kAA2eW9oGDYQADFoAv8HvFAAUjXLrIFM0ABy8uMTvRb P/2d/dHtUmpAAg3LB09fJkAATi/GUusDQAD3PABFIUBAAWUt4wqXkkACBWi3xfg8QAP8kely H4FAAFDgJtn85T/+e6bGMIexQACnuCAiYvNAAG5Gc+H3Sz//UxOkVRl1QAGRbx1DBLxAAcb7 n4w110ABYbM0wVTWQAC7Uhh5BMFAAVgXAPFBYkAAjiMNjLFbQAI01vFL2UxAASzPJvCCJEAB 6s3sWxy8QAIc7ojNlyBAAExYymTpKUAC+UAF5KbbQAKFKD7+P6FAAZZI20I09D/+MW0KK+1W QACXlBDHwQBAAJJpGvzxGkAB4/k9kkGnQABuhCk8e9ZAAE0cFdz8uD/+K9Jd5n5GQAGgKDfq 7c8//7M+h70HlEAAJVL/H3YnQAFP5XvjKoFAAZWQtkJQfkABHSdLGwJoQAJ1roFoJ8pAAfKR 4bl4Qj//nXz5AMolQAFXz17a/zA//54ZMgwdCEABFuc29sIpQAFP9swVoWRAAOCg/m7UZUAA P1G50auKP/2xZcJxGLw//xrwPMZaqEABfKFdiE60P/6VuvILd7BAAlkP0jPlU0AAWJo9wGRw P//lHQbHhzRAAM1CE1FWykABWo7Ar7dmQADNPrO53lo//zqwLgmXX0AAxXPLcXLEQAHapULe XCJAAW7TZom2GUABQ+iUAWL1P/9LG/rkw4pAAKCWmepbykADT+inCDtaQAYNfS9zlmpABidr XVAd3EABtl1wrhjaP/7WXrGxZ9ZAAdle8mLA6UABESjBI9ggQAHHAdoQWBQ//SNpo3T3BkAB Qp2lz8G0P/50ojWUpEA//ncCpBtE4kADNB4lwwyeP/6tBKSPZghAAUAXZZN3fj/9iy2XgrpS QAHR1DiVaxZAAKFsyew/6EABhTgMYhTJQAGGtMpTPGlAAAonjzyTXEAC0aFqNiYfQAGgipdb 1dBAAnOPcOJ/LUABWpJQiE0SP/7Xskh+zRI//ousfmX8YD/+RYeEcqZMQADgNm1Fjuo//938 tTc6NEABnanV8frrP/8zhE9IfFJAAORhHVR6mEAFGd2mlVDgQAFMrJZM+a5AAOaX4FUy0EAA dsEXnAnVQAHVi0nS4A5AAIfM6iBquEABrnTZm7YzQADiqVPcWAJAAkVH8/jL4j//BYJ00gBC QAEavgEtv7w//txAJk3zYEABm/+nLBHqQAKgBCTK72BAAr7FLilh8D//boImYstIP/+QL7rW SY5AAJ9meIkuTD/+/tCAENy7P/0BTIw030hAAnfTEA2qZ0ABOPLAGo5OQABuNUdMUo9AAklk Ry2g3kAC7bHR/g34QALE34i/YbBAAbyrKw78Wz//99qNtyixQABJznc6qVVAAOGXeTPs5kAA L9f04/DtQANEWzyzLmhAA2kRDslPnEABDoOUv2FUQAFgE9jJRu4//lpXCd5pM0AAZCWqITbY QAEhqjb5U0xAAWcx3ywpd0ACG5UTXSfwQAKHdpPBZe5AAWiQ5lQ2mUAB2b/2eqvQQAG5P/Pd DsVAANdA9peRiEAAfhMer2RqQACKwIIQ60pAA7j2FB4X3EAA7O2lG7a4QACjIIg9Z/pAAY8Q vnP/bkAAw3LbizlAQADJQT3s+jVAAEJTLsGtFEABIs9P+6bYQACyAOXa5FQ//50XRp5vJEAA CNu7nrBKQAC+/DC2uORAABpl9q1vdEAAxoZ6lBl6QAFnCO22daNAAFp0ydmLskAA1VnXnfL0 QABt0qkbuS8//+Dj4PHDez//Y+zpArPsQADdHV5m8G5AAJA3xcfH9EABbmVuONqMQAKiopxN 
TnhAALLhi2hiPkABU728Oz6bQAGpl+s5eQZAAJmY+GywMkADpm74WiyYQAAcAj8MVPJAAKYJ tsMe9EAAcnkVIlEYQAIAC0O+t6hAABS8VarY+EAAnpljpBTxQABu3M6ZiJFAAA2OYkX2Hj// MTUqPJgGQAJFVpX9Y/BAAKBlNX5GLj/+lSIc3ndsQAFp+Elm1oRAAE79Yzj8iEAAlWq2tv1g P/+/Yt9EDpg//spDqUPBLkAAH0CDheg1QAGRLGO4tvBAAByu1eBXokAALikNJLxSQAA1djiD ecdAAH6zgdw4x0ABaNHF3EO0QAHyHjm1tv5AATx/d7Tp0UABqb/zeNUHQAFgLBY+lAtAAA/7 y1X8EkAB93c/Ck62QACq3KP++rpAAgPah+c8q0AB8AKEwLbWQAGeaUafn4E//dMVYNZfjD// PfrRID5gQABZstlWC2pAAW8lKJ3fvkABfRsEVUlfQAFKKiUCbJ5AAOI0Lo7nf0AAnICjxrRV QAFHK0ETUg9AAU7D1a+HI0ABTWcEOZq4QAEr8YVvf/dAATpLrdA3vUAA0Q0jfkUCQABqkhKw eadAADHKFpqLeEACAXenee76QAHR/s24by1AAZwhYBrWDkABv2DyBxewQABkIWwtWXBAAPJI uB3NukAAEXJHqOlCQAA4tiu/27ZAAH5aowDtH0AAoBLewWe0QACfJGkRlh5AAAMAKSOf70AA ak57Jvl6QACM2u1lu6JAAYHS0H7JekABKW67eJkyQAJL1mnUu7pAA34UqNhhMkAA7fsWC3c8 QADL+sz6cwVAAbnMf9CJaD/+hz/jYJpYQAI35GnkryxAAeb9N8T2zEAC4rurd8K8QAH6G9Ee 1DZAARDEzha2rD//xSCaozmeP/+sf316moxAAfG2CkBR+T/+ZkIET5C8QAJgDCsxmAJAAQQJ +K2Ib0ABr7ZDj19JQACHwPXl3KtAAVZjV3PuTkAA0f/uBfQ+P//2Fj95bmw//p6ESUjHOUAC SwMd2zP1QABYkTE0KrVAAFqxfmE0t0AAUzDeNFJqQAHyCRDaE/JAArMQ1SlK5kACnW3YIF8p QAMSbPggCbA//q2KI0vLzkABqSpAXkuSQABaZtKZcnNAAG5wUeHq+EAAg3FaB0hVP/8Cb6Uu k+xAAWg3m4eviEAEjc+JJHNNQAKAnoBSFJBAAcATi5i4zz//nWPwK+UYQAB/cN3nFpFAAiKo sQ/bgj/9Wf3UCNa8P/14hF/uSJE//4VAf4WAzEACb2FcLkfMQAA8GG+nYoRAAdi/14fCqUAB RR0+GoUwP//qKnMrXYxAADkF90SAbkAAm/YRyzYNQAAhN72fuEg//lHjjpvRcj//rQ9nzDUY QADxCB1SfEA//pS4x+t0Yj/+ppekLeSEQAHamChHQNQ//rlsiF5uFEABF8lSb19eQABaVeGA 4T0//7Xj9jVYZkAA99+tRHl+QAAFyaWMJ8RAADi+wUhgFD/+WA/aMLdCQAPjm21f5exAATnN cm+SckAApGnwVapHQAFGnK6BMu1AARv0pbyY3kADOWUghg1kQAJnQo2Jz40//YDdCYsr9D// qMFg37TgQAILccoPTtBAAEgvlZHKMUABDhriGSphQAGUXLE5NNxAAJToRJqqQ0ACRd12G8FW P/5nMHHgQ3w//Uso/nWRsj/+GYBQ22AGQAA7vWlkp+JAAK8gFHWOhEAB4Z7q5WZtP/+teNUu JUhAALTAAUPZ/z//LPfmT1eGQACUjJ641Q5AAcQRmVuEGD//gyDoTd3FP//i0wHtjOpAAJSv 4sJJfUABNGFkmdEBQAJNYm1xCytAA2/YbzcInEABw3LKBftiP/+WkK8NtfxAABXBalhxLD// +n0jhWQdP/+9hMrBivw//xxPFrqRI0ABV35PWZhDP/8twJwrguZAAdxZQofadUAA76wFRgtC QAKa34RGNZdAAYi7wRJjET/+4pMiKLSGP//frKz+LiY//9o5FBE7DEABic4GeDmpQADQAfdx lz1AAPe/S0llo0AAdHL30o/oQAEckpHXE4RAALm9+caXGkAB0UgqBBjcQAPNO37jPqpAAfOZ vvUa1EACZUcTOhYrQAFORUJdPPRAAOztGmJ1oEAAZWGPvhZxQAEBIwOTIkVAAAphZ+qreD// DBxl9VdoQAFiJn0GWHlAAPCBNs6H2kADlV/DUnEeQAIZnwIsPbBAAz7OPL9jRD//ylrPV8xM QABFH+pjk8A//jFQcNxEWj/9DFCrJVB7QABKfSjhZw0//xKv/WU7b0ACFOFwOqRlQAAEybLE HHBAASXTBaY5zD//5c1Mjr+oQAGpkcEX9SpAArA80wTuu0ADSg4GTy9YQACJE+uNuXJAAsf9 1yq7AkABo4DcJ4bNQAEFabDawttAAPdhuFjj9kACXzs7NjzlQAJgZstRtpw//w6H87isZkAA i4TIsnzIQAAjb0eAcbRAAZHPsOE9hkAFUWmsuDPIQATIy4LGWRxAA4WRetBG80AAMMiOEwc+ QAMbhdl5nck//u71qI11fkABrpGh2jaCQABxhTX08m0//mDMXZFuokAAY03ULVNKQAAfXCBg FdxAARkuq8OeDkABEU4hqoRVQAEYwWGQiFBAAM816N5ai0AAZeseN/swQAAqc0ggjiRAANmn L1BApEABDTj0ZBA8QAKlSXKKyPBAABZeoi5Hn0ABjJFzz3/UP/xDgn4GmCA//aDScaW+AD// h6zLY7iSQAAaXFiRDldAARMXK2zIxUACIV/cqcAEQAMFd5drn85ABZv0fSVHQD//TUCPuzos P/5UtoB5iCBAABpmeuo5w0AAJOE+uvWGQAFcFE2yRpJAAQ4uayEBPkACjCbZzNZlP/5SHOaa fXhAAbisYGiTlEAAscJzscqCQAD2ztd2v+5AAdeFet4JSkABhKSTb8sTQAHH7EyjtlY//qBJ +GZCqkABrRLyjUWgP/4YDpTdVIA//8Db3RCVPj//+uNFT3cCQAN22F84k69ABDlBoOjkaUAB c2IJzAjcQABzc54CA6RAAONweVUODj//VaBcFIWsQACQprpTrpJAAJKCChWQvkAAUelpWBZ2 QABmAj/oSeFAAfal2RAM5kAA0wR4nc2IP/3wIf30b5o//NVZUU37eEACRqolOnIQP/38FDB8 7MBAAVu34hKm4UAC9VQMxQkSQAMJsy4m8ZxAAZZx4cmg4UAAw1re83AeQAJ+YtKxLpBAABFw WT7vMUAAhEgtL+Q2QAFZ6cbOFc9AAWt/DBz0qkAAOC1l8PWTQAFf2aeAfkRAAI754QQvYEAC fC0azy3SQAOvf48pPMxAAjujtMpdmkABQohjHFFVP/0YGsS3JPtAAM3wmQtJHkAAf0oy6kyL QABr7rkNNVBAANxMvKTO6EABSkiOkyE1QAJOd/NSZ4A//fAcJ5oLWkABJoBUDbVaQAAFdxme 
kxNAAdOJ7Krt4kAB3QASK0VSQACUDTNvljJAAQBCZtPIFz//KQzxZ6ShQAFbLhPe0QFAAFIr TGXRl0AAVF1cPnInQAHVBBGapW9AASIBmqgS/EAB3gKovOnjQAHIDZ0q4axAAKBfSJ4WIT// z0RaNA3nP/8DUFW04glAAVMgfiqqr0AAw2955gB4QAFTNPWX4gxAAKr9O2mlKEABqSRiv/S5 QACR/EbDqXFAAR9tgCV9QEABsBo5EwKCP/9FQvkU66VAA++6dzdeokADjVN29ZKyQANV7VhB Z3VAALyY4ePAQEAAp3VvNWRDQABOhtfZaaBAAfKb2FINhEAAaj00EigWQAKqsRSgVpFAAW12 ZLK7ED//zMngGPcgQABJ6B0WB0E///4kITnR2EABGDTcUR5bQAF903v3mvJAAiZpXxkuCkAB D3h+13BIQAC03AXnbmBAAPrHpGrxK0ABWr2CT4gsQAD4D0F9RUxAAdBI2TDdukABdjnOBHzt P/9Owk/He2w//k1kiGY0WUABiycWB1LaQAJxdcLGwY9AAlCvBi/uCkABrsnPPsIyQADbcLiO 9qVAAN+MxI5iq0AAG6NqCRy1P/3QeyWFT55AAcqzevGO7kABnQTAT0i1QALOXmqqK0JABAo4 8OwNBEACOEMpXy6CP/+xlGfNM+hAAq1yUmjP2EACuNZllO1MP/+PFmvfPoo//PrNqXk9g0AA EgVUrU9SQAE6TQHbBEBAAhzRJ4YyGUAC5lMeLooZQADCljyZV5s//2Pbspj/GUAA033Mumlm QADichaOQm1AAGbFeNdM3kAAI8QEdJrQQAGdjKis2bxAAhFpJbMJ0EABHliV5MJvQADy8Xjc q0lAAonLZcqlBkAAsZkLiV/8QABBdrYySnJAAun6UcWORT/95FGfDdiwQAE8N31gTlBAAFOi xVzFHT/+23pmU3niP/7E9PGbzRBAActKfqldrkABoRjoPMRzQABgeN7srPxAAVBp2uqtiEAA FrDUGWxOQABKxGB0Bv1AAlkXGkrqSkAAkYffzgLYQADp+oPq6Ik//zmdncCAcj//saprHUUA QAC0bAn4LTlAATGviedg7D///fH+UxPyP/yHkGvVY59AANZ+cvbUPj//mQNXVKd4QAGYy0Y8 941AAPEG7fC1+UAC27n7rF19QAEBWztpCBZAAcrfaIvQLD//WnPfiRUuQAEjd0d3vco//tJd apeydEAA10DiE/CLQADc9pLdP8g//i/R/AYWTEABsoTxm9H2P/7E3nb5xShAAO0ZF+nncj// vhCplCucQAC3I0s9TBRAAHUWWoDSxEAADOz3z3q6QAAi4LS64LFAApXaB6uMbz/9LGpwDDFW QAGWY5e9xX9AAChPDWdpXD//dJ6IPqr6QADXNONm0S5AAHEojWALj0ABaEnd8QFHP//Z+pJj /+BAANvdt4JtWEADKfAp2IMTP/9nKY+gobBAAxbtBcu+/z//noXasbFzQAAiGm/8sfhAAW1w 3w91XEABq5EpJFYIQAFyuEJ3u7NAAMQ7ysUCjEAAvgKcycvIQAFW3CRZeAhAAKqGbjfiSkAA VaAI5VUUQABxOhJGhT9AArXumg6rPj/9WN0sycWPP/zn9/F6llI//GiGYjH4UEACyygEFl41 QAIpQzp/9x1AAmNQTmChhUACQxtaRWrAQAGZGemeGPJAAcPA5lPXEUAB6rsgWi7QQAAqUw2E 0K5AAIsDl3+cX0AAnebN1h76QAB0CB+EiORAAIno1o8HjEABZ01QaaKhQAD8hMJApvZAAwFR D38CrUAC/T7J/NzQP/z15QPRNDBAAM5W+WQ4aD/8/1H3HL5kQABrI28spnhAABR/TeETsUAB TeCNmKnRQACyfOzqtZJAAMbFS77BakACk2KDZCBxQASpUFiAYR9AAt8zdMTiZkABDhOl5a6s QAH3URk64AxAAIoz7ibMYUAA2hPaCdybQAEVMcBG+fxAAC/5mue+YEAAownrdqAxQAGrupo5 0mRAAFcGOrpiNj//xqH+8FpQQADb32OUQw5AAC1l/bfDTkAAFQ6p8Nb8QAD12IyY+SBAAWE+ 4WW91UAAQ5jp/XLxQADMuq2dUelABEuDOLHsJ0ABxnB3l5E2QAKOiRW1A1xAAPbIj9JdkEAC mFj4+nyHQAERVWAl6t5AALsRAB5/cz//HZSItNvNQAFbX61JdUs//9lwfIjxcUAAV+I5iy5+ P//9evf8/IxAAJFBNGhp6kABJTqVSsvPQAA6ut+BUYdAAHEYMYNS80AA0UJeW78/QACi63aQ xtQ///hErfvb+EACVBOf0RjjQAA4+lTrNJdAAZzKn8fYMEAA59EJxKLsQABinRPeAihAAJfK TeBJvEACRhkHPbfYQAIFko6ksf0//mrd0wHU2D/+kMG51rKYP//WSRJ15YNAA2J+zFjMbUAC 2E7HzmaVQAJCsJBWvXVAAhBLfduV0EACPr2qifTxQAEEeICP+xhAApDyQLiMrj/9xMThohX6 QATtmA5P7lBABHVlNLRtF0ADwtvXXhdwQARfckJPk+5AAZyqzi6SNEAAqW2HAMdEQAMOBKxi vVBAARY49pA9OD/+3MHIASlUQAFUEhHXoz5AAMbGKls1WkABa84Mw0hyQAFCgT63XZNAAsHM wZekWEABPNF/jNztP/6uV9qbiYdAATDvQsLTCEAAtRYzihaIQABpxQ8+I6xAAh6fb7ypqEAA o4n75CJ+QACHpFHiMDJAA528HosqA0ACW9oin6b6QAFhz8nKXfxAAP8VolUbLkAAKxwDvotD QABia9L1I41AAEzqgtW050AAoROjT998QABvtBpFg1RAAHJzmTueNkABLEOyZbNCP/+OPzOF ogY//S/r2bEGmEABxmBJO3xIQAAPpNxDZ0NAASRgsJIEn0AB42UFLMO6QAAvcwkwyBFAAIAn 1xdR/EAAKI/mbb7oQAGEIoySuFpAAtTxZnQbHEAA1/YJDI6eP/95lWuTQq5AAoLgQ/sh/EAC VeZ6vG8eP/3Vr6nzi6g//dWvqfOLqEACaULT9fcBQACyZWAaNNBAAB7ZiYYTREABF+r6JxP6 QAFY7lc8xsRAAa4zJ3oiqkAAnfH7SEcQQADP0xeRqANAAGZ1H6Io0kAAEzUj0imAQAA22KNA CLs//TwPlowcWkACSwgXtSguQACpOsYIbFdAAYfGl1slpEABvmiT8qRqQAD2O0u/HqJAAQTJ OkfZDUACD0Msk90dQABYsuGl1qpAAGdB8PZcNUAAV9OwiVgoP/4jT1APCnhAAmtH/claQUAC bVIi+bT2P/8xRXNPGMQ//Z5Lzs7jzT//kJ6VKWzcQADyCV4u8sJAAHZH8Wbma0AAwvvG/BjA QAA1zoDEq8tAAHlboPSPOT/9v+MU3Oq+QAHIPtsmjYo//8vgV80mXkABgtZKqvVeQAC77JXG 
5Ms//6zDuRtEskAB+fVff1U2QACe41OPGGxAAO9F+BqlZj//c4kbNkeBQAFRjYv37VA//2YX TkO+0kAA6gUW3FLaQAH+vZoq6dpAAMjs/+G7zkABOBBc4HkKQAFnCQtfmxRAAHony6ZavkAA FeWdLrXTQAC22FSw1DlAATXih4Dlez/9UEPcAjCgP/4BI0HZlolAANY6QeZWWkAAa9c8TQXA QAFFHUmC+VJAAGUTx0Lt/kAAtqN2LB2rQADxUUhwnQxAAhGZ/OjdbEACp/jdWmmQQAD1SlR8 OXI//8mSOL6vCUAASa9eUaqmP/+Cm1UG+9A//6e7KiRxsEABOy7ieovwQAFUyWjXMyk//cr+ kek7NkAAU0+Ekvh4QAHbtkawRcA//66icPxnykAANXNRYzSVQACyLfNr3/xAABL1toWC8kAA nMrbhpFmP//r18HzE/pABBjLYiFgaEAAHM5LYs9GQAC2T8votyJAAMD5SxTKaD/+Yma491wZ QAIAD89IEds//wbc3Knk9kABPhQlFTJCQAALhQ09ONtAAIm/SEFGpUAAEK9++dM3QAC858rC lRZAABGZYCsH0UAAlH4VL8hCQACbAL84t11AAUhTDK1fzUABjwB4Er/mQABnERUfuupAATI3 pxWMjkACJ6H4pGx0QAF+YjUwulFAAHsaKO4mJEAAsd/GNy0eQAEUXAVL6+lAAb/+Slnq9EAB ioPf6s+mP/+MiBbiwPI//+9FLKxRkD//y45sFrkOQACayTyNu7ZAAlByl1pgnT/87siQ7TN+ QADf9jN6E85AAke4iT7AkEABhsMYKN5vQAAKsNJ+yctAARLmfmhNWEABrl7Eo+rBQAMu96p0 bwdAANwFksz3ZUAAyN/JGXwlQAF5LyPJIm0//gfwfG/VUkABpLCaemb8P/+fDjnquoZAACQ1 1VYBokABQQqGcyhYQAEvzyvmoiRAANGdIwGKI0ABTuLeGzm7QAIV3E4odzY//9m4da9jkEAA sLdQhYYCQADIwPSVwCJAAbe+ri9yckAC42ONu3UgP/5TF/o53t4//kkniFsvND/+WUzLvn5Y QABoTpAu/x9AAF5MUedinEAA1zUyIK00QAFNjVm4fJ5AAJtI3YAT0EABr6jTGP8KQAEAnQ/N Tp1AAgVueRP6MEADIRMOx0ENQADNU2NhXQs//xpNWhL3dkAAbvFvHut2QACxjknedkM//1xe 0XLE3j/+QFomoCdDQAFN8vqz9WZAAzq7sebO+UACW92Lrd/9QAIgzV4gpdJAAmzaeWc0JD/9 CCWxzpyOP/5QJxzsZPNAAYZgstMkCEAAcv0wR41tQAI7YAombRZAAkwazTW3vD/8nw5de5tM QAFIEeoLkGhAAF9Zw80OLUAAC7PjeagyP/9tEWYZKSlAAYmr6jLCNUAAzqe2tmOmQAB7DvF1 U9w//nlDiHO7iEAA3C5fj4hYP/zSKMW2ZjNAArs9sgBHUEADLXzzRvJOQAHKPF/T8FRAAh0F HNrd40ABbd1fz2CmQAEoh3PQ4Jw//6Z8/BgALEABE9y4o3BfQAA3eVNZGaxAAcxEKoFiBj// jWzsFeQFP/95A7y1q9BAAdgLQQgSn0ABgA0wqz/WQAHQM2Yyq/w///cO2Cj01D/+mOU+M00F QACCw5nfkghAAKYRRNltkEABE+1GJXPSQAFNzU7STE9AAHzEYYhARUAALfapyY3JQAB8JN7Z JvA//5kk2VnAbkAAeZ4ARANcQAA9CYRHnAQ//3SDnPw+mEAAAoxYbQWZQAAefVjtWG1AAOVS Xis8sj//wUp0RGAOP/0hyIzluUVAAeWah9flEUAAgAZ/0jnkQABQuQGr7to//3mg+xhRckAA 8HQD3mwaP//DvJPNraY//7rb4C8TZj//GlBg5RbTQADfP4qslZBAALQqcsm7fkABO37/tJW+ QAHTTwA+fUo//f0VWbYldkAByoHYoJiEQAByj1BO0BNAAKZ2R1IGR0ABaSzwYlFQQAIOl8Au UVpABACqaYjQrEAAW1tYlgO1P/6iF1gMuOpAAKzNho8oL0AAa3wHM3PhP/7c0obn93tAAT7F V9lao0ABOZt/0D+9QAEN+XrSwF9AAL76CFlFUkABYBJ2L3p+QABWv/7QKxtAAdCsK/xuSkAB P9AeQtHkQAGXv1WAvcZAAbBiKcJNBEAAbSXZOvKnQAKZltT8LZZAAkWufzccGUABkhT6cLnt P/4SpocU8ShAAJd57P5S7EAAn7cd1ugWQAHkWSUeHcZAAG7cDR4+AkAAUvhiCd6/P/2IeCix oANAAZ1/6d2Pej//Sqzq/fl6QAABoeC7pG9AAVXkTcfNpkABiEdkHvEfQAEVTHBlQthAAiAR qkqZdUABp44+EhFcP/+M0J6ATK1AASb/1f5ulj//vpSaYwiuQACoJUMyRoJAAN8eXU0KSkAA vpuMSJ8dQAAZCqfZod4//WMFb3+j2T/+7mWyqlpPQAFYhwmAnDs//sK9nDa53UACP3C3b/LH QABLNsSCgbI//83ysitau0AAi1Pjv3SFQAFSonbtlctAAMojx0FnNz/+96M1f3SMQADEaP4S fMRAAdkk5ybAmUABbNbLwJ70QAFEjlMFLwg//zTLeJclPkAAjypPeZt2QAKwkWDNxudABMTg S62da0AFGktNFyKmQAFziIi5MuU//v9XibToOkAB1vh+8XuRQAEGdBviFSZAAbkkxbSdSz/9 CkHsjiTVQAFG9nOihWs//p7pSy3z2j/+skAU/pX4QAMFDN28SN4//o0Zwv85LUABSSL+Ssw3 P/04XEqH+fpAAc2fbEn9QkAAo+C68XPZQAFW9R6EJ6BAASNcWdnK8kAAHkvZIcvyQAKNf4fo hGZAAP0jIZ/QzkACfI55LbvLQAFgJjUl7eY//p9KsZhyEj/+k22O/tFKP/5IGlk9RLZAAOPf MJTluEAADtShDHJ1QAGvuS65tdg//0ThtBi+rkAAiLY1RWC7QARon22lqTJAARYBQBCDxUAA wAv3M00vQACI1stzl1FAAcR4TH7HY0AALVL6lyy9QAFsCypJdSNAANoSyf9tPkACVz7Us1hq P/63bZrHOMhAATbQQRAxnD/+wtOVWX7eQAGZ579Aq3BAAqMh5enZlkAC0mIF7TIGP/9pyapa SsI//4jG7SYrb0AAiprZn4rpP/7yorF5IRA//T84oxnshEACPWX9j/NgQAETrufbKu9AAGdZ S6WXzkABkZWSxclcQALn3r7ZDqNAArefV/wNTEABtCIv9nYIP/9fQX3UdHJAACaL7fxkuUAA sGyMAZhmQAAjjlAnmo5AAxVj2LWKkEADQbSpD6tiQAD3cgCv9ihAAUiM2m+8Fj/+jWOAz3RQ QAB5ljOL0OJAAQd95qOCS0ABVn8n2zYxQAIrmufPy+RAAfTWNCESFEABA8dY7OMeQAGvD+si 
0BlAAZduCMuRHEAA5F5/A8TuQAAcHMxdII9AAArmh9xqlEADmiRZC5DVQAAyGvA9mepAAEEk c7ZFbUABk/5wPZrgQACxi4ccrMdAANW7ET8Ym0AAL0cj2yJVQAEfjFQr96pAAKgaDwqg8z// k0diVdbHQAACMYFjuTxAAKDNIhJum0AAGmFzna8zQACWmvidcPRAAUNyj+MAwEAATRIy5t22 QADoiqLGAVBAAHZDNKUtXD//uNPqkVFiP/9ReCppccdAAJpIDgBorkAAmqJyYFlFQAF8QQTR 1dZAAsKsDpfKcUAAVOaYfRnJQADw40mpLq9AAT+uOyZQOD//8p9mkchQQAOlvi1R6/w//+Fe X0mVQkAAuRfWOakwQAAsBSmvjMhAAgI9UdmAtT//XikN5y7xQABxSFHPV+NAAI1rJQSPgEAA ImLjuYcwP/6e5tlMDPJAAjGG0iPznkAAjBysu6WuP/5/R3hJCYBAAWAJNSVd3UAAUCTlaM6W QACZrP4cjNk//9UtrNquwT/+y4z0VKZMQAAiS6CgFzlAAYEeHZScmUAACGKfUsyPP/92jzgY sm4//0IPQOAY2UAAdq/Yf7qGQAF1ActASb9AAfVQ8YCN0UABNX+lP18SQAEovYaYnxBAAV/O yQEG5D//Z5qjnFh5QAH5ruy2WnxAABjGCi7ApUACBnyDyAQ+QAHq+TTm9m9AAWlxW5gKKz/9 5ocPmX/IP/9PRFcIKepAAEEW2sGGCkABBhwzUUHmQAEtTQWLQAVAAUWG1/QXbEAA59ZwZHFe QAAz95pGh3ZAAVoKqKFRKEABbVdbikMGQAGQ7QXyGYhAAT6Rp31FGUAA0gt13lrNQAB/BMcO 8hhAAJ15AGuIAkAAYhAejJUQQAIqKd69p5ZAAZP9Z9wKtEABkT+w9FvUQAGRSwUpYDRAAGuc YY+5tkAA7cW59FrRP//eDRboYZ5AAEG+rvECtEAAiyzoib+xQACidvGxmP5AAJbWxieEjD// 1EsuuAyyQABxRv9r4zRAAJFZt7HlW0ABbNOGCHUYQADjyJ0KG7RAAc3N0lnbYUACk3UuoBAO QADtq541nKRAAL0o2fQ8QUABj0F8O0H5P/6GfMASf1RAAjdiGkS2qkABloOs3iYqQAJ5286F N6dAAbNA87hAMkABEX1rHE4GP/9sJ5JsY0E//4by/sf8DkAB2NeLqqMaP/6Atv4YJ4hAAf+z gDlM7kAAr5M86looQAG1HzrJYIRAACkeb7y5QEABZp0YUqe8QADBNX3XHXY///CZ9SCDwD/+ veN/iHgSQAIJa0K0Ub5AAE1yLiXiBEAAZUwvCqCtQAAq/38GGdNAAbb4+/03iEACb2R/gYUH QAJeNmuoLWJAApy5AZ7hHz/+s0hMZerEQAFrwtd39vVAAGg0zZSHGEAAeEcCZQHMQACDvLKY WGY//lQ9PJNP/kABcImVO7twQARITdAPj8JAAjN2NW308EABvLLTwByCP/8oNAZrI7xAAG7H 2M/oZEACH0ltw0BnP/0bCjQxMbs//N/dQC+utT//n/Se6aNGQAIBrqoJAH1AAEFFw77K2UAB zAxL82xgQAEvAljwqEU//+K582HeNEAAR+q+cm7rQAB81E5os+FAAAV0Vdn62D/+R1BoP/Ri P//WEiKg20RAALmcYe6NUT/+rsL0IqEUP/4oC1wGQItAAd4RDhoIej/+iupEA2fgQAEPS+Q+ 2JBAAFiJu31jRz//EVZGRkT4QAD2AeI9jfw//6o8vlal2EAAWeunN6CtP/6enfs3NoZAA3HE 9tmXzkABKJR+haYsQACgthTGTqZAALTDjtR6ZUAApIw4p4yoQAJU+LaDBExAAmL77+79zD/9 ThXBE2gSP/9c1riq3vlAAeGJIWgLbUAAD8Il6TPsQAB5ARP9hYBAAZZTYmVZrkAAnIgOKrut QAJQfkt7iuQ//eBiwQ16HD/9F5wjIH5eP/2gtdOjMLBAAEF8zz/jUUAAnXC9pxPoQAHlIaVE 8FQ//3emkUa4hEAAt3nilVCxP/9A2N2ePupAAEo4yuWe+kABhL87dlrMP/+gZkbYv/Q//9A1 ZvZk4EAAel9dbD7tQADnFCbWUYVAAjVp8Th/OEACdI/oRvgaQAGzM7PDy1s//6at1kJsqkAA Faln+g/jP//+fSZVcaY//6TvXncjtz//KBqDEKJKQAEunNyb3Ng//ssCOw5r0EAB27f4DQRs QAC4XzYb+HBAAerjkaiktkABTo7wQYHcP/7hu41/Wns//8ctrbe9qj//cCNDTaglQAENGAqk ICBAAGYh4P4840ABAbAtZsnUQAB6LvB+2MRAASkbW3swjUAARhH4thn6QAEXsGWHWXxAA+4D dWXYLkABCPzJvF9sQAJOSb43cCRAAT3K3D/aUEAA2PLZqsZuQABjUjlnwAtAAQP3XsG3GUAA C09yEXS/P/74PzO+djJAAWaC7QnuTkAA6FxqPcOIQAOyHFZe8iBAAhmSRyKeJkADM//dkUaf P//hgtN/fLpAADz1Cjb9lT/+OU7hKgyEP/0p/x30JXhAACpODTMF6T//L4EM3Gq+QAIZ2aF9 sZ5AACCYpgv05EAA95zDnu6oQAAMCrx8QLZAAVcLppDVRkACqb4wQLn4QAMnLye9wxZAAJ0I tiKTuUACgg5iIIm5QAGDh/tO5atAALVtp0tA/UAA4bb0dH5tQAIA1H/4Ss1AAh8a9lRXkj// GLRRQKuuQACTbY/QYWpAAACRpl65D0ABlJ7fcIAVQAQVe6QGmTZAA4TimQdg90ACrSCwI2Cf QAAjjFAQ4y1AArQQwWc+pD//C/Ojae8GQAG0h2WzSTpAAAA5OIpvwj/+VISdk3wSQAAzsynF TO1AAB3a2YcJ9kABKrcoGL+eQAEe/FF5KoxAARKpj9xjRkAAtvLWQuRuQABnqGjHNxk//+1K gttUJEAA8xktAQnmQAE8KikDuKhAAp/nO5tcE0AANi4XJ5JeQAF3w36Avw4/+8Wi9bpT2D/9 fCZeF8dsP/+N73UOtIpAACO9n+/p90AA4Ih8nwOeQAH5evl0PlpAAncxsPQy+kAFEk1CEi14 P/9vrVIoq2w//ldg5QaywkAAJBGWk0BvQAArkj1J06FAAQbvboKnaEAAfLXuqDAMQAJgldV7 jMc//jAdy7WjDEABrg4ZhLgsQAC28g+9DfBAANz5eMAdUEABOIRZqUfuQAGj/yt3iXpAAenH TUAL1T/+hrw/KCh5QAGWpLBWcPA//oaB2PSNgkAACCleZIFHQAAlNjVbpchAAwpZvmhD1kAD jzcMJdBbQAE2LA9yXchAAD0EYyB4BEAArA7D9SQeP/8ZZOzFkLJAAG1zl6MbH0AAo0mV/1+C QABwPyAz7cpAAIWvbG5COkABejiDMZjGQADdqcrVyL4//dWrD/y4+j/88zE8/YHcQAIHI221 
vRY//go1SJHXEkABchGyrA+sQALfAO2oyNtAAqZpaGu8lUABvuNuIC5xQADE05hhwyJAAn9J 62pGHEAAH/Zw0qqPQABiU7AojyJAAVk03u3QJUABaHLbhin8P/+p9ByhmjVAAWwPOSaYUkAA Jjsn594CQAG1csKw0jBAA79cASyHdkACR81KDVgeQAFPJqhpcWE//TT/P6G0VEAAiUWgrE7h QAB32yBqUT5AAG4nYy3zCEAAx0wze4F3QAEUdtsTbL9AAi+bsywLbT/+Ff0X3qRSQAEDrUkl LxFAAATArP+6y0ABoSZkWo3cQAHNcBGmEkpAAJsMByyeV0ABC+zD2aqIP/77y3+ED0NAAV2M tL5OCkAASofMSW5GQABQtP3CLoZAAb6Oc4MFIUAA/KcLK99RQAHgE4RaD2RAAVhAvA/350AA p10m6g2cP//Pv2D9DQ4//y7WqnvSCEABVMNgKvAoQADHtN08SEpAARUfGTImGEAAs7/T0eqL QAG5fNjvavxAAIm10ZP+PUAA5c14k8a3QAFRwFeLFvo//072l3u98EADzaF9wAj8QALKYO2W S/hAA1BxcVP9AEAAN8WmiXZEQAC7BENrpFBAAF+qm2Ze7EAB/wu4r/waP/+l+XNmHKVAAs5X kCoyZEAA+w+JIBOcP//Wz+8wS4FAAGMTDVL4ukAAD6KcZh8aQAEeftH+lsRAAYAJNlfkNUAB o4IzNh8oQAB92MXyyr1AAF0J7Xr+Y0AAiaG8sBq6QAFyeggL7V9AAIeTjbgmqEABW0hj77vO QAEBWKoZmjY//0iCYh7x8j/94xQDtiHeQAGK7fthE6lAAg45JGKiskABzC8Q928oQAFjnbBk l7hAAKz0zSP2pEAA4CmbqLm9P//but6FmoA//X80QQoTZkAByGARKcU8QAF85AcjIfpAAoez XXEw4kADexaKK8zhQAHQo11GKlU//7d83wW5X0ACUriTr4VwQAIrYwPEpxY//+XvmB0uXj/8 5bBAOejdP//Y0opNngFAAUqm6WHkYEABrYedjXb+QAJDlN8w/6lAAMKYic4IZz//dTUQOcnA QADNMHMXWIFAAOW4t+z/mEAAboSvdOyFP//ZJkscSgVAAaQR3AEjAkACGWG3TVnhQAEcSxaG Y5BAAOgZpb24hEAB0P9h7aGMQACaxMXeYgtAAEtdtxtMIEAC8SOH63L+P/2XQAuzZR9AATfW p1DwJEAAItVGS6c6P/7M1OKIWHA//pXu1qz8ukAB1wSRXPciQAGsM8nd25ZAAAtDcBvAjEAB Wtg/F2RIP//+N7RHIpBAAEfOc+DYokAB9p0DoAtlQAC0J3U0rLxAAOBXhGlMjj//JNnTfZNq P/+RuwlOtBhAAJhdmZHOHkABKc2ulL42P//YhSCYEnU//CoG7Q2ZfkAA4iUgk5/iP/+cgYVP yqtAAZw8uk06XkAAtYHPN5cQQAJlaOMrt6pAAQbB7xI+UEAB2EcqkWkAP/8Wj92jN8BAAOnA tad3az/+8XXjJ5EuQADHqJ3i5gJAAMVXDEIq+D/9q89+gnjqQAFb9Iy9WO8//tbUV6L3k0AA jfySPS0KP/8Si4TWyThAALgoePZbVUAAcBWWL4/GQAAbl8GEMeJAAEGWopN1X0ACipRpKSdf P/0YE7J81axAAXYv3ZbAZkAAL6d/9xrJP/+izFN2TzxAAK3qAnhXnkAAgQSuoql2QAFIcUoj JO5AAAvu2sRwfkAAdxl+nCTSQAMbJY85YdU//2IMLqHmkEACxn1Wao7dP/+bXkjiK1pAACBR qq+ZfkABIH9kn+CDQAFIqqd9Eo5AAW9yr1djBkAAxFRt0LnSQACLKP9Ety9AAV0ZvVnnhEAA uZLtB9AuQABbM9gdk9xAAEh3DeBs8EACloBXVbAAP/1n/GV/scI//QHcylBCOD/8XwwfFAhu QALWgSNeQ8BAAg5PW5duYkAB+LddeW7cQAH5Q1tRZmpAAZ+gJ+NMfkABu4mzvRHgQAHljqUN fo5AACAS96GY20AAcX8UjD17QACPo8XkqKNAAHFzUMompkAAh6QOFKXEQAElwS4pkdZAARzE tIh/5kAC+tmWE7s6QAL177ID4jw//ORMhTYEakAAkGjKYPVSP/zejHKrBKBAAHEoVtoocEAA Hj/YVjSwQAFNJxD6OP5AALHasvK9G0AAvrRhVpo/QAIo+niz1JFAA0jveotE60AB6cKSgsQk QAESWPorEXdAAaqfF+7IKEAATAl7hHQ9QADi6mgqTOxAAR24TbdMrUAAHrjhEqlKQACmbph5 aW5AAYu2eXzS/UAAY6bkOj/kP//gwFr2zgRAAK5zIxm/skAAH+iZLEg+QAAU2oibuVRAAPBK dw2ssEABW7BcsI9aP//yaIesbAJAAMyvdFUpgkADaOICROu3QAHN+IxEevJAAnH2PnRgzUAA McDcjCGKQAKE3BzCCNZAAKWRjLAuWkAAintG+0QOP/8vDyPCGo1AAS2D6Z28rj//2WkADdap QABgsl5Ro+c//9AF+Yt4VkAASvOdHbyqQAEMi3+pe8M///PayORrPEAAie8VJ8QsQADMgsFV Si5AAFCerzEOaEAADykG/w3iQAHzr1I+/MBAAEXG1ZKJkUABPhsYUx60QAD39md00KxAAHQP J9AtI0AApYuCF+/jQAINkZNSKahAAfREzoBvKj/+aMs5k/jyP/6ONkP0dC8//6Lnjy/VoUAD WECFF0IdQALsc6iGLghAAjlexewZqkACFg/uqfeBQAHv2h5URaBAAKdQ7MgVDUACQYBPv4+g P/4bwgTEBjxABJB/UlVhBUAELRCG1N2CQAMPy1r3lApAAtq1ytDIxUABofbJX5hyQABbNlr6 DZ9AAv/iwHNkUUAA4kxvlEJ5P/7z7IdjC+ZAAVke1YmLLEAAVMxO/k7WQAFlyoxlqNxAAVl0 LxQ+8kACvuxk3xPeQAExtrFqzKY//wMSCLjGWkABNCtZNIKPQACy1ApyUGFAAGhoqEG9zUAB sVr6oAfyQACEp0woDIxAAII+3CiqckADYGN5FbxLQAGRpezNOgxAAWJwbxFPp0AA+GGKE1r4 QAArw/oyLGBAAGWxopn5vj//6kMOOdMCQACQz7m6PvNAAHfLdzweAEAAT0wBEkRoQADfJgw2 57g//7fX6qKOxz/9Qygo40bAQAGj1DZpUpU//9cJ9PaKzEAAz5DgvuS2QAG1sDsKIaQ///uE C+OJE0AAcZuiV6FBQAAgVsLXM+ZAAYWnWA8va0ACBmhB90miQACUZ/s9Txg//1blSyPoGkAC kHUE3z4oQAIEwUTQzZA//bU3YHfYFD/9tTdgd9gUQAJn3NjOVkpAAJHZtequW0AAEMORSP5Y QAEdayQzNt9AAV962XMKF0ABlIR6as/qQACjBGp6jgBAANDIBfGcwD//ne+tAkpeQAAT8hcY 
ItVAAAGmaidxqD/9PgKaGzemQAIup7cnJmlAALBPfAMkqEABhOCHYGgMQAHwav5igtlAAL4+ Q1CXEUABFE8d6tMmQAGgTYSjezFAAF6dSY7l4EAAbjRlK7+iQAAn98CvsNM//jTGX0mAkEAC alf7+RZEQAHNatSzNc8//6Om+jUGUz/9jpUXSSjIP/+IEGR4tixAAPIIpnyqVkAANQy4/Nvh QADSGgqm5oRAAA9DlX6dAEAAcQFoTXj4P/2m+LlTG6hAAcpmRsBuKj//HjRW8RxPQAFubKZh XLBAALiKs4375j//f6ZHqehjQAHO3vFO0RxAALirZ0ZGJEABEeMLEi9zP/9Bs3nr6ehAATO2 8aiJAD//eOO48GJgQADvZyEloxRAAfeyIUADxkAAmGfPwnYUQAEucIXOfXJAAWaFo8cFr0AA W293KJ19P//xfWCplbZAAK0LxITKJEAA2G7rnucoP/0FCA+q+yQ//jfsfW/Wg0AA1VvDRrlO QABquk9Fa6BAATtkWfgzJEAAXtEJ6pYiQAC2JxBMfD5AANm4BlWNLEACBcn5q8NKQAJJ8VrA IkxAAJ6lCMTqLj//nbICRiSBQAALSGulcjE//x22tuXyxD//jXkD6yDUQAFHFnZyCPRAAVcn ZN18ND/9twJMfNDSQABK2h1nYHVAAbB65PoVAT//j4zgIVFgQAA5truTGeFAAHmf/qRYIT// 8ar/serKQABz2lcS86o//9EV7dMzIkAD3RrNiJAAP//OB/DSYvhAAGk7uu45OkAA2u5Pn+US P/5PYUj1cqBAAd/vQmiKQD/+6j68ajsWQAEOLvC5N3pAABGsTHvK2EAAjj2OT4XqQAABp2P2 9kZAAGuTsq6zIEAAGgMwUVgLQACal7R+SvJAAIgmf5qamkABHGfS1KxWQAFU32qDJHxAAIJI RmjIZEABTST8wCB8QAHylBgKIs1AAU/2HUSA/EAAgSVtD214QAAtTh2sZcpAAST/VNY92kAB gux0DlxEQAFUmMEmVPg//3eUd1bQ8j//1u+lO1aWP/+pAj+LDOlAAJCmTj3M8kACHhj4vOVD P/0i5A9OuY5AAK6f07b9kkACWYsQF6uuQAEAu7DaTQBAAB9BJkvM5UABOIyjizO2QAF9B/0k /3RAAdqdpDLkcEAA3lwau74aQADBVwoMglFAAWwV4gHmjj/+EqX0QLrWQAGEbYOljyI//8aQ bV+fkEAAEjiXT60iQAEdXu81TtFAATT0l8dag0AA4HKxxwWAQAFevNRFtK9AAim44R47Tj// p9a9V+jwQACy5D8zVbVAANhTG49dtkABpYMnywq7QAQOgQFb2phAASUTmP6whUAAN8o3tIUS P/9RWcJ99RhAAMBDs2Ug9EAA8cax8Q8kQAGELFOAOPpAASBabE/UWEAEbCPrA25JQAFl8y1d HOpAASA6r8/u5EAA1mGsBZP4QAZu98Ye3ohABQd7BBdAnUACMpG7ml2iQABf+0Se5ZJAAVGd 6UdO6EAAFV181wrFP/9THnZ+xBpAAa2MZU39GEAHipBW0QZ0QAYv3Or/rxZAA7UqRfUKDUAD Ub0jlu5EP/37VWw5AYg//oDEn0RyF0AC2nq1E5nWQAECy7V3vNdAAm6CG4xwskACeHZZ86UM QAA8EChZvCZAA4FUzGUUBkAAGa4D4nsoP//ie6SOJ/w//jxhcEAzlkAD7d+plbH5QAMXCsJy nJBAAgyj9uBcNj/+1UTgjlyOQAC1qY0MhcQ//dqhtudf1kADxdIQnxoTQARWH+rXED9ABtYp GUwadUABxWtvqmAzQAIK4tU/yw5AAWueCeOwUj//KW3tdC1OQADmJTjpiRhAAMVD7UI7NEAB 64EXu2QMQADM/AahQvlAAP1rHyFGQ0AB1X/J0rzRQAF9O6YcPa5AAgzXMv7ddEACpF9V0jTe QACOTIi/rW1AAPrT6ibD/kAB1Snjbtf0QAMJl7HfWntAArBZPSpwWEACxs9W8FeoQAHkezE8 dF5AADZW16C6RkABGrc93R7rQADCFCOoTY5AAGibkGPjtT//h8y2IGVnQAAtIBivUxdAANuG vispHkACzL5FrlWWQAEedJ9ND1dAAQk7gO2SCEABsaByowSmP//3Gqncto8//8LaJHs1Mj// zTBuSp0UQADUgn3oOrFAAS81PfkxBkAC949KT58TQAFUZNJBgrhAAPRBRSILakABcSi/up1q QAHVWCSSUkhAAWayVk/dLT/88oPLet1xQANuMHLhWsQ//9OoMhyBhkACsJ9d9yh6QAFuDJxa LnZAAgUU2k8YDEAEkBizRZ/6QABUPf3gSWg//ja2EWRgjEAAUmKdJ7qsQADHiMbxlqpAASwo sKXcfEADQ3yjiSScQARmgBgAwGJAAxc8xho5WEAAdxfB6WcTQADdq7hMBVxAAd6JjTuMw0AE Bgh5UdF6QAEU7W7BCjNAA432o5oImkAEJLIAEchiP//OCauuBHBABH2lKJYANkAD9bpEU2SY QAHo6zRi/5I//7S6I0lkLEAA7brANBjOQACYqb8tRnJAAgC6MpWRzEAARbmeD8L7QABVxvsw KbpAAQ1zHJG9/kABbDpYAqcGQAEvZAuwJa5AATeAbESnHkABEeIRfNEcQAI5B9Vfsn5AAZ63 QAhrmEAEidXCjApSQAN0QUgpYd5AAG+J2sNRGUACeHHDIXjkP//OcfLi5FxAA0jarHfvnEAD gKTKcmhuQAHSbzoissRAAUzpf0CtGkAADuF8bJ9qQABUPD5pYWVAAn0IwpUz5T/9w2CPFz6M QAL6rsPz7+JAAN8RIkmj0EAAdhXSLdX8QAJXO4gUmhpAAbQLUV+hMkABLED1V829QACvnxia cwZAARjLnvDZMkACOBFGup14QAGlCINXCohAAX8dPrj8rkAANpHM4dyKQAFUhXJJ4rlABcm3 KWSPWUAKV/B2LeCTQAmo4xpJPqVAAx1naFCDbj/+kS67tjCHQAIg7xN4oehAAZjve0jx6kAC RbpJdYBZP/6LmwIyqNBAAXLN/F8cGT/+vmr6xRR5P/zccU2B9mxABBczZ/b5TEAAS5DqknJ0 QAFC6l2lwiNAAC49HQnQckACFpX5571iQACLegSldGRAApcl/trOvEADegsMlmIYP/+gEHrJ unRAA964UBoln0AEv2yLc52VQAJA/D1lhuVAAUtKra+KiEAAV5Swe19IP/1JJCUGUH4//u6v Ul+TtEABX1QJbi5BP/9PC9LLOVxAAZZW/z0efD//C4udZ9sWQALXac7mnc5ACCubUZ3TeUAC qC8BsmKaQAHzkdJODBhAACLtfOZ8wEACYLm7cUAXQAJzHg+rzApAAxmojl1f2EABcld2nHe7 QAJHHKulyohAAKDIQJ5eQEAA6rp4mAp8P//+m3++QixAAiQZYG5GEEACnNJyOI28QAKCTU03 
F6JAAAfBBLvVIUAAKzj+GsfeQAFqQbV3qypAAAR/L07jiz/8LggOv280QAPTzwqc1G5AAOgk szD5D0AADaP0EBSFQAVtMqOhfeFAAyTbxuILe0ADJAqy6hoLQAIg1EA0ke5AAbvcUEZhJkAB V+PmokeOQAIO6sElOMhAANKUAZXPTEAECj2ubzLWQAQS0sschYBAAcsO4kRvZkACHlPh9C26 P/6BQJMucrRAAE+W0t56dEAB7RE9Q1sEQAH5ZB2qiNpAAY5VtYmlgkAFVnlvNwL+QAN3MTss sBFAAtU/ErvOvEACkBfxlfd2QABhqk2TModAAoztfsVhWUADFqOhX6NkQANop3A5LdxABHe1 plQEb0ACqu3o8B+2QAGXlZmMdZBAAYCN43d+qEAAhmNfFf0eQAEFYQfnDmRAAX9lyvpJ10AB I9601QlKQABT6Q5/MAFAAHPaV5Px0EABq6u1w88vQAAaJNP6abdAAi9Y2x+W0UACS/A/3b1+ QAD/9NrPXuJAAMM1xpp/m0AAiUK/73VlQACwSVPQ4udAADfzSrlLjUACcjt49QoOQABxCOiU JFhAAUoLoVsUA0ACYYvzMnjMQAK1RxV3mARAA2xXJAY0a0ADwmsNT2eRQAOtKGzpSFJAAze1 0QXmTkABRbMXpB4mQACnA6M4PKxAAgj6aOiPoEABsX1aMoGgQAJUlEIxlMRAAcdkVG4pwkAA GwiQ9HKqP//B1wxiERBAAWr3Bmh69EAC2rTbJa1VQAFKaRbWfKo//7MJV/vekEAB47hpTb8U QAB3DxJvTgdAAJQ0nFNvkj//oyDfqgH2P/5ZNSimnMRAABXhJ+bYfEACFlKD+OClQAD1b9Ts dFBAApacCIhRNkADKYyd5sswQADl/dphpiZAAUeeu/EaMkACGRtIvYV1QAGaDO/C5+RAA/m6 QF+AwUABTpN1rnHsQAI8U0Ya5g5AAcqOMMvtDEADhx7reVV6QAJgZE9exghAAntgqu4ksUAC +6MK6DsnP/3u4i5Fhvs//9JXhPtykUABTNjaDl7sQAN5u4L467xAAyZRaS8sukAA0615LH4q QADT1jmXd8hAAsEhosrZiEABBn75De1PQADkLHO0O5VAALn8sAqVDEAA1rGOp6WAQANIdMjB B/RAApKoGKrha0AAOASWDPWTQAAUunRpAnxAAbVkrhL17UADUEWj091MQAI0W06vXS1AAvZU rdNRqEAAKlbD8GRnQAFO+JT9FzZAAR8qe7d2OEAAgEIOFJvEQAB3aszdeRRAAC5QidMrxkAB EJLYDBN9QADZJxx+LkRAAFL3YzOpDEAAiBlStm18QAIp9S/z/7pAAqlb3ILHK0AEsTs2otJW QAcVZVFhdnJAAUTulfisNEABWb46NWNRQAK26DWtbHo//7G5rjkktkACD8e+GtM0QAOm26uY 4YVABKpYkCOSHEADdR+dBA6/QAFbbkcYB+BAASCv3Egv1EAAlGGdJ7ohQAKhhxblq00//imQ k+8a7EAEQXapXQcdQALI5MNd4OhAAccXYy5t30ACqfjuqEdoQAFpDJjpWUhAAXXhI+tyMEAA Xu3WrUJsP/6XTHrE1sJAA6E16WMjsEAA/KXAV8sJQABvGWjSWvhAAWllvoA0hkADPUSYZqFW QAQyxFmh+vlAA+JxEFD5jEAFEHcHiOfVP/73h83l+ttAAwM6zwLgYEAAKbYPxzqDQACwGPMR HOlAAL7pliEuAkABjnxsrUImQAFdipCuoapABZIcblvqzEAEYPP0z02cQAH4m/Vqh81AAUVN BpqmCUABKHbG3pQ4QAJr990P7lI//6tHdgvB20AAr3Hb6NoVP/+exXlfHSpABJZ5YIOLGT// zG2FyPN7QAEvXAUvutRAAez6bqWNzkAAPsBpMbvPQAB9wHjI70VAAXtBrz98s0ABCYZAu1Fs P/9GoEyA18U//5t84NKqoEACOMH3U+URP/7q7e7xcPlAAO1TS5uss0AB/NAVH1dKQAAvT4Gx aNFAAZpjTegUOEABBi7Ew1r6QAHNkx3bfjRAAMU++ZqmjkABRXT6/lusQAAIaIl+muY//iim ywx98EAFIyI1zq5EQAHL70WOx85AASEDbwAgEUAD6gR8iJ3wQANtlktJ0UpABvSU5kQ6SEAC xa7cp/PnP/+cs1oyjspAAP3fg/R7MEAC/97yD+rxQAG9VYcPIktAA8y8xY9jz0ABUxrqIy+h QAC26b9VWxdAAlRNAkJacEAA5b7tsGPiP/8wL5nAMnhAAKDnHK9w50AAfRifDUFgQAFc1iWw 7lNAAft8NiSIsUAAsmSNHTkoQADbK2DUP2w//1kjOoYaakACS1SATdzoQAMbOwKkOtw//4Cl +1Ol7kAArgG6THjvQAF2k36XaMlAAtiNVfuU2UAC9Jx2hOB5QAdf1kMAdJZAAkxvUX5EAD// 4yxtDt+gQAA0Ap672/4//05oij1Izj//SD8cQJacP/807c8+MLRAAmwm/mkyzEAA9SrwOrSP QAItPGO/IOFAAj8V/SULBEAFn0R321Q6QALl48LZtvw//1JE4QCz/kAAemtWV6nKQAFTaW8D IsRAA9BA2+15dkAC9THysCUWQADRiQtoRDBAAJttV9F9zUAA21G/CrTJQAMLW9sNSCBABSzX Vm2pekADZI+CpuE7QAX5cxpdfwVAAydlGcsUlUAB3NBqm7EuQAGX1YQ2y5RAALf17dz0sEAB GsGiBELKQAA5SgzhdVFAAFN7s8n0iEABZ611aqXqQAFQ81+6h4dAAxdRzIP/yUACgM2bKank QALoBv/znyo//8cmtHXdIUAAu8CgzLphP/7UFeqzqvM//LM5CN/zPEABXkgDztPIP/+rrNAl mUZAAdTw50Sr/D//yzdHEDMOQAJDSvxf4ZI//3H4cAPQ9kADULOe+GNnQAMsV6kJBaZAA/3H E9NGSkAAxg35KiLKQAQpUcrmlf9AAoOD9e1bvkACtNoUOBNiQAGwxmAlVIdABEaCo5HPCUAD sfAFavqSP/7z5dQEO3hAAFJcCFGApUABQEb5QZFTQAGqmAfevbxAC5DyTUrwkkAIxBoQhSlC QAafT/G4i0o//9Oe2GSAH0AE5aN9G4AFP/+z2gH124pAAc2XEHtgzkACzrYh9K0fP/9kOj6e dchAAagg2TSvoEAAbHlRCEn5QAEcITajxP5AAU3Ra4bCVEABaZFJFDDyQAGcQg4AuDpAANIO +1F2QEABh/PI7rvWQABcjn/UjrxAAEcnmVbewkADGkaT4HrCP/+jNX/pT9hAAits/F2P0EAA AaJaFTDhP/+RmNdmsdJAAAwJHA8jdkAAHRt8oJG4QAJHYrszKWVAAxOaW/tR+kAFNtCA9BNw QAhxfrnma4s//qSMspT51T/92FSy8nzGQAAd9p+tVuI///zFcugDz0ADHV6zJmM6QAO7uMQN 
lChAA6naKsV8/kAAKFWSEKi3QAJdK/O39aNAAJLCqX8BnEABy0xqgUlyQATdqFzRbSxAAWIc JC7m6kABiU0EmGmDP//KWdXn/GRAAmY7gajNtz/8ZFG5eLlgP/6ltEZtHBQ//z7fU+VY9EAF Yal/tquwQAc4/JxzkklAAsiblWQjbEABwrr/y3iZQAItoN1boXJAAKFVic3qUkABniTg4g1A QAAShNQ0t84//9TDgMTyfj//1/RIaFOKQARLonQWjGtAAK2H+pdIPz//d63yAohyP/2lvbAe 8AlAA5DiZXRlLD//C9TLhgGsQAEPdMSWSJNAAk0D6xuTp0AEuEFrbVgYQAD7jsjq6ilAAM6y 0+CpTEACzeX8iq6LP//2O1aXn8ZAAYsgWtNX6kABl5esZXOjQAGx6JVqIFNAAlc6/Lmh10AB NEoueF8vQAK2m/y8p85ABgrC95cwBkACzJL3egWGQAH/xAU5mqhAAOdyb8SMQD/945ZIHoe+ QAJTpfkQjHJAAOruUo23e0AAsJH2XmSkQAGSSqgjkS5AAorOrwe6WEADDCbWad06P/5bK4Lg wxZAAgv7siEqbkAApctiEED+QALwqrqg62BAAV7nhp2hBkAAxb5AlswtQADHzlnSYK9AAF/U KdTzeEABR45nATYfQADRKLv4cOlAAL4rgcPid0ACeoGeFaDRQAIeSSeBMsFAAbljk/LTr0AD 12eH6pzGQABOLholo2JAAFStMqCMaj/+NO5JBbN6QADI0YUL+CxAAO2N0DPBTEACrWDs3dpU QACz9yft2SpAASI1vbu+LkABDuyQVf98QAJtC7RKLFlAA45rRyAjtj/+1ZmiWGbYQATGvgeq qCxABkPO8qAJr0ACujcRfx+RQAN84+c6nnJAAFbQCOGXl0AADO5jqa/+QAHnmn22oINAA17H fg70LUACapMM1g7bQAOloR6Aeco///rBKf3yW0AAOzjMqiViQAAfSfLCtQ9AAUaCZc+G5EAB NIDGkav3QASjmC2auJ5AA78NWLfY9EACklx3o6o8QAM4kkUOLDpAASKDwQBuk0ADMa5TBzWY QAPmNQn3FUNAA66+Nn7gF0AAJZPehmNSQACaGtFilxFAAcWrE5e+VEAEg6+PvsGlQAT2KLg3 ZyxAAzyTr+BDgEACAsy/2TpgQAEf2K1dpjtAAVhNQE6dgkAARXXLOPEVQAIBLE9kgxxAAnSa Wn7d6EAEAeEMGa1UQAXo3/fyzYxABFDWrgcyUz//VGmEkwofQATON2aXnM5ABUudiVYyiT// l8lkVSG+P/7/QQJEVZlAAR9mcDRbGkABZvOknoKOQAQ3mAkhvYZABXkaMrplfUAA1sJqAsP+ P//Y6iJ38cpAATJHu0oUgkAA/aaP1f2RQACUekBvkOVAAX9tYrHuo0ABid7iVNh4QAH+ehpl omxAARYxYPydNEAAypJHJfEXQAXY+vtHPS9AAWagsAQQEkAAqsxOWvgyQAMEUKESJ3xAACfJ rsjrTEABopYL7BH8QAGSMjREMJJAAAzUUh+oqUAAN9xXi0RWQAGTwvr9ZTZAAW8dLkLqZEAC PwiKfA9IQAElelriQSJAAQn5pwHkWkAAklVug1tqQAQ8ffhcOGFAAFPFwR82FUABVDGUA+ku QABgoruzpypAAI8mCGguU0ABg771HZQmQAGNVkRgfOBAAQlcLcktFD//wUMg2aelQADncZSJ B3Y//yIEfffrCEABl+jp9MjyQAJL5OK5a8NABP63FMIJLUAA4x+0OigoQAHKzYcLlohAAK38 SQRs+0ACcl0ffGrXP/7ye9Af/flAAXI+WPKCDkABrHEWOWkCQADTS3t8lRBAA24dBkf/9j/+ SVTNKF2kQALlt9G0pTpAAedslco260AA5Rqfss+CP//wxsT0Xkg//xdOZyL8qT/+6If2h/1X QAMQymuoosE//qi9BtGhOEACcAsSR0scQAAlA7ASkw0//uV2x4zaREAB4TqwaMwCQACHDqr0 bXFAAj+5nh5TXj//h4cl4LveQAMIq8SDn6dAA0/mvDTJTz/+k2ptNCuiQASc+NYFJg5AAAly 2iZVJ0AAZW9ERYKEQAMTvILQgblAA6QIU2fDdkABwJXvcSeNQABt0Ldi8BtAAgd6ygRD2EAB N1FJA47cQAC6N9xn4A5AAI2Cpven3EABlEHkm3QBQAN67lMVqPw//cMoCtuG4D/9xJIkaXUW P/5XcypE965AAvEB3wmqTkAC4LEc6NeZQARx4C0OFkhAA8lCktTNXUABPWxiHXncQAG2xoJL LVxAAi0svkXsMUAApJvDt9KiQAFyGTOG2JpAAS+CIs4om0AAxw7G4PiuQADbmIS6AaBAAtMP dwsiYkAAVGk+4sZrQAMxTrk14GJAAy8N2tPr2j/+acnVOJ3sQAI9+VMfbV4//sg3O1SNvkAA w2gi04gmQACIixZPUQRAAXKswJum5EAA30QZOe9MQAE90Jd8b6JABO3NlmjeSUAJKL6llBsG QAa50nuHoYtAAJZEoqGvj0ADhAX/+ZYAQAIBkvb66ztAALuhmz3YukABLVC7HRsTQAADbMqK KYpAAGAlT/kdK0ACsg58/9GkQABbBMW+Wg0//9rcBAnW20AB+93MibdeQADCR6cj6BhAALTj F4lQ20ABQ8KdVf3LQAGrIIJH8JpAAiHse6g9REABMn6LEseyQAe5+aAHx9tAAX6/cVm6wkAC fUOL/4L9QATV2kYCBpVAAn4ea1hl00ADdDol35kUQAIJ352ULIw//0x4ACwrdkACeu4nGu6c QABCfH36oP5AAEoN55oQQ0AAxltnekxyQAImKPn9HmBAAelhS7UPzEABw+dPsyISQAAoSQbL oW5AAIENKvBTVEACiTyKHHKMP/8xTD8klmxABCAinUvyfkAAYAv7wIFMQAN5sk7ejLpAALNd kHZvIEAAUYNV2ECsQADXGmYHmn5AA4JX1ZuEVEACkgkOCccIP///aW3p0+w///F2OIzFVEAA wp1q67VyQAM3LEMp/99AAreWQYKPpEACo176fOkiQAJBZz28fFhAA7htIdtm6EAC/SC/UCCB QARbfsqufFI//Iti35vtqkAGWM6C2EGIQAW7COfgzEBABoZreLMTY0ALaNMGeOwaQAHy94Q+ A/5AAl3/K6ekDkADYhLbVtYuQAJdAzION54//vAQF5W7nEABR9roURE8QAMcIWdN3xNAAS/j E/4kREAA373SRhycQAKeFevqKUlAAbGZirHf4D/+t4qDa84MQAEq3jqkrCNAAPUtqj0F8UAA 47RCdwWtQAQuAmzr+1hAAYJZg65go0AA3/DHFW28QAL9h9+Z+qZABazlzQiv1kAA5aago96g QAC7pqgFTJ8//+0x56bj8UAAQR1XW1HtQAJdxVGzB5RAAV3DjecbkEAAf5u+7k+tQAFzAGiw 
zeBAAsq6P7iaoD/++z5RR6gxP/3+PKlZVwJAAqadPVUVDkABEmVs0v1WQALj54YxG9xAAuwN CPLEckABn9L33AaOQAE1VtlCE8ZAAJkntEauwkABr/kqRN3QQAYc4ASEgkRAAlb4XU2FokAA ZOj319ccQAJYQgxMWL1AA9sKi9RoDj//nz0bRRHoP/+fPRtFEehAArHbxfNrjEABuaYHniAO QACw3kNdILlAAS9JoE4z30ABUFRPKTEuQAKfqrEciRFAAHcCw5kg6UABNxMjXvWUQANvU5G4 E8dAACJGz/dmpkABsxdGWeHHP/7qUBbmrbRAAxfXD8y1DUAA603FLaigQAHt1FE4RD5AAR08 IufzeEACP35EQWHGQACX8ORp999ABBujN5qtaEAAY/vuCQZDQAB75g2qOUdAAZmuO5yoLT/+ +kpKkgsKQAKklIb/sX1ABUERSKWnED/+isIUD/acP/+T/8XiiU4//0ZHzk6l3EABG5yo4WzO QAJP6FtZ5qpAAQO+MQ/6hkABRIkF0RMgQADfJ1i/Gfc//yxJbWhbZkACFBqEOmKNQAHkj6Cb s6JAAiHdtjahOkABAgBOE0eBQADF/40/nuJAAvf4pLnJYEAAggEu1WaAQADadDo/MapAAJL9 D4i8xEACKRH4eS6kP/+PgL5tmKlAAC+b0hl/kEABoq1Ojwy4QAH0sllzcgxAAe3y+7WZPkAB zADJX7VEQAFZq6pPBiZAARls+CT+2kABMOGs2lLxQAMduFdujE0//9kG014jhD/95P/M6wNI QAEqErJ0z4dAAK0G9UJ/yUABCWadHaSqQAEZsVM30DxAAN/9q+JjMEABspdsTOICQAKx86z4 mLhABRmtzYGxk0ACwjOE4K7EQACrrkBxlGxAAd+vahClCUABLjnVqRAOQABht2COwZtAAYCD GRooTEABw/FqAqZfP/7cBriH16JAAMayvmqKTkAC2fN8Ri3hQACvtu5UVqpAAEpZh9WkcEAC GR4hEcnSQADyx5PQWgJAAa3k45Gz7kAAnka5zJ1CQAWFZgEPdw9AAXXL/bz/2kACYh1k8tIo QACZ46LLcTs//8lCrQrykEACyptcyy5iQABMebT7zOhAAlzmMeKjwEAAMQ32hGyoQACv/Nkl h8BAAKmrWXxbykACiBYwVZ+mP//xWhC/ZCZAAHPLPCDP7kABQ/Xb8JcoQAJcFvRjBu9AAtK1 gptDukAAHKkxvK8sQACz4ROjRgRAA0nSZQ5dykACkbCNzBcmQABoOg+xK5RAA1MFBCSiaEAA 4lWN6uzbQAMZ6Pms1gpAAr9wwRC5tEAASkucaU3kQACyrYLUTIpAAKjrORePnEABFJj/5wPS QANks030dC4//UhtM/5X5kACEcR4/Ry7QAHm/cVkkPBAA/HG1NmgLD//fw2sGAr8QABkSzIz uQBAAsy2ONqa2EAH7ye7crabQADSAiK1jUJAAT7aydWi40AB70sA3FkGP/9SMXxX/1RAAnX9 /RFVcz/+z1ygVTPTQADvka6iEJ5AAikEs4J71kABUi0a4aOIQAD3pBUyyA5AAU+Zi0aUbEAB vfB40gKUQADjtGs8qrZAANm2sn02ZkAAiFsnh22JQAJSr5GkNxxABj3tN4ytlUAKA+1msOlw QAsDMW+K8tdADZbdHmPu90AGwqvHtj/mQAbSc+Qsh51AB0BFrMbZIEALP0sTTZGnQAmLueHX XIxACcmDl7PVLkALZ1rTVtmgQBErzYGXtLxABC5iPWegmEAGM4O1AD8SQAnac2hoEvVACg2P zNEnwkAG+Ne1keNOQAvUjhVwkB9ADbWZC9MyyUAGppoXozRQQAgllZTJmHRACOGnDXGGlkAI wLoh8PJgQAi7NWY74MZADBepe+5IfEALYbmcsfBwQAc/9UsCETJACS24gspDVEAHVf7n9wGb QAcRKGlMDMBADnyGSAjXxEAHlnxvPdl2QApYBnGiCXBACmwpdDenhEAPDfxIBgxbQAg0IaaY GixAB6VYP42JBkAIAZ4e+D7cQAo2ife90ZFACnhrBxEiREAKVOgUD8f7QAfop7jxyclACIcF ZoeLBEAHKzQKTXzoQAtw3xX+2yRACaGP8z+83UALNgFRzZnMQA1XdPeDvrZADNA53HLHKEAM rS8uYqb1QAS0ezZqbrJAD4ac/ATUMkAQMECMzg0bQAVztMd3OwJABT0WnTqMRkAE6pbCKO+Z QA4LHZ/xvJtADc3aJYzJhkAHpqIVE31QQAhNhAc5Lo5ACCKs/8g/REAI+PIZhT9EQAZll0NC gHJAB6RPt2OR/EALMDnij3+AQAteRqFeGwdAB1Sxn56u9EAHWH61J/jqQA4B68CHNMRADQzb jGCtZ0AHwvwzBg/4QAVrNN0X6a5AB5oWo4dtKEAL4xPK3BBsQAhN89dCAz9ADMsFwe3KdEAM chIgRP0wQAkwIQI4TDxAB+rqASt6U0AIVf+YbfEwQAlfT8Pgb7xACd69ugS8PkAIwXCFmMfo QAjwJe9+mKlACHNc8J/WDkAKv+C72RHzQBFf5DF7egtABWE0sGjIdEAOO6bBsfYjQAgwrSzm 7yRAB8nDFtPPqkAIwB5OKUvQQAZ4yoUILohACe7YW9ATM0AMyHA89q34QAp+Y0QvinRACN/J bz6UykAJqYnwjBCeQAf2CvQkx5FABxrY5B5kSEAHNyrCGtfcQAmgI/+HPuhAC+lE6VGZBkAG nkrmI1dfQAZXiiiHBM5ACqqkB+ew5EAFvSSt60+YQAWRG4zab89ADd6mxW2n1EAEzSuk9J/N QAT724sohx5ACA4SuaBEGkAK9er/NTlGQAmHqf87SixACmSqeLMk7kAG7/7vU0BSQAhYIKr1 aSdACNkM4z5OUEAJWoXXP4SzQAgs49/1nMdACBvCrH4RBEAJt9iGMWQIQAlHtmw/DkpACcgQ itPL1EAJaq9pdDz6QAXLi6c6ae1ABqayMsXnMUALEsLOAdb2QAaYenmVL2ZADA6xp0XO30AI OAu9gp9MQAeji47F4fBACFDwK9GPpEAFzLrN35MiQAdagXiN4GRAB/UPmjLwlUAJK0w4lzou QA5F62G5UkJABZuCF+ho2kAGaLmTHcyKQAYydhzhVTBACU+C3goqNkAF9Mka/kIxQAkG+rN/ /JZACjIyig0YfUAJFjTaodx0QAlV2i9+MeZAByVM5b/3YEAIbjoh8ybkQAktZ4SfB6JACXIz LlgBMEAE6EYgIND6QATrvOSVseRABQ5DrsqKFkAHBgShJCM5QAy9tkJJgIhACBg3MQr5WEAI 8NG61UOYQAeCKFdeLJdACz2D1HGt70AJCb0AvRJyQA4d/4CQ4u1AEgtUlC9BJEAHcViS+sBE QA2A5u/ZKnJACQV1rBxrO0ANa8nop/hkQAc5PdymYkxAB+ji2R3hUkAGlLAxqLyUQAeI/W2q 
Wc9AC/Uro3OLdkAGMmo0p5+aQAfhkPrj2V5ACQH4IW2sNkAIHNOIE8FwQAiyZl0ZB4ZAD3N6 5Tx0bEAKrMkV9VdxQAvlQiSbbFpADfjh59Oh6kAKb69XDiBMQAoiX4PGrBVACVGdiZPK/EAF Pm4UtQPWQAnJHrNL1E5ACORtr+dFzEALrkgwR+68QAczQSaiOcpAB61nu6tw6EAHu1T16ZiP QAnt+PY1oHFACabMx77qIkAH7qmnDRxcQAv87WYYGK5ABvC7ZFXJ9kAK7mXd7hzOQAdqQcYQ RY5ACSJEzwsDmkAIq4xZiulxQAk4av6zjZZACdOZZ/mqokAKZ1emvCafQBClEEDkqWVABohG CGwWUkAKUC/XIETaQArHvmLlastABpbFgUoDCEAG1vWSdcxyQAc78qdP1+hAB6AZ35XZEkAI aSiryRsmQAmmXj/hd/JAByFAbJ/iBEAJ9T9Ts6b9QARXt4NMiN1ABGsqh48ZVEAEqVCNp8SG QAh95mdr3jZADpQVLnY1aEALw3DBu4CAQASTKhLkbcZABOz7m7Nh3EAMm2MIqnDEQAXAi0XE KDZABILSdRv//0AGSIZcOrsEQAZoM7JpMthADCbcHQi5AEAIY0GoBHW/QAiaj/WT0qlACK6A 8NxGdEAJTQAc90oeQAcoTVMAlG5ACAESFUl7+kAJ3G7YXd2QQApfwLwJbcxACdCdmPJ20UAI 1yl56wEmQAeC6oW/miRACqoRqIdUeEAIZ/+xdv8KQAk0AhLNU5BABt9DQsKimEAL0ADKctee QAalzmQe+GBACcgpnwXOOEAK/xhi4A8OQAnS2Gy1geRACSCm3z1VqkAJN9KTlqKYQAnoI6Nw n25ACaAzkUMjHkAJqLlvtg85QAoBr2P0j0RABWER44Yl7EAD93LVtL/NQAPvUwAUlABACAvT lG/SIkAJywIK/bkdQAiQi6inX4ZAC17FtOeSJ0AHcxxDt3InQAmMzpUM5Z1ACvt2YLMXjEAI VxQnHN4jQA0w6JQXTlRAC/WIYqf4DEALOSWsjWQtQAaHzFxedZZABdHEqYhNuEAI/1nphx4Y QAhuZ4iB8AJAB/jyD2Ua3EAIILNJB0DgQApNzDKOXl9ACfMhRpOOmkAIJT3Tt4z7QAaiBwQs aP5ACriY3DcxxEAJ1XFsdBE4QAneqp8o5cZABahDQ66c40AJZFi60vJXQAhRw7/BZ5xAB44H z3SdS0AH5kHnxzuIQAbhPhvH5MRADEifyB1pWEAIJ1/jgjfAQAoFNhSdNCRACisAXEYMzkAK eZ8kNkKeQApukHNcEcFAC3Ooz4wzVUAL6YiVMA/hQAsHfNQeGoRABqvrOtCXHkAIFH7bwwx/ QAuoHle5fMVACFbb4Q4HukAHkzMBn2EwQAq1/GQRFR5ADJx65i0wc0AQjQfl6r62QAsqaDS8 LZdABvuMgU+g7kAF5uaUEjRnQA/E8Q5F+65AECFnaGqNvEAM5YebQfn0QApXy5kj80RACc0H xsmQlkAJ+2etGobOQAnchZVRrgZACHBkRguq1EAJyAZEzFdbQAtrXWRZcTtACi1g0EcupEAL wrWLYDY6QAhD1g5PViJACPlpY9ukdEAJAzantU92QAg23sctsdpAB8/7q1tN8EAHK4mb8uXE QAWFWFq+3EtABpLUoNhlfEAJbWH/tpxCQAekP/MqNn9ABg8uY+h8dEAM9gzLPiHeQAfyEpYq ZP9AA16QrnJDakAHth1RJ7OHQAgJueHSnvBACSYjtYp1TkAJyPPOMaO8QAl5QSmIisJAB0O9 dhbKfEALrt88vuk4QAfm+3YCkJNAB8tpa0r/tkAIcHGHhTgiQAtiKT9wwlRACrt6F0OKmEAI qZbKGZhOQAmQLzmp9j5ADBvKjJ32rkAHCyU6WttSQApZJ0mLjDxACjuzDhbSWkAIXe+aWr3V QAgDl7hw0XFAB2CP1wl5HUAHZuJvRJC0QAYnM2KcCpxACTPptQByDUAIC/f48Gh1QAqPuJBH LEBAC0LWn2wg2kAIcjo0OXsfQAr5PZ1VrcxACKqJgu7iFEAFrIGO/Y3EQAdX4o/J1EBABoaS uQUUlEAIva6qSGVlQAjzZrxcp9JAB/CoHPC760AMVLdxrn9OQAuU/n8k2XFAC0ITSn8zCEAH WCk9hOaRQArWSa4XCzNADogrYIYrWEAGwOswpTYwQAagWTspOalADIvdLb7YrUAGJ2SD+PEK QAlgsQKtiShACjN9160U0UAMVMz8Q7iAQAcoLs3kDnlADGQ9nb1BpkAJM/SZMXuSQAg7dLHq /5hACLZG4OCA7kAJTECBEXCGQAzFR7JG3vRACvudxall8kAIUPWpM4cZQAbqBxmtkCBADaYD WF0x+kAPmoVE35zcQAW+VTtd4GpABymIyEJYnEAKS6iqkU68QAf0vuSHjyJAB4CleOWhkEAF s9H6A3OYQAbTlHk7gW5ADJQOZvs5xUAKeu3XJAycQAYnsua9cHNACkYg+9RvYUAI90ewz3hP QAkJVcC2CtpACbLdYeAPxkAI3LltAitgQAmZR8ebNWdACgvVc2FHR0AIf86xFTZDQAptmsRF sXZACPpgzjgWYkAH+C7/SbnwQAgnqNNPM7xACGDS7zCzbEAKzZ+a7k77QAnLSjZFkEZABv0x agDjVkALVtcpt/HSQAwXsVVArERACZ1rdtu2aUAH/9odNrkJQAgYepKyscxABq8FkGEeiEAH U8aenuooQAp/gcJ9EfRABfg/U0IVvkAK2ZdK15LCQAi0Y6lRVnpACYuVmMy7dEAJCwCTUyoz QApWY8qnA/RACQMGrtqaokAFE4s8fUssQAZKL3LvqgZACSbd7ZN9HkAIQ4yYFGKkQAgYmT/k oh1ACfTlIB2G3kAGkFkcR3NgQAgawDA0lOhACVaw2W5NgEAJa0wWX32RQAodFJ8U4lBACFCU QR4rnEAHFZT7GjV+QApcnQtY8aBABigxAbzcqEAFuGFSJfn6QAdNHvepKIRACAGIFFiW2kAI x9VHqbC+QAfl6uKqXXFACEYoNcfGMkAM+6p2hr0wQAfZ39LCXXZABc5JkyoCIEALD0MHlfWL QAoJGr0Axm5ACbIMNlOY/kAKguQWHYYaQAif/XAellJAC3JI2WmExEANSk6h3HNqQArm/X7H GEZADV4mr2jMeUAJDJF86sVEQA1D2QDGe3lAB4l7o0EAYEAM1Xo/5HdhQAYuQlt8BU5AB1YL gtiNgEAHLO2uamXYQA0YnkcLhvJAB5Kj+Lku/kAIVGz7H9mmQAdBYi9G7rZACIpNYU6KHkAI 3V3HcC6YQAgNLxiSiTJACU3JfBAOMkAJxaNYWF3HQAqwFo2nTH9AB8An5F6WrEAG7hmLz1vl QAWouSC+Bw5ABu1AQD/NhkAJTlUhx6w8QAVJfHDXx6lADieywXQR1EAIy3ItRlCSQAYUW553 
6vVACk3SK0qRRkAJpeHN2EGIQAjAl7F8KcRACsvfPc1egEALfukAiR7oQAagP8jxNK5ACPiI yG4z6kAK/ZiwKqyOQAnsvzbIgaxADSq8ca/tckAQY2Veb776QAdsHT5HsDhADZ5ithiQCEAG 8qCT4A7cQA6gRx1grxhADWTtes8dyUAKRSxVl2v5QAnYzVdtjg5AB9y/QGk9oEAHffSeHPBn QAWAG/d81nhAA8qJarXZqEAMj6dHc4RnQAqPkKbRyM9ACe0CkUQOekAJckF3lu4qQAgHQdpO C65ACGcCvS1kGkAE0t/8Sl+VQA3v/qxnXbVACmhFmcPxYkAImoS5SZKVQAkMHT+a82xABmfn RdpuckAMOk52R3O1QAw2IwCuubhACFBplCOFxEAIqIVIXttMQBKn5WEieSlAD/NrPF3lYEAO 4gausQZ2QAVFc2eNoKBABVE+9faKGkAFnOYj/BDvQAaBpOF47IJABc9dBTTZOkAJPl3cFJ84 QAnGAXuP3UhADRK1BKrZ6UANj3HJFVzaQA4UeQP5f3FABmxWGKQN9EAJiUd0jixfQAvfJIuF DL1AD69FSlIonEAFyjXWWzL6QA5l/bcpxnZACzkrDN7gVkAOMZSME6dsQARM6n4r9qhADlBk jFLBh0AHHOg6iw0aQAaEmKSXlpZACtI5s92ZOEAJr310KKV+QAgadW4lkz1AB7lcx85P/EAI 8rx38b9IQAmCa2wk0FBAB08dwmDoqEAE6OPhXIY2QA7wQkiPggFACaWeKCVuAkAK0HbeapSe QA8+J661HYZAB4HxM13JOEAIL6eIgch8QAmBXi5zTKRACHb6v3A+BEAIDmiFLmfgQAYsYg80 1DxADnEitvib4EAGZbefaHvqQAy73rOuL61ABd1O6FMytkAMNwt2HvE0QAoDyPnLn6RACeHz GNW//kAIijlosdAYQAeCBkRk3RJACVvEYrYhkEAJevWKZ/y+QAc4sv+1CrBACC7KggTHsEAH 2lrDOLXZQAitoAAyrLxACoFaoKQAokAK6cKnnxukQA3WL6Dwvu5ADLBvuCx8wUAI9t4yxJYo QAZ/DZYzeSNACVyZ8xB3QkAMgtOZfovXQAkGIFM2VYxABzSFF+h3nEAH8AnQEv6BQAowlH62 zBpABvvHqJt8KkAD9jpmZOI0QAzIWCguWgFAC/a5WslQtkALlDp3B1BAQAtXcXe1LtJACUQp ScFY7EAIe3xUSsOuQAohMQIbsAxACE0pyWXHxUAJJF2MBMucQAyUJxKpCF5ADBu8ffaYw0AJ gZ+JBP0ZQAls69o4yJJABraV5NCJkkAHvzg5Ea/hQAg/wYX6byhACAN/h+fFdkALMnlSOLOq QAb8HLYF/HxABijlsGNlVEAFFaXWLhEuQArmlzymVM9ACgRV9W+eHEAIHZInbYqSQAlbg6el gBpACTg3Bz2CMkAHlGMVzhxtQAgEaCdLf2NACLM6l5kB5kAJYKDY7Oo5QAzwfUocssxABxi4 rr1s1kAHhGJka5Y0QAX+2ZBS4WpAB+8VLNz4okAJS4wABznbQAlhF93pSyhAB+sZokSWBUAH BEoK9jiiQBBAGDQjmJVAEAY5iKpP/kAIRW9/ZosKQAt0vunO+TJABar8BdtcX0AGLeT6+Snq QAYaMv4vdshAC1gTPzHb/EAH1Aesz0+NQAgimAVzAE5ACi1qd0sgnkAH7X4unHk7QAg9CmSi dPhACGDWWOledkAF9wpgcNLdQAdEkUF04EZACNL+QwS+mEAF9/d9MMhXQAyz405zpEhACEBQ 8L/sMEAKortXVH+KQAkBsDb/3KRACH9Lcigx+kALcGs9ETZPQAj1NINoLehACXUzJTW7aEAJ V8vjFtmcQAjRIcnYfzhACUg6IikWwkALTjZV10EiQAepbEN68BBABznahEml/kANcryNuzUg QAbv/mmgpe5ADFW3DjizJkAJ96oEdzzFQAc7OB0EqcJABc3NXR5FoEANiB/2rZu6QA8diItT MfxAChCpHvcOMkAJytSPvLmmQAd7lNeyHUpAB3KbjveOlkAFBEtDcGHqQAhx99roWs5ACaTd HRjlmkAH1chREhvmQAdukjbBflBADDkRJKCUUkAIeKcvsn9IQAlaZJPcRRtAC1Zh4TJBXEAH jxpAgk60QAtAzYjFeEdACIyRsKvgUkALh39ooUi+QAgNuihH4HhADF7b0Ot0dEANecYE+kik QBATbaN7PE9ABw0iqY54VEAMwOar+b3fQAeoz5K96wJACT9MyTn7VkANBxGFXt1SQAbtqk7S I/VAC1MWqqmSNUAHUYQg7njbQA3FpTzONQJACstasgjQbkAH6OyM8J5EQAvtUNnYsIdAB2my LwYH8UAHdPnTasHwQAgIwsBBSWpACMKoxCLGf0AIqHBpYexhQAgU/cL0q5xACj19KfJmKkAJ PIGIxSa8QAhzZc0TbeZACttVDfCIakAJ/t468OZ4QAlyJYTNdIJAB8VNpakSGUAMC0qbit8y QA9hD4lOU7JAEN5vcbc8F0AGgFJXnqyfQAdYEHXvz7ZACIZ+XTT/y0AJRiUeGgF8QApe4tnl 0ctABW+ExeiuvkAGHZnCV8lUQAfIDedlDVtACiX6VdPcbEAIKlxi9HtyQAiaN4jRUKVACKKJ 7+aXFEAFQ0RP8TMeQA65kmcHAKdABsRjaLmHzkAGvPDemgmMQA0koZzH+K5ACHp9yNiuaUAN HpevYt8fQAszEUoD91tADT12bAaIkUAGnKg4TBpnQAcuwz4Szw5ACMG0MniH4kAGW5Lbc22A QAZletQj92JABtowFhbWOkAL2/JwVOvmQAdrv3Vl2kNACK6x8+kdtEAJIgN+sfWEQAlnaox3 0stAB4rY5CDtCEAJm3O7EYd6QAp9x5dbg8BACmYWPxQamkALAx4E1OFKQAeuJraiNChACJMv CO0agEAMx9vVETCYQAYLVSp78tpABlx+tNcwlEAMFYGHnZ40QAoofkt4JsRABPzO9NI4hkAJ kYS3d5yQQAdJAJ6KetJAC9V6f38YGEAHcvgEqWWCQAxlYqnG5ERACkA1DO6aWEAKjKFxlhap QAhC7uBdoaJACcoPNIpY3kAJaOjB9mAuQAiW+Knq+SxACAJCgApmrkAIX+qgpie+QAi4Xdpn 6ZRADEvCuUOBzEAJv18VFOFyQAravP70PS5ADL3zUTPBDkAFgMxS1+EVQAtHrGXDgHRAB08B zHvoUkAKeKqYYDWaQAsU9BWQQ2NAC+FJ/Rz3WkAIEklUabQgQAdoo/1NCIhADtQucWtBKkAN 50C318GuQAiUk/7ZbeBACWQdhoSR6EAJDD0biQPQQAiA9f1INMBACUYdav+xhkAHYhYZfi7a QATY+7d2WyhACJpFWd3GJ0ARpLTj36OyQAX+NQ0Ct+BABdzgjjvY+kAH5WAI8b2iQAk66pxR 
uYRACoWPvf71R0AIAxensWH8QAhSbwUgo9hACQRHxSZIskALLOWaQY8oQAf9C6BfLOxACceK mt8gp0AIe8bakhS8QAvQ61xQw7xAB3xB5K+q0UAFGIaMBUemQBC/7u+pRkpAB46xvwfDJEAH j6sWmGHyQArSRR/nf/5ABozJo/Y4oUAHZBxDRW0UQAerOKjws61ADOr7p8hC4UAG54SEqK1I QAvqxsDVIRZACPPYIlPOBkAJD93Af4ZMQAiKoaIPR7BAC9p+0LkjK0AKQqiZyIxHQAl5M9dG VWxACLgmFE6A0EAG8oG/Fa3kQAyZeFxlM0tADlsUhUWUZ0AHhOUITapUQAa8D9SQVeZABtYB 9YLiHEAGl0mvDA7eQAu6hOAe1X9AClDIfvUb3kAIEvJaXNL8QAgHoZMYyEdABREesHMw5kAG S2NnkeEkQAfJCvctfvxACM9t7Z7qaEAIE+XpV0yCQAzaxyrYIhRADOAg3c1H4EAHHiroqYFA QAotRc20fg9AB+WS9iNKZEAIn4thTaq4QAhQkIctvWZACsrhDGgtSUAI1EC6biqyQApzbcks EstAC3ELSbr/kkAHE3iBv88aQAuOSxwA1R5AEHdHAcmr70AIkfkHyjPVQAqIRDEuocZACWA1 i5zV1UAOxrQKVPb0QAcB/SWQ/bpAC8rim+aPU0AFitaR0+oaQAjepru5tM5ACUuWBoEfGEAJ RlDPZ36MQA2tv9aPQsZABxi2Mcy2mkAGDiNFSuX0QBEBwjphjnZAD9ufhvGr5kAH/9JOL4Av QAULig6wqlJADb2P3bPBQEAMEYzDVEpVQAgn+yQ2G5dABmEbjaV15kALV/5Z1LWWQAl17fDm PdJACmtPhh+ujUAHLadJaMfdQAdfegKFaxJAC2aXuKaJ2EAG/MHjxfOHQAwwwRTRCcBADVDX ybehxkAFz7x7y7a2QAgkpBHPTY5ACoVei1LdQEAPT00zVzs7QAojx/+hRSxABmzWYqiZ00AL W0fZTEMyQAnk/Dz1N95ABxSbTqlAdUAKvwGKp4cIQAhKyihjyvVACCsLVPwzjUAMIo1Mz8xa QA6sxNy0V8xACSZNeNxl4EAIMTAFMNWPQAhqLRg6s9BADDVSVtHS4EAHDdzOPyZ8QAhpZE7e NStACgT9SsIXvEALyfAgQdH2QAa2PkI+owZACNZXQZtEFEAKxF2gPFVHQAsxHn1ptPxAB573 ZkXVDkALWbo2AktNQAroFKacy+JACHxY+LohJkAIQeurmhwwQAWtIBk57qxAC/i0HpCcAEAI odTkz9doQAmPUS8ajXpACU4brlAqZkAH2YM9/vDyQAn0tOQTUnhABiJZLZonZkAIgHkUdI+q QAaaylqcbQNADAX5o3Re9EAL/EJR/aqIQAZMP+DNOBpAC5/+IMBr2EAGp2o3nKveQAnA8vKi wO1ACP/TtHj7hkAICg1lhV/gQAlL9xVgceBACZ6i0xUl1EAI37xVXq/ZQAgVawxI3f5AB9dO dX5POkAGThq9d8d4QAypzaUpLJpADQFWmcZ0GkAGcwwcqZRQQAai1XB+3FtACLjmurVXPEAJ ooW65T+BQApilf1Jeu9ABGHQOzxfukAHIPu/ZxSsQAhsPjOZMIJAC1I9tpvmtUAKWWFH3sA4 QAgU8xix08BABogmQxJdh0AQURSX07C4QAfIETyesV5ACsVz0r6Vp0AGf3AOd1kcQAxA+csD 0gZADy2UEKr+aUAFUMKQ3Qp9QAbmJ5+kVPhAB3/aYBGYd0AEYLgLNN4LQAXhkrr1TB5ADv7n R8Wq90AGe3EKYM+UQA03Jm2uHtJACptg2pyqlEAGr/mWiVh+QAjPUBrh+qxACzwO9mY4yEAK U5LQ+9RLQArSn4loJ2NACyh2s4gcR0AISZ6FBixmQAqtsuFpaa1AB/vaKdHYBEADag4iY6F6 QAJdzY6Law1AA2sxgehzb0AFDZCyVtDhQAK3XVmCvzdAAoozzd/vWEAC/kkVFmk4QAXCRWY6 QVxAAj/hEFog30AFfCmW1htWQAVt8RqFqnxACgRphaNaN0ABSLcQduSmQABPEScNkIJAAjD6 Vq0iZkAEnoxyvvqUQALLBAIwfYhABJyvEVkKGEAFByMuy6KAQAM0U8xLGQRAAxeFVfbSYkAD KZif60vWQAQI01rGfVZABJkkWghAoEAD55N+YzlyQARNEaG+bDxAAwuqOPv53kADwOYxUMNk QAQ+P6+2di9ABCgmv2te8kAEBEyk3nXZQAKPe5+tLB1ABPafpRLlcEAEmQCkeeUGQAcHq+iF vgdAAu9jau9AJ0ACNp8gjjLiQAKTmzQQQP1AA731KrlPm0AFI+po9Pn8QAL17o6nGzZABEpF U0dY4kAE1K1Zm5OVQAEy5P7dG6NABv/aCF5gh0AEj7NjD9Y8QAVYjC4Rv/VABeboiBpnU0AG SNmGhdQOQAVCLf/w6TdAAr/mFEatI0AGF2q8ZowcQAZEvo/UE3hAA1xrV5vfeUADE3o3p0Um QALAsi6ArZZABJSWAPgkVkAEwRzXzc3UQAMe2lTftxxAAxI7EgDjYkAC2D81dTYMQAO7cy+z EVZAAUk/Q5Gxp0ACFQaCeuV8QAVdI5Lc63xAA+t4yw6XLEADFkihf7HwQAMLoEuAAQNABdcH fK76YkAFgO4CP1X9QAK5zJuglaZAAUmz5sU8vEACI5yyGpnCQAKI4B0ectFABPEREXJSxEAG Z0k3hxcwQAYDEy9A005AA5mitjNTkEAEDKBTpxwOQAJ6jRXg+NdAAev9+ffrD0ACuCl5SoCz QAQrmwv6nM5AA7Cx3L43bkAD4TK7vmUUQAY03RW63CdAB6+hlIueFUACDvNRSxp6QAbplx6L QelAAoWQYui5w0AEG7vXwvWdQAT4x1Q5pDhABFyvt088bkAEfd3XQ6zAQAVPFDo0UCFABV9W BqHiekADurSxxYKxQAKLgAZet7pAAtc2yk7swkABwm6dE9aVQAJOKTCYPcdABPAIJNmisEAG qbznP8XfQAHFx9cq3NJAAkAdhxEBc0AFd1AeNYi6QAHu60kp7qhAAaD5w8U7fkAGzlik0Udy QAI63wEbH7BAAhHR58DUt0AEElvoGH1GQAN8qA8Gaw5ABCnPnFTCc0AE21wYU+iGQAPps8eY 2JpABAlGz2iFQUAD8o3ygnVyQAF9a52SJH5ABL4bnN2dK0ACCTyAjPwTQANQXx7lSRpABRXb hqiwYEAEpz53zweKQARJvgVhvL5AAiaO3ovHPEACj0HTvvrqQAQmOtsVU/xAAnbMmN7UH0AF Gvr2ixoQQAI1xfvkNFBAAg+F6vqr7kADNMdbBy/JQAFgFRWvbh9AAQJzkLfXCEACSFe3IIc/ QAP82160jrRABk0DHogDqEADBgAj2oa0QAJIAU9EgudAAeydO0wk0kADJ4b9CUfUQALdccW+ 
4gZABATDtkHg4EADLwuHRM0RQAQR6mdSnfRABNNlDlht9kADo313Wx3QQAQkqeLZUk5AAyj4 ty63VkAD0i0VPc6OQAGWLXdcKR5AAfEfyRp1wkACeygVoz00QAKxCwRkvYhABWbv3jQGKUAE Sfq8PtaWQAQAh5sPM7JAA8NX2lmqnEADTCooJ2EKQAR5g+LUoMpABadmEdTLQEAJWw0LKMai QAQmdnOBV1ZABLvcf6TG8EAEoXuw2PIdQAPhcPypwcpAA9q74HC8HkAD2ij8W5osQAKfc8oJ T1lAAl3EKtMA7EAFhQuUAMLGQAMmcI+mTnJAAb7Za65zTUAFuDyrYR7cQARXop+57+BAAmz/ N7jcIUAHWb9+EnsRQAPGc8a+eUtABV1cfpG/ekAGdVkedZxiQAWPGhiPW4tABD1hCUoNTUAC 3uFM3IkVQAJ/ubbeUSRAA+N7sD7F9EADbEuHOg16QAXDpLQwguBAA5m1tYS7fEABwePBzO2K QAMHgVEdt6RABFlsgjYKpUAFiewSlIP+QAHoRGulS7xABh6DapEvLkAB5UEHyRGaQAVS/thz MAxABLyQp5gU4UAF2b5ZrckYQAMzlT+XpfxAA3EjIyOXMkAD7kwUHD32QAO1N+c6anZABxf6 /BoIZ0AC+E3i+r85QAXQnazFkUhABXkXx5fr7EABTyNx9rbwQARMDPiUYIxABFZWX1/qvEAD 46WXRmbIQAHnLnl2XwZAA2QqtIAf6kACZEhdQguoQAPYOQvgea5AAqzTieC1MkAC4Z/fcnHX QAGgUcTSs3RAA8pLcsbHlEAF+WTDZbWKQAV+yFRDOyBAAZIOHOEtskACJgWhCSd4QAd7ZPAj fNxAAWziwRQds0AAfnKOk4ywQALBjHpZZShAAt3NHEjIfEAGa+ZTu4K0QAH8Enc2u2JAAa9p JLn2CEAGLTpEoqhoQAGFlYCurpRAAXLGJkRQmEAET0LGiDIQQAQWAocrdzJABUkKwz3hokAD tdw467kXQAQlCD3VlhZAAyLHSJ9GIEAECumAtsnMQAMts429mwhAA5YGllLgxEAC8MwnQ5gJ QASGiy4j1IJAArzcTG2yqEAD246tjw/IQAVk6tF6v5hABGqwT1FrREADKbG7d9VXQANA6qmI XgZAA3gf+zxYoEAEpLAOPqI7QAUyfDVzsV1ABjjBcvigvkAAhGMO4W0UQAASM9faTCtAAD25 LsgFqEAA5bLVabZmQAbU69Hf1wJAAp4RYdQhoEAFY2pgTfKAQAHh7PF9dmpABbZUPFnLSEAC 4s1fpdpNQALbbhgvyN5ABnowEXNDKEAFePUq3N6aQAMGIO6mdEtAA34qPN3PVEACBlk9yi5y QALWNfRUmbNABAPMALyP2EADZ27nH8AXQAPS+jn246pABHap8dlOsEAEN+StFMOEQAOW4GwG GZBAAywN+Db9lkAD/YIMawJcQAI0LNwx/vhAAbqqcCV29EACH4lxqrXQQAUMHFhyJABABJK4 QoVSIkADkjQbqMriQAJDHkYdEbJAA83eyhesKkADn3xTBVC4QATgKVlBfpJAAkXoqP6FtkAF Va5LjGgqQAVKsL/EXSdABGyhr1nYr0AERkyzvuuyQAS8S8ZgiZ1ABDfMOiOFkEABwFSk9nI+ QALg/O0J3ExABo5hRa4fEkAEKeQQ0BFMQAGNT4djvitABba/0o/yBEAG26X+uyH4QAi/bvTs 8OtABfY72kqOlkABwxaRDtkaQAEmN3PPwy1AB5RLTfF/t0AHh5FXH1FBQAc7rxeJTYVABGjq lW755EAEtv3E8ngKQARlpPUlvYpABMcjpykjqEAD1EYTwW8PQANOwYYjrVRABOWBRNkVDkAE wdtwH6dwQAYQe7v2+AhAA3atWSfo5EADDjewXv0UQAQ8NXeHVFJAA9xHhUd+mkADmywoDf+a QAJQp1ATD7hAAYEtFIWbzEAB2LI3q5TSQARSppCwP9JAAynHdApl8EACjdrTUOU2QAS6YCje t4RABOa+kBsxz0AAeJKf8W4MQAORKfP6gVRAA1TxxyoZRkAEUIvpz3TIQAMDVQxOGUJAA0XK 0nC04EADk1biVFi2QATAxCopQkhAAzrQhMEDnUACTqZvZRVzQASI1IRB1rhAA4kmo63knEAF cRzC+eXsQAOcMYuVYVlAA7y5smg4NEAE6cMCqSn0QAMiMomBdXRABCNroJmHu0AEl6y+pDEW QAK7/9+fmohAA3O6pZa3zkADguh7Nlh6QAOHgB/dDcxAArxWq4tKBEADY4OG5JR2QAM8ZvT7 vsVABQaxzWcgmEAE+VjmflodQAOvN7hxgiVAApybPUz9DEAEnJd8FbvQQAPEXRL7LWxAAx7m r6nSSkADhF/oWHmJQAI3FgiIwB1AA4beBZ+gbkAEYO43ERJjQAN+jwdnKDhAAmhVkqoNtUAE vEplC0jNQALSNeQzgWxABUc8cYJvUEAImu66ErAfQALsSoQMPrpAAmLW53JGJkAFc+Z4NRD5 QAH757NinHpAA0aAZKIqwEADawhhym9fQAVf/ILvgqpAAlqYZZbdaEAE3BXV8JVmQAH2hz6T qI9ABH50aAYSSUACfHTjwnuUQAQ2bsODXwBABVBOMVcLjkAC+5fprgD8QARoHJXqgK1AAZ2A OEN+a0AGcJYWpI5wQAa7d+8r7mBAA2q4EJfVsUADLeTnwHuqQARwFKbQ+o5AAbn/gFlwdEAB uJliI0hKQAEA2D2qrwNAA/JbY2RPukADxW0XBjiuQANmPcPypYhAAtXi7rXmbkADW2X+mZH6 QAIGkPd6g2BABSliVA6y4EAEbV+9PV3/QAUd+2qsZ/tAAfvlvG1+DkAChS8K9xWIQAFlkCYj 3rBABHIUhHcv8kADpUzoAgNbQARqVNJpCR5AAoCrMBgQ8EAD2lftuwC7QARffDiBT5lAAxmU pjwsoEACwvwH5RozQATTOqRI4QBABKaYu6wN0UADuFaKam2gQAKovTrVI0RABD4OH5RRFkAB pn5C1DvEQAOg3Zq7jBxABEwkcDl+dEACWFMRqGSwQAUlU727C9ZABAKtKgEHrkADy2vhO8vV QAPDt3xF39lAAvg1x0IG9kAEtoJK+sYiQAE1hcX+8RZAAW/glAuy+UADtrGVRHtnQALtAc9J WHhAAtdtxu2fSkAC/ZHA5cXGQAF9s8eSCsJAAf1VS8ieGEAEyOSKXBNTQAQyAFpOy/JABVnc EC6EbEAB4VJ3LQSxQAEjnNxWTMZAB0VbBVuXKz//qE9nYzCNQAMHXHhjtS5AA0/gMvlIYkAD UyiclCcAQAOvodxM62JAA9syLsHMo0ADWjsLTawcQAS5U1B+4VxABCUw9nIUcEACgYvWLqIG QAdvugNqftpABUmTx8f6+0AGmQT91o6kQASF3nx56vxAA2VtuSaw40AEIlzm9hxEQAUhzgxO 
xLdABAGz1Yj6d0AFXqvCR8ofQAVxiQTFijBABfyWKd73/kAC1gtDfpgDQAXeEbXixrpAAf0v MZ8JIkAESFFcpES4QAQ0zzhYHw1ABenpli8IZ0ADurxof01kQAPDqFJhSb9AAhj0xwA4PUAD kcHASJehQAO39EnlNZxAA7BVwt+R1EADx1DDvURXQATTUY3BbNpAA8NyaQGWIEAEIRDf5EZE QAEszwfgcWhAAR5/mq7D3EACUA51G9QIQASNLkfwrrxAAmraXa9Xj0AFoHq0e6olQASsmEII XaZAAFt+aAXVsEADdgfJ7QFQQANF8XYHaxpAA4SZ8kXUJkAFYfr4IUCWQAWBJPU+ZtxAAxMD IsqNcEADnfwSfQxyQAStSvs1XI5AAyz06SQhUkAG4okTzAJhQAjpyZYIe4JABFlhHGAKuEAG Yr4rYSbZQAM6fcGbXnNAA5hmu1BHDkAESv4ojdaCQAQK0PLrfQRABFJDI+T4MkAC4MAEzqWm QAOWZHQFqsJAAhZR8zE5kkACBQU4/rwSQAWtuLsqUOVABGHMyaY1uUAEWv37RNMuQARX7ugV ghBAAq7mSvR2CkABxv3BfCngQAJbAXy6ipdABL+n1J8280AFC+DlsUnOQARAoeKpyHdAA7s8 6JUvE0ABKMOGfAV0QAaEHH3GKINABsfqsMu9eEACfQtXJ1IfQAQUhMUIg5BACgRphaNaN0AH oaC0efVzQAcPtcjuhk9AAmsn1qc38EACKQx6hwElQAHT4B0dPrBAAaTmv3VSzEABmA276bgz QALWYLYjw8RAA6TRa1T68kAGtXAinJrpQAavh4ydhURABvDSVV6iTUABxeSBAaWSQATJZ0e2 TaVAA9c0Hwpz2UAFvTMbbq8tQAJ/eJeVh2NABU9FWPi/SkAGDRNBQfvmQAkcTxHy4+RAAd5P uvPYZEAHvmCb+aXnQANl3IoWpzRAA/ZSjl9CXEAE3Cp4I5RqQAOWxK4as6RABAcC91ib9kAD 2hV17TRKQAIFl0j3K/BABSCCUh7BokABWqHqAD9yP//xSAqjTnZACa4rLoZX/kAFw2tVc9sH QAXj7vdPjzRABaU3rLW/ZEACOgk4ZwydQANdzH9A74VABBsw7lbh9kADeY6B01FfQAMUosHO +pxAAzF+SiT8SUAFlPrfQzxGQAJxjG2ozuVABRyws3cnW0ACZ5VTp3teQAcyVJl1k+RABIUc n8beekAFI4bIqfLwQAKUTCpZQM1ABBZgBBZjvkADws92dj8bQAPmy/7QbihAA4T+wH16gkAD RTgMAtmLQASi0wpp4shAAvo+t0aKQkAFVqlTb/wgQARQhJXDL9JABjEFd/kTFEAHIH8jOCjz QAQoByTq1eBAAjL+gfowUkAEakyEExE6QAcpRjg125BAA8Olf8EGa0ACeysoq8lYQALDdCwO Eu1ABHaJr1epnEAEe6N1oObeQADHFlFshHFACBbnfH1AwUADeR6VjDl0QAXQmZyQWTJABW55 kTfb+UAFNtLjP/bWQAEdlDdczK5ABlE7xRfab0ACehuVpn30QAPG8z5XOFRABcvTVlrzwEAF OJ6acNQXQASbQwyasUtABVRIuWlMl0AB/z49Fl/TQAFq+3zbN95AAj42zLAkzEAB+fSAGYcE QAXzG0BnF1JAAWq6QsrytkABmwvPWNyoQACm4PFVdgFABBTgGzKxRUACZlwdd+rSQAQpZay+ lnRAA/IjouW1WEADZM0ttPX2QALWFsBO6NNAAttCwyXzVUAEA56m2CxPQAL3lu6Tsj5AA8UB rwneS0AD1FQRWtYJQANdBhEqE5JAAvY/PFGSTEAEFmm+J6Y5QAO0OjYxTUhABC2loUoXKEAD ZcOVVPKmQAJp01ElyVFAB2SF+dnZQ0AFWK5oo5GDQAKKeMpptZRABaLessLEJ0ABpgMRyPZm QAH8CzcpMBhAAt+5NjH1m0AEkMBx99Q+QANyJiobhvlAA+Pqo3DV8kAEgHVQv/wIQAIg5iYa HopABJMtFmAPY0AE5pYlblzMQAMurdwJwIFAA+e7dhDdzEAComif4dIoQAIabz/AEA1ABW4G 2wH/SkAFFjWWjdg0QALKk7BeGwxABEEIxISESUACqSPSKqXNQAQT8E/wrZhAAp2fgNeBZ0AF Z6vklol0QAU7A6q8IXNAAk0WaYE5/0AE/DafqtmcQAQti2Uae/VAAx2wvX6Vb0AC19UhT920 QAaTptMTC6FAAwR46o6OwkAEhVZp4C8AQAONqaYS6ABAAqkaCaZq4kACrp2ohBDyQAUhh4s0 W7ZABDRUGSFjeEAE0vD4CbuSQARokx4oPKBABCBeMiQf7EACcIbxkXt2QAHai1n+gDtABFxv PFdJmUAFQrIx6DRCQAIWS5oWP1pAApsozlDQfkAE9Si0nsCKQAOPCMVi8yFAA9X/PtWpGEAC xsVQYf1UQAKu77uAKW1ABNFSUqKlWkACd6vdWiYyQAM0ncnY9P5AA6wsdGHIykAGWdwLmlV4 QAaefLeZFmhAB95075Ol+EAEGDesUfCOQAQFedEqRN1AA2mYLvpkHEAEGQddby0vQAXYOFYU cthAAmJ8Q5pFi0AFKeL6B5QIQAMhs5yY7cFABil7Kfago0ADbpqyyTM3QAT+T921yepABZRC sxJsUkADvGiX9HUWQALDTYX6QTpAAzaV3DX56EADNDP2mM8WQAMMXuxvTyxABARrmX2F4kAF WNzqPSZgQANKUfi1jCtABJEhGCM2WEAFGeqomhbYQARVtmA77dtAA1JkyH3MEkAEQc/tVXFq QAQ+DwTen2pABZhqisli90AFw3byeCT8QAQ3FI8Fee1AA7lPgqiF1EADb8S/WYvsQAQUsVgD M7ZABezySPPj10ADZDALsNk4QANgBYHkBIxAAuWpiiWovUAD8gj58S77QANLkvtAm+BAA6I+ UO/WtUADtXFKjU/aQAGGQCFFP15AB83zulGWtkAEWEccWtkfQARP5POHlw9ABBmGmkA+gEAC 0E+X+lleQAP0XDEp365ABNvoD4PhHkAFglA4x+KAQANfUgDPquZAAz689oGESkADyOjEtWHU QAJX9Pa7R/ZAARVwnqdUbkABO6TJaivGQAZ/G56CwctAAvGO73BHB0ACtUcQbPv+QASV4Lqb HyJABKF2rdcKskADoUBIK+iTQATdw3iQlSlABME3yhA5lUAEuY1kDyI4QAS1sAHPLW5AArWI Bk1s2EADM2Fdamc0QAUtaLkcMd5AArabfgryx0ADJOKQEu0sQAPtf8ZyBDZABIw8Oe5dOUAB dakITZ5vQAWIzcT01pdABPHxYCAZtkACwH5OJqgUQAUGrzB/hOhABD+R36F12EADyKc01wf4 QAQ6Os/2NrFAA1bvjm2krEAD3N1vUqH2QARVVLaL21BAAucWZoCUmkACRWnVyk3WQAOQmURT 
KKxAAnHfjIrnpEAF/9A9ZRgnQAUkAMUv37hAA4jAAGBN+EAGBl8yZ4fqQAHtyx17Ki5ABPFz eCtBSEACYUc5DqgZQAVUv7ZMccpABSgu0ZyUZ0AFYlT/ytcIQAO9OEUdttBAA9C772ungUAF hpSzxpwuQAUXpF8F33pAAsnSLuPHyEAGJcfZWAlDQAWvup/r0nJABKar4MMXQEAFEfLKZNo2 QAMY6kZR4kRAAHzzIyb0PEAD2pMfvzvaQAhER6rZKUxABBBpqcl5Z0ADs5oFkyU6QANF4Jhz DoBAAfKN/ObDdkAFSERHR7AlQAI6gxzXe3xABPnqryWuMkADa1fcqDMgQAR/cX8EOLZABEFs L/N26kACspNNfy2gQATSUuwgJPpABl5En7c3hUAFD7gP72QmQAI/08vtfjVAB09eS1FKF0AD 8edd/sv+QANbg1pKCXZABI6sXjjz2EACLKibH9dYQAKknlpat1FAAzR9iTZyEkAIetYxqa24 QAFMBteMc8xABsGgTaiaSEAE05As44DcQARETFoHZC9ABA1DXsqh8UADqzVsrkJ6QAQy1MLT fEBABExtgSoyBEADEsA12HTOQAIY/siv3NhABbjSZ+8aCEAFHkmIAxvIQANv/+iofWNAAceK kKmUoUAB5mM4kc7QQALlQinVZGVABAxSH2g3mEAEKp/Mr7Q+QAMS/U4AfKJABChwDyIzqkAA iGdSfw/iQAGhQ82SKNRAAnCieJagnEAFkzOEc0DcQAN5qVg/i4BABCwSzRdX9UAELBLNF1f1 QAQqSMujirJAA/Tod99ZSkAC1fcCE2ATQAREJP9T82BABGccXRzT30AE+dlySz8dQARZgVlv Pl9ABLGXQI8BXUACrYpQQ+UhQALw51EAv3JAA/EWuJRH00AF2y28NV+kQARbGrYjCZJABMO5 ATUOYkAEnnzXaDOBQAgspVDGgEVAAkiuZq069EAGPLLoQTjtQAGJkD3RANtAA/mQkH/XrkAE LLGKa7mDQAMPlzaY1PNABRliLxQHskAENFkoxtEeQAFtrNv45+ZAB/ADX2yJG0AFkBRjUMpG QAN6h45OmS5AAl53IYMWOkAE7hi+gQE4QAWTwBUfoORAApLjDsvbqEACenDZAhTAQAOPlKrh 6a5ABObaeryKfkACrU2FJ2luQANSFNs5bjlAA0I77JtR1EAEEKEP18ykQAMxSCMi2bpABeD8 vZha8EAGpA8ARuHSQAE5zqkrSI5AA35qhQFNqUAEUL3I0en4QAheLb87bz1ABhPuJibwBkAB 9ltW77ecQAUlavrwO/RABMxhnbQUvkACX6lQqoMwQAPXVMkKYsxAA4FHwPvjTUAChhIK8mmk QANHyiXhMjpABgm0e47u9kAEI9fZKT0gQAN7KbUSLS9ABLfb0lF4YUAFCTxPCWzPQAM1Dh0D k8hAA3ZsqhtDOkAFFOxqC9K8QAUwIvqF2DJAAaeINs6T9UAC9f6ZpxRkQAOBPC4upVZAA3o0 ota5QkAChO5xXLwiQAWBska3Rp9ABTess5EanEACVaS1PyjUQANDOLLN2FRAAm7QUrvw8UAE bewBwRsKQAO8lviQc1RAA1OgZPDxukADOwKY1SAQQAKutysNZ6JAA6xZGWnJ6kADl5h6/Yxy QAJrH38mvjpAAYpcKICO/0AF8ALJDIaZQAQcuyiWyRtAAwZ12ib/YkAEFT3/JAAOQAJvUMG6 ruVABBp+/X+cqkAEDFbWefzmQALYKk3Q9w9AAuIhzwO7kkAET/qgUMYyQARQ1r3TEJZAAynp Y9Hn3EADHULh87QqQAJUslbSyZ1ABhrxvYBtrEAG+D0b+oyOQALnmwjZo7ZAAqEw0xcH6kAE Hq+ILpEuQAJLM53nwHtABWI3x0qo+EABVW+agfxkQALNjWSQtqBAAuumiIDHh0AESmWTjDL3 QAO7pTO1DHJAA1TnI9a6qEADGCCpXfwDQAZAsdHTkm1AArXpxeX/3kAGiTNxRpceQAFN0zYJ OL5ABbs08PQFjkAIAsTGLqd5QAIDsX0KEhNAAC+59ADGhUADvUwIogsQQAGW6acq9dhAAspF zs5eskAFkMc+44nvQALfstf2FJpABiIgByUmIUAD/4+LxFSEQAKm5Ef1m2VABF7Z+Hu9CEAF TIVSgmgAQAVLJ7kJpzRABlWD7Fd/D0AD+t22MtUpQAPKFyqt3IlABWiQBhafVkAD3i6wlSKO QARTV8QyTaxABINoNrAF+0AFNUpbpGuMQAcyvi8YH/JAArFAYSPsqkACzmQHZrQAQAOgRZeJ p1dABsSL0XXU9EAF8yFtbq4fQAVfNl2WUdtABxMcHiuG3kALf2TfBQKbQAJNkZVhxYpAAkSC xCYrXkAE6yjk1k1nQATxWEdGmPZAAz5xddGqOEAGNgzZ6OWoQAdDOISnJoFAA2S81B13OkAG 0WNDW2asQAcNPl0lhwZABmk9WUjwrUAGVuNtJxaVQATXRoriC0RABL6sv+nyXEAEX7pGZSOS QATcwjR2w2xABIXYRVjUPEAEU8HX82FKQAd+C0cZy8RABJiyzVc7SEAEj4DMF2kdQAS7FIZM HCNAByGavoQrXkAFfQEacVk+QAQyiv4l+NVABCxhDPo6MkAD/re9fb3+QAWbWwG9UdxAA0Sy xx5UzEAFz0YEEPv4QAazx1oLAg5ABFBg5Yhdv0AFQ1KcYxcNQAZC19ucCqBABzcLmUxgNkAG zkMwY8b9QAgufqecKg1AB/0jse+zskACT1H25hMoQArkXo89DkhAC39k3wUCm0ACovcthB/E QAI9naAtkK5AAnmcN/HHtkAJjhanbm+LQAhSvFcp8utAA4+a8y+S5EAEi0rUkw2iQATxWpI2 bghABd6qedDbTkACnO6Y+bf0QAOEtGEgJThABYoHKA/PzkAGiYzwO++hQAMnbUKcXo9AAtis OZjYPEAHs9jd21kyQAdmy94W6khAA2NZR8SlZUACIzEs163uQAMcfmIrQKNABWlPM4DNOEAE Qr1+uqHcQAYknB7XBaxABfX0/4h4xEADwK2TooFkQAOAJxTtRF5AA8jGkAIBnkAE0XgRUamy QATlkswQ6hRABHyd1CJbBEAFAeLZZlkZQAT7t9uFNoJABZHVg5vj1kAJTHw/oDLVQALbjxxe +nNAB5tsr6+qRkAEk6Bg68UWQAQSm0D4vaRABVedmMeapUAE5VCIE578QATOelyyyedABZnp aJFVukAEiHCdx5jUQARr1UF7EfpABIeZRgSPN0AE7eFCftQQQAP/b1stX/BABAEev9hQV0AE JGCrXqZcQAXGemoxsvBAArxxyQ/IOkADx7cE13OHQAYq0fLE4pRAAwZmApZx0EAC4w9c0lGD QAdAdxYY66FAAuC0wAbuOUACz9b4Nn3AQAS/ucgDDzdABOF/V46TdEAFJhYNwuqFQAWCfoHt 
XuhAA+EvsT5KGkADKbogQFmsQAPqFipocuhAAz3fbGKdkkAD3+FKwBf4QANTnA77XSJABS5U SdKGAkAEh01RDp/ZQAaMWJ9YCKZABbBdIP+zwEADdhVuqLplQAP0sRnWLT5ABd1XgfpjREAD db6ml51cQAY09wagHadABJ/VVF3GdkAEP0WTsppuQASeWa7ZD6ZAAcqMJafMmkABbSBrDEhz QALftoFR2uZABgQC0o2kCkAGD2K4Og2IQANYijW9raZAAmnPEyRRoEAB3ujXSSXQQAWN4BOU 7XBAAuDotweTakAE8RuBICFOQAT5QZkI+BVABO2K8CRjr0AGKSH9x7FFQAPIW4QpEiBABLqR hVEEoEAD8513eACcQAVXzbjkVqpAAtqWkTlHCkADw3UKrZZCQAQdQzgZqcJABCSzzUcdnkAF 0pnO3jreQAT1JpXF4v1ABTjloc1hY0AEhSPlxDlsQARUyyHLShJABTYyuhZw3EAHVGZLg8Bi QAhLFYWmWJJABYOAwKWr7kAHp1oR3S97QATzOJarpJ9ABtG81n2ZIkAEL5Ozoli6QAM/kbbd NgJAA5ggdHtKhkAEaVTsSQmIQAW0wZ54DZ5ABCMmkET/KEAEiueyZ0pkQAUEwF+YiDZABCJU VvwidEADW/kdDKn0QAWj4BXc2aVABGio7tEzMkAH54YPkLzNQAc2bGp15/pABoD3E+jlREAD 9D9Fbpj5QAWgLFGErLZABA0g1QYbIkAGikTFdFeIQAU0brCmAipABZo5AaTXXkAEUCdzYHyW QAOJ5u3I0hJABNARSkwa9kAF+v0hYEsMQAZhAkRzq7ZAAsuixLpszEAHIs0DiOpaQAHmG2Wa TI1AB4xSaLc9F0AElmRuBAYlQAWPBg4i4IRAA3TeNVciukAECeIM7O1MQAW7vEeXZiBABMzq /XmZH0AHJk4CjgJdQARFFvCHyI1AA5Zcj7TYNkAEBSZH/EDrQAOld/ovNfxABJTs/u3cSkAE 588BH/+NQASJ7z0V2FRAA6vUHf4wnEAFRyYC8npIQAOJjWtER5RABU1W9yV+X0AC1bo5DBlM QAL/qZyVHTNAAa7tssmEQkAFLvRZWSGiQAeQBEOFTnhABllQcC81EkABqSsX+WXqQAIsnT8V aEpABuoQgDLpnkADK10t3Y1dQAFBghEbK4pAA5TfyFycuEADmINMpacmQAW80HyyAnlABCpl 5MbwPkAEPkT6tIKOQAUKrrhU8YZABQHX3LVLF0ADF8q4lJvCQARiJzoLyeJABePJpquJ3EAE /CDRrRNXQAVP3ThoJy1ABRVS+vuLHEADlzpVLpVUQAVroQb6ZuJAA6kzo5j79EAFUSRCqBKM QAInRkjsZOpACDxoRrj+40ADj3VHcmqzQAVVLVEMybZABino8n3NXEAE+z7dZChmQARSM5Kd j5xABAWIhxIZr0AGO2G3ykteQASGKWUrBlpABV2oREroIkAGLqWO0prUQAFtGIeBWUxAAKUU TSGFlEAA3xrvAsZNQANd1BGASchABiw+Bfuz2kAEENDiXoelQAZy1L6x8ppAA1QxkxNOi0AE +JDHrXMBQAZL+i8zwcpABGhh+Tk9/EAHB7r89iDSQAXhTNJVNEBABfDf0aH3ZUAEDg6+/QkV QAI3/1yB/1pAA14MtRAC6EAFCaXo+JcKQANiQq1tjShAA41RUNGORkAEe2dHARI/QAMRA96v cQRAAwqC2umwSEADn4t0vTR0QAYGfpDCsuZABSWHPLc2C0AE7xd4Mzf4QAH3/QxPU8RABSIV swsOAUAFHEDTdvv6QAQPlz7wz+dABM8ZWVYHtEADJFBQ7A/qQAerc4OakqhABE6ocxWXUEAF 01BAsI9iQAcXNnNNgNtAB3PTajUpkkAHdzAIP6zaQARjXYAECmJABhe3RcrH6kAGpluZrPRl QANp8Mf+YjVABPtVlDJurkAFK6150DBfQAPoVGFqUFhAA2/0jLvw1kAF4K6fLqKOQAcz4ZPM UJlAC0bF804gSEAF41XwYXlaQAOG++RZVolAAi1+VNodi0AJxHA4yWQHQAoIBicnQZZACK5S /J6N/kAHk8Oq3jMHQAaQL3LsF1BAByJeBrC0hkAEM5bZjTOFQASOt1TS3Y1ABSVdS/ajBUAG Y/foID2KQAUkAknA3DJABPUvRq/rwkAEJz52IaQ8QARKmCJN8nhAA+vCyRHa5kADkB1byeAa QASj6raLqSRAA8vKrCZPQkAC47/wk/uqQAR8RWaVGdNABWWzzAKldEAD1pUt2luNQANKc3av L6pABsyL16S/5UAEYlpgJaeaQADOUq4Pud5ABa2B88/hPUAFXYNvVWJNQAU8G04SpwdABPK0 2js4okAEgepB3vsqQAR+sW1qMUhABKAniSveJEAFiDWShgbCQAQ4Zqu1QghABO/xBFzhJUAH XW9rAzXaQAavzYhpZTRABMqQCOsxWkAEj1KhgdLCQAVhwYsaKIZABJufI0olLkAF2EsVcOu9 QAU3rpiDSLJABB6fcaWNLkAFTn2/RHGnQAU9q14VqPZABTTEaLd6R0AELNkTF5kWQANFY4d3 O5JABRuvg12HvUAE7Uv6zVHaQAZ2wWhONJBABAt9IedE3UAFfW5h8dRxQASv4dcneBhABHYg gcN1nkAFFjXZjm1wQAOMGMbnkf9AA9egWDx9VEAEvPM875SAQAUKrUNY2dRABZtLxms6eEAE 4Oml9X1sQAVcgWQwkI5ABPgfgaZq8kAEXkIrMKzQQAap4sZAlwpAA4VBL+VOrEACIJGZrqOx QAdtOsix9HRAAnWQnvCijkAEz0kOsqvqQAQpBcVOsYRABkNzDbwaCUADm+oWhmaLQAXovVyI RVdAA5thnfTfe0AE8VOwiFACQANJ5YE7q15ABY/R9mHmnEAIYromUniEQAYMHWxzMRdAA3li 4rzwQEACdoKXst7IQAejUMSAQJ1ACIXiZY13C0AEONUwnG0KQAPPeRKHHeNABgS51RteikAE VxvNY12kQAPYB8NvG3hAAy8MTbTs4UAEU66x72nRQAXwHDpTsI5ABYjabiCrdkADm12kpmgu QAXtKuwpLiFABSiCpb/4sEAEbTIy5w7mQAURlGbFRyJABbk5IhESZ0ADxtiNtD/rQAN7VmUp y0hAApB/nbHfMEAFeqRJN6/QQAT1x1SE9+NABLNuVJ6WikADYKqUgwkCQAQXQL5bX0BABM72 Vv81IkAFylU6ozotQAQoWPqdRrJABVRT65zytEAHEMnqJXd4QAWFpK35vDtABKJ9gKtBRkAF gcaCPdtCQAR6mfROO7BABF4X1wQyoEAE5Tsg8bdkQAGvtJpf8yJABAjtNXQql0ACYVLv8vYQ QAOqhMm/mHRABceAVMvg00AFGDX4YlgjQAXSvLIDnzRAAbqMzQo+T0ADpm2VlDuGQAYdGXBX 
JIRAAr9Ac+WGSkADXDNhotRWQAUrDaXw4KRAA0i50CjybkAENsgooRJWQAScNDAP/t5ABLgQ 2lNl6kAFGzLG6nfOQAREhNm2o1pAA6t9bIUO4EAGQCTHcC8rQAIPRbC7yrZAA3ta0HGAfkAD /afAOfy2QARQERPQQMBABEkPnqKCcEAD6g2OaRS6QANgPp4yRk5AB1dgbdgHr0AEGBSWgauc QAJyQHnx1rxAB3CBwXmdLkAHESTJchjCQAV81dnikyRABMhWY0YveEAEHogmZZuoQAT/V5Xw 221ABM/5KLkxT0AGf7ecCdKiQAdRdM2M3tJABOn4HwAL20AHGrC9LcZfQAQo90+NctlABmNc LozMFEADPlCkGh5DQAUA0XGPmDpABUkf3u57XkAIaxhQ7sIMQAV8CW29ZOBABUuyPMLA9kAD tT9GcUDcQATaCE4ATyVABpBzYGfmbEAFriTNk36YQANAjp/c/XxABE1pUeRRFkAGHPrv6Kgh QAREmrSvWgBABVE174HzQkADzrwrK5wyQAUDIUeq/nhAAutrd8AjQEADWvV9bFqXQAghRZEe O5NABUhWq40AKEABlohSXkqnQARN+/DQZmFABWt4nCFDyEAD9Wm9CR8eQAZpGkOqK1BAB1pj ZvVUOkADKz5NdsbaQAUf56ru3cJABlfp2hHYekAFb1BN7cAYQAbAFOVs7+5ACQfyRDUMf0AF CrD0H4tqQAc7ClxK6KxAA+acoUJZvEAG3L7EGZxrQAbCxZbS4NBABNHNqjUdjEAEfZzzIxIG QARm5/pIRGpABOVDJf45eEADZ85BKbcfQAK4lwPTjNhABVeW3LVbZkADLpCxfJUJQASQOO/b MOxAA9PIeoBhjEAEzD667VTCQASPucp1m15AAsn7iX+x4UAHyhcYeVKIQAdLqv1BB/BAA8hH 60qeskAFW4rtFgHNQAMrWHhEE2RACA6XltVG0UAICKVubf/EQALYWMn/3xNABZaCf0vy3kAK YUD9e2BgQAg1XEYuKetACEOlIkfQuUADfKdWDYBAQAPAOidtWyRAAp+4/VRA6EACiFEwIJWO QAJVUK/NRqpABCaYohOJekAFtIAx803IQAYPx8jf9G5ABvonEhG7EkAHMrebODP5QAOMqJek kihABKZ8xGt1a0AFhFcwpiXoQAga5f8g91dAA2ecHtWFpEAHuCXa0JCAQAZOf5M6QeZABwK6 KAYKJEACan8RzwyOQAiPcNsCkdRAAwPv8YeJiUAEHLjpBK2eQAUX3afRWB9ABY+oswmn0kAE h9kqNABJQARLb7USuv9ABGF2bwRcvEAFGFbedIbOQAMZUqTISYRAAZ/Y2LGNFEAI6zHbE9n2 QAXCzZR3ILBABXWIDgHmMkAH8hxzMO8gQAPF8zl3HwRAA/PITjNAQkAE4tLL4TKsQAStpLV7 EcBABMnFLFJC6kADy38RnA4RQAdOi8MwSE1AAyKt9ZYS6EAHr52am5bbQAMuJBsXlkBABYUe zdbrtEAFa4ErZCD2QAToJF8iJBhAA2a4AgEzgkADmZQs9V+6QATUmkDdBn9ABOQCDMX4KkAE XMtdvpxeQAS9dKS1SshABBepq4XCZ0AF06mu70H8QASbTM+jCCJABbDnDqd7S0AF+S/Ua2s8 QAXenD2oA5lABKfLqLV7h0ADU04j5LDTQAS5bUwzYChABpay/TpMSUAEyhAJFDb8QAPRs3dL Y4tABP4PUCV7BEADm+4xmzzsQAVjDsZj1hxAAc0cEFeJx0AIAtSaINOzQAhanL1PUSZABcs8 T01YtEAFXqmNF1h6QAWs7J+kPp9AA67PPplRFkAGXzGCj+P3QAUPeA0CdpdAA83cOeOuBEAH B++NWwwiQAaLVVGqfpBABXez0ggFoEAEjPqeYfSfQAP9dnLcfThAA9hWybt1DkAEVEjL2cSb QARFgrTRXGRABoqIoqRf5kADMY9CWbfgQAMuT0pZOFBAAbtqkVqccUAFa4OkrdziQAQqQo1M GFZABLCfNxW5ekAHKsHA2MH8QAbiZGd5z9ZABKfPKa0aXUAEVzkODN2UQASfu7aPMIdABNSI zaMOT0AGjWWvlmkgQAQFbA8q2vJABIGYxDb+eEAD8RbVsuHJQAabk18ldj5ABukuyANkA0AD UAqKg7a2QAXMuH5GkiRABL/lo9ip4kAKMR3M2qPAQAjRME2omYVAA8CPXY2WFEAHYISv+bd1 QAL5JQN1fZxAA+b1NhzTxEACVoXpKojQQAWcJQkCQ5JAA+729iOouEAEAZ97uAPwQAVa49du vZpAA33BL/w0BUAEbDDrAiXLQATpbwFcRtZAAl8F5HQIRkACo6wGpQoKQAZ6IFywTyhAAmTX JDrbQkAHzsYCVPWgQAWyM/iByNJABHX9SzcLckAFWeQrtoNQQAQ+pYdGNEBABbKobZJMrEAD h3+fGI52QAVELyKMJwhABRbtUot3pUAEhhIm1hxiQATWHXtHFFRABpmnS6S1nEADO3OLMIUQ QATMmMNkT2JAB8D3Wq+G5EADTioSuA4uQAbewtBkX15ABPNBaRkRKEADddeA7uCGQAKk9uhk 91pACLl0e/+OO0AHl5EBz6xaQAWQMTdrXXNAA2REOLcLBkAD7gGBwyMYQAPf9H/4u19AAt85 nU1zLEAD9LAe9wiXQAX37E8uKylAAutAX+dEYUAEDFsJW9RDQAXDyn+M8shABJ/N33d8wEAF ik9TdAF6QAVM8o29hQxABKDd8RYqtEAEHtmGfo+qQATaiGhnlzpABqBdlrjKgEAD3YZugp2w QATTJ1LWpQJABcloEkHP4kAHn1cgXDz3QASwI/BG5DxABbTfndkzakAEnjygAukyQAQB2r0W XDdABgevnVvNoUADU2wL7EQIQAZOT5++hQRABChvb7itp0AHY6Nttxz0QAckDDARzLFABZhg udSXxkAEFxWXMPnLQAV8AfQduoxAAowaCWgtmUADWuCIaEWkQAWf6HIQdjJABblteOtVxkAE pRUG8xJmQARLnEPecTpABWh5Hoym4kAEP+QqrIqwQAYXYOcRYLRABSsV8DizDkAFRRLGomWq QAWFj8w27s5ABMKax2x1FkAH2GWlYQhjQAk5rj9+9bRABC8bIFrtHkAEuxrUSU9dQAYzRXDo yFRABuXA56e/BkAFOa6rPv1yQAJsllMfiUtAA2DoQeCYsEADUG9WpEAkQAX/Xcb6BgNABB/s 1qtoyEAEMbHqLF1YQARNi4Os9ytAAj66B5YXdkAH7MO0v1DqQASNe9jdKNVABIYgH3rih0AF x12Ew0X0QAS7Lqrm0CpABeFQ+m9U0EAGfNlgsteIQAgnzSCkvJNAAzZELpU5dEADKA7Z8aL4 QAS+L0zD3e5ABA0OelnJOkAEcNvBeGJaQAQcqsjgVUpABWYQ7yDlSEAEusq32JXAQASlS9u1 
oaxABGX8glDu/UAFQYZz5GxVQAH08XX+ApRABAzK3arJvEAHepfjnGM4QAU+O5vzvaBABUn1 1rpM5UAEBr6lWeMaQAQJs2VzBfpAB/eRZBMt50ACnWDJskmmQAMt6ov1djRAB9OKmQVwtEAF /BIg+ZciQAM2MkULVfhABQvwADBBZ0ADZO83s1VGQAgUrJOH58ZAA8IoTMLk3kAI+TExla5/ QAZpIr8q36ZABJBIQLcVV0AFCW7Vy1R0QASlEs4zI0dABEMqFXkgFEAD6wP5pDdxQAQAURbo +7ZABODpsuK+4EAEUmX5AN40QAZbblbTVfBAA+2cKyZTTEAG9S1ee0cDQAW4QCZRmtxAAw3L V+qmnEAGGxNehJiBQARGm5CYz75ABWqE00JTBUAFvxadOn0EQAc4XsRK/mhABZw9Rz3z1kAE osY8Kb22QAjd4ZdymRNAB8V9HOebDEAD1jn9XMWeQAah77pG1yxABf9/A4JQwEAFtTDvSmhZ QAYboLidDzhABOgXgu8CvUABQ61m3G3UQAZxN4opBeRACMUQAiEGvkAE9HdhrkQcQASatkvn muFABmLPVfRElkAHiK27ctCbQAcP3TOQuhtABBSy845ixEAGCiZ/8A/mQAWKxeDjqqxABNeM dCW3xkAECOxx1vLZQAXmE9l6tilAA8FHEAzsHEAGa7Iu0oPIQAQtqBRwuBFAAi4UGiQzckAJ p++47ew0QAOeBaf5D7pAA4ZAtTak60AGP5IoyY2gQAPjYHxWo8RAA4shm6+MA0ADgttbIbhw QAbZuypSv9BAA/BItWyy8EAFiP+2NuHnQAOgq9gaVHBAA02+7jKQG0ADY1aTrm2hQAeBioB3 DM5ABiB1MuvKTUAEn2G4gMvyQASRbtBWPvhAA5EhB7EV7kAFvZ7WfQquQAbgaPsLih5ABKRK I9bF5kACZYqXfzJYQANlWsDKR3VAA+L2arjFakAHSNJOFusCQAYGEWCwscxAA4dmDIvU2UAE ii1jDfuSQAI+ea6KTYZAAqtcwbgXYkAC6+VUT7rOQAUv6Bo+5mJABbDBkkPZJUAGZSrO1uB8 QAZlKs7W4HxABIaq3zb3BEAGPEGSJUfZQANmOc4LZPJABJWkIj7EnkAEWew1H128QAfiYnqC Xg1AA8zK5N4TpUAGRkhiPAmeQAb6HCRDi7tAAl0dhDF5uUAHJSjXy7qxQAlgq1GkADtABgOA eG4fMkAGDGu7My1aQAXzqDREuxxACSgUsAX3akADfP63gwkCQAXUokceJv5AAs3IWY+fOEAD /uW3o3/0QAR20m2hdxpABQFXVejWEEAHCam0IrqKQAR4/I19RRtAA1nQdGVolEAKm0+U3FkS QAkXit8wntNAAjvbU46A5EAB39uT/U2RQAl2cnvHLANAB4UV4IvREUADzUDrHXsyQAJ6J4wr hPZABOGcFcCEIUAGKK0bU6g8QAWDKG0qJTFABBIMXG8SWEADao1T4Ec6QAZdhhGKJzxABFCp Z42i8kAG9BmYX7zKQAijogUOh71AAVRZF9kbj0AE1s3B1vlUQASzMqekcA5ABu9ez+MZq0AE 7h6TmVS3QALP4IiU78ZAB91X+0oyGEAGUkRvOUQhQAMeRn7vYr1ABhPj4fa+6UAESLziCiSX QATFPQ31nSBABWdgz2bbukAHPb0VodZ8QAUMhQQNznVAA8PYB64iokADfQ1cRlwoQAe8hSXZ Si9AAw+o4wk360AEuOEttXMUQAcwWdd4rERACeQxSNp7HEADFD3K35qAQAQFP8FHF5hABm/9 t+qrkkAGKImL1HKeQALcGFeBw5tAB2n+PQpLDUAHRgGJfgiwQAJzs/23OK5AA9ulqT+V+EAD DZxGYZ/YQAbggOafuVxAA7cGJX80CkAFr/vjNWD7QASr0jkCIRBAA/DpqVFD5kAFDLnWU36C QAS14NnolyZABAAaVR2Fj0ACxh/474P8QAbbvfudAYpABeml7gb2MEADsrmd+xJYQAYTw9/a rHJAA23T82gjAkAEjQuDjfwsQARxmT6TVyxAA3hCpDm+IkAFc3uoGql4QAQJKzMWMGNAA+CM BuGBZEAEE4PfkWM+QASQSmxDqF5AA1rR11/EVkAGpOFdmqzcQAb6lomrQcBAA/JIaZZsQEAD n3IRqKcwQAPKH3vepKBABYiBtvLOkkAFjCYe+qBdQAHEpPHelS1ABB1iCaltJ0ADcRCsfFFL QAZbsE/gJEZABWGVRlYNlkAD/5kpmJe4QAQiM6v04fhACHyntr9NUEAEIuEqiQ+MQAZHL9/f wLhAAyIeEJs1pkAFwRL0Nh46QAhzu1Z+l7VAApduFasFDEADfhQtWa1aQAM/25gWtoBAAVZY WExGvEAC8GO8tPXiQAiZWSZ3ISJAA5mI+Ohdu0AFzkNkTzAuQAXt/BavFJJAA3l2QO8isEAE 4jDgCH4OQAa8Q8HbJTpABiqFu8WjLkAGZDLjz3PJQAZBrtqjMEBABAMtO4KsJEAFUV6Knh9D QATyD6EqI95AAnh19DPriD/+NZt8DZQ4P/9R5bUi66pAAGEUg3Il5UAAiZFZfbeWQABq8xf2 ESJAAO/93UEUZ0ACXCU1O1y/P/+wt7zAp25AAmnfGRpKMkACD3U/oZ68QARseZKNpDlAATLz YnzJoD/9+AlY1ebQP/5xWaKrksVAATDWy3uTdkAAwfcI4qDOQACiQbBsz5RAAFQNQYPXQkAB ajzoihGrQAHfClWHaWFAAYPbIZb6QEACMbK0thHwQALFAA3i9gk//po1T1KJOz///vhVNUTD QAFebMW1hgxAAOzxQ6CU9UACgo/VnEDeQAKHMabWRps//iWflg4GmkAAzn62pnT0QAEjZhg5 ANBAANXrlcnji0ABOZIEs35QQAEd0u8tBntAAEp5AmYsSEAAWA7qKYYwP//BOClPKupAAbnx C4eNij/9u0JlMfh+QALMbW8/C8dAA0/m65eMjj//ukKG/5uOQAK3NFB+XQdAAggeWD/XIkAC Kr6q4Wo4QAEkB0x/UZ1AAnXCK6S1qkABZKQ9mOjWQAHAX3M4R1dAAXeJC0LWWEABiEOcHkcG QAHp4ZIe2PxAAY8VqvKSqUABuwGf82rsQADEisecoOxAAF/6KHqyXkAAvgVqCASMQAC+mDNI yPFAANbT8s1Rv0ABgEfkYi/aP/9QCPcRGrQ//8CoVlQWrkABcKNbWoUCQABrWMxXKWpAALbG f/o8i0AAgUhrhW6rQAEmV7vQO9hAAUcqytk4nkAAQLmqFjHXQAATv+n1CZ8//4Iu3PGWej/9 HeW11H8wQAJTJqJRMDNAAagbx7w4kkABcFRugDKqQABDdb2mgdVAAVfjwFDnfj//wLZm22NC P/6BIpRM49o//z5LY5u2FkABaEhQqUblQAEX4ffsuZ5AAZb3FjRwl0ACnTN5k60oQAESRmbg 
QytAARqNz+ecdkAB9RFE1/jMQABcKdJyp/JAAcyGgSmsakACosw3IOB6QAONjbwzybRAARXh QJrqvEAAbsNLazTuQAFoengJIUBAAOUVVjmqBj/+5/JVUPDLQADyyUOML99AADJ418If20AA kWIaUhg8QAFd53+YTrJAAkq7JAD/qD//2RB1XgDWQAEJ5gsyWb1AAi4LEAkrxEAA0ymV3Aev QACkiJYbs/5AAdfezghPfkABvZ57Oa1CQAFovErmQqJAAe8sOWlNij//OXRma2XQQAE0ZBGd 5r5AAXWgUWZ4nkACHuaaJzsbQADewgfDBXxAANvAfE76aD/8qPvwhaEGQAICOOh9GmY//u1m Ypc6gEAAVTloxDvkQAHvdSxd2TJAAiY7lfnULEABphX3CM3mQAE0hFNr/uBAASnYmMoe6UAA h5v5GHRAQADcpZL692NAAPLEal8pX0AAGmPc7Sc3QAAxg3ta1+RAAOPARKI7rz//WzqIcbz8 P/yms/ohvdo//xCpDIZy6EABrT5Tk9oFQACtdEPd+4FAAgY5PZttlEAARmMtG4U/P/+xCLum I9BAAJuRUbn5kEABUvQt9RwEQAFOZ2KJyJ8//7BOWwuHzEABTr+Mk4BKQAJ1jxpLvWZAAaoi GjUZ0UABufYoTwNXP//8PrA943BAAQgVcqwI80ABIV50NMjWQAH7rpJH8ipAApDu7xzB3UAB GgYn0oiwQACjdtPkNg5AAjkuHtAPEkABe3CXHOdkQAHrF4Pogjw//iZaM89EFkAB3Jy/zVTs QACw4k8KWCZAAS3Vi443d0AC4lWuCCqUQABZgtr31ZhAAeTKREeVRj/+qac7MfyAQAIIHgee c9xAAQmGI2fjTkABFABg9IgJQACXR5G/LzxAAS0Qy2+VGkACJg+1tUoKP//TJIVpz19AAwcu RWHP+EAB0r6lJKQOP/7ZSwAlYYpAAITKn0047j//oq8Auub3QAH80H7NIo5AAXvPO98YREAC lhAhmYshQABUZ1BLaHhAAGHy/fmiJkACWUn1DLddQAFkVCnmN1ZAAPg9/V4G1UABiRPaKlVl QAHh9h3FkOw//zpB0JjQFkABOUmjiEPrQAF+vw3jppJAAxugtX67aD/+dnywcdpLQAJpJsSX ciw//sJJPA69HkACiMWaylZYQAL1Q474+EBAA1ZUVEnkMkAAG0NurJHNQAA+VclYMIdAAQ7v TaqM0UAAF4g/0oaeQABCni5DQatAAcmGSyD+JkABcH/w+C5mQAEPgR9TWIdAAAr+PByLxEAC /77+4aHjQALoX4v2cZBAAfY0JiP8HT/+qtIOZELfQAB8HzqLyt5AAHg8UvKsxUAAtLu+bmnV QAJ3X4B9r/VAArUHQDkzckAAki+AuBl5QAGSpIeAIjhAAMSLFlQ0n0ABkRlfAof6QACVXxOP jGRAARK6xJUeWkADM25yIZ1hQACAeL3zAVw//2w2w/5/M0ABaB6e2AG2QAFqsrPIb4xAAeOl 3rbQHj//Pi57qnSMP/6XQO4OpAhAA+cY3yuJFz/+IkZ1q0IZP/8FJ3u7extAAgDk4MGEUkAB P+mGWovOQAGg6Gh5FI5AAK4PTiCFuEABnpC2GfWuQADdPNpmlgRAAH7QHr0MQ0AAWdWs+tIo QAD2qcjwbQhAAFbDRWHAlEABUwCFuP6+QAEb8lMmcUpAANfkLzJc+0AB4qeF95FmQAErcs3f 7e1AADFivjHajEAAE+7iEmZWQADK6p/kj/pAAUmeqtrcsEACNtLFHcZJQANrgUK1U6s//m5Y WQ6jqj/+/vvYMsBrP/+F+X7kjUU//Wydlh7oDEAEKF5sFnKmP//ybGwRfCJAAcNOwivFUj// g0luUz60QAKZlnCOdk0//1DY+yD/r0AAeLLhtUJ2QAHg/asUpdhAATWpK6Vp8T//ABxRcsSc QAIlz6Pgh6ZAAF5cdL+EGT//JKYwq+J2QAHCFOwVhhVAAK6eIRaBHEABCFCnmjmVQACsCnih qSA///vAYU+BmUAAjMF/gCu7QAGIBICuj6JAAKuL5BWFAD/+qVjaklHBP/29sOyGeZ5AAGaK C/iN/0ACIl7e0kr8QAJq7yCnD3RAAXbs785hbUAAb2SAwuEmQAGf5YNr+BxAAA3c3XgwFkAC ZOAXhZU7P/8SkO0szupAAtRCGire+kACwptujFlcQAHlDXIuCRg//45s5mAf+kAApZpg59Qc QADyXuPJvFdAADX49Ymfd0AA7L2NWRX1QAIKzpwf/BZAAWIS6xTsEj/+9YxV98RRQAJCHlSz XLJAArLifPrbiEAD1njK1RAvQAIx8vqhHqpAAA/qxFHwjD//VlfhUjMUQAKonxFH+zRAAmir PXi5IEADl3Zk4utbQAH9jAUeq8ZAAjRdTjKRdkACAmZWrY8XQAEZQoOVZ1xAAVeE0wetdUAA Rf0RS3m0QAE7C+EagzVAAVU1D1tFyUABe11ZO0bjQADt6ufYzetAAC8cFSsnskABAwM8PtsE QAEDak/6xL5AAZz4OYy26UAAgbCYbEUgQACWAPE5cIpAAN/cemL6JEABigndr4kxQADtOyFz cWZAATz+rdQ5fkAAU5O8NfoBQAKg6GcUVChAAEecV3HNiUACKPiXFpOcQAGLoYb37Z5AAaSb dDABtT//306jz6R8QAAn1Y47tcBAAei+qOgQfEAAKMihtwn8QAGfoORWboRAAEfCaZsOwUAC M+uBB4JTQABjI4L9eMlAAlpBkc9ocEABGl4PUxhiQACGoC7N9aBAAGIkp5J3fUABuM4c4jS9 QAD2vYBvYFBAAS2rAHQC40AAOAZO7c+vQAGmHVxdeKlAAiXU7dR2mkACIPnqAOaLQAHTkHx5 JMU//7KFuZb5HkABVIHesLXLQAE8Ei1MsGxAAXAh0DSmaEAA86focdfiP/5GknYxnm1AAfyg kH7X00ADZBmwlguAQAG5An+OxVtAAd69fzYqgj/+5YIvF0ucQADP/y/J8hpAAng3q7ueej/+ SAWzzSzhP/zbaVQBBeZAALrAtbbclUABYx2cwFZgQAEFfT1LYiJAAsnosYdot0ABK36zzwCy QAAN7B+BffxAAX7aQimllEAAPy5PuOjUQABZyWsbhuI//2CvxZ/jBkABDkNRXC3uQAB0spE6 8tJAAGTG7O5qUj/9yM7YN0BTQAJQuxLmWYI//txEH7rLlUABmSlIvrAsQAGI5NjOmhs//0mx P+w5okABYkciZl7OP/72v0mNaGJAAc8ENTFqakABB2+vZL5SQALcidWpmeFAAUIuTSzLNkAB WvDu1Q9oP/+HR5GAPMI//6sFr6aslkAAPJTGPr/rQAKATQLnxIU//reTBJZrdEAAFwXiXRtO QAGWtLiY0O5AAFaBEJNzJz//anrYm0KeQAIesxny+EZAAU1kffZWDEAC5VKKXYkgP/2DQl7c 
FiA//ZCrGdahtT/85rpkBK2cQAEJRcZcoT9AAQEaCHgorEACTRhRh58nP/+j62MGJb5AASu4 ZTqInEAAb0Mb4mwcQABaT/PCslhAATIGqvUtykAAvzNDCspSQADcfSBOtH5AAOiZm10GBEAA oRw9Y6TwQAJ1VxooKR5AAKC0vNRODEAB2BYHHeRXQACYBeut3ZZAAC8xA1EzekAAutBTa87A QAA0d34yrPtAACszEo10U0ABcXn9gKawP/9EFsIIoHxAAmoqEKv80kAABtykh7ZNQABaz0x3 DoJAAXfWJrqAvz//kcqOgm1pQAAfCNzG0WI//8+NHEZXHUAABfT/xAV3P/+k0YQSOTRAAaca C5EgTEABGrGzEPEdQAHsvyW+RRY//0HIkIHxDz//MQYeCyQcQARGkJBqR78//U0AvdcR10AC BtMJldXRQAFfm5+v7p9AAQ74MDs6/EAA2hvIWPbcQAFoJCuFBipAAHDb+HF600AAh9+tL2vr QAHM9/tYollAAOD5ka1ABkAE7QkBq8ayQALfaFCiGDVAA6IWop6YZ0AAvJNcoJe0QACeMtw+ jSs//9ob8bJ2BD//Rvburq98QADEq8e6gPdAAN1OZOBXXkACqp3B4Vz+QAF1v9RnSIFAAN+x t9b4OkABRK0rwMCtQACo0KkV6dVAAs79o1J+H0ADCwvOp5ABQAH59s5jy3RAAlVJ5p4xEkAB tzQStsi1QAA6v97DND1AAS0yDk0KNEAB3ehXUH5QQAIGyTHVeiBAABnbMuOBNEABQA2Zj0fS QACFk/6ewFpAAfOTs2SEY0AAbxvuGL3uQACsh9ocrZ5AAUIerwj/3kAAla6J3VJAQAHKDU03 9TlAAO6mn/+ZwkACQ08fo8qCP/2E5CqKDrg//3RS0d3BQEAAcUE27zqLQACQoXrYqBlAAh9E NbnikkACM2HQbl7wQAExPdAIkDFAAQ74nAaHPEABRmfiyLSeQAAxM0dtbMlAAjAuGgG+kEAD IBY/H/krQALZuamtgIVAAZ42oqV3kkABgUsinzRfP/zkB8iqmHQ//1Y9frcQzEAAeSV5Yd8t QADVy9tlWEtAAM3YYB3vl0AB9ZNxDMzBQAFcWSCu6qxAAopeDr1XyUAAyRiK564CP/+npmSl unxAANm2V8cFvkAAyNDizbT1QAC0VQBfErk//zwER0kXCEABwKHHDgnMQAAxKSUXcvxAAnGq wIZAgkABON48TSZWQAE4FvWuNoY//6qxZC5aHEAC7qzOf+Z8QAMuWU41EKI//uyv1OmZXEAB 7T0MHq2DQAGl0k54z71AAb2FGK4/dEAByspX19LuQAHj0n72u0RAAcoSndSTBkAAm1DpDThr P/+ZFYgs/XBAACBvQwVQEj//jSvq7OZmQADQ9fKMx5dAAcL803xT4EAByvBMJGGZQAHvB7Wg +hFAAHhbTF/k+kABiwvz/qZUP/9LF299nvo///q2ef0B4kABd7S1y1xjQABZsX6AUA1AAnMP yfVcTkADufbSek98QAGUq4er38RAAz4pc+s6JkABC3yj0++KQAKmp17/WppAAP51SOv7jEAA wsaCKASaQAG+wL9dquJAAbney9u6mD/+zfpsEr/CQAIcmxNLmdo//rNIGFVdvz/+tImwT9Ae QATxQov/0+JAAwNqbmrX90ACJwtBxH1KQAATLrzt6rtAADJyiMeiYUAAzAUW40iQQAELn9LX jVxAAQ8u39g22kABCF7pZaXsQAIAkCGWquFAAG2b0UOz+EAAz6gaRNuwQAEpmGE6XChAATK8 QZvQlEACi/sccxANQAFYpPgOJ8hAAcZzKK/r+D//UEm4O15TQAG2BG1mHUZAAM0X7XymrEAA 4JJkiMcEQAHRd9iRmJZAARg5lwmV3EACQc7YEdFVQAD6WpG3fx1AAWflA/u7LEAAtDp6kajk QADWh932xyVAAkTm289VJUABVoKE230EQACk50ZBqlZAAVkIRDcV4EACx28GF+xlQAEApz+7 qupAAKHIzol95kAA7KsTTk6jQABSp6xQt2xAA3uK5Hsm3kAAtOJqqdepQASfZDWblm9AAEhP Q0w250ABvYVjE7f/QAFToAPA4JZAAqyjotfADT/9eEuqmxGgQAOERZE3gFlAAHXF4zgp5EAA d+3E2WzSQAGjXJ45rUJAASvKLjXETkABypXi+dh0QAIVBVuj2fBAAKjNJZcNQT/+54kuZyMw P//3WHxNtqA//8mh1X5uQEACephUfrF8P/9KGriJRIhAAFe0kLikOT//WAVX3UKmQABirO17 9VQ//fqlO4h5hkAB8juxDn5rQAIAgWavP45AAXBye/GE3kABFTMOUuAyQACnCdbMTHBAAV5K 6fi87kAAGIOJRwAaP/7UNS0A+dhAAgJPgQ3HNkABiEbCUbaXQAIG/yYcOOBAAvHulTm5qUAB sv+Yy07NQABsa8bggcRAAeVRrMRGAUABKFbpI1mKQAH8v4obWj8//7RwAz9zGD//532J5pRW QAJiMsdJDz5AAKD6jJ2F0kABANdmysf4QADk8FF6YfBAAKGr7k9EwEABGNMmZAfCQAFTHcwX eq9AATF84/wN5z//cKfXqo2gQAIcYzjXaM9AApfQg52uCEABP3ZCvIAwQAEnPXZNLK1AANXe qT1BQEAAbS+N6LpmQAGFT7BzSehAA0PWvoUPED/+IWLjOrdJQAG9BwGlJ2lAACGlqz2wCEAA LROLkoZ6P/78+Eled25AAoQW2/7tokACU2Jwk8LrP/+FLxus8CZAAf94SmQC7kAAt7LeXt6g QACP3hi42DlAAWm8zAevbEACJSAClq1RQAEA2Dym4CBAAIQgc6rAQkAAScPm2vUyQACVjqj5 wwlAASQY4naVNEABGdXU4BajP/3mAVFPhkFAAauBUVTVk0AAaLG40Vj+QAHzpO7slCZAAHWg 4nj6YEABQLSalEOnQAGEi6S4yFRAApRVKINFID/+9dKDzn2+QACxmwB5YRNAAIViY11eaEAB Gu2wHoNlQAEx85kKytw//f2H8utbKkAA8vDR03OdQAA8+cKicV9AADQOha51Ej//eId5KpLB QAEccxxJv4xAAUdsTM1+dEABQGLvz83iQAHaIwlnAZ5AArDFyMnDhz/+wiZR9fBaQAGIHVPM hqBAAMgu32PAKEABAo1nbu4nQAB+gj/w22JAAYKaJWRBrkABSWuAf5L7QAF6VvuB4NpAAJAr PcEEYEADXQ1/kJT8QAB9pL+UqQ9AAnbpD1F9IUAADIQimjSuQAB3R5YybtZAAQsAvEAlEkAB AzNviofNQAHPQuTCiIBAAW1oyzWRJkAAtt5m8ysIQAHcsUs3YtRAAah+zFpCcEABEdNkj4tt QACMeIrbJNpAAsBA0huhez//JcE05pZTP//sa6CqAxw//4zphuTWHkAC631j0FRIQAIdlzFR 
FqRAAayHv9fYc0ACFQ3mvZf9QAJaFlWf6chAAcrRtfJihEAB8rCbwc7rQABYJhh9uIJAAPp/ JwTJr0AA1Ty34VbSQADgrUjWp7tAAPnqlZj7WEAAa38W01eoQAKfCuA5/CFAAxMZrVjuVkAD DwRwDgooP/6hhTAO1MJAAH1nZ82prz/+e9JT+BygQAFgoYPSo/JAAWnk89V0ckABey/2o7eo QADyhm+XV75AAS06XoYey0ABP4elrYVaQABgdCwmLTlAAAQc5HGrvkAB8s/HvafYQAFUDy56 vj9AADsuWKTshEABft7ylK2GQAHIVkOO0FhAAFuWocfP60ABRARjLO5YQAIwb+ZYxAdAATUF SiI7yEAA02hW41i+QACaPZE5GAVAAHFRWmulGEABPvc6LUkkQAD6Lt9YvMhAAXI3Vp4/UEAA ep9IPL+ZQAGJMrB7czJAAS6qCI9QnUACcjA6BA00QAKFohnzBD8//1BZmkrDsEACuWnumBIA QAEZamfatt5AAO+5w6qKWEAAWZWRfC8kQAE+NE6b/wZAAIir9RupcUABA7xxUp08QAAgPD6Z CV9AAANclZw7GkABTEjRLXFiP/+y2fHrtKxAAbLSbTlas0ABXRUiHRZaQACMcO08ZBtAAS0Z zs/V2kABBaYeGdN6QAE9OVq1sOpAALIGXdy4GEAB0akuEsJ3QAFoWdrl3E5AAcPjCcGEakAC ByuPOqXRQAIahM40dAxAAKTCTz3H/EAAga3OTiYAQAAAAa3xe91AA/nUx5yoj0ADc03ks09X QAKusOhXAVJAAr3v5rSC20ABlVb1lA2NP/7wcPRzt7ZAAiKvUAeRSkABC8ieJrarQAPHh8D9 KdBAAziyqJjc6UACDVuVwkQBQABkUOYwaj1AAoEM8CjsREAAAzACnvseQANJvuumI+5AAQiO VoluO0AAVbsO1DpYQAHGVmITdxhAAA7h+feo00ABzlS63Xd0QAJrrN0Tn/lAAvwqxDM9CUAB Bc4v3TiOQAGPNCIyaBFAAY8F8rSv2kAA/LusLFY2QAE6kZhtIS9AAOAQzUq1KEAAgR3dzKb8 QADG6u/hrnRABA9IYw88az//5tFaQTdiQAI7COjCyK5AAW4/dQImJkAAsQQe8+SCQADglFvF eRxAAE3upSXc2EABMtoTmVikQAEeB1khRyNAAHw7+aZ+o0AAXrVYqskpQAD7kD9IUHI//8O9 cV9BZEABqxtl1rD+P/9cP2qukapAADSHOLK+WEABdz5D12QKQACb7RrfmcxAARadQD7tzkAA aRXllih8QAHs1S/QhJY//86y4aBibEAAADzYnZyKP/+QFc38BKZAAySv6/vvSkABzdtBs3uG P/9xwVMSHRg//3HBUxIdGEACm2ILrle1QAESXc1Zyc1AAEMVW1nBdEABotDD6LhuQAHbLbbf Cf5AAlRfsGjLjUABK9v/an2gQAGc2gh52nI//vIQKBiFU0AAVFlU4/JqQACQlMSD/vhAAD32 m5Np1EACd92QiPyaQAGGxbSdtT5AAh+vx0UL8kADrYrov6mlQABws//9WL9AAhApiOTQtEAA i6RMvxoyQADovn2PaC1AAQsWJKa2fkAAUI+DlxTeQABShyPd7fZAAqS2ev3EE0AAXwK0iXRo QAISdTpJirJAAEUk5j6oWUAAHg/VueIBQADuBOnJr/ZAATCnCOtCBkAB/1hyQTnNQAAS8ZVs P3ZAAHuH5yw3ED/+6PGlkA2rQAJyLYP40bY//wYPynN1+EABgvzHRRZ4QAD4tb5BywVAAHYU WfYAkkABqnO3LV2PQAHs/YhnZSBAApKfd5B4HD/+oPZZxQMKQAFfoiU84JRAAIHGEEsbKEAC NDcNzVatQAKPQ3CcMyJAADyKfaUFTUACHsxCQO8dQAIdAdFiEbRAAEbxNhl9nUAAipHSIG7A QAEDJ9FsFThAAHYycQdz6T/9/xHmkkFZQACV/dewKytAAWN3gkEmE0AAzLZ0isNlQAGbmPGz YbRAAWzOuVREhEAA8vxbUB1GQAEa1Ue7BHVAAruoXlTnrkAC0Zm3N5VJP//j52vaXUZAABE3 Mbv3tEAAX1CU82QGP//4Iju48ow//9RzJm+dJEACVK/3FRu5QAJDYPD3efQ//h/vNYm4hkAA nSXgl9FVQAFKVSUrf1BAAKdjcykh6kAAtknzl/kQQACokvg7LQBAAEvYk3xOcEAAbUYiPUfO QABxlWC2WjRAAvisoch+zD//pPJEQy1yP/+P8DMfGrhAAghGKuvKlz//8lfGyMhQQAGylisG l0JAADnclZ3NSEAAx5gMUlvvQAC6F632e7BAARycuf7AYEAANnPs7KD0QABV5aK8JZdAAMA/ iLXnVEABJtKvecV4QAC/q7p+IkJAARrlJRKkm0AA59GnVATMQAG9thBT9oNAAo2c3o6GlkAB nWznI9g3QAEPHFRYU5tAAQeSD6HiSD//VhZTqd9gQAH95GKp1u5AAKj2RDol/kABG+kzlKUO QAAFZ6O/R+RAALZMTkRIDkAAZZdJ7MsoQADbxIzBH1JAAdN7VzkBNEAAQosOj5iaQACWnpYC 9rhAAylwFLJQjz//tvnJYZNQQAE2MjqQ/+5AAtb3QZ2BfkAA9CmGay03P/4FYbuolpBAATYB pX7gQkAAgEF6WaJrQAFbC2O2+yhAAH4FWY+/lkABXoxjxIFGQAEN0eMEaYhAALVMfUnLXkAA 9iJLc+3CQAHBzpRA1Q1AAeR4KaCQgkACPTXq0z10QAMXTSusncNAAHzWl1IIJkABImYC0kak QAG1hUIakmJAAeQeE57DWAAABAIAAAABAAQACQAAAANkaW0AAAANAAAAAgAAA+gAAAAgAAAA /gAAAhMAAAADAAAADgAAACA/qQv9LeIWyb+uWWyqUA9QP7GREVrajU6/qkhYWDHdrL+9tsdo V7uMv7fBKJv14eU/t7YcNUPKsT/aTE3zYuljP6G3mVtQiba/vp1jR9XXbb+gLW87Hw2mv7FT EHhMvSa/sxFLk3EUuz+5A6XuSljYP9idj04idy4/0bL+IIz1RT/e15NGvsbPP9VSjdQVlsE/ 2LoPMwLTfD/U+wQrF7IQP6aKY3Lc//Q/zQFaAA69qT+4Rrgot4/8P8jynnS82Ey/nP2U9Wg1 Vj+9sDYbFcUYP4hK+zwAbv4/1HzUh2soqj/D6wM3SdjbP7XPnkuMip4/xNfwCK9VJj+1oylZ VifyAAAADgAAACBAjGtR4vOUSECNSIXMZdptQI1OzHDwC35AjCl3nEfMhECMV+BQfP7MQItW fnwINa1Aipoa+DhXG0CJ4jnKlWa8QIp1d1J1ZG1AjUahmmkFkkCL5UfgNu4DQI2abt7PHcNA i6rgACII5UCHF8533XLIQIiixsC750JAiRWGP5WHBUBkAZfpVM1eQHdzaroEDUlAhbbxJuuv 
P0B2G3GedgHaQIurDxL70Z1AimK5GPqZQ0CKb2AkeCpMQIqX6UZQkVlAjPSaSuWUT0CGV1Ue /HaIQIgmSdq8SDtAhKKy2922NECIf2FmaiksQIrvm5gPU99AioGA+OUNHUCNhdwLjHhJAAAA DgAAACA/7iIjiprtpD/uhgpFxKUzP+9C2+HteTo/7WeuRVdLxz/thpx9PZyZP+x8NVYA5gc/ 7bgj+feDuD/tVJY/ijqGP+vk6tOVjqc/7oFMRnLCAz/tFUhNscONP+71S5Zdfro/7NXCRLX3 pz/oSat+OPKuP+rzPc6+kK4/7rBYQ1wZPj/h+KYWRg06P/BP+OJ7Frg/6GyNUDLmvD/v8/5X aLpuP+3Om11xZMc/7CaGH4VipT/tF1NNe4n9P+1mF6TWIdc/7s96p0MNpT/ndID3UIhJP+lA QASVPCI/5qSPlaOkhD/s9t8Xe5GFP+zJwNIA1oY/7It81CYJlj/uynC4minDAAAEAgAAAAEA BAAJAAAABW5hbWVzAAAAEAAAAAMABAAJAAAACHBhcmV0b19rAAQACQAAAAVuX2VmZgAEAAkA AAAFcl9lZmYAAAD+AAAEAgAAAv8AAAAQAAAAAgAEAAkAAAALbG9nX3dlaWdodHMABAAJAAAA C2RpYWdub3N0aWNzAAAEAgAAAAEABAAJAAAADm5vcm1fY29uc3RfbG9nAAAADgAAACBAIo9p IwKZtUAiFODmfqX0QCJ9gz/f07hAIivLIPaGY0Ah/mF3uo/2QCIKPtm9ejpAI6f6zxjPuEAj 5A24wogSQCKWSsHOdepAIf/wVqMZrUAiHd9XiZZsQCIXkSJTQfhAIf8U/iPR10AiYTYp4dWi QCJMKj4ZQmBAIrY1620ld0Am2UearSwhQCfLut3EHXJAInnQknjNHkAnPQHwhOqxQCK/nvQM N9lAIvC0vn2bIEAjfxLkE57SQCNQekpyV6pAIqhSQ65pXkAiEyXUlQFYQCIKkzB+dRZAIkwO R+LhtEAkQc1jUnv4QCLThKy/B8pAIxnw/5wxQUAiGzyXdrKUAAAEAgAAAAEABAAJAAAACHRh aWxfbGVuAAAADgAAACBAWIAAAAAAAEBYgAAAAAAAQFgAAAAAAABAWMAAAAAAAEBYwAAAAAAA QFlAAAAAAABAWMAAAAAAAEBZAAAAAAAAQFmAAAAAAABAWIAAAAAAAEBZAAAAAAAAQFhAAAAA AABAWQAAAAAAAEBbQAAAAAAAQFoAAAAAAABAWEAAAAAAAEBfwAAAAAAAQFeAAAAAAABAW0AA AAAAAEBXwAAAAAAAQFjAAAAAAABAWYAAAAAAAEBZAAAAAAAAQFjAAAAAAABAWEAAAAAAAEBb wAAAAAAAQFrAAAAAAABAXEAAAAAAAEBZAAAAAAAAQFlAAAAAAABAWUAAAAAAAEBYQAAAAAAA AAAEAgAAAAEABAAJAAAABXJfZWZmAAAADgAAACA/7iIjiprtpD/uhgpFxKUzP+9C2+HteTo/ 7WeuRVdLxz/thpx9PZyZP+x8NVYA5gc/7bgj+feDuD/tVJY/ijqGP+vk6tOVjqc/7oFMRnLC Az/tFUhNscONP+71S5Zdfro/7NXCRLX3pz/oSat+OPKuP+rzPc6+kK4/7rBYQ1wZPj/h+KYW Rg06P/BP+OJ7Frg/6GyNUDLmvD/v8/5XaLpuP+3Om11xZMc/7CaGH4VipT/tF1NNe4n9P+1m F6TWIdc/7s96p0MNpT/ndID3UIhJP+lAQASVPCI/5qSPlaOkhD/s9t8Xe5GFP+zJwNIA1oY/ 7It81CYJlj/uynC4minDAAAEAgAAAAEABAAJAAAABGRpbXMAAAANAAAAAgAAA+gAAAAgAAAE AgAAAAEABAAJAAAABm1ldGhvZAAAABAAAAABAAQACQAAAARwc2lzAAAEAgAAAAEABAAJAAAA BWNsYXNzAAAAEAAAAAMABAAJAAAABHBzaXMABAAJAAAAE2ltcG9ydGFuY2Vfc2FtcGxpbmcA BAAJAAAABGxpc3QAAAD+ # psis throws correct errors and warnings Code psis(-LLarr, r_eff = r_eff_arr) Message Replacing NAs in `r_eff` with 1s Output Computed from 1000 by 32 log-weights matrix. MCSE and ESS estimates assume MCMC draws (r_eff in [0.6, 1.0]). All Pareto k estimates are good (k < 0.67). See help('pareto-k-diagnostic') for details. --- Code psis(-LLarr[1:5, , ]) Condition Warning: Not enough tail samples to fit the generalized Pareto distribution in some or all columns of matrix of log importance ratios. Skipping the following columns: 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, ... [22 more not printed]. Warning: Some Pareto k diagnostic values are too high. See help('pareto-k-diagnostic') for details. Output Computed from 10 by 32 log-weights matrix. MCSE and ESS estimates assume independent draws (r_eff=1). Pareto k diagnostic values: Count Pct. Min. ESS (-Inf, 0] (good) 0 0.0% (0, 1] (bad) 0 0.0% (1, Inf) (very bad) 32 100.0% See help('pareto-k-diagnostic') for details. 
loo/tests/testthat/_snaps/E_loo.md0000644000176200001440000001324515027034070016712 0ustar liggesusers# E_loo.default equal to snapshots WAoAAAACAAQFAAACAwAAAAITAAAAAgAAAA4AAAABP5YkyJk2Uw4AAAAOAAAAAT/CCf5d2lYl AAAEAgAAAAEABAAJAAAABW5hbWVzAAAAEAAAAAIABAAJAAAABXZhbHVlAAQACQAAAAhwYXJl dG9fawAAAP4= --- WAoAAAACAAQFAAACAwAAAAITAAAAAgAAAA4AAAABP+99QfBwyEoAAAAOAAAAAT/CwzqDy8zd AAAEAgAAAAEABAAJAAAABW5hbWVzAAAAEAAAAAIABAAJAAAABXZhbHVlAAQACQAAAAhwYXJl dG9fawAAAP4= --- WAoAAAACAAQFAAACAwAAAAITAAAAAgAAAA4AAAABP+++XajbmJQAAAAOAAAAAT/CwzqDy8zd AAAEAgAAAAEABAAJAAAABW5hbWVzAAAAEAAAAAIABAAJAAAABXZhbHVlAAQACQAAAAhwYXJl dG9fawAAAP4= --- WAoAAAACAAQFAAACAwAAAAITAAAAAgAAAA4AAAABP5Q/UH4+kokAAAAOAAAAAT+pC/0t4hZY AAAEAgAAAAEABAAJAAAABW5hbWVzAAAAEAAAAAIABAAJAAAABXZhbHVlAAQACQAAAAhwYXJl dG9fawAAAP4= --- WAoAAAACAAQFAAACAwAAAAITAAAAAgAAAA4AAAADv/RdTp/OKVA/lD9Qfj6SiT/0B+XkzCLI AAAADgAAAAE/qQv9LeIWWAAABAIAAAABAAQACQAAAAVuYW1lcwAAABAAAAACAAQACQAAAAV2 YWx1ZQAEAAkAAAAIcGFyZXRvX2sAAAD+ # E_loo.matrix equal to snapshots WAoAAAACAAQFAAACAwAAAAITAAAAAgAAAA4AAAAgP5YkyJk2Uw4/qWAe5+slHb+UGxtRYEj6 v3phLSMHz52/odiRUA18Oz+j5kMZY+OZv6UiBYpZA1c/gOCbIdkALD+aGlgT7KOMv6le704r 1Mk/lQ5ZQDf3uj+ewlXQr8iOv7Cx0vAv3x8/qBb/vlaNmD+RVknGML4AP5YmXOYjb6E/oOks rHS+3b+SlEsEisU6v5de/GugPA6/poyaPupPOr+GCwyWu+Dpv3vFGSbfabK/mtljUf0Mlz+K vNxsu3xMv6N5GBqP57a/jGhAZ3mD+L+mlNu35k+lP3ecrpcfLVC/dzGNuHKBtD+g/BS/OEnN v4Igw4FPLoM/WQ0Eo+5f+gAAAA4AAAAgP8IJ/l3aViU/pca2RdOM3j+/8rXkBqCrv6pIWFgx 3Ro/zBDTTM3Ckz+e+pZvzROmP8SF8jCU81Y/2kxN82LpUD+ht5lbUInzP8PjBotEZAk/sBld t19woT+1Fp9rH8ZgP7mOpdZ+VZ8/uQOl7kpZID/YnY9OInbSP9Gy/iCM9TY/4pVb5TMm8D/h I59hPK5oP9i6DzMC02U/2d2gH1IGej/Fs7T0mf6XP80BWgAOvZg/wVDbzlrO2z/I8p50vNhw P82+MASgNhQ/vbA2GxXFDz+ISvs8AG4SP9R81IdrKFQ/x2BUQaDJEz+7JHLaPhQmP8TX8Aiv VNQ/wcnwypNNIwAABAIAAAABAAQACQAAAAVuYW1lcwAAABAAAAACAAQACQAAAAV2YWx1ZQAE AAkAAAAIcGFyZXRvX2sAAAD+ --- WAoAAAACAAQFAAACAwAAAAITAAAAAgAAAA4AAAAgP+99QfBwyEo/8GLAkD/xED/urslTY/r8 P+9KWn8Ems8/8CFJIrn2dz/vm+wdgqz5P/AqS8QTOzQ/8Cbqm5CTOT/xSPL9dvdTP+8H16ot cmc/8Eke14BJfj/vYHX1dPLcP/BiQhlcUl4/78dCMDeGID/vSig0D8UoP/AOt7Yy0Nc/8QBL caz6WD/v2Rg/ai74P+7+Midd/Bg/7b36n0mRlT/wXcau4TfXP+/m6IR7cp4/8DQ+EMd7Xj/u 46MOX9w+P/Gsf0/jcxw/7yZac/NVID/u8Trrn7y5P/AFJDL0n7s/72gByARvRT/wDbVLG9Ph P+8uXczdPuI/7yVeIEvf5gAAAA4AAAAgP8LDOoPLzN0/vC9PGiC2mD/Dw665YwtSv0/zEf9g gzs/wrKXeFX04D+6oCb1eqeGP9VkIEZUNbU/2kxN82LpUD+ht5lbUInzP8CPzHjviWW/oC1v Ox8Noz+3SaHHVF8tv5miuj3t2Pk/yY0i+C8iSD/YnY9OInbSP9Gy/iCM9TY/3teTRr7GQT/l 5tcuLT5aP9i6DzMC02U/1PsEKxeyET+/YoKHfssaP80BWgAOvZg/uJULoaFQPT/I8p50vNhw P9eBi3nBuZc/x4JCmLKJjj+5nmB9gzl/P9R81IdrKFQ/w+sDN0nZGz+1z55LjIrMP8TX8Aiv VNQ/zWAJ7qX34gAABAIAAAABAAQACQAAAAVuYW1lcwAAABAAAAACAAQACQAAAAV2YWx1ZQAE AAkAAAAIcGFyZXRvX2sAAAD+ --- WAoAAAACAAQFAAACAwAAAAITAAAAAgAAAA4AAAAgP+++XajbmJQ/8DEU/8Lr5j/vVZ8WRY5+ P++kqukYCOA/8BCb8nKMzD/vzc6xkMxBP/AVF/p+31Y/8BNphxvXqz/woUx0rrHkP++C95Ag m9k/8CRmBO/JwD/vr9aTD09nP/Aw1oOFlzI/7+OUeVFCtz/vpJF6/sziP/AHWiqornE/8H4z /wmU2T/v7IYye0D5P+9+EUnmiwk/7tm0BTqf0j/wLp9qBaS7P+/zccuuIkY/8BoJ2GEYeT/v cJAOK3HZP/DQ66mN/dI/75Jxr/yrxj/vd3o8ODlDP/ACkeSklNY/77OlzUqqMT/wBtkuWosN P++WgQC9Cqo/75Hxz1uVVwAAAA4AAAAgP8LDOoPLzN0/vC9PGiC2mD/Dw665YwtSv0/zEf9g gzs/wrKXeFX04D+6oCb1eqeGP9VkIEZUNbU/2kxN82LpUD+ht5lbUInzP8CPzHjviWW/oC1v Ox8Noz+3SaHHVF8tv5miuj3t2Pk/yY0i+C8iSD/YnY9OInbSP9Gy/iCM9TY/3teTRr7GQT/l 5tcuLT5aP9i6DzMC02U/1PsEKxeyET+/YoKHfssaP80BWgAOvZg/uJULoaFQPT/I8p50vNhw P9eBi3nBuZc/x4JCmLKJjj+5nmB9gzl/P9R81IdrKFQ/w+sDN0nZGz+1z55LjIrMP8TX8Aiv VNQ/zWAJ7qX34gAABAIAAAABAAQACQAAAAVuYW1lcwAAABAAAAACAAQACQAAAAV2YWx1ZQAE AAkAAAAIcGFyZXRvX2sAAAD+ --- 
WAoAAAACAAQFAAACAwAAAAITAAAAAgAAAA4AAAAgP5Q/UH4+kok/rJhyoKzLQ7+mS88H3cZO v4Diw77SBPK/nJmNhpoF3z+eJuXpnME8v6uKXkbg5Hc/iwobiZz0Yz+CcwHscaAbv7nU2Akt GX+/kI6eu9QpOj+rntL08OAEv7OnDaPGnrk/qZJuk37o6j+khJ9TfTs9v3VOr8Mp0w4/Y4WW 2hIb6L+5piRwV8y9v51UE9Vh3d6/vDKUx9urWT+Vff+IHf9IP4PB4HUxxjK/rgu97ZVdnD91 l2lfiMfQv6ABPz28O8C/sFJbkh7yZL+o2Wq0N3S+P4Zn3piChxE/bj6AYsnnNj93X8jkk/2u v5UhfPojTuu/pJCpz+t4lwAAAA4AAAAgP6kL/S3iFli/rllsqlAP3D+xkRFa2o0Dv6pIWFgx 3Rq/vbbHaFe7lL+3wSib9eF2P7e2HDVDyo8/2kxN82LpUD+ht5lbUInzv76dY0fV19G/oC1v Ox8No7+xUxB4TL1Bv7MRS5NxFNQ/uQOl7kpZID/YnY9OInbSP9Gy/iCM9TY/3teTRr7GQT/V Uo3UFZa8P9i6DzMC02U/1PsEKxeyET+mimNy3QAxP80BWgAOvZg/uEa4KLePjD/I8p50vNhw v5z9lPVoNWw/vbA2GxXFDz+ISvs8AG4SP9R81IdrKFQ/w+sDN0nZGz+1z55LjIrMP8TX8Aiv VNQ/taMpWVYn7wAABAIAAAABAAQACQAAAAVuYW1lcwAAABAAAAACAAQACQAAAAV2YWx1ZQAE AAkAAAAIcGFyZXRvX2sAAAD+ --- WAoAAAACAAQFAAACAwAAAAITAAAAAgAAAg4AAABAv/RdTp/OKVA/9Afl5MwiyL/z0FnOG0F+ P/W3tESq+hG/83dZE3d+GD/zW0+Mmka2v/XZilmZkLg/9Anod0Btdb/0f6W0vqw/P/OZiJ83 G76/9JCX02s4vD/z/c7djgcNv/TDQRow++g/86vcLJmh6L/0wjA4NlBjP/RMb7F0RzK/9HAc YG8N8D/1mlKBbWwFv/SagAvlYpw/88LV9Rj1xb/0mH86EFjXP/QlJsXLmXy/9FUITMCdhT/0 YOU3NGbzv/XZiRsmTKE/8/w2ct9kJL/zwk+t/SPSP/Y2Nkb1kwi/9Jccz9A3RT/z1vUQMbyE v/Szng+3zeY/9MciBArOKL/0f9xw2jLPP/Vga+lxCxC/8tVQDeo7Az/1p5eybgglv/SJCtxH yEM/9AVPifNnSr/zjLE8SzISP/Ohe1nttQ6/9S6bPIHJgT/0hd4sDJmuv/RsPduiC1E/89vZ GzKrar/1FZjeT/k2P/S7KA0QeGi/83Y/x5KlBz/ztNqmF3ywv/ZCUvZXPyI/9PI83KF96L/0 Ml3NWrX1P/Rnu9LXaiq/9WcGWiOq+D/zkg6tBDLov/T2vDTMsMs/9Og0XHuRtr/04y833CW8 P/NqXO3hEbG/9HEUw/KPQD/1y+CeGQebv/SL1cuvxX8/9SbAzpRiIb/zufzsmkU1P/S0K91V 63IAAAQCAAAAAQAEAAkAAAADZGltAAAADQAAAAIAAAACAAAAIAAAAP4AAAAOAAAAID+pC/0t 4hZYv65ZbKpQD9w/sZERWtqNA7+qSFhYMd0av722x2hXu5S/t8Eom/Xhdj+3thw1Q8qPP9pM TfNi6VA/obeZW1CJ87++nWNH1dfRv6AtbzsfDaO/sVMQeEy9Qb+zEUuTcRTUP7kDpe5KWSA/ 2J2PTiJ20j/Rsv4gjPU2P97Xk0a+xkE/1VKN1BWWvD/Yug8zAtNlP9T7BCsXshE/popjct0A MT/NAVoADr2YP7hGuCi3j4w/yPKedLzYcL+c/ZT1aDVsP72wNhsVxQ8/iEr7PABuEj/UfNSH ayhUP8PrAzdJ2Rs/tc+eS4yKzD/E1/AIr1TUP7WjKVlWJ+8AAAQCAAAAAQAEAAkAAAAFbmFt ZXMAAAAQAAAAAgAEAAkAAAAFdmFsdWUABAAJAAAACHBhcmV0b19rAAAA/g== loo/tests/testthat/_snaps/crps.md0000644000176200001440000000270115027034070016617 0ustar liggesusers# crps matches snapshots WAoAAAACAAQFAAACAwAAAAITAAAAAgAAAg4AAAACv+IeiUeluMc/vSY1w5IjJgAABAIAAAAB AAQACQAAAAVuYW1lcwAAABAAAAACAAQACQAAAAhFc3RpbWF0ZQAEAAkAAAACU0UAAAD+AAAA DgAAAAq/zdHHAHZD6L/K3ky9mEk4v/B2uF/xc76/38/N1JUUkL/Z4VW5uFHUv/D3XaD33ZO/ yPJtFZYXlL/RgM+YTul0v+9F9Rv30V+/6Q2h5ndyqgAABAIAAAH/AAAAEAAAAAIABAAJAAAA CWVzdGltYXRlcwAEAAkAAAAJcG9pbnR3aXNlAAAA/g== --- WAoAAAACAAQFAAACAwAAAAITAAAAAgAAAg4AAAACv/ELnetrJtw/uJgUmYONYAAABAIAAAAB AAQACQAAAAVuYW1lcwAAABAAAAACAAQACQAAAAhFc3RpbWF0ZQAEAAkAAAACU0UAAAD+AAAA DgAAAAq/6MKGUxChmr/pWmzfnhawv/dXJCu4gh2/7/dH2oOKIL/tR104AbV4v/fX3UkYDWm/ 5+ZWJz0Y3r/qbdDewNjFv/aQu+wUiEO/89yOK7F4DAAABAIAAAH/AAAAEAAAAAIABAAJAAAA CWVzdGltYXRlcwAEAAkAAAAJcG9pbnR3aXNlAAAA/g== --- WAoAAAACAAQFAAACAwAAAAITAAAAAgAAAg4AAAACv+IVUtAszNU/vWbB6hSQFgAABAIAAAAB AAQACQAAAAVuYW1lcwAAABAAAAACAAQACQAAAAhFc3RpbWF0ZQAEAAkAAAACU0UAAAD+AAAA DgAAAAq/zKEONUJbrL/KuMhrCsFsv/BrsoFFUDu/31EWnT302L/ZnTwwjfZiv/E78Fc0R+6/ yM8apxh02L/RTacKeM0kv+6ksQycKl+/6bkMJjTk9gAABAIAAAH/AAAAEAAAAAIABAAJAAAA CWVzdGltYXRlcwAEAAkAAAAJcG9pbnR3aXNlAAAA/g== --- WAoAAAACAAQFAAACAwAAAAITAAAAAgAAAg4AAAACv/EL1OkLv00/uNvlkZb58AAABAIAAAAB AAQACQAAAAVuYW1lcwAAABAAAAACAAQACQAAAAhFc3RpbWF0ZQAEAAkAAAACU0UAAAD+AAAA DgAAAAq/6JdmIec4s7/pNG4XiJ0yv/diwSdPiFi/77pjBRqbxr/tO+gNa/rMv/glDLlafB6/ 5+SScRdhOb/qdN1KnaG+v/Y6AtSCgWa/9Ca44XM7bgAABAIAAAH/AAAAEAAAAAIABAAJAAAA CWVzdGltYXRlcwAEAAkAAAAJcG9pbnR3aXNlAAAA/g== 
loo/tests/testthat/_snaps/loo_subsampling.md0000644000176200001440000000106615027034070021050 0ustar liggesusers# loo_compare_subsample Code lcss <- loo:::loo_compare.psis_loo_ss_list(x = list(lss1, lss2, lss3)) Condition Warning: Different subsamples in 'model3' and 'model2'. Naive diff SE is used. Warning: Different subsamples in 'model3' and 'model1'. Naive diff SE is used. --- Code lcssapi <- loo_compare(lss1, lss2, lss3) Condition Warning: Different subsamples in 'model3' and 'model2'. Naive diff SE is used. Warning: Different subsamples in 'model3' and 'model1'. Naive diff SE is used. loo/tests/testthat/_snaps/loo_predictive_metric.md0000644000176200001440000000335315027034070022226 0ustar liggesusers# loo_predictive_metric is equal to snapshot WAoAAAACAAQFAAACAwAAAAITAAAAAgAAAA4AAAABP+b6DHwJlO4AAAAOAAAAAT+0VQV434B0 AAAEAgAAAAEABAAJAAAABW5hbWVzAAAAEAAAAAIABAAJAAAACGVzdGltYXRlAAQACQAAAAJz ZQAAAP4= --- WAoAAAACAAQFAAACAwAAAAITAAAAAgAAAA4AAAABP/c/cc5N6ckAAAAOAAAAAT/C02/w8SU2 AAAEAgAAAAEABAAJAAAABW5hbWVzAAAAEAAAAAIABAAJAAAACGVzdGltYXRlAAQACQAAAAJz ZQAAAP4= --- WAoAAAACAAQFAAACAwAAAAITAAAAAgAAAA4AAAABP+r8D5W4BOIAAAAOAAAAAT+07COl+XlU AAAEAgAAAAEABAAJAAAABW5hbWVzAAAAEAAAAAIABAAJAAAACGVzdGltYXRlAAQACQAAAAJz ZQAAAP4= --- WAoAAAACAAQFAAACAwAAAAITAAAAAgAAAA4AAAABP/qvkDt3DvwAAAAOAAAAAT/BcF2FCRTJ AAAEAgAAAAEABAAJAAAABW5hbWVzAAAAEAAAAAIABAAJAAAACGVzdGltYXRlAAQACQAAAAJz ZQAAAP4= --- WAoAAAACAAQFAAACAwAAAAITAAAAAgAAAA4AAAABP+bBWsjIsVUAAAAOAAAAAT/BpKrAKkv4 AAAEAgAAAAEABAAJAAAABW5hbWVzAAAAEAAAAAIABAAJAAAACGVzdGltYXRlAAQACQAAAAJz ZQAAAP4= --- WAoAAAACAAQFAAACAwAAAAITAAAAAgAAAA4AAAABQAZBDZS19iwAAAAOAAAAAT/dFfIrJ59T AAAEAgAAAAEABAAJAAAABW5hbWVzAAAAEAAAAAIABAAJAAAACGVzdGltYXRlAAQACQAAAAJz ZQAAAP4= --- WAoAAAACAAQFAAACAwAAAAITAAAAAgAAAA4AAAABP9gAAAAAAAAAAAAOAAAAAT+16K3SNqWP AAAEAgAAAAEABAAJAAAABW5hbWVzAAAAEAAAAAIABAAJAAAACGVzdGltYXRlAAQACQAAAAJz ZQAAAP4= --- WAoAAAACAAQFAAACAwAAAAITAAAAAgAAAA4AAAABP9QAAAAAAAAAAAAOAAAAAT+0+ea7xOyz AAAEAgAAAAEABAAJAAAABW5hbWVzAAAAEAAAAAIABAAJAAAACGVzdGltYXRlAAQACQAAAAJz ZQAAAP4= --- WAoAAAACAAQFAAACAwAAAAITAAAAAgAAAA4AAAABP9aw32sN9rEAAAAOAAAAAT+uapXy8Hyq AAAEAgAAAAEABAAJAAAABW5hbWVzAAAAEAAAAAIABAAJAAAACGVzdGltYXRlAAQACQAAAAJz ZQAAAP4= --- WAoAAAACAAQFAAACAwAAAAITAAAAAgAAAA4AAAABP+AAAAAAAAAAAAAOAAAAAQAAAAAAAAAA AAAEAgAAAAEABAAJAAAABW5hbWVzAAAAEAAAAAIABAAJAAAACGVzdGltYXRlAAQACQAAAAJz ZQAAAP4= loo/tests/testthat/_snaps/print_plot.md0000644000176200001440000000013015027034070020034 0ustar liggesusers# mcse_loo extractor gives correct value WAoAAAACAAQFAAACAwAAAAAOAAAAAT+2J8YDcP5s loo/tests/testthat/_snaps/model_weighting.md0000644000176200001440000000032615027034070021016 0ustar liggesusers# loo_model_weights (stacking and pseudo-BMA) gives expected result WAoAAAACAAQFAAACAwAAAAAOAAAAAz/KEXngFjO6P+l7oXTIUDU+YzIi3AAAAA== --- WAoAAAACAAQFAAACAwAAAAAOAAAAAz+xA6UGtqDFP+3eFS5zKzY/J2MLYAsc4w== loo/tests/testthat/_snaps/tisis.md0000644000176200001440000000057515074562565017033 0ustar liggesusers# tis throws correct errors and warnings Code psis(-LLarr, r_eff = r_eff_arr) Message Replacing NAs in `r_eff` with 1s Output Computed from 1000 by 32 log-weights matrix. MCSE and ESS estimates assume MCMC draws (r_eff in [0.6, 1.0]). All Pareto k estimates are good (k < 0.67). See help('pareto-k-diagnostic') for details. 
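A minimal sketch (not part of the package sources) of the call pattern the psis/tis snapshots just above record; example_loglik_array(), relative_eff(), and psis() are exported loo functions, and the object names simply mirror those shown in the snapshot text (the exact printed messages will depend on the r_eff values supplied).
library(loo)
LLarr <- example_loglik_array()            # 500 iterations x 2 chains x 32 observations
r_eff_arr <- relative_eff(exp(LLarr))      # relative ESS of exp(log-lik), one value per observation
psis1 <- psis(-LLarr, r_eff = r_eff_arr)   # PSIS on the negative log-likelihood used as log importance ratios
print(psis1)                               # prints a Pareto k diagnostic summary like the one recorded above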
loo/tests/testthat/_snaps/relative_eff.md0000644000176200001440000000067315027034070020311 0ustar liggesusers# relative_eff results haven't changed WAoAAAACAAQFAAACAwAAAAAOAAAAID/uIiOKmu2kP+6GCkXEpTM/70Lb4e15Oj/tZ65FV0vH P+2GnH09nJk/7Hw1VgDmBz/tuCP594O4P+1Ulj+KOoY/6+Tq05WOpz/ugUxGcsIDP+0VSE2x w40/7vVLll1+uj/s1cJEtfenP+hJq3448q4/6vM9zr6Qrj/usFhDXBk+P+H4phZGDTo/8E/4 4nsWuD/obI1QMua8P+/z/ldoum4/7c6bXXFkxz/sJoYfhWKlP+0XU017if0/7WYXpNYh1z/u z3qnQw2lP+d0gPdQiEk/6UBABJU8Ij/mpI+Vo6SEP+z23xd7kYU/7MnA0gDWhj/si3zUJgmW P+7KcLiaKcM= loo/tests/testthat/_snaps/loo_and_waic.md0000644000176200001440000001703715027034070020276 0ustar liggesusers# loo, waic and elpd results haven't changed WAoAAAACAAQFAAACAwAAAAMTAAAACgAAAg4AAAAGwFTltnf8GWZACqFzqDCa8kBk5bZ3/Blm QBEipbqh/pI/8m8Dok+RiEAhIqW6of6SAAAEAgAAAAEABAAJAAAAA2RpbQAAAA0AAAACAAAA AwAAAAIAAAQCAAAAAQAEAAkAAAAIZGltbmFtZXMAAAATAAAAAgAAABAAAAADAAQACQAAAAhl bHBkX2xvbwAEAAkAAAAFcF9sb28ABAAJAAAABWxvb2ljAAAAEAAAAAIABAAJAAAACEVzdGlt YXRlAAQACQAAAAJTRQAAAP4AAAIOAAAAoMAC+fJnaLHwwAEP7abtCtDAArJiAQQUDsABa6HW xx3swAC19bRGOqbAAOVsk57EhMAHXE6oz8X6wAhOMwo4HzjAAxWJEz7wYMAAvCuQLdoawAEz +EIq/LDAARqnVc5fZMAAuMi8GPlmwAJBKzhjQ9zAAe5YKyVuTsADlXoqYwIGwBIRlu/H12/A E/WagapcWsACpJaoNucYwBLYLgxdc6fAA7rSrp1oIMAEf9XEej+SwAa4oZCQGv7ABf5oQger 6sADXWMppHVuwAEJEltQ9nLAAOaxVhD86sAB7SPP8uDQwAnDIjSfxULABAp5a+6r9sAFJHaV 7SH8wAEph84QBEw/eQDDaV43ET9xp+BQBCmTP3qfwKytXZ0/ctyOXeTxzz9xqi9N9aR4P3IZ d1RCmrs/hDyfFGZAlj+GNSCYztY2P3e6iLhToUY/cX4fUwwY4j9yNONK++tcP3MW2+Teir8/ chrQtYH1VT94S/hkJOXvP4KGRiL5AqY/jR2tM+V2kj+xFwrjXj1kP6T55+1SS6g/h0J5DiSH Lz+l12L3soNYP36Eg9WZBwg/fE5degjr7j+CgjOtLdigP4MAE38XAjM/efuEjSOY2z938dyR QJQSP3UOviSS5E0/hJj7+xYimz+KmFtUHEseP3x0d1lSXvQ/fyjSq08iSD9x5Q1KokZ4P6BC 0qXNAwA/kYGquy8AAD+imuIOTvsAP5NUpOQ/eAA/kQtugOBEAD+RMeghdl4AP7Jn3QxtCsA/ tE9QFCi4gD+bSeGjWlIAP5FVoLo7XwA/kbVn03S8AD+Ui/4kS28AP5F+F0mQ2wA/mP14d7y0 AD+pRBELXxKAP7xVzQoMgYA/6p0tx9H/CD/kuDZpjnkQP7DPsKVRKQA/5Y1g8Rh9sD+m1FBE ZMYAP6LmZaqjsIA/rvRkyt/1gD+wN1KCt4TAP6FZe4TNQ4A/lxkATIN0AD+UCq5AO8gAP6ku qlvkC4A/u837aXIIAD+joD1aCLQAP6afX0AzCAA/khQ6CvIGAEAS+fJnaLHwQBEP7abtCtBA ErJiAQQUDkARa6HWxx3sQBC19bRGOqZAEOVsk57EhEAXXE6oz8X6QBhOMwo4HzhAExWJEz7w YEAQvCuQLdoaQBEz+EIq/LBAERqnVc5fZEAQuMi8GPlmQBJBKzhjQ9xAEe5YKyVuTkATlXoq YwIGQCIRlu/H129AI/WagapcWkASpJaoNucYQCLYLgxdc6dAE7rSrp1oIEAUf9XEej+SQBa4 oZCQGv5AFf5oQger6kATXWMppHVuQBEJEltQ9nJAEOaxVhD86kAR7SPP8uDQQBnDIjSfxUJA FAp5a+6r9kAVJHaV7SH8QBEph84QBEw/qQv9LeIWyb+uWWyqUA9QP7GREVrajU6/qkhYWDHd rL+9tsdoV7uMv7fBKJv14eU/t7YcNUPKsT/aTE3zYuljP6G3mVtQiba/vp1jR9XXbb+gLW87 Hw2mv7FTEHhMvSa/sxFLk3EUuz+5A6XuSljYP9idj04idy4/0bL+IIz1RT/e15NGvsbPP9VS jdQVlsE/2LoPMwLTfD/U+wQrF7IQP6aKY3Lc//Q/zQFaAA69qT+4Rrgot4/8P8jynnS82Ey/ nP2U9Wg1Vj+9sDYbFcUYP4hK+zwAbv4/1HzUh2soqj/D6wM3SdjbP7XPnkuMip4/xNfwCK9V Jj+1oylZVifyAAAEAgAAAf8AAAANAAAAAgAAACAAAAAFAAAEAgAAAv8AAAATAAAAAgAAAP4A AAAQAAAABQAEAAkAAAAIZWxwZF9sb28ABAAJAAAADW1jc2VfZWxwZF9sb28ABAAJAAAABXBf bG9vAAQACQAAAAVsb29pYwAEAAkAAAASaW5mbHVlbmNlX3BhcmV0b19rAAAA/gAAAhMAAAAD AAAADgAAACA/qQv9LeIWyb+uWWyqUA9QP7GREVrajU6/qkhYWDHdrL+9tsdoV7uMv7fBKJv1 4eU/t7YcNUPKsT/aTE3zYuljP6G3mVtQiba/vp1jR9XXbb+gLW87Hw2mv7FTEHhMvSa/sxFL k3EUuz+5A6XuSljYP9idj04idy4/0bL+IIz1RT/e15NGvsbPP9VSjdQVlsE/2LoPMwLTfD/U +wQrF7IQP6aKY3Lc//Q/zQFaAA69qT+4Rrgot4/8P8jynnS82Ey/nP2U9Wg1Vj+9sDYbFcUY P4hK+zwAbv4/1HzUh2soqj/D6wM3SdjbP7XPnkuMip4/xNfwCK9VJj+1oylZVifyAAAADgAA ACBAjGtR4vOUSECNSIXMZdptQI1OzHDwC35AjCl3nEfMhECMV+BQfP7MQItWfnwINa1Aipoa +DhXG0CJ4jnKlWa8QIp1d1J1ZG1AjUahmmkFkkCL5UfgNu4DQI2abt7PHcNAi6rgACII5UCH F8533XLIQIiixsC750JAiRWGP5WHBUBkAZfpVM1eQHdzaroEDUlAhbbxJuuvP0B2G3GedgHa 
QIurDxL70Z1AimK5GPqZQ0CKb2AkeCpMQIqX6UZQkVlAjPSaSuWUT0CGV1Ue/HaIQIgmSdq8 SDtAhKKy2922NECIf2FmaiksQIrvm5gPU99AioGA+OUNHUCNhdwLjHhJAAAADgAAACA/7iIj iprtpD/uhgpFxKUzP+9C2+HteTo/7WeuRVdLxz/thpx9PZyZP+x8NVYA5gc/7bgj+feDuD/t VJY/ijqGP+vk6tOVjqc/7oFMRnLCAz/tFUhNscONP+71S5Zdfro/7NXCRLX3pz/oSat+OPKu P+rzPc6+kK4/7rBYQ1wZPj/h+KYWRg06P/BP+OJ7Frg/6GyNUDLmvD/v8/5XaLpuP+3Om11x ZMc/7CaGH4VipT/tF1NNe4n9P+1mF6TWIdc/7s96p0MNpT/ndID3UIhJP+lAQASVPCI/5qSP laOkhD/s9t8Xe5GFP+zJwNIA1oY/7It81CYJlj/uynC4minDAAAEAgAAAAEABAAJAAAABW5h bWVzAAAAEAAAAAMABAAJAAAACHBhcmV0b19rAAQACQAAAAVuX2VmZgAEAAkAAAAFcl9lZmYA AAD+AAAA/gAAAA4AAAABwFTltnf8GWYAAAAOAAAAAUAKoXOoMJryAAAADgAAAAFAZOW2d/wZ ZgAAAA4AAAABQBEipbqh/pIAAAAOAAAAAT/ybwOiT5GIAAAADgAAAAFAISKluqH+kgAABAIA AAP/AAAAEAAAAAoABAAJAAAACWVzdGltYXRlcwAEAAkAAAAJcG9pbnR3aXNlAAQACQAAAAtk aWFnbm9zdGljcwAEAAkAAAALcHNpc19vYmplY3QABAAJAAAACGVscGRfbG9vAAQACQAAAAVw X2xvbwAEAAkAAAAFbG9vaWMABAAJAAAAC3NlX2VscGRfbG9vAAQACQAAAAhzZV9wX2xvbwAE AAkAAAAIc2VfbG9vaWMAAAQCAAAAAQAEAAkAAAAEZGltcwAAAA0AAAACAAAD6AAAACAAAAQC AAAAAQAEAAkAAAAFY2xhc3MAAAAQAAAAAwAEAAkAAAAIcHNpc19sb28ABAAJAAAAF2ltcG9y dGFuY2Vfc2FtcGxpbmdfbG9vAAQACQAAAANsb28AAAD+ --- WAoAAAACAAQFAAACAwAAAAMTAAAACAAAAg4AAAAGwFTh8N3JQlhACijAYdW5U0Bk4fDdyUJY QBEIPbMRcF8/8f1l7HLzXkAhCD2zEXBfAAAEAgAAAAEABAAJAAAAA2RpbQAAAA0AAAACAAAA AwAAAAIAAAQCAAAAAQAEAAkAAAAIZGltbmFtZXMAAAATAAAAAgAAABAAAAADAAQACQAAAAll bHBkX3dhaWMABAAJAAAABnBfd2FpYwAEAAkAAAAEd2FpYwAAABAAAAACAAQACQAAAAhFc3Rp bWF0ZQAEAAkAAAACU0UAAAD+AAACDgAAAGDAAvmNuneZ+MABD8PfZfY4wAKx99lcynHAAWt8 Ymy7fsAAtdsVao/IwADlTdu1DZXAB1udTX3k5MAITnOoOskWwAMVPYmACM/AALwIUpw5kMAB M9Wkr7fgwAEacHJh2rLAALipB9PpyMACQM0jnEqOwAHuKRocp5fAA5B3eimOtsAR/4j9ckNW wBPkxJfCGnvAAqNA9Dl1Q8ASxzduTNRhwAO6YsL8iDTABH/w24FP78AGt/B5FTffwAX9sPYm FK7AA1z0NJiPzcABCNYOV3oNwADmeBUiLKbAAewYWMd6msAJv/mOcrF/wAQKIFP4rJDABSRV zW5E/8ABKW7gHFe9P6App2mHBRU/kWzG96Sz0z+igFgkfJPZP5NB6rcOQRY/kP4fEwrVCz+R IowsmuZNP7JRsaIw6A4/tFdj1H30QD+bJBzD5olzP5FEAfFrGdk/kaQZFdJUCj+UcIxuCRXv P5FuPScJC8U/mM5uFEAM6j+pOEzJLWSxP7u1dwLeF4A/6gy+NSVeQz/kMYcaTGoXP7Ck+iWi 7mA/5QWsAJODgj+muFVcLMsTP6LtK2xnx8I/rsge7Cctwj+wIGkGhJ02P6E9vkHT2zQ/lvrZ z8VBdj+T7g3I06XIP6jrzJEKfhw/u2jmo8+PpT+jifdciNpuP6aXLSB7yNY/kgfDERu+V0AS +Y26d5n4QBEPw99l9jhAErH32VzKcUARa3xibLt+QBC12xVqj8hAEOVN27UNlUAXW51NfeTk QBhOc6g6yRZAExU9iYAIz0AQvAhSnDmQQBEz1aSvt+BAERpwcmHaskAQuKkH0+nIQBJAzSOc So5AEe4pGhynl0ATkHd6KY62QCH/iP1yQ1ZAI+TEl8Iae0ASo0D0OXVDQCLHN25M1GFAE7pi wvyINEAUf/DbgU/vQBa38HkVN99AFf2w9iYUrkATXPQ0mI/NQBEI1g5Xeg1AEOZ4FSIspkAR 7BhYx3qaQBm/+Y5ysX9AFAogU/iskEAVJFXNbkT/QBEpbuAcV70AAAQCAAAB/wAAAA0AAAAC AAAAIAAAAAMAAAQCAAAC/wAAABMAAAACAAAA/gAAABAAAAADAAQACQAAAAllbHBkX3dhaWMA BAAJAAAABnBfd2FpYwAEAAkAAAAEd2FpYwAAAP4AAAAOAAAAAcBU4fDdyUJYAAAADgAAAAFA CijAYdW5UwAAAA4AAAABQGTh8N3JQlgAAAAOAAAAAUARCD2zEXBfAAAADgAAAAE/8f1l7HLz XgAAAA4AAAABQCEIPbMRcF8AAAQCAAAAAQAEAAkAAAAFbmFtZXMAAAAQAAAACAAEAAkAAAAJ ZXN0aW1hdGVzAAQACQAAAAlwb2ludHdpc2UABAAJAAAACWVscGRfd2FpYwAEAAkAAAAGcF93 YWljAAQACQAAAAR3YWljAAQACQAAAAxzZV9lbHBkX3dhaWMABAAJAAAACXNlX3Bfd2FpYwAE AAkAAAAHc2Vfd2FpYwAABAIAAAABAAQACQAAAARkaW1zAAAADQAAAAIAAAPoAAAAIAAABAIA AAABAAQACQAAAAVjbGFzcwAAABAAAAACAAQACQAAAAR3YWljAAQACQAAAANsb28AAAD+ --- WAoAAAACAAQFAAACAwAAAAMTAAAAAgAAAg4AAAAEwFQQqtq6lJBAZBCq2rqUkEAJ4d/NRDUI QBnh381ENQgAAAQCAAAAAQAEAAkAAAADZGltAAAADQAAAAIAAAACAAAAAgAABAIAAAABAAQA CQAAAAhkaW1uYW1lcwAAABMAAAACAAAAEAAAAAIABAAJAAAABGVscGQABAAJAAAAAmljAAAA EAAAAAIABAAJAAAACEVzdGltYXRlAAQACQAAAAJTRQAAAP4AAAIOAAAAQMACuOcc0X3kwADs 6lF2rNDAAmf2eMrYIsABRPiM/p78wACT3tdEeh7AAMMIw1vXyMAGyQ/AbF2kwAeruImW2XTA At71T/g7vMAAmYBOuWNcwAEQjXKEEzjAAPGPWYXIhsAAlcyNhdewwAIPMEdzynTAAYlH5vfy 
BMACssvCEp36wA174m2bLxzAEV6TtHiNOMACHhkjDF3QwBAmge46Y/HAA1+BbYvVCMAENDwt z7DQwAY8z/1kmyjABXytrfHvxMADF/07kUBgwADa4Fq374rAAL6b+ZCFWsABiGkmg1CiwAjk sllUNQLAA7v4doaJJsAEyfkY7FXcwAEFX1n6IEBAErjnHNF95EAQ7OpRdqzQQBJn9njK2CJA EUT4jP6e/EAQk97XRHoeQBDDCMNb18hAFskPwGxdpEAXq7iJltl0QBLe9U/4O7xAEJmATrlj XEAREI1yhBM4QBDxj1mFyIZAEJXMjYXXsEASDzBHc8p0QBGJR+b38gRAErLLwhKd+kAde+Jt my8cQCFek7R4jThAEh4ZIwxd0EAgJoHuOmPxQBNfgW2L1QhAFDQ8Lc+w0EAWPM/9ZJsoQBV8 ra3x78RAExf9O5FAYEAQ2uBat++KQBC+m/mQhVpAEYhpJoNQokAY5LJZVDUCQBO7+HaGiSZA FMn5GOxV3EARBV9Z+iBAAAAEAgAAAf8AAAANAAAAAgAAACAAAAACAAAEAgAAAv8AAAATAAAA AgAAAP4AAAAQAAAAAgAEAAkAAAAEZWxwZAAEAAkAAAACaWMAAAD+AAAEAgAAAAEABAAJAAAA BW5hbWVzAAAAEAAAAAIABAAJAAAACWVzdGltYXRlcwAEAAkAAAAJcG9pbnR3aXNlAAAEAgAA AAEABAAJAAAABGRpbXMAAAANAAAAAgAAA+gAAAAgAAAEAgAAAAEABAAJAAAABWNsYXNzAAAA EAAAAAIABAAJAAAADGVscGRfZ2VuZXJpYwAEAAkAAAADbG9vAAAA/g== loo/tests/testthat/test_0_helpers.R0000644000176200001440000000505515064301501017110 0ustar liggesusersLLarr <- example_loglik_array() LLmat <- example_loglik_matrix() test_that("example_loglik_array and example_loglik_matrix dimensions ok", { dim_arr <- dim(LLarr) dim_mat <- dim(LLmat) expect_equal(dim_mat[1], dim_arr[1] * dim_arr[2]) expect_equal(dim_mat[2], dim_arr[3]) }) test_that("example_loglik_array and example_loglik_matrix contain same values", { expect_equal(LLmat[1:500, ], LLarr[, 1, ]) expect_equal(LLmat[501:1000, ], LLarr[, 2, ]) }) test_that("reshaping functions result in correct dimensions", { LLmat2 <- llarray_to_matrix(LLarr) expect_identical(LLmat2, LLmat) LLarr2 <- llmatrix_to_array(LLmat2, chain_id = rep(1:2, each = 500)) expect_identical(LLarr2, LLarr) }) test_that("reshaping functions throw correct errors", { expect_error( llmatrix_to_array(LLmat, chain_id = rep(1:2, times = c(400, 600))), regexp = "Not all chains have same number of iterations", fixed = TRUE ) expect_error( llmatrix_to_array(LLmat, chain_id = rep(1:2, each = 400)), regexp = "Number of rows in matrix not equal to length(chain_id)", fixed = TRUE ) expect_error( llmatrix_to_array(LLmat, chain_id = rep(2:3, each = 500)), regexp = "max(chain_id) not equal to the number of chains", fixed = TRUE ) expect_error( llmatrix_to_array(LLmat, chain_id = rnorm(1000)), regexp = "all(chain_id == as.integer(chain_id)) is not TRUE", fixed = TRUE ) }) test_that("colLogMeanExps(x) = log(colMeans(exp(x))) ", { expect_equal(colLogMeanExps(LLmat), log(colMeans(exp(LLmat)))) }) test_that("validating log-lik objects and functions works", { f_ok <- function(data_i, draws) return(NULL) f_bad1 <- function(data_i) return(NULL) f_bad2 <- function(data, draws) return(NULL) expect_equal(validate_llfun(f_ok), f_ok) bad_msg <- "Log-likelihood function must have at least the arguments 'data_i' and 'draws'" expect_error(validate_llfun(f_bad1), bad_msg) expect_error(validate_llfun(f_bad2), bad_msg) }) test_that("nlist works", { a <- 1 b <- 2 c <- 3 nlist_val <- list(nlist(a, b, c), nlist(a, b, c = "tornado")) nlist_ans <- list( list(a = 1, b = 2, c = 3), list(a = 1, b = 2, c = "tornado") ) expect_equal(nlist_val, nlist_ans) expect_equal(nlist(a = 1, b = 2, c = 3), list(a = 1, b = 2, c = 3)) }) test_that("loo_cores works", { expect_equal(loo_cores(10), 10) options(mc.cores = 2) expect_equal(loo_cores(getOption("mc.cores", 1)), 2) options(mc.cores = 1) options(loo.cores = 2) expect_warning(expect_equal(loo_cores(10), 2), "deprecated") options(loo.cores = NULL) }) loo/tests/testthat/data-for-tests/0000755000176200001440000000000014523242160016701 5ustar 
loo/tests/testthat/data-for-tests/normal_reg_waic_test_example2.rda0000644000176200001440000005716414523242160025372 0ustar liggesusers
[binary RDA payload omitted]
loo/tests/testthat/data-for-tests/function_method_stuff.R0000644000176200001440000000057414523242160023426 0ustar liggesusers
# Simulated beta-binomial data and a pointwise log-likelihood function
# used by the function-method tests.
N <- 50; K <- 10; S <- 100; a0 <- 3; b0 <- 2
p <- rbeta(1, a0, b0)
y <- rbinom(N, size = K, prob = p)

# Conjugate Beta posterior for the binomial probability
a <- a0 + sum(y); b <- b0 + N * K - sum(y)
draws <- as.matrix(rbeta(S, a, b))
data <- data.frame(y, K)

# Log-likelihood of observation i evaluated over all S posterior draws
llfun <- function(data_i, draws) {
  dbinom(data_i$y, size = data_i$K, prob = draws, log = TRUE)
}

# S x N log-likelihood matrix built by applying llfun to each observation
llmat_from_fn <- sapply(1:N, function(i) llfun(data[i, , drop = FALSE], draws))
loo/tests/testthat/data-for-tests/test_data_psis_approximate_posterior.rda0000644000176200001440000201244414523242160027125 0ustar liggesusers
[binary RDA payload omitted]
l Tn+a8yJW:ם꽖th-zձLMXtL'Lg-lLmJR8ǺpߗC' З?A*_urK+9fD0$*}k' Wn"Dq>I;ˍyV[Z1}թgmő!9_7uZL~b 'SL]ؽ3V+~mfqv}}cxuk'<""bn@Ptk`In ?GG^zңT܇']ykaφ戹3b^tx}B!n{kO[~:e%j֯qFe@}X{*I h}*tD~`]R^eRMrzI!̏;fR_\Ӄ!αH`rcz{?ʽ9pkE{ܨȣ=#+s7]|_cۢ8^(HnFRC,sgsb1<߇@* 4^ܣdVoU} M^Kɞœ6MVH6ùym PnVы2ؼ1Jֵ327.`3R"#.{UΔ4yW8o 8߿wWdlK6c}coR?< apZ;J 5:7B Ц\@{yg;]Q?[ !mGl6ɂ'0 uϼm Zm`찺跛R6@.r3Y@X䅓 c3֗`OHf%0#6MϢߐG\&ۗ9տaN`^x},a$dJB>U>󁧩&s1wh˚j_a `c]EI|8ο`£ૈDnْj8+~ynp1i i }W-ۣ70ziljZ؟_JOԀ5őɍQQ@>\kVHU2g[ 蒊Y#]oUN&"熩`N-1Q5 J,0|wi#߸ZhY @ݵd -T1v<&uбR^(dჹ/LbG O-Fԫ@Z]'PH'?+Ƅq`uX(iz?wj{ EtxE0 _z;SI?"[ fiߋ$ @ lIޡ]%VH~.[lQ` F.ܦ3YC gɡIT޻@1"8)c?mTWR5aFZep r~Ze㸁~k q_$ DlI 6&^Q@}a5m@{4K*sӹ@{sh7|hqUqaާ@]N\c:r o[ ΑO7yF'yҘ\ 5 xIldRIq[ɒ܇NRÇ5*@X+/FHj_/.Q: qC 4{-/ dQݚA@muͣJO|K9ڹw5X5@z1W4o n ];_\bEyZ4֑ S> ?j IL}aҕcm@J z3 $Z`q?v []}Ձ)u(2oܰ6'X)Ü*U9u;ZFz,:ǜQ|1gb@ĕU@'d{GnaW@a+` ˸$'`Vdc|ŊjU—| Kfxziiξ/=l{).k<5`~K1ҳ(0i-6u;E8LE5@_@ۃOxl8?1 fʀ/;=_T~Һ7;@^[|Q{^c#ᕗR Ym8&B_J4Ń@:&5:3C]Wi@kY1kֵ~MȎsoeS1Ȭdy@Ȍ~6Q6i{nQM@>`*/ u7eˎ~R) д2~鉽=@f6^vbHޛM6q4Z]߀| >^Oz x"v"xtp<l93c @\[:tq7, }SˎF)Z,òl-P&YIר7ۯ{/ SZм95"aP,^vf3*}9v7@Zj/1I̾Ds!@J4:b`H5Gɑ'5 #8:YoyGHg݁0Qk9]TTzQ ZS@Ҿ[o+ꅥnZ34MuSpd%L(B}g}u\m ZG/@26ȠfPR_?N\7B_:H /* /ǟg\m8-$g 8Hv01l.O_{}s&F+?!9A5@Uq5qר\ϵ~(~s]t: I}{׾[wI&bkh=P47^8[UO6U6%tQrpεϩp+@cW껎Xۻ~OZ*LFn7v~zHM Yc?rs:$=wQ} C_R92 DQO55@h^uHvvuQǂս.i֮}ޤ^9SH1wn ze-NK ,(M'bp6?_3nazZݵ=i$UF5t3s0c?@|-}s?"fyYٸ_OHy΍ .0sM}hq@Wė~{);="& U?(n¹(W sޏ*Oìï"@|/uB*|rW8ޑE?ɼ(|#߆s"* |:'ճW![K&k_^'b©d<饉s0'WH ;khyV:^tk˃?EYd~p{ V\~Ⱥ فtK8&{?jow<~wQ6E޿N 窟1K̴͸w4ݮ@mwqSsؼ`Dp~x٭5&G"R]ab{KÃ5ku!Bl{ϭXd%>E #ڷ7OkELYc@p:v%Temo6jى$S Bn IPe:fUoWԗ1}'k ~f$dO#6LK5x7r nު~rM> Q&+,P33s^I!z[ +g9ΖR X8JOQE9}/e@^&q?xoiG }s'@}JQs}aeԿu@>,D f[tuP?wrs~L"C 9hg1Е`ߊv2[ 1 <1A)y 8p}>tỵ'( ЧTۣh>:>YM@ Y$#Pw:F\"aM i}Wr{L4_EVE?ؤ6g_.^ 2Bk9(3 昿f5vlOHāt- +vo¦'X'Eyviʆ1ݾ}O>\,8Jbl#D0KO"?}s֬Fn5 Z ou# ̖>BA[v$Ν֥?\2<^N_8l4Z`_܃W'E=0[Mr=P*/OodAY#LxRqNNtr/c2ƾmr\j=6}?^9N,x9ؾ/-[S9@\]y& _|uv@bk(/jrޯ١!uj5g0f .ہZ27[\A&9f: DV[.8!#𿟗x2&*9XJt"38v&l%N Ü6%o6N*U)y]l6d`_ϗD# F}!1?г]зe'a7itqܒ.gw9y|v{=캉IS 7̱ 7yLlby#_@$"wp~UM9Ѻ< f)/Tfpk^ ZUԛ "ᜟJk(kQsq?o9#|8:vx0G[nr:u|[Its5=+/#嚎 D5!E@Zf|H Q@:Dݯ{*Ľ>^#G1/-.|ـ *Ce دm*a?"tra?L$ˌQ8muM9/=8*v 2dDP,}%( )$PE ?rm{g Og?"Ὄ頮 7ƼqxJr5 uKat#s\פR˳A8m`O 9>ֿu^ϋ5@>%9lᘧ0껻酗دk~ܐꓤ_-c0^y|UeM<UrY]GY G}WOHr"}G%PVDcii ~@v?̡QlÊ}{!m#@Pq@2_Y+?V@$X{Gd0no<ڶ ;AT}2u壎JCRmj}3w )׎s;lbq* "=Zi\a d1Tϭ^M0@яIF!Q^x҂mq1̟"ENC}7[ ^9vÕ6 z̨6<`m|O^ mt 9\t1̧Bjb!'b_K(&ϙ9ȱ)7\w|V3Ejt` ugv4#L: g-~py/-ģ$Ӓ[Q 䵢>E]z~(k[ jJF]B l%Ao| @>.S}/*ȝ7_Jv\ QE1;:l$IhC{Ta: 9 ycG_=]bDg. 
WF3\_qXFڮ=0ۓ~yS6||m+r wa>wl}@ٴAyG!V*GAwjYƞ?[zīC;=6Y1,d#^k,8mҗR $IG]2;)K3̽,ԉt~V˵+=?ǜTJ?D`J}]@rQۚS9P˼z@")px٩;O`s.Jz7V4 \ÜgXyiBw.qlb랣2g3DnTL}s5y [ے"~xNĕV83S$NxgZk1-Sx?VKa“@xR{9q(;~%x>|NHn瑗ߘ.lLl[]נOQrrmBz̭漖tkmQwUb`7pNgz;1O>f,~/u\U])Ɲ4Arn)>׾Hn~)\+)2@ܺu/nd<Rv4~P9IZe D]݈,x#w_3%xS@fn-ۭqW{W ;J4xF,mD lq\yw23}SSyt弙Fs{w~<Ny:KZbR<08yLp] E ʓ0 GO+i`Zn;t^Yp$(X~s<夽h\aaƕ`4Tu X_l-*M3dX wzGY WXZ7z 0L] }-;2SkZ?W _'Z@b(Zhg )6h-f:`ls9bc >ץ嚓JaY}_E@~( 2̟xEpY` j?ʁŭ}@;6# ^^Ze=EaXJœ4N~UxKӷŜEB;2,gpkrа"Ah~wElzEم|v +owPz5ppC1`ͭߠһ\)X^ h)fNBJH., #0rS4c$$4-`U |ĺ8L,=߰vZ6/[Wiϕ\‚yy ׊qNZ={U'maAjڎ\OeucE2ܨ${-Em$ar6K`e )+^1@/Vڲmg|wC>\`)fqE>3~\ -h,:=^ɯ;@kp 5X[#hWk֋ Ud]D{sXT]wυW"DŽXֲam`i%xLM_Xv:?&6"٦`mr7,nX_#ε꾫B`Ӕ@'_mu2ɞ[`SԘ,ջuNG=APF`^N5GWs9bk*sy+&*٦3l=b:':)*2x@6r?,>^yt˭FօٯLmy[ W%y, > 'y@nWjDV@=}"|Ȇ͕+jU_2ax0?`6$*K @Wl:Ja!H@F=;$|ĸvʬ}ݻmlZЈ UŔOܑW2M\|Ox /ܓǷ-_'ޙj=tb/0J7k68LlG00#4_rQ Z|c:e͏>q^e Vn[B2S6nFsfc eVPC3(ۄb>mypјf} |ys_n,}k/ UٱwO%Gf&'uNEBwlIqk,0w{]!mzz a&uǞ2X F;f0=}J?cLvn_h>hӢs‚“0hpF.PZ+!:^e/kjn,0~ ;tsqo%;̐_Q> [;97oꖁKVw"`rhL+u?0dT-ˆ$0v]`z{o&0Қvq3Xiccp9F.{Ǫb| Ƨ-}jޱpag2yCgK`7=}, 9X3EJk\37q2fX =\ޭ-o4]d#~5, ?hB/*WnOD ط3حRc:Fu 㭎E@3U1[^7Lu@kS y65`g$!,TgJZ}Y ĻCmV4aQýo'6TNX/+1d ac^sޕͅx0+vu#>e{de,\; Ň?WbX&0 sqߚ>V X[v4(aa`&~sKRCGUIXtT!;8nPZdž'nKc覼0O`8 ¿=m*OO[4GX XFXEV5eX81+~h{o>FO0w?ZzZ^.O3שoh1hk 9qwb}[`+CX4<߈ ?]4"d}ݱ>F a`j~7%Q$*%%h"0MןY+T K9=%&դ% [wylC!HXW{s?G.t@H\Fm9ajj%4/o}OF !mvr`sv<܃Vg<~IKBI9iO\z7X3Tܞq%ώBLj8g}Nצ⋍70 0wk>Ahֺ,XstF77GMn?.+1ϤEnޑ ?1#p%7*Rlڵ_'j7AK~hx>KKM8=>ADFqa5t*G#ڂ:!\p;}:*u͏rK7Tcf- 2MOEL3*5Y L`~j;#G.'ru: kA fԫ٭J)b],h>M@I NX_Ў+i(""5|{97(]VoQ5=tV`i6Kk_}A֑N\\_Q@ѵL@l/.B1)+_3vz*6Phm'> 'b =TkĊ0OnO29 =ګ5l[%IP<0+k?;o YG]vh]5R|n`+\q߶"XY_xwm+,<9~L='a 4BJ"-e#_ko>,?irʰ0ey h{ڀQlMh`²5Od ׋y/J_`G z}(,r]1ɠq1崒(45>o}鿟Ӭޗ_ycU{߆\y@ocVW\&y[f䩚7u0zox d¢V%O`il)iyp9$I@]yPJrupLznK}ɮyBSlV?9ܝ>1@Em뗪Saqյ֌ڷb7:9yaV0v&9q,ͿSpXh mS+4z"]G˵sL_rϘ!Os.)N0.Md !PJ=7tpm )|]:pޙ2`IqgiG`^D,}Rgw?״vy3<O|=!r+ OS;r}~O%p3@yʄQ+!Y^g4v0^~:`|+ 0O[c 2/r=Q/&|˰D.}Zuoo<><0**L 5;3or<5_u#D'mPgw46sy!k#k7ޚqI4gWtP/B0'j={ 'kj9_" <K}:#Ÿ^';qW-觾ĭMRMʦO%j:Q+*7{*+/',4RYAC JzeXl,7ؘ .YX>up$$7= STF`Mk w 54Ǘ3<0Nq-g+o7-\}T#(f6 W6uIIFD)b٭ XpLޱ蟪x[ZgaZ`]4({;=zpZ e!^ZGoj~RL}[rG ;p7BLSہ+t,`i>s0Xd* tU W5s/ef.ԍ[\75ފ(;ȄOf1I7` oz$o0\4ARjXKNJ?b]iJ`qOsYo:Ѝ`Y@~yIk[q`辯<[f'aA!Ǚ|耺K^2w\}E~&/9y:#[0l4b={egJ]o[ө KEG;1\1I' :~9K_Ǫt<~oAH.Ctx40HΥ=N' Y`aנ̧lcͅ`]I\:HquC`7ު*yX¹ k,{8{NzKeNW` 5M:m$`_9hJnr z\pa> &uW{<0W]*Ǽ'eYP|MZz@Sw0]6/)_ [/×kub!yx<0+cC_,q %G n h!ڊ^J(_<^X\9^oaK>s :e䲽i[9,H.|iˣ>#o -K?`ycS4  6kXƙs0S,+I'>}? Xn,alw[vg"{j-s>r zӏ,g`*}ި>:"`_!>u '-7M&#W=_AN&lLRFÝ|qX<}Yؿ-T@VW4i,ڋbq_[Ɇ5X\cܹ ?od[",q !uLFc?Km/~Ey `)y BUG]Sޢ*уG'x~0&J3Vۉ@_(xjU|Gq~&^|CXEǂ;ApSg _oS`x]ݙȻ篟;Lm!5aa_m?Q“?4?;6 9&EJ E2 ʈ((*%Q-e"ݿJJ+0W㺾W wj;AM%+@Q՘?zUc3v̮xyrǶʰ}fu7i.fLlL+t{B#O#û̾rډo0}o0;\pk!Lb_A+j{#ˑ^g[竖`#h sBwpVV|kJԓGGw0{;y.༛h*fG]_ 6+s^QhF}J_27̕f{Ea a v NPAHXj,3r2/+RIR/xo˂L=/ŸvUmϣOڃqaNy,`Z5 ҴSuW}`Nh[gnsolt:rqWe0se=OO,r̆/Ή+vx'&Jп>t(AvV_`@PLa`OVLpdr w)sQ 񚬷-fTeҍ幑 ҿ!x'2, aSV9uuښ"['A`/:DP*ֶͣd=1N?X;!1Mӫ5 ғ'+7'x!vrJ@ֳ`VN>0cAV];|e:&]_L KM$ %߭'s"HOGu"_JߧZ;m|RdGݚlZ0b/:'$Y ݥu)7Sp h!ȝާ dl_ߤ_߹Iirs"A*!/w$pu^$iF"o)O\Iś Z!u.wV<ףtUl%_9;.v^+XAs6p*q~)ǐ&͑"x8* 5ϋfG+O̼= ȱC? ?;F3 ޕg>?g䥬~+yo ; JsuCrսKJ)=R+9Y< 6 mk2  W"E[<321-ŕ|ω:"V}= *~ s|3ǝ|^juR >?ߗ8dUCFcyJo/Fw>A9mw()}ox&I~i /AQ*+!>V oF{ȧRR=GZ: OmeKn&>/.A_W @,69\Ǽu-3A"6o[$@=}G[FA+A.Πn!8*U|Aݻڿ* }8B zF"H]IN|ǣ^ŤF|\ ?}:{ έ鼜oi5"|ʓ%#'{ :CxNu2藇zHPo~~><}l47epkq)Kuz'X1AIѴ7v n8B6v<?EWXCg=4Cڰƅ ԇ+r͸'/*lp~hL/Q3l?Ul}AjRPC|Dv7UW&xy~5- j~ek8sأ'kW-%;cc%;*alS42%ɚkA=FJeVW x6jב ~OEpv~]-Z{aB|Gj[] Y^BٜK*AۨAv;GbEt#GJ'寺^~s _ԝ$(irϻ|+ȇʿR)I䆶,Diw'r~O"ȼoC)suEV|^vAm[= A)?lSqN'$kw25/P׵ZBw&:o!\$(ś5qL" [˳z#^GJ}mؑNA . 
9y{A%̅t: C7m0"(NMA*sq r"y1?o{e%jM;Y{Prk0Eik7C 7A8+=L o"Hd\3&HJ ^W[dY^BaeaclbNA>vH U(| #HmbRo-{tSޤ1ӷ f^*r93 t8'!+zvM3n0O ow|rm&e߭u!j.{&Zsڻo!VSG˓lw?~tN4A+S#x| gc]R}ң08HIP|t'(ˣ\S ʤvn 3%wK6|}iqEqxrSa߹LEXZ+1 eu~!M=#x} ļp%o$#AkX`|G~?EWǵ G-'>?#&]_4C[vD#vU%n\X쵻Gi&!Vo!Hft 6λ/|G:IF ɪʾȩM%G(u< >߁ݻY='z9\XN"\FTIŨs?Ys"x_մFۛrӢA|u؈uϻoʿ 7}jW_*?_"x1_eO +c5y۬]2K޲gs ŠC݂90jk;Ug௾@s:[y~#(]߭"׵ p!qiAX/,j~A$,xk1tGmÓ˄ k[tO]$˭%r˿tA{HP GjhG}BO!vw uTJ=gÝ*ϓޖ۫ǚ'*.*qTA!g;AYuVuWܝ}X1gǰu.޽@k5OWԟo%(#^<ka omv>j`` g~8'ț{wtЊ%xo:~->9"n )5in$^%up!4rjFDU(}'i/"{#Ը[}_^+]G%>( oLJy AHSF5i^?$r,{؈@AI}9A~(y:`iqwH8}IrsM\a\'s&zҔɻ0gr~~eKY%w~ Q'{ԅYp}KF7c~üQJ/iaEőcÖ喝V̷c6Vx:`AQ3+dU&AI`e<@9fLweFW~j8|{zWqoKD}b'?hs4ļA|ГrZvN :>Zu;CWpN)ݺ-N]]HHA?y lBtg #ԅj0OoL ^=}Kj'H[)p&HS; yC*'~ܹ\w^u[e벮i@A!;ȳ ?:i`?/-'ȷ*7[{=f##"F(A~6UCkr?# gͩf/PV6ڂ%|3yTIZ͢Am;ΎJWg\Rٲ6f{f%5+= 3+ rbS)GzT!=FѤq~u׊"njch+wx.AeQd6sZ v'ي;C rSB^ٲ4 gP2 c=?"H}_gvaް}]rYE"Aֻly W 4@d_ח$El`2R+},.BRiĂӘd̖H/¸Zz}cZA2}>^ywSiWi"zԑ\#gn=E,lCuو9]!xA><7f*Truѻ4ȑg{dʭ=*o C_A=vSN[/yzr3nxb@Q |BgW!?&m~mf)=W9*[>-{u/"QKȁٲ*AB]n|%HPf\]jVߖ|SB@/XUChsHזٱ G'-L-cD GY&a'AkȻnUEĎ]y^a}./jyc.A1D43KP$:icܱo^:Az(fuQ^{,P!4b#`XH(Ŀcn&xE%8C6jy1ׯֶqE ~Gk}*S89s4yF ]t~ 'ܷ+K3K J-߉'xncf~J|mOoۻU&f/)*[pq++*D[ PJ|b|؏kw#*-fqɵXX|*oaN[E#5ipCA>hT`CPs'xu9 -EP=ͳEBe MbN\'+4 Poyqr~fܬo7*0q~bNP HFO[1!i~73o5 R #5L (Ƨ[W sB :A"Fc-ATs+8}_> &ǐ+%q=ňCK"e*z1T1#-|uVՁ9t'bke>A6W84{_!U/;" 38tϕ\#//ġU%xHn΍&EBk&rl;{yԋgP?^ h#ȳ&n~!CTfwgo~=F KM%HKNEƽn'֬s$7_<|\ ZBj\&˩;(d&~ -t[,!u]. x`nxaz08<5y\A^{aRW̍Dȿ&[PQAZ;,!*` NWsfkW6>OCE*F/F ;zG|5n#_T6v[e)EI= JaU#.3vK"Ȯ %~Z^&^9}7gN݂mI 15ӛ}< ל@&~:u@L'(Nj5#/M}~-;>:,{_QSiS'⟛ȁuԇOGgF#_y @UlBB)ioޏNwg{ a]Zv7}prׇO]#xv'pO* o-JE̳s<'Sd~)A,w6B^sՋW-?={uI1'4uSU>i m`8ϭ 8YҒ^S en#|~mkB%- w?:&އ`Ju6 qUڍk[) _cNxFhadto|,KEGq!(7*%Y<Ӕ1X΁eВ{k0Jhz 귛 Tz~."qN:XOfwFE乩F_DNm#ȯZtx>?t}ºvE9r9*}\.Ԡ73?};>k`c7m|0"euYŬ,p{ eF`:{95-P$.\ X~\xXKկ [+8q{fކ?e&õ%CSm Zzt9eXVN:x?-n\6e{tcipX/stDNPS`V*,e¸Xلg 9ׁC(>G|Q~ )K5طun:֪rq2>gh#2Zjctd)0^L _kks>[`NґnS >`]Zd$t ?&wr1b`=@[^?o.V9-W9]/m}2u`)Z{%js- v)3iyoUO=z <7ysx)n7'zlQ]0POu:^ MW[Y~#o 0C/)a'`+H? X#*+ƻ{+bF"Rk=g` O_? Fj8/-Sd`]y/= Xݯm̼ET9BܢU#Zbs0t5IXG~f.>6w{^,vL '~Y}TRSVnV}vSj9?sfx0|8[liRlTvЌF>0зLm?W }LNV :W/Q<㻛Łt}:̅]r=g&X_L-mrgsrq:oAr::쬔*ЛXO׫>:dg[F/q<`gTصBb5i(\.> ,׮Gty. сQn(*_}eRcd&Nb?}svs s`77 vGy#es`YݸNb$C-UN ff Ӂ&D)GV:쟻\ibr_.>2yMtk-w\[Us,k=Kos?S g9 b`Nv@ʪf3`u㧇:1mC3a#s xzǀo5k`_|ג =7 NK=vإ' O7^s~~)0s.)ͩ?sp0ζ;=:rg{ie=إ>w K`2vܕ O !%GMLcOjpu/pN($ei2Xo}x^((N px]Xkpίwgfa]x-q?^6kj@F=`O޲M UGkC6pr֡|I YtYGov?V̸s <طZ [R`ɊosJ[JX꺥o uQx wS+SCi> g,fhq.<1+U}'lË]W0>$V  ;tʚߏ. $ =}VUq8A^{$l0}q m9잗8N=+YfsCeg m|睒s 0}o4DS_c]mQhՅ.KɌ>VE#OϹPğ,~1 G?ww_ZugnY]2簔 `}xQG..\#{Q7z> ȣxz0Sv~Iԅ}r?gƉz`_vB]%u?ruSU)@}m^U2C6( JJ`v Kz]_'^'{}r)`UxUN}OQ?Zn~$9_ CXqmu7rYg6ti`\=~| 0]m:oA')'cܣZJy*VK[U7h4ҍzsI K`tДk_=מ؅Bn Y~r9spըl'r {!37f[N~>pa`||p>NXIBK/i"eұ3WX E[j\z#;1YLU&`ǥ7{6ׄ͋Ϊ)!ׁK`rfX*lmE=߂KC8n..c#R{ +ǚQ;|ӽ7Xl*5.`SO=\csg;H^`ŷ?*}~f#[֭W@jȟM9Q ?bW}h5t J6Ks>k'3ȁNXJ9k#UeclCܗY)^S]`2߂_}K G3k`GS "dj {=~ÔnoFJyu4hA}{]Ƴh?' 
KLG _!0cԺMM޲j2W bVt`xs:~ڇҟyvtu!Mwgs~\c-LXv7/>!yS=ӺxM38Gg.744B;xa7?P}a[0M0VË#l7Ѹ{XJg[Kv1_A`%ݬ_ X|[6B.4q7??gzFs^ Xү P O*qN҅n{ݏ&*Q,5Iuz-:Ya=yKn_~`[VE^M<(Կ}3sgR#Jrq_rZWs0W|.y &,{iP:]c'MB^EvwUSm0Y4D>{ a p ;F>E-sŊf=ZEsj=q_9ψqy{mY`>oY]3Q0csvKo(n`?5gK?;| m tH_: 4!>5BsY>^c#-ت ԟŏywI!|Nj}yqewHJ;3IxeM8!7LgZ,sƆ1uNL?ҡD`~Wؿ"U^t;>Q2wax6YGIX y\>x+JT~|>z uJv?-Z'(0tU}Ri?1oggPObABTEGQ ˬW^q6f+W >?ʿ/d գo$+7MW?`X r1ټ r TӦ h83~/|g1`{9 ٶ}F0r9G;`IIƠ/P_/lM$lFs* Om>`]WS/޸;Oz^XQyouAJ_m#~/*k5B[鴭gX(bq`?#vuì8Sx30mKCq\ץ;p~z܍y_8 bԯ Rg mi_[-_vn`ɳBqkg<_K9JuP>M}OG#MF/k Hbv}1.&`~SqM}v?8Yz}~_I_Z6( k+;9B6NbVA!akSx_쿸qQ0wM)I, Ok3;M^\Xq 'EZ9,?L`>܂ϹɰLŖs?K oKBbm,yؼI]~6t|&|^h?u)CRYl>u ?_M S1u_`e][!OgosDn8fwJ:Kwc?RGrO L?a'+?qcjU鈘c+ HzuF֋]}[a*^qXؿkf0Wb.T[+՟~$9!U.'Sb_6+ ~q96&+jIQnsjDTrQ%<0;NF]ϸI q3?m~u'hxl4%X1S1vcNpfzpD<>Zs/EÓ.P6gfKaO K<0r >Dž]K1G0Rύm;1ocTZrl͇%G-TFB9Km}j?L͟VL>5r^~#l;ԯEy ]fG"xǢ~1/f9zy5P'~F- SsC߸}m}. ,-?w_T;3Wݏc]NA92~e5~I`>~"8Z_+!깰яwmCQqc|pŏPy7yNi;&G_ }<3qC܆95suU,렣r|0PESpeO`^WH}JܳO=a0n)pn XE;ܑQ̐򶖍Cn~SQu5nlI=Xw8e`>y\@ٗO}m ؍VdQCzWs~U5Õm\ o!֣opXwLی}076 CU\;#o\ywp@`&z}/8_TfoaOob[зx20Vt(J븾s] &`R|(O`K>ڇ sorw`䛟L{H,X8OAʍ;GA}|2PAߠ՗Ln#&2\]W~S6`;/+xm`a׀|}3pP?kEߋM8)OGN"ʸvZޏWs}6ex O$=S~{T, e ܩ8AO~kv^rk&hTùӛ) Pb \l'g\z ݙ\CE8~b`ꎟ~ ])/FH(زGYEQy6sG>&Z"c#0ަXݻ w:IXpS1-s =d~ZG'}3Bi߀z,FaӆJ|gHuzRc~os跙.,8oܗg`?>fvc Ս:p?O.t`NښVpe87L>ڡ<\~,ū-nkL C.=Nż,[U7ÍG16itG^-պ? tW;,\CkyVN=Z:ȿ}~Dii,`z{%`@=]eαj\ᓡa`>|vWuisrՠi\+D/Z1x-1؉}tD\h|=O'ESz24\LwLp$njo#G;󀚃w[;MA~S׬&5QGM$%&~n wTVYyۍOX@gNމr~ G8'!`dsrO/u~yi`]`)Ž%5Äq^/YnVp{W$, ט&YK\#SW: O>t0Gd2kk#h,nhN\_Ioup.No6'+l o ;sp\>/7·.yj_#WF|ANji߷x|$ɜA3I4dmipɩC<}aٸˊq?VW{] 햣lLjq̣;x05o؁%#iV=*ng>K', GLB 0olĬȽʩ{VoF2fwGm wi݂ˎmF?0'@f5ƼA ]dZ|0)w'.Z99Y9% MI?;|Mj|?nzD]0yX5@ikc,vjm=R`O_ ,c3X8v3;,qO n2R[c/UI)ڼ}XkE~[}@yrY+`_5@~zPAȖ@xg 1]ݸOokg(T3"P*ŰNJmdczpbU<]<3Em!W5p y#ZY^#/5G~x( ``<il9k=cU rYqtGEEolʞ:3W;)8W^ x)8ߐ`xS.,}8WSyMIxUU|8hpu&c>?`՗TKl[7i#99*l\=e,!]X7nzwF +Y9.raIUl$8f?g؝ǫۭ}~GE!IlLĹdi.+Wg9mε V]u"0O.?ԙ}iM4H`rM[`.15wgY /a:bMдrʋ0&w&]0,]iaڥw +W0=rppW]<0ٛ"{B:zU)'azmQgӌ"pvk~ifQwM]$)~nhpwj-գsx:p|oܳQ5d.O!=P\#qLWh.e5v=cgXO2ç>C>%UD~)?EϺ&;yY@`10|}Zw^G% 8.xOm@NYpFtP' 3m o3p|g=~iqp]zOD2>@}DW2B}=~K"\#_.{[[77jIv&֭z"nv-٣.]ɋ(} "[QuCguolOͅ8Ⱥr ་9JДُ.T3و{RJϡS0s2 8wXb>nP8>nY|sr/-W-П%`Ƌf/o>2-[+tߝbB'a}l`6rqOjh8ob{sc7Uݺ|2+4I<ؚ|c0} %Enj=^L:?9aJƨcg븉tqԗGﮂl peJp%r#?|͆p7věG+=lOiۮ'u] X;Wiߋ2m}0L3S 0OH\KR <~`WlaG9ERz`3ISX %}xɃ-.W,9vec "6 i7lze⭵A/3,>\ܗ f^=TN4>GO(|M#)%sYwH.~b\X@4z8pLhs/ߌc>L3j4";9Ԑs;d 뽆2B?kpN[t?OGy7օR 8M^0IoT~#57!gρgP\![{s.@>F;1|6GTZs3-sw(LZiFvw9B1C-KTp~_o#Gˌ4P"þ/ҭ|g5W+~)ص:e?lhTwc0mJg,{A, =8nlԳ<j~|L(0SmZܕY߻nNma`!9G]}H'ur[p=~0iE߰ؗ)U_DW!^OȧTYÌMLﺉ\dMd&b@0pK0,ݚ(?7bY\!Y3? 
ӂj4ם0֠OO1@] Ɯ*myc&v&xD=Tp0vVlvnu`Lʫ,槺+4IQc7& oiE`蠣 p l7L5>+38pN<QOp)i΍p0OW!gn`FGxq0)>lQWw-M `n]s,9~6*]-!#'6;%߾C\uo;uwnt'0tW慁{a63ŽX+㗓E=_v_s,ԧ0|ҩ=rN#'>9h{|m'Yzf+pX9:t=J7^{J.(B|@jȉεȇIg^ l/W~:({+tYKC?"ok, ΏE;p.:97|e|3iQϬ\̒gXGf `eE<9 0+lڀ5Yq7KW0#wƄ~IDicNQ {B6/y~ՙ˾tgkL=쮔[H >2%ܥ`:'c8WnƃC֊FGbI`yRaU0R0Nf挕$fZ͵5j+ru0'0\;q}x߻C?a欺ˣwf#31rQwjG{"Ͻ^VuHc ϿBT-g!ǻ[Ew0=>洀@Rnph925rr{Piؽd8+Cߣf߉:?R'}m6%^ޟ{3ܴ>9F5%^nzxXo]?fؗt؉!͔ <"tq<̬=:sUySN& ٴ8{[߮6ׇҋ]WlZ!Nm7̏WǀUm&{NjߍX<׏b38.8aI7~\hNGy z X-/<7;4jּf%ԄhzE+eNf0sk&<8|x>ިؕSEXB P_DMfC~;[mԏVVi+9NzW Lo1c\>=a,-%\+44#Y.`g$.Nx|qmbRyX%|jY-۵9g/LkЖ3'^g}-#*sL|&?ƅKs9"}SE.\|i"X#O]rZ3K1(\zd0]GU^Ԫm}qm\f- 8ܼlBl|i9m+5x31LOҗ[8=+aZǢϯ8"7i .c7.yIQh1RXf?#h}pN1?`ao&`] J4Nl5/MwT W_[l?Krя=j5l]3o%[`:v>iZƌ]Ν.QF_Gu-pԴLՂi >%\`GKݷu~뽒ϧ`:Ig  Y {sٖ'_su+yzO+G0mrpW䢂e՘3 7?N!OfkBa '_O߀iKAfP* Y/KaQwA=fԅ4tu=K4G3X:8 ֮GD9kzK7oɧtmB^$uQ bs.:Cr8-K0'yB<ӿ(^AUq-U/w:}?rnm]o;=X+w 2iE=;QlcR;ˑ1X7aaLmq) {.?/#/E3-iE⺷Ϫl.E̦/Anl8\{(>2sr_Ƈǟϱ|9[zu1Owkb.]pgs[c9Y S?c#Uy[ m\ 8\i3qVOk~[ aÚQ :tf܉x_0Ǿ VYİC`k1{ WV~ǘ.Yxد/P %KϚ9$|Aqx?XIwDy8o|AoPw2]K[7ĊGUUAe%K>g.HEéfjޯȺpq_[0crj"L{g%4>~w? <5)*xM9G+v.ڃ0=0r1ޅ>=܇ٟcJCYU[#*:FF?M78Pr8kǐ; ;3`ƵG~8/:~)QnJe`zؖqJd6-/ԁ&J" s,xazuDtM\(\ͥ 4G!@q;!o[aūIU ,+]wkh;iʖ\kGESH0ZSa*r }.KQx_jn6t;^lR \6OaÏ_K9l6X_|=ֵ8/~b^3Sy/1/?[^()yg3gm y5@\o;Mf;$<\MZڙK:yg#BEsw`zM(iujx/Cb>䗑 U~U/GW&/:F=;:#ґ d6dR:pW8͠ac.)cYSŇF- Zajq1L}gv8UcҪgE P1OWy=:_1jB_ߎ}Zb rNKs:J3oK<Ĺc5ܓ=6bxS7n7Zw z-,b]}G%aȝ~87,8+ug`٧6.g3L ܮY{B"+WMvE635:xjڥGF3I^tHX QQpa^?|uuO69ω)1`\=Ү^[ ,yZa^ ',?h5֥U^4&'ΑЦ4>A8_לE%Mc/?a>Z[m͌ mwjheze a{`+3 vckߨoStNsĽ>T*,¹ui98SU=`GiTSdr$I8ն`KdWb5Hv;2$~OTcg8mz,rG r8q{o?EO+߄iMI.p~k-=ⲑzVXJ /g\<,K^H}AN ƀ}i 6fbzn6ڲ u<)URr]JaKۗH4Q^p}s;<7 79>OXK M0~}cz#YSKY|N:!sd`ϔkL/)?9 egح\OŖ`F9/ ]Ru@ X~g%{&kBvQqۉuR0]0C$NF&LZ{༉t5SăAϪ@|5N'׏Zݢ3hwf9w˂:&G -k _y,S3߲=!rC/qh^sgl~@;rXK hrOEU]d}ДC+d1rC@w/r ]k 7=/~V.h/ױ֑@?&7"Lj vM`VBkeVc9}2.G_4yB2w㙭i MoQ;IZdCή׍@;ıR0}~s:1S?+ڱ=bgڿN8S*N"̀"2nV-30-O=ȇO_Uq{*hFL# ܻG2vK]@??IVz /<5)-j~9=At(O􉻹@L!/S؇ _@d0J8r\q(j'@?bVP@;z;_; hvAo]Gu=Pی멾: Ѧq;Cfvg $_ (-~ԱL7o oCܮS|t1x{-|mh2uo^ڲkGll?mBI7ܫm#ohȸt%Aoz,U[OwΎzx[@?ob|]ۚ90H3K۝QSHe@7y㺅 vRp 0Z"mt,ЗoT=J`Ӏֶv?+V+'!e`pg7mu{I؍koP嶺r\Rvrn|&n`i9PqDЉUnsuP\*)`+ihg\׍}E'+dȋֺ~sj'B/.;E⾻˔V$.'Tø} _:N۞Ɗ@pϊ?.Z,+PFR`. Wb6xwigRO|ѵmasohoKCj+:Rݝ@}YX!{ֶSd+_KSvsH xNCu2ljrGx0ϥ6\Sг_M;3I$q%X=Q͛q"4]ĈM:KxVu4c\x#9.Ju,aOj8ܿ;c& q?v3pomb!cN0t.@|aS67n\~+K"nԟ}Ax:ٸg{}f"-!UQyQ&P?/<69o(xv ݒ,8nIDX@ KwhlBב0|hb~@?񾱢Q%~_r>@4|~dig 911}cNökaRe? }geUh4bĺߙЫ2ԢAr[[@;N7f&cY"KEծ~jePq69mgm^`kьkzy!ηci A@;P^՗ -F R @QVi|}Rޟzg2J Ƶt\WI_&EJU%[glڳYB07pI@#?GAC}뗠̘,[Yпv=h)k<,y4~P2▃+o%u3. m|M#qXś]Pk4gcKN YA[=3o,%ۣ&,p9]gIq6O 9Zn웩N&o؋!="~8 o- "WbĬ=SZjV2eGOc<abc[ عh|e[ꛇ Xg=Y;?rv Lɦu6nP(%(y߱r*l`@:H{$Pٓy?s;N iGP[xLVn8_j]on2=T N= %a{ns#+ZGK8jI.RC|-rt@-whz$[#Obm}^-p#So.6^ CsQ?ڑ@ɬOS%ئu5VScMYy]Z>Z/U.mֽB!/U /W|B>@zec 0'NRoɌ&7,%䑵 T<@zE6KUO[gcC[24{)8][\ed(@}ne,#>˗snl?MnЄdCR7aoFa|/"ڢԨ9@o]o+^Vuyw̩ Ow~bz_gk/Oorbψ"]QϻuƧȵ)Fg>͵+p>c=콄|л@}$v:n&g>#myr`hDH9@=2MA-@c9ގ~՘9dP 9bK9T dHy"bZ]D?9IoƩ֟W*eG@+ΞJF|)]t?qR.AyNUg=vy@06G_ IBw:Mk BɨKECW굻 rq)Jśs/G>y;芇q用ڀ酋_9@Zk!x1$ ܑZ%%ږY\"0ܵ>`4M'Ӏ&[Q[X7WDX~'?P;BèLVD(vDM1o*Aj5oojW>1K 54eY[*?4stwp!LZW MK@ﺙ&B*_}q%I se]NQ=^:*_a9VW u ЙsRPG/9 ]&t yy7oZ$ }i&Am/{B# kݗDze ~(ka/ȹ/0 PG5[_. "o9w+cm!rp qr3z^/d=P TuHه8wͯ_@7y trg[Ny>&{a:}sQWiB/`O7>~}"XX_ieU Rܛk:tc/GZPoD?|ԼCq cu[pO?HALe#[/ȕC5@7ߍӭYEK~9nC>Ded9+n&S뽁[^ԟܟwc#O89!Y:#͚ u|  ыe@8q2Yz.UXuxcC@;`'H?ñgNys~Cp'Ф.fǀ}N,Ƞ[ *w߈>W;Ҏg(A~{ 9zC3HD=PW_,u]ԯ [KG1Օs=+ruiA7}k<5<6X!(F v ;f!v_[eqoLS&{\Olm^Ɏ_cn]7j {u:k}cZKĞ@vh&o C@. 
cӕ'B`vF]чEkWW;e@_מYAUySt нn,JKSOjaRcKek8eRO1r%=W'F0e7VB]dWRV]!AʘYZrNTL~>UlZX?/+@[6!!!yդufk)E?c쉓̀^5}D5rږmuv@5$w )ٱ+@kW<Q"0QV3POF]lQ58wBYʘL"^`6\'Rך:{WWLONVJ:";~P@bܬjwKQ_=Omk|v=-KGv/nGF D\?x~Пpw|p,0$_(SA]lN8RPGJU~ W>Ө܊>[—Xn+eaHa 9{S?IÝ=5DžoKM 񽡜 m>6#i+FjFC%hE~XՁ5w:OOݯHF%}YD>|} Nhij'ns\~^;>/+d~ǹ:#L[>`._ 9ۋa}j\G:އ9;]'}QOuv6=%H]犖iGԝu^u;ٸo5Rs2mg:Tۇߤth[P;z@_u7`l]|ajͼ2k^\pwQԷӧza\36rv.Q^y``1oD =<{qs[w^kD`nNO;z^#02oۯ S/g gNjs.~}NCȋS#dMxן*ZiXwE{9@E״Z|{#s] ǃyvu*?M+wZks|pyBj@E%M"g>`N.((bnw; sn wR TW_޼j|%DbzECS`Dyt[Ko=kĺ2uw #cB̅ ;jGl䳁%Rz?s;n''b]TcT*FΏGZ-q2mύ_ ^OARs9F{ MP'(* Z iI}7 c1zL{6>$=g|G7OkϨ?cdFnNon?ŕ?&_c^H9u?ui;ߟ#٠sOw}kV? ܨ)gyD&%+,>E`;M12*r}_;`\^qw_h㯨i݈}{:Ӝyh|Hhrם^gߊ<>kb9s6ڕQV,Cy:vK&̣C#)͞%dz)z0ᗲ@9&p7[ ^~oךQ)ԅ:+_"t? ̹/Z׻ ;r'=OW?f`ց>d]my$3s븎/lLOD0|!7>K5 &F21zq?hx[FV+{]ʱC@Us] QQ9lf!oH9f:Lk0ge^5=؃{q>W~(z*}00/3(Bo~72@gd4{ 5@Mf4KA>>}e[=]F11y Bhdt5{~P{+ֆp6l.5euF;l|xaW> T{A g&QjJf't!i)hùO}s6=q٣; S$$ TvPHFCmYFl;?ny *0kV@;-j@'kd O؍n\{dyKzlj ڪbͼn\g䍙7I@u7j!0E>z)w$nx_ gWm >" e%x>}|Jaʏ{ﱵ>(O>uI ;?=saOC˙+97>iC(<t&.a`ix.=Ư?A<<[6XS"NybԢ-Zb}̑7u㗌YcCQW:I7UӴ0tR0Wk\ϟ֫B_C?-rͿoWz{:plNMeZw1a?yHr_.䅵ca1^ýsն\@(`߀6= P2ƚߑ;ּUQ:S>}074y7t*w~Ya:LvYb>A^P^Tn0u<3wv7ιh0)}_ýۀaЮ=g;$TFIkfrKI7mEA`B%d%եq` i_{7Tw(nJ| KZuym'[`[EtzEAUߙAiVy (;ܬ" _]ƦQ '1}3=|#4P^Bkې#' ,Du=.5ycU Kf\CrƲWjjΐJ`t;G4o(;2L'e_>g6IRП.&mxB)#lw<ud.|| Gзu;7O:XԃqcB"CCx'<tMʔ׸}q]MN wZE, ^*-@nZ}9䳙C:,e}ms8aGye cr('ƻ+^Anz=W1^tZh/Ϛ0Lrnj2ѽv:o>-یssOH$MFt0"a>Y1%tA ??&t |@_&5h⊆5{8ZTbQwi)BsNZμ_@d%qnڼS bEk пZ9z,3u{BE`|k^ ꫜ"bOܣCLs!7[%팣_璧Ts_>IG&o@=ss^zܗ6B<O$j={Ðw僩 tp(_; QRdƾf˚$ɖ-KJHEDٚKhUd)T*K,IR~>6{-9ia!F?3gf3u>J~1byA_Q;q{b u󷽱M1q#7`NB h3=`\^-3}3ʒU"x6[u) >وze bvT`y<{-gHeI̯=;~rf| }{YN&-!xf[r߆5oFVE6ڹx0kE8n$\{5D}}.ЗF3{56}}:qOާ9Q&\35ХhP1;cSpu4j,?ۛ.cWZRL;|[sw-j1G7Gߐ6|ulg6q,}8j{ɁrY/"7ΌHxe+N{?'0:Ǵ83nOp,̮7B?/2p ϭϣo93a<9˦޽n=,ecע' 4E˩J&1GW0F7z>"tYE3խԛe̙Ny^k3Cz'd pf[a< isؐ9޷v"Tm9SJLIgERϾCW\.Ԅܡ-W\rdytаmN7#?BKD(djLώ:>G z2~C1/ܗI~5[*g~Bǿ94<l ͙?u6RuغV܋w/NILO0zMKpڸKA t*a0m!30X`r鏼MGEN4vogJչAꌽn gf~xpi{δq˪O ͜j>D#g0KqB{޼*g7M9cT_ x|!'ƙt\[;g6r?s9e̕1IIL%jJvL`A9^'Q;,VƑ?L{{F3Zmr83/"p L kxKRu/NF7Π?0ae[f{s-9Μ`uv,&onș7-3V'p'gg)0-ΔW6.&-RsiUx<yY3<2/qf]x)1^o˙1Y#ݲi0a^S٫1n cjq`3Erg&k{ZO+iZo}=P[kCp>`gj>G?g&fh-O$1VQgZ yG=D},ZA/#/4b0'/Ľ?7̂BoO(g"򦱝st΍8ߋuL+ږu<=wNsZnq`^ӨuT9[})gVxY,W֪HZy[q9y{i%%QcW`b? 93_&f)lU%m87[.4;3ն櫮u/_L%]ΥʙWS^I9M佄3T[HP+E} n }O\N[1e_u$ܱrÿL_he ~5Osj(Or{qknL?:꣦4%f&#_ #gx#^Ù-ȫ`Lj7M _:߳/P[d3py)^XAx/Girf?Ekl.z3ę~M3AW8wjggFx`-g-gk zަe89%BΌfŠe>U{0ծ=-g6tX n[gmSɳX >..^n..\\\g<=sh{\{GОkZ ~`'t w0{??O?_﫹U\\BXˊV G`=jXoԪ}.ws^, [|l[sPW`ME^~ɬ ӋSCzu |qņ_pZ窪'Pz6 G+c7;˃S-6g^``d}ڜR`?;;_/9_G5v)Y}eݵj?Le=m} ;k/OO>ֹ=oڎCw܀5l 'mvRՓGl4z7nۤuXUشcn`O8/]w6o;ECO ǼJs~0om38`]F: +3'`%{CK4c3CZ~|B;8(Kvw:(ַF8>?i6j9B`+(+Z|N4t^6#~(y3Rt?=:5VueI`ak%۾Pvث"w&-/+:vج f 78>d0JY-[l{=z! 
ǥoj[1eK_8wtY=M ^ ؅kژXGoӀqrekPG`%gE94eł.DoS&vNg[0} `n6 ֻmZuvyG\HBdL{ W Nb;܈损;ixFY~o,z d9.qi}{^&Wbcz}&S X#6˪Yk{jY]wX f\ֳ9wq_kStz;`Kā|e=v [_cÿXW>|QQwfv9 x`-4[-Ĭ;(Y͎D_#KpO[>< *#Y1:%X&9ܯ&+'DŽw%DgM ]C'¢Nü9 q+Irw7`wT= Ցٜ1sSyء?f;; W .HyTQtOTuDg7ZIuXskY8}C:5l[yۛj8*P} cJ+{^`}ons;!oh3~p>|gA>wͲ -W[Oa8`~~: wA`/K!+4Yml 1sdnyJŹXEX3 ?*qjZR+ӃxojO$Y㜦]iXq/꧰=5+wV1p+U0=Wx 6j )Q4`TZ'j>؊fԵ!{p@vh51pd ]`~[1Ǯ1WX_]YJI@Ӟp`=5=NX_o>[sW<;u*UȣsegB_ZM]xF`S nEYz&luM@'[;0I{=1F.`zИF t:nח_IvZ}N"B o'0 V=D^zBƦg`U~<!Ʀs?FHt#O<>n\hbu"J YtxFWLsQt%oXi/D~aݒL5y[j |GOV68^:z\ѩi Ԋw{ԁx/Ac^tRէޜ$9 L Nn+~ .mُ~#cpMXJ.i<ֻQ,#MJI&`[Sq~k]:cepog5@SٞfD8뫜wVP?0@7FG։LU7]CY!̏>[]j ۧo?pSxzcF~8TƳ{.c[Jw?O"wkr^\9 ZD&D AF}a.Ju94ֺ:;&ݘ [Db}ݐs;6)ϖ;s,- >|>X*-:jq.pG9=>G0& {+oάY7,nՏjDnTЁ~$SnW~FȝK-O_B=V]v|*.~ޏҗ;X'W z vxs7*ꕎs(8.t-UZR-U&XԳ> 9|2_I UMU3U3 QS `;OK$!wqz6%sx2 ʢtX:u$iUIFE{"+_B EɃP?Kl7oΌ]KGZgws*b-[첐7L @8GR}aǻe}`&yts|S?*h$NT;Yl_ܻMrWeg!'=uy: c2oWmֈZJg \IT1᠝V8R/b|aB񼲛6=F:|,Tf~϶Wݖ`nBNQqܱc蛹_w9/goļ~nK{lvWf@/{_̶3?1cÀXtulǾWF^ye4~nHj&;)9g=ƥg3-uwmA^D{֤-M5˲C=NvP,UHU `۝|5 op̅1% 2s^[B_4qXmr ޔ85oR1I9$8`' }v01q>fgWjn-5{y0e/ݖoF?|lۣȣnj!] @*}ʽv"YҢ1(X0n]> ?BnT{d\jW?\mp zls_*5'fn;wob}Z%SԳB] !oܓdD!s}#h-/o֢g8}Yx翄v;( c䏸ˁ;#'?guMAOH<ךo+w&N_S5Nq1x?\{c3!]zVP)ީ^|aн{?s&mz\ܸqIXٗ~@ީ݁S6NV%N[.D}VJ k_juG~encPb[؈QqyQ|:#s׿|?ۧAhݘGj5[S[sK"t7<0c O2yKka,ޜ< `pZ.lSPvoļ5'VlbuLmo3}KncP*¼5$Uqt~slCM*U*!?oݳMw_8X9qCrᖕ%`ep/ԇؾSuXʳ>"bW%΅o8xJMMV1eQ(; |+IJ:]7+,o mynΛ5XM`],BX 2?ty RPgH$1GN^ }ڬF+dbr_"fj-G\@_d l ]7xNYgJҢ@ϝ>(sP q#߷>{"yf俍WZ㜇Dn{悛} ,.݃C]P䷤Dm{:\ba؎dxOO62kHA&#Us~F7ޯ^EV2u+]z1Mt9絼}-s/U|P$zxr]ի1X4k:Eʖ cu wE@7o>GG'Jb^rbbRA6w1YʼM۶o*@78@_I蒏%!^p~=UϾ_^ ^νʯzf5U^oT(iz<Tz%7MjqGỺFǗINӍl'#L|f" : ?Ocn7s@r8/ձAjSsbW0 ůث˶ǹy}rRy?KW:n4xc.%+\\CG*1oW\<}aR9o1o.DL9; ty:E}L>79Sœ*cX˯{:JW^7ތ}ﺺGw2w]<肞XLQoͨ~ Kvtn 0;zB3˶ESVoE}u:=\SJ%^Gw@7*{O.C> O`r)F~)zc-<} /Wvbf ܐu PZ\ѽ}O'υ1qs5 >9QQ:|ceVfȣ9#O3).~ImU9|^e:gE|&>:'y[p_!$:9po[Ʃl6/qn/dd+ s{reu2f+}]k~JM7='F}Y 9)x#?*?Kzt[S$FNF]]z'Zjo뿁A~%A{+>~W{8(خ ˻f#GU܈@#40tk|`uc)!jcB;0gőtqFlwOSV*ļAw ];*(2uy# (W׮ WN@i h7@??c~@+m `T=se-|{ys8. ] O/k?MC_B @im$&{@³:+(F|;=g<>ѷ:? 0'Ӽ'ՌTU /恰ŋ^ =͗ $Pig|]=,/zfUoOa["͹ !!gۇzjJ8P*l^,6F j4؞j - P`EѱZ~(Di1ϖAW$<8W^*=G:t$-TM$UW)#">* dۿ[%oFq)sXΫ&RQ(6t+} #| wK#^C B߰l^z᧏.t? įmy%`ae(Tm qMbPV:.t52`YXyU [S~u8SLDbpPgUY@}mABa>{]ð}\)G@02,aٲGƳ@x{k3o+MgHm&I Mq ̣X[{h8袎A k}4=Q"breu/|Vs!q.hBzt%n|ރY8϶jG+\`ޟԯ[旽j= )@(5CIw bn=. *Fg@˸KO*osZ^{>)mĽrʒHaP#Q}cꑷ6nUA75җ!p",jeb{-ܶh70Úb.8Ю~ ]F怄GȾS@7<}3lpyy:{t]0Ĭed>2b*,bɓ 6߀0_o] T_3%@:w6<7@p:tlj@=[0;YWj{l |@8q9j ^+sҊ'~fPغテ Ԃ@axe"g{phm܀>'R0qρI@țd{7@NY-q30o|~ܫ`yJ2Qwz:Ib9O*y}1D*YV 06_x5)W" t)B4hg% ei:u@Y(q9—h~fcN=MP1 < #c9+j2zA:ay6X~I~v;^u@ &} яC@pwuIj@y2 w_vZ gdALj10_uS)xX-0ӟ~>pDoh3-5E ٤.W՛, 9QU"-9̒n]+_%] Z` :OW+Atj eẼEFJ_[-@QGЎN7>Ђpj3=/ p]GZbv RQa)s;H]bK'P=d.0GPfcZmAh뗌q>slJukτvگ}Ur^*m2{\`|23 IS#K@0AxjPYf >m"A$Jhoȅ>£er7bOfnr<&n1fǔZe`]D;Іg &RU{-ft?J`Sr r֪xAm=}h7 O|<TaM@-AA {c}ݚ@J^]hs|ĹXt}&1mZ!lw72k/mOY suf`2Cd޴`[xuƢ86wQ>wgm9=wAQ $뾝6hkLV#k6;7w'0`3Sxߐ߁rˉ9&~n˳ |j䜞tf:5 coB"u~BQIevȽ@#\,Osqsp^f+0u)rol=Z'*>>@M'ĞX mj@}5wBÍe6y5fSCX/ ԇ1 ߌI<a># a{ݳ@Kij_ #n  ŝx'[wjwDaH8=ZD^ywo0v%8)ϫ"ߌNv=)=31@Юë0 _ڥb]Z7!1*Q]# |` Oʁ#aa,#Em˹Y4{2~cJ\+AJ~CB=}玧/R{ޯ~i/s@hbgX6ȧf쫈*7#6ڇhA  /^8J<dg 1Lį"\tJ6_sؚ1MAZƅX4KA@m8u0Lꏄ+XSˤDw&`>zpsrVu =Ĭ$'r@(D9كVls@e' Hn! (cvB}Uٽש 9ސD1))}&@`\w|sC{V(#Qb~^ģ, ܵFKd%\"l:hB5IZmT9}N] 34'}@Ky-q #_k dTC]JW<\iMH%3A>? =3(6G) \=*Ԓ̎+Q Mz#Ql9ŗCKMń)(V&.P# rg]rhA)ޞ×)Q:~(%]g xtXNͬ0c34Z @Qy&G ь9P{mώz Kʾ!/:Sx$ωN?[E`;5 `SC?u#[@4 9Iz.n_A|K4Y/v~c (6L.C8* ̱ZM@>!O)vǾ-{ToǻjZjKc'(Y !5V ^vf@6>r=8=ܲa 2adQ܀ab y?`Z 7u%m'}_gwon?bұrYk bminXʞKF 61JIBP{7eK[\uxcXEN\ԍ @Hzr:Ыb7.C@6lθ3 M-{F*m;[nŃP OoY||_4P<>_*Ͽ+`t. 
s:IHjjTsl~ZTz4\ d3׼_ic HH">u9c6 eX CÕ׾Ԧt%'ZmI =,~!"Ht- #Eι5 $bq .]>l14WƼ>OF/'#.*_t; quʱ xkcI7ԃX@699k{#Z*B&_CIK 9*&@:cg|} 3?rDG\vځ@np:׹ʭW3KCJm_ K;:.( 7YY+_jk]u6`$Fo&t=驺#,[1YY |Ans_C@0ʼn+@g;fb价jsqMMu$]ySO1g"y}4&PozFNU"{Ba)zj;1&m֊! +@Xi.&e n\@$~7q (Y#X=\{[1P>N_+Tպ,ضb Pv}:.4]hm`svK~"? '\RnC}/W/y`?? FQ7YA 9j6d6nQ9(05Q%w71[YTh25Hh-<9K(ʼnEUz *rwSŸGy%[7 (:_d<)m%%pSDOx\0st$ow!_ BwDFCD7H5s.h&}?sOד/|A#AwnN MGPWŏ灩` +nrPg]IsP^Wc]gk"(NNV轈 ,ow4Ov!а]7cm My%!A~ݔTW+0}Ψw||(uJiBz˦Ap6o<';nF;zy,KٿO_][e&I9O<~a?)TDvM7 SjUQ~={b. gM@7:46 W&,P3d$G)}#= .. \n}L (z:`?a.ϫ-ZRq櫓 N^_{ϝ= ViAtWš go4z4 I!u:+qsQjkzZd #_) (GWb&P94 sf'B0 b>J כկBZ^_ũ=@ t; WUc];@ΦN\^dN?eodZ@,5m[yg` q=_kO{}m-:(#K W]ŏs_,0*~=wAa@}=Q$P~ovu@P̖h[j*2Q}|x#KU2 OM@b6$[Egf2NZ_G}ͮh &W-<*Hh106aPfβ a 9$^M^7 d'J֖S@D,"`O1UVmsF;cH32qGt$m-شR#5*dnTߜU@w+wdOFVE>Ɗc__-cE~ g ' NkWWQUQ+}Q.n@qUĮ_@u cݢwAt^Pٺa|7N%WW@׷|PS0Ua+[2=Ij\Go"4d9]@ 6 rkBa!^*jQaɯ@52ތs/̬&gNI{4y0=0op~}Շ@Ԏ 75XyC{O@ª%gb@rYq qr6j]ސ =/߯8 ny|1' [h'aGRQguȭ7yHWI]8S[.0O ]{ C1 eA8M^gLҤ z,'@WQѾ?`ԋU֣'@nA3.Co\&5d#E͐(m[w>x6v~?z䜽fnt?wKO#,.7/ /TDgi'9 z7*3?1$"& inOށxʞӛ+폚&,(<$INPy ~=` r/ؓA/;Wn1BFSorj&Dϛ8 F]OybY,g٠XzPx zk͉S|]Jvq"d?`Άoy1bUW0HXr?_{@Aq"b 0mRvfaqj7@1`N;g ~n'ܝk# ft:Dm$+DÌb~eꫜ;U ~t H5Z!Y)ʠ};/|q%꼏z#P(t6h~&cVW*xD `Psi;U'kꥌ @vP/p}|5'7qF M{N #5=# ָ=󯡿S 0=AexH}W+6+Xћ@2s iJz-\sR? #HU^I,B(sm9<~=ZR+=.@+鸳eno# 0zF<ܮb1): "S|:3*" ݡTĤ| - \@]n ,M7Dy?Za guM)(ǭ)ш;zvŅ"\gV2Ѿi@{OHP+he#̯ПrF]ܢ \!؝W PŖOw.ӆtqַ\/oh~?r5e1Wtc % {G)Za釛AsXh=b,y{(&KTr|kA__9deDR@D'>c.]x4r^-ZWjClPt#1_qWC2Q߅B.wPL?=-4e݄g @&疽CQ0~<WhDބV0U:kEICZ Wh ^ egoJ6{\`Fb$%7nZfA[رkDzmx]?xL |cry9m\x+tK هiߍ*@]1b*0IT>eS;@X•@]eԭ 2u|jo{:505Ήo-3+Aj9t_1s@t*3f5}R`;z9 l>)sTAn헵*4@w٠N` Mz܀~!x&K2`GɊ l0dpKW6K35`e*$Z+gAV!$Y 6\wHzr7b{ɒ'GAՅ @17|buat%Ϲ.H-zw})gPnh*rr2/^0e?jQ0黦=d1öcU*A(>ht3Тp#\x4n?+ Y e%s|nm0џ1)R@Whg~0 BGX'"/:+/Q@*+(׵AAox#k̪;@µ/+w)'nz%΂v)yRLݛt+HΤ'Y\K` U . %Iݔ #Y{熙Z .9YR[_ƚgWč >?;r_q[ b%9ݟm֟FWpKZ+WC:H/t׎h`ª@Kꔯ|O蚘|u[̻]?o,sABwz f##85OL.F4 Ar]0XẀ/K Ydm1*,@H`H ] L2 tS\=PDp.I ^ܰy`<ů}H5C70uT糇ڠpSY*0^ "< 0M#@aݫ;+B+Hoy QYA$]8zȻȪn= u!{>oi]߻ DLiP7icC 0\F!|Y3jH~N=#/sAIIԫ:|@?$0. S1<@svQ-kKfn.ߖgń/&C؟J٠)/#gs7[UJt㙒EG] lGąN:EIq2BAl/#(d'owVֽZ@7Zwp lDſc@ߞ*;tkJ G쎫d%8^ s!G/l1 MgOb{1tY 1 $q '"'m{7X5Vvu ϱR"P4} "oԿbRĜ4hSEWAjni`Xw \.}22,7>zDBϞ]\&5Iw_7bq^.50G+| ¼ OsifxIwO؂D^coj om?X9Ŧ4ށe/==.s-_8ʓ;n LkgN?%s;3x%J#P/Vuy_Zu{B0_ײ`"fg뺃+:S }ɲ6+>rw׿ҠtqU[!7:p0(,P]gYs2ǻ>]|x>fVZ{)>;xF h/ntZ~-]f{y8̀w @Yg=P] b*O"+AnWY ܞ~/ROiiY'Y:Aaz,=Vt DiwmOO ,'پ%ʢޠ;R1)K@ Ծ2@xgϋLa2"ҁ2?@!''`-wx DgD[?Ù #7$J[bk_!BW} 4]4ټV0Q8 ? v<ƆȞRNiQuS+,7[Ix>0wh w؆ {#^*8 2Y]/ Se Uұ"ȗD]Gnp\Yl-׉}_ie! t<.0{8\UDϏލsGvE6pJ($p$6hYM۶ThNկ]}e0~9:'1׽J?{G_WǗlb@5q9s@ Ė$ZŎDኛ->})ϗ}/:-`._c"їuP7ZbځQPf}\Rߎ%56ywhz=%ߥ[ ٷ/o '~ ?*nHi ')UZfXVD|u׏J`NLbǡYgs;T ߶Zhe߫|*S em>`lHލs<_ANj= 4bKWloߧ}ը#5@Y6|tMAjgx;ȇ*˾=6jKOF-A2{WB |:D7w)9ESwq+9 ܟl' -2֛ʑ}Ywѷn=l;[j6z|ђH)>'fwJÐw.}48v49w*Sz௕9|(<8Ԅ|%=#&M kAUr :b2B?76fX ]kvkG@?G} kc ADB+&*YS ^|"Kw<DH}nJnMU^)]gfEAs)oQہNp: ΟJ߂NM}u`j)%U tK!`yJP؛ 7ġ3\0vWykn@HYsm%~aOyXj+븾F:swz_lc C9) I50yNڞz P O-Qs=U v T+sSc0($ֺTM_ zNB]D[))%s-7|͓ا'˴b}}:I \XZ܀ZwҁBz[ms9XPя@,)(5-Q?7;4Ie <; {SgpX8"m ߪ-=j F]N8KҀ}OnSxzwq"cя^ kOg>sgHO/IR >"xTugs: vݴP//>>!|W 0zNiVGzw70٧v1^$fO:P GT 3 M.ixO=}=%8x/l%;݋8;R _02 4Uz ("kyOx x*k?rK" 5v2K~m.XR]pQGou8s6 $?dwذlEx6K^:AH">7! Zv@֑⌣@jwoPI xbYmz@_EHm'_N y6uIF+W5Q |2+ܒ8L#M.Q࿷[Fі߁Ƴ{ڟ־?=?7e\D͝QluΣg|= |'6ͽm]@BUHid?]yNNPqNs@*KoAkohqtJ ,᫆q/Z؀v7̎Lm~T.y|! yLI c=9gzK <J).@hݣԂ}<}o96/!\,}BҲ'M w'߇{'Cܳ ;f0 "{ϫˁ:fC2 09uf VW) xD)|?J~mo#1c<0귝ω+@j;z*2 "&<-]@띊[>TI8Z6 lug4صןw1'Ҫstѡ0uZ!MѲ{ḧʝ6[PS9y ϯnOg˽8WNnu3߇ճU8+GW"e]/9A ]j%^[3 Pm yd'3 W`Q nW4ujV5 ĝ#\:{=I[П.ϸnȐCuѮa}橮"ճ 4iN>Y]MF_swt}kA\H n_`7gw#hy|j;4Usk{G1F;c?{ Mnj>%5 y~! 
$R0L u>9#H+k4O!a?Zn2S7s>ߥ/:^Q ;{zN6',ǀga 8@Bu|GׅLI%@uu$gb_7i9:SeSBMwHY5 nǽ_UB^ (mA؍W @ r@wKX83q~DjTD }cQxm~; īc%C_3=2mX97+M w[%z~J+cݛ:s(O8));f^? 39@.Ԏ4L1W=6^$N,ϼ;۪Q!WW_!VoI7Kcf%kI⫏n_Һ< B:xUX~qv#wrN]_=bvRP2ȄdoQkN[џC͢Ayx~u ?*$ϭY-γ;r꿁pnǣkoPljg"?%= |_{ȡ'WcB??EˡJ@E^(ɺ9{+Jƽxv%~n\*3k^< @n佻ut5:0r,)s44[$g$`?=.XOgFN%Wur9eߏ oOyANoH+6t@|2׻BK@q){1-M&|OGuSAEsyh:dk%ؼCP9_i vHFw> zb{9M?"c3Y̧f|bN+)ȓC 3{ {uj@0Sqk+䢃yUxۏx4KN"8h3\ǀ7ɖ@Hr +Wgq hAP~!K=G9˹@@<̺ݟ'-bIuȗ"[%g? ]\mZe7AS,mM@'ZOXĽ@XE0hF$D /~򿵘x_}j|۷\0|74 ČUܩ.r@]9[YB 0-l]Qud޾7bk_(?~bϲ%ebmY<uŦr8kKUOy{}R4 }uڣ>u9|`X[_=<Ƈ?22wHoAz20@TYn9մ]$xcdLփ@GE.g[@ X&ث8t9J rUQu'g0 r%g/V`JK/\lTf,ǜ/u $v~:TsT[~ ~eJB}Tm^o)4[z)+vH:0R!s^ZC 5B)WHj?V5;0O稅a[׆sF=*9ZI Fԗ|_#uқG@.?a/~0[xy3)0/ LoSu6z;k4c\K?2w#iYߘ҈ -^쎺pbnI}Ϣ1 6/@\_`Z~ePj :YE ,_)sQ[1WAci|4{V3Hrd>Mn ʸcrf'@.tuƈG@~/}/=ꌀ)ǸFm|yH6fe1ɹ@bvDi< Dwi7a3ԿzQHU&B.&+mj&!V=~G |v%Gfp"=҇e݉ȥ_9ǁ th]9{++n[ :ST@Ysڃ:ncD>HubrixNmtAiݿ y_)U!` VHE#>gnfe9[?րL >1 Xך+E~_znsu\G?@ 뭛D_^#)]=?"Yq<|u¯n-L?9 Y¹;=+ 4淏=gPMF-=&f:j6̀ӖJ.=)~w\ Jv|FuN/.{(}R[9ԔGWX-%./ק[T3=Ͳ'>~}r A|L';Hd!5rzh-U;o':@:`˜nhNjcoV[ &sReז{W 2#/˪r |w,vk= +^Na>5ynVg"Qw4E WreŇPg!D+L=gbkqEś<+1? 3SZ*g<[O= }W} s胦8GJ[mzJøgphܛm+ykяBVJULb]bW)nE]vz=U+X9Gsn 0v'Ą*k.G^9[3cwG]q]x_R5@Rsv_lDVv $JPO7"Z>V sgŭSm>}zj0ߘx49c۪ˇX,fVV};7gQ%SVQGTݛv|o*H3'mBZ\(?#0͢V(OpX5;ϦF3xK)gO} ` 姏Oaߎ#x=%'H)*Bvo A6Fq\ 2^X7<6"OGwK jv5Bÿqþ;N=ȿ- 1@Llk3t9 x[+琷zG<`%ro/oqp" 8+:(W^w;4 xFx¾(]7Yw4TiJ&eKJwm+m\!_z7TG{}=zW@x1J:iiԟ7בwqQ@0}uj&>߶[}quw ]A]"s"m)s }lSQc?MWW{uCw5r@6Q|5~/_;s~7}.;˅9b@4zo /syC;#*dZ/b~ Hqw_@uy<0;|Ɗ63pvdA}i!w<ފȩ$w[btF D-pu |_vG`}Pgrsܶ4I'O1@A_vk..V91s Խ>L _//|k6✙^jEZTxCx4ν 1T[n}n-X\{d19e㮆(9(Kٿ@p%#}d qjc^۬Xky,1Jܳ^ {{/zL/oSm 4d șM\:jBj:$TzW#ouB?rD ɌN_⯎@rz2&i>&gZi֍u ih9y* "-3cKó X\^=uS>r{s_`{gGI JJ%@/*cл4z區Hs|Y#H>T: @LM ny_r3fSR*=z1[ͻ$,n> ߬6S`=?N^X-~R }S@w/"8:[kT*oK@Fɉ'oO&Ȅ˷nGP iMȻ}>GFvO۲iP n̹k˧qФ;&?b9 j'p?H/W1\헋 }>k660n6 MԹ;oچA-Uu$ $YڼFry$ \4S_WACt+Dh{eO<H F|oȷF:Z2֭V*\<@k>uIp~U9W#;@rAW-uXw,H|Ƥ?g$ =DQ "^"WY 2IM_j!@^2yjcKtRL>h?EǒAlG iw/nSWlݭ *;ݤ kxo_]W^ P5VԹ *Hwwb!v-vcbb  ؘ3vww{==}ˍݳ7sf؝n5|b!NbCfzU}z.FvS֨?6=tʿ 恮^9{ [_  ̮C*Ѣ wѱW! Gz;V&k/Hg5z5.Ý']E})?AX]OŰxʬp_ofG v9 At[nեAhyLul{tܸBq/{E8buPްbCC[_yEIO^pΛ. hkע^CT`|aI1Z~mfn屷ߎi_Gg}Nߏvu)>-|7\Q|!asa i2%燚}(B%IG}/)E{o赴ϝmעwm#$\rJ:7'cϱ+k'<^mi2V ;˧͠-}_е=W~ZL~pcۖLYFsw4>=N`o9z;&ůC3wsDǎh=ӻi=|yq@vb-:[9jIC{G݉+VWϭprtRtZڿ>4giJ4:#~ 0(,z:X=LGKsB[ ˺ۓ|A,GAc]#Џֹn˞/D{-؏NgrcϦ3W=G)ei :pr6zȡ#S=FCTˑyӸ?{6c qWqrҞѣ27ܢQ:k5usi\W=w%8|xㆶD^k^]vCGG|['Cwi:qʭرI_&މ)n\P“ O=GB~G< ?X\_kk&t:fvetnyzt߸Aw`"+zc2ڵJRVݮ>nz :Y_8Xc~jo6`SZXͰSӮnvķ_ϕkآ-g&@Fwoz]5E趠,k{߂g5+\](.f\״f`}i\+wG9h]:6l^ U/Bz쫡inGCiLԵ≷fNqr/W=z77'WwYcE5*9nCKvAu!ZaQo\(f?F,AU|ݼhcnѥNP9@uk~Z:ܟ\}zO+Y=G1) w9K6U83!>NqP4_{lyK$?\8A5d5&[|Anfu7mBvd{-ttlz@t7nRmtq/U. lL"Z/}~s:TAZ>~vimFy鐷_tޥa֌l3Wv 3 q BרCiIksmfݰԦx)ty76I3cVf-nl;-6EGU|Awfoʁh:Vd)Z M,?Z !g)z 0g]{kEmr?&ߘxэAAǣkƮZѹaާ43WuѠF#*.ofL|^(ξ/B@_쿼YgEnd}FsBVħtC]Av% kTm78m}Fn2wASW^1FwmsDܿ+[NqNqk @]݇+ootljT =!O^̅ݛk1?G)i1,m_;Mmt =_1Uէa8 VK{n:wqeX0~~S;=[N{w7V/v@wk*{CH!nMv9u~pm2]"5բivʞ-fXށQmfeK~ Uk4D>jF{+-"ZUN~иsvvX;r27.FС֠1^Aȫv{a!U6h|~媋q&B]TiK}敄8/QHA?jWne*Nb=fO[nkFݥ.=^1>ݖ#>q}D? 
yrc,:ϴ{8ƭZm~ʑIU];Av6tl п23mDzuGOm/߸ϓk/h_ypx+NkU[{/MKwOPWǁh:cn6Ÿ́ VѥV őVh?kƑgTGFwD \Ow3F*OO>E=$lV˳t{rp^a~!8#:et/)9]TT]4O|aJvb]탋=ٶs\ѹԚ1W7wEߛW,aO#ZcXOb!_#lFzw믪ޯE:o%^wsQxtl?OZGC/ֳMqf;ߋCUQЩSWwgb`As'OGazΙ!#}н$??}i"bD݄_E@5xA(}v/(jf%q?}mYѫ0)3=`75lhԯt*-VԜ`Q[ K?W OnE+cps([b'[y`eAtsũXt|}qȆ܅cҎ$aNo==/Oِ'k9AcמA~M&ZttpUvN_*A_ W9#웠[1h_ߧk\!CXznU-ZGt9 K=kyJOj.an튟CG$59Α4'Q>}R$Ӥ'.}|mT'Jtt{ʪ/:ڧ>l_\G|&UݩVhwcLBAkAb>9]|7+mŎGS_s@w w2CQsL^B[jW ʜ tS9+ [_Gzv7R7W5DwGBۉs.݀Ο^&\߀n?.8=:\uUct0)2 M9KM&OBî~RFn%kqh5~YTU7Z͵[Bvc"t~:bwtIq7OH|xӗ~O>Rah1/+U]϶`- Y՛^l¢oYMS>W@㾍S3nճ$v ȟ:A=&DįVT+TݩgFśj[g?Z zilz9t7Z >|Gtk=dvg*?nV}]{n%?D=0dwxU{+[=:x5>;[?lpq!b}נyU]߈70# XS`St/⻿}Wk-ZEw[gp)?o>-hRN=ΰmvI[?2۶?3b: 5ƔhtyvZ:h_fr,]wKx3 Wuq+SzBYG ѩam1N]K1.Xb!tB%yfӼռy9]\wJ]P¯WY/G ]>x7y:z G}6xa:zX !ux^ⱻdZ&~Gg7;\l''4OBrF^Q5~!t{I3 yt7 eއhp>=_YLz`іmI>XO1EH]hK<500Y 3$>iB. nqڕw(oq/>eD#t(:f(| j'xeVJV3]sO47r޼дќaă8__]> >%:h3m>(z{h9~oW-z)v>iCz8]_z}ソr3UAzk;krRtܶ_XOx&/V6 ~`ƹ|,2 0Ȯѿ߬L=ג}ạ97P3u-yt228]>qz9jN9ˣu^ȓ=<섣'm|ihl.1rW:F!x@ť識pT=VQhqܴuOc#JkH8^ό d #g?f?̄?N2G#tzz\Nc.Ix[Q`f@E[_&oc-zOWy LqSknF+t=>k|ovݬRb_ze[qX" _snѿۘƑPG1 ӳiRla_g! A>^^6o"4̞7ƛegss8 Cwoafoc}/GL ^s}S:fqfM`Oa+wuw̞6)G [\gIg K[tws{>IuuSeybϒ&ql:mdϵ.GOn{~/W\>vLvt-{:ؼeG0a`t}n:=!Y|$elf.v~YLd}Nc%S/6-o3ׁ]oO"<6D&g;sofƗʞggGYYlzEtvYe=d;zmcOd`g8>49}/.y]ecdM{6-l>7Ɵ'w*O,Myq2]l~v V^^wK9L&yl~7?ɗ<[2Ifߓ투G}-}?MEyOm캛gS^sv3dF)+ǍL?d^e;N~َy8&lG8d=!'ɭleyMe͞3}/S=Ye;Ƶ N2}g2/y!8غ`O)3be͞o+[׭il26oÃe{/X6ﲽ ˟,Xvy]e(+y]dODZgzGiw}8)vOʮ='fvX 2d-_ٯ)q}OLRxe;M#粿M7L{le}ޗ%Wzʾ^+6|~}e~$,{/ {{2({n.r'_G#OB泲,G,Oy.y[ٞȼ2='<-Sc&_q2{?{J,.2n|t//nb8 r܀=n68<߲? ۲C6_2NB9Ǟ7Nodoe=-2dq\2'q%}_Wo9 \ٮyX㲝Ȓ2ʼ,ǛqygY^Toy9dG+8&{o='˿,Gl~Ȟ>{=Ol Y^e&}/[,_l<=l^eMgc} Y?!0v?e&9sٸ};2'Qlw :r([ɖGcl|2Jƽl8C}bm,;Oe9yl_:I2"!WCvd5{ƿAdaϷ}.5rW?vC̔Ju*Es且r|_:uhڨȕU?|W&aNOnݗ_mqfz r[?4:j#1pUS#C;+zm3ݲ;{EwYG{\ލ|2:Vz{66h;=t|o?/wo"0WtSokt# >۪v Q_&:HK~V8l=r1x(вƋw6mp:}2J޲Z7ncv't?{A_X>AuhrҶPw|44yEO=}lϠaCzz? ,_eve hYw}Und\5]~Ȩ:[e.8SW:۝ނUZ牎%C >؀~W>6vA̫v7kDYҦZ@ǖv;G~_OGR#M7@* #Vմm3i\ku3tpa9辵3Rѱjд0%mdߩ:r&eH !.$rem@Fo I]m"9~`I$evo%|Dsct[Я-(@g*ϵq']jMvzrcmn~\y)C{Gƪ#sJ* :Pn=\m?Wt? ?z5\l坎] &7-Z;6蜆.LJd.or<2Pt1"'԰7ޕx NX.ɯ>5Я2ab{L[]'INA1i01W3Iˑ.w}ƈkVsնX=p>huK!f] rҎh8rev͔hp!Z֜Nm<|(j.C7yD]oTNk+v}V򚆾#O?=\}786B-:Zӹ,:363Ұ3hoţz+j\~qݻ+p8f/4-~xGƒ}ۧW*#@RЧh7QzO&)q%ĸ?/Na!anK#|9r,ͧQD{-IN/=6jYI%93Ч ѩI/Jnb1MwE <|k6%ߊ޾K&VMkFHzev]GM+j|O=?~:E ;}dOtb' z 7%xBDžҸe΁OLA{Z ͼ:u0USTqL-S73CcV՘;tkڴ3OW ɣU%zOܤOu -=ǟM?^lKxdj~SX彤/_=}\3;c.U-E)W7G)s@:-Z'鄖'FC*Rs{{X+Z2_~-/XAU7v_|t {,#s,XEƯ[0=K4x$ f78`w"C+13eG+##ً^ ǘ#?f9:"u XY8t1rZ.6;-N$;u3C(Tg'mKFo}s ѷs7iw/*fhгKmB]jp]f7.}iu3ޭDg 'yC#^lVSw.Z05A-|[]Z&$MZ>gTLh3ò,tp-=hUF3fF)J~i iѡMj'xi/,.wzy_'7 $~>uzi&DX6cN_枖XꇕmLQGh_A Bj6:pwVv^v3:\u4-uN[M׾GG_ofς> DEm1]#,eɚT4% &=.si-sen"NM%v/? "K80dt1h$^)خ|ɛt8[iԑԸn|hwI!8"h?R[Ct(Dh:zv\rޑ3>6#@G_C̡)DZcJ?+p=WoAi)VdO14m.]_:JCC'rkڵAaa8Y~ke$W^v СăsgW$}5%}-!ױ/NꆠO2к.* g]DZ]Bh@~%2DJ^Jlu~l< ]ލ7kOE9VHq9>UGksUmm|:n;ڮ)w]~mM ̸u` tcZ 4cj ̪ 4R'}.m>V=} $_Q!{: j4|MO]팞MŃ'D WK5mpaa MajWGkc@Z L/ߊk ]6c$7d 3G@6*@Ŧ' Nd\)1v6#j#t0R㼧|#]'g.K/Vunev 0:lV;] x*k,!_8s",!9Hq O[ -1 ꝿbf7pV],%d(^BBg]:/ִ8ښ^uA3̿'!Pqk׷9ts7Xs9a0e)řhc^#(5#MU C()L{ r86P^砞<JT_ymJ=GI}}*{7AR\ةZ9ėdA=K+Y-iVA78pMFɲ@+Tytmyh ^Zhi+s.6}/5am-#N3j pdk'Lkta9"#ߎp[ju[w֏+謫tOgz`)N*hǴ(t(v;윶!h{Lah^Vrd¶h"V󪓢K6/B˷v %v'+|[sF+x܅nWXݞDh}},qA7'^I)G:ը#)یfUiWjBgqBR|Nv]DN024h/Pt z׻0n9Ӯx{U1]o6nH#0ϓkeO~GGSravBFixz:?eFj]o/3?iˮQ]/=F@дE=""h+NS P}KcsJ{hCFO=Fpn>#TeԷէ]NE^w9 M]Σ%q8-1΋@ЕJv߭`'`a?I41y9k&Lp|MF3w;t%>~Y5ukZ X[t#om놶A]r_ѹ{TeDK!L.mk'BY4cG;-9MK }L64C>FSEdć&ݍ,6 m{^]&!BoGiî@砻yϏ)!s\&Q͘d,x5J z~#~xqEInK=Mxs>f/clX{s.;-; 1n^Π0y+'oZK,BK4zv+.J̠?8Pj~3ok)a-,OzUec3? 
M*<9$9CD`/M<:8j;~g& N^IMqR;cK7:1ЃL4>nۃg :vHr+`-=굶?=!4$(6JU|DcܠkEP쳨kAE |}-(cNYxZ il żc }JA9rZ46Z^Z/Q=ͦ ,,^(ͷZ7BrP($l.}7/^5 7Wun]A嶝O7Hj"^q3p׳Xɵ]M9nrȓK95#H$#-ѯO<wh-Vi` ST}~znӘOkA7=֖BCz-Qh 'W[21UE~ Ē|GPs}j@ܿn՗ /5yt2(lz;\_,_2B+7lP]?;'#MnI[i6 ޻  0qv׺9Om*?MYg^Aqx~}x^ebU7y 9KgVeӤ_ǃʧiu k_o ^rCJ+_~2w* (SY#>y}>^~v{:i /|Z {OݫwjnLJ?R_LX# ၰ,Ai3 kf M{m.PFu:4p!Y~_UUP}>ʯƒ=5 zߒA_CLVR n: ͻogu)@1ϙ']_ R| [HvykUG?dAyq]M>jȿH7=нCZs'6-'Vydqp;7D| 陋6[5+)'m$BXx4(]oޯFhquD}IxN N*azc Aqj KEWԦ6eC;vMډ-Mu-u"yi{׾53>צ ?=w"(C~:P3nK~ 9 dn?ЦOB {ߊR(~\XiCw}N\vρ5 ~~' b~ݞ>C x_oGz'{Qs:h'U.Ȗs! sƌ4=B_sxAftE׼[MSŴNOA$K:A-(Þ ʯseA!-&~|r{4-<'fQ/f/zbJoxmxlH$E>~oK[b߹ {=յ)S9nA=U{_t63T[@^&~Iy<-+w >4ro{N>H^p7xCVg/= $HN NOhA֡6;xO|ߡZ[ t_G! o8 y3+.v>\V#`3 gېݸeChjpOES]B<炍A 7|1oN#=~yS#[,鰓e !5x!As=[G]5Qַ1t3~j\ Mޟ'L(=3fٻr_v}tdqU'~{P"^t")mK];>%)ƅ%w)Ђ#3A)aQ/>27nAˢk!|%|\ITѣ@2'v> #2of. }I/x:!(ûڰ3e{ۓ]|z#2ֵ݀㏓o1+|p̒sh]a뎑?0O*un@wI??x]}ɯ}vKq b"V?eJ~͹;3z9VȞt}vwzKںWJ-~=2`#i9:^:?R֡s2孃3G^Nu8K~O'Sܔk97 %3Ix(ݨFeirq~M]a KC7ŪȮY0A F= #\ =ʊ('fmhj?PZl`5܎MHz,lS$p} ϛvjaypZiY>`}d'4[lC}>5l:&pG~NS^#J|W[}ʆ?y?OT^&?' x-hD vKLڹC|_T_OG$|4k{ 惡KIpe$nnd?7t|("ͺV\_<+bܓ2SôV%>bYﲎ!KUҾ37`?uUG<|uo|,'(Hz:52"9m9oDPfHQ;!slMPȟl}OO1 []L|z!C;TKy?=M݋my?xD7AwX3O{įz/ݺ>FHsרWi^O&(θħ,߾A{k=K8ZwaF`!ŧ%: I1𚷻g^*G'r=Emi>IT7 E wL|YwzqٟC׌~J~Гht8E!,(%9赳| 1I/<>GvϱʒF!Cb^/%k.<; Sk}ps]pPM.-ν_?eU׫ֻb] YCV# ZTz"|HX1h f,E~.]޳"(MJCly{eq*‘F'e_et(oD1!ffsP%]$^- ʞ>L"={sŜY8i/D=ٗzz 6|X[&t0Pj˺O(`4$\_qnM&^~Jb͟$UGxGZY 7/m+$yK!p6ݵ8Sx7g:9^sD6Fs^vѠ49v4|[j=7sh5R6^zwAy\_'?Jw焺;v_DYNfS~boˠrI.v4ūV3VsB[s#J;2H߻ޭ_oy?VЛYd7N%Vi J; . %kn6S7&{[3PJ-nմ)cw7s1/o-$4O(u}fpCj{-ķ͗Cp|% &ɠ[e#8^4هީBbyPJϕak.tKp'k<޺Sѝ!|ߢ]R8Ѹ?(ܧmJ8^6qUAU>i1}N7WPܜ:4sX ܦj_p![K7%`wYaMs.Á+0gJfHNY>=wmnsɏk.yKh]_|s[I6/ޭN?^!ͮo=]~~ch}oVQg?F@vDd;uqBg/Mr?8&_4?[8ޓi- _ǺJKmܻ^} sXGI vBUoYj'i⯋,e_mwe)iNdKFx|=e#ꓜ}j2C>Wnkl4@ѽ2ٱW<<.lmi/ O41r1ϖ2nٓu$?è7r%b>=>tqԊtzLm'CDGQ;_%;"\$9,ċeNsOG#lɌr<dkmvp4g9W.b~?VFSŭ%G w}TG> >Ӱb=ץۯl$o0e Q@U/&KY]+8txx4:Էէ\F5pռ:?/x H.6z~\2tE XdAvcҚdou }[ #毁7*L!R }ސ_)D]GX_lζ_B&=3@!w׽-MXIvPFo#ߦݮ@Y#rOJ ]G ,u~^-^})L_pͼ.վaB&/% "AstA%F'ÁfB[cPEP4ib^~a^#B(GZ|գ:a9 >wo=p9vo;"ʠv@c򻠶A#?hH!b9%Z0ͧOy jPT@=rM@A('q/i\b}6֠7p:]ԷIuP'@ykw}a8hynNOEE=c@4_(׈%B/ڵb>({g i @C焲NT[ HP\֎~(񂺊9i|A*lWzDo!Pkĺ-T TPדm"k崆mAy,橐QRvY *fvNEu?zrPyO(PV4_CWfLpLrB9RPslMٕ@ џߋРJv߶OH'} _+Pa7P@U* =OA(P 77TBZxWc3Av=oŴWCPP1-XP(t})פzTE1}sB˲ZL-o6z_W B\^>mX ]=q? vfqOY-~-M c#b*`<FP @5H̛f[j ĴPV[TܔI8@s}h|b PR7MKitHP _(D>c }@gzBPNJ 1vAm@mbsǎ#PT_z:PGiz3@t_ǫ@e4?0AFP]l^[mфSʎwԓ<9Z?g,vnxF3IޗDN\W DwPɚ>SJs}Q >5mvn*)?WaPK!d6Ђj_!ApP%LOPaQ*yJ^F) q~5tv g̟qiFO: @-T~ bpF=[΢=4,~De-[|~|p]CU}1~ZFpBThP8ٜ8asM"h5TM]@,\_3PxsԵc"T,$Լp1W|p1;bYف ^PnN8YC(|PY[ bި4j ֐pPOu %Jf1O+BIed2(sL>j ,@5Eo+B#P i%%evVsURԏB>he鎠l'Q!"B)򻔵 ԆCZUFd:1ABH#\,Aȿ-黐F~ԭpP YׄR7CO/1a쇉PXTBYZ+1bP2!l4BfTPMncja]:hΕ 2#}-Hˑ#>|l=qeX_[pZ䦆ޅG(O oi² teYTGsoBBдh-$vKAc&5nSP= '}~K^Y5Pٰ>o$w9 =䗩6h-'~STۙv$K̥IL+=z|-!wȽěDC*{)SQǛBP_Ӈ@{>u_(`KWOkm]~BwVv";KtIC%n:~O 'Q#f?~,ŅW&,!"F:T#TxX *X%OoI8YcxBWQ_~-,hT/CK,w]u>u'g7ݲ)MU?R#hxG8R*z; gPJlSDӣI$@%͛q{H~/J&FvG,4n>M;NF, &G(gNa-/Ɍpw>A?T?deޞ:o=7Y~>P6E$*uz-!Kz+ϖUpy$ndxN_|ӳxޠakvv"*уcwP\2@ xz^&:|X=b>/v{7P<=XIr%&V*o:M ;u' UnĔo e3 +@Nȿh$@V/y ~wCBB7hGCXECGnF'4 y|hEX $SǻJHj)To&o{OHiG翦Ӽ4ҦgYK\A]]w|ɱīUa7=oxا` @%'1?&⛯+V$w^F?R[ `TjyjHKL.K}[@5BʗPIube3P*_>&# Cz{ZPYF (DgPP8ĺ7bqb P@+qPT-CjڡhPu)᜻X>HAN@kHrw.S@&ug9Ҹ(b? P hb9PUn&#/ă @+sXCIER\V-@`mӳaq]r'! 
ާ?O|jox {F8аx : XTb{L4TӪFV"zد_G%]GHϊ<05AUS+8͌R|'TGC7.ퟩֱy]uCޅpDSjoiYꞮwd!LҷQi+~w_B@M7 mq~{UfZ[P g)t]lKxřqۅ f#{}%oHy(s$d$4 e,UË _4#פS@)I$"{%>-@#K['zXEr"g2I- Թsz ;,Wﲌz#V,7#<Hq( J^tJӨt5!=kQP I@}^TBuIb P6"P&m$$>0IBz OaxR<4GDܺ>z_:?<sJL.u-JLey-4H>x䤇4ɛ*ϫ`M|_O6n3)"%O ;m';(9#?Q/Ac.{wTG=KO8/-tIb(Ԭ%dVg[; h bM~L~dBe>^w}gJ,#^+ S,b90%=γ\oiTB{PZG11G,&D5M8*!R?Rz]ڗ\hO3FZ5~jA[_pAS@h.b2(WGyX@h7q+pR0=U-6ɝ؆̈I$4(]^]Rޫrdľˠ'zG⽢Ӣ/Cb %ATBet]OgRJ@ClM+/t?ȵw>EDŽӲ$HGחDb=-uE"SԿsPgABSjB"ZdŰ׿8M,YNf P=(5݀SlOyo~6y>Cf /rA%˧a=MDvM-6\xpVlG@#Rov( E8,u;LH_<$T pFbTP\4PoI~-nc{j}uZ=T;B'">&n}KK}0dϝZ3U|O f_3L%򞐰~Ӏr77rtud퀿(F2Ł8 ,?[|v2r]qKV9fjw̆?e&1ib@wP?8Q[n?P/T ^QH^JnG&8SHHxrwSX/T~%o;6>aMfp)'!08&W |kAa7!r'ӅHpBW&_b>6M6p&+[(WᇡYkPZ/GB{÷0np{~?«KoodʸqUZsm_{;pcϛM;fb@4P+p\  1pCFMHr8Dp y耻;n{h>vH uU&χ? -Ekl5(Bps\>oJ'U뢙|{F~%Hy)j?[EŸ]}],~lHArֱlz(iC}$bAvX"}Ӣg<.xU$O7z>k *;$p=pr}E{څ؄jvE);f[#sJv=/Qhкq/^!9|6]gsyiShWջA8ok#pgjV\. X'չʕbB9/:| 4'<:v8+Ex l/)*nJrݽ=yX OwNHxUС_U8{#X\) (>\OWJ8wgøJn'sȆ}o\yVZybPH}U`9P o\yjL[ܸ{ڑ޼pJא}Ǣ4 ~c?HCL^hX"YO bµ u9͢n,ω%IB·ֺxx}-J !92繂j#GڋEGwZy<#(G= ױ=5uT*MԩOziS=1?bY᥃ _XVx:]MIpA9_06pRKHKn8M&yJ*z דCj!ЭPx_է{,~q=/[«i{0ַ[,r!ɯ?%}2'{6qs7A=ms/{o}9BAQ"d;}fu~|Xj yLh. ۉ rQ2~Ubّ֝iܩKƝw Ⱦm3[.?:xAr|䭇{hZǒ۠}f(oy遲"ݷ5I\d_8`O8uMfͲ4 :BpDM=s:A ڞ{v̭́geseȾQ7Wh;~xf}4o;#uCc?9n3pRPv)4*"}lY_(oL%hʓoE&jTL1˓MVα3jCAnp{Jq=~d]md}S̹;v9p;Ўpal߈lX.=gwNW"'[ (7mNZX~stCµ%|O-(Jl)#%$?o( B&'(Gi߫sA7[w=icKWz !.ӫ<[X/&UD #3IXpxxBL3Ex)_A0d#bvY)̷X<V4[?l_t/ڟ\+͍AyhדHΥ&܍hNhwXs^]is:Hx/cL(~64P,pb׉kA_1{Ic=gKmR8P,vxcIzˀ x(FhxMP Tk+‡ .5 u]^('#af֜i.`^P~v("^yU?󼀫|dEvyIf~1<;p#!#2(&?W#;nܩdYD)7T4_NN>S$?M[T~ŦSkwF=9F;bqb/q 8DUU7]M9{. b_}Pn*8GRy֏{R1ٜLN‘⓫F<™o~:p?[wN/-:+ֿjQ=}fo I7l~8O!iG1 )mc@PJxS[ ϴ?h}L}@QA@w8r߅zD܉xքH,Ov x_۴ϫy wď'jGw~;.aCҸ -iGP~{вߦH40spT(#@2nk3wESAѣ4oݟLegɯPm¹-A!Wu׷,#Ik78gfy/L.eS䯉٭[a^NϪ{(~'z$efvzۥ?FNOhiMXE'Pȝ;_řφ{ yb:Jq*mv?>$w] !d|ޞxX}B2Ǥ%:wS@N[ėꈇjV|5Vv_-"l!FV{y{󧒟mjfIϳ;|wƻ~&76W%'d4In_ON"T1R_nھIn=L~M_@)HbőL3{:j c&8u@m,fMSY+&ezǝuǦ<\zFz3WӀQԈ]Yڽ 4ە<+lO&WΊJck$~,"R(w]\wԦǶ` biT}KV?>SR*YQVףdⵓBvr_ǀߴnފ[ O_Ә?6ړތ1 xd|xnW_}$%:9wݗKözPt![ N_2##r~oMBԷC!,P΋NW. ܴDI?FMWH~Œkm< ɋRħnjTv%"6J$5b핿8Z\queUfq_ϕKFO@Qn~ٗl#sI~&!}T Q $Ot[}%,SԽlA/qGe0O]a=(ց28rO&{XY]-zाP B<Վ1~@tv/mFOLH~N d7mhy ,m4(bI|"=b}4X)N8BN ^vF{Xlq1F)+M?_6F`}5_ԋ#1u:M"1Y@rK,!^W25M܋M7){:= P^`;)c>I8= ٍIkWfC~(<Y_7%ɏfyz22*9=+fbPz(E-] }Ak'>=S׿Ch6䓻>c{|fG?jοx?&uǃRK"K%{k*cV+/#ˡxr s7&T܎Ko0$P4kbϨw~-Շ:C<4zkx>q6䟰|I'}ɟ-?2Pl,B_/v ~Wtݴ$Ph 95( (M7r_-ta{#|=לC ,Χ׏ A,1wr&#巐 1u-˖KhGj]_kNIu|'G*%pm ~<)Ro>^u#xNhũ9!v}zmFcgzҜ.9 9hu|Bc\%^91ӚG۱1ɄˌX_zNNj4^r6ԉxK![,oOiJ^پk8Yc"ZP=7 N|c_?ՌE q_ #3\(CvLʧپqzܶ;`AMCO1$Vok {ķM"Lhn ˪I ꝴz݉J~ˣᚿf$}${{BU^wM57'#eP>xn}\ӀJK,Ś8~[8x bШn iW=e{ f}M/8Bt4W礁H8W"8\̓8N  GA/|c ! RⱢf-s*|{}1-в~͛FP<}sX%PY<t,OZgxXgз4Pv\<'<6sALiz8]"'?[8ڢB"A27@+Ce{ҹРM X)H'E:![]Ay jk bbh5k @֊n{бZ8_)Mlm9(p;譥}.B%hm&#Y]Gײ9뛠c붽xZ<#h? FqZuT}RABz-бrQR?2Tg:Ve} $_sAAYd@c'fb[%GеV,3 ˕vTzl]׃HhQ.)NHm/O46^x.hY~v4! tsh>BХDyRy* GceQy[+;M78Gh ,߂>OHs-:1m4G緁NBJ<G'R~P x~ hs% s~A+һ$RgGt{!;%lR>N\ ā|RF G͏h~>\,A1#@wN"y2nZ%Ht7}.~T?XZ7.gEzDsEzdW3Y_7<-?*Gl`9zlAw=ADcxZO7s;іHyzeyP:C)϶J7:T/xj}=A/o\*7k;'R~cRvE(B'԰W Z߯HՄk1]JivYLnm?/|g,]l>xQeRB+O ZVPnVD3A{|2Xig~^˫|w6"}E&K׋7 sA'1.r&ebc♩BT?!3R?2 s3*6eTgYՎ:z[@ ;z&;+X2t,H}. 
jBp|dвZ!8 To)_a}0AKt&^Cۄ r@p#h |mguR}5\']~_:w:ϦVѱ]b e~J A?Эꜵ>1)4qshi%iGL7R8v]@PXZ4BC+>힠gOZx~#x%b"@% l"5R?)ad3&Yg}"~ ӧSRmPPqOѲ)eT?%Սk㥸6dW$*ʲ+ Ju|j $Oi@0 #)9}p(cˆoxgQ&#{(HEs`_h\KX_$=˓~):ѳ:cA3I>vNM+>`dY̿ΟǛ'mPSaRy-w-KXEⅺؽ#>(nkDȉ'1̣X3 *Ai,/ꄑ=#Gz%N Y'V?[6LP-uoZv.^:E }kM&uicARF-6"9:E4δI?W)hzm]W!mI9D~l1hdA~+VJyerR96ڠmG(pAYįtnR- i"?3ԲPz;)\{KYl2W Z{]rPA+)Slв>2w{~L@~؎_xݣxƌ P:BkH 4,WWU鎥%UE I%ADQAP 9"***w̽_3s|~>͍jͷ>&P` {ɫBٴJ} Diao,?ź1EL\ui c|_{?bưIh}@(p7u_ADkP:&Hg4(He dPӴq<[/D3@ u:&ȿkޖLb̿M F|ZvcP# 8 m_Z2O4.0xbځ%ħf㪀?o(~PpQĆLu< ku Q:QvѯjJ?츬#n굺񭶸ψ_#|fK?‘y¢krH1/bg1A`G{? Ѹ͎ 1_N k<>_a rCp5h ɾO_mʌV5oooJhGx6uwUkѿ@ k,D)ox#{N3>%L|=(C e !d?/WnEc5[ !&^sFw13/qj) !?O^P&CNؠȼ)ʳ̧?G|x# ɂTC?ؼIhy=xr3L1 Y5ڜa`{Ve<=垄иW] d~(U agGu:c4ګOoiXYӍ=-|; y;qyC|P:5mȵ (BңpCtL<[|;- |ܑz}&f?f6}x3ُNz3ڠ3o,k_ށfuZ^$;О*]? Ҿ" jK+u+pW@.+vm_Siї{.0,X|;^s]WϼنYǮ~f':XEWIYuK;UN1o#{Ft潯ה7I3큷3^%(<z_GNo }_x])ᷠzMھU6顊&`~&%z].{2xjiOGn=i?>=f)#oB#^=ɼG-=x}E OwgdwonD=^]?A: 4Cxf}~W7@A9k5oqg^NUۨ f/#HaW]2{ϞB[^Fe?ӻ7D\79 }E#NUg>_a=s>_'Xp]G`8/^56xk3z~$k,_R|Wy5|ܨ^O_=`mX m|c5lM}x\_MZ>1QgRuHtc5ջBQ$ח=x\ߞx}u3_@ GqNC489/s/ٓ"o!mL7>+W]oVܩ"f<[Gx*T?9[y7WNԽEiNC'BdߝҞ*VP6O?q}ǯɼn:jƔ_<Lݍcܫio}rMd K={.ݎ 91o}R<ӿׯq,գ+RޗĻYwh.[>UgJ^ߝ*RvQ?B7]ʯ;qF-,M{{מG<cd:!g,9xg~+yYs^ߤП^ yayZ^IywoA$4y|w#HW kX;M>5#\[su7z!L'D;}n?Ѱ4rQQg)[dg]O|!G{ij)Y/Pl g /7ҝ2_Xk<Ե}.:s ~+o'N74C6y+9|Ο{D~{䦙hp|U3@xlzϛ<ߑ>4'c}ɓ3m|{Ii@?<4桯nHiyy=-7CqI,&>C ))s{>ԇ%}stfcoOi|1o|i0KݦT5,xO\tB9ȼt(IW;Vj+Ib\e/mqN04@PLz?Wkk1{Zܢ [$Wi(7y(-27y,:4i 9(/>ޝzoڡSYq'[~;?8BN_q0M| 6 mZ؟K>/:CǼu4(֭0aG ֵ#%ucXHƷ5Xm&K}G=tf+1fsNx:(d)GЧzgi\I\ZBPou#`:ƑTy'1tAr_oB1><է}ڝpUJO=[vRnZHPWa^I'թk>g7Q*^IYy.Q>4qXBvfפW'':]Y]0.=VTy:#U[PH#g??_r|~o:9I˓ďOo7 臲7t2|N7v; Ng~%yU"z*q)ƯjOz]fz,2Ir9o3ȸ)>f6#cΎϹHdUib~`G\ij<[HԻV7ȠBFI׀}~= "_nRwfy} =5F$F?zNy %<,0{wOԧ-?qK,pʒS~|ԗ,&]RabXE}f liǴzy#e]=/N伇xWOO8v(;x\2BB'ޥأnoCucsоf@ _at+_3PD ktTus̐m!njw4H=Vwz߯ޟqϥDb}KߜK>#o;v.V]'9h_ 谧2x<ᗠ>ϭyoˏhyʹ&o6|wG %v_b_FQJoFQ][~:> F_N:\o HOiǹ]I|ݍ~ɨ_9+TH=O;K7dM-x~n9 mh 1.ej?:q9Uis5[5޶ OUy'V=$ׅ#~(y: 릟ntUPQۿ6t;~HV?U .#IW%@h¥;9rb\' x~.ɭ]?B#auzcpy[AyK JOAp}=F5 1)P[<ދLIW [q^GmjUCɐCmǍe.b#d;pH/_x5񂪤.Y&|qxב٣B)ډ4p?2e1lTmCkQBF񊇦ch A(I\wʋ||<ýZfw8DgVМ>֤: by.{=O&$U=0<ļ{ǮdcUZA@94sx]h7(|'-q?5h7I3yǹ7gh>E>;5a6Ϯ jqr3Ƕ<BSO !侗Љ:EK?_DՐD݉v&*u8@ !~5i6!b[=Qº2߼dvrj`<< 'C B݆t%  4Xk.g-ͿGѺ0nܪMZyJяbhlWB O bns< Q Y#h.*t^wCۛ? ֜Yd-W,AvI[^ r\s-!eTiOڠϹLx052X {au0{fcC 'jԷē2ކ:!(ߢPq\Ai+u'c]98} Az>C!XCWjnm~ S;~՝b7|I0TS4V֠[7o}8I;dh_h\ߐ2/)Յd@<‹ :v'܍eT0t ~궖_KrL_7r!dXkk> :̛zYxiA풬'l7ݹ ^GA='&@Hlqr} a =U(y̓Ez!E|PTtNYKs8,LоN.z B[E?M^bmUSqb_źRcDtW6B y.,L{$QrFAqzACvqL-/DKSFq^Lҭ O4>в d6WX+}R=7XoF h\ {ߋL, \ƾ8稶?-`_p__ntQ4Ň@r߃uYtG>ͣ7M(DJ> +|B TyHB4eݔuO(֣` (qPr^}Wx8ڪNN:OqqP@ݬkJUȗ7Uҟ<)?-@ܩ|4A|p5Ʉ5=|<2mѯ&^/oL8PaׂO,948b ΂;@*j64角#w02כގ4Do:_hj:a*#\]Lxl&ؿ~k?+i%4})v^V4oGL%]5x{ulL>iXu7I4\y/BH?;yX:#[X*zS!.Pc [>3+M(u}Äq!)A،'ue7J 5xLC7S~Hz6fj`QT~^u彠t65 QYc{Tey m0Ŭ=\M7Yջv}fК ԖA1*">dw1v8N ^'q2/I'O6 h"L:t׀*iIh?ؠL~ӃCп>PGwB A5K俄3Y걧*#c_140w>%7Xz7?_<OS\~k r;7v2I_i䇤3_kXt[ʴSsn0n gKB꧒ϼCId4JA~l! 9[S<ѱWCҎ:Y&2?ِ3,aY;IF|T'nƒhC\ez)C)ֽ`s(+FŽrR?=`?͆zv@͸)w;׭Ry_N|^gY@vx__f@݁!=!‡`JC׍?OO2ܟ&ӉV%!x=|oԃwӘ/g!#{acKTV#A>_yƆK|Ҍ~9zg+ga"9MT)Zp53 gJ}||ěr >_u_(c(>2S|6$H~hߴ-XS $Ks;>hFՈKj~b)$>R*T/2q~_wA5۔'᳞T~Snnq ky3]~D?ԯonp'*wWp^:~v:>Rz wtOߙ<I*sOIâUx}AR#ut8=50,? QJ]}+b^,i͎ YE oii`_-[~Hy'!Y+zk@qZTv^yd!&5"{SJȮ? ^Qߒ| l<h"wO4BuOnө̛T*7j♐xF*wwZ!{iE7@^$=%wRzGeVe4 AZJC!^@$դ;E>#67x4ߩ6H X~>:BFﳚHD|c9yI'J)qW?ufSz? xC:hKs+M@2.yPG3d M创֏=uB_pc3Ϻ>4%>~W>|L4r[>-Pշ}Ms:{4&]rF3㡿jE7@ݯ q_qOx?xM>3}*Dq4oNFXf .H9;k9imM} ^rd>y ]yVȉz˚!( /| 6]gwyFuJ }Jy&ɼ]TqSonͺ>Py~"= A$ Uvr!K͍)kTaBF ®TWNI3wb~mUB${)ϬXF~ս߽!U"obpn ^ߖݍ 'HO u;nq{1/q|;IaauRMdA1ON|q%/U?Sk HoIg^菱S>L+O7]=B8DLwߒ42[݉FB>u3ͩc! 
LCD \oLzr+hgRʼngA󜓙Fi` EZQ~ByTUt&:G.KjH@2_bG]/}zʻM(kƯws%N/Sퟗ)$aAqa J#0Q3ORPḿ$/+OR: yKAKe.GͩTTi9lz| s( >{Կn.W;Mx ɟ棇E mnJͱ]( P<OM#>Os}rկ!?!.Ps `~&e=qQ}M]$nss=@YicGk|(1Ә>yA΀5 x!HiQG(ˑnBSNoUH>@y-vG}֥cN]gɉߗ5x<Fbj͠1H7m~t^$qȹcVZJY-4r;ͣ?qJ4Tk-7> 5=gu#:DݦeVBPwVhvZ ^*2JüJ˓ K~RD'~/Kz~@z% (xC.R{jnҍR|&sƽ&X $Y]궴Moq&S g?|7w6hw8ǺAN%iߙڽa|A|#d@K;LҥdAXH |d P|Sy%Ϲ/Q#BJSAX%sdIM{uz 1ZfHC?qiq}|r%-V NY &_nyطxv/NIPg@^&_Փ슭#kW)|2~TG{uσC'_W=/.e?odusаݎ~8UzLjJ9ehFo=ֽ~t͖wG5];4X;z- Fq3)oʻ^?ro$QK3&?TeJ< 7-h&. _,0o|ʯxURo xNZ~Ns U޻qχ&s<"_u0 s'B:lZr$JU5 HC9 >_A:A7iv9zYR6ň3s"0QLt8^/xeo5Fj3 z8} Һo>_p ^W;i|yo {<ȺK{g [lzr9xgy[ےp]HoB^o#x? }EM&{p<<͎79 vtbD kiޕ>wF? P@_OM1x\/4݁9^ fΩ<֫*]e[,ֱc\,D|R!k}| ؛F׉=|K!m3^Ϗ}nO9 ^p'{ݖW:_'[ 3佫-2 t"4/qStV*D4Z>3'k ./hӿb?}p "9x^CG >M rhE}\Y]7{T7 wυ=UX3 7BDB>Yo0~{xi:  M;x8RȷUsx(2I_kH:|>3z]H(GL*8u">訟Co&'_{{UV+ԙr(`!? uqjW)(๭Hx冝6ߌ3"GP c#=WtחπWuد"]ȥdqxWuxbTOx/~HJ q[@sW/1߳%on_ ^ Sȁymyo*sHz_lgYCo#.кgG9(Qq ];YӯtsI ZCd 􃷗hk"\!((l CdX* /xzLG{L'PւM y=oQ0rnezF=O묳k"%vOOYm2O};I{EC|n?M(`<ܗ{(?]sNS: ~SKs,DokfL=D|h;H^| rB ?ǼmiNS%G;0k93yw s}n|z!+e{dȊa-'V}1ϻyxK(.poNH;?{o9bg@i)u, "u|cBNҌ]i9k*]0ʒeo ͱVX`y/߹+~o|ٛC{M}|jdh=)[|x[-חvt}qQ~}e}H4ϐ?)~vY6-l 4F}SW|os]xΈkI}R>Wz ?(K%>SoZnuD>xs(Cq^/ q~,|qA/饑ԾxZQ3r/Ez^[6:3n./Hs"شvh:ŖŨGCAJגϕ i4;lrr2OC+;*{y",x?|y)m wEtuHw>O^T<36W2O:/73>F>W.EsUt@ǷrdP^E"M_8턹?s=M -[>LnJ`42;T#Yg*4>広 hsN}ܑy{u=@sv/]'<34-}I50?mŠ9 "7^%NlxeѯAd٣҇H? >uDX2y/Ps~ѿGX㼌7=-g|`'w &|{F"nY^_8| ~xlTO/?|s>c onGx@nhvmY:D>Qڍ[jiᕡY埞:<$\8Qht[mP?YCz{?-Ceʮ~ߜj=x|IU΂%yo>9n9|ooB\}U#V?Zv iCGR>ʛ Pp竿Ƹ™}<sC32קΫgAy0߹nA^?'/x^3\ a[?x.|)kӪ\q?ohЯ:w\6}<Wf6CX "R|^uҳ~ӗ@dw7mI}k4Gqߋ׵='(xfI۫n ^&s~KC畨I;{]1q=DfTKxn JFkVwx[4: "Nxgr~=5Or"VBA۩ŷ|~|?U{9,#(y\󦙆s(ߣ~G(?u'3yÇ/Nzk>ǥxGVc|x2u#T wo=W ~%YVCdympjws.>yF~rCAߙi-R^+}܎W#;:ƥϦ R AxmHGGuOKa!rm[IOw? R~;W_TgL[,CtV{Do<(/~yې'^POw ڵC'hZ۷ͻ/sƓ0zTA'NA;p??uア4'x?߂%h=3:7޷;Ho`<˼vfbP|9ȏ*OӜv+@'LNh_{__[|/>g00x̏鱎SyZv5xguy"٭CVrKO:O8,5ü^qW!1;+=@ܝ7?]wիG m)gk' ^70"q\9:w=Ǐ<U~?n݄ s?"=շy}Cd׊/9o;+ ?_;~8X>/G"ҙLCGI-qN;h{_#Ֆ )x~ޓM뾀Z/E]*$/?TYocϑz~?X]GIn8DfV2b9|tsCƽD|5fv >28}-N1vGAA<{Z9q/溒[ܺo~y1t27E>lY\t!ҁq,|r&4GJOdQ_m7 GxП~U2$uO#USQ~wt^ꝬS<\MW4A9yWcT >rD?ǑVUǢadzC̻ieUgE˸n=qՍoA2 י M}ݧwZSZiJ% ϿGx#r`W"ǵw5Fmc7#l/0#4!X[oH|{ܷCNAa\~^iu"7;Qs^,Q[tG Au|gZy֫"k((q륛DA2v_B{{*Z7:1=mѿ'$)Ic ,$y1m vқHT!Ӥg=$$ /}ř4~>b/r@FPXR)ԁflmI!1,zǷr$ }I 9/>8g%>9MuM|H4.^ tA.d~$ϡ5!^82Gy :l<ɛI'z_ψ/:}۾kAa`~8f <_}b<} %%'Ǚ_: %oM$PQwUhJ$IyE3BP]>iy s!E̓~̻YR!GqI9e^"?M!nLWC.PfN7Aѓ) }`]ޢ=J|O5*m_ <ߚ4eyJzj:q](t%XۓXr#%8c8ۏĺR?z/4]b#qC7簂.lBM ނAi3Ɖ?, _^JtAHf?Gu/Zg8L@ I3da;- k聆%ßLҔ@h!Ҙ|MLwqߴ=pQq.~I<, g "~AE%I81[7q+8積JU9ϓ$:IAR\?K榓 n~{&/򹽨p#4ACM7h=ByHYQY8_q + q֛Kp~!}Lg%cqCӄ vtӴT/ qMcsoONA2gޘyuzuc +5z!8Klg:x5yBֻOf( sRϝ`դf? $.C ɑ0˵U';?0B|! oK̓8?i'"7檓Gs;spC, é2nڝKB!'&y=~|-$X=i~<'=ME}vZ}K!^v'Cb*^E4 S/!a&c" Jk!1.'B=N:g/,“A"?J5ET]i @y# Gپp"\4cRe SIӾIy,@ L$Y!}q"K~1!p*r_[xIM8. ZfH`_w,b>f'擎lWgNK<^AOBlA;vWzx!1WY򹱹ԇe"N#H3QD)i( 1#k~>yW# 11EߺAk+ˏk_EH 2~jo omygTs#FH,CsQ7{5:|B%:QI҅PC8"'o'!z?B{&X1`KLЏ?DY4$?>ih&A=eo$LbSPs1MH8iiY"@󀅬Q} --ģ[h99q R?~SzH!AMH}iLC'\c`_YOV$ޮ)C[4'MtٮZeA!H0_bv~9@pp Lr34S! 
Nh7[}Y)d>֢&Z"KMKc$'%׉ !q{C$#Hye"hD`gFlI8dݬiqqⵈ7~:Y2cl_M4=]#gԇ]ȼ 7q&w{d??n!RK~I &΢47Hߓ(Kg X'0YY ,oJKڇ23{I|j5q7χ%Y8Yq_">È.^IJwk'@J04CIMv&>ďI6~o; |E N8YF&E^(S_|u|F8OJ0U$\d"潉s?ŧ-qgB|i.;F<<OZCEpUD!CX4c%!^bc?+աPwEa&y0֐ߓ^Ya=f~ rģ=H_dzL&Xtb 5}QӞ u7!fhaNh gM#(^b @OZOEVqO%'N`(֘t&.[I!N@ iF&˭oQ+uc:?ay-d.n/\j?-vi.PWiqrwn ɚ/Zuz^0t<g^M3ڭ/Nj僟C"(|yܗy㾩ăPtxn71tfbi_wN pPl( "CHcU!qxfc$ZKy֮q':\ɳZw|#[MqT$8?<PhǾ11eԄ} u[ Y"sLqONܯ݁ʐx6MlC: ovK -1:h'C?N AVzc&'qG6W`8OPVD.C&;-я9?).b3:aZbtH EnKx/~x)DI3{Es,]?} ]6ew,4cɊu+Sƈ4E8佤[{ ( RM)<ݒdi( S }+ID9eLkP̟ZC [b ™oQМLy]ԯ,u(֙iK=u{(V<*^ 7 c(ѫWL"OS_du-ߟ!Mp}JDob?G-|Rz%R<S<8' ʲ^3t)*I>Y|Gǻ߲l7%F|<_q\#Y7%_{֠9|_y5Hwh.9hAmyS<2jX#DAx7ޢd KSf [<dkpߜ)~ީw4fCClVxKxnLmFgd>\U-(_$Ag;ҹ=_㡚΁ x?ߩRS:@)y z~2OoΠ~lfB-2v]ց5/6uN%B[^~?Fs?SҟGqI伨G3dpy#,ߴ(-Sm-S 眃UEy">/iw."?{TDeG "N:9$ͤk<xyyzݱ2V7P~Ρ> r}۲BQ\2E{۴RWP(PhPU(ޕ_ҠZeֳ 2I;h?LrZ(ͯ+ӗM<|GN,Mrr(S+wRσ*N K%9+M5=K3:j 4>l߾efƳ&:Oh\r\+~X]7UݗA]$'Y7"CMiK11M?@1󆲤!eIzșԇ-:\`ti+.* J/IM)T}>b"՚t) : i4S\wuw u{ޖ `,Xq!j%}iA~e^4t =o[O?4\<ԁFyVOȿUP-EA8)И8ֿZ9auyCGɮJ滖\7 J>=Ot!v> +20xUҖWPс yXVD11^z9l7'*GR.`>zs31r#AZ@m6{B|+bT` xL1?WB?8P)|\A:;%7KSָzdjF-߭z.ϻI&` $ȋ+q 1V/k:G1*wV֙:Gׁ:8|wE?~HQע%]&HϾek[ ӼKܿ$^vm߭7"?un5x.^"M&P4cO̻&*#Gϳ-sd>dg)+=vuP2LyNGY=`Bw_G*U+#Af]1t7QX9mqK`=:U=m~*Akqߖ:f:!2<'tWԟVk9q()X*48_*Ι\z?.:H}wn|FR/RD64P AV"msK܈d^=(g%h9ߌ0 :R]e^B)%vZ<P0yx1ȿ.*!]x*7Jj7Azq rK3ީ.WB?Ja'v߸fP? `yG1Az6o+;_Ќ7PIЇ#*ɼJW}rkvT?l V9ׂ>nݚ:9FeT3ź(Q<'p:Λt&yF:8k֕{%̗͟(堫S> I]ev.+ l甑c>NAQ/|]JKqUiq{WTڪstCf `(]|恐6?s%W$54:DrdF<B}ȒyG:yiNi;<,Ҏ+_|MCʡ_|v|ꨉ.w~/3PvkH}',֯zA0EV@i7'õae=Y"#^4O֙ 6Չi~Ir*:a={=uWAу6 ?xC-s06(x LxXZEec 3͗Fr]%YO1]=Ҹ'xU24~rfqb$%֏a=&} *3dNO鉶+ hAyᰇ!rB!?G dE T:c@<ǀp?9c)#b:e2#Q9 I㾡߿xUdÁ_9.pegPl|:v-_9$=u-F3KGd:b]h1ݶeHb}ƭ~UF(7jR>@;+y>K xȞ_)$OLmf26}SvMqK|/O>?! Jz%WKߐ<tGM ЏokŊ*QXŊKV߯A/fT///Y//9//yIco驿e[Vo٩[j5SkHOZ#=Fzj52RkdHZ##FFj52RkdLZ#3Ffj52SkdLZ#+FVjY5RkdJZ#+FVj٩5SkdNZ#;Fvj٩5rRkIZ#'FNj95rRkMZ#7Fnj5rSkMZ#/F^jy5RkKZ#/F]dkqVVVVVVVVVVVKwjntZ[-ݭVKw2jn Z[-íVpe2jnLZ[-ӭVte2jnLZ[-˭VreղjYn,Z[-˭VveղjnlZ[-ۭVverj9nZ[-ǭVqrj9n\Z[-׭Vurjn\Z[-ϭVsjynm!G|N |NGe=}|I4#RkyNϕ_s9o 9o~}z_JoCyݑQ쾱I2|Xϱ|x̲w?DZG1o4_X^?QGz3s߷7o_d^(1f~ާ=W9,:?^o*9*~_s=s:H;uv@/x4(/,n}MþW|&m;;z߳v`ڮc0PsӁ|}ϑxi}{ns?`t~;$~u[iq>Ws3$s8(9c-uFyg%mhh^w<({Qvf5 ^g&?3}G{Cs3C^ w_-N}2/azf7F[\5!^>=$1.~&~X C=X|`:ޟ->I|_~}v*=?י7I>xW?ghp}vW[d~sX;a9_{ά=mxt/=g߻}{~Ž=goף_}ʟ7E9g~.#{ [?}_!okqnsl+}Z ko|=oo]v{v=^g<_|>6?w:?|?FZ{s9_Ovs՟>kFZ\kc^7|?/bϾ/~x1ϩű%|v Z;qq^=ߛĿ7^{#=Oq=Znel\m~6.οomiLx_M<|nmSYalW[L̾O ߷d'q?Gky~8׷yo43a;^9e6ϙoqr ۭ5aƭ֎>ycퟵ6ޛj9XS=ε}߲y|}onvߌyBm#:8yTϟc훍_->xƳXws-86g}o=~=¿?/{}ټ-. ;̞3ܡ֏js̳cxoX{o-#@k9o~o?ߏ?\,_lqoWYq:ld< YF<\3m|e6a6;ߏw͓<\;xi6e}o}qKslm͋|`߬_kΞc~_u=G_gl.Z˟7֮ڸ{[`Zk_sGX߻lC<:sy6ko+X~[wya;GEUѴ'`!֝zQ% O@THלuD%7J4E' {|o= ͛U;Qa}%J[S^f#HEdŢ|hiz[Cj)UE[8!vmcܢr~u{Բ{$O5Z˾ PsUsn3$΢CUyI%HF|ÐC(*?$j0bfmĢ+F'}JѲ_#jD2D:C+j C-iԡ߇=G|ː:UʯM7GƸRVl&jNs*jjnm_[?~~|^RT*aßUuiٰ^Nr?U{zm\~ЬD(NŪ?FTmqNuqti [z7Q)$_T_WsQ$5ke#-gHE+?Կu1젢V%#$8WfpD*gDmD ˯hɟ҃[D'?M8Qh7UjtN[~v=+͝~\цHE}o}wEEb; CX ]R& QEb˴|GASFSdDe%* њ)Ekn Qm!g2ctUvr_QhUgkuѼZvG0)=FB!E$\4ޑXxVq,ƚ|(j1n&mQf8F.kXI!?f] dDWD*zw# ==W4lTDͼ}U^+"LZEwjQ% hgh(Q)HKX46֌C~gU7mwV-jN*2}E]c{[T;;MK:mH4-KwuZsgj2~F+@mu/h8,j8ِw=TSrQC.qQQ3bь?7n%ZF~O n*Yɐ(ZDR.0b#ٍ"j5,J3=v*ou_Q]o9~m ?n(Y\&IѨV%J]М׋JEWQhfGȈzQ}!#54Z(3B̈: ٭<݈ȈUhZQ!i *?gDDK/" r]Q?uN߅L$nEC5-vo+j~Rۄϊ9LX]4oU/Em"}5t-;,j~9X"jL]wGGno[4zQM[;Jm>b\KZuN4ZlHEɳ m_hUlb@Z>`0È=jPہ1 DD7 9]Ր拊+ ٹu5ۨ[r̢Υ,)j]ˉ&=o@/j.=&oKh2.m[&E^䗴bDEosQ{`{8WF,@4+/ԚYҬ5K7_N"t+N'4{P.ZFV`iE.4Q]IC:E8t!clzDkVoѠYo!fYV.}k8umCkMB?ߐO]E?} ؘU \nCQ9Xٲ7/=FTlN~F:I =Noq޿EߊЂ=(GDF?oDDWiO!1Qi$]"1"[;L5n7vk(HW9Q#$-e٦3h_v:kDZxtz:5HX4{K3EO輖gDEdj}HQAM@MTaD1~NƭZ0?("eT=Dc?.5AƉgjH-m!vF$+hV5chum. 
wڼ}^!xB7#}@r3e. 9z0~]<=s4 r+Ke ; }:Tj)AUft 4jW M[|g $vaI~[ v!.9A57`g ֧0hx}f@Q~#=p.'_率<5b2oύ ? = Yl~}T>g.}&l9 żA [}n\}mףx}S ߾O-qX~ VWoFw8 qv=23<+Oz;,'s-. yo"߬L?'Gqo~($C_y}ف;xL&~] d"[%U+gu8~<e_r?rrąqcy>s g-V X$ur;˓> =(j(nށq>?6_&TW?\ӷ"a W|/ *-p@+u.w n3 s_@x>ayh(~ϟT={2{rr%t[ XϿ? op}c"/mۏlyt"ol8oxk!'kr>ل,y 9^3QQq+!~1~3Ũן7@ο u-/غ=^!}ȿsp\`oof y{ 5dݸ{$ @@U;,Ȟ~Ѱy/orhmzʹz,@;26!!o9y07X֣r7ȃK(o\Es}?ʪ7!O_5-\ 1~ߩpΧT~.>O<-ǹ+ >Cv~f[ 9c_rv_U^ Eb:jY#<}]卫|"o*}ÿrvXqwklA8zÞG?^̹]yPv2"cK}c#"/@4颣޹;/cP| )w~ oU']89P7,6]9Rp(5~~2嬅zT?k p5ޅo5㱽sCоL/#zy ԰鼼3փh;(< TH&9`PoMp畸lkv[m~ J^ 9՝\'*F B6E\ш-o?{X|>yk-8o } |$a/gn92 ncMVj7hE٨*xA$RQhdTʎ^I\}C!^_uV寜/%~_PzɝheM=oRGc2d^&A<ѐ6h-s.np|vS hx^#pZːD6 !ѷЮۺg?Y~q^B>x/Ӥ1ǗF.fBFy7k6߽wM^=3~J=? ?)ooDh馎yy~ pӘp d߿;Lc4:M׽Wc;>-󆣸:_By> o`=8џzf6,\63!k4!M}_?˅7/E{ \AĐ{y߂S r.;1u|J`JZĝ@Oz&w DB¹ 6o%Ow@ʗ󯻏wrC?}_*[ ?wxRm7Z)a~ xČs7\_K90K;w>7씅mCkgЄv ; kv8 Sύt:3DM k3y}?R^5{B%UP8:H)BzVgiyhGD~vl0pb~4H<كN{~~]-D=xF_f&~8A^;v '¿E;x>h(ڿS?}߾ ld- 󫄔 syZ9ߕvXH땚zy.logGWjrq?ѻx=zZJCח&s>7@h=_<` }~&-o! [!vJoV9O:8~Rwh7vs|~]5$5xDG'sȣ/y_ <{rFf]+ώc|xN[^ϩQcio<]p*^Ypz͝>7[Δ [ pZ~]<)W/>;q}~+//yj^xlhů_IM!o7'1.vCdoϿqC6!\G|z$펄梟/s8'8A9 39"^\=?]oL¯ G4'2D\"RĉezPH¸ ޝ(U*_nFNG1~n|FW_cj'pc jmuPŕkLwGM_Ԙ'A]=Ԡ90~~R_PW=pO8zxʏʏ܈dz`^ޛ! ga?|ߏۚkQÇYhr}4Myh_kq{u*?7"1rc_~#rDz_~PG O^eO"~'H˗# h~Eh7yW{ߧ@3"S0u;>%:_!QȽs8AJts{z-{ht%ϯ/$zJ >RW W|sO:[00<#2w=D<pKžAXvIw_a|&cw/Hϸ?_p펂occ?|?rY~Sإyρ{^> >?óï8}׉*D_kUO8țï7hr;.\CJ_ c$x+YG|BXG/b~vb%X`9#? }=Gp$F}ӵk9`|%of|Xxu9ڑ%7gWs;DZ^AI7a;D zȻ s+N0 BA$-n{p.SUs/&S ȹ6l߸Op)xrpyPVyY/ bߴxy^s=o~N-gB;`88߿ As=*8Sġ}}#-X70N>[b8TЏwz^଍A{&6;]b?JbϪ ~"i@JJzڏ5m _q KG#>6bݣ.#vm=Sz (~ׯroPw5>򠩋۞5c@q<}㸬^p#oP!V.MKq {b޽5W<ױ7Y H|qơ+Lc&Ad0NЏ!o=+wKH>?C\֭(P^! zf=N/y `\1 $^v+b|s汕&l7?C)8{9}+"Ñk42@G P~N8}J{нF-Pɧ ѯa>~?/`GnwiMD"uP؋h8I;%A*~,>C= -c~Sh[q<ɕ8?~Rm?2q(~V-f /}\[\g5S-1ΓMɝ ֔oPב]ejm>OkYokqop,ӗy?u4s Y|RsB \Ah>N# ^%R?YOov~Χ\܎\ >vm:Bܮu(^M'}x'ϴ!Vwzi+M}Cx=>Bw@S>v@ݘѸo=>h?eEߛq{?1^ r7Ÿ /SQW'ioHD$gT;SQבOWBD쐃7L)XS?fp=izY8/e~&Ec~ lxЁUOgn?M'^g=%vI PΕ=7 bźi#Œ9\7qlM$eyc{_w#/'#CѾI b'yڸmy8ȟpR_=w⺗}%U"~P S*obS8ƵRo,yl?W8㸬2C.U ~J ] {:v(~yY:-Ry__*_PAnyCi_vr{p5;eW7c/u/m!ZH| Ë>-{m\a߁nP7p]BWk69??@C1nQCKi0.m~ck0ϟo\_k!.m$> b#p}zbiz1^Pa;9ϫOVgWcU =Sv}4/b }qoUfN2^>3@-SJlC/all}+7i~T3FߍrV11?|C'Bhf>6Q~H%x]/́mÑWK3/O_p=}-ΛK_ŷ3Lĸ_]PoWwb5 aI:΋c\;ʫN5ohE'oK!\ScpH[DB}KWOuC>ѭCjM?Cv[IXk8Ɗ8>"s5/fvxXgžվ>AkD"E?:W ģ>d.4^+b:vN}^q4>F=G0~~Ж)^?Bpn9G:q|o"Hx ux='t{oC;c#'S]~E -ԁ$8zAaNxu`::]>_qs Hv~gFw=orЏ%u[~ʷ"|/Q: Y9Xݖk cs}nO<X9\cb?>7_A, uEaxbSpق: -)t{hH~?IvIC' α ׫j7GsϳYkEp 3^Cže:߇q_!w"gDߍ2c_^5m _ 5a|[{$go=/vgT=&t ;}8ܹ_8"eaxGz&iaSnm5'p(zݢS4__͟u c "^ :q?~q_eO՘FgɄǐ ~<y~S`kHÅ͊8J{gy5@ЃGs3?!N‚ܺpj(9pp97ϟvhLX?v]!x}ͽ*Q_> !d4j4=Z1D܊Im8[QOWQpX?y ~yz_p=8]yC'.G%߬|T'͂oݎAV`r#u,Det} B3-FF@/C$8d`ӳ>'}!{0<Bz/$㳹BQP t<话~Blګ7]w^Ǯu`?ŹU ~DI=kr08I>_z@XRG x>>QGu4 W|ŵ&_k44ƣwis>Ɖdc|$e m~!Br!w8IّL'?9Sıb IoލqOm;Pw2pnF[D}1[ұsbλlW!Dyq`N\O_{2a?Ž NF0_w'+7FQg=vm'/go: ;]Ẓy~5?vRԭD|5/o5~4=0 fAٓd.q}cx88ԟiæע~! uU(׳{qUŷ¾r hW|Vv˱oľfކz\3C6>J&~g+Lu.]ĭc'~6G!r{|G*8 m?] u ( ݧ} Jj~Uzّ0&gҼnvWN&C{#[ǢVn=_OʪF;$1[^iNø#nŷ5y11܇-:Cz=Śv:8zw U|֍Bj-n_+7<#1̺\rBH>M\ͼ]}{?…N#evW܋oBN%z`!c~VC=Oh΂*vD+]]|^ّ'/.x Q%/?~5Qdֳ;T<(I+\BՈ:myG??I[ܥt gEv/']-L2kzl8Ӝڹh&( ~uEimb7<BN~ uq ǔ|DeھG=n;^+g`H-&.x9sk; +q:֧e}ubƭvߍ3˵y6rw2ui,Mis} By6Mp,qa<Ȧ!"Cw!,l?I?byF\y)+<owpsO~DzϏ}}2ןC0KGҶވzؼʂwq!teoQ?h +ճڡ{ W1w-B&oXjǙkK8O}tiNؗܨ =|ƥ.Ě/.99P7g:y=+ YG} SB;~; ACY!XKMsx#uǤԨLi'?Kyʠ@vP)K~;߆LzL%o i79!_%snk('vtvLMV'D5׫. 
B}Q?9~orUlCqr9An';q߿X蒪mB}|8Ǖx}u@;<OX"[8t߇>g&ʸd5Bmu)qflߴwh{߷z>tSDAC nK]<-Z+ľCϗWF>F8g~:O=3pLzpv>@WŠp=D;1?ؖ6 (?IJ  \F yX\=ϗpG>aormOދ诗 {B9c8d:OMގP0FE@|/3!iY:!{Pq^O:lRV#~>-4\\wEoa˱}~VqU|['Q'xfXnVSlH Ȓ?9:F_sn+1.PQ7lʯЮmQӅ4 uB;;O˲` cxt$d(neo-QD(q*qoNGb{ݲa_ 1t sq{/d߂|kxiڿB'O=OfnRo ZܹdhǪ#?b?v9[[90%<^rSwOx8ǯh/ "XogY?,2וu'Nk>gۅK}x'ahEHu,xĊZ\%x&݆^v>NDEnr!?r@{<0=OC8RóO\~Ꭳ{w{ׇKn-8|(q쫵}8g@pM#D̯_'g:ßSPG ?n'uXŰ7j\}]'Bzˋ!8 pw\`?;lVQGpޜ=/P\ʺT>X#uK!`|{ jWnh'~X "_m\p}jLPڽ揊KJCq$,]p+tޚ^Z>¹}ies&ށ/50)Ŏ1xąڿ Wyw+Uީ]޿y=q&HhǾ5]p}M}ܫ{O[|B>"+Wb<&)Zۧ8kb`s:|{GxBSӵA'D:HDu>M3yCoPYxNbԿq`8^ϯ3Bްnƭ6lt^ZBJ+ڹ'ktF6йc 52 wxexIkx`cI\X2U̇04E/81xmXC!@Z~lZNKKma}ċs&9 a 0N𬐻!^[}}Xj yPƵD|4~6.E& }ur݈ 8 n&rV!ש^;Wnج?_%]~]BS6q1w!gF)d 9;;-t-q&"~vq GU;b|W6ϠBσP}|$7R`BIhspwh?g n\H2r+giEzXtj@j1a֣='T. E)X[(OV 6z}S]ts2qapL!-A|}s}!ߗ||}yxM>.(qPMS tyX Z}<2ת=>A=j$K,0EgBq탮t)h&NoB0>6VȶxJ 8ګ ]ENx>KҞ?;r_<~v]$WF^jLeuy/¸Y{PY#G'b}_qx`z;@Nx) :?i(M[øj,!ls; ^DxF8B}y7.G \/A-O4pG 8Z|7cK5w9:ߍi? RxOB؇L:*ޝ8s]@'=gA^M)ܲ`ߠ<9\OOǸpy<Irc{e㹾1d񼣃{ݝ(c}5x;1Q}ȽߖE@;Z5ߵO߂ui ~?OXˠ>ˉp/||8R?'aHsOc!+hG5N񼧤{w|S_;,t Ѿp_:@K% ox 4}d7.c^|OV"!՘ >QxS=X+x4ƕNgn.z&iïهB=s+t@}[u>h*>oh\\{力8T_N3>W&ྃs Δ] a;*yB|kG~Υ?sr0Μ*}{e9y(u1#z^9´>7 كu4G,qxtCH݂JpU?: m1W_j ;Os |$ u/RvRhHq 7Z;W<a 1(uu3]ASp+T+sGx']缲0U5~-['JQҲz{7Bw'oK_Q'WB> A8&-#ob?)Hq_*l['>6 v4 |t0b?(A<0w?ˮ I,a$ wq~/!#~Dgw K owK ŠK }5k|@=qs4]xִ~@w9eb2_sD~H=郏!OO㞻\Nમ4sE٧]WqLwG g2AwLzJM t BOq?Bq]{BUd \@am +Bꄆ_B'[:=(?!'$ sq/ܜ,>.q yaDžR&Ѯ~FN?usG>-r}S*.hݙoצ@ʉk|`-5[>/4۾ ab9ԅ^89{"Q$:){ WB}p~a9=Gp~Ax ύ"wJ[9 sBox$9|E8gjze׌1g'z79}DjnAYv%QiGݺ?#D|~ (f /q>'F~Fck:x.syڹa1B9g 4ɰ;s}ۉxޑ.u隸Y{.oWvm=ڷoMooWwrv8ܳTVv-ŷY񾃸_;o{O~J(w[ܷB{z6w-cN(~^DtϷo\;k#$t]r|q>ZDΊuԞO+oWM7_r{]>=DOQN%"ޢEiqSXpA(O=6S,ƃL1~^$yq}]hj:;y%9/uz4u|rq%gX,S/OjP-]g;(W8$]%'(;W^1ڋyK!Oַ7}Z~=8H>b|M}iߋ Mb5ωuqR+~/KKE}EP)-9߯NO]9;pNw1^\_(+S,'9#1ԋkޢ"?9|;y-K+.?;|/rԉ\Q+/Sٮr|wh/buFzϯ⽬Q$EvKSC7^T<b%_:\&|\~2_+Crѹ%%%d~r}|rK]"9+[&A~/ǻV{Y:9Dyz/x8#xQSW@_d; Ah'b~zOkU9_x /_ӜނD Il?'i9x^֯@\'_!緜roq3Y^r|K{\'}%oωru:E9kDzInry"A]r>rꜗS'y}Faޢ| v\[ooQ&iϻ NyXzivNkr4_"|v`!KJzjav(8(I7Ҟ&~M1};KJ{ q)7~>:' ~HX..9#CpD D\,ΏnN"癴%zo'"*~]O|MgQ.I_;KDWgv&auUK1/Ή=E_l^A'=i^%; <rފq[w_Gܿ_䜨n78X.ǯ\Gs ~;d*$q_.JJ;C::9O-S7q_?~&nJJ'~(2r}S\$)_^/y9*/9jDy]m]N::qxȧOr}_7vB#KW y?;Y{JC+Er~nAGў:.r]+!{QC}$Sg'87NƍvS ql/9]*E+&!X_β]~_}8,d@Gmr霕vr2i(O!## ~$\} 끈I~vo<>O|{ J;\St~H;V}|I nth3CsUOsV(a~މr5ypyDHE[䛜W E[#Y a{Qv丬I;Y_r~H"r=3OxOsuS_HPZgrREy-,ڡI[.r~y ^\Ci_vqJWӤ=`GWYO}>Ջ뻋q$Gj$DH$ۥS󸂴t;J;muُTb\Ix,o1eL+>"Cd>dAq)iHѝe@駊z+槴zuҎ2+w8#~[xv{nH;Vr]K"O x)}3d^Or^H{EAN6?2%r5x\eUryjWKDNqIWC.:i}_~zquXޯn5QFd\볘&{2n&~2W來hO!8/iȸ{d}z$GJ;aUr}>KArI[YocJCڧrՊ|=%ǝ@"] (a!cUڟ|?N:x?q{Q^iߟe GK ~ਗ7ht=< ^ݞ\VOrK^WD{ݯ~7"?9|v؟* ~l7ɡ!̢ܵn:/-!"ީoCs8*)/$~#פ,>z9&=%eL\c?'9'Te@?2\c3x~_nr=?k؇P6sz+/&ǿQo{}TOҏiΰO&}Ұ(/y}H;OrN]_#U,?,_YNx\-Ǜo Q?N8<ۿ q!5oχ~rNrK=mX些grT{INI=̷y|\H;rJJ?XPM%eA]9}NZ}Nr^~HBþ7qEQ/#TzK[eNx_x_h)zV`gK~/4aF{F[uG^I9xhnn\s=D =*a Oؗ='H;eЫ6r}`o7Jw\H2.#i6:e9+#y@_o:'˩=%ey\Y[˸EL%{RW-n>K=AK]lo.u=|rH D"}wC*/urKP~g}~LG&9@d<~A.wx!2N"%fiq(l8o뷴d"ǩs9.d\\ơ~Oy_ ,ƕ'dI.?GU帒x/ >XoyNB8_I{QƱ='|~t{R"ƃ5v(u~5n<ܕoD^XTW{YB(#YsI{]lm=Rci=$=iJTs?F/J@sp^@5:i-1ĭu`ȸd;}>}N2-CJ_'/YO9d2*ޠ7ү #I;ؠCq"Cwq oyCe9~{xԹqeq#TqP5ߌ {}?q]?%r"Ĺ0NHj_}I,;y]a z|'RӮU71+xi~2*海<$C?_kѮr?#x1+x;9_y)@?noDr}7"Gm?c;΅%nZ꙼侽~z?q@q '1e\I֥Ӯ+ݎmt{a?S7nZڡ}+pRw#uR,A;E9.2q_jKC/+8 ګl?A"bsp>< ^X2V|<ʟ6/ ,뇜 \sIcҏ_h_hoПT|Qa.\rq"&Ge'nHSvW]!!sϵ5 F/ߒGwztwpX[y C#1D{nH>HegU.yW{]`kw:'QOigȸH=r~t3?e(%獮vl'yIG^Szʸ%rI݌7͙ԝx'!/SjOqv[j0rY>sId.(6ԟ RRQ~YR|UtF}l=*ϻI.I9ڰE Rimq$-I;u{pnY\Wa8aԱmBy@sxH]f*~lCu>`o Z+#Q,#*ˣ |?[hg8"ɝ,' WPސ9r]}la}3xav.1ķd{upè4c\t}[U?o&%C>B> :zAWmx~>r F}Y9Xr 3eys* `9*|6>_F;HtjYA/CT~.XS6Qҏ룮K6r>X_gd[hWy%\C(1b|,R7(ur?yqNcEtsM=IIݒ!~*vdUY|\sL=PԹxosprH;Qk\/# 
CQsߪ0sggxYIYjސ:9oyy)._H=K?OB_z%'zXkX :5ɇ>=hἉr<M%G H?q\s d_C67!h)o^!.^lkJ<_!b9.թ2($? וvl_<<"ǥCNtWiCj2qf]%LJ?I('熝2#ԉv(j\yO M7_mZ]٠srtBE{Πinϋ(235K.74ak 3+pzϙ~Usʘ1w[2kьY v2fK|nA}0}Yfϐ/xPˮ̶KynP/C =RG&YaX6]AG_/Mˮ<`zLσy0=*7Grc*7rc*7rc*7rc*7rc*7r3L*7ͤr3L*7ͤr3L*7ͬr3*7ͬr3*7ͬr,*7͢r,*7͢r,*7ͪrܬ*7ͪrܬ*7ͪrl*7ͦrl*7ͦrl*7ͮr*7ͮr*7ͮrs*7͡rs*7͡rsKb S,a%L)0XKb S,a%L)0XKb S,a%L)0XKb S,a%L)0XKb S,a%L)0XKb S,a%L)0XKb S,a%L)0XKb S,a%L)0XKb S,a%L)0XKb S,a%L)0XKb S,a%L)0XKb S,a%L)0XKb S,a%LĤXbR,1)KL%&bIĤXbR,1)KL%&bIĤXbR,1)KL%&bIĤXbR,1)KL%&bIĤXbR,1)KL%&bIĤXbR,1)KL%&bIĤXbR,1)KL%&bIĤXbR,1)KL%&bIĤXbR,1)KL%&bIĤXbR,1)KL%&bIĤXbR,1)KL%&bIĤXbR,1)KL%&bIĤXbR,1)K̊%fbYĬXbV,1+K̊%fbYĬXbV,1+K̊%fbYĬXbV,1+K̊%fbYĬXbV,1+K̊%fbYĬXbV,1+K̊%fbYĬXbV,1+K̊%fbYĬXbV,1+K̊%fbYĬXbV,1+K̊%fbYĬXbV,1+K̊%fbYĬXbV,1+K̊%fbYĬXbV,1+K̊%fbYĬXbV,1+K̊%fbYĢXbQ,(XK,%bEĢXbQ,(XK,%bEĢXbQ,(XK,%bEĢXbQ,(XK,%bEĢXbQ,(XK,%bEĢXbQ,(XK,%bEĢXbQ,(XK,%bEĢXbQ,(XK,%bEĢXbQ,(XK,%bEĢXbQ,(XK,%bEĢXbQ,(XK,%bEĢXbQ,(XK,%bEĢXbQ,(XK%VbUĪXbU,*XK%VbUĪXbU,*XK%VbUĪXbU,*XK%VbUĪXbU,*XK%VbUĪXbU,*XK%VbUĪXbU,*XK%VbUĪXbU,*XK%VbUĪXbU,*XK%VbUĪXbU,*XK%VbUĪXbU,*XK%VbUĪXbU,*XK%VbUĪXbU,*XK%VbUĦXbS,)Kl%6bMĦXbS,)Kl%6bMĦXbS,)Kl%6bMĦXbS,)Kl%6bMĦXbS,)Kl%6bMĦXbS,)Kl%6bMĦXbS,)Kl%6bMĦXbS,)Kl%6bMĦXbS,)Kl%6bMĦXbS,)Kl%6bMĦXbS,)Kl%6bMĦXbS,)Kl%6bMĦXbS,)K%vb]ĮXbW,+K%vb]ĮXbW,+K%vb]ĮXbW,+K%vb]ĮXbW,+K%vb]ĮXbW,+K%vb]ĮXbW,+K%vb]ĮXbW,+K%vb]ĮXbW,+K%vb]ĮXbW,+K%vb]ĮXbW,+K%vb]ĮXbW,+K%vb]ĮXbW,+K%vb]ġXP,q(8K%bCġXP,q(8K%bCġXP,q(8K%bCġXP,q(8K%bCġXP,q(8K%bCġXP,q(8K%bCġXP,q(8K%bCġXP,q(8K%bCġXP,q(8K%bCġXP,q(8K%bCġXP,q(8K%bCġXP,q(8K%bCġXP,q(8ttAo7#fm%v7ׇC!|}H>$_ׇCe$_Fe$_Fe$_Fe$_Fe$_F5|M$_D5|M$_D5|M$_3L5|$_3L5|$_3L|-$_ B|-$_ B|-$_+J|$_+J|$_+J|m$_F|m$_F|m$_;N|$_;N|$_;Nu|$_Au|$_Au| ||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||#bW^1+Fx#bW^1+Fx#bW^1+Fx#bW^1+Fx#bW^1+Fx#bW^1+Fx#bW^1+Fx#bW^1+Fx#bW^1+Fx#bW^1+Fx#bW^1+Fx#bW^1+Fx#bWDxe"2^LW&+ᕉDxe"2^LW&+ᕉDxe"2^LW&+ᕉDxe"2^LW&+ᕉDxe"2^LW&+ᕉDxe"2^LW&+ᕉDxe"2^LW&+ᕉDxe"2^LW&+ᕉDxe"2^LW&+ᕉDxe"2^LW&+ᕉDxe"2^LW&+ᕉDxe"2^LW&+ᕉDxe"2^̄Wf+3ᕙLxe&2^ ̄Wf+3ᕙLxe&2^ ̄Wf+3ᕙLxe&2^ ̄Wf+3ᕙLxe&2^ ̄Wf+3ᕙLxe&2^ ̄Wf+3ᕙLxe&2^ ̄Wf+3ᕙLxe&2^ ̄Wf+3ᕙLxe&2^ ̄Wf+3ᕙLxe&2^ ̄Wf+3ᕙLxe&2^ ̄Wf+3ᕙLxe&2^ ̄Wf+3ᕙLxe&2^ ̄Wf+3ᕙBxe!^Y,W+ ᕅBxe!^Y,W+ ᕅBxe!^Y,W+ ᕅBxe!^Y,W+ ᕅBxe!^Y,W+ ᕅBxe!^Y,W+ ᕅBxe!^Y,W+ ᕅBxe!^Y,W+ ᕅBxe!^Y,W+ ᕅBxe!^Y,W+ ᕅBxe!^Y,W+ ᕅBxe!^Y,W+ ᕅBxe!^YWV++ᕕJxe%^Y WV++ᕕJxe%^Y WV++ᕕJxe%^Y WV++ᕕJxe%^Y WV++ᕕJxe%^Y WV++ᕕJxe%^Y WV++ᕕJxe%^Y WV++ᕕJxe%^Y WV++ᕕJxe%^Y WV++ᕕJxe%^Y WV++ᕕJxe%^Y WV++ᕕJxe%^Y WV++ᕕFxe#^lW6+ᕍFxe#^lW6+ᕍFxe#^lW6+ᕍFxe#^lW6+ᕍFxe#^lW6+ᕍFxe#^lW6+ᕍFxe#^lW6+ᕍFxe#^lW6+ᕍFxe#^lW6+ᕍFxe#^lW6+ᕍFxe#^lW6+ᕍFxe#^lW6+ᕍFxe#^Wv+;ᕝNxe'^ Wv+;ᕝNxe'^ Wv+;ᕝNxe'^ Wv+;ᕝNxe'^ Wv+;ᕝNxe'^ Wv+;ᕝNxe'^ Wv+;ᕝNxe'^ Wv+;ᕝNxe'^ Wv+;ᕝNxe'^ Wv+;ᕝNxe'^ Wv+;ᕝNxe'^ Wv+;ᕝNxe'^ Wv+;ᕝAx r^9W+ᕃAx r^9W+ᕃAx r^9W+ᕃAx r^9W+ᕃAx r^9W+ᕃAx r^9W+ᕃAx r^9W+ᕃAx r^9W+ᕃAx r^9W+ᕃAx r^9W+ᕃAx r^9W+ᕃAx r^9W+ᕃAx 
r^};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ};#vFѷ3ogDΈ}MDn"vѷD&o7}MDn"vѷD&o7}MDn"vѷv/v<wYdi3c^^WH"_H^-,W ?n=ZP֔__|owk[.^-l_|[KzZeKn}['-mvjCɼjýژW ~7{t'BFXi ^mdGkؒ5%kw+:ݻLn^Xڲ~]fjs4!?/yҒ1^m;6DKlZ5(O៏ׯklJ 9)~Pܱl &BҀ&\wnEl|(rv_kFp[lʄa>k)7 a;u%7AZu4vxe)r~g_6%w 57uu ţ^W˴Ǚ .]Z: ҍ =s3誯xԨݼ jޮ;T0o$hAOC@q(*y<!<-|||} 5p->]VBwx{@܅<`~9oFs< r=T 7dk۳$ػrCFBJfG,' 4f_+\ c/.ع~|r4r{| ϧ'oǯ aq|?B?Kv{I^+!p)I8a(~/Y7CP^ ,$oBs1X›Nb}į_Z j^-~~!=d Yvy( !4/03z">7C(;h߹|{vx+^ϋsUߍ;B|%ݲH-~ϬN9<9?,~=Pui~K|vkހoy9!$W_H?!K ow_2+!}(]^O!ycxj]5Ix;Yμ<b C"/\Ma7vO~vB.4qPt _zSo]r~+;BپfOȗ/^ uܜ; c/ۯ"r0'C0|P |-sg ^ϱ#ȇا!x![^*[_ӡz~7^ήPL r2+oNP}{'4~PWKhw.@>k H{]OEqi~®=_>y D{3JpEv(s1=y}/z/ y{n'&|Pm'P0=`ڿfԧe. *^<1e$.w8Ckx>k!p ^ (~sYwAQC9wXw܌ɼ2^ |-?‡؞]&v_[~UPor=g\ s)?up_? CCC|p,אy<ߓ;E( zޟ@\zYHO/@dU {?;y4mwyP]Ty iQu8]_=3s*X'p~V׭U@.!V[飐7b,o\M5,OX| >@ζ:,圥h߀|)ׅKD-kbGly;B<!jm%4!r7/DD;o9pgw.؄㻶,~-P6j@zb\&B8av=x _ŋ&u?7u124z3Y!ȗk*paX}#5s>az\o8 ^y=Al묬-qh_.y)MO"˫:sq00S[V ?:g b3 /EK Ưν?>y+yWv}ߖB;.WWWǸ{"ֿDHna]|M sG0^+ f|o!z^`ϙ{_ڹ_tf,|nWB^_<&PNpIXY|n{י!jz>\XD/G3/ I\AJa~7׳>KC;"$cö>w˧A'vBSj!B3B7C7__rرC_81xp.u-A{lGz ]a#ky}m|_%\:xBbXsȫUwa{;8R2WÀǐ;u<9T T5cjV,D$7 k_xXw,UG1]/==!{#~u[FO@{ky5_)kPouS_CCh74eyq>ߦBp{h} _a w%/쇬ݏLja l/#] o dMqۇ8 ^~Whǐs#80Aoy{Q: Szy}9{\Gc)h 3!˗'::q;@}RH~X4'?W ~Ð32\~vsu=pr$`N$Mc~SwTẑkނvܝd(~x ds2ѮwNqى\p]q~y)Σ^Wz~fC,~??照Jj~L^?qX sx{=qz`= c&uhw$|4xK܏w ?9n@QƯIȥd͟q ^/?-Z|#d~xH8g5Me1>!"N,gBUPDGT104Gt3r(L=vS7bU;iKPnحh-\{e ;4߀PG< AϡÅؿ%ː*ڥ`\HOC?GBK T~U~F'Ӌ _p= c~ DEt Ǹ_c>B{~ߧi;|Ebߠ݇Xh@?4u.BQWȳhֶ߮>yy'38q.G0v(B5\ñ6 VJCkȍC{+y~}!qǧT:0.X`\J߸ZcS&p=lEق߄q1)yrC?p?b^.9/אps(OFZ_SP^C݇}CmHm>W]0~#౏M8/= 5v90[p?w:[qBFqݨIbK{+ +_{bxVw ȹGs}_4׫Bj~6 0cK8qԷw{.E6}=^?p*ʿܿ\8^sƵVy> 8Dp._lF,Ob!tƏ8c^O K26 ^!$ \vM^q,hst.)8Җ_y@GT3 _= x_5;(sI~ hr۽"N8'JnG9' av?OZ;K6b3MIY\e/~!_<8wZK2 3h'u>{AFz)ǰ|;,hw|;{2.=n8~=cžNTi&?ڇ_Z~:9@ޤ(~=A۹tzrVb*WE$[:?r:w]~>8.ș6-O1&8 1^qxî0-y3[0oūю,9߹&> pM|!rm(CW`t[1nvif@pݏ{'|4\@\a*Ο;>_7 Rf'q~ Ax^ 3G_Br<nD!$DI/4{7wu|[p1Om [4Ӝ34akomymNIqvǠ`\ \5"G=:Խ/}w‰XU~߃}p&6ҭmO>ŸW}E֐~Mq#y=rO; l"}#h^GyH1/ ">Pcx>ioE_Dz^Oy4!88 G/:vRfkS_>v ;FznB?8;q| K_i'~ |1jrƌ}Ӧq9}D)-Zެw]+(e7|=I}@4-&%9æݙꍺM:~$dpܽgOž !.78Ny~. 3)b<3`#U7gw%x?v~װoL?+=CA/As|tgWNjr|T=활Up˟-օNQ/wo! 
_fk'/{۫}y=q}/M3oH>SmSO!GZ* 甾׽|+ѯR،R1zc!Kg^T>.]eyrbwPuZWCyRiVKR1vMS:3ރϮq_/kuxq<<}c@}AnGDܵU>Bo^,ikw~ƸAfwǿr8HF^O쇼ΙgJq_Jހq-o]y ~Z wi%0>_>{ P7M?R ,98 (]~^}up%??6ޞB+R끞~~˿_sxX_ؿ 2v g 4ݭy"nWf} gcdOuLC|_ID057~ِ5nlv;>:A/rVwD{6aAC/q,~an]~yb-!kqޤ^-1`"rx;'ۭI= Kq^xS^u%tyG;.?y;] B>{}Gb"pK\Bʄx2nP{o*rB\Oz_^(7V~i4?4ú?,=9zX'y-ѹj'!!s!~ ZƠpхku2kN/v_u xԗ'\1_>sx\Lp #ݞv9 בx;A@K 8ݳxq=?2+Nhqؠ< ASW {<.hOwïqԑxjl?}pABk?{6jYl| s~, S|؏zi:t,\OЭ;s5~N^7b~/z;yQ2-gQ_G+ ø^i8MoL\CDyO3H~=/qm^Sۿ9{͂].t`d[oxF@ЗQ*-P> 9so n1C2A~n}?~8w x;4K18߽<.[= ׃4L vKoy}>CO܈jk opMvЉS?{ގ*{j6: _ O&Du=|LC[KF.lV9p9-)fZ\y5{كx ]O}!.:RܗEuEe ^-| {>v)έJ#ڽO^aOҗb8Rǻ?:5խ;oz;-47];1N }7N| # ?,CVMo;=t& )pH͎Dg…?Iߟ">fHznSxj#߁FHO3hp[7E"ꋙ}߂Tԗpe# !K$@;pz+臖 9-U%t2b`] ܀;!_q|0ՍZ<1kF8y9{CmY]pp*_7 וLRn%yyůqnQ3 Ξ&sy[/Wù9t/O6]? i{EA%ޣGEd㿯|-[\^@283^5}3/P$56: Q7;[!g s"n;<!U?+>R)ĉeh <AY>=`uMPU{sฏ>ώ4?vKWcu2LW߂˘8wq xRV}0i'я ߢvvOsv}q-]֯q{ߎ>Dgoa.Xԃ9B? sn4BWkqΨ_IDv1Ϥx`5:vBIom{Au.t)+h4^|r* x~ztE{uTYs%B'\)"\Ͽʎ\>y^wdSv(yX1%󽰞`ܡAM_Q\zp F m[G?B,QmIBҍ.[h0F?+|9Ehτla_f7YcqŸOF;<4GFP0E5Xf/.,5nm7n}؝_Mô0ǽLkgoJDv γic^`+AG6 K?qBsL_a>`{A\O~3Z\&K1^yA~?|͋W %k票C*COɸ7qYLD]fȺ< W\ihur \j?&?Fe|L;_?S2ԟ>XO)^: dٵ6d+pd /yOHyE wgu(q+v\5E<8Οdj:ĴG8!⨡^vi({畫b3X`J˔4, r;x%݉BTo9C=<qG4|q:9>$A?3AW%qfGjwͯK3cCO E^>oyBX._pK#!SGXw+{kB'B4Pg4~oEt: x *eqk+}!s[K M:{/-4st\'hkf>aᗁw;qZ8._" ;@0>W/B*_du; V*S66/q r(u إhyyΐ}MGvl\w݋Qw;1>[ gku |آJ`ߐŹAS#q|wp.\ojHJ՞WK~W3&, q>=QˎIC>\r+o)Cd_ġ>E?kzq&`~}%!<_8+п>gʝE=!QG'l ihY?~J| s)z~:u0nUx >3kmZ{j!A LD6h!L$8Ovȷ~Qbo3}Lyߒ2? n¸|@_q \=_vtT};ylܖq>ϻP`zGq-ȭjf]˄'qm\\+8>v?R&]s.:KOs=4?ƝB,3xW\g9d?Nw+G-MGEljos`ǚw 9y㘰;y-C;WX\(rl+jګ+~b>~jR~ݫPwC(GqٸT}\61zwoU5öikbbwb *Hww% "*b bbb<ߋ1sϚ5짰.7x/룳 yeu^ +r)Z GR;ۨ6i]1o%{A up[gIxdli +`Byu[yW.ܫP׺+Go,ls# Q{놼CqICq2 T$u-WnS8}iUUa3[ץxf$Ndvf6# Fx*͛]lX}!ލ >IMoKo0) Xj ~ >5)3>Ŵ17Wp>5 C\c_CLƍƅ)yl0pQ:[4?]xcاS03 w~i7SbGƫySܮih C:\ϊxexLZ8>hBaݰT0>ւ۵- ڗ%<8Pykx0xЏ{3u < |a?ͅib6+wIUoCw{{Ҥ}fkou EB|_J zO+Z1)̻>M.K]2$2Kߡw+WӸ~þpx=pOZӎøZg 坥a{aܺwSi&۰/\o\ay|\[0iXWU:qhW{$NK q8C},|Udb xC7 :0)\ZoK7:_|!/ˇg{3>0o#7|;Ba;AFC,{2ǝ]+oJ˔> u[ڗ3`'-hleՁ`CIHnXuk~^jf]^ƆpɆx;4cKܻVX}<@Ux&Ӈ )A܇(nK%4?WHE}WG[8}  e= ~C' /+aVDy;? 
o+4ԏ@0ɺin_ o| yBuܓ ߗ;)G𲾧!F>2].f }vC;ͅ =~)|/GA*u[  3G}þY[8>W* o.)<7p# Kڿ!y ֥(Kݕ|ĥ7^]\G:iJ/ yZ#5/  7x5J}[!9ICƚIu=w1ԕ}?4{c73ϼ0\WVzB_#MמIHQ>+Xp'9mw.ܿS{i0 jx.{([YnIyw{ܓrwKd4ۛv%C6p7G} K2kϞ}b2![11{qEuhR\BR'C]"Ȕ p_ZU#{)~!/2!+iA R|}a1RV3_JY!0.Т b 84ڭm^tᾐxP*hgBzB7jhwr A- 6Gv=GN.~wǰwk9C-;x!|pwjV{0&{yv^C)zC} $#epIah?7J.mgvM?V>H{>&wn@&o%|v 2_5fYߛ2fp7׎ۚ3&u)ǟ g!nu}'Qnhړ#iFk&*jMhR;m [ۗc[Dwxpؐe-7+ m}.>m}Ә'xղR<Y~汰2Źmb=k~TSeTx~ny/ VB1qs''ICdž\ Sڭrp_ї?>-aݔdv~{G.ACoG #kUwuZZtgn_/53gs'~V|U,NucI[?Li ^â$hd [Q}!,X;|u2վ|ëO=f79"߿g^4Vn*jNcӕCF U- uOC}CqjN[7ÕCNub)<0gL~]'x̬~o:1gk׉Hl/*-Yp)y:l!VB>w٧҇!qPFfeM;ak8Fk{,WbҢ[fsOOϵ|o; rsBnC[s[+ٷNji#ڷky[c/Y|pr^v(?ms)og}|[UW_ _ꗸgq}|y~ܾ [io((xWz.o.EIqe +:i-|ײIb |Zt,SVo\n[w_\xd?!dp[:{Goyȵv@B}'ׅGWfv^% [96wW >ޭ%G9wJpwmNօ[ÚSݐ6ϿZ )Iϒsug==Y|xiT=:%DžDn|M)ǯM?_&Ɠݪ|^rʵ;nR7̂+A>l5<>QsSSؾZ)@Ʋ .wms&q-^CNA櫹sݔ3sF6A/^uO?b?s%>7Cy*ӌi{LYs?_֏ we.!#̰e{|Q{;M&ϜG;h+/lܥc~#s7,3[oj(it̉&/W~=I^~>V(hڮ:,tqH;t}LO_}}(^aP<?][p 㶛90牜ۅk<~auJZ%yBF/L'yׁ?/mE߻kۨ'?|"m 3b͑_ᵑ^ك 걗-jFVmӻ}X8+kO)|nױ7 ⑯8C1Xaf/m!Wr~8*o^V"՘%숸0/i]OϺTX_k1ȹ TC:̀Fc >?Fo קei#7|h5B>wk"g<_W3O6;Oڼ+b* l >^i 'MMzv_'+F(9O.pdh[4\q>WӲ߅2'ڿӒ3<󗜩OJ\kcy5k }+f+| Mм5Cڄϋҧ߇5m={5OO7;M8(p[7o]Ky(-O*=1^9qd9?y>񀨛^eH|v/sQ&㘿cE.מkrލ)jF/W#C⤜'8*]tJ>y0ޗnCZ[>/#*ř.59cp|~4~Zy SExGJ9]vjZ-[qJWycyYq38߇?ׁ?(70.K_ xVw?Ygy |Oy1=ek#示>z1o ahhuR/"y\|9`;9b\=i OX|혿ar 9+onqE]^[gu`GKCg8/8/Ũz|wB@pO9y8*gi[y8'Ҹ"l_/^Zr77h2aO8IU~jOv`r~ri)wv/J3\my< ip-s y/x؏^Us?SnwX6Y-MrĄםM͑=:mݤYHC۟5ZU-yaum)Bw,?N~!xp )]q\uC}cw1?5'}ic7<2g雾CU[3ǜr 旟?a>xKi~4bE~Z)-]nujτ+BnR+{zq9z_pxr~I/R6?`ƺ-o?`>&8^}#-]nXsrXB[v} r]8 wu)pcWCruQoKmrX}D,Sl} ߲NTx ~|c mÄY6n)mY΋-U)ky<x\:.}/Ε|k92|ָGrNˬUQ'smts;SdyX/m^y=#'\'qHa/Λ%,lxW^sc1?8/RZc3_b=.[i:ֿ k<_}? Gq/w}y 'p??v?p\,Oϑ81_y4ÿ#/o>cފ'ut?1&qH_Ҹ.?a 7r'=b?uZwbr}JGNxΆ|W8FIu#i=W/K0r)|Nqȼ\ ׿1\u"|X !8$^o=GC0?"[9I=]'C> y>r#=; 9Ix [y ΊT8.}'R ?7n//J㢼97{p} k8NO'!?|-H^wޛ+z.8u0<_?xq}Nq}SX{X:_g$σuueI a_p י1_AxMO8~`y cݡ$`=yzrKc 0Ob짘"O:yŮ>rByMp/KҲߔyox㌗&Uخ1ߒ;:XɆy=@pވXʧ~Xg$1??*3opVu)3p=zEcV^/eK7| 3d/ruSױ&~/>"<ǿ!/2 ۷by>yc}EQ:8_  wc?YxOk.hB?|˷ap\Oz5!c {Ș8ue"7$58X}#w8yx7R{[b<-`.]q//-?|Ɏfex&\/P׍㳜g^ o^)?NXw: p^ynuM!ȷYW?UY^\u\*]s[k| /ߥ6=|>x8S02g5Gߔj-aސ+qFIW,`|q۩kc8֡XR'dۣr~v m_"yv/J0>I}lma~z &!p%/1ίms]sbG 1|X|+ܮV~eK^9:8cyKD'aźο=z4?8a^l7}^hub{@ >O~7`,vA}>zny |{Wu:|NsDe!EqҺ9qp^[>_!ۮ:M*uJ"y=+O)#c%^ :Q('׉Iϭ_Ux 7n8?e*p=땸 5GvӦci:Sa]_Ac_K|/@i;u?yimo)mZ;9(i_|u\oT)(G O?e.`).,_^NlO؏ߊ~8WOx#Ϸ Ay}=_bgI%Oⳟ޸eu߆uKlov:XW|n^uUc,ic藢τy-CoHͫ3"~e .ܟ~uAcp>/''a?q3r"כ$_ ?bMqكC~fPvG}]9(i~%5ivgǣ7܎L_ڰ_[_UnJ;sy'>8yh!uߎ?C# SȞ?ΓsX+o}EGRRΛɱ1{^Qd<: }<'w_q'p~/{'|337R]^~ty<]X\> ֙0 #ӎE`'!XW |8ק^#r]7rJL伮h\su0Sa}h]Jq| ۳ #\C Ǡw[ _\|w|'lߤyMdCpu\Nrޅ8DC?2:̻HrEO8N`~g=8 ϱ!o0$y|~_|?Xȟ5zA;q_/{_ 'kiI|8<8NxYqփʕnlX'z0RJ8zOE짘cPtq4܏x}^죒oXQO~c ~O}ӏ籞ugɓϯ8iy|9 !r(mQr~M~πߥu~r 5"7S:PIw.Ckub^;s*<>/Џu"']ޢ ]/ d~'}ze =mOc;z4fx%/R#1z,O\?yցyKEg$ c$K8}\c&#}X@~<|p>(WrCy|*C8 7lX|7E!XlטG㸁e`ค*\F`?B^ ~}G9S87Ͼ"y):<-خp8WyԿe^O~ti$R]пy ^7wozq;<{}0+=`=%=|>g>>;ϊz%ΥS>"\{Te6֝0 9?6YOG77H'΁\؟/w^Wb~y/{8>X*Nc{C "qd/V}>q^2'1qH [/O|}y<JcXy G}"~Sc=|=+W8~b?|>paVˋ^l~q>e^M}:z Ƀ=Xüi_:P$ I@S }DY/3̯`ߖ[b˜<9!q]n8\<&Ҽ5?a;)yLX|09EMbC~zfy 9^X/2p:.oHJp{1_}E?b7̷qߓX_ngu:;cݟy=Kx8}:??'z.wz >7={]{=~]tQ/e>"1,Z<>q^&z;Hy.Wljzs`^W=g?/?f8a{F<[)X"X&7x֯TaJ=^ 쯘aY0b~ǘǣ ^)?1+ qzS$ "ufyI3A^b^ BZٰjIMJ+ x0?ü5p= ؎q'R>^$?yublGxX<'^(>n`=h8~b^`̯}/6GQ|GQ|GQ|GQ|GQ|GQ|GQ|GQ|GQ|GQ|GQ|(>(>(>(>(>(>(>(>(>(>(>QTm|Qp+QDł?Kn>T֭&-XdlMNJӺ-\Dѣy/Zl00ByPR3myD;'**JjZzC)EBOJJ?iOZN$`r &`r &`r &`r &`r &P1r C)P1r C)P1r C%P1Tr C%P1Tr C%P1r C-P1r C-P1r C#14r C#14r C#1r C+1r C+1r C'1tr C'1tr C'1r C/1r C/1FGGF|Ԋ:QDSh M!)D4"BDSh hLDc"јD4&1hLDShJM))E4є"RDShJM%D4T"JDSh*M%D4"ZDShjM-E44"FDӈhM#iD44"FDӊhZM+iE4Ѵ"VDӊhZM'D4t"NDӉh:M'D4"^DӋhzM/E4&XK` ,a%L 0&XK` ,a%L 
0&XK` ,a%L 0&XK` ,a%L 0&XK` ,a%L 0&XK` ,a%L 0&XK` ,a%L 0&XK` ,a%L 0&XK` ,a%L 0&XK` ,a%L 0&XK` ,a%L 0&XK` ,a%L 0&XK` ,a%J`RD)X,Q (K%J`RD)X,Q (K%J`RD)X,Q (K%J`RD)X,Q (K%J`RD)X,Q (K%J`RD)X,Q (K%J`RD)X,Q (K%J`RD)X,Q (K%J`RD)X,Q (K%J`RD)X,Q (K%J`RD)X,Q (K%J`RD)X,Q (K%J`RD)X,Q KT%*`JD%X,Q KT%*`JD%X,Q KT%*`JD%X,Q KT%*`JD%X,Q KT%*`JD%X,Q KT%*`JD%X,Q KT%*`JD%X,Q KT%*`JD%X,Q KT%*`JD%X,Q KT%*`JD%X,Q KT%*`JD%X,Q KT%*`JD%X,Q KT%j`ZD-X,Q KԂ%j`ZD-X,Q KԂ%j`ZD-X,Q KԂ%j`ZD-X,Q KԂ%j`ZD-X,Q KԂ%j`ZD-X,Q KԂ%j`ZD-X,Q KԂ%j`ZD-X,Q KԂ%j`ZD-X,Q KԂ%j`ZD-X,Q KԂ%j`ZD-X,Q KԂ%j`ZD-X,Q KԂ%j`ZD-X,hK4%`FD#X,hK4%`FD#X,hK4%`FD#X,hK4%`FD#X,hK4%`FD#X,hK4%`FD#X,hK4%`FD#X,hK4%`FD#X,hK4%`FD#X,hK4%`FD#X,hK4%`FD#X,hK4%`FD#X,hK4%Z`VD+X, hK%Z`VD+X, hK%Z`VD+X, hK%Z`VD+X, hK%Z`VD+X, hK%Z`VD+X, hK%Z`VD+X, hK%Z`VD+X, hK%Z`VD+X, hK%Z`VD+X, hK%Z`VD+X, hK%Z`VD+X, hK%Z`VD+X, Kt%:`ND'X, Kt%:`ND'X, Kt%:`ND'X, Kt%:`ND'X, Kt%:`ND'X, Kt%:`ND'X, Kt%:`ND'X, Kt%:`ND'X, Kt%:`ND'X, Kt%:`ND'X, Kt%:`ND'X, Kt%:`ND'X, Kt%z`^D/X, K%z`^D/X, K%z`^D/X, K%z`^D/X, K%z`^D/X, K%z`^D/X, K%z`^D/X, K%z`^D/X, K%z`^D/X, K%z`^D/X, K%z`^D/X, K%z`^D/X, K%z`^D/ >+gF>+g&5䳖|֑$U WA*H\ q$Ue$.#qH\F2e$UJWI*I\%$q$U*WEH\"qU$U*WEI\5&q$UjWMI\5!q5$ՐWCjH\ !q5$ՒZWKjI\-%q$Ւ:WGH\#qu$Ց:WGI\='q$ՓzWOI\+ᕂJAx R^)W +ᕂJAx R^)W +ᕂJAx R^)W +ᕂJAx R^)W +ᕂJAx R^)W +ᕂJAx R^)W +ᕂJAx R^)W +ᕂJAx R^)W +ᕂJAx R^)W +ᕂJAx R^)W +ᕂJAx R^)W +ᕂJAx R^)W +ᕂJAx bW^1+Fx#bW^1+Fx#bW^1+Fx#bW^1+Fx#bW^1+Fx#bW^1+Fx#bW^1+Fx#bW^1+Fx#bW^1+Fx#bW^1+Fx#bW^1+Fx#bW^1+Fx#bW^1+%ᕒJIx$R^) WJ+%ᕒJIx$R^) WJ+%ᕒJIx$R^) WJ+%ᕒJIx$R^) WJ+%ᕒJIx$R^) WJ+%ᕒJIx$R^) WJ+%ᕒJIx$R^) WJ+%ᕒJIx$R^) WJ+%ᕒJIx$R^) WJ+%ᕒJIx$R^) WJ+%ᕒJIx$R^) WJ+%ᕒJIx$R^) WJ+%ᕒJIx$R^TW*+ᕊJEx"R^TW*+ᕊJEx"R^TW*+ᕊJEx"R^TW*+ᕊJEx"R^TW*+ᕊJEx"R^TW*+ᕊJEx"R^TW*+ᕊJEx"R^TW*+ᕊJEx"R^TW*+ᕊJEx"R^TW*+ᕊJEx"R^TW*+ᕊJEx"R^TW*+ᕊJEx"R^TW*+5ᕚJMx&R^ ԄWj+5ᕚJMx&R^ ԄWj+5ᕚJMx&R^ ԄWj+5ᕚJMx&R^ ԄWj+5ᕚJMx&R^ ԄWj+5ᕚJMx&R^ ԄWj+5ᕚJMx&R^ ԄWj+5ᕚJMx&R^ ԄWj+5ᕚJMx&R^ ԄWj+5ᕚJMx&R^ ԄWj+5ᕚJMx&R^ ԄWj+5ᕚJMx&R^ ԄWj+5ᕚJMx&^i4W+ ᕆJCx!^i4W+ ᕆJCx!^i4W+ ᕆJCx!^i4W+ ᕆJCx!^i4W+ ᕆJCx!^i4W+ ᕆJCx!^i4W+ ᕆJCx!^i4W+ ᕆJCx!^i4W+ ᕆJCx!^i4W+ ᕆJCx!^i4W+ ᕆJCx!^i4W+ ᕆJCx!^i4W+-ᕖJKx%^i WZ+-ᕖJKx%^i WZ+-ᕖJKx%^i WZ+-ᕖJKx%^i WZ+-ᕖJKx%^i WZ+-ᕖJKx%^i WZ+-ᕖJKx%^i WZ+-ᕖJKx%^i WZ+-ᕖJKx%^i WZ+-ᕖJKx%^i WZ+-ᕖJKx%^i WZ+-ᕖJKx%^i WZ+-ᕖJKx%^tW:+ᕎJGx#^tW:+ᕎJGx#^tW:+ᕎJGx#^tW:+ᕎJGx#^tW:+ᕎJGx#^tW:+ᕎJGx#^tW:+ᕎJGx#^tW:+ᕎJGx#^tW:+ᕎJGx#^tW:+ᕎJGx#^tW:+ᕎJGx#^tW:+ᕎJGx#^tW:+=ᕞJOx'^ Wz+=ᕞJOx'^ Wz+=ᕞJOx'^ Wz+=ᕞJOx'^ Wz+=ᕞJOx'^ Wz+=ᕞJOx'^ Wz+=ᕞJOx'^ Wz+=ᕞJOx'^ Wz+=ᕞJOx'^ Wz+=ᕞJOx'^ Wz+=ᕞJOx'^ Wz+=ᕞJOx'^ 
Wz+=ᕞJOxEvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFvFv%ەoW]Iv%ەoW]Iv%ەoW]Iv%ەoW]Iv%ەoW]Iv~{ >*Ս筛jɢusi}(-"CO?s"8&߽^ 쫏'=d}ḅh~ݬK^G&<^_6÷cR1soxGexu:_W0i>l(5E079a 8\2s *1ڟܻ3m'&dGJȋ&To5^˿`Ro?˙ދJW'G?ZA{ϯgMs"u<>^/d3 ?dC_'󃁿[ul/׭[8SSqgkWq>v・:cj}1 <h>> mt/9_IC|lم_grxT/,;UX>Jw?q2lk_ѻgJpYǀ;uzw9>Wap7x}ʓ =xyr>a:䍌< xZ;'\EM&leO嵶 >oXsx^3N 5۵9s]C jy[]@՛*^@gno1N.E{oêDf{ο'L>7m•pj.k%B磉.Ż|Ex{7rFe`\h6[x/ߩu?ٵ>;F g{8}#J\25曧ms v!p˪[更fp&zCh퇾=J=ǽHYg!9Q1%?S/&'8uz_Zbfg=jh$p JZg:X3D7wS2gWQ)t*s>GBi~??!mqk 'nW]QsO 9l:ev#a7=5tæ}-cq=;%aǻyZ5ߊ~ܶSF =g0Op[ +j[s_T] m{r<n6R*g@~νEB4tWdDdX'gt m/Jzd9zT-i´CS!NpfKImԪiL2pjл\jQ)kݺl(1F]3^=p x wi*nG: ~޼hGh:/o#~>S~,>ի?zwpaW's^.)}GZ_wΦ[Gbƌˏ'=ŗwȝVT{3l}C oS{LjSٵs ڜl3o3ߛx0JQ3 &t^9H7u̲C_  G|W+SN{w×snzgjmjV~A!kCf6f]!^Y{_ԶG#] ~,aW;4 Ye<ލG̽"os?pE3 r6~upQܣx`1,7[?Ӝ{AIu _ښ7у] rN3[Z{?oʃVu7n}pD]grJ3!mi*~$8st '=ێ) ˷lv΁ݥޤS{֪,|fԮuޗ)qC~ K&__[:K;>G2pY|di[pvSBسe{6v-H*V ïWp~?q۫f.n{W^n>7?#<>2rspqQ9/|]E<`ݤKQ'=,ʉ_('ױ9f~4.#Mm -F%G^zhlZL`S8nL>Ta{qYy6j:VjƔ:ɝ &aDr8ڳwmz̳[ܹ|'?7ys]sNC~5ɯ&s,<==zCdpsJV7?/?eUL&q&ל6qX=}l6j93쾝9x~^Ιy ]_ƶKuZ\T-1F=ԊԄhvN }1]ݤxp3s&cxChVYm!䲺S g) ͘eY%wѕœp垹BlS>twL!xuVu^@T2gqY}~wטI½% BQU6e@{~qopnⱝN+7kakyO;:-1 .ǎvnA}e ;h)f)Fk/{3.{ p7ϟbM;qƘmma]s_gp;k| 8y㙤h>wY1$pKxZqlä|iݷ;r|66{.=uK}"M6^ ?X2.3ߗsС3CC~ci"{qaA3u+z5vc܉L^GLrԩ;uMf[Hm_s<`S7t^9"; &Uݦ{Nc팝-7>= SmK3;:{#7Z?50`T_dҥFncJ5uU~ t-s_>dMhߋk x:Ӫ/ܼ-OC`{uz}C0^o6'3}nHNfb f^zLJ^~9<{̱jyhX7pg.=r]}.f3֫BEO&}8Οg^,]ھߒQǡC˽No1RK{ixN A5 +>Mҍű gu/3+W[GigLN]Kn1m}_-:[<{b;Yӽp{ѳ">x|kb8pq+ᩦ8z+McVp9y76OKmǚ^4˝\uF_l;t;zvIRɃ3/.W'-n6fu08cR*o,;~3-_ A>Sgçr`ύ=G[Vm9׸'^j=|6%lx|C scOn>Jb :?jћ }ӕLsV5t]L[y=ӒM^A ck/6{WhqO] onӓ ulܱsk/ҔGu]oY7T=J_=pܭaE}Z~Z 
iʕlxgy!~jϭWuu5s/#͞uS9[ k|N=]UW :{qwk36<74p4~_ںX-njd6wy_e99]jxăҝ7F1A68!66+|/{fdʤN- \ Ry.M|'?370ac>l҄-`Wa_ z8?c\~foN`3y5ݐLmez硛GIwZ [=aʁCuu&orO/?_~']yc:پ|ޝ/Xiܺ9vnMZ.Ӟ:(/}}f.vVMZya=SՌ >k<ӧUe&r9>CPSy~NR{ۛim=:,Նw6Sѕ6NW/)*lS z"ߩwZF9=p V<^vj8.UՋ \%nng7&Xx/m՞?/8vFKZ[ ΋{Ui)_1@nop6|涯UO8"4Fs&qf}dO=:paץ:w=}#b~pO_&}gae!C]o\ tU!l~|rW{}|)z鯴reh7X.jwhp;439|5/wrǁ#o&;M͈0_ O#jl;˱t{f(iw4>ww4b;-O˄Cjgݳ[|Ll 7i캼],}{wvv)~YoGuV ^m6[lyjO7n/!9+e-xV-u+qoG Qmb2[\^v:̗km48.LZ/x)j=婦8]}۾>v 85+km 5/Bkeh Sw3[erFQUsk>mKoF[~Ii3w傊̄{Ͱ^>1mRD;/3nA9p鲶):r6D\G'NLǍ<a4گ.3oյMHV qVm[ߛ-{T0fopܧcAJp~sgN@Tļϟ[ \"٤Cofkq{!n>k#0\݃L$^sny1c~G Cz}qFe@e B ~uOTJrKFϟхG!y=s8lKw#ϯXv\PO4Y~j|6\}GkqAZ5"֪GB.1 3UiqCA-x`SSn!יvyPVqǹnٳ*Us;T:?̶ σJ6k0o oEvu*k9*}wb'YKDgl[Xp.6j*nR@[}pcM`ŞU_ KLW/x?qiyz_xlP{%΋1-owe6D;Z< 8݃(/잩iۭ <)n}6쬿b=UvxV/٘sO>aK|vŴfwDŽ5Vv9T.Fik;}VyLutnת۲ő{>~_Ӆ *l)Q^ !a|h⸡xpZ55k]UJ N\7鄂*SÃ&+7h+ot`kh<`^럻OAn}9OyJ'u) i5Ye ,n37z1I DL$/k1]&c`r'+֖Ƴ!(Q6 " 63Z5.z ZKxZv6#UsA:كU%!lMew;wxQU |񲠞ݿ/_mvF-n N%%lYۺsQ;&T{Ӆm%޷[κ@㸛\d''y7pt;'xW?;Nuz2C{#JYU69flk+sӘ~mQm~kYeN|2ޜ!旪m|mBbv?/ãJH3o|BeV'NUzr{MTc,/̛b+նѴYb9/lRd߂wr;[CL^%ZvNσ*ӈ40Ts^[Hqq˹I9/UqsJNkU1!nn|FMvK-}-œa٨5ҹk7gt\N nQG+-vhbWֹo /Zq[F~IxܯeFo^UOd qݒݎ'U,4_3aw68yMAp߄))3pmjR ml;>njm!gmږ1@=$'Vh R}9O>ҵڞ!x5M* ~g\mAc6͹U)V,lCg7}d9a;[y7aVק[Aٹ{c'Ab\Rf}n*L#pS:o>:YZ Ў_K7{J_[\~Y~`t,hWnq©j~"l5it/6uѬH|˄A!ڒoz5KVv_i~-u=e6̚l qs,Y+^:_pʏv2w5U3~.],΀]*fڸC7<._~q_7fuάh峹ڈ}`|,Ƕ\b1uw}ăQ6).*hqUԪӃp }阣͗ƷwN+|3LZqO ?ZF3n뛵xP!.FW U+]ʽeHw6n=hޱ1$[cVnRFbn\)P?}3?Kp Q.oH8?=\CFqZ Wb8cc'Oh9r'YwJהx=­l-܂;Xwsg;HN%\:Fnm٢q_=bCXW~sQa `͵D<װgǺGM=*z.=Ç[-[R\rj,7|mqe_zn}3{1|t~$ik޻Z7XţBSF)ܒ}=Zu} O Z{# z~ZBi7 D]~v{:|aap<^;VT1 8X=7 ~vB/#672'&Kv3`}ԮRT)j{ 5!l?IwG^ ͚;!rC_7nXwtmR77UTrՙնVͩK/7-wy nT?Ԭm+gxPlOb§.6sW=ܛ!`UZg C[+v42z ͆N.CV>Z6YO[Y.^QhM\TpX<{c>iUudpmp^~%G/PnlKYzI_I1-rmZdq,=<]7Dʌʟ߂;|-c+V_QUˎ'_`G|77ά17sȃ<`Ǔi{k [.U5ِmBgZ]Z~}]?e;v;5w9_J'3V8[xtNjk<ؔw*@qsvFp]7LY硋Bq}Û-lUi%Dv0k;Di~ϼq 8YjUnBYlg`^eooǖ;\ };;6ʦ/9~NlgSV]1ƫjiF#]78cO}]pKj5_pwژ:}ZK All&'>CLMQWa Qu՛4ǶVϭz4nwn=* ǍokFj =+* &e6k, n߳rǽ?_!FA~yV--;au۫m }L=ԛq7Z ,`u+M,s;j^ Zs%u׍ Cfăږ}ºjxv,k:w4rQK!]9-wk /!+(}5{A^0G>j!ŇLCԼɝ_\rVsޫMN=p;oT?P ^ 2[Vujq[C@p-mv6QF ,盄y!\6D̶Vhh9q{%U{7<4Хg7]c3pط93Nޱ?*0[1~QKvLN9`08Ǭ8X1qϹZebZ]0s*k͵z+IJxhN;ۻZtGBW8s{~m_cۋ%fDcn1 y>oS+%L%Q~ܿ?Q˧ 'wKc_@r[5Ny^^~ػS2\w41Fy-K3n)18ΪbM?;;;E^m%w=6.ӗyvs{`mϡ57‰W&#| \|n$5Džҳu58_UXz/ݷց'MtlCҏI(=~].<_VK{xJd7K~?.u/Om6Jౚw29Ǎ0ћjN3Xm[rL*kS l'!e+]>յ e¹:4Z̊zxܵ] xR]5 kuo_y\eîavwq)Ǭ^cz1msJFcxk;\k4[~k8j޴><<}MƇź!$zms&<(o ,pyƌGtX><ߝCÁuJoƣִ0rK gXܖ{wl~௟=?hfbؐY&cwy3k'A#ޡ{OCzV[ˠVk\2~`Q1aϩwypCncҍwߦqAFk;֦%^},yl뾪cK_SC/Mq=߀sY2(/̣F[m&52z&誜p ?m}P k} _uU"۷<lj{XӴFϯ%_C ϻU?4~C|Lů|9{E"&.{k"XvP~iqxn"eeܹ1o!\>\s8kYǨ+dlrۭ}!F!.?|n~]<z=[Z/mi3jy>E խ!b~*nW4QÝnF po >ʎ]PrK>c_%+cF~HC/7u iZw=3<ՔЬIm7G[C x6Ű64fD`%?aI7[񝦖=.XW{;N^[*tM~V!>NQ;+]V\49b}d7htԡ\;Q-p?w]Vaƞ)?} GrCa1øyrCeՋSvx H|`~o>v-W2 \ΨQܘ=}ٳ7zע cyA׵~3)N3ǪVLb㥞`2Լͷ0N۳FLcofo5 nMg{Q03;6>{m>A}nO|w'ps`c!tc!vá>Ym2zO7`;&iłv$fw V< T HvHIXbRǏǹhZָlh׬Z~a{ac*o$s]']I WG^:S) gr#nՍ<[3s<Vgt5'Ar𪠽=Y7"mNbqUG>>l%ך]is'6~ ,y Lj|g z%GVEw77Mqgс{mm MyxRFQuZyl5XK]9J/H:c { iw]z{ccK`Z)_xCӵ6Dkx'Ԁ>y^!QY&u b|[Mܤc[׍ .&GIfgXG7ővi;G_wu%?;BT$+hE ("QH%QZ(+[{o++Hµ}?py_qx>/lb@ 0OվWeb?+z- e^KKl؃&]?A5㮶giPb1ccDvǩPL+уNj( I C #g!n ޠ Ba}kP~Jj>%Eyz W}c3VG1Z 51(,/;.U I+ }wFzK2pDV/PEDi7*S%.- 2-.5n~@`K}Vǯ+ i .B`OkGXz>nL6H}*tz=_{ eYŒuPoR%֒e u vy7Ilt!|뜨ڠ>N N-%U(+!;H yfJ7?Tp%iJѡwL#_U7-cz0j<rXMaD)lBF|c-!B1ozxI6T9ȺY蝶*=)I_\_ӆ*$wU;ҥֺ{@%}im͐# N=Eh"}eQuZ+jrk%{Ah'(UKfZ: 4 v܈䟍|^ou*j*6eRbGkNmXxTEKmGJ =rkG̾롔SΪq(/³R1(0oN5mCsPXI;nCMiFK" NY]К|I[)J5*auXp}ݠֆuQUɯ;AW\'PaW3Tȭq:"W6Eנa*PRSuQoM濠VzY94c~Rz՝攗ᵴ1BZ:mXҙ(:juɪg |2QFb's M_zzv+Ϸm پ@=ϫ߭BkPA&BHM]A-62]H)1sH6 =bv *NA1x3̃PlYIr|ACmhT{ 
_sFQ)zQ#I2T4:܅3;à^Nd$^!FBhoҤ2P/frHN7S!orrz Զ C77kz:KoYPKMb>5, U(5j-q™o #-rD~K{|3b«>Tz6*v .}C+rR-aʗ)稉|JƟ43@WBs!cңRдvr*O;Ub*+^rPVdN:yWyon9<^# ~' ue3-OG '?j \=ituBW "BB&-QMXwgʚc*u}LTFYѵ(\uXX+5@SOosa5ZoD!}$uKs:7'v*~ F*;AՉlrHU%˛y_'imah;nJ? ~tgD7K%9Hu ~6NB uC凓z۵a=AdbQQn;Pglғqr~J)#qy`Ћ5;madCqy;PƷ[.T1]硥вY[,YPjO(24]j$<?{oFyGwAմGd&= ~2V _TբJQq<^,; iWآ@!i廩zjh:b '?n髡&PbTQN|/oQ)]ן 1ѓqnVŇ\@ޜ(}sšktְm1 |;u+ ؙPH41Vb>BuZhz+w8y Թ.oT>M]B)AeI3=}Eit RYgI&R롨_s($qxX7fT;Bơ$>wLB]>b1W ]s_킗noxeKEeGxYn'd)^8!MO){L+*aVRZm+5zVhψ}grh> ~.z<6-B֡(k/{x=^?Irg涹/Zmu rӂP=}[ZYPGW۶Hg༔r6S]Ik]zcy:|8G=~?H{/Y4lrGo* 5B͢^g_zcKeDNKXj[XAabT_Ey#G&r^VTFK= Ժf&+oE蓼POXnuWt0jJPIsҾNWcA!%TEv}~9r5{}nޤdhIHA7.: gM@1HzzYhF6i ռ;qs'Pҋ(mq3G8z4s=& M<S7X ;^HG16_(MP)Źkߢ}>+{֌x(\w* ,ly{ >¯<{^:j?k(EzXb^#d|e*r CWR mlRekSjvAq;G!-ކxg^~}P<#5["~V+8f˽Q~V>ܘȪX2]ΞDeEɁs`Pĺ/c41:nS'K*fyIBՊ2!}n{C[M= UctHZeHX|D5ф2HC䕓=NO QuCogx/L8/B䍝DT%Թ|iz0'LZ~GMA LXh:pP&3iZ/m3/$|]I>lQKphJ=^R~N^ QDEwCIl|43Rm >%ʏ*7h»Ƿ֛x6zv@~1%$ 5;8/CӿFA6!]?jX* GZx@Ƹ : riF(5oftTɫb#{o- gX8 QO^lE-o# #Ƌ@=ގ@@F}W _nȰF:hUA_eM!WS*npD̦R /=/pv+zˆg\[QVv{!=T$#(Pd3ms^26ëBK"1(mcF s!5uU( и, },̕2?Nhȶ(=s78 4Obե6a<O<T?+P=kgz]Ty,x+袁gO]EJ#6vHXsnĽ9 v<y)34TU?$k*yvҶU5;Z+KQïS\Xq;I<5"r~ܦ{ u`v8y4p$ʺpݔ='!R;jڳ8,TD43kBiY e Vg@ eI R#էS EN>Åw)zE69cCer׮rM)210Kj>V#9¥QP^S2d&We*'Wi98cGr4P¸L3M%Ў.1 io6^g]B_̫З$ߟֶy۬57=$)r|BAngB{=TOoe{_߉ҬO>J!l J6{z|jE)2ePGX)@=?jd cV*3a<@6|2k`ÆLVԡfYi7`eU~=:0zPn„(NoiOv{,F<&C-ʋ66dw4|: 9VDsP|K8nDߗ@=P0Q[7ʵl|9XS.\B^c"3m!#ι~6wt*(fEyN(/pU~Lzuݛ>DZ c.P%nѵZ5Pø?8unzw'$%b%!ps C'N4Y @2y(R9lGQ9XL& YcyEhJe;i oj{A۴ЩI3/(RqTؕehA|p4msW2LyvRzncBY<\CsvtEAEie3i,g#sE,{YB%Sq''d.{QbXSy^>̓}+3]`j;U1ʚ<`:5.x͗s^Cm);Bj:zT.(zA UV{o:vYAi64ipȍtg2w==~tii?r&ߜL.-"Ϙӫw&P{5WQv3(EwNSϷp, ~b tؼ2%RӄNOI^s 4[Oţ/StEaOw ~o WX&:a -F@}YPj_-r}(]gb-uST['wY“ 2/$<  P"Qvr#hrt;RॽVG킦2yB%T&=Qa M_oy!O-n#y)4SFS!HQ-7GRM^'&w!2KP(+dp2D`i1}dMh v?aGmT ҷ dSʟl ;eW~ ?BZGrF7/ ̹D}35dQMn(u 'i+ b:zj]/t -Uf*iC-\wZGyP/(淹a9ϯIT(I(`́)#T e2 9Pٍ(˳PtR1;jJef` ]X~vnHU?~+K_ )/x¡CWQkɁo(;-m<ͅcPWXDhyz/$Զ~XdW:>"Q5v8?<}7-@ǃE&>-5{24(\ν24˷0coaMȀg3< 5Aj?T C-i-[pCht% Gd T17A`(׶Jgz9TtZnԊJ|tC_mP_ZA<μJ|>,WO/!mԴ~_IrsWC(5ikJxC} 9\z'.yoOq!xGy@{FVPX&-Ծ-E2ʓ܄s@ձ`x+"hRg4N_(g$oS=э>yWw*灚oKڐy!^3|?%x2E>1- jQs.>(UrU6_}5t߸85K&OdP;Aٶl??|+[S?=[Uorj_Ѝ'bs(Rw̾bc%Xw24n$\{ $U/Dtѯ@a GPP >e@nCpk;j(;j7R mg͉?CM m$AJ)9Yl[*PߙCԮ\಼3;%ܞ:.0D_q:č^271nO-f8gU-[$2%[s? t}ΒA0pۼ0c9?+@I-ylg{n@2^?YNSmb@qvRhtb"p]^?s# %@WA,̝"n)C"L)ۆq/̳WUA7k$)HgU,{%8 2"#@-OKJ}V QoP+{kSXf0EQd}ETE{*ЇEwYq gH~ n'g/@.\bo/ yC@DGf@yاۊPO0sPA~. 
m[iT ݮI0=OiP4Q zL̙c0۲w(+K?6 5; Ԕ K@Od$Q8 ϒlqlFmjH~&2n@%\f,y8> SF7 Ue7>y;fnS7 .v 4-v(Jѯ1yڻl> K#@i:~n{ܫZo7y^s&0Ep(ƌm@-uCW= S/vߞ!0h}H(|r=_ivj]wNO|au D,g0]~nd'R_2̡=zkc&bzׇ\x~nb] +!Nasׁ#K@.6=.skS4~TY}/cB"Ϥ“/og>C ǁXIw!703L݂?}[u $w=u@mԒI0)6n=JoB\k>hǙ3@>IB XyO?F[6 i׀n{\\@.P= lفid6~fe|=n+ogP%B,o_JE\r 30ҁ^b~ jy ^ WwS]H^:@R;p( 3@ *}c[՚h덄9:`m0{H=^kFW!p5^40œ׀`MW M?U|rTdKߝ-34C 'E_q\ZͽK@d4n3d KXo*S?_:/Rcl@th gǔ7qE#ÍwCCE`wuW%M깼llXa7jd dJYPκ3~E3>lRj뙅.d h.?'v^ h]Tץ-@ =k56р%O &]Y1kہ)HԇԔ@~MmM 4ph225@ {]݄903۬ձN?ٗ-+r/xQ$P/(4mniaT@7g$ xUHnZ_^]0w4Jߣ@j+]gSm0'QB&x7+#DOin 0]1@_JTLߠmH\jP>8[6k^[ >RD#gqB.UYwP,ފCcDFl{㎕iw Lu ȤNWqΊO%q4bM4L[J2LGYNuOkvuLcQ4+w @-Qh?l7}:xo|>Aܭ^@8n$ 5~ߝcK> ~<.F'03p9PH9[7[}LPl ϶/)l>a|@3NiOn蹵F7?\ĹS8T`9(&J{`s׿h9ȴtҤ5rŽeaDe?_~/WJdä@>~0 }|R.@# _3Vρ<,-6sv'0sc^%Ef)Eoڗ͛Y"PXT[}x#y!}7c)?pȿքaV%VW5=nx>DnK07.P.0 uW}F;v;jN+T~ }dkƷkcdAn0MBPi39=s@97LK壱H0yئ8DeCY@qckP/m7`\s2-d_:M[&@Q Kavɹ_q jxV<_ Rm@_4~dDaNJݐ ԫjw^@5 s~3Fپ@2O)s'AU ÃY kMCtvŵu%Ç9n8_7]R;@;pSJ&Ѩv3&}`F̿3@MU)`GrG0x=W'I\@8Dok sO:|FȹMW0dϏ˹@Qr`uG[= $ 7F:;^{}gRk;#i-2p>'-@ UZQ$M'y^<& @vWo5ܑ6}8-ZZӫ8?;^ĸHO`\J=}CGfmB in7,7Thʹյ޸5^ oxV&& T>|h$ 96 jKL/e@Qkل͆Z kr59#NyYid#w$}K YO>X\S DeOGua"P<.alNf`H7rT3\I@.0LB{'`v%c?aO}g<C>M%~Y ()$CL0H# s16, X7}Mgu߁fh2< ,I@ۯ-ugDB)@!N mz3w9#݁>. ssߥq`lI8rD9?,P'o)ΰQ`$( yQr f.ْ@hetȆ#qNiI‚@ڷ9v*Spy驟W$sS@+_1+揟'p|zr\J#5caE$7P{s(v bkچbxN[M̳ke9uֶc~9#2 cg+𥆚x?/{1[!!(rV%o} eS-b@qNVƼ5PH\/b~P2TMb"LުQ3>R#m-޼9<~H 06څ? ޳|_ўoB݉@]Ubv[ZNnϓ` ѓ$Me@v*K\49se$y6Kg=קc<8kiD2/D*NZL?k;@fH<s#e\!$UL MSḟ>0'p< 0d* q9fCvWWɇxL;"ƥݼ/ܑ.h ]3;lb7v}Pq#4n=~]o}-#;8s1@`#s[YƻɌb0w2ԷNά.羻Y{.SW]>P3XcrJo`>#- Z/@d3y+M14Wq^ߛǁV(Fχ;V SEd\Pr4l`}~-+ëO3m߽_=ak-@Ym."Ekn%-_ |@vO.[pp l0z뀠plA>?~=b'#]n<=csE7nyN(Ye{7سh-g`/(q>L5r֮7p6|` lG3@b9}( P6> `.NJ#6sg.oy c ttcxe7?y"z?uj;^yZx-Ŗ;Ig7O];_W燿>>>>}!Uqٝjd Lcy\[~60_.ިJ 0(~,n4C@ :_wm^`*hj;)ͺpִ!bU8wҌbc3{;+,k?׋@콡ʹ52q>56\?}U5)Qfo~ɩ`A[0yqm|9'E;SsҦp wNy=wÀh] e ̇6Bpn~qu3vOJ:-@xu4.np̰0wWחإg8oh| f٧apߔZM-I7E!$W`|/țfub%uQy6OEf}_/05'(ȝ\i?(U')7C7Ef_jnmƇ'z#f> vz8!pnG^OC z\3z1[L34sm )7Dm@`ޱ=̜1@'ȕ}=4Y@Xnn4ʳ_o/Q g= <2w6= J s|@r[Հ^g29 Ȧ'G_"|/%`&1ۈ=iq+d\lu*m]Jz.PLǔ'ы%!{ʯװ_cW̆3ǎU/juO% v^N-0%Bsw,VKsmqQ7UР: )0uߌr 0c_ߩk q{Zg[|%@3n:l AA!̝g"ӭ9C(b4OC΄oGRJp|V罫}1JQ~R7 uxȡ:ػW~\z#G؃w)87UsSrCC3~_[2z>5h2L[p{ٽn(A$|`#+Q]sos̻f{^[E 0M4ω@\d')Easpw? "orD,0nq-co@M,(b@u"Z.ǀM/<qI?31 3_mg8L%fƷt۰+y {RاK9怸~[f%k]/Eu6-}0 =KUOc7%Wxs5'̊r43!mķ h<駨#>髸_ﹽ:SO߇cM8ڙRXĪP7|Z[w̔uq~ĕG;{su垛>րpPF# ;3]xe㕁^)"}D"4`_@!-#xOs .'B/ZvuuUOzafl4P__~n]{t&_8 kb36mހ9*~IQ+. 
ܽLԹٽ\J70vi|ϼ Xп- ϝ$ dJƱ<ir%ƨܻp]zٶSWYW ͘908>mhg/m}@g(=#(@ G$Op^Ud:m KW[}L>+|{%0V^~"x|*P:X#%[7i/rz蛂):cTOf~ο O7+c6` /j6b€$ٰ~egiJ^'CdV{8W"lTsH%]bi@8vTڎv3":ใ4[sHFoo @ڡCHþr S]ߘ{6dM_!k7>5ٍ+S:~X-l.^qW^n K޵DՇ"R4wm׶8Cu\qPݎT&*ͺ#h$17nQVYڼux ~5N[i0~N zӌ "d己Y5xf*=bn]Hf@TܷV0ްLx~H0TErJjЯ Las?MfTui&Ugqhמ> ;X&>?w<жL9_'*e>M3v@{ 9_ %6kɄJS:3Y )6'JJag?0 xWU@A{ڿ $6 k@_EsrD7y(B 5xoz}7M4at@K..eI6Es6`2*A⾄.0ww[Hgq^6mBiEZ{{gZO_p6,g#cA'?s"P-k@,h4Ԅ=7zd}_WO9P pnm 5*z{xL7kTxmY">|=;T@iw|ږsv<LY6?{,$PgWIF&@]qMb-|#[Mط=bT;g'/V@~Yxf je,$ |Y1>-5O$qQ~>Ӎ?힕8Dpp$w\?4ߋ7q(~ 87~;(D5@:B5Vu_ҵ1*,+j{Y Gp_ wr,Lnz`Ԩ@Pכ9TJ:ksϑ[]Yv?a>4ڡtshELnjlk1 Ws`-WH0ċsjwޗ=}s'Kqo>c} Hϥk=?Ub{0ӴU{G&P %܍G<-@5 ;_ӟ<ټYcFn`z77Žⓞ_"@Qwhћe^ρ[Vqh]+ 2җ* NY-0;*ۉZ ovI@x} R U– ɿgO%9ZWo4_E: w2m=y;a+SN8\IY oyniʌm,oB]W.Pdlrٳ|(uLZ̟X[5%rwh@d |Y:T`JL|({I 6,a d'=M[bi\`xӗ@ouh r5&O ˢ )4ᕒ0wIv6*6vxl56yM{ڱہv{C$,l%r'w?b?y'.#BnQ- }ei/)۲~8ԋhN؏ȯ,e/=@Rͣ/Q+@ި{yONJxp݉sUt]޻Ayj rT;@QHV ߾ ~2oiwgZa%6 [`8v};0YB:`., yjgD5q:9a/'JL:un'znV ^ bSt_9"KÊ#zϊ䀴ڜa 80ھ >%/\837GF>/%w]Я2`xs}"os+_D)71 C:r[=մmP'M!V@ p5`Tunȷ`i&a1(ݘ-bM!`F]J~>@Ցhn?x {?U8-sb ןME&0߹p;,OM;U|}K鄰P( WXUW9`(ՄY$//&M _ß@bێ&`j>05ۄEӽMgboi|zv}JɨB0n[uu_D K2뗴`@W8 m>;i rC{1 <$s;7Xn=[:J{uZ{5VNq`4 m loŊ_տZn {<8[qfd&)QgKzFrã@u,nW\[)+An-=(:EeǁtEWT*{0_Lg5ۜ/@t75 ``.侺'O;"`L}yl->"hAy<@#_xCУ/Ew1[Q%5a@8{J qN۳$L@&ѹ/4=73^\52wao=}Ʒy RyiI8xEm78`0~ {[SNZE/!zp4 4!GB&d l庺Syu1 Kc:2m$ d |'C 2?\Mœ'&cBa @99;/ .W)H홦 mں͞W*P[(HU^;Fpc,+oDQ%9(5(}ӏ^nw_ ȱ1bR;C$6A{XOq=l#s E>ynÜ\؇5=|[PDB_Sǝ`M24P:O LfF@6=z90{rfq^ fo/ORmPwft7<5bp.F s;ZM{P[o7Y p}י#"y VuAW4| O<@3¾jagwǔ@'vì{!٠kgbPT{1~B;`z_1{Sn@S0u`^wESLo\Ӽj^`=ILj%?X02k?A~ݽ3@V ck\"bT-_+Iz03(Jz{bqx4g]@}<ԲCO u/9:+ʥya&u؋tN`PFT@lݤS9Ăb4TߜÜ%3R_- 4,k$[2(oErK˙nvZST)wwi@4}y~f <@n{"~\"@ i~_$u!P);nRW8&~ /_¬Nf)xXn{G) Y沇f?- ėnX7R2Yi.a{WG(sOy(9UreQ `Z|c-u9t o%&lƍ)Ղx/],'*H?;Ķcu{a@^;,[fk;mVpnU;ZN 0n'L9\ ~ݜ-@kxǙ$}^1}D!i$1Q̀ԭ&|?06^`PN\ese@9䠧rnA`<](FYn߸>O4#a ϕC?bݾ뼜7>ŏX}@fIʁP 36j8CJ2@_%s {TU{+|{a?jaWuуh@_-yĭ2#~ߎ mǹ7K.?|A 4sxg&Eّ/ga7e D}9K@ΈC@X;c yjN1q^,sRO (xllR;75M G<ǀ0V(_䗿h܀P@9(uC>1gXD81h@:sp-:,B-Tط^/ tPoj _f6KK,a~涯(0 sWDY ys\,,{7>L1iCca \(f0l;ǥLPڼP-ݜz'=!}*ؾ=n{>"s{y'&b/`63k:3w_Pkn|=,UcS0(zQy;w}A .o /@>T#{5*ށoޠLƮyW_5cdbK@@XZ=Н}z) :Bn_M0uac7 EX~Ĺޣ* bd̷ N^JA&P,>Uwu]x:s]~̋-ZTtyzt)qƸ0 uFq=)ڞ%KC zo! LkgP%sW]LKMc zf Q#-9=#s .8%%YD9ݿ0 $޽RA@#;9ԕwN*]&@稯/bm9Iw|` wѰoH1(sn[QhT732+nm.eאַ@d䈣?⽜Q8yr> Lqwdw* ӕV''vٽkoYKz)j#`6H>So=}7q rosgBTcYPNU%vd~9~n_{s%& 0䧏ܾ`Tߣ V7Q.mPq(=@ߠEs\^vyLiIFam-{‡475pRkΔEw7@2lCM0{awo0>(\ɆG{wGEv~I|1%=)PQ۔3t)Vø.b]Hӽ (Ww},NK" 8y)bjm344(|ǹ,W@!EsC A9-ISV3Fz`fG?U~GU\3/%2,7LtzSg Fv EsGbWlx(l,8{p s ?w #2j1-tϵ.3c]4:,> 9z@av4?_߼S`=-Tyخ؁1I'x+ô{ @;p {Lj%gl[d==eEiQQEfE Q>aQN@om,K^{7$ϻhch#E]iGbnQ6X߀@~xf=>96@ޫa~kt+5ZmP&P{'\\y\>ǽ\r7A{"9pn:~;W9Ч@sxUo)grVݺH ?G9t{D׍﾿r{;4w`(q†Q V0lSځ=SPTgUծM?#Jx4t ȗ[~ޅUKOb.;&,L?07N`ͽ@Nq 5c_ W@1~ZrUO^hʛ]L*FPSy{,l;jehS ˊ8vpf?oBhjwr$*cO]ķOX=bve cA|@c) ϕYݳIWev1[ʁzY>γiF2^Kv/^)[Y@iy*pZE0۰m E6os9b { U\*ͽ=Oך}B/nfFÀ8u@i)aZOȥbG?܌_" ijWso~&Q܍alWZj?o 4A9 ͏ְh99qW ΑJؗ/ߺ D8/3@zi;7889[3J~2qL+Þ B` z#[c'0̓"f$ M\THJ> @u2P& }^-̣ |wq~xqu``2@[. 4dc$̂GY,_Rr oiSNTixV]}ϯ;q- e +\ _Z1PYʾ_@('^ ljN+# ͝@<>7!4iAN m1 stɧ-G%R PMa'0>T's1wD{?.YI˱+@߯"V ߾hzM+9zw@=>xApeE}fq\fcˀ%|jtU4:W?OKiC| >RCTWĚѣY.{$#x^r漙3 Ge@ $o&`$iek@~h\[ԩ 'h)@kp w}ҫ4 :1bM<;sԕwh'< ɕ#x'_f⪭鍊@{z{)jkɥ<=ۑ:Js{P*P-6 r-[r=LDސ4"ڼ@Loj4Pݓͻ&vm?hK3y~6h]fQᤌ? 
Qn4 &r1/`֝9 B-ƹ2Y{Baj`~89wᾗddw@]JUzɏfaDgl&%zCd 1 q˟(^Ide|hQXE'S|Ğbr{f`YY芆mn^:ѥ+@L)rT^w>y جU?`.:j/n@x$@=elU;:jMql}z]0 @M^K HeQT'fa W׿ @8ӎ0?OEUc`"ȴ;$Ťg1/JI/0;nY7}Կj(+i`hbf@{`ur/ \U=?7OM_yI#ua+[ @&w3/]B,>sEq7P.: q-1y\\T3ԖN%͇|ew*~10OR9y -`^}FvR Gom Tȧ~!kc@uZmA'?AE)`(;PO]urxx yt !a8,:q$SJ\E#\gۋy0/fO{i7Ya& Ж 9&#(_V rI&Ni.!k]@;޷3~`Knhx̽o@n} su,v׀zts=tBs=g`fLjuHwk.Ҁq6M?5ʄGD'17&M &óoNqTOno3eb._ySq@ntebq_d`xiT,L[ y؋  "#c9@|? ^?unZ%Ήqu%`k\fOe~xU hwn/B`9e3.$>Gܐ#)03˯`aE`O MSa*0Wc ̵Ď['v09`>WP? W3!VE"@=$ HxCs)!?4mÆ$S2fJ TD2fh$SJPT- 3[Zfy{o-y8m4j= OD޻\DLsaP"U,V \KE3WiIUw8)B㝳hz诃{4+` ύh4f*b/ke4d]GMV3NT)A|ߎƶ=C}UcegttJu18&Bg}~Wc5>?t_}#ӗ˛ qh쥍\hE#4w%14z 46s2L MdR>=*9I8'晣L4t\qQ; ^QꍦN!i}$'1=mC:1L BS]RŷDy)`<ar2o6Fa+ZKf XrhVWh^h[ziX>4(G{>gR[ބ_l΅o88&>|9VtO.\T %y7Ef/1}%KEw_S؃S'~ ̰wBS2TdȗohҷӶEļuhR2>[挅3rΠ /oYߓߔvUM^i/M :bl<FA,?J{|khl8D<ZƧG>J _Q\4{d@}n?wBk-Ehߓ54jփ&E}UARQ^ј˙rBK_4}(2lNFSGR\?, ybk}:Xzh}{A SVShKl9Y4nz33e3PAO#419c ?J VF(h¤`&Tu\0g*{m(W-~Lg.y fG~}`^'q4ld1 Z$NS1UrN4MAB\mu oMП؋&b4wOa^s?y 7_LנU#IhꫵO4fخ e|sg܆v4\d>'*M =8 sI-6^%2ьCY 6>8=&4FZ3ЄRΥ޾dCsXdѨ͋hB#h׳a1h}4^&ωG;0gl+=#4**aq cm~Fj*D$Ry4#8'm_/f23NG3_DZ+*7z ; 4D(/ݱsʓD|hN{a{4މ4N*)3r8=Jk:1Og6a/#Pve4If<8vefgإh 1n=ˑy4))a5My- B$པ 4qQl4W `<{;bzp;Ds^Z}QC>7NY:F (qL&gxݣ<ЌT%g Z7 pZd [^Dcׅ.&BLvg[GT*s^Ɏ@ÇsoKiOuG(ذϣhj;3",Ѥ@FN7fiߐ>o[[GBӬȫhԁ=d*hooOC*Wsb%K 5V!f:qf9䡳̖hrV%꼄-ljG˪"ʂ*1oјʤB43cUƯ=V&Ρə}gBľoP<ZfnGk؝Ưȩ-hzb4=R0/>XBLM>hRt?4v}j^ 󵛁F->EBI8w\]CbdmvV 4%h/KJ7~tًѽ_4 hW8qhlVfAR[/Rch4~,W<܆Uysо y~4!tG&^s1>Uߘ@#׺$ uT !z3hЃiʎsc"&=uT t VkdtÅ7dmoeVُozu'v΃k4$V*uo__2q,U5G3GO_ZKHÞ[ 8dc2z8$K'8$/,D߁ur9]|z್ه^)*+ yON[ƾ(ިkMH˪mn[Rj7UT~MSY2aքE3nM~yz:7g4cɺc4g}JD(4< >|#m_ފ,X}F#t&mwLhUq`>V:0M(6{⌘7է!PPW]hxsgēFɇkhxD(K )hp>,M~a˲Cwq(W$ZZ#~/g&h,jD,o?On)-r#sAhZ9 Ml>S_hīdTPwF>LNFRC5GSY+}8N%xJ- wr $^ØSSWR$9-MҮF1Yhx1'l  h鎻!ًhjǞQ{ 8CД z|܃i۽g¦9ј~?uBht .Yώ2 f~oG>k!y|>i~P닦J=}?6[1:5S# a}V|vp:i [Sd4޼1OyA,]n{j+MUZh?A/7}?(/9RGcSvʎд{RhRC>M`/<7J'7`Nb FSbͬѬjmW,M~5@@c'T= Дǃb0dFGaB)V0Xㄦ<:Ez0}l 6Ƨ8)ߞ  zI<"8}shh9Y6sHMBO!top42UoMR>i%]߇{Ah4q qgjE i#l ́/]f@)KfmhJva̭ZqO $[[ Yᗏ9Իjs4wF7/eGSb<hV~,Z,b~d-~RRF,Gi>땜XQ 6ocDϽ\C&oSd4BRͰHvB,}߻?`NdFcESՖh$Fk-uTE7*V-7F‰'ͨV 1OH'Q4>8cFxc *mh쾈9 4|Fj^YX` {._M^-α\hlmSbd_^8;!1R5ۍ*3vgVy |d'淧NVg/y+ĜX["Q M-hbbM'Q#_t]8fn=]};kS.9;a~sgszUӇI8kјI~hz-@L@$sQc7uh b)Gsєc|&#fs2ѸFñ 8 f þTƅy;s@;qpD]ɪ:hLЗSӰUN4>Eom64v4cA1GS,{ظT`KBf9PV |ۣ둂acKhfrc:ދ!m4blf58_Ή-`)͸1/ 79^2,! hX {cE5R _F>}h4ƿ{%32= tlVGӊۊ:c~L3D#&1G&kEdsv1NY4U<MJ])5DM#ޮg}N;f/ ՚(4`R~,;NUhGrDt#4?yh߅۶>a4}|%Tc޺t٦'V9~)ǰ];q.;lO}^ht!#[Ogh8&n]y4껴*mKs{R;^4$; 7  M[Neo5ܻeZL[ohKF~1P*f5XД`eW!bv{No,*Ję\>JG5ic 1}E s85ojh26tXL|Es<hHc^4y--<v/dN+t^:p TY{OFco_g}F#O\:3ZtB4uK7ьE w739_ &ba,ӧF%~]םrXyzK@3WԻh4O+bkodٚѷvQqXW4ߠ̖*c_7oⳀ z^.WsŬ+[tݼ_r4忩%<ŇqAcRDtjR:g?7^p;j̃Ot\DSuѸa /7JB3ѱbgqNeT 4~',+74rV,C alܓJW;^oBf4m U4#?DѴYHncqsk[4:^>_ Z1_'.JWEC{*vk@j x׿áhמb"VvK\h&~oK3++"=UNWW Sa74WfUpGR*%2n9#7i1>sw9 }H|G>51okb[~ݐWSD|+_x뇈ieS'%7ɢI>`hķz0nn8&o'Y5BS/H4κ]ث3NGCj6TYȕ=G{HV&x /9,h pl=4|-,N 6.]kDEc\/!fbqn'^Z=R|h0F̊ދU{4n ؔ6bw jFSiq)%hĢFC]Mh,A)ޟ5n4X}8 ccS)hI?SEǶohB4v5h3 &{i(]]*SC hWEw/+p.hJ t^=00M 2^D#_z} 0/4-K9F炝a4`sGW[oGөfJ}~ Qʣ|/4i64/FkUΊ( <r4-+9M>qo>+is'^YQ:inB0Djg\/] B'Ģs488&*ϝF^lfcy c3>Gy8׎{))>4xwMFչ|Ac.be2iH{Ѕ F]BИX78.3}%&k :QW7مƤEeZI6ECѡJh5OV54E*p{AߕH7jZA {I,x4 6~j whBNE ?Dea4vl `o(FV*~BC6/75A#(5ǰyD?v݇&*Ȼ=сb8gJsmIvgH?\w~+ܝ=w.钯OUp<W3"p^ϧHӞChPP;h1M[i>^Ei5޽FozeayODD}c {] X0/ov\ýlG<9q}4s_٩`,FtDŽ^{7m YmfR 4Un}Zɋ= h\g*6|Qݱi>U_#ehrqU'=;;){/F`rx)|F}ѱs]/qCv|4!] 
ĹI4ޠd$1N,<ֲZ3b^Y,aM\ =vpxxnX7 Nwې&=Ĝ fIsj)-*Km@6p1vQ;4>:p[ཾ?9h`*@.4#G sCѬMg/|(>pM XeuunԢwIQ&9߮4s?ItS_8K!hhfڕK `!yFͣɠ1X4p2xM Mr İҧФַ h{lݟT4~wӍ3h"Kڄ o=Vh=!ČjRF yqIWB*4mn<ƣHhy9ázk4g)8c*'ayMulIߍFs(~*{%g8Χ,ik4}{9{hBBӬsW /;d]5UYn4R~Cehlnh@~̗/JJ4fgeXPumOף1N )Tw#"sohl4>u{/2?&)SI;}Em|u=) k8ӆ .zq?r[M^&ePWHGmReCKL/+ Ī+vJڛ B( nI+O:{!m@.6 ?֒\R me=BImȡg]^t@At jMG'T#~Ԕ4cw|0iPm>䞗ʓB'/"[Kh9E@f|'*PK}ʦ7aWO}҈s n}uԽ0HKQv_@,|◗$M@)r(.6,@$, gm74_(?">9R)=mn @>ctCƼkГGusYEV#@S#y|^dNwy $5(d? t[f/]6߁2s%I.Nnm"P޹+( /ޛ% (',<紐+\bԚM>& ^4. wPdv"3@-QC%|ܼ5v%! D~Φ2<?|Nf @uHϳXG9GZiS# @PKkQ$q>ʫXj( 7G.<*y4P1>?8:!t2WEVuE^_%xjNg&1c%Е|vz9{-;ϳ{ytM$m8D' ƟW#(ygmj @-h txٰe:,@uL, qƞ(yC.+h,l@ -ra'0)ܣ{Vs_3ڲO"΍bY.z|Y (ݥ_@ ,^rd H97uԟ*)}F(/{O5!{kS v@䪑y B@B7zsTS#u(3N}sOKgZèG^r-N¨r!<+ޢ㹖@f |n}W@{ݪ,}w8.zeijST K{)RN*i)?Kz(Ws;@^3?Tt{|@9ε eCq|>Q{ݬ@?zC-P ؟\hPMPFzU>UsܗN{R9>嚎#f#зmv+Oq& Ҵ گ-n1[}͵Dy}8g(׎#Bd 1,'-@˾.= DIdv|kr1N=0|[u riC뜬VfQYpP{H\ǚ3\*3=gX AV*}8Mz]y( KaE~@iy D[D&! @ 낟RY`*F9@_X׻H&w^F]@b6; ?FOY4I жn-]Zpf}q~(N+_;4z o Q`ý)=dQX_q8nsx>6]= s=KbW{M0hhZm@ :C\$SP@W+@JSjHp{-?5禬y{UpO~MW4-|1;e$l߲#PۗL^*vl@[4$ =P^wQ{2WRŋ;qAEbl &xl^Fe -e,],qJ sO#lkU 1yHHEQߪ HӅP#/gh@W-.@Lnny(޸Z| ϖO,M}Nc=xAn̰9܁ʲ F1[~KlSթɱ@jA_5m;0_bG6lYM3P ډ8 >! {=#@ (*[~BwZ.U}P+rf4~塓[w$TK9]~SmՆN16%2怺#x.}gz0Ox5oouٳuya.TEi3h u;=}=5aNu- 3'[%p}ws6@aںsߡI1襓gQ &۪eHeCfU@=Pl(J5.y@8&\؅l/d@ ${oT)t(}ʀtk?6tڻ}Gw&k%Ck @J8F XbKsι4{ҁ*p]s\-Gzÿr!zSo⹉>+ϼb9 @P60s~3Z4ޣ=YCVx-ḑ/3eݸ9J9 rl@th6ޟp7k_WqB|?8_vJTz[s ]֦ϸp `=TެvO@V\_4jř0y J;<)+!G&ͧ*m#(2G51OoWԀKeV@ut%W8UF ⡛uKا]OGv K6L%qCP>USj.y} ۋw\I@yԷľ;^@~8т91.@%)nœ-"zQȑT2Eq,[ վ(>8%}TޭX2N9e D7,q ~r+Kuoo<0sP"{u,:S*Pڿr0ے2i#UU ֥ʁ#B ëOL~u6(P}'M'8Gy z;ǥF$qݟ{qޒ6P&&ĩ_wp>|H \_ŀnQH>}g_1Ľ݈}UcUbU-{@g_>byam|<fw[NOJPt[vE(z@T;og˨ϵs&r@7bxjp[|ʴRY0P%w(^J+>́Rk}U GpNxفu{4P_n -{qHK,P~JhsY/Rُxsk:U@?%!;^Ca КNmwiЊ ~]2ʩ@^> sVMgpNŜxLT;h+ @ҩz\Lg <}}Ax@ߧC&쇀Z/(Nl^%4mbLHmj}>NH \b8\10~9gQIOiJ.&$(SɦN o);kc ґŅ^` W?vkJPڴ{[|Vܜo#:^'VNWV tF<7Qbnk4}s=-(]g Tb9@S ~40}G~-I#I2HJY~3IP'jnF m bo` 1%,؏=yݳnIeh7y4;-΁kW#A ث=s@UHʬ5mSس %6FE5$_ ٴsCW!K g>8rz/yjzc?O=udSO Tp ZWma'd B+); T]3/FG;o\u/)7]oeqJ"*ͧnַ@?CsڅG L0J\Qou1debn~>-{+Ը ~a fvP_"Px&>28O? S>D vY[G ϿI+p dd䥌 ⾌62{|Kf3=ooTb_gq@+Y6<:DZĹ\ ޿@qPd:[Iڛ"d@RX[(},ǣ4u|5h|#ş@^esv+f԰{z2ȸ,P-,瑞s\/90q{SA :<xHrixԈ?;;@u 2Y6y䮝K׀0r?>NY) ?b̀B9i=Ρowur% rܸw[{|\L!΀w838q^GI>G>"0"GU wT @VED9T?0' @OTfX o,H.# %Gb㞑'&tmǼˮ>HQ0oZh%'CO')2P|4=E:5\.ǀW0Ͷ[PbnZ7ϛIXx}Q :-y K7 Φvmn+`x+\N1~hU;+}lt܁Um&iGl Joo@h(]@9Vލ}蛑@7轞 D ZBMp|TH nn _tvH ~+Mw指צ'@zKv{Pelأߴ*baSܳ;1sbQ^ Ĉ;҆W0;mz6P!< XVe˸Qh[#Qܧ/m;;#Xh3`K I?lًӆ+S=@ެ?bN $Es6#w}ʤ赭k\Ao4!&zVh  =/vЍݶ` .,ʘ@J ؋|0?<jDzLog߀s9nEEuHfhJ *4PbR,])GÇ ֛_4r6{UdQQ ;I, 1y! 
% ?9REٮM`)Bzâx룁jCԤ1M^u@'uq5{@nYϳ;q?)w->"xVk7^Tn] tO_߃^цoq9ڢBļcٛr("]6>tx W'J;mÔ䫛@]V;, TmKg9]!FkL.z_Srduuzvj.pԿ;)'p{O1+3we]󫁺U[)뵱4^2T(S }ٗZ7+P W϶WUԏigɶ8]O~oIǶsa;-߲DzkLٗbi`f% _8w_f/Ͻ%B{/THkySULLJ1z5?\$%YU~Vi 7ĀLp9 m>)2@a@HGvV WDԬ>< zߘeJ`~UEŹ5q2=KPJO}S waǥ.H4=zK9uܣb&am.(B k[S@4Է0 =".%& 7fqP; L8z^ F6}&6_RGVr>;IudYH0׾W6΁8ÓQPTP| UqYwyĺ9[(oVcpL^1݁~l,oO*Rd5{9 6U~^zz궲Oct5V/~5m@<-Y$ОG@k"B Д 9lg/<f\jy7707*x19b/ P(*u{&Ɉrse^e_א|~R^){ҀQda Et55pz/lr{uo=ЏnϷ6ҟ]W0>{h8hgσnߴxVefc8׻W.5 (6:0Ѭ-h@Yyzi<XZPijV @o;:׾g[M?.`(% Vb~WN]'S~b>r sD]k7Gaz=!Ր< %p p2b@Ojrv1` =9S̹έ֛%<xc΃y{\yܧ,J 2 _%$>fy)=waq3o\:W,B%ׁE\}B ݷmJlj\Ywȅ d$|j {yO1 %ߝE@U>Yojr@8Qw sUFlY̳:gqTrj*{iqieL(2Su~˙ts(<"1RRe_a@95Q(#ST ~a /#NmCi[?OO<.uիŽ64s@(yQ:kk큽@KI dR1IQǠ+o$T<Bg묣%ǗcyG^zNF@yL=Ab-5fȡ~ mCRӤY93\ֶ^p=lV>c[w;p5ƣ {z@|g_3PoI7Eܙ Te͘rXW]nnf'szt/kYT 6N vջ6U/[cbh;A@\ {s@=sno2 kV +̭75N85>x R>4/^iˆq蝺Uo9k +vkw2ZX½ZV=L$ܲ, P4dUN \\߷qz,kʁPb^1މ#:OHt ^YJǑܧ%;FF8oJZ.XJZAH 1dcI@W;;Ov+H%4(" A!n`Y\)u9xP?qu~yhn{?A edPP gaWeFc@晵 c2PR;S/ef@QJ]'Ƣθ?[hR^8{KY`\Q} :s9V`EekKwc6k92ϚI}Č% ?ZZuw1}W|YS qmMh ɺ)%9LD(hV]-E^}{Q끪@`^USAO2sp5>~[!`ho& E=1BEZF^>Gǹ@ {hf*M@ }׸YzN=x f@0 'MN>׫ tqۻ"PHM|܆I@X=NdFo8WZ>eFa;@%y{wy% oyW * )@{z[F-|nj*BWKل" C-O.}Y}wM߫]Zɘ~<'<\y6QB`~p4:0' 4?2Yo6}K)_sĜA t7D$@xR&w0X,0bf;9o:ekZ,6'=ȱ~d;Wx>"Wys FWw.F!(-.'~ _UA9t HT̉@oNY6wc$R:q;rZJ7$hƷЎgLڽrzق>tQ /JMM! |*_H7 #_sـ:esr9@9K F Hۖ5luZbN7Gnw3@}v>L$|J92Wx6΍k&B@^ۅyN"d#OP p;j\uV=ƥbCG?ۧBI\! _ڞTg6g8NBB w/ )@#HFktLz3q[U ~gDl䶱==x'뎈lbņ/)O؞(qwSI_9 3ʭv s)uwzPaջјtW|3qaϗ}BWcޖm@7=| ;?W9Ujln?$ 7tnۍZF)z@o\9rI?_NM%@Zd@B@VW>d{eZW k=[|ߕ:l{7+*5ҧV'VϺ>Md V]֦ ϛ.?ž~j1Ƶb ~=>łkF\dg'xa{}O/Jz8ho?jDN!q^Eݷrl{Zn+I{^ϱ @l6"ʿ~>y |=5"˥6rqggKom8$bu S.9F-mo|A QR|/~CZ7ݘLt#Uv 37_t??7gK.Y*E{rt9ۑj,9 Ĕg&K$mH7uRPk@{\ve6C_|H4svRpSyut_~ݢ=Dmtm.~:@x'O򀸙7`7O`w`8u9/Rx(7cH?.i@N/*~l!v-qW :bsc]^hW|gAȥ[N7iWD2E,u^|}!%a ]|=ANrv-=3  V[@}17H2?> 'ͶONv/ż#}!d(acvo[hYEI8ǜ1@W+;C Tm;c&7n4<#c'1g^6C/q=5KGTx`& ȦD1מǹxcai(p>NMĘQ&LӀ~t_"~_L]XKOٕPw9?4cV_xJ^' U#)ӆ|. E{>}}@{$}Eh^STy!+_@j'MEOQB#ɓvm;!'yJaqU!0emK :^? sJ-Um3E-g8,ʝD6 ^X9hᷣ⌀2fi :eQ@zj$%ŀ.wV1myݟ6on ˿܎]RVO~ۀ(eHʎ?uu@z9w]"9 럋JhZ@I :?p^=:wrG}1QYþ/TyȏBd^N/3 +7K7͚tط2^2(TjnNsrs5d~|Gwtɱ#K["+v:Q|nhq' lg[5u:G|rދ~L0ڝH O$˼3jz1'@P<8#Au#*c̻7vwc_ɍ=zGJ9^ɼ8m{849kc^Mh{f~ª.p: {Gv27فQ6$N<3V ߅޴p,>௓趕n}{Dxly~ū>茟 w =_[҉+'2]l3ŷ N>{SSBFC ^"%>+=؈-@B72+ꛭɩ(i d.8c0ݗ>FiE"G=kЬS.B@#w?^%s ?vUIA@9̱;>hAoeg@+ɏy[ig- 8U6U0NϮ3^|EP&Я9{Ja?K[0WWn h<<(>-%1Xo~ GJg4s(-2SYrnkTz̹5%e ]iAT8߻ ls/<^N j/jMtb~^p]HZO @<gkO@\=7ynS@/-lҞO&:LmggУ<71FkH>> 87P7/DD3i@s\[\ao%2qT^Q~[Hv#7bt`~H [@>{sC$X<ǗH@n]xhr .)o^9Wg/\ץY(ar/qo)\j2^!$˳1Y@*6l <}LVO2ƪĀE,2tpLU I-w)KyQ >s, ~i)@~,2oZQ n{| 嵕ʄbQ?~5ЫO^f}տ!Yt|bgΫMi_)pzh]0?}pLۋ@̘T|]d>m쫥̡F`/Kh~ ]8u\^.e1Ywa|:XI5By͖cx5*s32$о>&z{]@}XQG0=Q]p|ie)ޛju|./< r_X&U92ļ:339bM ^6ٰ{ ŜHb@SFb&MJ1%`cMSɄP"|O8T6`ׯ t'g^u|-_P$Lnܝ6~ $ ,"3r_rWv@{+Z;V#3ݰ7 aO|1sϞutUFڅ;~T|x(C_2hpmP 9 HSW-'HPuKUpF=:Q}[nR2-m4y@H^?lJ")D|M $+a:x6%t@ا!J%Zozz|caOtpRD!% q`+؄\c~* dԤ[ jnr5yv`8f"_,{|Ngl#hIP>5>6ÆSѹEŭc@}p;SNwrp=יp$ ݭ} s{E8~8~ /0>> .uzЎGP8U+W0]<2k+w3 Ԟhj`XwrAm = ˁkf!^%? 14lc}ԟ5]b}'Ï0~4Q/eO~QڛwV}>1>)T`~OBCO_M%߾q{c@#]Sn8Bƨ/ r_a>Cs k]W\~8GvߦX*1aSkr1,VNz[bE mv9&k7Q__\7ŹnfF礝zvv4۟e3,1LĻGLL`;zta^u3>@Jracxw-Ȟ@WoҒ'N1޴#׷ #ܛc9\/   =-_t9 }:x Ylϫ6/IbЫH^ߪU@1P7qr؟7fz$]= [mTgdpcPFX֨ܯuAby KUX dm Z0ڔ.bҵwTƎfp=ۼ[z-VmmA_Zぬɵ!o:#R-^%?b9d2`0}Br>_0[\Ì?9n" ʣcַ|@J?ƒϘ'u5觕B+ȇp|r*87;`?Xj8^n0+ ;sqϑTIwq^rV*@ss׹y{AzA#ni@~a[ئԉڧ|>nh|PlT刍|HBcoZIMMD*H foU0(3Os`όζ 9D̹Wx }7˫ x/bsw< w+bib ϔӿx|z#"أ"deTC<(>p ɽ tӉQ"m} U.dAr3P#P̕)HCpgXmoz.%} G}ԏ߯[yx5xgK¹ej]iw094<^%7V2oTbl_E9\_#ŋ@<2q:Zf>_Jɲ9m yԸyקwday»@J )r@@W]]u kgԷ`^vq9An3dpGl\z@ѿflh7m0^sd[ً{9M3%$일  u e';DHڊ:! 
[binary payload of the preceding archive entry omitted: not representable as text]
loo/tests/testthat/data-for-tests/normal_reg_waic_test_example.rda0000644000176200001440000005046414523242160025303 0ustar liggesusers
[binary .rda payload omitted: not representable as text]
loo/tests/testthat/test_compare.R0000644000176200001440000001406515100712211016650 0ustar liggesusers
set.seed(123)
LLarr <- example_loglik_array()
LLarr2 <- array(rnorm(prod(dim(LLarr)), c(LLarr), 0.5), dim = dim(LLarr))
LLarr3 <- array(rnorm(prod(dim(LLarr)), c(LLarr), 1), dim = dim(LLarr))
w1 <- suppressWarnings(waic(LLarr))
w2 <- suppressWarnings(waic(LLarr2))
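# (Illustrative sketch added for exposition; not part of the original test
# file. It only restates what the tests below assert: loo_compare() orders
# models by elpd, so the first row of the comparison table is the preferred
# model and its elpd_diff is 0. Wrapped in `if (FALSE)` so it never runs.)
if (FALSE) {
  comp <- loo_compare(w1, w2)
  comp[, c("elpd_diff", "se_diff")] # first row has elpd_diff = 0
}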
test_that("loo_compare throws appropriate errors", {
  w3 <- suppressWarnings(waic(LLarr[,, -1]))
  w4 <- suppressWarnings(waic(LLarr[,, -(1:2)]))
  expect_error(loo_compare(2, 3), "must be a list if not a 'loo' object")
  expect_error(
    loo_compare(w1, w2, x = list(w1, w2)),
    "If 'x' is a list then '...' should not be specified"
  )
  expect_error(loo_compare(w1, list(1, 2, 3)), "class 'loo'")
  expect_error(loo_compare(w1), "requires at least two models")
  expect_error(loo_compare(x = list(w1)), "requires at least two models")
  expect_error(
    loo_compare(w1, w3),
    "All models must have the same number of observations, but models have inconsistent observation counts: 'model1' (32), 'model2' (31)",
    fixed = TRUE
  )
  expect_error(
    loo_compare(w1, w2, w3),
    "All models must have the same number of observations, but models have inconsistent observation counts: 'model1' (32), 'model2' (32), 'model3' (31)",
    fixed = TRUE
  )
  expect_error(
    loo_compare(x = list("Model A" = w1, "Model B" = w2, "Model C" = w3)),
    "All models must have the same number of observations, but models have inconsistent observation counts: 'Model A' (32), 'Model B' (32), 'Model C' (31)",
    fixed = TRUE
  )
})

test_that("loo_compare throws appropriate warnings", {
  w3 <- w1
  w4 <- w2
  class(w3) <- class(w4) <- c("kfold", "loo")
  attr(w3, "K") <- 2
  attr(w4, "K") <- 3
  expect_warning(
    loo_compare(w3, w4),
    "Not all kfold objects have the same K value"
  )

  class(w4) <- c("psis_loo", "loo")
  attr(w4, "K") <- NULL
  expect_warning(loo_compare(w3, w4), "Comparing LOO-CV to K-fold-CV")

  w3 <- w1
  w4 <- w2
  attr(w3, "yhash") <- "a"
  attr(w4, "yhash") <- "b"
  expect_warning(loo_compare(w3, w4), "Not all models have the same y variable")

  set.seed(123)
  w_list <- lapply(1:25, function(x) {
    suppressWarnings(waic(LLarr + rnorm(1, 0, 0.1)))
  })
  expect_warning(
    loo_compare(w_list),
    "Difference in performance potentially due to chance"
  )

  w_list_short <- lapply(1:4, function(x) {
    suppressWarnings(waic(LLarr + rnorm(1, 0, 0.1)))
  })
  expect_no_warning(loo_compare(w_list_short))
})

comp_colnames <- c(
  "elpd_diff", "se_diff", "elpd_waic", "se_elpd_waic",
  "p_waic", "se_p_waic", "waic", "se_waic"
)

test_that("loo_compare returns expected results (2 models)", {
  comp1 <- loo_compare(w1, w1)
  expect_s3_class(comp1, "compare.loo")
  expect_equal(colnames(comp1), comp_colnames)
  expect_equal(rownames(comp1), c("model1", "model2"))
  expect_output(print(comp1), "elpd_diff")
  expect_equal(comp1[1:2, 1], c(0, 0), ignore_attr = TRUE)
  expect_equal(comp1[1:2, 2], c(0, 0), ignore_attr = TRUE)

  comp2 <- loo_compare(w1, w2)
  expect_s3_class(comp2, "compare.loo")
  expect_equal(colnames(comp2), comp_colnames)
  expect_snapshot_value(comp2, style = "serialize")

  # specifying objects via ... and via arg x gives equal results
  expect_equal(comp2, loo_compare(x = list(w1, w2)))
})

test_that("loo_compare returns expected result (3 models)", {
  w3 <- suppressWarnings(waic(LLarr3))
  comp1 <- loo_compare(w1, w2, w3)
  expect_equal(colnames(comp1), comp_colnames)
  expect_equal(rownames(comp1), c("model1", "model2", "model3"))
  expect_equal(comp1[1, 1], 0)
  expect_s3_class(comp1, "compare.loo")
  expect_s3_class(comp1, "matrix")
  expect_snapshot_value(comp1, style = "serialize")
  # specifying objects via '...' gives equivalent results (equal
  # except rownames) to using 'x' argument
  expect_equal(comp1, loo_compare(x = list(w1, w2, w3)), ignore_attr = TRUE)
})

# Tests for deprecated compare() ------------------------------------------

test_that("compare throws deprecation warnings", {
  expect_warning(loo::compare(w1, w2), "Deprecated")
  expect_warning(loo::compare(w1, w1, w2), "Deprecated")
})

test_that("compare returns expected result (2 models)", {
  expect_warning(comp1 <- loo::compare(w1, w1), "Deprecated")
  expect_snapshot(comp1)
  expect_equal(comp1[1:2], c(elpd_diff = 0, se = 0))

  expect_warning(comp2 <- loo::compare(w1, w2), "Deprecated")
  expect_snapshot(comp2)
  expect_named(comp2, c("elpd_diff", "se"))
  expect_s3_class(comp2, "compare.loo")

  # specifying objects via ... and via arg x gives equal results
  expect_warning(comp_via_list <- loo::compare(x = list(w1, w2)), "Deprecated")
  expect_equal(comp2, comp_via_list)
})

test_that("compare returns expected result (3 models)", {
  w3 <- suppressWarnings(waic(LLarr3))
  expect_warning(comp1 <- loo::compare(w1, w2, w3), "Deprecated")
  expect_equal(
    colnames(comp1),
    c(
      "elpd_diff", "se_diff", "elpd_waic", "se_elpd_waic",
      "p_waic", "se_p_waic", "waic", "se_waic"
    )
  )
  expect_equal(rownames(comp1), c("w1", "w2", "w3"))
  expect_equal(comp1[1, 1], 0)
  expect_s3_class(comp1, "compare.loo")
  expect_s3_class(comp1, "matrix")
  expect_snapshot_value(comp1, style = "serialize")

  # specifying objects via '...' gives equivalent results (equal
  # except rownames) to using 'x' argument
  expect_warning(
    comp_via_list <- loo::compare(x = list(w1, w2, w3)),
    "Deprecated"
  )
  expect_equal(comp1, comp_via_list, ignore_attr = TRUE)
})

test_that("compare throws appropriate errors", {
  expect_error(
    suppressWarnings(loo::compare(w1, w2, x = list(w1, w2))),
    "should not be specified"
  )
  expect_error(suppressWarnings(loo::compare(x = 2)), "must be a list")
  expect_error(
    suppressWarnings(loo::compare(x = list(2))),
    "should have class 'loo'"
  )
  expect_error(
    suppressWarnings(loo::compare(x = list(w1))),
    "requires at least two models"
  )

  w3 <- suppressWarnings(waic(LLarr2[,, -1]))
  expect_error(
    suppressWarnings(loo::compare(x = list(w1, w3))),
    "same number of data points"
  )
  expect_error(
    suppressWarnings(loo::compare(x = list(w1, w2, w3))),
    "same number of data points"
  )
})
loo/tests/testthat/test_loo_moment_matching.R0000644000176200001440000003175715064301501021261 0ustar liggesusers
options(mc.cores = 1)

set.seed(123)
S <- 4000

# helper functions for sampling from the posterior distribution
rinvchisq <- function(n, df, scale = 1 / df, ...) {
  if ((length(scale) != 1) & (length(scale) != n)) {
    stop("scale should be a scalar or a vector of the same length as x")
  }
  if (df <= 0) {
    stop("df must be greater than zero")
  }
  if (any(scale <= 0)) {
    stop("scale must be greater than zero")
  }
  return((df * scale) / rchisq(n, df = df))
}

dinvchisq <- function(x, df, scale = 1 / df, log = FALSE, ...) {
  if (df <= 0) {
    stop("df must be greater than zero")
  }
  if (scale <= 0) {
    stop("scale must be greater than zero")
  }
  nu <- df / 2
  if (log) {
    return(ifelse(
      x > 0,
      nu * log(nu) - log(gamma(nu)) + nu * log(scale) -
        (nu + 1) * log(x) - (nu * scale / x),
      NA
    ))
  } else {
    return(ifelse(
      x > 0,
      (((nu)^(nu)) / gamma(nu)) * (scale^nu) * (x^(-(nu + 1))) *
        exp(-nu * scale / x),
      NA
    ))
  }
}
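# (Illustrative check added for exposition; not part of the original test
# file. A scaled inverse-chi-squared with df > 2 has mean df * scale / (df - 2),
# which gives a quick spot-check that the sampler and density above agree.
# Wrapped in `if (FALSE)` so it never runs during testing.)
if (FALSE) {
  mean(rinvchisq(1e5, df = 10, scale = 2)) # roughly 10 * 2 / 8 = 2.5
  integrate(dinvchisq, 0, Inf, df = 10, scale = 2)$value # roughly 1
}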
{ if (df <= 0) { stop("df must be greater than zero") } if (scale <= 0) { stop("scale must be greater than zero") } nu <- df / 2 if (log) { return(ifelse( x > 0, nu * log(nu) - log(gamma(nu)) + nu * log(scale) - (nu + 1) * log(x) - (nu * scale / x), NA )) } else { return(ifelse( x > 0, (((nu)^(nu)) / gamma(nu)) * (scale^nu) * (x^(-(nu + 1))) * exp(-nu * scale / x), NA )) } } # generate toy data # normally distributed data with known variance data_sd <- 1.1 data_mean <- 1.3 n <- as.integer(30) y <- rnorm(n = n, mean = data_mean, sd = data_sd) y_tilde <- 11 y[1] <- y_tilde ymean <- mean(y) s2 <- sum((y - ymean)^2) / (n - 1) # draws from the posterior distribution when including all observations draws_full_posterior_sigma2 <- rinvchisq(S, n - 1, s2) draws_full_posterior_mu <- rnorm( S, ymean, sqrt(draws_full_posterior_sigma2 / n) ) # create a dummy model object x <- list() x$data <- list() x$data$y <- y x$data$n <- n x$data$ymean <- ymean x$data$s2 <- s2 x$draws <- data.frame( mu = draws_full_posterior_mu, sigma = sqrt(draws_full_posterior_sigma2) ) # implement functions for moment matching loo # extract original posterior draws post_draws_test <- function(x, ...) { as.matrix(x$draws) } # extract original log lik draws log_lik_i_test <- function(x, i, ...) { -0.5 * log(2 * pi) - log(x$draws$sigma) - 1.0 / (2 * x$draws$sigma^2) * (x$data$y[i] - x$draws$mu)^2 } loglik <- matrix(0, S, n) for (j in seq(n)) { loglik[, j] <- log_lik_i_test(x, j) } # mu, log(sigma) unconstrain_pars_test <- function(x, pars, ...) { upars <- as.matrix(pars) upars[, 2] <- log(upars[, 2]) upars } log_prob_upars_test <- function(x, upars, ...) { dinvchisq(exp(upars[, 2])^2, x$data$n - 1, x$data$s2, log = TRUE) + dnorm( upars[, 1], x$data$ymean, exp(upars[, 2]) / sqrt(x$data$n), log = TRUE ) } # compute log_lik_i values based on the unconstrained parameters log_lik_i_upars_test <- function(x, upars, i, ...) 
{ -0.5 * log(2 * pi) - upars[, 2] - 1.0 / (2 * exp(upars[, 2])^2) * (x$data$y[i] - upars[, 1])^2 } upars <- unconstrain_pars_test(x, x$draws) lwi_1 <- -loglik[, 1] lwi_1 <- lwi_1 - matrixStats::logSumExp(lwi_1) test_that("log_prob_upars_test works", { upars <- unconstrain_pars_test(x, x$draws) xloo <- list() xloo$data <- list() xloo$data$y <- y[-1] xloo$data$n <- n - 1 xloo$data$ymean <- mean(y[-1]) xloo$data$s2 <- sum((y[-1] - mean(y[-1]))^2) / (n - 2) post1 <- log_prob_upars_test(x, upars) post1 <- post1 - matrixStats::logSumExp(post1) post2 <- log_prob_upars_test(xloo, upars) + loglik[, 1] post2 <- post2 - matrixStats::logSumExp(post2) expect_equal(post1, post2) }) test_that("loo_moment_match.default warnings work", { # loo object loo_manual <- suppressWarnings(loo(loglik)) loo_manual_tis <- suppressWarnings(loo(loglik, is_method = "tis")) expect_warning( loo_moment_match( x, loo_manual, post_draws_test, log_lik_i_test, unconstrain_pars_test, log_prob_upars_test, log_lik_i_upars_test, max_iters = 30L, k_thres = 0.5, split = FALSE, cov = TRUE, cores = 1 ), "The accuracy of self-normalized importance sampling" ) expect_warning( loo_moment_match( x, loo_manual, post_draws_test, log_lik_i_test, unconstrain_pars_test, log_prob_upars_test, log_lik_i_upars_test, max_iters = 30L, split = FALSE, cov = TRUE, cores = 1 ), "The accuracy of self-normalized importance sampling" ) expect_no_warning(loo_moment_match( x, loo_manual, post_draws_test, log_lik_i_test, unconstrain_pars_test, log_prob_upars_test, log_lik_i_upars_test, max_iters = 30L, k_thres = 100, split = TRUE, cov = TRUE, cores = 1 )) expect_snapshot(loo_moment_match( x, loo_manual, post_draws_test, log_lik_i_test, unconstrain_pars_test, log_prob_upars_test, log_lik_i_upars_test, max_iters = 1, k_thres = 0.5, split = TRUE, cov = TRUE, cores = 1 )) expect_error( loo_moment_match( x, loo_manual_tis, post_draws_test, log_lik_i_test, unconstrain_pars_test, log_prob_upars_test, log_lik_i_upars_test, max_iters = 30L, k_thres = 0.5, split = TRUE, cov = TRUE, cores = 1 ), "loo_moment_match currently supports only" ) }) test_that("loo_moment_match.default works", { # allow -Inf lwi_x <- lwi_1 lwi_x[which.min(lwi_1)] <- -Inf expect_no_error(suppressWarnings(importance_sampling.default( lwi_1, method = "psis", r_eff = 1, cores = 1 ))) # loo object loo_manual <- suppressWarnings(loo(loglik)) loo_moment_match_object <- suppressWarnings(loo_moment_match( x, loo_manual, post_draws_test, log_lik_i_test, unconstrain_pars_test, log_prob_upars_test, log_lik_i_upars_test, max_iters = 30L, k_thres = 0.8, split = FALSE, cov = TRUE, cores = 1 )) # diagnostic Pareto k decreases but influence pareto k stays the same expect_lt( loo_moment_match_object$diagnostics$pareto_k[1], loo_moment_match_object$pointwise[1, "influence_pareto_k"] ) expect_equal( loo_moment_match_object$pointwise[, "influence_pareto_k"], loo_manual$pointwise[, "influence_pareto_k"] ) expect_equal( loo_moment_match_object$pointwise[, "influence_pareto_k"], loo_manual$diagnostics$pareto_k ) expect_snapshot_value(loo_moment_match_object, style = "serialize") loo_moment_match_object2 <- suppressWarnings(loo_moment_match( x, loo_manual, post_draws_test, log_lik_i_test, unconstrain_pars_test, log_prob_upars_test, log_lik_i_upars_test, max_iters = 30L, k_thres = 0.5, split = FALSE, cov = TRUE, cores = 1 )) expect_snapshot_value(loo_moment_match_object2, style = "serialize") loo_moment_match_object3 <- suppressWarnings(loo_moment_match( x, loo_manual, post_draws_test, log_lik_i_test, 
unconstrain_pars_test, log_prob_upars_test, log_lik_i_upars_test, max_iters = 30L, k_thres = 0.5, split = TRUE, cov = TRUE, cores = 1 )) expect_snapshot_value(loo_moment_match_object3, style = "serialize") loo_moment_match_object4 <- suppressWarnings(loo_moment_match( x, loo_manual, post_draws_test, log_lik_i_test, unconstrain_pars_test, log_prob_upars_test, log_lik_i_upars_test, max_iters = 30L, k_thres = 100, split = FALSE, cov = TRUE, cores = 1 )) expect_equal(loo_manual, loo_moment_match_object4) loo_manual_with_psis <- suppressWarnings(loo(loglik, save_psis = TRUE)) loo_moment_match_object5 <- suppressWarnings(loo_moment_match( x, loo_manual_with_psis, post_draws_test, log_lik_i_test, unconstrain_pars_test, log_prob_upars_test, log_lik_i_upars_test, max_iters = 30L, k_thres = 0.8, split = FALSE, cov = TRUE, cores = 1 )) expect_equal( loo_moment_match_object5$diagnostics, loo_moment_match_object5$psis_object$diagnostics ) }) test_that("variance and covariance transformations work", { S <- 2000 set.seed(8493874) draws_full_posterior_sigma2 <- rinvchisq(S, n - 1, s2) draws_full_posterior_mu <- rnorm( S, ymean, sqrt(draws_full_posterior_sigma2 / n) ) x$draws <- data.frame( mu = draws_full_posterior_mu, sigma = sqrt(draws_full_posterior_sigma2) ) loglik <- matrix(0, S, n) for (j in seq(n)) { loglik[, j] <- log_lik_i_test(x, j) } upars <- unconstrain_pars_test(x, x$draws) lwi_1 <- -loglik[, 1] lwi_1 <- lwi_1 - matrixStats::logSumExp(lwi_1) loo_manual <- suppressWarnings(loo(loglik)) loo_moment_match_object <- suppressWarnings(loo_moment_match( x, loo_manual, post_draws_test, log_lik_i_test, unconstrain_pars_test, log_prob_upars_test, log_lik_i_upars_test, max_iters = 30L, k_thres = 0.0, split = FALSE, cov = TRUE, cores = 1 )) expect_snapshot_value(loo_moment_match_object, style = "serialize") }) test_that("loo_moment_match.default works with multiple cores", { # loo object loo_manual <- suppressWarnings(loo(loglik)) loo_moment_match_manual3 <- suppressWarnings(loo_moment_match( x, loo_manual, post_draws_test, log_lik_i_test, unconstrain_pars_test, log_prob_upars_test, log_lik_i_upars_test, max_iters = 30L, k_thres = 0.5, split = FALSE, cov = TRUE, cores = 1 )) loo_moment_match_manual4 <- suppressWarnings(loo_moment_match( x, loo_manual, post_draws_test, log_lik_i_test, unconstrain_pars_test, log_prob_upars_test, log_lik_i_upars_test, max_iters = 30L, k_thres = 0.5, split = FALSE, cov = TRUE, cores = 2 )) expect_equal( loo_moment_match_manual3$diagnostics$pareto_k, loo_moment_match_manual4$diagnostics$pareto_k ) expect_equal( loo_moment_match_manual3$diagnostics$n_eff, loo_moment_match_manual4$diagnostics$n_eff, tolerance = 5e-4 ) expect_equal( loo_moment_match_manual3$estimates, loo_moment_match_manual4$estimates ) expect_equal( loo_moment_match_manual3$pointwise, loo_moment_match_manual4$pointwise, tolerance = 5e-4 ) }) test_that("loo_moment_match_split works", { # skip on M1 Mac until we figure out why this test fails only on M1 Mac skip_if(Sys.info()[["sysname"]] == "Darwin" && R.version$arch == "aarch64") is_obj_1 <- suppressWarnings(importance_sampling.default( lwi_1, method = "psis", r_eff = 1, cores = 1 )) lwi_1_ps <- as.vector(weights(is_obj_1)) split <- loo_moment_match_split( x, upars, cov = FALSE, total_shift = c(0, 0), total_scaling = c(1, 1), total_mapping = diag(c(1, 1)), i = 1, log_prob_upars = log_prob_upars_test, log_lik_i_upars = log_lik_i_upars_test, cores = 1, r_eff_i = 1, is_method = "psis" ) expect_named(split, c("lwi", "lwfi", "log_liki", "r_eff_i")) 
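  # With total_shift = c(0, 0), total_scaling = c(1, 1), and an identity
  # total_mapping, the affine transformation is the identity and leaves the
  # draws unchanged, so the split computation should reduce to ordinary PSIS
  # for observation 1 and reproduce the smoothed, normalized log-weights
  # (lwi_1_ps) computed above.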
expect_equal(lwi_1_ps, split$lwi) split2 <- loo_moment_match_split( x, upars, cov = FALSE, total_shift = c(-0.1, -0.2), total_scaling = c(0.7, 0.7), total_mapping = matrix(c(1, 0.1, 0.1, 1), 2, 2), i = 1, log_prob_upars = log_prob_upars_test, log_lik_i_upars = log_lik_i_upars_test, cores = 1, r_eff_i = 1, is_method = "psis" ) expect_snapshot_value(split2, style = "serialize") }) test_that("passing arguments works", { log_lik_i_upars_test_additional_argument <- function( x, upars, i, passed_arg = FALSE, ... ) { if (!passed_arg) { warning("passed_arg was not passed here") } -0.5 * log(2 * pi) - upars[, 2] - 1.0 / (2 * exp(upars[, 2])^2) * (x$data$y[i] - upars[, 1])^2 } unconstrain_pars_test_additional_argument <- function( x, pars, passed_arg = FALSE, ... ) { if (!passed_arg) { warning("passed_arg was not passed here") } upars <- as.matrix(pars) upars[, 2] <- log(upars[, 2]) upars } log_prob_upars_test_additional_argument <- function( x, upars, passed_arg = FALSE, ... ) { if (!passed_arg) { warning("passed_arg was not passed here") } dinvchisq(exp(upars[, 2])^2, x$data$n - 1, x$data$s2, log = TRUE) + dnorm( upars[, 1], x$data$ymean, exp(upars[, 2]) / sqrt(x$data$n), log = TRUE ) } post_draws_test_additional_argument <- function(x, passed_arg = FALSE, ...) { if (!passed_arg) { warning("passed_arg was not passed here") } as.matrix(x$draws) } log_lik_i_test_additional_argument <- function( x, i, passed_arg = FALSE, ... ) { if (!passed_arg) { warning("passed_arg was not passed here") } -0.5 * log(2 * pi) - log(x$draws$sigma) - 1.0 / (2 * x$draws$sigma^2) * (x$data$y[i] - x$draws$mu)^2 } # loo object loo_manual <- suppressWarnings(loo(loglik)) expect_silent(loo_moment_match( x, loo_manual, post_draws_test_additional_argument, log_lik_i_test_additional_argument, unconstrain_pars_test_additional_argument, log_prob_upars_test_additional_argument, log_lik_i_upars_test_additional_argument, max_iters = 30L, k_thres = 0.5, split = TRUE, cov = TRUE, cores = 1, passed_arg = TRUE )) }) loo/tests/testthat/test_loo_and_waic.R0000644000176200001440000001463315064301501017647 0ustar liggesusersoptions(mc.cores = 1) set.seed(123) LLarr <- example_loglik_array() LLmat <- example_loglik_matrix() LLvec <- LLmat[, 1] chain_id <- rep(1:2, each = nrow(LLarr)) r_eff_arr <- relative_eff(exp(LLarr)) r_eff_mat <- relative_eff(exp(LLmat), chain_id = chain_id) loo1 <- suppressWarnings(loo(LLarr, r_eff = r_eff_arr)) waic1 <- suppressWarnings(waic(LLarr)) elpd1 <- suppressWarnings(elpd(LLarr)) test_that("using loo.cores is deprecated", { options(mc.cores = NULL) options(loo.cores = 1) expect_warning(loo(LLarr, r_eff = r_eff_arr, cores = 2), "loo.cores") options(loo.cores = NULL) options(mc.cores = 1) }) test_that("loo, waic and elpd results haven't changed", { expect_snapshot_value(loo1, style = "serialize") expect_snapshot_value(waic1, style = "serialize") expect_snapshot_value(elpd1, style = "serialize") }) test_that("loo with cores=1 and cores=2 gives same results", { loo2 <- suppressWarnings(loo(LLarr, r_eff = r_eff_arr, cores = 2)) expect_equal(loo1$estimates, loo2$estimates) }) test_that("waic returns object with correct structure", { expect_true(is.waic(waic1)) expect_true(is.loo(waic1)) expect_false(is.psis_loo(waic1)) expect_named( waic1, c( "estimates", "pointwise", # deprecated but still there "elpd_waic", "p_waic", "waic", "se_elpd_waic", "se_p_waic", "se_waic" ) ) est_names <- dimnames(waic1$estimates) expect_equal(est_names[[1]], c("elpd_waic", "p_waic", "waic")) expect_equal(est_names[[2]], c("Estimate", 
"SE")) expect_equal(colnames(waic1$pointwise), est_names[[1]]) expect_equal(dim(waic1), dim(LLmat)) }) test_that("loo returns object with correct structure", { expect_false(is.waic(loo1)) expect_true(is.loo(loo1)) expect_true(is.psis_loo(loo1)) expect_named( loo1, c( "estimates", "pointwise", "diagnostics", "psis_object", # deprecated but still there "elpd_loo", "p_loo", "looic", "se_elpd_loo", "se_p_loo", "se_looic" ) ) expect_named(loo1$diagnostics, c("pareto_k", "n_eff", "r_eff")) expect_equal(dimnames(loo1$estimates)[[1]], c("elpd_loo", "p_loo", "looic")) expect_equal(dimnames(loo1$estimates)[[2]], c("Estimate", "SE")) expect_equal( colnames(loo1$pointwise), c("elpd_loo", "mcse_elpd_loo", "p_loo", "looic", "influence_pareto_k") ) expect_equal(dim(loo1), dim(LLmat)) }) test_that("elpd returns object with correct structure", { expect_true(is.loo(elpd1)) expect_named( elpd1, c( "estimates", "pointwise" ) ) est_names <- dimnames(elpd1$estimates) expect_equal(est_names[[1]], c("elpd", "ic")) expect_equal(est_names[[2]], c("Estimate", "SE")) expect_equal(colnames(elpd1$pointwise), est_names[[1]]) expect_equal(dim(elpd1), dim(LLmat)) }) test_that("two pareto k values are equal", { expect_identical( loo1$pointwise[, "influence_pareto_k"], loo1$diagnostics$pareto_k ) }) test_that("loo.array and loo.matrix give same result", { l2 <- suppressWarnings(loo(LLmat, r_eff = r_eff_mat)) expect_identical(loo1$estimates, l2$estimates) expect_identical(loo1$diagnostics, l2$diagnostics) # the mcse_elpd_loo columns won't be identical because we use sampling expect_identical(loo1$pointwise[, -2], l2$pointwise[, -2]) expect_equal(loo1$pointwise[, 2], l2$pointwise[, 2], tolerance = 0.005) }) test_that("loo.array runs with multiple cores", { loo_with_arr1 <- loo(LLarr, cores = 1, r_eff = NA) loo_with_arr2 <- loo(LLarr, cores = 2, r_eff = NA) expect_identical(loo_with_arr1$estimates, loo_with_arr2$estimates) }) test_that("waic.array and waic.matrix give same result", { waic2 <- suppressWarnings(waic(LLmat)) expect_identical(waic1, waic2) }) test_that("elpd.array and elpd.matrix give same result", { elpd2 <- suppressWarnings(elpd(LLmat)) expect_identical(elpd1, elpd2) }) test_that("loo, waic, and elpd error with vector input", { expect_error(loo(LLvec), regexp = "no applicable method") expect_error(waic(LLvec), regexp = "no applicable method") expect_error(elpd(LLvec), regexp = "no applicable method") }) # testing function methods ------------------------------------------------ source(test_path("data-for-tests/function_method_stuff.R")) waic_with_fn <- waic(llfun, data = data, draws = draws) waic_with_mat <- waic(llmat_from_fn) loo_with_fn <- loo( llfun, data = data, draws = draws, r_eff = rep(1, nrow(data)) ) loo_with_mat <- loo( llmat_from_fn, r_eff = rep(1, ncol(llmat_from_fn)), save_psis = TRUE ) test_that("loo.cores deprecation warning works with function method", { options(loo.cores = 1) expect_warning( loo( llfun, cores = 2, data = data, draws = draws, r_eff = rep(1, nrow(data)) ), "loo.cores" ) options(loo.cores = NULL) }) test_that("loo_i results match loo results for ith data point", { expect_no_warning( loo_i_val <- loo_i(i = 2, llfun = llfun, data = data, draws = draws), ) expect_equal( loo_i_val$pointwise[, "elpd_loo"], loo_with_fn$pointwise[2, "elpd_loo"] ) expect_equal( loo_i_val$pointwise[, "p_loo"], loo_with_fn$pointwise[2, "p_loo"] ) expect_equal( loo_i_val$diagnostics$pareto_k, loo_with_fn$diagnostics$pareto_k[2] ) expect_equal(loo_i_val$diagnostics$n_eff, loo_with_fn$diagnostics$n_eff[2]) }) 
test_that("function and matrix methods return same result", { expect_equal(waic_with_mat, waic_with_fn) expect_identical(loo_with_mat$estimates, loo_with_fn$estimates) expect_identical(loo_with_mat$diagnostics, loo_with_fn$diagnostics) expect_identical(dim(loo_with_mat), dim(loo_with_fn)) }) test_that("loo.function runs with multiple cores", { loo_with_fn1 <- loo( llfun, data = data, draws = draws, r_eff = rep(1, nrow(data)), cores = 1 ) loo_with_fn2 <- loo( llfun, data = data, draws = draws, r_eff = rep(1, nrow(data)), cores = 2 ) expect_identical(loo_with_fn2$estimates, loo_with_fn1$estimates) }) test_that("save_psis option to loo.function makes correct psis object", { loo_with_fn2 <- loo.function( llfun, data = data, draws = draws, r_eff = rep(1, nrow(data)), save_psis = TRUE ) expect_identical(loo_with_fn2$psis_object, loo_with_mat$psis_object) }) test_that("loo doesn't throw r_eff warnings", { expect_no_warning(loo(-LLarr)) expect_no_warning(loo(-LLmat)) expect_no_warning(loo(llfun, data = data, draws = draws)) }) loo/tests/testthat.R0000644000176200001440000000011413575010725014171 0ustar liggesuserslibrary(loo) library(testthat) Sys.setenv("R_TESTS" = "") test_check("loo") loo/MD50000644000176200001440000002426115122444652011365 0ustar liggesusers61b7d20af6e2e06b93ff1ff3a72e22c0 *DESCRIPTION 6c67ff8f4e035be871c6ac8bf16ad7de *NAMESPACE fb5ac2c200f1230acce9c673a305c34a *NEWS.md d602edb650e582be46691bc4c6a8abfa *R/E_loo.R 592618e122c9a1a57e2ebb85421722a9 *R/compare.R 68a82a11003d2aa887453462e2a45efd *R/crps.R 7920e0c85842613222bf4d46b2ee4f71 *R/datasets.R 9afb20890cb130f8219aeaa4ed2627f4 *R/diagnostics.R 010feb774e7b15e0f798fa5c97828ae4 *R/effective_sample_sizes.R 9d359fe929465868cc244bdb50a3d48c *R/elpd.R 794e76e90c0ceb3a68d3727c434c0d07 *R/example_log_lik_array.R 22c4f73fa34dca703e6ac54a7419714d *R/extract_log_lik.R fe84e4c56fab6db87779cb200f3db60d *R/gpdfit.R d95773dbe9ca9dcdff89bb986b93c054 *R/helpers.R 35af069c217a8db7801c0e08d54fa846 *R/importance_sampling.R 97fac5bdafff56c4e5b157e43104b86f *R/kfold-generic.R f74e0d14901ef276b6ced99a028c2ef4 *R/kfold-helpers.R 7eb2765dae37424cf4e40c465b3bae21 *R/loo-glossary.R 0e8e98fafff913c845481a4c6699cdea *R/loo-package.R 4ce3755ffe362856728a9366bc808576 *R/loo.R e4aa553abde81233972e6e418b01d154 *R/loo_approximate_posterior.R 89bd73d20a5460f5391874fd6bf3db96 *R/loo_compare.R 801fd85e0e572930249b74c1eaa0bd37 *R/loo_compare.psis_loo_ss_list.R 97de5eb3edfd7fd720a4b278c269ec2b *R/loo_model_weights.R 36826449114f3a931e4038f48b67660b *R/loo_moment_matching.R cb15ceef6ce6beabbae0c40aefd571f8 *R/loo_predictive_metric.R 90e0c3f3e7c92a6b2881933b62d0a248 *R/loo_subsample.R 060df13535ae33a467ae364c036b7559 *R/pointwise.R c2d801eb2bcc7c36761265291f9b4d3a *R/print.R 94db7f6a09333b5d0b3ab113229156e6 *R/psis.R 6c1079d027e6570eb0faee678d550084 *R/psis_approximate_posterior.R 79d12f0591f709c55310c01880567c7c *R/psislw.R f1f6ad737d526846f6783256a9527187 *R/sis.R 6f775f85f33825686d2270c91f35d569 *R/split_moment_matching.R 78e03b9540b613c9c96fd50b224bfb4e *R/sysdata.rda 45cb7bfaedffbaca71f651f1376da22b *R/tis.R 003a59122717344cc227a4e0f242ee86 *R/waic.R 2a68648a24c53052290859807ec13c30 *R/zzz.R 76161b65639451c966be75488458b3c3 *build/partial.rdb 261891b837ff634dfa934393a6a5b760 *build/vignette.rds 50fc728beb18fe44e15ba70bff59513a *data/Kline.rda 05b97c90b223a661715a584b35efc3a5 *data/milk.rda 1b4b29f3be934b8ab7259b87e50e62be *data/voice.rda 15b58967ca7adeefbcb9de5c15795cd9 *data/voice_loo.rda 187d186f75df6e41e5205ba1c74a18c3 *inst/CITATION 
abc1bb1c92a9efbe191f1bbc20480155 *inst/doc/loo2-elpd.R 2585b7f573f37ed49f002c9a79e13cc7 *inst/doc/loo2-elpd.Rmd cd9f883dbb22f850b94125c9ee04bc59 *inst/doc/loo2-elpd.html cc6a762020d81169a4e603c9480edcc4 *inst/doc/loo2-example.R 9d5b84892e270fed15bfc632043616e3 *inst/doc/loo2-example.Rmd 6690afa1c95b27837da11c87da276d87 *inst/doc/loo2-example.html 1dbecabeeb06676ce99dc32856a17549 *inst/doc/loo2-large-data.R 43dfe1471ff838111fb60ca2edcea0dc *inst/doc/loo2-large-data.Rmd 334bf1fb23175504b2771e32a3e841c1 *inst/doc/loo2-large-data.html 26ff9de77f1698f2c7ce69126210c64a *inst/doc/loo2-lfo.R 5edda0ab2bb3fa3bbdf1d487089f8e1f *inst/doc/loo2-lfo.Rmd 01e77b3cc474eada00d0dfe2b8dd1ea7 *inst/doc/loo2-lfo.html 678bbece3ffd69893aa5a48fd64c93bd *inst/doc/loo2-mixis.R cdc94041da03fa90d06b98d51e73b2f0 *inst/doc/loo2-mixis.Rmd 4d9149186546fedf12151d53fede8ca4 *inst/doc/loo2-mixis.html 27c5d41d20690af35f6a88c89f56b92d *inst/doc/loo2-moment-matching.R 2b6277aab90a99c6f106e9411530f718 *inst/doc/loo2-moment-matching.Rmd 2397856a01f82731ddd961b6c88924c4 *inst/doc/loo2-moment-matching.html 2d0ba3c4f49f2c7a692333b31fd9cf59 *inst/doc/loo2-non-factorized.R dd0e377bf1803ff4fd80a1d7db0c0de4 *inst/doc/loo2-non-factorized.Rmd 5f6278a4a21893765cbcec228d1c2057 *inst/doc/loo2-non-factorized.html 785a84a597f0eb3f2c40d0ff07e2e753 *inst/doc/loo2-weights.R 45765abfd51e7039e17195a27bf88f8d *inst/doc/loo2-weights.Rmd bd1b76f564fd3b8be8480865de19293f *inst/doc/loo2-weights.html a3d9803b8299c445b497ead3617fc4cb *inst/doc/loo2-with-rstan.R 6c5671158ec6b8d8b5dcbc2ba16bff9d *inst/doc/loo2-with-rstan.Rmd e508573d32651be8fe0cb84142254e7a *inst/doc/loo2-with-rstan.html e2535ddd9c6a0ea35b045be9b5839590 *man/E_loo.Rd b39658276d53d7b4b083fc943553414f *man/ap_psis.Rd a234b956e272ad74be53c6024927e7f5 *man/compare.Rd 97f65a406e5ed40217d870c9ea85ae23 *man/crps.Rd bddccab2d6da0727a82fb0d6fb5d5119 *man/dot-compute_point_estimate.Rd 41026ffb7fe3ade4123deb0b652a15e9 *man/dot-ndraws.Rd fb9a0f3841aaf4360859aa9a44ca370f *man/dot-thin_draws.Rd 04ec6370a6242d07a5b23c39a862955f *man/elpd.Rd 4a5880e757cf95c2042d1a0c091ec0aa *man/example_loglik_array.Rd bb77d98af64bd7c1d306a173a5e8b9fa *man/extract_log_lik.Rd 0522ef082a6aa6f564af9866469bb183 *man/figures/logo.svg 50042e941f16bde46a08897a1ba49cec *man/find_model_names.Rd e51cc716f3e4466cf6a3a8ecd3b57c08 *man/gpdfit.Rd b18f2680bee6f3021d9e077d62ce9838 *man/importance_sampling.Rd b9c29d40f9ce9094d0b8062f88fc570d *man/kfold-generic.Rd 851ec677b1ac9c497cc266d3b2f75c40 *man/kfold-helpers.Rd 1685ed38f92cbf251144f84b1339422f *man/loo-datasets.Rd 8c36e534d3ef7a046513fcb9a250769c *man/loo-glossary.Rd 94b6379a55d1935c322294fd008f8d00 *man/loo-package.Rd 246bc5c72b7a0ddd03047afedbd9fe88 *man/loo.Rd ca62edc86a2eb612e9f234a230f61655 *man/loo_approximate_posterior.Rd 3d6a4d43c9618e0c03feef31d62a4349 *man/loo_compare.Rd f0b3a85ff76103180204cd75f1be8671 *man/loo_model_weights.Rd 02bc1a3c67c877643339591a5e61e20d *man/loo_moment_match.Rd 26a1b99d6f22eeb86986400db89d54d3 *man/loo_moment_match_split.Rd f880b2ad50014c4e1b6d95bdde5794e2 *man/loo_predictive_metric.Rd 864f9c9c2ff36042db722b225eca0762 *man/loo_subsample.Rd 91647ff3e4471cf4ff7341c20adf4b28 *man/nlist.Rd f6c5902f97b330c65c5499c37509c007 *man/nobs.psis_loo_ss.Rd 831f68ae97e4a9815baa062a82d763f1 *man/obs_idx.Rd 6b36fcb6b009e92e74f495c54f074e30 *man/old-extractors.Rd 032b4b870c16fd1e8ef74357bcb9b0ba *man/parallel_psis_list.Rd c694836137752805a6e965703e9005c5 *man/pareto-k-diagnostic.Rd 78fd2ac194e563dbc08b938c2254c1a4 *man/pointwise.Rd 5e5bd2bc8bcf6d81f6198546eea854bb 
*man/print.loo.Rd 71ac1a537a6489dfc1eba3c70c2e4ae3 *man/print_dims.Rd 91b63b7853d9ed62c48462b7b50b34e5 *man/psis.Rd 6b8c5b2c48d35090e56fcb0e1c0d29e5 *man/psis_approximate_posterior.Rd 9fd0acd2e3706d99a61e93439524cd00 *man/psislw.Rd fc1c1b874c615434a17c05ba15118e4f *man/relative_eff.Rd 9060f6e9e0ea5967353894f2ed5f6013 *man/sis.Rd 731eec59ec7e602a4f62fc6b0597e023 *man/tis.Rd e76af7cd4a71c6211cd64ec77cbbcda2 *man/update.psis_loo_ss.Rd ddad4a7dc91c58780b724a69140239b1 *man/waic.Rd 4ef3766a66b29568792ee07a28319587 *man/weights.importance_sampling.Rd 578103d7f631ca1e067152f694081929 *tests/testthat.R 9f5c8ccaa91dcf521bc4a56bf363ba2e *tests/testthat/_snaps/E_loo.md 073d9a5f173559cc9325d9558536a47f *tests/testthat/_snaps/compare.md 05359290c78b3805a269daf9480d741f *tests/testthat/_snaps/crps.md 052316bb7162d7b8dbda99ea65afc616 *tests/testthat/_snaps/deprecated_extractors.md a33e4d4e704b7e361d9b330931260618 *tests/testthat/_snaps/gpdfit.md eca9a83490f8132311d8a8588f9e6833 *tests/testthat/_snaps/loo_and_waic.md abbf88923d932510fcefd79ad878e0cc *tests/testthat/_snaps/loo_moment_matching.md 6b50093f8383d1cc9b8421b1b1556bb7 *tests/testthat/_snaps/loo_predictive_metric.md 3c0cdb6b31dc3e25e43d956b95d9849f *tests/testthat/_snaps/loo_subsampling.md 9461401a252ad11f71157d557755e528 *tests/testthat/_snaps/model_weighting.md 2229fa359b071ada227a7c48a158bf91 *tests/testthat/_snaps/print_plot.md 4f92742a3a59c1b853629bec9a769713 *tests/testthat/_snaps/psis.md 22fe58533bdc981476c74e9c1de547a9 *tests/testthat/_snaps/psislw.md 81eb5ebe63d6aa6265baf8fb01e598d3 *tests/testthat/_snaps/relative_eff.md b17a0cc251343198244a40c7f1b43255 *tests/testthat/_snaps/tisis.md 2ac0cbcea1c3877ab7189ce6ef547456 *tests/testthat/data-for-tests/function_method_stuff.R e15f62365625657f8a5736d9ddd27ed9 *tests/testthat/data-for-tests/loo_subsample_vignette.rda 7d48326f38041a6a94282084a6cc100a *tests/testthat/data-for-tests/normal_reg_waic_test_example.rda bc687119963330560b4dd17fcfc0b568 *tests/testthat/data-for-tests/normal_reg_waic_test_example2.rda 12eca1a2e7d6acaeb276f1d43be11405 *tests/testthat/data-for-tests/test_data_psis_approximate_posterior.rda 461e6f7d77a22108db7634c8eb16de31 *tests/testthat/data-for-tests/test_radon_laplace_loo.rda 61746de573d7a667cb6278c82b1fc5a2 *tests/testthat/test_0_helpers.R 025f4def14d8c2aceba8f574b58def5f *tests/testthat/test_E_loo.R 20a33d603356948dd0603e01bce8cafe *tests/testthat/test_compare.R eb38eb1cd7e6f13ea59f24d482737768 *tests/testthat/test_crps.R 8697e747abc0e91e457b1c93ba58fb20 *tests/testthat/test_deprecated_extractors.R 5cabbfa70dc9d58a4fe8dd9dcf2a9ef8 *tests/testthat/test_extract_log_lik.R c3610f25d2f9bc9f3d383818d7e8f57e *tests/testthat/test_gpdfit.R 8a835a49a50a34650afe15879263ceed *tests/testthat/test_kfold_helpers.R 3dcf3b5013f2e755c0085d1314cf7030 *tests/testthat/test_loo_and_waic.R bd464410a3705f412b8ba6060552b414 *tests/testthat/test_loo_approximate_posterior.R 9d0a8e6dfb36a0e40b1d2f10c10454fd *tests/testthat/test_loo_moment_matching.R e83c7a09888be63f81c2e8ab8337e885 *tests/testthat/test_loo_predictive_metric.R 9a40af25cbbffd159c2e9aab59e7340a *tests/testthat/test_loo_subsampling.R 04054fdd55bf202bd6ddb93c007b79a7 *tests/testthat/test_loo_subsampling_approximations.R b25d6a7918446984ad11742860591eb8 *tests/testthat/test_loo_subsampling_cases.R 623ecd0236900eda85a4fcae54769779 *tests/testthat/test_model_weighting.R ebf48645e716ac46d07ceebe74424a9c *tests/testthat/test_pointwise.R 1c9b080d33ab76a8fa0e1002ac942822 *tests/testthat/test_print_plot.R 037e4a08aa1e264362b648092b4ea398 
*tests/testthat/test_psis.R 8a701af5a5cfd5479faf80affafcc732 *tests/testthat/test_psis_approximate_posterior.R 930ff8a233b2516b4f7222803dc1b761 *tests/testthat/test_psislw.R d5b5929afedfa93193757d8263c8d4c4 *tests/testthat/test_relative_eff.R 26cf745d03d868ce6f818fa64b331324 *tests/testthat/test_tisis.R 8d1d0c1c75700b9a6a0b760b687f9615 *vignettes/children/SEE-ONLINE.txt de25cd7c13804ca23404116a8b3a9ec9 *vignettes/children/SETTINGS-knitr.txt 0522ef082a6aa6f564af9866469bb183 *vignettes/logo.svg 2585b7f573f37ed49f002c9a79e13cc7 *vignettes/loo2-elpd.Rmd 9d5b84892e270fed15bfc632043616e3 *vignettes/loo2-example.Rmd 43dfe1471ff838111fb60ca2edcea0dc *vignettes/loo2-large-data.Rmd 5edda0ab2bb3fa3bbdf1d487089f8e1f *vignettes/loo2-lfo.Rmd cdc94041da03fa90d06b98d51e73b2f0 *vignettes/loo2-mixis.Rmd 2b6277aab90a99c6f106e9411530f718 *vignettes/loo2-moment-matching.Rmd dd0e377bf1803ff4fd80a1d7db0c0de4 *vignettes/loo2-non-factorized.Rmd 45765abfd51e7039e17195a27bf88f8d *vignettes/loo2-weights.Rmd 6c5671158ec6b8d8b5dcbc2ba16bff9d *vignettes/loo2-with-rstan.Rmd loo/R/0000755000176200001440000000000015122306004011235 5ustar liggesusersloo/R/kfold-generic.R0000644000176200001440000000316213575772017014120 0ustar liggesusers#' Generic function for K-fold cross-validation for developers #' #' @description For developers of Bayesian modeling packages, **loo** includes #' a generic function `kfold()` so that methods may be defined for K-fold #' CV without name conflicts between packages. See, for example, the #' `kfold()` methods in the **rstanarm** and **brms** packages. #' #' The **Value** section below describes the objects that `kfold()` #' methods should return in order to be compatible with #' [loo_compare()] and the **loo** package print methods. #' #' #' @name kfold-generic #' @param x A fitted model object. #' @param ... Arguments to pass to specific methods. #' #' @return For developers defining a `kfold()` method for a class #' `"foo"`, the `kfold.foo()` function should return a list with class #' `c("kfold", "loo")` with at least the following named elements: #' * `"estimates"`: A `1x2` matrix containing the ELPD estimate and its #' standard error. The matrix must have row name "`elpd_kfold`" and column #' names `"Estimate"` and `"SE"`. #' * `"pointwise"`: A `Nx1` matrix with column name `"elpd_kfold"` containing #' the pointwise contributions for each data point. #' #' It is important for the object to have at least these classes and #' components so that it is compatible with other functions like #' [loo_compare()] and `print()` methods. #' NULL #' @rdname kfold-generic #' @export kfold <- function(x, ...) { UseMethod("kfold") } #' @rdname kfold-generic #' @export is.kfold <- function(x) { inherits(x, "kfold") && is.loo(x) } #' @export dim.kfold <- function(x) { attr(x, "dims") } loo/R/E_loo.R0000644000176200001440000002410715100712400012416 0ustar liggesusers#' Compute weighted expectations #' #' The `E_loo()` function computes weighted expectations (means, variances, #' quantiles) using the importance weights obtained from the [PSIS][psis()] #' smoothing procedure. The expectations estimated by the `E_loo()` function #' assume that the PSIS approximation is working well. #' **A small [Pareto k][pareto-k-diagnostic] estimate is necessary, #' but not sufficient, for `E_loo()` to give reliable estimates**. If the #' `log_ratios` argument is provided, `E_loo()` also computes a function #' specific Pareto k diagnostic, which must also be small for a reliable #' estimate. See more details below. 
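#'
#' As a brief illustration of the computation (shown here for exposition), for
#' a single observation and `type = "mean"` the returned value is the
#' self-normalized importance sampling estimate `sum(w * x)`, where `w` are the
#' normalized smoothed weights obtained via `weights(psis_object, log = FALSE)`
#' and `x` contains the corresponding draws of the quantity of interest.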
#' #' @export #' @param x A numeric vector or matrix. #' @param psis_object An object returned by [psis()]. #' @param log_ratios Optionally, a vector or matrix (the same dimensions as `x`) #' of raw (not smoothed) log ratios. If working with log-likelihood values, #' the log ratios are the **negative** of those values. If `log_ratios` is #' specified we are able to compute more accurate [Pareto k][pareto-k-diagnostic] #' diagnostics specific to `E_loo()`. #' @param type The type of expectation to compute. The options are #' `"mean"`, `"variance"`, `"sd"`, and `"quantile"`. #' @param probs For computing quantiles, a vector of probabilities. #' @param ... Arguments passed to individual methods. #' #' @return A named list with the following components: #' \describe{ #' \item{`value`}{ #' The result of the computation. #' #' For the matrix method, `value` is a vector with `ncol(x)` #' elements, with one exception: when `type="quantile"` and #' multiple values are specified in `probs` the `value` component of #' the returned object is a `length(probs)` by `ncol(x)` matrix. #' #' For the default/vector method the `value` component is scalar, with #' one exception: when `type="quantile"` and multiple values #' are specified in `probs` the `value` component is a vector with #' `length(probs)` elements. #' } #' \item{`pareto_k`}{ #' Function-specific diagnostic. #' #' For the matrix method it will be a vector of length `ncol(x)` #' containing estimates of the shape parameter \eqn{k} of the #' generalized Pareto distribution. For the default/vector method, #' the estimate is a scalar. If `log_ratios` is not specified when #' calling `E_loo()`, the smoothed log-weights are used to estimate #' Pareto-k's, which may produce optimistic estimates. #' #' For `type="mean"`, `type="var"`, and `type="sd"`, the returned Pareto-k is #' usually the maximum of the Pareto-k's for the left and right tail of \eqn{hr} #' and the right tail of \eqn{r}, where \eqn{r} is the importance ratio and #' \eqn{h=x} for `type="mean"` and \eqn{h=x^2} for `type="var"` and `type="sd"`. #' If \eqn{h} is binary, constant, or not finite, or if `type="quantile"`, the #' returned Pareto-k is the Pareto-k for the right tail of \eqn{r}. #' } #' } #' #' #' @examples #' \donttest{ #' if (requireNamespace("rstanarm", quietly = TRUE)) { #' # Use rstanarm package to quickly fit a model and get both a log-likelihood #' # matrix and draws from the posterior predictive distribution #' library("rstanarm") #' #' # data from help("lm") #' ctl <- c(4.17,5.58,5.18,6.11,4.50,4.61,5.17,4.53,5.33,5.14) #' trt <- c(4.81,4.17,4.41,3.59,5.87,3.83,6.03,4.89,4.32,4.69) #' d <- data.frame( #' weight = c(ctl, trt), #' group = gl(2, 10, 20, labels = c("Ctl","Trt")) #' ) #' fit <- stan_glm(weight ~ group, data = d, refresh = 0) #' yrep <- posterior_predict(fit) #' dim(yrep) #' #' log_ratios <- -1 * log_lik(fit) #' dim(log_ratios) #' #' r_eff <- relative_eff(exp(-log_ratios), chain_id = rep(1:4, each = 1000)) #' psis_object <- psis(log_ratios, r_eff = r_eff, cores = 2) #' #' E_loo(yrep, psis_object, type = "mean") #' E_loo(yrep, psis_object, type = "var") #' E_loo(yrep, psis_object, type = "sd") #' E_loo(yrep, psis_object, type = "quantile", probs = 0.5) # median #' E_loo(yrep, psis_object, type = "quantile", probs = c(0.1, 0.9)) #' #' # We can get more accurate Pareto k diagnostic if we also provide #' # the log_ratios argument #' E_loo(yrep, psis_object, type = "mean", log_ratios = log_ratios) #' } #' } #' E_loo <- function(x, psis_object, ...) 
{ UseMethod("E_loo") } #' @rdname E_loo #' @export E_loo.default <- function(x, psis_object, ..., type = c("mean", "variance", "sd", "quantile"), probs = NULL, log_ratios = NULL) { stopifnot( is.numeric(x), is.psis(psis_object), length(x) == dim(psis_object)[1], is.null(log_ratios) || (length(x) == length(log_ratios)) ) type <- match.arg(type) E_fun <- .E_fun(type) w <- as.vector(weights(psis_object, log = FALSE)) x <- as.vector(x) out <- E_fun(x, w, probs) if (is.null(log_ratios)) { # Use of smoothed ratios gives slightly optimistic # Pareto-k's, but these are still better than nothing log_ratios <- weights(psis_object, log = TRUE) } h <- switch( type, "mean" = x, "variance" = x^2, "sd" = x^2, "quantile" = NULL ) khat <- E_loo_khat.default(h, psis_object, log_ratios) list(value = out, pareto_k = khat) } #' @rdname E_loo #' @export E_loo.matrix <- function(x, psis_object, ..., type = c("mean", "variance", "sd", "quantile"), probs = NULL, log_ratios = NULL) { stopifnot( is.numeric(x), is.psis(psis_object), identical(dim(x), dim(psis_object)), is.null(log_ratios) || identical(dim(x), dim(log_ratios)) ) type <- match.arg(type) E_fun <- .E_fun(type) fun_val <- numeric(1) if (type == "quantile") { stopifnot( is.numeric(probs), length(probs) >= 1, all(probs > 0 & probs < 1) ) fun_val <- numeric(length(probs)) } w <- weights(psis_object, log = FALSE) out <- vapply(seq_len(ncol(x)), function(i) { E_fun(x[, i], w[, i], probs = probs) }, FUN.VALUE = fun_val) if (is.null(log_ratios)) { # Use of smoothed ratios gives slightly optimistic # Pareto-k's, but these are still better than nothing log_ratios <- weights(psis_object, log = TRUE) } h <- switch( type, "mean" = x, "variance" = x^2, "sd" = x^2, "quantile" = NULL ) khat <- E_loo_khat.matrix(h, psis_object, log_ratios) list(value = out, pareto_k = khat) } #' Select the function to use based on user's 'type' argument #' #' @noRd #' @param type User's `type` argument. #' @return The function for computing the weighted expectation specified by #' `type`. #' .E_fun <- function(type = c("mean", "variance", "sd", "quantile")) { switch( type, "mean" = .wmean, "variance" = .wvar, "sd" = .wsd, "quantile" = .wquant ) } #' loo-weighted mean, variance, and quantiles #' #' @noRd #' @param x,w Vectors of the same length. This should be checked inside #' `E_loo()` before calling these functions. #' @param probs Vector of probabilities. #' @param ... ignored. Having ... allows `probs` to be passed to `.wmean()` and #' `.wvar()` in `E_loo()` without resulting in an error. #' .wmean <- function(x, w, ...) { sum(w * x) } .wvar <- function(x, w, ...) { # The denominator (1- sum(w^2)) is equal to (ESS-1)/ESS, where effective # sample size ESS is estimated with the generic target quantity invariant # estimate 1/sum(w^2), see e.g. "Monte Carlo theory, methods and examples" # by Owen (2013). (sum(.wmean(x^2, w)) - sum(.wmean(x, w)^2)) / (1 - sum(w^2)) } .wsd <- function(x, w, ...) { sqrt(.wvar(x, w)) } .wquant <- function(x, w, probs, ...) 
{ if (all(w == w[1])) { return(quantile(x, probs = probs, names = FALSE)) } ord <- order(x) x <- x[ord] w <- w[ord] ww <- cumsum(w) ww <- ww / ww[length(ww)] qq <- numeric(length(probs)) for (j in seq_along(probs)) { ids <- which(ww >= probs[j]) wi <- min(ids) if (wi == 1) { qq[j] <- x[1] } else { w1 <- ww[wi - 1] x1 <- x[wi - 1] qq[j] <- x1 + (x[wi] - x1) * (probs[j] - w1) / (ww[wi] - w1) } } return(qq) } #' Compute function-specific k-hat diagnostics #' #' @noRd #' @param log_ratios Vector or matrix of raw (not smoothed) log ratios with the #' same dimensions as `x`. If working with log-likelihood values, the log #' ratios are the negative of those values. #' @return Vector (of length `NCOL(x)`) of k-hat estimates. #' E_loo_khat <- function(x, psis_object, log_ratios, ...) { UseMethod("E_loo_khat") } #' @export E_loo_khat.default <- function(x, psis_object, log_ratios, ...) { .E_loo_khat_i(x, log_ratios, attr(psis_object, "tail_len")) } #' @export E_loo_khat.matrix <- function(x, psis_object, log_ratios, ...) { tail_lengths <- attr(psis_object, "tail_len") if (is.null(x)) { sapply(seq_len(ncol(log_ratios)), function(i) { .E_loo_khat_i(x, log_ratios[, i], tail_lengths[i]) }) } else { sapply(seq_len(ncol(log_ratios)), function(i) { .E_loo_khat_i(x[, i], log_ratios[, i], tail_lengths[i]) }) } } #' Compute function-specific khat estimates #' #' @noRd #' @param x_i Vector of values of function h(theta) #' @param log_ratios_i S-vector of log_ratios, log(r(theta)), for a single #' observation. #' @param tail_len_i Integer tail length used for fitting GPD. #' @return Scalar h-specific k-hat estimate. #' .E_loo_khat_i <- function(x_i, log_ratios_i, tail_len_i) { h_theta <- x_i r_theta <- exp(log_ratios_i - max(log_ratios_i)) khat_r <- posterior::pareto_khat(r_theta, tail = "right", ndraws_tail = tail_len_i) if (is.list(khat_r)) { # retain compatiblity with older posterior that returned a list khat_r <- khat_r$khat } if (is.null(x_i) || is_constant(x_i) || length(unique(x_i))==2 || anyNA(x_i) || any(is.infinite(x_i))) { khat_r } else { khat_hr <- posterior::pareto_khat(h_theta * r_theta, tail = "both", ndraws_tail = tail_len_i) if (is.list(khat_hr)) { # retain compatiblity with older posterior that returned a list khat_hr <- khat_hr$khat } if (is.na(khat_hr) && is.na(khat_r)) { k <- NA } else { k <- max(khat_hr, khat_r, na.rm=TRUE) } k } } loo/R/importance_sampling.R0000644000176200001440000001656614566461605015455 0ustar liggesusers#' A parent class for different importance sampling methods. #' #' @inheritParams psis #' @param method The importance sampling method to use. The following methods #' are implemented: #' * [`"psis"`][psis]: Pareto-Smoothed Importance Sampling (PSIS). Default method. #' * [`"tis"`][tis]: Truncated Importance Sampling (TIS) with truncation at #' `sqrt(S)`, where `S` is the number of posterior draws. #' * [`"sis"`][sis]: Standard Importance Sampling (SIS). #' importance_sampling <- function(log_ratios, method, ...) 
{ UseMethod("importance_sampling") } #' @rdname importance_sampling #' @inheritParams psis #' @export importance_sampling.array <- function(log_ratios, method, ..., r_eff = 1, cores = getOption("mc.cores", 1)) { cores <- loo_cores(cores) stopifnot(length(dim(log_ratios)) == 3) assert_importance_sampling_method_is_implemented(method) log_ratios <- validate_ll(log_ratios) log_ratios <- llarray_to_matrix(log_ratios) r_eff <- prepare_psis_r_eff(r_eff, len = ncol(log_ratios)) do_importance_sampling(log_ratios, r_eff = r_eff, cores = cores, method = method) } #' @rdname importance_sampling #' @inheritParams psis #' @export importance_sampling.matrix <- function(log_ratios, method, ..., r_eff = 1, cores = getOption("mc.cores", 1)) { cores <- loo_cores(cores) assert_importance_sampling_method_is_implemented(method) log_ratios <- validate_ll(log_ratios) r_eff <- prepare_psis_r_eff(r_eff, len = ncol(log_ratios)) do_importance_sampling(log_ratios, r_eff = r_eff, cores = cores, method = method) } #' @rdname importance_sampling #' @inheritParams psis #' @export importance_sampling.default <- function(log_ratios, method, ..., r_eff = 1) { stopifnot(is.null(dim(log_ratios)) || length(dim(log_ratios)) == 1) assert_importance_sampling_method_is_implemented(method) dim(log_ratios) <- c(length(log_ratios), 1) r_eff <- prepare_psis_r_eff(r_eff, len = 1) importance_sampling.matrix(log_ratios, r_eff = r_eff, cores = 1, method = method) } #' @export dim.importance_sampling <- function(x) { attr(x, "dims") } #' Extract importance sampling weights #' #' @export #' @export weights.importance_sampling #' @method weights importance_sampling #' @param object An object returned by [psis()], [tis()], or [sis()]. #' @param log Should the weights be returned on the log scale? Defaults to #' `TRUE`. #' @param normalize Should the weights be normalized? Defaults to `TRUE`. #' @param ... Ignored. #' #' @return The `weights()` method returns an object with the same dimensions as #' the `log_weights` component of `object`. The `normalize` and `log` #' arguments control whether the returned weights are normalized and whether #' or not to return them on the log scale. #' #' @examples #' # See the examples at help("psis") #' weights.importance_sampling <- function(object, ..., log = TRUE, normalize = TRUE) { out <- object[["log_weights"]] # smoothed but unnormalized log weights if (normalize) { out <- sweep(out, MARGIN = 2, STATS = attr(object, "norm_const_log"), # colLogSumExp(log_weights) check.margin = FALSE) } if (!log) { out <- exp(out) } return(out) } # internal ---------------------------------------------------------------- #' Validate selected importance sampling method #' @noRd #' @keywords internal #' @description #' Currently implemented importance sampling methods assert_importance_sampling_method_is_implemented <- function(x){ if (!x %in% implemented_is_methods()) { stop("Importance sampling method '", x, "' is not implemented. Implemented methods: '", paste0(implemented_is_methods, collapse = "', '"), "'") } } implemented_is_methods <- function() c("psis", "tis", "sis") #' Structure the object returned by the importance_sampling methods #' #' @noRd #' @param unnormalized_log_weights Smoothed and possibly truncated log weights, #' but unnormalized. #' @param pareto_k Vector of GPD k estimates. #' @param tail_len Vector of tail lengths used to fit GPD. 
#' @param r_eff Vector of relative MCMC ESS (n_eff) for `exp(log lik)` #' @template is_method #' @return A list of class `"psis"` with structure described in the main doc at #' the top of this file. #' importance_sampling_object <- function(unnormalized_log_weights, pareto_k, tail_len, r_eff, method) { stopifnot(is.matrix(unnormalized_log_weights)) methods <- unique(method) stopifnot(all(methods %in% implemented_is_methods())) if (length(methods) == 1) { method <- methods classes <- c(tolower(method), "importance_sampling", "list") } else { classes <- c("importance_sampling", "list") } norm_const_log <- matrixStats::colLogSumExps(unnormalized_log_weights) out <- structure( list( log_weights = unnormalized_log_weights, diagnostics = list(pareto_k = pareto_k, n_eff = NULL, r_eff = r_eff) ), # attributes norm_const_log = norm_const_log, tail_len = tail_len, r_eff = r_eff, dims = dim(unnormalized_log_weights), method = method, class = classes ) # need normalized weights (not on log scale) for psis_n_eff w <- weights(out, normalize = TRUE, log = FALSE) out$diagnostics[["n_eff"]] <- psis_n_eff(w, r_eff) return(out) } #' Do importance sampling given matrix of log weights #' #' @noRd #' @param lr Matrix of log ratios (`-loglik`) #' @param r_eff Vector of relative effective sample sizes #' @param cores User's integer `cores` argument #' @return A list with class `"psis"` and structure described in the main doc at #' the top of this file. #' do_importance_sampling <- function(log_ratios, r_eff, cores, method) { stopifnot(cores == as.integer(cores)) assert_importance_sampling_method_is_implemented(method) N <- ncol(log_ratios) S <- nrow(log_ratios) k_threshold <- ps_khat_threshold(S) tail_len <- n_pareto(r_eff, S) if (method == "psis") { is_fun <- do_psis_i throw_tail_length_warnings(tail_len) } else if (method == "tis") { is_fun <- do_tis_i } else if (method == "sis") { is_fun <- do_sis_i } else { stop("Incorrect IS method.") } if (cores == 1) { lw_list <- lapply(seq_len(N), function(i) is_fun(log_ratios_i = log_ratios[, i], tail_len_i = tail_len[i])) } else { if (!os_is_windows()) { lw_list <- parallel::mclapply( X = seq_len(N), mc.cores = cores, FUN = function(i) is_fun(log_ratios_i = log_ratios[, i], tail_len_i = tail_len[i]) ) } else { cl <- parallel::makePSOCKcluster(cores) on.exit(parallel::stopCluster(cl)) lw_list <- parallel::parLapply( cl = cl, X = seq_len(N), fun = function(i) is_fun(log_ratios_i = log_ratios[, i], tail_len_i = tail_len[i]) ) } } log_weights <- psis_apply(lw_list, "log_weights", fun_val = numeric(S)) pareto_k <- psis_apply(lw_list, "pareto_k") throw_pareto_warnings(pareto_k, k_threshold) importance_sampling_object( unnormalized_log_weights = log_weights, pareto_k = pareto_k, tail_len = tail_len, r_eff = r_eff, method = rep(method, length(pareto_k)) # Conform to other attr that exist per obs. ) } loo/R/psis.R0000644000176200001440000003132715074562565012372 0ustar liggesusers#' Pareto smoothed importance sampling (PSIS) #' #' Implementation of Pareto smoothed importance sampling (PSIS), a method for #' stabilizing importance ratios. The version of PSIS implemented here #' corresponds to the algorithm presented in Vehtari, Simpson, Gelman, Yao, #' and Gabry (2024). #' For PSIS diagnostics see the [pareto-k-diagnostic] page. #' #' @export #' @param log_ratios An array, matrix, or vector of importance ratios on the log #' scale (for PSIS-LOO these are *negative* log-likelihood values). 
See the #' **Methods (by class)** section below for a detailed description of how #' to specify the inputs for each method. #' @param ... Arguments passed on to the various methods. #' @template cores #' @param r_eff Vector of relative effective sample size estimates containing #' one element per observation. The values provided should be the relative #' effective sample sizes of `1/exp(log_ratios)` (i.e., `1/ratios`). #' This is related to the relative efficiency of estimating the normalizing #' term in self-normalizing importance sampling. If `r_eff` is not #' provided then the reported PSIS effective sample sizes and Monte Carlo #' error estimates can be over-optimistic. If the posterior draws are (near) #' independent then `r_eff=1` can be used. `r_eff` has to be a scalar (same #' value is used for all observations) or a vector with length equal to the #' number of observations. The default value is 1. See the [relative_eff()] #' helper function for computing `r_eff`. #' #' @return The `psis()` methods return an object of class `"psis"`, #' which is a named list with the following components: #' #' \describe{ #' \item{`log_weights`}{ #' Vector or matrix of smoothed (and truncated) but *unnormalized* log #' weights. To get normalized weights use the #' [`weights()`][weights.importance_sampling] method provided for objects of #' class `"psis"`. #' } #' \item{`diagnostics`}{ #' A named list containing two vectors: #' * `pareto_k`: Estimates of the shape parameter \eqn{k} of the #' generalized Pareto distribution. See the [pareto-k-diagnostic] #' page for details. #' * `n_eff`: PSIS effective sample size estimates. #' } #' } #' #' Objects of class `"psis"` also have the following [attributes][attributes()]: #' \describe{ #' \item{`norm_const_log`}{ #' Vector of precomputed values of `colLogSumExps(log_weights)` that are #' used internally by the `weights` method to normalize the log weights. #' } #' \item{`tail_len`}{ #' Vector of tail lengths used for fitting the generalized Pareto distribution. #' } #' \item{`r_eff`}{ #' If specified, the user's `r_eff` argument. #' } #' \item{`dims`}{ #' Integer vector of length 2 containing `S` (posterior sample size) #' and `N` (number of observations). #' } #' \item{`method`}{ #' Method used for importance sampling, here `psis`. #' } #' } #' #' @seealso #' * [loo()] for approximate LOO-CV using PSIS. #' * [pareto-k-diagnostic] for PSIS diagnostics. #' * The __loo__ package [vignettes](https://mc-stan.org/loo/articles/index.html) #' for demonstrations. #' * The [FAQ page](https://mc-stan.org/loo/articles/online-only/faq.html) on #' the __loo__ website for answers to frequently asked questions. #' #' @template loo-and-psis-references #' #' @examples #' log_ratios <- -1 * example_loglik_array() #' r_eff <- relative_eff(exp(-log_ratios)) #' psis_result <- psis(log_ratios, r_eff = r_eff) #' str(psis_result) #' plot(psis_result) #' #' # extract smoothed weights #' lw <- weights(psis_result) # default args are log=TRUE, normalize=TRUE #' ulw <- weights(psis_result, normalize=FALSE) # unnormalized log-weights #' #' w <- weights(psis_result, log=FALSE) # normalized weights (not log-weights) #' uw <- weights(psis_result, log=FALSE, normalize = FALSE) # unnormalized weights #' #' #' psis <- function(log_ratios, ...) 
UseMethod("psis") #' @export #' @templateVar fn psis #' @template array #' psis.array <- function(log_ratios, ..., r_eff = 1, cores = getOption("mc.cores", 1)) { importance_sampling.array( log_ratios = log_ratios, ..., r_eff = r_eff, cores = cores, method = "psis" ) } #' @export #' @templateVar fn psis #' @template matrix #' psis.matrix <- function(log_ratios, ..., r_eff = 1, cores = getOption("mc.cores", 1)) { importance_sampling.matrix( log_ratios, ..., r_eff = r_eff, cores = cores, method = "psis" ) } #' @export #' @templateVar fn psis #' @template vector #' psis.default <- function(log_ratios, ..., r_eff = 1) { importance_sampling.default( log_ratios = log_ratios, ..., r_eff = r_eff, method = "psis" ) } #' @rdname psis #' @export #' @param x For `is.psis()`, an object to check. is.psis <- function(x) { inherits(x, "psis") && is.list(x) } # internal ---------------------------------------------------------------- #' @noRd #' @seealso importance_sampling_object psis_object <- function(unnormalized_log_weights, pareto_k, tail_len, r_eff) { importance_sampling_object( unnormalized_log_weights = unnormalized_log_weights, pareto_k = pareto_k, tail_len = tail_len, r_eff = r_eff, method = "psis" ) } #' @noRd #' @seealso do_importance_sampling do_psis <- function(log_ratios, r_eff, cores, method) { do_importance_sampling( log_ratios = log_ratios, r_eff = r_eff, cores = cores, method = "psis" ) } #' Extract named components from each list in the list of lists obtained by #' parallelizing `do_psis_i()` #' #' @noRd #' @param x List of lists. #' @param item String naming the component or attribute to pull out of each list #' (or list-like object). #' @param fun,fun.val passed to `vapply()`'s `FUN` and `FUN.VALUE` arguments. #' @return Numeric vector or matrix. #' psis_apply <- function(x, item, fun = c("[[", "attr"), fun_val = numeric(1)) { if (!is.list(x)) { stop("Internal error ('x' must be a list for psis_apply)") } vapply(x, FUN = match.arg(fun), FUN.VALUE = fun_val, item) } #' PSIS on a single vector #' #' @noRd #' @param log_ratios_i A vector of log importance ratios (for `loo()`, negative #' log likelihoods). #' @param tail_len_i An integer tail length. #' @param ... Not used. Included to conform to API for differen IS methods. #' #' @details #' * If there are enough tail samples then the tail is smoothed with PSIS #' * The log weights (or log ratios if no smoothing) larger than the largest raw #' ratio are set to the largest raw ratio #' #' @return A named list containing: #' * `lw`: vector of unnormalized log weights #' * `pareto_k`: scalar Pareto k estimate. #' do_psis_i <- function(log_ratios_i, tail_len_i, ...) { S <- length(log_ratios_i) # shift log ratios for safer exponentation lw_i <- log_ratios_i - max(log_ratios_i) khat <- Inf if (enough_tail_samples(tail_len_i)) { ord <- sort.int(lw_i, index.return = TRUE) tail_ids <- seq(S - tail_len_i + 1, S) lw_tail <- ord$x[tail_ids] if (abs(max(lw_tail) - min(lw_tail)) < .Machine$double.eps / 100) { warning( "Can't fit generalized Pareto distribution ", "because all tail values are the same.", call. 
= FALSE ) } else { cutoff <- ord$x[min(tail_ids) - 1] # largest value smaller than tail values smoothed <- psis_smooth_tail(lw_tail, cutoff) khat <- smoothed$k lw_i[ord$ix[tail_ids]] <- smoothed$tail } } # truncate at max of raw wts (i.e., 0 since max has been subtracted) lw_i[lw_i > 0] <- 0 # shift log weights back so that the smallest log weights remain unchanged lw_i <- lw_i + max(log_ratios_i) list(log_weights = lw_i, pareto_k = khat) } #' PSIS tail smoothing for a single vector #' #' @noRd #' @param x Vector of tail elements already sorted in ascending order. #' @return A named list containing: #' * `tail`: vector same size as `x` containing the logs of the #' order statistics of the generalized pareto distribution. #' * `k`: scalar shape parameter estimate. #' psis_smooth_tail <- function(x, cutoff) { len <- length(x) exp_cutoff <- exp(cutoff) # save time not sorting since x already sorted fit <- gpdfit(exp(x) - exp_cutoff, sort_x = FALSE) k <- fit$k sigma <- fit$sigma if (is.finite(k)) { p <- (seq_len(len) - 0.5) / len qq <- qgpd(p, k, sigma) + exp_cutoff tail <- log(qq) } else { tail <- x } list(tail = tail, k = k) } #' Calculate tail lengths to use for fitting the GPD #' #' The number of weights (i.e., tail length) used to fit the generalized Pareto #' distribution is now decreasing with the number of posterior draws S, and is #' also adjusted based on the relative MCMC neff for `exp(log_lik)`. This will #' answer the questions about the asymptotic properties, works better for thick #' tailed proposal distributions, and is adjusted based on dependent Markov chain #' samples. Specifically, the tail length is now `3*sqrt(S)/r_eff` but capped at #' 20% of the total number of weights. #' #' @noRd #' @param r_eff A N-vector or scalar of relative MCMC effective sample sizes of #' `exp(log-lik matrix)`. The default value is 1. #' @param S The (integer) size of posterior sample. #' @return An N-vector of tail lengths. #' n_pareto <- function(r_eff, S) { if (isTRUE(is.null(r_eff) || all(is.na(r_eff)))) { r_eff <- 1 } ceiling(pmin(0.2 * S, 3 * sqrt(S / r_eff))) } #' Check for enough tail samples to fit GPD #' #' @noRd #' @param tail_len Integer tail length. #' @param min_len The minimum allowed tail length. #' @return `TRUE` or `FALSE` #' enough_tail_samples <- function(tail_len, min_len = 5) { tail_len >= min_len } #' Throw warnings about Pareto k estimates #' #' @noRd #' @param k A vector of Pareto k estimates. #' @param k_threshold The value at which to warn about high Pareto k estimates. #' @return Nothing, just possibly throws warnings. #' throw_pareto_warnings <- function(k, k_threshold) { if (isTRUE(any(k > k_threshold))) { .warn("Some Pareto k diagnostic values are too high. ", .k_help()) } } #' Warn if not enough tail samples to fit GPD #' #' @noRd #' @param tail_lengths Vector of tail lengths. #' @return `tail_lengths`, invisibly. #' throw_tail_length_warnings <- function(tail_lengths) { tail_len_bad <- !sapply(tail_lengths, enough_tail_samples) if (any(tail_len_bad)) { if (length(tail_lengths) == 1) { warning( "Not enough tail samples to fit the generalized Pareto distribution.", call. = FALSE, immediate. = TRUE ) } else { bad <- which(tail_len_bad) Nbad <- length(bad) warning( "Not enough tail samples to fit the generalized Pareto distribution ", "in some or all columns of matrix of log importance ratios. ", "Skipping the following columns: ", paste(if (Nbad <= 10) bad else bad[1:10], collapse = ", "), if (Nbad > 10) { paste0(", ... 
[", Nbad - 10, " more not printed].\n") } else { "\n" }, call. = FALSE, immediate. = TRUE ) } } invisible(tail_lengths) } #' Prepare `r_eff` to pass to `psis()` and throw warnings/errors if necessary #' #' @noRd #' @param r_eff User's `r_eff` argument. #' @param len The length `r_eff` should have if not `NULL` or `NA`. #' @return #' * If `r_eff` has length `len` then `r_eff` is returned. #' * If `r_eff` is `NULL` then `rep(1, len)` is returned. #' * If `r_eff` is `NA` then `rep(1, len)` is returned. #' * If `r_eff` is a scalar then `rep(r_eff, len)` is returned. #' * If `r_eff` is not a scalar but the length is not `len` then an error is thrown. #' * If `r_eff` has length `len` but has `NA`s then `NA`s are filled in with `1`s. #' prepare_psis_r_eff <- function(r_eff, len) { if (isTRUE(is.null(r_eff) || all(is.na(r_eff)))) { r_eff <- rep(1, len) } else if (length(r_eff) == 1) { r_eff <- rep(r_eff, len) } else if (length(r_eff) != len) { stop( "'r_eff' must have one value or one value per observation.", call. = FALSE ) } else if (anyNA(r_eff)) { message("Replacing NAs in `r_eff` with 1s") r_eff[is.na(r_eff)] <- 1 } r_eff } #' Check if `psis()` was called from one of the loo methods #' #' @noRd #' @return `TRUE` if the `loo()` array, matrix, or function method is found in #' the active call list, `FALSE` otherwise. #' called_from_loo <- function() { calls <- sys.calls() txt <- unlist(lapply(calls, deparse)) patts <- "loo.array\\(|loo.matrix\\(|loo.function\\(" check <- sapply(txt, function(x) grepl(patts, x)) isTRUE(any(check)) } #' Warning message about missing `r_eff` argument #' @noRd throw_psis_r_eff_warning <- function() { warning( "Relative effective sample sizes ('r_eff' argument) not specified. ", "PSIS ESS (n_eff) will not be adjusted based on MCMC ESS (n_eff).", call. = FALSE ) } loo/R/print.R0000644000176200001440000001255414566461605012547 0ustar liggesusers#' Print methods #' #' @export #' @param x An object returned by [loo()], [psis()], or [waic()]. #' @param digits An integer passed to [base::round()]. #' @param plot_k Logical. If `TRUE` the estimates of the Pareto shape #' parameter \eqn{k} are plotted. Ignored if `x` was generated by #' [waic()]. To just plot \eqn{k} without printing use the #' [plot()][pareto-k-diagnostic] method for 'loo' objects. #' @param ... Arguments passed to [plot.psis_loo()] if `plot_k` is #' `TRUE`. #' #' @return `x`, invisibly. #' #' @seealso [pareto-k-diagnostic] #' print.loo <- function(x, digits = 1, ...) { cat("\n") print_dims(x) if (!("estimates" %in% names(x))) { x <- convert_old_object(x) } cat("\n") print(.fr(as.data.frame(x$estimates), digits), quote = FALSE) return(invisible(x)) } #' @export #' @rdname print.loo print.waic <- function(x, digits = 1, ...) { print.loo(x, digits = digits, ...) throw_pwaic_warnings(x$pointwise[, "p_waic"], digits = digits, warn = FALSE) invisible(x) } #' @export #' @rdname print.loo print.psis_loo <- function(x, digits = 1, plot_k = FALSE, ...) { print.loo(x, digits = digits, ...) cat("------\n") print_mcse_summary(x, digits = digits) S <- dim(x)[1] k_threshold <- ps_khat_threshold(S) if (length(pareto_k_ids(x, threshold = k_threshold))) { cat("\n") } print(pareto_k_table(x), digits = digits) cat(.k_help()) if (plot_k) { graphics::plot(x, ...) } invisible(x) } #' @export #' @rdname print.loo print.importance_sampling_loo <- function(x, digits = 1, plot_k = FALSE, ...) { print.loo(x, digits = digits, ...) 
cat("------\n") invisible(x) } #' @export #' @rdname print.loo print.psis_loo_ap <- function(x, digits = 1, plot_k = FALSE, ...) { print.loo(x, digits = digits, ...) cat("------\n") cat("Posterior approximation correction used.\n") attr(x, 'r_eff') <- 1 print_mcse_summary(x, digits = digits) S <- dim(x)[1] k_threshold <- ps_khat_threshold(S) if (length(pareto_k_ids(x, threshold = k_threshold))) { cat("\n") } print(pareto_k_table(x), digits = digits) cat(.k_help()) if (plot_k) { graphics::plot(x, ...) } invisible(x) } #' @export #' @rdname print.loo print.psis <- function(x, digits = 1, plot_k = FALSE, ...) { print_dims(x) print_reff_summary(x, digits) print(pareto_k_table(x), digits = digits) cat(.k_help()) if (plot_k) { graphics::plot(x, ...) } invisible(x) } #' @export #' @rdname print.loo print.importance_sampling <- function(x, digits = 1, plot_k = FALSE, ...) { print_dims(x) if (plot_k) { graphics::plot(x, ...) } invisible(x) } # internal ---------------------------------------------------------------- #' Print dimensions of log-likelihood or log-weights matrix #' #' @export #' @keywords internal #' #' @param x The object returned by [psis()], [loo()], or [waic()]. #' @param ... Ignored. print_dims <- function(x, ...) UseMethod("print_dims") #' @rdname print_dims #' @export print_dims.importance_sampling <- function(x, ...) { cat( "Computed from", paste(dim(x), collapse = " by "), "log-weights matrix.\n" ) } #' @rdname print_dims #' @export print_dims.psis_loo <- function(x, ...) { cat( "Computed from", paste(dim(x), collapse = " by "), "log-likelihood matrix.\n" ) } #' @rdname print_dims #' @export print_dims.importance_sampling_loo <- function(x, ...) { cat( "Computed from", paste(dim(x), collapse = " by "), "log-likelihood matrix using", class(x)[1], ".\n" ) } #' @rdname print_dims #' @export print_dims.waic <- function(x, ...) { cat( "Computed from", paste(dim(x), collapse = " by "), "log-likelihood matrix.\n" ) } #' @rdname print_dims #' @export print_dims.kfold <- function(x, ...) { K <- attr(x, "K", exact = TRUE) if (!is.null(K)) { cat("Based on", paste0(K, "-fold"), "cross-validation.\n") } } #' @rdname print_dims #' @export print_dims.psis_loo_ss <- function(x, ...) { cat( "Computed from", paste(c(dim(x)[1], nobs(x)) , collapse = " by "), "subsampled log-likelihood\nvalues from", length(x$loo_subsampling$elpd_loo_approx), "total observations.\n" ) } print_reff_summary <- function(x, digits) { r_eff <- x$diagnostics$r_eff if (is.null(r_eff)) { if (!is.null(x$psis_object)) { r_eff <- attr(x$psis_object,'r_eff') } else { r_eff <- attr(x,'r_eff') } } if (!is.null(r_eff)) { if (all(r_eff==1)) { cat( "MCSE and ESS estimates assume independent draws (r_eff=1).\n" ) } else { cat(paste0( "MCSE and ESS estimates assume MCMC draws (r_eff in [", .fr(min(r_eff), digits), ", ", .fr(max(r_eff), digits), "]).\n" )) } } } print_mcse_summary <- function(x, digits) { mcse_val <- mcse_loo(x) cat( "MCSE of elpd_loo is", paste0(.fr(mcse_val, digits), ".\n") ) print_reff_summary(x, digits) } # print and warning helpers .fr <- function(x, digits) format(round(x, digits), nsmall = digits) .warn <- function(..., call. = FALSE) warning(..., call. = call.) .k_help <- function() "See help('pareto-k-diagnostic') for details.\n" # compatibility with old loo objects convert_old_object <- function(x, digits = 1, ...) 
{ z <- x[-grep("pointwise|pareto_k|n_eff", names(x))] uz <- unlist(z) nms <- names(uz) ses <- grepl("se", nms) list(estimates = data.frame(Estimate = uz[!ses], SE = uz[ses])) } loo/R/loo-package.R0000644000176200001440000001027114641333357013563 0ustar liggesusers#' Efficient LOO-CV and WAIC for Bayesian models #' #' @docType package #' @name loo-package #' #' @importFrom stats sd var quantile setNames weights rnorm qnorm #' @importFrom matrixStats logSumExp colLogSumExps colSums2 colVars colMaxs #' #' @description #' \if{html}{ #' \figure{stanlogo.png}{options: width="50" alt="mc-stan.org"} #' } #' *Stan Development Team* #' #' This package implements the methods described in Vehtari, Gelman, and #' Gabry (2017), Vehtari, Simpson, Gelman, Yao, and Gabry (2024), and #' Yao et al. (2018). To get started see the **loo** package #' [vignettes](https://mc-stan.org/loo/articles/index.html), the #' [loo()] function for efficient approximate leave-one-out #' cross-validation (LOO-CV), the [psis()] function for the Pareto #' smoothed importance sampling (PSIS) algorithm, or #' [loo_model_weights()] for an implementation of Bayesian stacking of #' predictive distributions from multiple models. #' #' #' @details Leave-one-out cross-validation (LOO-CV) and the widely applicable #' information criterion (WAIC) are methods for estimating pointwise #' out-of-sample prediction accuracy from a fitted Bayesian model using the #' log-likelihood evaluated at the posterior simulations of the parameter #' values. LOO-CV and WAIC have various advantages over simpler estimates of #' predictive error such as AIC and DIC but are less used in practice because #' they involve additional computational steps. This package implements the #' fast and stable computations for approximate LOO-CV laid out in Vehtari, #' Gelman, and Gabry (2017). From existing posterior simulation draws, we #' compute LOO-CV using Pareto smoothed importance sampling (PSIS; Vehtari, #' Simpson, Gelman, Yao, and Gabry, 2024), a new procedure for stabilizing #' and diagnosing importance weights. As a byproduct of our calculations, #' we also obtain approximate standard errors for estimated predictive #' errors and for comparing of predictive errors between two models. #' #' We recommend PSIS-LOO-CV instead of WAIC, because PSIS provides useful #' diagnostics and effective sample size and Monte Carlo standard error #' estimates. #' #' #' @template loo-and-psis-references #' @template stacking-references #' @template loo-large-data-references #' #' @references #' Epifani, I., MacEachern, S. N., and Peruggia, M. (2008). Case-deletion #' importance sampling estimators: Central limit theorems and related results. #' *Electronic Journal of Statistics* **2**, 774-806. #' #' Gelfand, A. E. (1996). Model determination using sampling-based methods. In #' *Markov Chain Monte Carlo in Practice*, ed. W. R. Gilks, S. Richardson, #' D. J. Spiegelhalter, 145-162. London: Chapman and Hall. #' #' Gelfand, A. E., Dey, D. K., and Chang, H. (1992). Model determination using #' predictive distributions with implementation via sampling-based methods. In #' *Bayesian Statistics 4*, ed. J. M. Bernardo, J. O. Berger, A. P. Dawid, #' and A. F. M. Smith, 147-167. Oxford University Press. #' #' Gelman, A., Hwang, J., and Vehtari, A. (2014). Understanding predictive #' information criteria for Bayesian models. *Statistics and Computing* #' **24**, 997-1016. #' #' Ionides, E. L. (2008). Truncated importance sampling. 
*Journal of #' Computational and Graphical Statistics* **17**, 295-311. #' #' Koopman, S. J., Shephard, N., and Creal, D. (2009). Testing the assumptions #' behind importance sampling. *Journal of Econometrics* **149**, 2-11. #' #' Peruggia, M. (1997). On the variability of case-deletion importance sampling #' weights in the Bayesian linear model. *Journal of the American #' Statistical Association* **92**, 199-207. #' #' Stan Development Team (2017). The Stan C++ Library, Version 2.17.0. #' . #' #' Stan Development Team (2018). RStan: the R interface to Stan, Version 2.17.3. #' . #' #' Watanabe, S. (2010). Asymptotic equivalence of Bayes cross validation and #' widely application information criterion in singular learning theory. #' *Journal of Machine Learning Research* **11**, 3571-3594. #' #' Zhang, J., and Stephens, M. A. (2009). A new and efficient estimation method #' for the generalized Pareto distribution. *Technometrics* **51**, #' 316-325. #' NULL loo/R/extract_log_lik.R0000644000176200001440000000545313762013700014547 0ustar liggesusers#' Extract pointwise log-likelihood from a Stan model #' #' Convenience function for extracting the pointwise log-likelihood #' matrix or array from a `stanfit` object from the \pkg{rstan} package. #' Note: recent versions of \pkg{rstan} now include a `loo()` method for #' `stanfit` objects that handles this internally. #' #' @export #' @param stanfit A `stanfit` object (\pkg{rstan} package). #' @param parameter_name A character string naming the parameter (or generated #' quantity) in the Stan model corresponding to the log-likelihood. #' @param merge_chains If `TRUE` (the default), all Markov chains are #' merged together (i.e., stacked) and a matrix is returned. If `FALSE` #' they are kept separate and an array is returned. #' @return If `merge_chains=TRUE`, an \eqn{S} by \eqn{N} matrix of #' (post-warmup) extracted draws, where \eqn{S} is the size of the posterior #' sample and \eqn{N} is the number of data points. If #' `merge_chains=FALSE`, an \eqn{I} by \eqn{C} by \eqn{N} array, where #' \eqn{I \times C = S}{I * C = S}. #' #' #' @details Stan does not automatically compute and store the log-likelihood. It #' is up to the user to incorporate it into the Stan program if it is to be #' extracted after fitting the model. In a Stan model, the pointwise log #' likelihood can be coded as a vector in the transformed parameters block #' (and then summed up in the model block) or it can be coded entirely in the #' generated quantities block. We recommend using the generated quantities #' block so that the computations are carried out only once per iteration #' rather than once per HMC leapfrog step. #' #' For example, the following is the `generated quantities` block for #' computing and saving the log-likelihood for a linear regression model with #' `N` data points, outcome `y`, predictor matrix `X`, #' coefficients `beta`, and standard deviation `sigma`: #' #' `vector[N] log_lik;` #' #' `for (n in 1:N) log_lik[n] = normal_lpdf(y[n] | X[n, ] * beta, sigma);` #' #' @references #' Stan Development Team (2017). The Stan C++ Library, Version 2.16.0. #' #' #' Stan Development Team (2017). RStan: the R interface to Stan, Version 2.16.1. #' #' extract_log_lik <- function(stanfit, parameter_name = "log_lik", merge_chains = TRUE) { if (!inherits(stanfit, "stanfit")) stop("Not a stanfit object.", call. = FALSE) if (stanfit@mode != 0) stop("Stan model does not contain posterior draws.", call. 
= FALSE) if (!requireNamespace("rstan", quietly = TRUE)) stop("Please load the 'rstan' package.", call. = FALSE) if (merge_chains) { log_lik <- as.matrix(stanfit, pars = parameter_name) } else { log_lik <- as.array(stanfit, pars = parameter_name) } unname(log_lik) } loo/R/waic.R0000644000176200001440000001156214641333357012330 0ustar liggesusers#' Widely applicable information criterion (WAIC) #' #' The `waic()` methods can be used to compute WAIC from the pointwise #' log-likelihood. However, we recommend LOO-CV using PSIS (as implemented by #' the [loo()] function) because PSIS provides useful diagnostics as well as #' effective sample size and Monte Carlo estimates. #' #' @export waic waic.array waic.matrix waic.function #' @inheritParams loo #' #' @return A named list (of class `c("waic", "loo")`) with components: #' #' \describe{ #' \item{`estimates`}{ #' A matrix with two columns (`"Estimate"`, `"SE"`) and three #' rows (`"elpd_waic"`, `"p_waic"`, `"waic"`). This contains #' point estimates and standard errors of the expected log pointwise predictive #' density (`elpd_waic`), the effective number of parameters #' (`p_waic`) and the information criterion `waic` (which is just #' `-2 * elpd_waic`, i.e., converted to deviance scale). #' } #' \item{`pointwise`}{ #' A matrix with three columns (and number of rows equal to the number of #' observations) containing the pointwise contributions of each of the above #' measures (`elpd_waic`, `p_waic`, `waic`). #' } #' } #' #' @seealso #' * The __loo__ package [vignettes](https://mc-stan.org/loo/articles/) and #' Vehtari, Gelman, and Gabry (2017) and Vehtari, Simpson, Gelman, Yao, #' and Gabry (2024) for more details on why we prefer `loo()` to `waic()`. #' * [loo_compare()] for comparing models on approximate LOO-CV or WAIC. #' #' @references #' Watanabe, S. (2010). Asymptotic equivalence of Bayes cross validation and #' widely application information criterion in singular learning theory. #' *Journal of Machine Learning Research* **11**, 3571-3594. #' #' @template loo-and-psis-references #' #' @examples #' ### Array and matrix methods #' LLarr <- example_loglik_array() #' dim(LLarr) #' #' LLmat <- example_loglik_matrix() #' dim(LLmat) #' #' waic_arr <- waic(LLarr) #' waic_mat <- waic(LLmat) #' identical(waic_arr, waic_mat) #' #' #' \dontrun{ #' log_lik1 <- extract_log_lik(stanfit1) #' log_lik2 <- extract_log_lik(stanfit2) #' (waic1 <- waic(log_lik1)) #' (waic2 <- waic(log_lik2)) #' print(compare(waic1, waic2), digits = 2) #' } #' waic <- function(x, ...) { UseMethod("waic") } #' @export #' @templateVar fn waic #' @template array #' waic.array <- function(x, ...) { waic.matrix(llarray_to_matrix(x), ...) } #' @export #' @templateVar fn waic #' @template matrix #' waic.matrix <- function(x, ...) { ll <- validate_ll(x) lldim <- dim(ll) lpd <- matrixStats::colLogSumExps(ll) - log(nrow(ll)) # colLogMeanExps p_waic <- matrixStats::colVars(ll) elpd_waic <- lpd - p_waic waic <- -2 * elpd_waic pointwise <- cbind(elpd_waic, p_waic, waic) throw_pwaic_warnings(pointwise[, "p_waic"], digits = 1) return(waic_object(pointwise, dims = lldim)) } #' @export #' @templateVar fn waic #' @template function #' @param draws,data,... For the function method only. See the #' **Methods (by class)** section below for details on these arguments. 
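#'   For example, `data` is typically a data frame or matrix with one row per
#'   observation and `draws` holds the posterior draws; the log-likelihood
#'   function `x` is then called once per observation as
#'   `x(data_i = data[i, , drop = FALSE], draws = draws)`.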
#' waic.function <- function(x, ..., data = NULL, draws = NULL) { stopifnot(is.data.frame(data) || is.matrix(data), !is.null(draws)) .llfun <- validate_llfun(x) N <- dim(data)[1] S <- length(as.vector(.llfun(data_i = data[1,, drop=FALSE], draws = draws, ...))) waic_list <- lapply(seq_len(N), FUN = function(i) { ll_i <- .llfun(data_i = data[i,, drop=FALSE], draws = draws, ...) ll_i <- as.vector(ll_i) lpd_i <- logMeanExp(ll_i) p_waic_i <- var(ll_i) elpd_waic_i <- lpd_i - p_waic_i c(elpd_waic = elpd_waic_i, p_waic = p_waic_i) }) pointwise <- do.call(rbind, waic_list) pointwise <- cbind(pointwise, waic = -2 * pointwise[, "elpd_waic"]) throw_pwaic_warnings(pointwise[, "p_waic"], digits = 1) waic_object(pointwise, dims = c(S, N)) } #' @export dim.waic <- function(x) { attr(x, "dims") } #' @rdname waic #' @export is.waic <- function(x) { inherits(x, "waic") && is.loo(x) } # internal ---------------------------------------------------------------- # structure the object returned by the waic methods waic_object <- function(pointwise, dims) { estimates <- table_of_estimates(pointwise) out <- nlist(estimates, pointwise) # maintain backwards compatibility old_nms <- c("elpd_waic", "p_waic", "waic", "se_elpd_waic", "se_p_waic", "se_waic") out <- c(out, setNames(as.list(estimates), old_nms)) structure( out, dims = dims, class = c("waic", "loo") ) } # waic warnings # @param p 'p_waic' estimates throw_pwaic_warnings <- function(p, digits = 1, warn = TRUE) { badp <- p > 0.4 if (any(badp)) { count <- sum(badp) prop <- count / length(badp) msg <- paste0("\n", count, " (", .fr(100 * prop, digits), "%) p_waic estimates greater than 0.4. ", "We recommend trying loo instead.") if (warn) .warn(msg) else cat(msg, "\n") } invisible(NULL) } loo/R/helpers.R0000644000176200001440000001175214641333357013050 0ustar liggesusers#' Detect if OS is Windows #' @noRd os_is_windows <- function() { checkmate::test_os("windows") } #' More stable version of `log(mean(exp(x)))` #' #' @noRd #' @param x A numeric vector. #' @return A scalar equal to `log(mean(exp(x)))`. #' logMeanExp <- function(x) { logS <- log(length(x)) matrixStats::logSumExp(x) - logS } #' More stable version of `log(colMeans(exp(x)))` #' #' @noRd #' @param x A matrix. #' @return A vector where each element is `logMeanExp()` of a column of `x`. #' colLogMeanExps <- function(x) { logS <- log(nrow(x)) matrixStats::colLogSumExps(x) - logS } #' Compute point estimates and standard errors from pointwise vectors #' #' @noRd #' @param x A matrix. #' @return An `ncol(x)` by 2 matrix with columns `"Estimate"` and `"SE"` #' and rownames equal to `colnames(x)`. #' table_of_estimates <- function(x) { out <- cbind( Estimate = matrixStats::colSums2(x), SE = sqrt(nrow(x) * matrixStats::colVars(x)) ) rownames(out) <- colnames(x) return(out) } # validating and reshaping arrays/matrices ------------------------------- #' Check for `NA` and non-finite values in log-lik (or log-ratios) #' array/matrix/vector #' #' @noRd #' @param x Array/matrix/vector of log-likelihood or log-ratio values. #' @return `x`, invisibly, if no error is thrown. #' validate_ll <- function(x) { if (is.list(x)) { stop("List not allowed as input.") } else if (anyNA(x)) { stop("NAs not allowed in input.") } else if (any(x == Inf)) { stop("All input values must be finite or -Inf.") } invisible(x) } #' Convert iter by chain by obs array to (iter * chain) by obs matrix #' #' @noRd #' @param x Array to convert. #' @return An (iter * chain) by obs matrix. 
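#' @examples
#' # Rough sketch of the reshaping (not run): chains are stacked on top of each
#' # other, so a 500 (iter) by 2 (chain) by 32 (obs) array becomes a 1000 by 32
#' # matrix, as with the example objects shipped with the package:
#' # LLarr <- example_loglik_array()    # dim: 500 2 32
#' # LLmat <- llarray_to_matrix(LLarr)  # dim: 1000 32
#' # identical(LLmat, example_loglik_matrix())  # expected TRUE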
#' llarray_to_matrix <- function(x) { stopifnot(is.array(x), length(dim(x)) == 3) xdim <- dim(x) dim(x) <- c(prod(xdim[1:2]), xdim[3]) unname(x) } #' Convert (iter * chain) by obs matrix to iter by chain by obs array #' #' @noRd #' @param x matrix to convert. #' @param chain_id vector of chain ids. #' @return iter by chain by obs array #' llmatrix_to_array <- function(x, chain_id) { stopifnot(is.matrix(x), all(chain_id == as.integer(chain_id))) lldim <- dim(x) n_chain <- length(unique(chain_id)) chain_id <- as.integer(chain_id) chain_counts <- as.numeric(table(chain_id)) if (length(chain_id) != lldim[1]) { stop("Number of rows in matrix not equal to length(chain_id).", call. = FALSE) } else if (any(chain_counts != chain_counts[1])) { stop("Not all chains have same number of iterations.", call. = FALSE) } else if (max(chain_id) != n_chain) { stop("max(chain_id) not equal to the number of chains.", call. = FALSE) } n_iter <- lldim[1] / n_chain n_obs <- lldim[2] a <- array(data = NA, dim = c(n_iter, n_chain, n_obs)) for (c in seq_len(n_chain)) { a[, c, ] <- x[chain_id == c, , drop = FALSE] } return(a) } #' Validate that log-lik function exists and has correct arg names #' #' @noRd #' @param x A function with arguments `data_i` and `draws`. #' @return Either returns `x` or throws an error. #' validate_llfun <- function(x) { f <- match.fun(x) must_have <- c("data_i", "draws") arg_names <- names(formals(f)) if (!all(must_have %in% arg_names)) { stop( "Log-likelihood function must have at least the arguments ", "'data_i' and 'draws'", call. = FALSE ) } return(f) } #' Named lists #' #' Create a named list using specified names or, if names are omitted, using the #' names of the objects in the list. The code `list(a = a, b = b)` becomes #' `nlist(a,b)` and `list(a = a, b = 2)` becomes `nlist(a, b = 2)`, etc. #' #' @export #' @keywords internal #' @param ... Objects to include in the list. #' @return A named list. #' @examples #' #' # All variables already defined #' a <- rnorm(100) #' b <- mat.or.vec(10, 3) #' nlist(a,b) #' #' # Define some variables in the call and take the rest from the environment #' nlist(a, b, veggies = c("lettuce", "spinach"), fruits = c("banana", "papaya")) #' nlist <- function(...) { m <- match.call() out <- list(...) no_names <- is.null(names(out)) has_name <- if (no_names) FALSE else nzchar(names(out)) if (all(has_name)) return(out) nms <- as.character(m)[-1L] if (no_names) { names(out) <- nms } else { names(out)[!has_name] <- nms[!has_name] } return(out) } # Check how many cores to use and throw deprecation warning if loo.cores is used loo_cores <- function(cores) { loo_cores_op <- getOption("loo.cores", NA) if (!is.na(loo_cores_op) && (loo_cores_op != cores)) { cores <- loo_cores_op warning("'loo.cores' is deprecated, please use 'mc.cores' or pass 'cores' explicitly.", call. = FALSE) } return(cores) } # nocov start # release reminders (for devtools) release_questions <- function() { c( "Have you updated references?", "Have you updated inst/CITATION?", "Have you updated the vignettes?" ) } # nocov end is_constant <- function(x, tol = .Machine$double.eps) { abs(max(x) - min(x)) < tol } loo/R/sis.R0000644000176200001440000001134113701164066012171 0ustar liggesusers#' Standard importance sampling (SIS) #' #' Implementation of standard importance sampling (SIS). #' #' @param log_ratios An array, matrix, or vector of importance ratios on the log #' scale (for Importance sampling LOO, these are *negative* log-likelihood #' values). 
See the **Methods (by class)** section below for a detailed #' description of how to specify the inputs for each method. #' @template cores #' @param ... Arguments passed on to the various methods. #' @param r_eff Vector of relative effective sample size estimates containing #' one element per observation. The values provided should be the relative #' effective sample sizes of `1/exp(log_ratios)` (i.e., `1/ratios`). #' This is related to the relative efficiency of estimating the normalizing #' term in self-normalizing importance sampling. See the [relative_eff()] #' helper function for computing `r_eff`. If using `psis` with #' draws of the `log_ratios` not obtained from MCMC then the warning #' message thrown when not specifying `r_eff` can be disabled by #' setting `r_eff` to `NA`. #' #' @return The `sis()` methods return an object of class `"sis"`, #' which is a named list with the following components: #' #' \describe{ #' \item{`log_weights`}{ #' Vector or matrix of smoothed but *unnormalized* log #' weights. To get normalized weights use the #' [`weights()`][weights.importance_sampling] method provided for objects of #' class `sis`. #' } #' \item{`diagnostics`}{ #' A named list containing one vector: #' * `pareto_k`: Not used in `sis`, all set to 0. #' * `n_eff`: effective sample size estimates. #' } #' } #' #' Objects of class `"sis"` also have the following [attributes][attributes()]: #' \describe{ #' \item{`norm_const_log`}{ #' Vector of precomputed values of `colLogSumExps(log_weights)` that are #' used internally by the `weights` method to normalize the log weights. #' } #' \item{`r_eff`}{ #' If specified, the user's `r_eff` argument. #' } #' \item{`tail_len`}{ #' Not used for `sis`. #' } #' \item{`dims`}{ #' Integer vector of length 2 containing `S` (posterior sample size) #' and `N` (number of observations). #' } #' \item{`method`}{ #' Method used for importance sampling, here `sis`. #' } #' } #' #' @seealso #' * [psis()] for approximate LOO-CV using PSIS. #' * [loo()] for approximate LOO-CV. #' * [pareto-k-diagnostic] for PSIS diagnostics. #' #' @template loo-and-psis-references #' #' @examples #' log_ratios <- -1 * example_loglik_array() #' r_eff <- relative_eff(exp(-log_ratios)) #' sis_result <- sis(log_ratios, r_eff = r_eff) #' str(sis_result) #' #' # extract smoothed weights #' lw <- weights(sis_result) # default args are log=TRUE, normalize=TRUE #' ulw <- weights(sis_result, normalize=FALSE) # unnormalized log-weights #' #' w <- weights(sis_result, log=FALSE) # normalized weights (not log-weights) #' uw <- weights(sis_result, log=FALSE, normalize = FALSE) # unnormalized weights #' #' @export sis <- function(log_ratios, ...) 
UseMethod("sis") #' @export #' @templateVar fn sis #' @template array #' sis.array <- function(log_ratios, ..., r_eff = NULL, cores = getOption("mc.cores", 1)) { importance_sampling.array(log_ratios = log_ratios, ..., r_eff = r_eff, cores = cores, method = "sis") } #' @export #' @templateVar fn sis #' @template matrix #' sis.matrix <- function(log_ratios, ..., r_eff = NULL, cores = getOption("mc.cores", 1)) { importance_sampling.matrix(log_ratios, ..., r_eff = r_eff, cores = cores, method = "sis") } #' @export #' @templateVar fn sis #' @template vector #' sis.default <- function(log_ratios, ..., r_eff = NULL) { importance_sampling.default(log_ratios = log_ratios, ..., r_eff = r_eff, method = "sis") } #' @rdname psis #' @export is.sis <- function(x) { inherits(x, "sis") && is.list(x) } # internal ---------------------------------------------------------------- #' Standard IS on a single vector #' #' @noRd #' @param log_ratios_i A vector of log importance ratios (for `loo()`, negative #' log likelihoods). #' @param ... Not used. Included to conform to PSIS API. #' #' @details Implementation standard importance sampling. #' @return A named list containing: #' * `lw`: vector of unnormalized log weights #' * `pareto_k`: scalar Pareto k estimate. For IS, this defaults to 0. do_sis_i <- function(log_ratios_i, ...) { S <- length(log_ratios_i) list(log_weights = log_ratios_i, pareto_k = 0) } loo/R/gpdfit.R0000644000176200001440000000624514641333357012664 0ustar liggesusers#' Estimate parameters of the Generalized Pareto distribution #' #' Given a sample \eqn{x}, Estimate the parameters \eqn{k} and \eqn{\sigma} of #' the generalized Pareto distribution (GPD), assuming the location parameter is #' 0. By default the fit uses a prior for \eqn{k}, which will stabilize #' estimates for very small sample sizes (and low effective sample sizes in the #' case of MCMC samples). The weakly informative prior is a Gaussian prior #' centered at 0.5. #' #' @export #' @param x A numeric vector. The sample from which to estimate the parameters. #' @param wip Logical indicating whether to adjust \eqn{k} based on a weakly #' informative Gaussian prior centered on 0.5. Defaults to `TRUE`. #' @param min_grid_pts The minimum number of grid points used in the fitting #' algorithm. The actual number used is `min_grid_pts + floor(sqrt(length(x)))`. #' @param sort_x If `TRUE` (the default), the first step in the fitting #' algorithm is to sort the elements of `x`. If `x` is already #' sorted in ascending order then `sort_x` can be set to `FALSE` to #' skip the initial sorting step. #' @return A named list with components `k` and `sigma`. #' #' @details Here the parameter \eqn{k} is the negative of \eqn{k} in Zhang & #' Stephens (2009). #' #' @seealso [psis()], [pareto-k-diagnostic] #' #' @references #' Zhang, J., and Stephens, M. A. (2009). A new and efficient estimation method #' for the generalized Pareto distribution. *Technometrics* **51**, 316-325. 
#' gpdfit <- function(x, wip = TRUE, min_grid_pts = 30, sort_x = TRUE) { # See section 4 of Zhang and Stephens (2009) if (sort_x) { x <- sort.int(x) } N <- length(x) prior <- 3 M <- min_grid_pts + floor(sqrt(N)) jj <- seq_len(M) xstar <- x[floor(N / 4 + 0.5)] # first quartile of sample theta <- 1 / x[N] + (1 - sqrt(M / (jj - 0.5))) / prior / xstar l_theta <- N * lx(theta, x) # profile log-lik w_theta <- exp(l_theta - matrixStats::logSumExp(l_theta)) # normalize theta_hat <- sum(theta * w_theta) k <- mean.default(log1p(-theta_hat * x)) sigma <- -k / theta_hat if (wip) { k <- adjust_k_wip(k, n = N) } if (is.na(k)) { k <- Inf } nlist(k, sigma) } # internal ---------------------------------------------------------------- lx <- function(a,x) { a <- -a k <- vapply(a, FUN = function(a_i) mean(log1p(a_i * x)), FUN.VALUE = numeric(1)) log(a / k) - k - 1 } #' Adjust k based on weakly informative prior, Gaussian centered on 0.5. This #' will stabilize estimates for very small Monte Carlo sample sizes and low neff #' cases. #' #' @noRd #' @param k Scalar khat estimate. #' @param n Integer number of tail samples used to fit GPD. #' @return Scalar adjusted khat estimate. #' adjust_k_wip <- function(k, n) { a <- 10 n_plus_a <- n + a k * n / n_plus_a + a * 0.5 / n_plus_a } #' Inverse CDF of generalized Pareto distribution #' (assuming location parameter is 0) #' #' @noRd #' @param p Vector of probabilities. #' @param k Scalar shape parameter. #' @param sigma Scalar scale parameter. #' @return Vector of quantiles. #' qgpd <- function(p, k, sigma) { if (is.nan(sigma) || sigma <= 0) { return(rep(NaN, length(p))) } sigma * expm1(-k * log1p(-p)) / k } loo/R/example_log_lik_array.R0000644000176200001440000000211013575772017015730 0ustar liggesusers#' Objects to use in examples and tests #' #' Example pointwise log-likelihood objects to use in demonstrations and tests. #' See the **Value** and **Examples** sections below. #' #' @export #' @return #' `example_loglik_array()` returns a 500 (draws) x 2 (chains) x 32 #' (observations) pointwise log-likelihood array. #' #' `example_loglik_matrix()` returns the same pointwise log-likelihood values #' as `example_loglik_array()` but reshaped into a 1000 (draws*chains) x 32 #' (observations) matrix. #' #' @examples #' LLarr <- example_loglik_array() #' (dim_arr <- dim(LLarr)) #' LLmat <- example_loglik_matrix() #' (dim_mat <- dim(LLmat)) #' #' all.equal(dim_mat[1], dim_arr[1] * dim_arr[2]) #' all.equal(dim_mat[2], dim_arr[3]) #' #' all.equal(LLarr[, 1, ], LLmat[1:500, ]) #' all.equal(LLarr[, 2, ], LLmat[501:1000, ]) #' example_loglik_array <- function() { # .example_loglik_array exists in R/sysdata.R return(.example_loglik_array) } #' @rdname example_loglik_array #' @export example_loglik_matrix <- function() { ll <- example_loglik_array() return(llarray_to_matrix(ll)) } loo/R/elpd.R0000644000176200001440000000375414566461605012341 0ustar liggesusers#' Generic (expected) log-predictive density #' #' The `elpd()` methods for arrays and matrices can compute the expected log #' pointwise predictive density for a new dataset or the log pointwise #' predictive density of the observed data (an overestimate of the elpd). #' #' @export #' @param x A log-likelihood array or matrix. The **Methods (by class)** #' section, below, has detailed descriptions of how to specify the inputs for #' each method. #' @param ... Currently ignored. #' #' @details The `elpd()` function is an S3 generic and methods are provided for #' 3-D pointwise log-likelihood arrays and matrices. 
#' #' @seealso The vignette *Holdout validation and K-fold cross-validation of Stan #' programs with the loo package* for demonstrations of using the `elpd()` #' methods. #' #' @examples #' # Calculate the lpd of the observed data #' LLarr <- example_loglik_array() #' elpd(LLarr) #' elpd <- function(x, ...) { UseMethod("elpd") } #' @export #' @templateVar fn elpd #' @template array #' elpd.array <- function(x, ...) { ll <- llarray_to_matrix(x) elpd.matrix(ll) } #' @export #' @templateVar fn elpd #' @template matrix #' elpd.matrix <- function(x, ...) { pointwise <- pointwise_elpd_calcs(x) elpd_object(pointwise, dim(x)) } # internal ---------------------------------------------------------------- pointwise_elpd_calcs <- function(ll){ elpd <- colLogSumExps(ll) - log(nrow(ll)) ic <- -2 * elpd cbind(elpd, ic) } elpd_object <- function(pointwise, dims) { if (!is.matrix(pointwise)) stop("Internal error ('pointwise' must be a matrix)") cols_to_summarize <- colnames(pointwise) estimates <- table_of_estimates(pointwise[, cols_to_summarize, drop=FALSE]) out <- nlist(estimates, pointwise) structure( out, dims = dims, class = c("elpd_generic", "loo") ) } #' @export print_dims.elpd_generic <- function(x, ...) { cat( "Computed from", paste(dim(x), collapse = " by "), "log-likelihood matrix using the generic elpd function\n" ) } loo/R/loo_compare.R0000644000176200001440000002763415100712211013670 0ustar liggesusers#' Model comparison #' #' @description Compare fitted models based on [ELPD][loo-glossary]. #' #' By default the print method shows only the most important information. Use #' `print(..., simplify=FALSE)` to print a more detailed summary. #' #' @export #' @param x An object of class `"loo"` or a list of such objects. If a list is #' used then the list names will be used as the model names in the output. See #' **Examples**. #' @param ... Additional objects of class `"loo"`, if not passed in as a single #' list. #' #' @return A matrix with class `"compare.loo"` that has its own #' print method. See the **Details** section. #' #' @details #' When comparing two fitted models, we can estimate the difference in their #' expected predictive accuracy by the difference in #' [`elpd_loo`][loo-glossary] or `elpd_waic` (or multiplied by \eqn{-2}, if #' desired, to be on the deviance scale). #' #' When using `loo_compare()`, the returned matrix will have one row per model #' and several columns of estimates. The values in the #' [`elpd_diff`][loo-glossary] and [`se_diff`][loo-glossary] columns of the #' returned matrix are computed by making pairwise comparisons between each #' model and the model with the largest ELPD (the model in the first row). For #' this reason the `elpd_diff` column will always have the value `0` in the #' first row (i.e., the difference between the preferred model and itself) and #' negative values in subsequent rows for the remaining models. #' #' To compute the standard error of the difference in [ELPD][loo-glossary] --- #' which should not be expected to equal the difference of the standard errors #' --- we use a paired estimate to take advantage of the fact that the same #' set of \eqn{N} data points was used to fit both models. These calculations #' should be most useful when \eqn{N} is large, because then non-normality of #' the distribution is not such an issue when estimating the uncertainty in #' these sums. 
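#' Concretely, the paired estimate sums the pointwise elpd differences over the
#' \eqn{N} observations and takes the standard error of that sum to be
#' \eqn{\sqrt{N}} times the standard deviation of the pointwise differences.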
These standard errors, for all their flaws, should give a #' better sense of uncertainty than what is obtained using the current #' standard approach of comparing differences of deviances to a Chi-squared #' distribution, a practice derived for Gaussian linear models or #' asymptotically, and which only applies to nested models in any case. #' Sivula et al. (2022) discuss the conditions when the normal #' approximation used for SE and `se_diff` is good. #' #' If more than \eqn{11} models are compared, we internally recompute the model #' differences using the median model by ELPD as the baseline model. We then #' estimate whether the differences in predictive performance are potentially #' due to chance as described by McLatchie and Vehtari (2023). This will flag #' a warning if it is deemed that there is a risk of over-fitting due to the #' selection process. In that case users are recommended to avoid model #' selection based on LOO-CV, and instead to favor model averaging/stacking or #' projection predictive inference. #' #' @seealso #' * The [FAQ page](https://mc-stan.org/loo/articles/online-only/faq.html) on #' the __loo__ website for answers to frequently asked questions. #' @template loo-and-compare-references #' #' @examples #' # very artificial example, just for demonstration! #' LL <- example_loglik_array() #' loo1 <- loo(LL) # should be worst model when compared #' loo2 <- loo(LL + 1) # should be second best model when compared #' loo3 <- loo(LL + 2) # should be best model when compared #' #' comp <- loo_compare(loo1, loo2, loo3) #' print(comp, digits = 2) #' #' # show more details with simplify=FALSE #' # (will be the same for all models in this artificial example) #' print(comp, simplify = FALSE, digits = 3) #' #' # can use a list of objects with custom names #' # will use apple, banana, and cherry, as the names in the output #' loo_compare(list("apple" = loo1, "banana" = loo2, "cherry" = loo3)) #' #' \dontrun{ #' # works for waic (and kfold) too #' loo_compare(waic(LL), waic(LL - 10)) #' } #' loo_compare <- function(x, ...) { UseMethod("loo_compare") } #' @rdname loo_compare #' @export loo_compare.default <- function(x, ...) { if (is.loo(x)) { dots <- list(...) loos <- c(list(x), dots) } else { if (!is.list(x) || !length(x)) { stop("'x' must be a list if not a 'loo' object.") } if (length(list(...))) { stop("If 'x' is a list then '...' should not be specified.") } loos <- x } # If subsampling is used if (any(sapply(loos, inherits, "psis_loo_ss"))) { return(loo_compare.psis_loo_ss_list(loos)) } loo_compare_checks(loos) comp <- loo_compare_matrix(loos) ord <- loo_compare_order(loos) # compute elpd_diff and se_elpd_diff relative to best model rnms <- rownames(comp) diffs <- mapply(FUN = elpd_diffs, loos[ord[1]], loos[ord]) elpd_diff <- apply(diffs, 2, sum) se_diff <- apply(diffs, 2, se_elpd_diff) comp <- cbind(elpd_diff = elpd_diff, se_diff = se_diff, comp) rownames(comp) <- rnms # run order statistics-based checks on models loo_order_stat_check(loos, ord) class(comp) <- c("compare.loo", class(comp)) return(comp) } #' @rdname loo_compare #' @export #' @param digits For the print method only, the number of digits to use when #' printing. #' @param simplify For the print method only, should only the essential columns #' of the summary matrix be printed? The entire matrix is always returned, but #' by default only the most important columns are printed. 
print.compare.loo <- function(x, ..., digits = 1, simplify = TRUE) { xcopy <- x if (inherits(xcopy, "old_compare.loo")) { if (NCOL(xcopy) >= 2 && simplify) { patts <- "^elpd_|^se_diff|^p_|^waic$|^looic$" xcopy <- xcopy[, grepl(patts, colnames(xcopy))] } } else if (NCOL(xcopy) >= 2 && simplify) { xcopy <- xcopy[, c("elpd_diff", "se_diff")] } print(.fr(xcopy, digits), quote = FALSE) invisible(x) } # internal ---------------------------------------------------------------- #' Compute pointwise elpd differences #' @noRd #' @param loo_a,loo_b Two `"loo"` objects. elpd_diffs <- function(loo_a, loo_b) { pt_a <- loo_a$pointwise pt_b <- loo_b$pointwise elpd <- grep("^elpd", colnames(pt_a)) pt_b[, elpd] - pt_a[, elpd] } #' Compute standard error of the elpd difference #' @noRd #' @param diffs Vector of pointwise elpd differences se_elpd_diff <- function(diffs) { N <- length(diffs) # As `elpd_diff` is defined as the sum of N independent components, # we can compute the standard error by using the standard deviation # of the N components and multiplying by `sqrt(N)`. sqrt(N) * sd(diffs) } #' Perform checks on `"loo"` objects before comparison #' @noRd #' @param loos List of `"loo"` objects. #' @return Nothing, just possibly throws errors/warnings. loo_compare_checks <- function(loos) { ## errors if (length(loos) <= 1L) { stop("'loo_compare' requires at least two models.", call.=FALSE) } if (!all(sapply(loos, is.loo))) { stop("All inputs should have class 'loo'.", call.=FALSE) } Ns <- vapply(loos, function(x) nrow(x$pointwise), integer(1)) if (any(Ns != Ns[1L])) { stop( paste0( "All models must have the same number of observations, but models have inconsistent observation counts: ", paste(paste0("'", find_model_names(loos), "' (", Ns, ")"), collapse = ", ") ), call. = FALSE ) } ## warnings yhash <- lapply(loos, attr, which = "yhash") yhash_ok <- sapply(yhash, function(x) { # ok only if all yhash are same (all NULL is ok) isTRUE(all.equal(x, yhash[[1]])) }) if (!all(yhash_ok)) { warning("Not all models have the same y variable. ('yhash' attributes do not match)", call. = FALSE) } if (all(sapply(loos, is.kfold))) { Ks <- unlist(lapply(loos, attr, which = "K")) if (!all(Ks == Ks[1])) { warning("Not all kfold objects have the same K value. ", "For a more accurate comparison use the same number of folds. ", call. = FALSE) } } else if (any(sapply(loos, is.kfold)) && any(sapply(loos, is.psis_loo))) { warning("Comparing LOO-CV to K-fold-CV. ", "For a more accurate comparison use the same number of folds ", "or loo for all models compared.", call. = FALSE) } } #' Find the model names associated with `"loo"` objects #' #' @export #' @keywords internal #' @param x List of `"loo"` objects. #' @return Character vector of model names the same length as `x.` #' find_model_names <- function(x) { stopifnot(is.list(x)) out_names <- character(length(x)) names1 <- names(x) names2 <- lapply(x, "attr", "model_name", exact = TRUE) names3 <- lapply(x, "[[", "model_name") names4 <- paste0("model", seq_along(x)) for (j in seq_along(x)) { if (isTRUE(nzchar(names1[j]))) { out_names[j] <- names1[j] } else if (length(names2[[j]])) { out_names[j] <- names2[[j]] } else if (length(names3[[j]])) { out_names[j] <- names3[[j]] } else { out_names[j] <- names4[j] } } out_names } #' Compute the loo_compare matrix #' @keywords internal #' @noRd #' @param loos List of `"loo"` objects. 
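#' @examples
#' # Sketch (not run): for the psis_loo objects loo1 and loo2 from the
#' # loo_compare() examples above, the result has one row per model (ordered so
#' # the largest elpd_loo comes first) and columns such as elpd_loo,
#' # se_elpd_loo, p_loo, se_p_loo, looic, and se_looic.
#' # loo_compare_matrix(list(loo1, loo2))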
loo_compare_matrix <- function(loos){ tmp <- sapply(loos, function(x) { est <- x$estimates setNames(c(est), nm = c(rownames(est), paste0("se_", rownames(est)))) }) colnames(tmp) <- find_model_names(loos) rnms <- rownames(tmp) comp <- tmp ord <- loo_compare_order(loos) comp <- t(comp)[ord, ] patts <- c("elpd", "p_", "^waic$|^looic$", "^se_waic$|^se_looic$") col_ord <- unlist(sapply(patts, function(p) grep(p, colnames(comp))), use.names = FALSE) comp <- comp[, col_ord] comp } #' Computes the order of loos for comparison #' @noRd #' @keywords internal #' @param loos List of `"loo"` objects. loo_compare_order <- function(loos){ tmp <- sapply(loos, function(x) { est <- x$estimates setNames(c(est), nm = c(rownames(est), paste0("se_", rownames(est)))) }) colnames(tmp) <- find_model_names(loos) rnms <- rownames(tmp) ord <- order(tmp[grep("^elpd", rnms), ], decreasing = TRUE) ord } #' Perform checks on `"loo"` objects __after__ comparison #' @noRd #' @keywords internal #' @param loos List of `"loo"` objects. #' @param ord List of `"loo"` object orderings. #' @return Nothing, just possibly throws errors/warnings. loo_order_stat_check <- function(loos, ord) { ## breaks if (length(loos) <= 11L) { # procedure cannot be diagnosed for fewer than ten candidate models # (total models = worst model + ten candidates) # break from function return(NULL) } ## warnings # compute the elpd differences from the median model baseline_idx <- middle_idx(ord) diffs <- mapply(FUN = elpd_diffs, loos[ord[baseline_idx]], loos[ord]) elpd_diff <- apply(diffs, 2, sum) # estimate the standard deviation of the upper-half-normal diff_median <- stats::median(elpd_diff) elpd_diff_trunc <- elpd_diff[elpd_diff >= diff_median] n_models <- sum(!is.na(elpd_diff_trunc)) candidate_sd <- sqrt(1 / n_models * sum(elpd_diff_trunc^2, na.rm = TRUE)) # estimate expected best diff under null hypothesis K <- length(loos) - 1 order_stat <- order_stat_heuristic(K, candidate_sd) if (max(elpd_diff) <= order_stat) { # flag warning if we suspect no model is theoretically better than the baseline warning("Difference in performance potentially due to chance.", "See McLatchie and Vehtari (2023) for details.", call. = FALSE) } } #' Returns the middle index of a vector #' @noRd #' @keywords internal #' @param vec A vector. #' @return Integer index value. middle_idx <- function(vec) floor(length(vec) / 2) #' Computes maximum order statistic from K Gaussians #' @noRd #' @keywords internal #' @param K Number of Gaussians. #' @param c Scaling of the order statistic. #' @return Numeric expected maximum from K samples from a Gaussian with mean #' zero and scale `"c"` order_stat_heuristic <- function(K, c) { qnorm(p = 1 - 1 / (K * 2), mean = 0, sd = c) } loo/R/zzz.R0000644000176200001440000000122213575772017012237 0ustar liggesusers.onAttach <- function(...) { ver <- utils::packageVersion("loo") packageStartupMessage("This is loo version ", ver) packageStartupMessage( "- Online documentation and vignettes at mc-stan.org/loo" ) packageStartupMessage( "- As of v2.0.0 loo defaults to 1 core ", "but we recommend using as many as possible. ", "Use the 'cores' argument or set options(mc.cores = NUM_CORES) ", "for an entire session. " ) if (os_is_windows()) { packageStartupMessage( "- Windows 10 users: loo may be very slow if 'mc.cores' ", "is set in your .Rprofile file (see https://github.com/stan-dev/loo/issues/94)." 
) } } loo/R/loo_subsample.R0000644000176200001440000014404615100205060014231 0ustar liggesusers#' Efficient approximate leave-one-out cross-validation (LOO) using subsampling, #' so that less costly and more approximate computation is made for all LOO-fold, #' and more costly and accurate computations are made only for m 1) { r_eff <- r_eff[idxs$idx] } # Compute elpd_loo if (!is.null(log_p) && !is.null(log_g)) { loo_obj <- loo_approximate_posterior.function( x = .llfun, data = data_subsample, draws = draws, log_p = log_p, log_g = log_g, save_psis = save_psis, cores = cores ) } else { loo_obj <- loo.function( x = .llfun, data = data_subsample, draws = draws, r_eff = r_eff, save_psis = save_psis, cores = cores ) } # Construct ss object and estimate loo_ss <- psis_loo_ss_object(x = loo_obj, idxs = idxs, elpd_loo_approx = elpd_loo_approx, loo_approximation = loo_approximation, loo_approximation_draws = loo_approximation_draws, estimator = estimator, .llfun = .llfun, .llgrad = .llgrad, .llhess = .llhess, data_dim = dim(data), ndraws = .ndraws(draws)) loo_ss } #' Update `psis_loo_ss` objects #' #' @details #' If `observations` is updated then if a vector of indices or a `psis_loo_ss` #' object is supplied the updated object will have exactly the observations #' indicated by the vector or `psis_loo_ss` object. If a single integer is #' supplied, new observations will be sampled to reach the supplied sample size. #' #' @export #' @inheritParams loo_subsample.function #' @param data,draws See [loo_subsample.function()]. #' @param object A `psis_loo_ss` object to update. #' @param ... Currently not used. #' @return A `psis_loo_ss` object. #' @importFrom stats update update.psis_loo_ss <- function(object, ..., data = NULL, draws = NULL, observations = NULL, r_eff = 1, cores = getOption("mc.cores", 1), loo_approximation = NULL, loo_approximation_draws = NULL, llgrad = NULL, llhess = NULL) { # Fallback if (is.null(observations) & is.null(loo_approximation) & is.null(loo_approximation_draws) & is.null(llgrad) & is.null(llhess)) return(object) if (!is.null(data)) { stopifnot(is.data.frame(data) || is.matrix(data)) checkmate::assert_true(all(dim(data) == object$loo_subsampling$data_dim)) } if (!is.null(draws)) { # No current checks } cores <- loo_cores(cores) # Update elpd approximations if (!is.null(loo_approximation) | !is.null(loo_approximation_draws)) { stopifnot(is.data.frame(data) || is.matrix(data) & !is.null(draws)) if (object$loo_subsampling$estimator %in% "hh_pps") { # HH estimation uses elpd_loo approx to sample, # so updating it will lead to incorrect results stop("Can not update loo_approximation when using PPS sampling.", call. 
= FALSE) } if (is.null(loo_approximation)) loo_approximation <- object$loo_subsampling$loo_approximation if (is.null(loo_approximation_draws)) loo_approximation_draws <- object$loo_subsampling$loo_approximation_draws if (is.null(llgrad)) .llgrad <- object$loo_subsampling$.llgrad else .llgrad <- validate_llfun(llgrad) if (is.null(llhess)) .llhess <- object$loo_subsampling$.llhess else .llhess <- validate_llfun(llhess) # Compute loo approximation elpd_loo_approx <- elpd_loo_approximation(.llfun = object$loo_subsampling$.llfun, data = data, draws = draws, cores = cores, loo_approximation = loo_approximation, loo_approximation_draws = loo_approximation_draws, .llgrad = .llgrad, .llhess = .llhess) # Update object object$loo_subsampling$elpd_loo_approx <- elpd_loo_approx object$loo_subsampling$loo_approximation <- loo_approximation object$loo_subsampling["loo_approximation_draws"] <- list(loo_approximation_draws) object$loo_subsampling$.llgrad <- .llgrad object$loo_subsampling$.llhess <- .llhess object$pointwise[, "elpd_loo_approx"] <- object$loo_subsampling$elpd_loo_approx[object$pointwise[, "idx"]] } # Update observations if (!is.null(observations)) { observations <- assert_observations(observations, N = object$loo_subsampling$data_dim[1], object$loo_subsampling$estimator) if (length(observations) == 1) { checkmate::assert_int(observations, lower = nobs(object) + 1) stopifnot(is.data.frame(data) || is.matrix(data) & !is.null(draws)) } # Compute subsample indices if (length(observations) > 1) { idxs <- compute_idxs(observations) } else { current_obs <- nobs(object) # If sampling with replacement if (object$loo_subsampling$estimator %in% c("hh_pps")) { idxs <- subsample_idxs(estimator = object$loo_subsampling$estimator, elpd_loo_approximation = object$loo_subsampling$elpd_loo_approx, observations = observations - current_obs) } # If sampling without replacement if (object$loo_subsampling$estimator %in% c("diff_srs", "srs")) { current_idxs <- obs_idx(object, rep = FALSE) new_idx <- (1:length(object$loo_subsampling$elpd_loo_approx))[-current_idxs] idxs <- subsample_idxs(estimator = object$loo_subsampling$estimator, elpd_loo_approximation = object$loo_subsampling$elpd_loo_approx[-current_idxs], observations = observations - current_obs) idxs$idx <- new_idx[idxs$idx] } } # Identify how to update object cidxs <- compare_idxs(idxs, object) # Compute new observations if (!is.null(cidxs$new)) { stopifnot(is.data.frame(data) || is.matrix(data) & !is.null(draws)) data_new_subsample <- data[cidxs$new$idx,, drop = FALSE] if (length(r_eff) > 1) r_eff <- r_eff[cidxs$new$idx] if (!is.null(object$approximate_posterior$log_p) & !is.null(object$approximate_posterior$log_g)) { loo_obj <- loo_approximate_posterior.function(x = object$loo_subsampling$.llfun, data = data_new_subsample, draws = draws, log_p = object$approximate_posterior$log_p, log_g = object$approximate_posterior$log_g, save_psis = !is.null(object$psis_object), cores = cores) } else { loo_obj <- loo.function(x = object$loo_subsampling$.llfun, data = data_new_subsample, draws = draws, r_eff = r_eff, save_psis = !is.null(object$psis_object), cores = cores) } # Add stuff to pointwise loo_obj$pointwise <- add_subsampling_vars_to_pointwise(loo_obj$pointwise, cidxs$new, object$loo_subsampling$elpd_loo_approx) } else { loo_obj <- NULL } if (length(observations) == 1) { # Add new samples pointwise and diagnostic object <- rbind_psis_loo_ss(object, x = loo_obj) # Update m_i for current pointwise (diagnostic stay the same) object$pointwise <- 
update_m_i_in_pointwise(object$pointwise, cidxs$add, type = "add") } else { # Add new samples pointwise and diagnostic object <- rbind_psis_loo_ss(object, loo_obj) # Replace m_i current pointwise and diagnostics object$pointwise <- update_m_i_in_pointwise(object$pointwise, cidxs$add, type = "replace") # Remove samples object <- remove_idx.psis_loo_ss(object, idxs = cidxs$remove) stopifnot(setequal(obs_idx(object), observations)) # Order object as in observations object <- order.psis_loo_ss(object, observations) } } # Compute estimates if (object$loo_subsampling$estimator == "hh_pps") { object <- loo_subsample_estimation_hh(object) } else if (object$loo_subsampling$estimator == "diff_srs") { object <- loo_subsample_estimation_diff_srs(object) } else if (object$loo_subsampling$estimator == "srs") { object <- loo_subsample_estimation_srs(object) } else { stop("No correct estimator used.") } assert_psis_loo_ss(object) object } #' Get observation indices used in subsampling #' #' @param x A `psis_loo_ss` object. #' @param rep If sampling with replacement is used, an observation can have #' multiple samples and these are then repeated in the returned object if #' `rep=TRUE` (e.g., a vector `c(1,1,2)` indicates that observation 1 has been #' subampled two times). If `rep=FALSE` only the unique indices are returned. #' #' @return An integer vector. #' #' @export obs_idx <- function(x, rep = TRUE) { checkmate::assert_class(x, "psis_loo_ss") if (rep) { idxs <- as.integer(rep(x$pointwise[,"idx"], x$pointwise[,"m_i"])) } else { idxs <- as.integer(x$pointwise[,"idx"]) } idxs } #' The number of observations in a `psis_loo_ss` object. #' @importFrom stats nobs #' @param object a `psis_loo_ss` object. #' @param ... Currently unused. #' @export nobs.psis_loo_ss <- function(object, ...) { as.integer(sum(object$pointwise[,"m_i"])) } # internal ---------------------------------------------------------------- #' The possible choices of loo_approximations implemented #' #' @details #' The choice `psis` is returned if a `psis_loo` object #' is converted to a `psis_loo_ss` object with `as.psis_loo_ss()`. #' But `psis` cannot be chosen in the API of `loo_subsample()`. #' #' @noRd #' @param api The choices available in the loo API or all possible choices. #' @return A character vector of allowed choices. loo_approximation_choices <- function(api = TRUE) { lac <- c("plpd", "lpd", "waic", "waic_grad_marginal", "waic_grad", "waic_hess", "tis", "sis", "none") if (!api) lac <- c(lac, "psis") lac } #' The estimators implemented #' #' @noRd #' @return A character vector of allowed choices. 
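#' @details In terms of the estimation functions used later in this file,
#'   `"hh_pps"` corresponds to the Hansen-Hurwitz estimator with
#'   probability-proportional-to-size sampling (with replacement), `"diff_srs"`
#'   to the difference estimator under simple random sampling without
#'   replacement, and `"srs"` to plain simple random sampling without
#'   replacement.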
estimator_choices <- function() { c("hh_pps", "diff_srs", "srs") } ## Approximate elpd ----- #' Utility function to apply user-specified log-likelihood to a single data point #' @details #' See `elpd_loo_approximation` and `compute_lpds` for usage examples #' @noRd #' #' @return lpd value for a single data point i lpd_i <- function(i, llfun, data, draws) { ll_i <- llfun(data_i = data[i,, drop=FALSE], draws = draws) ll_i <- as.vector(ll_i) lpd_i <- logMeanExp(ll_i) lpd_i } #' Utility function to compute lpd using user-defined likelihood function #' using platform-dependent parallel backends when cores > 1 #' #' @details #' See `elpd_loo_approximation` for usage examples #' #' @noRd #' @return a vector of computed log probability densities compute_lpds <- function(N, data, draws, llfun, cores) { if (cores == 1) { lpds <- lapply(X = seq_len(N), FUN = lpd_i, llfun, data, draws) } else { if (.Platform$OS.type != "windows") { lpds <- mclapply(X = seq_len(N), mc.cores = cores, FUN = lpd_i, llfun, data, draws) } else { cl <- makePSOCKcluster(cores) on.exit(stopCluster(cl)) lpds <- parLapply(cl, X = seq_len(N), fun = lpd_i, llfun, data, draws) } } unlist(lpds) } #' Compute approximation to loo_i:s #' #' @details #' See [loo_subsample.function()] and the `loo_approximation` argument. #' @noRd #' @inheritParams loo_subsample.function #' #' @return a vector with approximations of elpd_{loo,i}s elpd_loo_approximation <- function(.llfun, data, draws, cores, loo_approximation, loo_approximation_draws = NULL, .llgrad = NULL, .llhess = NULL) { checkmate::assert_function(.llfun, args = c("data_i", "draws"), ordered = TRUE) stopifnot(is.data.frame(data) || is.matrix(data), !is.null(draws)) checkmate::assert_choice(loo_approximation, choices = loo_approximation_choices(), null.ok = FALSE) checkmate::assert_int(loo_approximation_draws, lower = 2, null.ok = TRUE) if (!is.null(.llgrad)) { checkmate::assert_function(.llgrad, args = c("data_i", "draws"), ordered = TRUE) } if (!is.null(.llhess)) { checkmate::assert_function(.llhess, args = c("data_i", "draws"), ordered = TRUE) } cores <- loo_cores(cores) N <- dim(data)[1] if (loo_approximation == "none") return(rep(1L,N)) if (loo_approximation %in% c("tis", "sis")) { draws <- .thin_draws(draws, loo_approximation_draws) is_values <- suppressWarnings(loo.function(.llfun, data = data, draws = draws, is_method = loo_approximation)) return(is_values$pointwise[, "elpd_loo"]) } if (loo_approximation == "waic") { draws <- .thin_draws(draws, loo_approximation_draws) waic_full_obj <- waic.function(.llfun, data = data, draws = draws) return(waic_full_obj$pointwise[,"elpd_waic"]) } # Compute the lpd or log p(y_i|y_{-i}) if (loo_approximation == "lpd") { draws <- .thin_draws(draws, loo_approximation_draws) lpds <- compute_lpds(N, data, draws, .llfun, cores) return(lpds) # Use only the lpd } # Compute the point lpd or log p(y_i|\hat{\theta}) - also used in waic_delta approaches if (loo_approximation == "plpd" | loo_approximation == "waic_grad" | loo_approximation == "waic_grad_marginal" | loo_approximation == "waic_hess") { draws <- .thin_draws(draws, loo_approximation_draws) point_est <- .compute_point_estimate(draws) lpds <- compute_lpds(N, data, point_est, .llfun, cores) if (loo_approximation == "plpd") return(lpds) # Use only the lpd } if (loo_approximation == "waic_grad" | loo_approximation == "waic_grad_marginal" | loo_approximation == "waic_hess") { checkmate::assert_true(!is.null(.llgrad)) point_est <- .compute_point_estimate(draws) # Compute the lpds lpds <- 
compute_lpds(N, data, point_est, .llfun, cores) if (loo_approximation == "waic_grad" | loo_approximation == "waic_hess") { cov_est <- stats::cov(draws) } if (loo_approximation == "waic_grad_marginal") { marg_vars <- apply(draws, MARGIN = 2, var) } p_eff_approx <- numeric(N) if (cores>1) warning("Multicore is not implemented for waic_delta", call. = FALSE) if (loo_approximation == "waic_grad") { for(i in 1:nrow(data)) { grad_i <- t(.llgrad(data[i,,drop = FALSE], point_est)) local_cov <- cov_est[rownames(grad_i), rownames(grad_i)] p_eff_approx[i] <- t(grad_i) %*% local_cov %*% grad_i } } else if (loo_approximation == "waic_grad_marginal") { for(i in 1:nrow(data)) { grad_i <- t(.llgrad(data[i,,drop = FALSE], point_est)) p_eff_approx[i] <- sum(grad_i * marg_vars[rownames(grad_i)] * grad_i) } } else if (loo_approximation == "waic_hess") { checkmate::assert_true(!is.null(.llhess)) for(i in 1:nrow(data)) { grad_i <- t(.llgrad(data[i,,drop = FALSE], point_est)) hess_i <- .llhess(data_i = data[i,,drop = FALSE], draws = point_est[,rownames(grad_i), drop = FALSE])[,,1] local_cov <- cov_est[rownames(grad_i), rownames(grad_i)] p_eff_approx[i] <- t(grad_i) %*% local_cov %*% grad_i + 0.5 * sum(diag(local_cov %*% hess_i %*% local_cov %*% hess_i)) } } else { stop(loo_approximation, " is not implemented!", call. = FALSE) } return(lpds - p_eff_approx) } } #' Compute a point estimate from a draws object #' #' @keywords internal #' @export #' @details This is a generic function to compute point estimates from draws #' objects. The function is internal and should only be used by developers to #' enable [loo_subsample()] for arbitrary draws objects. #' #' @param draws A draws object with draws from the posterior. #' @return A 1 by P matrix with point estimates from a draws object. .compute_point_estimate <- function(draws) { UseMethod(".compute_point_estimate") } #' @rdname dot-compute_point_estimate #' @export .compute_point_estimate.matrix <- function(draws) { t(as.matrix(colMeans(draws))) } #' @rdname dot-compute_point_estimate #' @export .compute_point_estimate.default <- function(draws) { stop(".compute_point_estimate() has not been implemented for objects of class '", class(draws), "'") } #' Thin a draws object #' #' @keywords internal #' @export #' @details This is a generic function to thin draws from arbitrary draws #' objects. The function is internal and should only be used by developers to #' enable [loo_subsample()] for arbitrary draws objects. #' #' @param draws A draws object with posterior draws. #' @param loo_approximation_draws The number of posterior draws to return (ie after thinning). #' @return A thinned draws object. .thin_draws <- function(draws, loo_approximation_draws) { UseMethod(".thin_draws") } #' @rdname dot-thin_draws #' @export .thin_draws.matrix <- function(draws, loo_approximation_draws) { if (is.null(loo_approximation_draws)) return(draws) checkmate::assert_int(loo_approximation_draws, lower = 1, upper = .ndraws(draws), null.ok = TRUE) S <- .ndraws(draws) idx <- 1:loo_approximation_draws * S %/% loo_approximation_draws draws <- draws[idx, , drop = FALSE] draws } #' @rdname dot-thin_draws #' @export .thin_draws.numeric <- function(draws, loo_approximation_draws) { .thin_draws.matrix(as.matrix(draws), loo_approximation_draws) } #' @rdname dot-thin_draws #' @export .thin_draws.default <- function(draws, loo_approximation_draws) { stop(".thin_draws() has not been implemented for objects of class '", class(draws), "'") } #' The number of posterior draws in a draws object. 
#' #' @keywords internal #' @export #' @details This is a generic function to return the total number of draws from #' an arbitrary draws objects. The function is internal and should only be #' used by developers to enable [loo_subsample()] for arbitrary draws objects. #' #' @param x A draws object with posterior draws. #' @return An integer with the number of draws. .ndraws <- function(x) { UseMethod(".ndraws") } #' @rdname dot-ndraws #' @export .ndraws.matrix <- function(x) { nrow(x) } #' @rdname dot-ndraws #' @export .ndraws.default <- function(x) { stop(".ndraws() has not been implemented for objects of class '", class(x), "'") } ## Subsampling ----- #' Subsampling strategy #' #' @noRd #' @param estimator The estimator to use, see `estimator_choices()`. #' @param elpd_loo_approximation A vector of loo approximations, see `elpd_loo_approximation()`. #' @param observations The total number of subsample observations to sample. #' @return A `subsample_idxs` data frame. subsample_idxs <- function(estimator, elpd_loo_approximation, observations) { checkmate::assert_choice(estimator, choices = estimator_choices()) checkmate::assert_numeric(elpd_loo_approximation) checkmate::assert_int(observations) if (estimator == "hh_pps") { pi_values <- pps_elpd_loo_approximation_to_pis(elpd_loo_approximation) idxs_df <- pps_sample(observations, pis = pi_values) } if (estimator == "diff_srs" | estimator == "srs") { if (observations > length(elpd_loo_approximation)) { stop("'observations' is larger than the total sample size in 'data'.", call. = FALSE) } idx <- 1:length(elpd_loo_approximation) idx_m <- idx[order(stats::runif(length(elpd_loo_approximation)))][1:observations] idx_m <- idx_m[order(idx_m)] idxs_df <- data.frame(idx=as.integer(idx_m), m_i=1L) } assert_subsample_idxs(x = idxs_df) idxs_df } #' Compute pis from approximation for use in pps sampling. #' @noRd #' @details pis are the sampling probabilities and sum to 1. #' @inheritParams subsample_idxs #' @return A vector of pis. pps_elpd_loo_approximation_to_pis <- function(elpd_loo_approximation) { checkmate::assert_numeric(elpd_loo_approximation) pi_values <- abs(elpd_loo_approximation) pi_values <- pi_values/sum(pi_values) # \tilde{\pi} pi_values } #' Compute subsampling indices from an observation vector #' @noRd #' @param observation A vector of indices. #' @return A `subsample_idxs` data frame. compute_idxs <- function(observations) { checkmate::assert_integer(observations, lower = 1, min.len = 2, any.missing = FALSE) tab <- table(observations) idxs_df <- data.frame(idx = as.integer(names(tab)), m_i = as.integer(unname(tab))) assert_subsample_idxs(idxs_df) idxs_df } #' Compare the indices to prepare handling #' #' @details #' The function compares the object and sampled indices into `new` #' (observations not in `object`), `add` (observations in `object`), and #' `remove` (observations in `object` but not in idxs). #' @noRd #' @param idxs A `subsample_idxs` data frame. #' @param object A `psis_loo_ss` object. #' @return A list of three `subsample_idxs` data frames. Elements without any #' observations return `NULL`. 
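# Illustrative sketch (not part of the package sources): how a vector of
# elpd_loo approximations is mapped to PPS sampling probabilities and how a
# vector of sampled observation indices is tabulated into an idx/m_i data
# frame, mirroring pps_elpd_loo_approximation_to_pis() and compute_idxs()
# above. The numbers are made up for demonstration only.
elpd_approx <- c(-2.1, -0.4, -3.7, -1.2, -0.9)
pis <- abs(elpd_approx) / sum(abs(elpd_approx))  # normalized probabilities, sum to 1
obs <- sample(seq_along(pis), size = 8, replace = TRUE, prob = pis)
tab <- table(obs)
data.frame(idx = as.integer(names(tab)), m_i = as.integer(unname(tab)))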
compare_idxs <- function(idxs, object) { assert_subsample_idxs(idxs) current_idx <- compute_idxs(obs_idx(object)) result <- list() new_idx <- !(idxs$idx %in% current_idx$idx) remove_idx <- !(current_idx$idx %in% idxs$idx) result$new <- idxs[new_idx, ] if (nrow(result$new) == 0) { result["new"] <- NULL } else { assert_subsample_idxs(result$new) } result$add <- idxs[!new_idx, ] if (nrow(result$add) == 0) { result["add"] <- NULL } else { assert_subsample_idxs(result$add) } result$remove <- current_idx[remove_idx, ] if (nrow(result$remove) == 0) { result["remove"] <- NULL } else { assert_subsample_idxs(result$remove) } result } #' Draw a PPS sample with replacement and return a idx_df #' @noRd #' @details #' We are sampling with replacement, hence we only want to compute elpd #' for each observation once. #' @param m The total sampling size. #' @param pis The probability of selecting each observation. #' @return a `subsample_idxs` data frame. pps_sample <- function(m, pis) { checkmate::assert_int(m) checkmate::assert_numeric(pis, min.len = 2, lower = 0, upper = 1) idx <- sample(1:length(pis), size = m, replace = TRUE, prob = pis) idxs_df <- as.data.frame(table(idx), stringsAsFactors = FALSE) colnames(idxs_df) <- c("idx", "m_i") idxs_df$idx <- as.integer(idxs_df$idx) idxs_df$m_i <- as.integer(idxs_df$m_i) assert_subsample_idxs(idxs_df) idxs_df } ## Constructor --- #' Construct a `psis_loo_ss` object #' #' @noRd #' @param x A `psis_loo` object. #' @param idxs a `subsample_idxs` data frame. #' @param elpd_loo_approximation A vector of loo approximations, see #' `elpd_loo_approximation()`. #' @inheritParams loo_subsample #' @param .llfun,.llgrad,.llhess See llfun, llgrad and llhess in `loo_subsample()`. #' @param data_dim Dimension of the data object. #' @param ndraws Dimension of the draws object. #' @return A `psis_loo_ss` object. 
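# Illustrative sketch (not part of the package sources): the partition logic
# used by compare_idxs() above. Given the indices already in a subsample and a
# newly requested set of indices, they are split into observations that are
# new, observations to keep/update, and observations to remove. Toy indices,
# for demonstration only.
current_idx <- c(1L, 3L, 5L, 8L)
requested   <- c(3L, 4L, 5L, 9L)
list(
  new    = requested[!(requested %in% current_idx)],   # not yet computed
  add    = requested[requested %in% current_idx],      # already computed
  remove = current_idx[!(current_idx %in% requested)]  # no longer requested
)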
psis_loo_ss_object <- function(x, idxs, elpd_loo_approx, loo_approximation, loo_approximation_draws, estimator, .llfun, .llgrad, .llhess, data_dim, ndraws) { # Assertions checkmate::assert_class(x, "psis_loo") assert_subsample_idxs(idxs) checkmate::assert_numeric(elpd_loo_approx, any.missing = FALSE) checkmate::assert_choice(loo_approximation, loo_approximation_choices()) checkmate::assert_int(loo_approximation_draws, null.ok = TRUE) checkmate::assert_choice(estimator, estimator_choices()) checkmate::assert_function(.llfun, args = c("data_i", "draws"), ordered = TRUE) checkmate::assert_function(.llgrad, args = c("data_i", "draws"), ordered = TRUE, null.ok = TRUE) checkmate::assert_function(.llhess, args = c("data_i", "draws"), ordered = TRUE, null.ok = TRUE) checkmate::assert_integer(data_dim, len = 2, lower = 1, any.missing = FALSE) checkmate::assert_int(ndraws, lower = 1) # Construct object class(x) <- c("psis_loo_ss", class(x)) x$pointwise <- add_subsampling_vars_to_pointwise(pointwise = x$pointwise, idxs, elpd_loo_approx) x$estimates <- cbind(x$estimates, matrix(0, nrow = nrow(x$estimates))) colnames(x$estimates)[ncol(x$estimates)] <- "subsampling SE" x$loo_subsampling <- list() x$loo_subsampling$elpd_loo_approx <- elpd_loo_approx x$loo_subsampling$loo_approximation <- loo_approximation x$loo_subsampling["loo_approximation_draws"] <- list(loo_approximation_draws) x$loo_subsampling$estimator <- estimator x$loo_subsampling$.llfun <- .llfun x$loo_subsampling[".llgrad"] <- list(.llgrad) x$loo_subsampling[".llhess"] <- list(.llhess) x$loo_subsampling$data_dim <- data_dim x$loo_subsampling$ndraws <- ndraws # Compute estimates if (estimator == "hh_pps") { x <- loo_subsample_estimation_hh(x) } else if (estimator == "diff_srs") { x <- loo_subsample_estimation_diff_srs(x) } else if (estimator == "srs") { x <- loo_subsample_estimation_srs(x) } else { stop("No correct estimator used.") } assert_psis_loo_ss(x) x } as.psis_loo_ss <- function(x) { UseMethod("as.psis_loo_ss") } #' @export as.psis_loo_ss.psis_loo_ss <- function(x) { x } #' @export as.psis_loo_ss.psis_loo <- function(x) { class(x) <- c("psis_loo_ss", class(x)) x$estimates <- cbind(x$estimates, matrix(0, nrow = nrow(x$estimates))) colnames(x$estimates)[ncol(x$estimates)] <- "subsampling SE" x$pointwise <- cbind(x$pointwise, matrix(1:nrow(x$pointwise), byrow = FALSE, ncol = 1), matrix(rep(1,nrow(x$pointwise)), byrow = FALSE, ncol = 1), x$pointwise[, "elpd_loo"]) ncp <- ncol(x$pointwise) colnames(x$pointwise)[(ncp-2):ncp] <- c("idx", "m_i", "elpd_loo_approx") x$loo_subsampling <- list(elpd_loo_approx=x$pointwise[, "elpd_loo"], loo_approximation = "psis", loo_approximation_draws = NULL, estimator = "diff_srs", data_dim = c(nrow(x$pointwise), NA), ndraws = NA) assert_psis_loo_ss(x) x } as.psis_loo <- function(x) { UseMethod("as.psis_loo") } #' @export as.psis_loo.psis_loo <- function(x) { x } #' @export as.psis_loo.psis_loo_ss <- function(x) { if (x$loo_subsampling$data_dim[1] == nrow(x$pointwise)) { x$estimates <- x$estimates[, 1:2] x$pointwise <- x$pointwise[, 1:5] x$loo_subsampling <- NULL loo_obj <- importance_sampling_loo_object(pointwise = x$pointwise[, 1:5], diagnostics = x$diagnostics, dims = attr(x, "dims"), is_method = "psis", is_object = x$psis_object) if (inherits(x, "psis_loo_ap")) { loo_obj$approximate_posterior <- list(log_p = x$approximate_posterior$log_p, log_g = x$approximate_posterior$log_g) class(loo_obj) <- c("psis_loo_ap", class(loo_obj)) assert_psis_loo_ap(loo_obj) } } else { stop("A subsampling loo object can only be 
coerced to a loo object ", "if all observations in data have been subsampled.", call. = FALSE) } loo_obj } #' Add subsampling information to the pointwise element in a `psis_loo` object. #' @noRd #' @param pointwise The `pointwise` element in a `psis_loo` object. #' @param idxs A `subsample_idxs` data frame. #' @param elpd_loo_approximation A vector of loo approximations, see `elpd_loo_approximation()`. #' @return A `pointwise` matrix with subsampling information. add_subsampling_vars_to_pointwise <- function(pointwise, idxs, elpd_loo_approx) { checkmate::assert_matrix(pointwise, any.missing = FALSE, min.cols = 5) checkmate::assert_names(colnames(pointwise), identical.to = c("elpd_loo","mcse_elpd_loo","p_loo","looic", "influence_pareto_k")) assert_subsample_idxs(idxs) checkmate::assert_numeric(elpd_loo_approx) pw <- cbind(as.data.frame(pointwise), idxs) pw$elpd_loo_approx <- elpd_loo_approx[idxs$idx] pw <- as.matrix(pw) rownames(pw) <- NULL assert_subsampling_pointwise(pw) pw } #' Add `psis_loo` object to a `psis_loo_ss` object #' @noRd #' @param object A `psis_loo_ss` object. #' @param x A `psis_loo` object. #' @return An updated `psis_loo_ss` object. rbind_psis_loo_ss <- function(object, x) { checkmate::assert_class(object, "psis_loo_ss") if (is.null(x)) return(object) # Fallback checkmate::assert_class(x, "psis_loo") assert_subsampling_pointwise(object$pointwise) assert_subsampling_pointwise(x$pointwise) checkmate::assert_disjunct(object$pointwise[, "idx"], x$pointwise[, "idx"]) object$pointwise <- rbind(object$pointwise, x$pointwise) object$diagnostics$pareto_k <- c(object$diagnostics$pareto_k, x$diagnostics$pareto_k) object$diagnostics$n_eff <- c(object$diagnostics$n_eff, x$diagnostics$n_eff) object$diagnostics$r_eff <- c(object$diagnostics$r_eff, x$diagnostics$r_eff) attr(object, "dims")[2] <- nrow(object$pointwise) object } #' Remove observations in `idxs` from object #' @noRd #' @param object A `psis_loo_ss` object. #' @param idxs A `subsample_idxs` data frame. #' @return A `psis_loo_ss` object. remove_idx.psis_loo_ss <- function(object, idxs) { checkmate::assert_class(object, "psis_loo_ss") if (is.null(idxs)) return(object) # Fallback assert_subsample_idxs(idxs) row_map <- data.frame( row_no = 1:nrow(object$pointwise), idx = object$pointwise[, "idx"] ) row_map <- merge(row_map, idxs, by = "idx", all.y = TRUE) object$pointwise <- object$pointwise[-row_map$row_no,,drop = FALSE] object$diagnostics$pareto_k <- object$diagnostics$pareto_k[-row_map$row_no] object$diagnostics$n_eff <- object$diagnostics$n_eff[-row_map$row_no] object$diagnostics$r_eff <- object$diagnostics$r_eff[-row_map$row_no] attr(object, "dims")[2] <- nrow(object$pointwise) object } #' Order object by `observations`. #' @noRd #' @param x A `psis_loo_ss` object. #' @param observations A vector with indices. #' @return An ordered `psis_loo_ss` object. 
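# Illustrative sketch (not part of the package sources): the bookkeeping idea
# behind rbind_psis_loo_ss() above -- pointwise rows and diagnostic vectors are
# stacked together and the stored number of observations is refreshed from the
# pointwise matrix. Toy objects, for demonstration only.
obj <- list(pointwise = cbind(elpd_loo = c(-1.2, -0.8), idx = c(1, 4)),
            pareto_k  = c(0.3, 0.5),
            dims      = c(1000, 2))  # (draws, observations)
new <- list(pointwise = cbind(elpd_loo = -2.3, idx = 7), pareto_k = 0.6)
obj$pointwise <- rbind(obj$pointwise, new$pointwise)
obj$pareto_k  <- c(obj$pareto_k, new$pareto_k)
obj$dims[2]   <- nrow(obj$pointwise)  # keep N in sync with the pointwise rows
obj$dims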
order.psis_loo_ss <- function(x, observations) { checkmate::assert_class(x, "psis_loo_ss") checkmate::assert_integer(observations, len = nobs(x)) if (identical(obs_idx(x), observations)) return(x) # Fallback checkmate::assert_set_equal(obs_idx(x), observations) row_map_x <- data.frame(row_no_x = 1:nrow(x$pointwise), idx = x$pointwise[, "idx"]) row_map_obs <- data.frame(row_no_obs = 1:length(observations), idx = observations) row_map <- merge(row_map_obs, row_map_x, by = "idx", sort = FALSE) x$pointwise <- x$pointwise[row_map$row_no_x,,drop = FALSE] x$diagnostics$pareto_k <- x$diagnostics$pareto_k[row_map$row_no_x] x$diagnostics$n_eff <- x$diagnostics$n_eff[row_map$row_no_x] x$diagnostics$r_eff <- x$diagnostics$r_eff[row_map$row_no_x] x } #' Update m_i in a `pointwise` element. #' @noRd #' @param x A `psis_loo_ss` `pointwise` data frame. #' @param idxs A `subsample_idxs` data frame. #' @param type should the m_i:s in `idxs` `"replace"` the current m_i:s or #' `"add"` to them. #' @return An ordered `psis_loo_ss` object. update_m_i_in_pointwise <- function(pointwise, idxs, type = "replace") { assert_subsampling_pointwise(pointwise) if (is.null(idxs)) return(pointwise) # Fallback assert_subsample_idxs(idxs) checkmate::assert_choice(type, choices = c("replace", "add")) row_map <- data.frame(row_no = 1:nrow(pointwise), idx = pointwise[, "idx"]) row_map <- merge(row_map, idxs, by = "idx", all.y = TRUE) if (type == "replace") { pointwise[row_map$row_no, "m_i"] <- row_map$m_i } if (type == "add") { pointwise[row_map$row_no, "m_i"] <- pointwise[row_map$row_no, "m_i"] + row_map$m_i } pointwise } ## Estimation --- #' Estimate the elpd using the Hansen-Hurwitz estimator (Magnusson et al., 2019) #' @noRd #' @param x A `psis_loo_ss` object. #' @return A `psis_loo_ss` object. loo_subsample_estimation_hh <- function(x) { checkmate::assert_class(x, "psis_loo_ss") N <- length(x$loo_subsampling$elpd_loo_approx) pis <- pps_elpd_loo_approximation_to_pis(x$loo_subsampling$elpd_loo_approx) pis_sample <- pis[x$pointwise[,"idx"]] hh_elpd_loo <- whhest(z = pis_sample, m_i = x$pointwise[, "m_i"], y = x$pointwise[, "elpd_loo"], N) srs_elpd_loo <- srs_est(y = x$pointwise[, "elpd_loo"], y_approx = pis_sample) x$estimates["elpd_loo", "Estimate"] <- hh_elpd_loo$y_hat_ppz if (hh_elpd_loo$hat_v_y_ppz > 0) { x$estimates["elpd_loo", "SE"] <- sqrt(hh_elpd_loo$hat_v_y_ppz) } else { warning("Negative estimate of SE, more subsampling obs. needed.", call. = FALSE) x$estimates["elpd_loo", "SE"] <- NaN } x$estimates["elpd_loo", "subsampling SE"] <- sqrt(hh_elpd_loo$v_hat_y_ppz) hh_p_loo <- whhest(z = pis_sample, m_i = x$pointwise[,"m_i"], y = x$pointwise[,"p_loo"], N) x$estimates["p_loo", "Estimate"] <- hh_p_loo$y_hat_ppz if (hh_p_loo$hat_v_y_ppz > 0) { x$estimates["p_loo", "SE"] <- sqrt(hh_p_loo$hat_v_y_ppz) } else { warning("Negative estimate of SE, more subsampling obs. needed.", call. = FALSE) x$estimates["elpd_loo", "SE"] <- NaN } x$estimates["p_loo", "subsampling SE"] <- sqrt(hh_p_loo$v_hat_y_ppz) update_psis_loo_ss_estimates(x) } #' Update a `psis_loo_ss` object with generic estimates #' #' @noRd #' @details #' Updates a `psis_loo_ss` with generic estimates (looic) #' and updates components in the object based on x$estimate. #' @param x A `psis_loo_ss` object. #' @return x A `psis_loo_ss` object. 
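# Illustrative sketch (not part of the package sources): the merge()-based row
# mapping used by update_m_i_in_pointwise() above to locate which pointwise
# rows to update when observations are drawn again (type = "add"). Toy data,
# for demonstration only.
pointwise <- cbind(idx = c(2L, 5L, 9L), m_i = c(1L, 1L, 2L))
idxs <- data.frame(idx = c(5L, 9L), m_i = c(2L, 1L))
row_map <- merge(data.frame(row_no = seq_len(nrow(pointwise)), idx = pointwise[, "idx"]),
                 idxs, by = "idx", all.y = TRUE)
pointwise[row_map$row_no, "m_i"] <- pointwise[row_map$row_no, "m_i"] + row_map$m_i
pointwise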
update_psis_loo_ss_estimates <- function(x) { checkmate::assert_class(x, "psis_loo_ss") x$estimates["looic", "Estimate"] <- (-2) * x$estimates["elpd_loo", "Estimate"] x$estimates["looic", "SE"] <- 2 * x$estimates["elpd_loo", "SE"] x$estimates["looic", "subsampling SE"] <- 2 * x$estimates["elpd_loo", "subsampling SE"] x$elpd_loo <- x$estimates["elpd_loo", "Estimate"] x$p_loo <- x$estimates["p_loo", "Estimate"] x$looic <- x$estimates["looic", "Estimate"] x$se_elpd_loo <- x$estimates["elpd_loo", "SE"] x$se_p_loo <- x$estimates["p_loo", "SE"] x$se_looic <- x$estimates["looic", "SE"] x } #' Weighted Hansen-Hurwitz estimator (Magnusson et al., 2019) #' @noRd #' @param z Normalized probabilities for the observation. #' @param m_i The number of times obs i was selected. #' @param y The values observed. #' @param N The total number of observations in the finite population. #' @return A list with estimates. whhest <- function(z, m_i, y, N) { checkmate::assert_numeric(z, lower = 0, upper = 1) checkmate::assert_numeric(y, len = length(z)) checkmate::assert_integerish(m_i, len = length(z)) est_list <- list(m = sum(m_i)) est_list$y_hat_ppz <- sum(m_i*(y/z))/est_list$m est_list$v_hat_y_ppz <- (sum(m_i*((y/z - est_list$y_hat_ppz)^2))/est_list$m)/(est_list$m-1) # See unbiasedness proof in supplementary material to the article est_list$hat_v_y_ppz <- (sum(m_i*(y^2/z)) / est_list$m) + est_list$v_hat_y_ppz / N - est_list$y_hat_ppz^2 / N est_list } #' Estimate elpd using the difference estimator and SRS-WOR (Magnusson et al., 2020) #' @noRd #' @param x A `psis_loo_ss` object. #' @return A `psis_loo_ss` object. loo_subsample_estimation_diff_srs <- function(x) { checkmate::assert_class(x, "psis_loo_ss") elpd_loo_est <- srs_diff_est(y_approx = x$loo_subsampling$elpd_loo_approx, y = x$pointwise[, "elpd_loo"], y_idx = x$pointwise[, "idx"]) x$estimates["elpd_loo", "Estimate"] <- elpd_loo_est$y_hat x$estimates["elpd_loo", "SE"] <- sqrt(elpd_loo_est$hat_v_y) x$estimates["elpd_loo", "subsampling SE"] <- sqrt(elpd_loo_est$v_y_hat) p_loo_est <- srs_est(y = x$pointwise[, "p_loo"], y_approx = x$loo_subsampling$elpd_loo_approx) x$estimates["p_loo", "Estimate"] <- p_loo_est$y_hat x$estimates["p_loo", "SE"] <- sqrt(p_loo_est$hat_v_y) x$estimates["p_loo", "subsampling SE"] <- sqrt(p_loo_est$v_y_hat) update_psis_loo_ss_estimates(x) } #' Difference estimation using SRS-WOR sampling (Magnusson et al., 2020) #' @noRd #' @param y_approx Approximated values of all observations. #' @param y The values observed. #' @param y_idx The index of `y` in `y_approx`. #' @return A list with estimates.
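# Illustrative sketch (not part of the package sources): the weighted
# Hansen-Hurwitz point estimate computed by whhest() above, written out for a
# toy finite population. Values are simulated for demonstration only.
N <- 100
y_pop <- rnorm(N, mean = -1)              # stand-in for pointwise elpd_loo values
z <- abs(y_pop) / sum(abs(y_pop))         # PPS probabilities, as in the hh_pps estimator
draws <- sample(N, size = 20, replace = TRUE, prob = z)
tab <- table(draws)
m_i <- as.integer(tab)
idx <- as.integer(names(tab))
m <- sum(m_i)
y_hat_ppz <- sum(m_i * y_pop[idx] / z[idx]) / m  # estimate of sum(y_pop)
c(estimate = y_hat_ppz, truth = sum(y_pop))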
srs_diff_est <- function(y_approx, y, y_idx) { checkmate::assert_numeric(y_approx) checkmate::assert_numeric(y, max.len = length(y_approx)) checkmate::assert_integerish(y_idx, len = length(y)) N <- length(y_approx) m <- length(y) y_approx_m <- y_approx[y_idx] e_i <- y - y_approx_m t_pi_tilde <- sum(y_approx) t_pi2_tilde <- sum(y_approx^2) t_e <- N * mean(e_i) t_hat_epsilon <- N * mean(y^2 - y_approx_m^2) est_list <- list(m = length(y), N = N) # eq (7) est_list$y_hat <- t_pi_tilde + t_e # eq (8) est_list$v_y_hat <- N^2 * (1 - m / N) * var(e_i) / m # eq (9) first row second `+` should be `-` # Supplementary material eq (6) has this correct # Here the variance is for sum, while in the paper the variance is for mean # which explains the proportional difference of 1/N est_list$hat_v_y <- (t_pi2_tilde + t_hat_epsilon) - # a (has been checked) (1/N) * (t_e^2 - est_list$v_y_hat + 2 * t_pi_tilde * est_list$y_hat - t_pi_tilde^2) # b est_list } #' Estimate elpd using the standard simple random sampling without #' replacement (SRS-WOR) estimator #' @noRd #' @param x A `psis_loo_ss` object. #' @return A `psis_loo_ss` object. loo_subsample_estimation_srs <- function(x) { checkmate::assert_class(x, "psis_loo_ss") elpd_loo_est <- srs_est(y = x$pointwise[, "elpd_loo"], y_approx = x$loo_subsampling$elpd_loo_approx) x$estimates["elpd_loo", "Estimate"] <- elpd_loo_est$y_hat x$estimates["elpd_loo", "SE"] <- sqrt(elpd_loo_est$hat_v_y) x$estimates["elpd_loo", "subsampling SE"] <- sqrt(elpd_loo_est$v_y_hat) p_loo_est <- srs_est(y = x$pointwise[, "p_loo"], y_approx = x$loo_subsampling$elpd_loo_approx) x$estimates["p_loo", "Estimate"] <- p_loo_est$y_hat x$estimates["p_loo", "SE"] <- sqrt(p_loo_est$hat_v_y) x$estimates["p_loo", "subsampling SE"] <- sqrt(p_loo_est$v_y_hat) update_psis_loo_ss_estimates(x) } #' Simple random sampling without replacement (SRS-WOR) estimation #' @noRd #' @param y The values observed. #' @param y_approx A vector of length N. #' @return A list of estimates. srs_est <- function(y, y_approx) { checkmate::assert_numeric(y) checkmate::assert_numeric(y_approx, min.len = length(y)) N <- length(y_approx) m <- length(y) est_list <- list(m = m) est_list$y_hat <- N * mean(y) est_list$v_y_hat <- N^2 * (1-m/N) * var(y)/m est_list$hat_v_y <- N * var(y) est_list } ## Specialized assertions of objects --- #' Assert that the object has the expected properties #' @noRd #' @param x An object to assert. #' @param N The total number of data points in data. #' @param estimator The estimator used. #' @return An asserted object of `x`. assert_observations <- function(x, N, estimator) { checkmate::assert_int(N) checkmate::assert_choice(estimator, choices = estimator_choices()) if (is.null(x)) return(x) if (checkmate::test_class(x, "psis_loo_ss")) { x <- obs_idx(x) checkmate::assert_integer(x, lower = 1, upper = N, any.missing = FALSE) return(x) } x <- as.integer(x) if (length(x) > 1) { checkmate::assert_integer(x, lower = 1, upper = N, any.missing = FALSE) if (estimator %in% "hh_pps") { message("Sampling proportional to elpd approximation and with replacement assumed.") } if (estimator %in% c("diff_srs", "srs")) { message("Simple random sampling with replacement assumed.") } } else { checkmate::assert_integer(x, lower = 1, any.missing = FALSE) } x } #' Assert that the object has the expected properties #' @noRd #' @inheritParams assert_observations #' @return An asserted object of `x`.
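# Illustrative sketch (not part of the package sources): the difference
# estimator point estimate used by srs_diff_est() above -- the sum of the cheap
# approximations for all N observations plus an SRS-WOR correction based on the
# subsample for which elpd_loo was computed exactly. Toy values, simulated for
# demonstration only.
N <- 100; m <- 20
y_approx <- rnorm(N, mean = -1)              # cheap approximations for all N
y_idx <- sample(N, size = m)                 # simple random sample without replacement
y <- y_approx[y_idx] + rnorm(m, sd = 0.1)    # stand-in for the exactly computed values
e_i <- y - y_approx[y_idx]
y_hat <- sum(y_approx) + N * mean(e_i)       # eq (7): t_pi_tilde + t_e
v_y_hat <- N^2 * (1 - m / N) * var(e_i) / m  # eq (8): subsampling variance of the sum
c(estimate = y_hat, subsampling_SE = sqrt(v_y_hat))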
assert_subsample_idxs <- function(x) { checkmate::assert_data_frame(x, types = c("integer", "integer"), any.missing = FALSE, min.rows = 1, col.names = "named") checkmate::assert_names(names(x), identical.to = c("idx", "m_i")) checkmate::assert_integer(x$idx, lower = 1, any.missing = FALSE, unique = TRUE) checkmate::assert_integer(x$m_i, lower = 1, any.missing = FALSE) x } #' Assert that the object has the expected properties #' @noRd #' @inheritParams assert_observations #' @return An asserted object of `x`. assert_psis_loo_ss <- function(x) { checkmate::assert_class(x, "psis_loo_ss") checkmate::assert_names(names(x), must.include = c("estimates", "pointwise", "diagnostics", "psis_object", "loo_subsampling")) checkmate::assert_names(rownames(x$estimates), must.include = c("elpd_loo", "p_loo", "looic")) checkmate::assert_names(colnames(x$estimates), must.include = c("Estimate", "SE", "subsampling SE")) assert_subsampling_pointwise(x$pointwise) checkmate::assert_names(names(x$loo_subsampling), must.include = c("elpd_loo_approx", "loo_approximation", "loo_approximation_draws", "estimator", "data_dim", "ndraws")) checkmate::assert_numeric(x$loo_subsampling$elpd_loo_approx, any.missing = FALSE, len = x$loo_subsampling$data_dim[1]) checkmate::assert_choice(x$loo_subsampling$loo_approximation, choices = loo_approximation_choices(api = FALSE)) checkmate::assert_int(x$loo_subsampling$loo_approximation_draws, null.ok = TRUE) checkmate::assert_choice(x$loo_subsampling$estimator, choices = estimator_choices()) checkmate::assert_integer(x$loo_subsampling$data_dim, any.missing = TRUE, len = 2) checkmate::assert_int(x$loo_subsampling$data_dim[1], na.ok = FALSE) checkmate::assert_integer(x$loo_subsampling$ndraws, len = 1, any.missing = TRUE) x } #' Assert that the object has the expected properties #' @noRd #' @inheritParams assert_observations #' @return An asserted object of `x`. assert_subsampling_pointwise <- function(x) { checkmate::assert_matrix(x, any.missing = FALSE, ncols = 8) checkmate::assert_names(colnames(x), identical.to = c("elpd_loo", "mcse_elpd_loo", "p_loo", "looic", "influence_pareto_k", "idx", "m_i", "elpd_loo_approx")) x } loo/R/tis.R0000644000176200001440000001271214566461605012206 0ustar liggesusers#' Truncated importance sampling (TIS) #' #' Implementation of truncated (self-normalized) importance sampling (TIS), #' truncated at S^(1/2) as recommended by Ionides (2008). #' #' @param log_ratios An array, matrix, or vector of importance ratios on the log #' scale (for Importance sampling LOO, these are *negative* log-likelihood #' values). See the **Methods (by class)** section below for a detailed #' description of how to specify the inputs for each method. #' @template cores #' @param ... Arguments passed on to the various methods. #' @param r_eff Vector of relative effective sample size estimates containing #' one element per observation. The values provided should be the relative #' effective sample sizes of `1/exp(log_ratios)` (i.e., `1/ratios`). #' This is related to the relative efficiency of estimating the normalizing #' term in self-normalizing importance sampling. If `r_eff` is not #' provided then the reported (T)IS effective sample sizes and Monte Carlo #' error estimates can be over-optimistic. If the posterior draws are (near) #' independent then `r_eff=1` can be used. `r_eff` has to be a scalar (same #' value is used for all observations) or a vector with length equal to the #' number of observations. The default value is 1. 
See the [relative_eff()] #' helper function for computing `r_eff`. #' #' @return The `tis()` methods return an object of class `"tis"`, #' which is a named list with the following components: #' #' \describe{ #' \item{`log_weights`}{ #' Vector or matrix of smoothed (and truncated) but *unnormalized* log #' weights. To get normalized weights use the #' [`weights()`][weights.importance_sampling] method provided for objects of #' class `tis`. #' } #' \item{`diagnostics`}{ #' A named list containing one vector: #' * `pareto_k`: Not used in `tis`, all set to 0. #' * `n_eff`: Effective sample size estimates. #' } #' } #' #' Objects of class `"tis"` also have the following [attributes][attributes()]: #' \describe{ #' \item{`norm_const_log`}{ #' Vector of precomputed values of `colLogSumExps(log_weights)` that are #' used internally by the [weights()]method to normalize the log weights. #' } #' \item{`r_eff`}{ #' If specified, the user's `r_eff` argument. #' } #' \item{`tail_len`}{ #' Not used for `tis`. #' } #' \item{`dims`}{ #' Integer vector of length 2 containing `S` (posterior sample size) #' and `N` (number of observations). #' } #' \item{`method`}{ #' Method used for importance sampling, here `tis`. #' } #' } #' #' @seealso #' * [psis()] for approximate LOO-CV using PSIS. #' * [loo()] for approximate LOO-CV. #' * [pareto-k-diagnostic] for PSIS diagnostics. #' #' @references #' Ionides, Edward L. (2008). Truncated importance sampling. #' *Journal of Computational and Graphical Statistics* 17(2): 295--311. #' #' @examples #' log_ratios <- -1 * example_loglik_array() #' r_eff <- relative_eff(exp(-log_ratios)) #' tis_result <- tis(log_ratios, r_eff = r_eff) #' str(tis_result) #' #' # extract smoothed weights #' lw <- weights(tis_result) # default args are log=TRUE, normalize=TRUE #' ulw <- weights(tis_result, normalize=FALSE) # unnormalized log-weights #' #' w <- weights(tis_result, log=FALSE) # normalized weights (not log-weights) #' uw <- weights(tis_result, log=FALSE, normalize = FALSE) # unnormalized weights #' #' @export tis <- function(log_ratios, ...) UseMethod("tis") #' @export #' @templateVar fn tis #' @template array #' tis.array <- function(log_ratios, ..., r_eff = 1, cores = getOption("mc.cores", 1)) { importance_sampling.array(log_ratios = log_ratios, ..., r_eff = r_eff, cores = cores, method = "tis") } #' @export #' @templateVar fn tis #' @template matrix #' tis.matrix <- function(log_ratios, ..., r_eff = 1, cores = getOption("mc.cores", 1)) { importance_sampling.matrix(log_ratios, ..., r_eff = r_eff, cores = cores, method = "tis") } #' @export #' @templateVar fn tis #' @template vector #' tis.default <- function(log_ratios, ..., r_eff = 1) { importance_sampling.default(log_ratios = log_ratios, ..., r_eff = r_eff, method = "tis") } #' @rdname psis #' @export is.tis <- function(x) { inherits(x, "tis") && is.list(x) } # internal ---------------------------------------------------------------- #' Truncated Importance Sampling on a single vector #' #' @noRd #' @param log_ratios_i A vector of log importance ratios (for `loo()`, negative #' log likelihoods). #' @param ... Not used. Included to conform to PSIS API. #' #' @details Implementation of Truncated importance sampling (TIS), a method for #' stabilizing importance ratios. The version of TIS implemented here #' corresponds to the algorithm presented in Ionides (2008) with truncation at #' sqrt(S). #' #' @return A named list containing: #' * `lw`: vector of unnormalized log weights #' * `pareto_k`: scalar Pareto k estimate. 
For 'tis', this defaults to 0. #' do_tis_i <- function(log_ratios_i, ...) { S <- length(log_ratios_i) log_Z <- logSumExp(log_ratios_i) - log(S) # Normalization term, c-hat in Ionides (2008) appendix log_cutpoint <- log_Z + 0.5 * log(S) lw_i <- pmin(log_ratios_i, log_cutpoint) list(log_weights = lw_i, pareto_k = 0) } loo/R/loo-glossary.R0000644000176200001440000001767714641333357014040 0ustar liggesusers#' LOO package glossary #' #' @name loo-glossary #' #' @template loo-and-psis-references #' @template loo-uncertainty-reference #' @template bayesvis-reference #' #' @description #' This page provides definitions of key terms. Also see the #' [FAQ page](https://mc-stan.org/loo/articles/online-only/faq.html) on #' the __loo__ website for answers to frequently asked questions. #' #' Note: VGG2017 refers to Vehtari, Gelman, and Gabry (2017). See #' **References**, below. #' #' @section ELPD and `elpd_loo`: #' #' The ELPD is the theoretical expected log pointwise predictive density for a new #' dataset (Eq 1 in VGG2017), which can be estimated, e.g., using #' cross-validation. `elpd_loo` is the Bayesian LOO estimate of the #' expected log pointwise predictive density (Eq 4 in VGG2017) and #' is a sum of N individual pointwise log predictive densities. Probability #' densities can be smaller or larger than 1, and thus log predictive densities #' can be negative or positive. For simplicity the ELPD acronym is used also for #' expected log pointwise predictive probabilities for discrete models. #' Probabilities are always equal to or less than 1, and thus log predictive #' probabilities are 0 or negative. #' #' @section Standard error of `elpd_loo`: #' #' As `elpd_loo` is defined as the sum of N independent components (Eq 4 in #' VGG2017), we can compute the standard error by using the standard deviation #' of the N components and multiplying by `sqrt(N)` (Eq 23 in VGG2017). #' This standard error is a coarse description of our uncertainty about the #' predictive performance for unknown future data. When N is small or there is #' severe model misspecification, the current SE estimate is overoptimistic and #' the actual SE can even be twice as large. Even for moderate N, when the SE #' estimate is an accurate estimate for the scale, it ignores the skewness. When #' making model comparisons, the SE of the component-wise (pairwise) differences #' should be used instead (see the `se_diff` section below and Eq 24 in #' VGG2017). Sivula et al. (2022) discuss the conditions when the normal #' approximation used for SE and `se_diff` is good. #' #' @section Monte Carlo SE of elpd_loo: #' #' The Monte Carlo standard error is the estimate for the computational accuracy #' of MCMC and importance sampling used to compute `elpd_loo`. Usually this #' is negligible compared to the standard error describing the uncertainty due to #' finite number of observations (Eq 23 in VGG2017). #' #' @section `p_loo` (effective number of parameters): #' #' `p_loo` is the difference between `elpd_loo` and the non-cross-validated #' log posterior predictive density. It describes how much more difficult it #' is to predict future data than the observed data. Asymptotically under #' certain regularity conditions, `p_loo` can be interpreted as the #' *effective number of parameters*. In well behaving cases `p_loo < N` and #' `p_loo < p`, where `p` is the total number of parameters in the #' model. `p_loo > N` or `p_loo > p` indicates that the model has very #' weak predictive capability and may indicate a severe model misspecification.
#' See below for more on interpreting `p_loo` when there are warnings #' about high Pareto k diagnostic values. #' #' @section Pareto k estimates: #' #' The Pareto \eqn{k} estimate is a diagnostic for Pareto smoothed importance #' sampling (PSIS), which is used to compute components of `elpd_loo`. In #' importance-sampling LOO the full posterior distribution is used as the #' proposal distribution. The Pareto k diagnostic estimates how far an #' individual leave-one-out distribution is from the full distribution. If #' leaving out an observation changes the posterior too much then importance #' sampling is not able to give a reliable estimate. Pareto smoothing stabilizes #' importance sampling and guarantees a finite variance estimate at the #' cost of some bias. #' #' The diagnostic threshold for Pareto \eqn{k} depends on sample size #' \eqn{S} (sample size dependent threshold was introduced by Vehtari #' et al., 2024, and before that fixed thresholds of 0.5 and 0.7 were #' recommended). For simplicity, the `loo` package uses the nominal sample #' size \eqn{S} when computing the sample size specific #' threshold. This provides an optimistic threshold if the effective #' sample size is less than 2200, but even then if ESS/S > 1/2 the difference #' is usually negligible. Thinning of MCMC draws can be used to improve #' the ratio ESS/S. #' #' * If \eqn{k < \min(1 - 1 / \log_{10}(S), 0.7)}, where \eqn{S} is the #' sample size, the PSIS estimate and the corresponding Monte #' Carlo standard error estimate are reliable. #' #' * If \eqn{1 - 1 / \log_{10}(S) <= k < 0.7}, the PSIS estimate and the #' corresponding Monte Carlo standard error estimate are not #' reliable, but increasing the (effective) sample size \eqn{S} above #' 2200 may help (this will increase the sample size specific #' threshold \eqn{1 - 1 / \log_{10}(2200) > 0.7} and then the bias specific #' threshold 0.7 dominates). #' #' * If \eqn{0.7 <= k < 1}, the PSIS estimate and the corresponding Monte #' Carlo standard error have large bias and are not reliable. Increasing #' the sample size may reduce the variability in the \eqn{k} estimate, which #' may also result in a lower \eqn{k} estimate. #' #' * If \eqn{k \geq 1}{k >= 1}, the target distribution is estimated to #' have non-finite mean. The PSIS estimate and the corresponding Monte #' Carlo standard error are not well defined. Increasing the sample size #' may reduce the variability in the \eqn{k} estimate, which may also result in #' a lower \eqn{k} estimate. #' #' Pareto \eqn{k} is also useful as a measure of influence of an #' observation. Highly influential observations have high \eqn{k} #' values. Very high \eqn{k} values often indicate model #' misspecification, outliers or mistakes in data processing. See #' Section 6 of Gabry et al. (2019) for an example. #' #' \subsection{Interpreting `p_loo` when Pareto `k` is large}{ #' If \eqn{k > 0.7} then we can also look at #' the `p_loo` estimate for some additional information about the problem: #' #' * If `p_loo << p` (the total number of parameters in the model), #' then the model is likely to be misspecified. Posterior predictive checks #' (PPCs) are then likely to also detect the problem. Try using an overdispersed #' model, or add more structural information (nonlinearity, mixture model, #' etc.).
#' #' * If `p_loo < p` and the number of parameters `p` is relatively #' large compared to the number of observations (e.g., `p>N/5`), it is #' likely that the model is so flexible or the population prior so weak that it’s #' difficult to predict the left out observation (even for the true model). #' This happens, for example, in the simulated 8 schools (in VGG2017), random #' effect models with a few observations per random effect, and Gaussian #' processes and spatial models with short correlation lengths. #' #' * If `p_loo > p`, then the model is likely to be badly misspecified. #' If the number of parameters `p<<N`, then PPCs are also likely to detect the problem. #' See the case study at <https://avehtari.github.io/modelselection/roaches.html> for an example. #' If `p` is relatively large compared to the number of #' observations, say `p>N/5` (more accurately we should count number of #' observations influencing each parameter as in hierarchical models some groups #' may have few observations and other groups many), it is possible that PPCs won't #' detect the problem. #' } #' #' @section elpd_diff: #' `elpd_diff` is the difference in `elpd_loo` for two models. If more #' than two models are compared, the difference is computed relative to the #' model with highest `elpd_loo`. #' #' @section se_diff: #' #' The standard error of component-wise differences of elpd_loo (Eq 24 in #' VGG2017) between two models. This SE is *smaller* than the SE for #' individual models due to correlation (i.e., if some observations are easier #' and some more difficult to predict for all models). #' NULL loo/R/split_moment_matching.R0000644000176200001440000001401614641333357015766 0ustar liggesusers#' Split moment matching for efficient approximate leave-one-out cross-validation (LOO) #' #' A function that computes the split moment matching importance sampling loo. #' Takes in the moment matching total transformation, transforms only half #' of the draws, and computes a single elpd using multiple importance sampling. #' #' @param x A fitted model object. #' @param upars A matrix containing the model parameters in unconstrained space #' where they can have any real value. #' @param cov Logical; Indicate whether to match the covariance matrix of the #' samples or not. If `FALSE`, only the mean and marginal variances are #' matched. #' @param total_shift A vector representing the total shift made by the moment #' matching algorithm. #' @param total_scaling A vector representing the total scaling of marginal #' variance made by the moment matching algorithm. #' @param total_mapping A vector representing the total covariance #' transformation made by the moment matching algorithm. #' @param i Observation index. #' @param log_prob_upars A function that takes arguments `x` and `upars` and #' returns a matrix of log-posterior density values of the unconstrained #' posterior draws passed via `upars`. #' @param log_lik_i_upars A function that takes arguments `x`, `upars`, and `i` #' and returns a vector of log-likelihood draws of the `i`th observation based #' on the unconstrained posterior draws passed via `upars`. #' @param r_eff_i MCMC relative effective sample size of the `i`'th log #' likelihood draws. #' @template cores #' @template is_method #' @param ... Further arguments passed to the custom functions documented above. #' #' @return A list containing the updated log-importance weights and #' log-likelihood values. Also returns the updated MCMC effective sample size #' and the integrand-specific log-importance weights.
#' #' #' @seealso [loo()], [loo_moment_match()] #' @template moment-matching-references #' #' loo_moment_match_split <- function(x, upars, cov, total_shift, total_scaling, total_mapping, i, log_prob_upars, log_lik_i_upars, r_eff_i, cores, is_method, ...) { S <- dim(upars)[1] S_half <- as.integer(0.5 * S) mean_original <- colMeans(upars) # accumulated affine transformation upars_trans <- sweep(upars, 2, mean_original, "-") upars_trans <- sweep(upars_trans, 2, total_scaling, "*") if (cov) { upars_trans <- tcrossprod(upars_trans, total_mapping) } upars_trans <- sweep(upars_trans, 2, total_shift + mean_original, "+") attributes(upars_trans) <- attributes(upars) # inverse accumulated affine transformation upars_trans_inv <- sweep(upars, 2, mean_original, "-") if (cov) { upars_trans_inv <- tcrossprod(upars_trans_inv, solve(total_mapping)) } upars_trans_inv <- sweep(upars_trans_inv, 2, total_scaling, "/") upars_trans_inv <- sweep(upars_trans_inv, 2, mean_original - total_shift, "+") attributes(upars_trans_inv) <- attributes(upars) # first half of upars_trans_half are T(theta) # second half are theta upars_trans_half <- upars take <- seq_len(S_half) upars_trans_half[take, ] <- upars_trans[take, , drop = FALSE] # first half of upars_trans_half_inv are theta # second half are T^-1 (theta) upars_trans_half_inv <- upars take <- seq_len(S)[-seq_len(S_half)] upars_trans_half_inv[take, ] <- upars_trans_inv[take, , drop = FALSE] # compute log likelihoods and log probabilities log_prob_half_trans <- log_prob_upars(x, upars = upars_trans_half, ...) log_prob_half_trans_inv <- log_prob_upars(x, upars = upars_trans_half_inv, ...) log_liki_half <- log_lik_i_upars(x, upars = upars_trans_half, i = i, ...) # compute weights log_prob_half_trans_inv <- (log_prob_half_trans_inv - log(prod(total_scaling)) - log(det(total_mapping))) stable_S <- log_prob_half_trans > log_prob_half_trans_inv lwi_half <- -log_liki_half + log_prob_half_trans lwi_half[stable_S] <- lwi_half[stable_S] - (log_prob_half_trans[stable_S] + log1p(exp(log_prob_half_trans_inv[stable_S] - log_prob_half_trans[stable_S]))) lwi_half[!stable_S] <- lwi_half[!stable_S] - (log_prob_half_trans_inv[!stable_S] + log1p(exp(log_prob_half_trans[!stable_S] - log_prob_half_trans_inv[!stable_S]))) # lwi_half may have NaNs if computation involves -Inf + Inf # replace NaN log ratios with -Inf lr <- lwi_half lr[is.na(lr)] <- -Inf is_obj_half <- suppressWarnings(importance_sampling.default(lr, method = is_method, r_eff = r_eff_i, cores = cores)) lwi_half <- as.vector(weights(is_obj_half)) # lwi_half may have NaNs if computation involves -Inf + Inf # replace NaN log ratios with -Inf lr <- lwi_half + log_liki_half lr[is.na(lr)] <- -Inf is_obj_f_half <- suppressWarnings(importance_sampling.default(lr, method = is_method, r_eff = r_eff_i, cores = cores)) lwfi_half <- as.vector(weights(is_obj_f_half)) # relative_eff recomputation # currently ignores chain information # since we have two proposal distributions # compute S_eff separately from both and take the smaller take <- seq_len(S_half) log_liki_half_1 <- log_liki_half[take, drop = FALSE] dim(log_liki_half_1) <- c(length(take), 1, 1) take <- seq_len(S)[-seq_len(S_half)] log_liki_half_2 <- log_liki_half[take, drop = FALSE] dim(log_liki_half_2) <- c(length(take), 1, 1) r_eff_i1 <- loo::relative_eff(exp(log_liki_half_1), cores = cores) r_eff_i2 <- loo::relative_eff(exp(log_liki_half_2), cores = cores) r_eff_i <- min(r_eff_i1,r_eff_i2) list( lwi = lwi_half, lwfi = lwfi_half, log_liki = log_liki_half, r_eff_i = r_eff_i
) } loo/R/loo_compare.psis_loo_ss_list.R0000644000176200001440000001754115100712211017252 0ustar liggesusers#' Compare `psis_loo_ss` objects #' @noRd #' @param x A list with `psis_loo` objects. #' @param ... Currently ignored. #' @return A `compare.loo_ss` object. #' @author Mans Magnusson #' @export loo_compare.psis_loo_ss_list <- function(x, ...) { checkmate::assert_list(x, any.missing = FALSE, min.len = 1) for(i in seq_along(x)){ if (!inherits(x[[i]], "psis_loo_ss")) x[[i]] <- as.psis_loo_ss.psis_loo(x[[i]]) } loo_compare_checks.psis_loo_ss_list(x) comp <- loo_compare_matrix.psis_loo_ss_list(x) ord <- loo_compare_order(x) names(x) <- rownames(comp)[ord] rnms <- rownames(comp) elpd_diff_mat <- matrix(0, nrow = nrow(comp), ncol = 3, dimnames = list(rnms, c("elpd_diff", "se_diff", "subsampling_se_diff"))) for(i in 2:length(ord)){ elpd_diff_mat[i,] <- loo_compare_ss(ref_loo = x[ord[1]], compare_loo = x[ord[i]]) } comp <- cbind(elpd_diff_mat, comp) rownames(comp) <- rnms class(comp) <- c("compare.loo_ss", "compare.loo", class(comp)) return(comp) } #' Compare a reference loo object with a comaprison loo object #' @noRd #' @param ref_loo A named list with a `psis_loo_ss` object. #' @param compare_loo A named list with a `psis_loo_ss` object. #' @return A 1 by 3 elpd_diff estimation. loo_compare_ss <- function(ref_loo, compare_loo){ checkmate::assert_list(ref_loo, names = "named") checkmate::assert_list(compare_loo, names = "named") checkmate::assert_class(ref_loo[[1]], "psis_loo_ss") checkmate::assert_class(compare_loo[[1]], "psis_loo_ss") ref_idx <- obs_idx(ref_loo[[1]]) compare_idx <- obs_idx(compare_loo[[1]]) intersect_idx <- base::intersect(ref_idx, compare_idx) ref_subset_of_compare <- base::setequal(intersect_idx, ref_idx) compare_subset_of_ref <- base::setequal(intersect_idx, compare_idx) # Using HH estimation if (ref_loo[[1]]$loo_subsampling$estimator == "hh_pps" | compare_loo[[1]]$loo_subsampling$estimator == "hh_pps"){ warning("Hansen-Hurwitz estimator used. Naive diff SE is used.", call. = FALSE) return(loo_compare_ss_naive(ref_loo, compare_loo)) } # Same observations in both if (compare_subset_of_ref & ref_subset_of_compare){ return(loo_compare_ss_diff(ref_loo, compare_loo)) } # Use subset if (compare_subset_of_ref | ref_subset_of_compare){ if (compare_subset_of_ref) ref_loo[[1]] <- update(object = ref_loo[[1]], observations = compare_loo[[1]]) if (ref_subset_of_compare) compare_loo[[1]] <- update(compare_loo[[1]], observations = ref_loo[[1]]) message("Estimated elpd_diff using observations included in loo calculations for all models.") return(loo_compare_ss_diff(ref_loo, compare_loo)) } # If different samples if (!compare_subset_of_ref & !ref_subset_of_compare){ warning("Different subsamples in '", names(ref_loo), "' and '", names(compare_loo), "'. Naive diff SE is used.", call. 
= FALSE) return(loo_compare_ss_naive(ref_loo, compare_loo)) } } #' Compute a naive diff SE #' @noRd #' @inheritParams loo_compare_ss #' @return a 1 by 3 elpd_diff estimation loo_compare_ss_naive <- function(ref_loo, compare_loo){ checkmate::assert_list(ref_loo, names = "named") checkmate::assert_list(compare_loo, names = "named") checkmate::assert_class(ref_loo[[1]], "psis_loo_ss") checkmate::assert_class(compare_loo[[1]], "psis_loo_ss") elpd_loo_diff <- ref_loo[[1]]$estimates["elpd_loo","Estimate"] - compare_loo[[1]]$estimates["elpd_loo","Estimate"] elpd_loo_diff_se <- sqrt( (ref_loo[[1]]$estimates["elpd_loo","SE"])^2 + (compare_loo[[1]]$estimates["elpd_loo","SE"])^2) elpd_loo_diff_subsampling_se <- sqrt( (ref_loo[[1]]$estimates["elpd_loo","subsampling SE"])^2 + (compare_loo[[1]]$estimates["elpd_loo","subsampling SE"])^2) c(elpd_loo_diff, elpd_loo_diff_se, elpd_loo_diff_subsampling_se) } #' Compare a effective diff SE #' @noRd #' @inheritParams loo_compare_ss #' @return a 1 by 3 elpd_diff estimation loo_compare_ss_diff <- function(ref_loo, compare_loo){ checkmate::assert_list(ref_loo, names = "named") checkmate::assert_list(compare_loo, names = "named") checkmate::assert_class(ref_loo[[1]], "psis_loo_ss") checkmate::assert_class(compare_loo[[1]], "psis_loo_ss") checkmate::assert_true(identical(obs_idx(ref_loo[[1]]), obs_idx(compare_loo[[1]]))) # Assert not none as loo approximation checkmate::assert_true(ref_loo[[1]]$loo_subsampling$loo_approximation != "none") checkmate::assert_true(compare_loo[[1]]$loo_subsampling$loo_approximation != "none") diff_approx <- ref_loo[[1]]$loo_subsampling$elpd_loo_approx - compare_loo[[1]]$loo_subsampling$elpd_loo_approx diff_sample <- ref_loo[[1]]$pointwise[,"elpd_loo"] - compare_loo[[1]]$pointwise[,"elpd_loo"] est <- srs_diff_est(diff_approx, y = diff_sample, y_idx = ref_loo[[1]]$pointwise[,"idx"]) elpd_loo_diff <- est$y_hat elpd_loo_diff_se <- sqrt(est$hat_v_y) elpd_loo_diff_subsampling_se <- sqrt(est$v_y_hat) c(elpd_loo_diff, elpd_loo_diff_se, elpd_loo_diff_subsampling_se) } #' Check list of `psis_loo` objects #' @details Similar to `loo_compare_checks()` but checks dim size rather than #' pointwise dim since different pointwise sizes of `psis_loo_ss` will work. #' Can probably be removed by refactoring `loo_compare_checks()`. #' @noRd #' @inheritParams loo_compare_ss #' @return A 1 by 3 elpd_diff estimation. loo_compare_checks.psis_loo_ss_list <- function(loos) { ## errors if (length(loos) <= 1L) { stop("'loo_compare' requires at least two models.", call.=FALSE) } if (!all(sapply(loos, is.loo))) { stop("All inputs should have class 'loo'.", call.=FALSE) } Ns <- sapply(loos, function(x) x$loo_subsampling$data_dim[1]) if (!all(Ns == Ns[1L])) { stop("Not all models have the same number of data points.", call.=FALSE) } ## warnings yhash <- lapply(loos, attr, which = "yhash") yhash_ok <- sapply(yhash, function(x) { # ok only if all yhash are same (all NULL is ok) isTRUE(all.equal(x, yhash[[1]])) }) if (!all(yhash_ok)) { warning("Not all models have the same y variable. ('yhash' attributes do not match)", call. = FALSE) } if (all(sapply(loos, is.kfold))) { Ks <- unlist(lapply(loos, attr, which = "K")) if (!all(Ks == Ks[1])) { warning("Not all kfold objects have the same K value. ", "For a more accurate comparison use the same number of folds. ", call. = FALSE) } } else if (any(sapply(loos, is.kfold)) && any(sapply(loos, is.psis_loo))) { warning("Comparing LOO-CV to K-fold-CV. 
", "For a more accurate comparison use the same number of folds ", "or loo for all models compared.", call. = FALSE) } } #' @rdname loo_compare #' @export print.compare.loo_ss <- function(x, ..., digits = 1, simplify = TRUE) { xcopy <- x if (inherits(xcopy, "old_compare.loo")) { if (NCOL(xcopy) >= 2 && simplify) { patts <- "^elpd_|^se_diff|^p_|^waic$|^looic$" xcopy <- xcopy[, grepl(patts, colnames(xcopy))] } } else if (NCOL(xcopy) >= 2 && simplify) { xcopy <- xcopy[, c("elpd_diff", "se_diff", "subsampling_se_diff")] } print(.fr(xcopy, digits), quote = FALSE) invisible(x) } #' Compute comparison matrix for `psis_loo_ss` objects #' @noRd #' @keywords internal #' @param loos List of `psis_loo_ss` objects. #' @return A `compare.loo_ss` matrix. loo_compare_matrix.psis_loo_ss_list <- function(loos){ tmp <- sapply(loos, function(x) { est <- x$estimates setNames(c(est), nm = c(rownames(est), paste0("se_", rownames(est)), paste0("subsampling_se_", rownames(est)))) }) colnames(tmp) <- find_model_names(loos) rnms <- rownames(tmp) comp <- tmp ord <- loo_compare_order(loos) comp <- t(comp)[ord, ] patts <- c("elpd", "p_", "^waic$|^looic$", "se_waic$|se_looic$") col_ord <- unlist(sapply(patts, function(p) grep(p, colnames(comp))), use.names = FALSE) comp <- comp[, col_ord] comp } loo/R/psis_approximate_posterior.R0000644000176200001440000001066213575772017017107 0ustar liggesusers#' Diagnostics for Laplace and ADVI approximations and Laplace-loo and ADVI-loo #' #' @param log_p The log-posterior (target) evaluated at S samples from the #' proposal distribution (g). A vector of length S. #' @param log_g The log-density (proposal) evaluated at S samples from the #' proposal distribution (g). A vector of length S. #' @param log_q Deprecated argument name (the same as log_g). #' @param log_liks A log-likelihood matrix of size S * N, where N is the number #' of observations and S is the number of samples from q. See #' [loo.matrix()] for details. Default is `NULL`. Then only the #' posterior is evaluated using the k_hat diagnostic. #' @inheritParams loo #' #' @return #' If log likelihoods are supplied, the function returns a `"loo"` object, #' otherwise the function returns a `"psis"` object. 
#' #' @seealso [loo()] and [psis()] #' #' @template loo-and-psis-references #' #' @keywords internal #' psis_approximate_posterior <- function(log_p = NULL, log_g = NULL, log_liks = NULL, cores, save_psis, ..., log_q = NULL) { if (!is.null(log_q)) { .Deprecated(msg = "psis_approximate_posterior() argument log_q has been changed to log_g") log_g <- log_q } checkmate::assert_numeric(log_p, any.missing = FALSE, len = length(log_g), null.ok = FALSE) checkmate::assert_numeric(log_g, any.missing = FALSE, len = length(log_p), null.ok = FALSE) checkmate::assert_matrix(log_liks, null.ok = TRUE, nrows = length(log_p)) checkmate::assert_integerish(cores) checkmate::assert_flag(save_psis) if (is.null(log_liks)) { approx_correction <- log_p - log_g # Handle underflow/overflow approx_correction <- approx_correction - max(approx_correction) log_ratios <- matrix(approx_correction, ncol = 1) } else { log_ratios <- correct_log_ratios(log_ratios = -log_liks, log_p = log_p, log_g = log_g) } psis_out <- psis.matrix(log_ratios, cores = cores, r_eff = rep(1, ncol(log_ratios))) if (is.null(log_liks)) { return(psis_out) } pointwise <- pointwise_loo_calcs(log_liks, psis_out) importance_sampling_loo_object( pointwise = pointwise, diagnostics = psis_out$diagnostics, dims = dim(psis_out), is_method = "psis", is_object = if (save_psis) psis_out else NULL ) } #' Correct log ratios for posterior approximations #' #' @inheritParams psis_approximate_posterior #' @inheritParams ap_psis #' @noRd #' @keywords internal correct_log_ratios <- function(log_ratios, log_p, log_g) { approx_correction <- log_p - log_g log_ratios <- log_ratios + approx_correction # Handle underflow/overflow log_ratio_max <- apply(log_ratios, 2, max) log_ratios <- sweep(log_ratios, MARGIN = 2, STATS = log_ratio_max) log_ratios } #' Pareto smoothed importance sampling (PSIS) #' using approximate posteriors #' @inheritParams psis_approximate_posterior #' @param log_ratios The log-likelihood ratios (ie -log_liks) #' @param ... Currently not in use. ap_psis <- function(log_ratios, log_p, log_g, ...) { UseMethod("ap_psis") } #' @export #' @templateVar fn ap_psis #' @template array #' ap_psis.array <- function(log_ratios, log_p, log_g, ..., cores = getOption("mc.cores", 1)) { cores <- loo_cores(cores) stopifnot(length(dim(log_ratios)) == 3) log_ratios <- validate_ll(log_ratios) log_ratios <- llarray_to_matrix(log_ratios) r_eff <- prepare_psis_r_eff(r_eff, len = ncol(log_ratios)) ap_psis.matrix(log_ratios = log_ratios, log_p = log_p, log_g = log_g, cores = 1) } #' @export #' @templateVar fn ap_psis #' @template matrix #' ap_psis.matrix <- function(log_ratios, log_p, log_g, ..., cores = getOption("mc.cores", 1)) { checkmate::assert_numeric(log_p, len = nrow(log_ratios)) checkmate::assert_numeric(log_g, len = nrow(log_ratios)) cores <- loo_cores(cores) log_ratios <- validate_ll(log_ratios) log_ratios <- correct_log_ratios(log_ratios, log_p = log_p, log_g = log_g) do_psis(log_ratios, r_eff = rep(1, ncol(log_ratios)), cores = cores) } #' @export #' @templateVar fn ap_psis #' @template vector #' ap_psis.default <- function(log_ratios, log_p, log_g, ...) 
{ stopifnot(is.null(dim(log_ratios)) || length(dim(log_ratios)) == 1) dim(log_ratios) <- c(length(log_ratios), 1) warning("llfun values do not return a matrix, coerce to matrix") ap_psis.matrix(as.matrix(log_ratios), log_p, log_g, cores = 1) } loo/R/loo.R0000644000176200001440000005757615100205060012171 0ustar liggesusers#' Efficient approximate leave-one-out cross-validation (LOO) #' #' The `loo()` methods for arrays, matrices, and functions compute PSIS-LOO #' CV, efficient approximate leave-one-out (LOO) cross-validation for Bayesian #' models using Pareto smoothed importance sampling ([PSIS][psis()]). This is #' an implementation of the methods described in Vehtari, Gelman, and Gabry #' (2017) and Vehtari, Simpson, Gelman, Yao, and Gabry (2024). #' #' @export loo loo.array loo.matrix loo.function #' @param x A log-likelihood array, matrix, or function. The **Methods (by class)** #' section, below, has detailed descriptions of how to specify the inputs for #' each method. #' @param r_eff Vector of relative effective sample size estimates for the #' likelihood (`exp(log_lik)`) of each observation. This is related to #' the relative efficiency of estimating the normalizing term in #' self-normalized importance sampling when using posterior draws obtained #' with MCMC. If MCMC draws are used and `r_eff` is not provided then #' the reported PSIS effective sample sizes and Monte Carlo error estimates #' can be over-optimistic. If the posterior draws are (near) independent then #' `r_eff=1` can be used. `r_eff` has to be a scalar (same value is used #' for all observations) or a vector with length equal to the number of #' observations. The default value is 1. See the [relative_eff()] helper #' functions for help computing `r_eff`. #' @param save_psis Should the `psis` object created internally by `loo()` be #' saved in the returned object? The `loo()` function calls [psis()] #' internally but by default discards the (potentially large) `psis` object #' after using it to compute the LOO-CV summaries. Setting `save_psis=TRUE` #' will add a `psis_object` component to the list returned by `loo`. #' This is useful if you plan to use the [E_loo()] function to compute #' weighted expectations after running `loo`. Several functions in the #' \pkg{bayesplot} package also accept `psis` objects. #' @template cores #' @template is_method #' #' @details The `loo()` function is an S3 generic and methods are provided for #' 3-D pointwise log-likelihood arrays, pointwise log-likelihood matrices, and #' log-likelihood functions. The array and matrix methods are the most #' convenient, but for models fit to very large datasets the `loo.function()` #' method is more memory efficient and may be preferable. #' #' @section Defining `loo()` methods in a package: Package developers can define #' `loo()` methods for fitted models objects. See the example `loo.stanfit()` #' method in the **Examples** section below for an example of defining a #' method that calls `loo.array()`. The `loo.stanreg()` method in the #' **rstanarm** package is an example of defining a method that calls #' `loo.function()`. #' #' @return The `loo()` methods return a named list with class #' `c("psis_loo", "loo")` and components: #' \describe{ #' \item{`estimates`}{ #' A matrix with two columns (`Estimate`, `SE`) and three rows (`elpd_loo`, #' `p_loo`, `looic`). 
This contains point estimates and standard errors of the #' expected log pointwise predictive density ([`elpd_loo`][loo-glossary]), the #' effective number of parameters ([`p_loo`][loo-glossary]) and the LOO #' information criterion `looic` (which is just `-2 * elpd_loo`, i.e., #' converted to deviance scale). #' } #' #' \item{`pointwise`}{ #' A matrix with five columns (and number of rows equal to the number of #' observations) containing the pointwise contributions of the measures #' (`elpd_loo`, `mcse_elpd_loo`, `p_loo`, `looic`, `influence_pareto_k`). #' in addition to the three measures in `estimates`, we also report #' pointwise values of the Monte Carlo standard error of [`elpd_loo`][loo-glossary] #' ([`mcse_elpd_loo`][loo-glossary]), and statistics describing the influence of #' each observation on the posterior distribution (`influence_pareto_k`). #' These are the estimates of the shape parameter \eqn{k} of the #' generalized Pareto fit to the importance ratios for each leave-one-out #' distribution (see the [pareto-k-diagnostic] page for details). #' } #' #' \item{`diagnostics`}{ #' A named list containing two vectors: #' * `pareto_k`: Importance sampling reliability diagnostics. By default, #' these are equal to the `influence_pareto_k` in `pointwise`. #' Some algorithms can improve importance sampling reliability and #' modify these diagnostics. See the [pareto-k-diagnostic] page for details. #' * `n_eff`: PSIS effective sample size estimates. #' } #' #' \item{`psis_object`}{ #' This component will be `NULL` unless the `save_psis` argument is set to #' `TRUE` when calling `loo()`. In that case `psis_object` will be the object #' of class `"psis"` that is created when the `loo()` function calls [psis()] #' internally to do the PSIS procedure. #' } #' } #' #' @seealso #' * The __loo__ package [vignettes](https://mc-stan.org/loo/articles/index.html) #' for demonstrations. #' * The [FAQ page](https://mc-stan.org/loo/articles/online-only/faq.html) on #' the __loo__ website for answers to frequently asked questions. #' * [psis()] for the underlying Pareto Smoothed Importance Sampling (PSIS) #' procedure used in the LOO-CV approximation. #' * [pareto-k-diagnostic] for convenience functions for looking at diagnostics. #' * [loo_compare()] for model comparison. 
#' #' @template loo-and-psis-references #' #' @examples #' ### Array and matrix methods (using example objects included with loo package) #' # Array method #' LLarr <- example_loglik_array() #' rel_n_eff <- relative_eff(exp(LLarr)) #' loo(LLarr, r_eff = rel_n_eff, cores = 2) #' #' # Matrix method #' LLmat <- example_loglik_matrix() #' rel_n_eff <- relative_eff(exp(LLmat), chain_id = rep(1:2, each = 500)) #' loo(LLmat, r_eff = rel_n_eff, cores = 2) #' #' #' ### Using log-likelihood function instead of array or matrix #' set.seed(124) #' #' # Simulate data and draw from posterior #' N <- 50; K <- 10; S <- 100; a0 <- 3; b0 <- 2 #' p <- rbeta(1, a0, b0) #' y <- rbinom(N, size = K, prob = p) #' a <- a0 + sum(y); b <- b0 + N * K - sum(y) #' fake_posterior <- as.matrix(rbeta(S, a, b)) #' dim(fake_posterior) # S x 1 #' fake_data <- data.frame(y,K) #' dim(fake_data) # N x 2 #' #' llfun <- function(data_i, draws) { #' # each time called internally within loo the arguments will be equal to: #' # data_i: ith row of fake_data (fake_data[i,, drop=FALSE]) #' # draws: entire fake_posterior matrix #' dbinom(data_i$y, size = data_i$K, prob = draws, log = TRUE) #' } #' #' # Use the loo_i function to check that llfun works on a single observation #' # before running on all obs. For example, using the 3rd obs in the data: #' loo_3 <- loo_i(i = 3, llfun = llfun, data = fake_data, draws = fake_posterior) #' print(loo_3$pointwise[, "elpd_loo"]) #' #' # Use loo.function method (default r_eff=1 is used as this posterior not obtained via MCMC) #' loo_with_fn <- loo(llfun, draws = fake_posterior, data = fake_data) #' #' # If we look at the elpd_loo contribution from the 3rd obs it should be the #' # same as what we got above with the loo_i function and i=3: #' print(loo_with_fn$pointwise[3, "elpd_loo"]) #' print(loo_3$pointwise[, "elpd_loo"]) #' #' # Check that the loo.matrix method gives same answer as loo.function method #' log_lik_matrix <- sapply(1:N, function(i) { #' llfun(data_i = fake_data[i,, drop=FALSE], draws = fake_posterior) #' }) #' loo_with_mat <- loo(log_lik_matrix) #' all.equal(loo_with_mat$estimates, loo_with_fn$estimates) # should be TRUE! #' #' #' \dontrun{ #' ### For package developers: defining loo methods #' #' # An example of a possible loo method for 'stanfit' objects (rstan package). #' # A similar method is included in the rstan package. #' # In order for users to be able to call loo(stanfit) instead of #' # loo.stanfit(stanfit) the NAMESPACE needs to be handled appropriately #' # (roxygen2 and devtools packages are good for that). #' # #' loo.stanfit <- #' function(x, #' pars = "log_lik", #' ..., #' save_psis = FALSE, #' cores = getOption("mc.cores", 1)) { #' stopifnot(length(pars) == 1L) #' LLarray <- loo::extract_log_lik(stanfit = x, #' parameter_name = pars, #' merge_chains = FALSE) #' r_eff <- loo::relative_eff(x = exp(LLarray), cores = cores) #' loo::loo.array(LLarray, #' r_eff = r_eff, #' cores = cores, #' save_psis = save_psis) #' } #' } #' #' loo <- function(x, ...) 
{ UseMethod("loo") } #' @export #' @templateVar fn loo #' @template array #' loo.array <- function(x, ..., r_eff = 1, save_psis = FALSE, cores = getOption("mc.cores", 1), is_method = c("psis", "tis", "sis")) { is_method <- match.arg(is_method) psis_out <- importance_sampling.array(log_ratios = -x, r_eff = r_eff, cores = cores, method = is_method) ll <- llarray_to_matrix(x) pointwise <- pointwise_loo_calcs(ll, psis_out) importance_sampling_loo_object( pointwise = pointwise, diagnostics = psis_out$diagnostics, dims = dim(psis_out), is_method = is_method, is_object = if (save_psis) psis_out else NULL ) } #' @export #' @templateVar fn loo #' @template matrix #' loo.matrix <- function(x, ..., r_eff = 1, save_psis = FALSE, cores = getOption("mc.cores", 1), is_method = c("psis", "tis", "sis")) { is_method <- match.arg(is_method) psis_out <- importance_sampling.matrix( log_ratios = -x, r_eff = r_eff, cores = cores, method = is_method ) pointwise <- pointwise_loo_calcs(x, psis_out) importance_sampling_loo_object( pointwise = pointwise, diagnostics = psis_out$diagnostics, dims = dim(psis_out), is_method = is_method, is_object = if (save_psis) psis_out else NULL ) } #' @export #' @templateVar fn loo #' @template function #' @param data,draws,... For the `loo.function()` method and the `loo_i()` #' function, these are the data, posterior draws, and other arguments to pass #' to the log-likelihood function. See the **Methods (by class)** section #' below for details on how to specify these arguments. #' loo.function <- function(x, ..., data = NULL, draws = NULL, r_eff = 1, save_psis = FALSE, cores = getOption("mc.cores", 1), is_method = c("psis", "tis", "sis")) { is_method <- match.arg(is_method) cores <- loo_cores(cores) stopifnot(is.data.frame(data) || is.matrix(data), !is.null(draws)) assert_importance_sampling_method_is_implemented(is_method) .llfun <- validate_llfun(x) N <- dim(data)[1] r_eff <- prepare_psis_r_eff(r_eff, len = N) psis_list <- parallel_importance_sampling_list( N = N, .loo_i = .loo_i, .llfun = .llfun, data = data, draws = draws, r_eff = r_eff, save_psis = save_psis, cores = cores, method = is_method, ... ) pointwise <- lapply(psis_list, "[[", "pointwise") if (save_psis) { psis_object_list <- lapply(psis_list, "[[", "psis_object") psis_out <- list2importance_sampling(psis_object_list) diagnostics <- psis_out$diagnostics } else { diagnostics_list <- lapply(psis_list, "[[", "diagnostics") diagnostics <- list( pareto_k = psis_apply(diagnostics_list, "pareto_k"), n_eff = psis_apply(diagnostics_list, "n_eff"), r_eff = psis_apply(diagnostics_list, "r_eff") ) } importance_sampling_loo_object( pointwise = do.call(rbind, pointwise), diagnostics = diagnostics, dims = c(attr(psis_list[[1]], "S"), N), is_method = is_method, is_object = if (save_psis) psis_out else NULL ) } #' @description The `loo_i()` function enables testing log-likelihood #' functions for use with the `loo.function()` method. #' #' @rdname loo #' @export #' #' @param i For `loo_i()`, an integer in `1:N`. #' @param llfun For `loo_i()`, the same as `x` for the #' `loo.function()` method. A log-likelihood function as described in the #' **Methods (by class)** section. #' #' @return The `loo_i()` function returns a named list with components #' `pointwise` and `diagnostics`. These components have the same #' structure as the `pointwise` and `diagnostics` components of the #' object returned by `loo()` except they contain results for only a single #' observation. 
#' loo_i <- function(i, llfun, ..., data = NULL, draws = NULL, r_eff = 1, is_method = "psis" ) { stopifnot( i == as.integer(i), is.function(llfun) || is.character(llfun), is.data.frame(data) || is.matrix(data), i <= dim(data)[1], !is.null(draws), is_method %in% implemented_is_methods() ) .loo_i( i = as.integer(i), llfun = match.fun(llfun), data = data, draws = draws, r_eff = r_eff[i], save_psis = FALSE, is_method = is_method, ... ) } # Function that is passed to the FUN argument of lapply, mclapply, or parLapply # for the loo.function method. The arguments and return value are the same as # the ones documented above for the user-facing loo_i function. .loo_i <- function(i, llfun, ..., data, draws, r_eff = 1, save_psis = FALSE, is_method) { if (!is.null(r_eff)) { r_eff <- r_eff[i] } d_i <- data[i, , drop = FALSE] ll_i <- llfun(data_i = d_i, draws = draws, ...) if (!is.matrix(ll_i)) { ll_i <- as.matrix(ll_i) } psis_out <- importance_sampling.matrix( log_ratios = -ll_i, r_eff = r_eff, cores = 1, method = is_method ) structure( list( pointwise = pointwise_loo_calcs(ll_i, psis_out), diagnostics = psis_out$diagnostics, psis_object = if (save_psis) psis_out else NULL ), S = dim(psis_out)[1], N = 1 ) } #' @export dim.loo <- function(x) { attr(x, "dims") } #' @rdname loo #' @export is.loo <- function(x) { inherits(x, "loo") } #' @export dim.psis_loo <- function(x) { attr(x, "dims") } #' @rdname loo #' @export is.psis_loo <- function(x) { inherits(x, "psis_loo") && is.loo(x) } # internal ---------------------------------------------------------------- #' Compute pointwise elpd_loo, p_loo, looic from log lik matrix and #' psis log weights #' #' @noRd #' @param ll Log-likelihood matrix. #' @param psis_object The object returned by `psis()`. #' @return Named list with pointwise elpd_loo, mcse_elpd_loo, p_loo, looic, #' and influence_pareto_k. #' pointwise_loo_calcs <- function(ll, psis_object) { if (!is.matrix(ll)) { ll <- as.matrix(ll) } lw <- weights(psis_object, normalize = TRUE, log = TRUE) elpd_loo <- matrixStats::colLogSumExps(ll + lw) lpd <- matrixStats::colLogSumExps(ll) - log(nrow(ll)) # colLogMeanExps p_loo <- lpd - elpd_loo mcse_elpd_loo <- mcse_elpd(ll, lw, E_elpd = elpd_loo, r_eff = relative_eff(psis_object)) looic <- -2 * elpd_loo influence_pareto_k <- psis_object$diagnostics$pareto_k cbind(elpd_loo, mcse_elpd_loo, p_loo, looic, influence_pareto_k) } #' Structure the object returned by the loo methods #' #' @noRd #' @param pointwise Matrix containing columns elpd_loo, mcse_elpd_loo, p_loo, #' looic, influence_pareto_k. #' @param diagnostics Named list containing vector `pareto_k` and vector `n_eff`. #' @param dims Log likelihood matrix dimensions (attribute of `"psis"` object). #' @template is_method #' @param is_object An object of class `"psis"/"tis"/"sis"`, as returned by the `psis()`/`tis()`/`sis()` function. #' @return A `'importance_sampling_loo'` object as described in the Value section of the [loo()] #' function documentation. 
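# A sketch, using the packaged example matrix, of the calculation performed by
# pointwise_loo_calcs() above: the pointwise elpd_loo is colLogSumExps(ll + lw)
# with normalized PSIS log-weights.
library(loo)
LLmat <- example_loglik_matrix()
r_eff <- relative_eff(exp(LLmat), chain_id = rep(1:2, each = 500))
psis1 <- psis(-LLmat, r_eff = r_eff)
lw <- weights(psis1, normalize = TRUE, log = TRUE)
elpd_by_hand <- matrixStats::colLogSumExps(LLmat + lw)
loo1 <- loo(LLmat, r_eff = r_eff)
all.equal(unname(elpd_by_hand), unname(loo1$pointwise[, "elpd_loo"]))  # expect TRUE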
#' importance_sampling_loo_object <- function(pointwise, diagnostics, dims, is_method, is_object = NULL) { if (!is.matrix(pointwise)) stop("Internal error ('pointwise' must be a matrix)") if (!is.list(diagnostics)) stop("Internal error ('diagnostics' must be a list)") assert_importance_sampling_method_is_implemented(is_method) cols_to_summarize <- !(colnames(pointwise) %in% c("mcse_elpd_loo", "influence_pareto_k")) estimates <- table_of_estimates(pointwise[, cols_to_summarize, drop=FALSE]) out <- nlist(estimates, pointwise, diagnostics) if (is.null(is_object)) { out[paste0(is_method, "_object")] <- list(NULL) } else { out[[paste0(is_method, "_object")]] <- is_object } # maintain backwards compatibility old_nms <- c("elpd_loo", "p_loo", "looic", "se_elpd_loo", "se_p_loo", "se_looic") out <- c(out, setNames(as.list(estimates), old_nms)) structure( out, dims = dims, class = c(paste0(is_method, "_loo"), "importance_sampling_loo", "loo") ) } #' Compute Monte Carlo standard error for ELPD #' #' @noRd #' @param ll Log-likelihood matrix. #' @param E_elpd elpd_loo column of pointwise matrix. #' @param psis_object Object returned by [psis()]. #' @param n_samples Deprecated #' @return Vector of standard error estimates. #' mcse_elpd <- function(ll, lw, E_elpd, r_eff, n_samples = NULL) { lik <- exp(ll) w2 <- exp(lw)^2 E_epd <- exp(E_elpd) if (length(r_eff) == 1 && !is.null(ncol(ll))) { r_eff <- rep(r_eff, ncol(ll)) } var_elpd <- vapply( seq_len(ncol(w2)), FUN.VALUE = numeric(1), FUN = function(i) { # Variance in linear scale # Equation (6) in Vehtari et al. (2024) var_epd_i <- sum(w2[, i] * (lik[, i] - E_epd[i]) ^ 2) / r_eff[i] # Compute variance in log scale by match the variance of a # log-normal approximation # https://en.wikipedia.org/wiki/Log-normal_distribution#Arithmetic_moments log(1 + var_epd_i / E_epd[i]^2) } ) sqrt(var_elpd) } #' Warning message if r_eff not specified #' @noRd throw_loo_r_eff_warning <- function() { warning( "Relative effective sample sizes ('r_eff' argument) not specified.\n", "For models fit with MCMC, the reported PSIS ESS and \n", "MCSE estimates can be over-optimistic.", call. = FALSE ) } #' Combine many psis objects into a single psis object #' #' @noRd #' @param objects List of `"psis"` objects, each for a single observation. #' @return A single `"psis"` object. #' list2importance_sampling <- function(objects) { log_weights <- sapply(objects, "[[", "log_weights") diagnostics <- lapply(objects, "[[", "diagnostics") method <- psis_apply(objects, "method", fun = "attr", fun_val = character(1)) methods <- unique(method) if (length(methods) == 1) { method <- methods classes <- c(methods, "importance_sampling", "list") } else { classes <- c("importance_sampling", "list") } structure( list( log_weights = log_weights, diagnostics = list( pareto_k = psis_apply(diagnostics, item = "pareto_k"), n_eff = psis_apply(diagnostics, item = "n_eff"), r_eff = psis_apply(diagnostics, item = "r_eff") ) ), norm_const_log = psis_apply(objects, "norm_const_log", fun = "attr"), tail_len = psis_apply(objects, "tail_len", fun = "attr"), r_eff = psis_apply(objects, "r_eff", fun = "attr"), dims = dim(log_weights), method = method, class = classes ) } #' Extractor methods #' #' These are only defined in order to deprecate with a warning (rather than #' remove and break backwards compatibility) the old way of accessing the point #' estimates in a `"psis_loo"` or `"psis"` object. The new way as of #' v2.0.0 is to get them from the `"estimates"` component of the object. 
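# A sketch of the old (deprecated) and new access patterns described above.
library(loo)
LLarr <- example_loglik_array()
loo1 <- loo(LLarr, r_eff = relative_eff(exp(LLarr)))
# Recommended as of v2.0.0: read from the 'estimates' component
loo1$estimates["elpd_loo", "Estimate"]
loo1$estimates["elpd_loo", "SE"]
# Deprecated: still works, but `$.loo` emits a deprecation warning
suppressWarnings(loo1$elpd_loo)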
#' #' @name old-extractors #' @keywords internal #' @param x,i,exact,name See \link{Extract}. #' NULL #' @rdname old-extractors #' @keywords internal #' @export `[.loo` <- function(x, i) { flags <- c("elpd_loo", "se_elpd_loo", "p_loo", "se_p_loo", "looic", "se_looic", "elpd_waic", "se_elpd_waic", "p_waic", "se_p_waic", "waic", "se_waic") if (is.character(i)) { needs_warning <- which(flags == i) if (length(needs_warning)) { warning( "Accessing ", flags[needs_warning], " using '[' is deprecated ", "and will be removed in a future release. ", "Please extract the ", flags[needs_warning], " estimate from the 'estimates' component instead.", call. = FALSE ) } } NextMethod() } #' @rdname old-extractors #' @keywords internal #' @export `[[.loo` <- function(x, i, exact=TRUE) { flags <- c("elpd_loo", "se_elpd_loo", "p_loo", "se_p_loo", "looic", "se_looic", "elpd_waic", "se_elpd_waic", "p_waic", "se_p_waic", "waic", "se_waic") if (is.character(i)) { needs_warning <- which(flags == i) if (length(needs_warning)) { warning( "Accessing ", flags[needs_warning], " using '[[' is deprecated ", "and will be removed in a future release. ", "Please extract the ", flags[needs_warning], " estimate from the 'estimates' component instead.", call. = FALSE ) } } NextMethod() } #' @rdname old-extractors #' @keywords internal #' @export #' `$.loo` <- function(x, name) { flags <- c("elpd_loo", "se_elpd_loo", "p_loo", "se_p_loo", "looic", "se_looic", "elpd_waic", "se_elpd_waic", "p_waic", "se_p_waic", "waic", "se_waic") needs_warning <- which(flags == name) if (length(needs_warning)) { warning( "Accessing ", flags[needs_warning], " using '$' is deprecated ", "and will be removed in a future release. ", "Please extract the ", flags[needs_warning], " estimate from the 'estimates' component instead.", call. = FALSE ) } NextMethod() } #' Parallel psis list computations #' #' @details Refactored function to handle parallel computations #' for psis_list #' #' @keywords internal #' @inheritParams loo.function #' @param .loo_i The function used to compute individual loo contributions. #' @param .llfun See `llfun` in [loo.function()]. #' @param N The total number of observations (i.e. `nrow(data)`). #' @param method See `is_method` for [loo()] #' parallel_psis_list <- function(N, .loo_i, .llfun, data, draws, r_eff, save_psis, cores, ...){ parallel_importance_sampling_list(N, .loo_i, .llfun, data, draws, r_eff, save_psis, cores, method = "psis", ...) } #' @rdname parallel_psis_list parallel_importance_sampling_list <- function(N, .loo_i, .llfun, data, draws, r_eff, save_psis, cores, method, ...){ if (cores == 1) { psis_list <- lapply( X = seq_len(N), FUN = .loo_i, llfun = .llfun, data = data, draws = draws, r_eff = r_eff, save_psis = save_psis, is_method = method, ... ) } else { if (!os_is_windows()) { # On Mac or Linux use mclapply() for multiple cores psis_list <- parallel::mclapply( mc.cores = cores, X = seq_len(N), FUN = .loo_i, llfun = .llfun, data = data, draws = draws, r_eff = r_eff, save_psis = save_psis, is_method = method, ... ) } else { # On Windows use makePSOCKcluster() and parLapply() for multiple cores cl <- parallel::makePSOCKcluster(cores) on.exit(parallel::stopCluster(cl)) psis_list <- parallel::parLapply( cl = cl, X = seq_len(N), fun = .loo_i, llfun = .llfun, data = data, draws = draws, r_eff = r_eff, save_psis = save_psis, is_method = method, ... ) } } } loo/R/loo_predictive_metric.R0000644000176200001440000001463714566461605015771 0ustar liggesusers#' Estimate leave-one-out predictive performance.. 
#' #' The `loo_predictive_metric()` function computes estimates of leave-one-out #' predictive metrics given a set of predictions and observations. Currently #' supported metrics are mean absolute error, mean squared error and root mean #' squared error for continuous predictions and accuracy and balanced accuracy #' for binary classification. Predictions are passed on to the [E_loo()] #' function, so this function assumes that the PSIS approximation is working #' well. #' #' @param x A numeric matrix of predictions. #' @param y A numeric vector of observations. Length should be equal to the #' number of rows in `x`. #' @param log_lik A matrix of pointwise log-likelihoods. Should be of same #' dimension as `x`. #' @param metric The type of predictive metric to be used. Currently #' supported options are `"mae"`, `"rmse"` and `"mse"` for regression and #' for binary classification `"acc"` and `"balanced_acc"`. #' \describe{ #' \item{`"mae"`}{ #' Mean absolute error. #' } #' \item{`"mse"`}{ #' Mean squared error. #' } #' \item{`"rmse"`}{ #' Root mean squared error, given by as the square root of `MSE`. #' } #' \item{`"acc"`}{ #' The proportion of predictions indicating the correct outcome. #' } #' \item{`"balanced_acc"`}{ #' Balanced accuracy is given by the average of true positive and true #' negative rates. #' } #' } #' @param r_eff A Vector of relative effective sample size estimates containing #' one element per observation. See [psis()] for more details. #' @param cores The number of cores to use for parallelization of `[psis()]`. #' See [psis()] for details. #' @param ... Additional arguments passed on to [E_loo()] #' #' @return A list with the following components: #' \describe{ #' \item{`estimate`}{ #' Estimate of the given metric. #' } #' \item{`se`}{ #' Standard error of the estimate. #' } #' } #' @export #' #' @examples #' \donttest{ #' if (requireNamespace("rstanarm", quietly = TRUE)) { #' # Use rstanarm package to quickly fit a model and get both a log-likelihood #' # matrix and draws from the posterior predictive distribution #' library("rstanarm") #' #' # data from help("lm") #' ctl <- c(4.17,5.58,5.18,6.11,4.50,4.61,5.17,4.53,5.33,5.14) #' trt <- c(4.81,4.17,4.41,3.59,5.87,3.83,6.03,4.89,4.32,4.69) #' d <- data.frame( #' weight = c(ctl, trt), #' group = gl(2, 10, 20, labels = c("Ctl","Trt")) #' ) #' fit <- stan_glm(weight ~ group, data = d, refresh = 0) #' ll <- log_lik(fit) #' r_eff <- relative_eff(exp(-ll), chain_id = rep(1:4, each = 1000)) #' #' mu_pred <- posterior_epred(fit) #' # Leave-one-out mean absolute error of predictions #' mae <- loo_predictive_metric(x = mu_pred, y = d$weight, log_lik = ll, #' pred_error = 'mae', r_eff = r_eff) #' # Leave-one-out 90%-quantile of mean absolute error #' mae_90q <- loo_predictive_metric(x = mu_pred, y = d$weight, log_lik = ll, #' pred_error = 'mae', r_eff = r_eff, #' type = 'quantile', probs = 0.9) #' } #' } loo_predictive_metric <- function(x, ...) 
{ UseMethod("loo_predictive_metric") } #' @rdname loo_predictive_metric #' @export loo_predictive_metric.matrix <- function(x, y, log_lik, ..., metric = c("mae", "rmse", "mse", "acc", "balanced_acc"), r_eff = 1, cores = getOption("mc.cores", 1)) { stopifnot( is.numeric(x), is.numeric(y), identical(ncol(x), length(y)), identical(dim(x), dim(log_lik)) ) metric <- match.arg(metric) psis_object <- psis(-log_lik, r_eff = r_eff, cores = cores) pred_loo <- E_loo(x, psis_object = psis_object, log_ratios = -log_lik, ...)$value predictive_metric_fun <- .loo_predictive_metric_fun(metric) predictive_metric_fun(y, pred_loo) } # ----------------------------- Internals ----------------------------- #' Select predictive metric function based on user's `metric` argument #' #' @noRd #' @param metric The metric used. #' @return The function used to compute predictive error or accuracy specified #' by the argument `metric`. .loo_predictive_metric_fun <- function(metric) { switch( metric, 'mae' = .mae, 'rmse' = .rmse, 'mse' = .mse, 'acc' = .accuracy, 'balanced_acc' = .balanced_accuracy ) } #' Mean absolute error #' #' @noRd #' @param y A vector of observed values #' @param yhat A vector of predictions .mae <-function(y, yhat) { stopifnot(length(y) == length(yhat)) n <- length(y) e <- abs(y - yhat) list(estimate = mean(e), se = sd(e) / sqrt(n)) } #' Mean squared error #' #' @noRd #' @param y A vector of observed values #' @param yhat A vector of predictions .mse <-function(y, yhat) { stopifnot(length(y) == length(yhat)) n <- length(y) e <- (y - yhat)^2 list(estimate = mean(e), se = sd(e) / sqrt(n)) } #' Root mean squared error #' #' @noRd #' @param y A vector of observed values #' @param yhat A vector of predictions .rmse <-function(y, yhat) { est <- .mse(y, yhat) mean_mse <- est$estimate var_mse <- est$se^2 var_rmse <- var_mse / mean_mse / 4 # Comes from the first order Taylor approx. return(list(estimate = sqrt(mean_mse), se = sqrt(var_rmse))) } #' Classification accuracy #' #' @noRd #' @param y A vector of observed values #' @param yhat A vector of predictions .accuracy <- function(y, yhat) { stopifnot(length(y) == length(yhat), all(y <= 1 & y >= 0), all(yhat <= 1 & yhat >= 0)) n <- length(y) yhat <- as.integer(yhat > 0.5) acc <- as.integer(yhat == y) est <- mean(acc) list(estimate = est, se = sqrt(est * (1-est) / n) ) } #' Balanced classification accuracy #' #' @noRd #' @param y A vector of observed values #' @param yhat A vector of predictions .balanced_accuracy <- function(y, yhat) { stopifnot(length(y) == length(yhat), all(y <= 1 & y >= 0), all(yhat <= 1 & yhat >= 0)) n <- length(y) yhat <- as.integer(yhat > 0.5) mask <- y == 0 tn <- mean(yhat[mask] == y[mask]) # True negatives tp <- mean(yhat[!mask] == y[!mask]) # True positives bls_acc <- (tp + tn) / 2 # This approximation has quite large bias for small samples bls_acc_var <- (tp * (1 - tp) + tn * (1 - tn)) / 4 list(estimate = bls_acc, se = sqrt(bls_acc_var / n)) } loo/R/loo_moment_matching.R0000644000176200001440000005612614703765324015436 0ustar liggesusers#' Moment matching for efficient approximate leave-one-out cross-validation (LOO) #' #' Moment matching algorithm for updating a loo object when Pareto k estimates #' are large. #' #' @export loo_moment_match loo_moment_match.default #' @param x A fitted model object. #' @param loo A loo object to be modified. #' @param post_draws A function the takes `x` as the first argument and returns #' a matrix of posterior draws of the model parameters. 
#' @param log_lik_i A function that takes `x` and `i` and returns a matrix (one #' column per chain) or a vector (all chains stacked) of log-likelihood draws #' of the `i`th observation based on the model `x`. If the draws are obtained #' using MCMC, the matrix with MCMC chains separated is preferred. #' @param unconstrain_pars A function that takes arguments `x`, and `pars` and #' returns posterior draws on the unconstrained space based on the posterior #' draws on the constrained space passed via `pars`. #' @param log_prob_upars A function that takes arguments `x` and `upars` and #' returns a matrix of log-posterior density values of the unconstrained #' posterior draws passed via `upars`. #' @param log_lik_i_upars A function that takes arguments `x`, `upars`, and `i` #' and returns a vector of log-likelihood draws of the `i`th observation based #' on the unconstrained posterior draws passed via `upars`. #' @param max_iters Maximum number of moment matching iterations. Usually this #' does not need to be modified. If the maximum number of iterations is #' reached, there will be a warning, and increasing `max_iters` may improve #' accuracy. #' @param k_threshold Threshold value for Pareto k values above which the moment #' matching algorithm is used. The default value is `min(1 - 1/log10(S), 0.7)`, #' where `S` is the sample size. #' @param split Logical; Indicate whether to do the split transformation or not #' at the end of moment matching for each LOO fold. #' @param cov Logical; Indicate whether to match the covariance matrix of the #' samples or not. If `FALSE`, only the mean and marginal variances are #' matched. #' @template cores #' @param ... Further arguments passed to the custom functions documented above. #' #' @return The `loo_moment_match()` methods return an updated `loo` object. The #' structure of the updated `loo` object is similar, but the method also #' stores the original Pareto k diagnostic values in the diagnostics field. #' #' @details The `loo_moment_match()` function is an S3 generic and we provide a #' default method that takes as arguments user-specified functions #' `post_draws`, `log_lik_i`, `unconstrain_pars`, `log_prob_upars`, and #' `log_lik_i_upars`. All of these functions should take `...`. as an argument #' in addition to those specified for each function. #' #' @seealso [loo()], [loo_moment_match_split()] #' @template moment-matching-references #' #' @examples #' # See the vignette for loo_moment_match() #' @export loo_moment_match <- function(x, ...) { UseMethod("loo_moment_match") } #' @describeIn loo_moment_match A default method that takes as arguments a #' user-specified model object `x`, a `loo` object and user-specified #' functions `post_draws`, `log_lik_i`, `unconstrain_pars`, `log_prob_upars`, #' and `log_lik_i_upars`. #' @export loo_moment_match.default <- function(x, loo, post_draws, log_lik_i, unconstrain_pars, log_prob_upars, log_lik_i_upars, max_iters = 30L, k_threshold = NULL, split = TRUE, cov = TRUE, cores = getOption("mc.cores", 1), ...) 
{ # input checks checkmate::assertClass(loo,classes = "loo") checkmate::assertFunction(post_draws) checkmate::assertFunction(log_lik_i) checkmate::assertFunction(unconstrain_pars) checkmate::assertFunction(log_prob_upars) checkmate::assertFunction(log_lik_i_upars) checkmate::assertNumber(max_iters) checkmate::assertNumber(k_threshold, null.ok=TRUE) checkmate::assertLogical(split) checkmate::assertLogical(cov) checkmate::assertNumber(cores) if ("psis_loo" %in% class(loo)) { is_method <- "psis" } else { stop("loo_moment_match currently supports only the \"psis\" importance sampling class.") } S <- dim(loo)[1] N <- dim(loo)[2] if (is.null(k_threshold)) { k_threshold <- ps_khat_threshold(S) } pars <- post_draws(x, ...) # transform the model parameters to unconstrained space upars <- unconstrain_pars(x, pars = pars, ...) # number of parameters in the **parameters** block only npars <- dim(upars)[2] # if more parameters than samples, do not do Cholesky transformation cov <- cov && S >= 10 * npars # compute log-probabilities of the original parameter values orig_log_prob <- log_prob_upars(x, upars = upars, ...) # loop over all observations whose Pareto k is high ks <- loo$diagnostics$pareto_k kfs <- rep(0,N) I <- which(ks > k_threshold) loo_moment_match_i_fun <- function(i) { loo_moment_match_i(i = i, x = x, log_lik_i = log_lik_i, unconstrain_pars = unconstrain_pars, log_prob_upars = log_prob_upars, log_lik_i_upars = log_lik_i_upars, max_iters = max_iters, k_threshold = k_threshold, split = split, cov = cov, N = N, S = S, upars = upars, orig_log_prob = orig_log_prob, k = ks[i], is_method = is_method, npars = npars, ...) } if (cores == 1) { mm_list <- lapply(X = I, FUN = function(i) loo_moment_match_i_fun(i)) } else { if (!os_is_windows()) { mm_list <- parallel::mclapply(X = I, mc.cores = cores, FUN = function(i) loo_moment_match_i_fun(i)) } else { cl <- parallel::makePSOCKcluster(cores) on.exit(parallel::stopCluster(cl)) mm_list <- parallel::parLapply(cl = cl, X = I, fun = function(i) loo_moment_match_i_fun(i)) } } # update results for (ii in seq_along(I)) { i <- mm_list[[ii]]$i loo$pointwise[i, "elpd_loo"] <- mm_list[[ii]]$elpd_loo_i loo$pointwise[i, "p_loo"] <- mm_list[[ii]]$p_loo loo$pointwise[i, "mcse_elpd_loo"] <- mm_list[[ii]]$mcse_elpd_loo loo$pointwise[i, "looic"] <- mm_list[[ii]]$looic loo$diagnostics$pareto_k[i] <- mm_list[[ii]]$k loo$diagnostics$n_eff[i] <- mm_list[[ii]]$n_eff kfs[i] <- mm_list[[ii]]$kf if (!is.null(loo$psis_object)) { loo$psis_object$log_weights[, i] <- mm_list[[ii]]$lwi } } if (!is.null(loo$psis_object)) { attr(loo$psis_object, "norm_const_log") <- matrixStats::colLogSumExps(loo$psis_object$log_weights) loo$psis_object$diagnostics <- loo$diagnostics } # combined estimates cols_to_summarize <- !(colnames(loo$pointwise) %in% c("mcse_elpd_loo", "influence_pareto_k")) loo$estimates <- table_of_estimates(loo$pointwise[, cols_to_summarize, drop = FALSE]) # these will be deprecated at some point loo$elpd_loo <- loo$estimates["elpd_loo","Estimate"] loo$p_loo <- loo$estimates["p_loo","Estimate"] loo$looic <- loo$estimates["looic","Estimate"] loo$se_elpd_loo <- loo$estimates["elpd_loo","SE"] loo$se_p_loo <- loo$estimates["p_loo","SE"] loo$se_looic <- loo$estimates["looic","SE"] # Warn if some Pareto ks are still high throw_pareto_warnings(loo$diagnostics$pareto_k, k_threshold) # if we don't split, accuracy may be compromised if (!split) { throw_large_kf_warning(kfs, k_threshold) } loo } # Internal functions --------------- #' Do moment matching for a single observation. 
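# The default k_threshold used above follows the documented formula
# min(1 - 1/log10(S), 0.7); a quick look at how it varies with the number of
# posterior draws S.
S <- c(100, 1000, 4000, 10000)
data.frame(S = S, k_threshold = pmin(1 - 1 / log10(S), 0.7))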
#' #' @noRd #' @param i observation number. #' @param x A fitted model object. #' @param log_lik_i A function that takes `x` and `i` and returns a matrix (one #' column per chain) or a vector (all chains stacked) of log-likelihood draws #' of the `i`th observation based on the model `x`. If the draws are obtained #' using MCMC, the matrix with MCMC chains separated is preferred. #' @param unconstrain_pars A function that takes arguments `x`, and `pars` and #' returns posterior draws on the unconstrained space based on the posterior #' draws on the constrained space passed via `pars`. #' @param log_prob_upars A function that takes arguments `x` and `upars` and #' returns a matrix of log-posterior density values of the unconstrained #' posterior draws passed via `upars`. #' @param log_lik_i_upars A function that takes arguments `x`, `upars`, and `i` #' and returns a vector of log-likelihood draws of the `i`th observation based #' on the unconstrained posterior draws passed via `upars`. #' @param max_iters Maximum number of moment matching iterations. Usually this #' does not need to be modified. If the maximum number of iterations is #' reached, there will be a warning, and increasing `max_iters` may improve #' accuracy. #' @param k_threshold Threshold value for Pareto k values above which the moment #' matching algorithm is used. The default value is 0.5. #' @param split Logical; Indicate whether to do the split transformation or not #' at the end of moment matching for each LOO fold. #' @param cov Logical; Indicate whether to match the covariance matrix of the #' samples or not. If `FALSE`, only the mean and marginal variances are #' matched. #' @param N Number of observations. #' @param S number of MCMC draws. #' @param upars A matrix representing a sample of vector-valued parameters in #' the unconstrained space. #' @param orig_log_prob log probability densities of the original draws from the #' model `x`. #' @param k Pareto k value before moment matching #' @template is_method #' @param npars Number of parameters in the model #' @param ... Further arguments passed to the custom functions documented above. #' @return List with the updated elpd values and diagnostics #' loo_moment_match_i <- function(i, x, log_lik_i, unconstrain_pars, log_prob_upars, log_lik_i_upars, max_iters, k_threshold, split, cov, N, S, upars, orig_log_prob, k, is_method, npars, ...) { # initialize values for this LOO-fold uparsi <- upars ki <- k kfi <- 0 log_liki <- log_lik_i(x, i, ...) S_per_chain <- NROW(log_liki) N_chains <- NCOL(log_liki) dim(log_liki) <- c(S_per_chain, N_chains, 1) r_eff_i <- loo::relative_eff(exp(log_liki), cores = 1) dim(log_liki) <- NULL lpd <- matrixStats::logSumExp(log_liki) - log(length(log_liki)) is_obj <- suppressWarnings(importance_sampling.default(-log_liki, method = is_method, r_eff = r_eff_i, cores = 1)) lwi <- as.vector(weights(is_obj)) lwfi <- rep(-matrixStats::logSumExp(rep(0, S)),S) # initialize objects that keep track of the total transformation total_shift <- rep(0, npars) total_scaling <- rep(1, npars) total_mapping <- diag(npars) # try several transformations one by one # if one does not work, do not apply it and try another one # to accept the transformation, Pareto k needs to improve # when transformation succeeds, start again from the first one iterind <- 1 while (iterind <= max_iters && ki > k_threshold) { if (iterind == max_iters) { throw_moment_match_max_iters_warning() } # 1. 
match means trans <- shift(x, uparsi, lwi) # gather updated quantities quantities_i <- try(update_quantities_i(x, trans$upars, i = i, orig_log_prob = orig_log_prob, log_prob_upars = log_prob_upars, log_lik_i_upars = log_lik_i_upars, r_eff_i = r_eff_i, cores = 1, is_method = is_method, ...) ) if (inherits(quantities_i, "try-error")) { # Stan log prob caused an exception probably due to under- or # overflow of parameters to invalid values break } if (quantities_i$ki < ki) { uparsi <- trans$upars total_shift <- total_shift + trans$shift lwi <- quantities_i$lwi lwfi <- quantities_i$lwfi ki <- quantities_i$ki kfi <- quantities_i$kfi log_liki <- quantities_i$log_liki iterind <- iterind + 1 next } # 2. match means and marginal variances trans <- shift_and_scale(x, uparsi, lwi) # gather updated quantities quantities_i <- try(update_quantities_i(x, trans$upars, i = i, orig_log_prob = orig_log_prob, log_prob_upars = log_prob_upars, log_lik_i_upars = log_lik_i_upars, r_eff_i = r_eff_i, cores = 1, is_method = is_method, ...) ) if (inherits(quantities_i, "try-error")) { # Stan log prob caused an exception probably due to under- or # overflow of parameters to invalid values break } if (quantities_i$ki < ki) { uparsi <- trans$upars total_shift <- total_shift + trans$shift total_scaling <- total_scaling * trans$scaling lwi <- quantities_i$lwi lwfi <- quantities_i$lwfi ki <- quantities_i$ki kfi <- quantities_i$kfi log_liki <- quantities_i$log_liki iterind <- iterind + 1 next } # 3. match means and covariances if (cov) { trans <- shift_and_cov(x, uparsi, lwi) # gather updated quantities quantities_i <- try(update_quantities_i(x, trans$upars, i = i, orig_log_prob = orig_log_prob, log_prob_upars = log_prob_upars, log_lik_i_upars = log_lik_i_upars, r_eff_i = r_eff_i, cores = 1, is_method = is_method, ...) ) if (inherits(quantities_i, "try-error")) { # Stan log prob caused an exception probably due to under- or # overflow of parameters to invalid values break } if (quantities_i$ki < ki) { uparsi <- trans$upars total_shift <- total_shift + trans$shift total_mapping <- trans$mapping %*% total_mapping lwi <- quantities_i$lwi lwfi <- quantities_i$lwfi ki <- quantities_i$ki kfi <- quantities_i$kfi log_liki <- quantities_i$log_liki iterind <- iterind + 1 next } } # none of the transformations improved khat # so there is no need to try further break } # transformations are now done # if we don't do split transform, or # if no transformations were successful # stop and collect values if (split && (iterind > 1)) { # compute split transformation split_obj <- loo_moment_match_split( x, upars, cov, total_shift, total_scaling, total_mapping, i, log_prob_upars = log_prob_upars, log_lik_i_upars = log_lik_i_upars, cores = 1, r_eff_i = r_eff_i, is_method = is_method, ... ) log_liki <- split_obj$log_liki lwi <- split_obj$lwi lwfi <- split_obj$lwfi r_eff_i <- split_obj$r_eff_i } else { dim(log_liki) <- c(S_per_chain, N_chains, 1) r_eff_i <- loo::relative_eff(exp(log_liki), cores = 1) dim(log_liki) <- NULL } # pointwise estimates elpd_loo_i <- matrixStats::logSumExp(log_liki + lwi) mcse_elpd_loo <- mcse_elpd( ll = as.matrix(log_liki), lw = as.matrix(lwi), E_elpd = exp(elpd_loo_i), r_eff = r_eff_i ) list(elpd_loo_i = elpd_loo_i, p_loo = lpd - elpd_loo_i, mcse_elpd_loo = mcse_elpd_loo, looic = -2 * elpd_loo_i, k = ki, kf = kfi, n_eff = min(1.0 / sum(exp(2 * lwi)), 1.0 / sum(exp(2 * lwfi))) * r_eff_i, lwi = lwi, i = i) } #' Update the importance weights, Pareto diagnostic and log-likelihood #' for observation `i` based on model `x`. 
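# A toy illustration (made-up draws and weights) of the first transformation
# tried in the loop above: shifting the draws so their sample mean matches the
# importance-weighted mean.
set.seed(1)
upars <- matrix(rnorm(200), nrow = 100, ncol = 2)   # made-up unconstrained draws
lw_raw <- upars[, 1]                                # made-up raw log importance weights
lwi <- lw_raw - matrixStats::logSumExp(lw_raw)      # self-normalize on the log scale
shift_vec <- colSums(exp(lwi) * upars) - colMeans(upars)
upars_shifted <- sweep(upars, 2, shift_vec, "+")
all.equal(colMeans(upars_shifted), colSums(exp(lwi) * upars))  # weighted mean matched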
#' #' @noRd #' @param x A fitted model object. #' @param upars A matrix representing a sample of vector-valued parameters in #' the unconstrained space. #' @param i observation number. #' @param orig_log_prob log probability densities of the original draws from #' the model `x`. #' @param log_prob_upars A function that takes arguments `x` and #' `upars` and returns a matrix of log-posterior density values of the #' unconstrained posterior draws passed via `upars`. #' @param log_lik_i_upars A function that takes arguments `x`, `upars`, #' and `i` and returns a vector of log-likelihood draws of the `i`th #' observation based on the unconstrained posterior draws passed via #' `upars`. #' @param r_eff_i MCMC effective sample size divided by the total sample size #' for 1/exp(log_ratios) for observation i. #' @template is_method #' @return List with the updated importance weights, Pareto diagnostics and #' log-likelihood values. #' update_quantities_i <- function(x, upars, i, orig_log_prob, log_prob_upars, log_lik_i_upars, r_eff_i, is_method, ...) { log_prob_new <- log_prob_upars(x, upars = upars, ...) log_liki_new <- log_lik_i_upars(x, upars = upars, i = i, ...) # compute new log importance weights # If log_liki_new and log_prob_new both have same element as Inf, # replace the log ratio with -Inf lr <- -log_liki_new + log_prob_new - orig_log_prob lr[is.na(lr)] <- -Inf is_obj_new <- suppressWarnings(importance_sampling.default(lr, method = is_method, r_eff = r_eff_i, cores = 1)) lwi_new <- as.vector(weights(is_obj_new)) ki_new <- is_obj_new$diagnostics$pareto_k is_obj_f_new <- suppressWarnings(importance_sampling.default(log_prob_new - orig_log_prob, method = is_method, r_eff = r_eff_i, cores = 1)) lwfi_new <- as.vector(weights(is_obj_f_new)) kfi_new <- is_obj_f_new$diagnostics$pareto_k # gather results list( lwi = lwi_new, lwfi = lwfi_new, ki = ki_new, kfi = kfi_new, log_liki = log_liki_new ) } #' Shift a matrix of parameters to their weighted mean. #' Also calls update_quantities_i which updates the importance weights based on #' the supplied model object. #' #' @noRd #' @param x A fitted model object. #' @param upars A matrix representing a sample of vector-valued parameters in #' the unconstrained space #' @param lwi A vector representing the log-weight of each parameter #' @return List with the shift that was performed, and the new parameter matrix. #' shift <- function(x, upars, lwi) { # compute moments using log weights mean_original <- colMeans(upars) mean_weighted <- colSums(exp(lwi) * upars) shift <- mean_weighted - mean_original # transform posterior draws upars_new <- sweep(upars, 2, shift, "+") list( upars = upars_new, shift = shift ) } #' Shift a matrix of parameters to their weighted mean and scale the marginal #' variances to match the weighted marginal variances. Also calls #' update_quantities_i which updates the importance weights based on #' the supplied model object. #' #' @noRd #' @param x A fitted model object. #' @param upars A matrix representing a sample of vector-valued parameters in #' the unconstrained space #' @param lwi A vector representing the log-weight of each parameter #' @return List with the shift and scaling that were performed, and the new #' parameter matrix. 
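# A toy sketch (made-up draws and weights) of the scaling factor documented
# above and computed by shift_and_scale() below: the square root of the ratio
# of the importance-weighted marginal variances to the unweighted ones.
set.seed(2)
S <- 500
upars <- matrix(rnorm(2 * S), nrow = S)                 # made-up unconstrained draws
lwi <- upars[, 2] - matrixStats::logSumExp(upars[, 2])  # made-up normalized log-weights
m_w <- colSums(exp(lwi) * upars)                        # weighted means
v_w <- (colSums(exp(lwi) * upars^2) - m_w^2) * S / (S - 1)  # weighted variances
scaling <- sqrt(v_w / matrixStats::colVars(upars))      # marginal rescaling factors
scaling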
#' #' shift_and_scale <- function(x, upars, lwi) { # compute moments using log weights S <- dim(upars)[1] mean_original <- colMeans(upars) mean_weighted <- colSums(exp(lwi) * upars) shift <- mean_weighted - mean_original mii <- exp(lwi)* upars^2 mii <- colSums(mii) - mean_weighted^2 mii <- mii*S/(S-1) scaling <- sqrt(mii / matrixStats::colVars(upars)) # transform posterior draws upars_new <- sweep(upars, 2, mean_original, "-") upars_new <- sweep(upars_new, 2, scaling, "*") upars_new <- sweep(upars_new, 2, mean_weighted, "+") list( upars = upars_new, shift = shift, scaling = scaling ) } #' Shift a matrix of parameters to their weighted mean and scale the covariance #' to match the weighted covariance. #' Also calls update_quantities_i which updates the importance weights based on #' the supplied model object. #' #' @noRd #' @param x A fitted model object. #' @param upars A matrix representing a sample of vector-valued parameters in #' the unconstrained space #' @param lwi A vector representing the log-weight of each parameter #' @return List with the shift and mapping that were performed, and the new #' parameter matrix. #' shift_and_cov <- function(x, upars, lwi, ...) { # compute moments using log weights mean_original <- colMeans(upars) mean_weighted <- colSums(exp(lwi) * upars) shift <- mean_weighted - mean_original covv <- stats::cov(upars) wcovv <- stats::cov.wt(upars, wt = exp(lwi))$cov chol1 <- tryCatch( { chol(wcovv) }, error = function(cond) { return(NULL) } ) if (is.null(chol1)) { mapping <- diag(length(mean_original)) } else { chol2 <- chol(covv) mapping <- t(chol1) %*% solve(t(chol2)) } # transform posterior draws upars_new <- sweep(upars, 2, mean_original, "-") upars_new <- tcrossprod(upars_new, mapping) upars_new <- sweep(upars_new, 2, mean_weighted, "+") colnames(upars_new) <- colnames(upars) list( upars = upars_new, shift = shift, mapping = mapping ) } #' Warning message if max_iters is reached #' @noRd throw_moment_match_max_iters_warning <- function() { warning( "The maximum number of moment matching iterations ('max_iters' argument) was reached.\n", "Increasing the value may improve accuracy.", call. = FALSE ) } #' Warning message if not using split transformation and accuracy is #' compromised #' @noRd throw_large_kf_warning <- function(kf, k_threshold) { if (any(kf > k_threshold)) { warning( "The accuracy of self-normalized importance sampling may be bad.\n", "Setting the argument 'split' to 'TRUE' will likely improve accuracy.", call. = FALSE ) } } loo/R/loo_approximate_posterior.R0000644000176200001440000001613014566461605016715 0ustar liggesusers#' Efficient approximate leave-one-out cross-validation (LOO) for posterior #' approximations #' #' @param x A log-likelihood array, matrix, or function. #' The **Methods (by class)** section, below, has detailed descriptions of how #' to specify the inputs for each method. #' @param save_psis Should the `"psis"` object created internally by #' `loo_approximate_posterior()` be saved in the returned object? See #' [loo()] for details. #' @template cores #' @inheritParams psis_approximate_posterior #' #' @details The `loo_approximate_posterior()` function is an S3 generic and #' methods are provided for 3-D pointwise log-likelihood arrays, pointwise #' log-likelihood matrices, and log-likelihood functions. The implementation #' works for posterior approximations where it is possible to compute the log #' density for the posterior approximation. 
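# A toy sketch of the matrix-method call signature described above. The
# densities below are made up purely to show the shapes of `log_p` and `log_g`
# (one value per draw); they are not a meaningful posterior approximation.
library(loo)
set.seed(3)
S <- 1000; N <- 20
draws <- rnorm(S, 0, 1.2)                      # draws from a made-up approximation g
log_g <- dnorm(draws, 0, 1.2, log = TRUE)      # log density of the approximation
log_p <- dnorm(draws, 0, 1.0, log = TRUE)      # made-up target posterior log density
y <- rnorm(N)
ll <- sapply(y, function(y_i) dnorm(y_i, mean = draws, sd = 1, log = TRUE))  # S x N
loo_ap <- suppressWarnings(loo_approximate_posterior(ll, log_p = log_p, log_g = log_g))
loo_ap$estimates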
#' #' @return The `loo_approximate_posterior()` methods return a named list with #' class `c("psis_loo_ap", "psis_loo", "loo")`. It has the same structure #' as the objects returned by [loo()] but with the additional slot: #' \describe{ #' \item{`posterior_approximation`}{ #' A list with two vectors, `log_p` and `log_g` of the same length #' containing the posterior density and the approximation density #' for the individual draws. #' } #' } #' #' @seealso [loo()], [psis()], [loo_compare()] #' @template loo-large-data-references #' #' @export loo_approximate_posterior #' @export loo_approximate_posterior.array loo_approximate_posterior.matrix loo_approximate_posterior.function #' loo_approximate_posterior <- function(x, log_p, log_g, ...) { UseMethod("loo_approximate_posterior") } #' @export #' @templateVar fn loo_approximate_posterior #' @template array loo_approximate_posterior.array <- function(x, log_p, log_g, ..., save_psis = FALSE, cores = getOption("mc.cores", 1)) { checkmate::assert_flag(save_psis) checkmate::assert_int(cores) checkmate::assert_matrix(log_p, mode = "numeric", nrows = dim(x)[1], ncols = dim(x)[2]) checkmate::assert_matrix(log_g, mode = "numeric", nrows = nrow(log_p), ncols = ncol(log_p)) ll <- llarray_to_matrix(x) log_p <- as.vector(log_p) log_g <- as.vector(log_g) loo_approximate_posterior.matrix( ll, log_p = log_p, log_g = log_g, ..., save_psis = save_psis, cores = cores ) } #' @export #' @templateVar fn loo_approximate_posterior #' @template matrix loo_approximate_posterior.matrix <- function(x, log_p, log_g, ..., save_psis = FALSE, cores = getOption("mc.cores", 1)) { checkmate::assert_flag(save_psis) checkmate::assert_int(cores) checkmate::assert_numeric(log_p, len = nrow(x)) checkmate::assert_null(dim(log_p)) checkmate::assert_numeric(log_g, len = length(log_p)) checkmate::assert_null(dim(log_g)) ap_psis <- psis_approximate_posterior( log_p = log_p, log_g = log_g, log_liks = x, ..., cores = cores, save_psis = save_psis ) ap_psis$approximate_posterior <- list(log_p = log_p, log_g = log_g) class(ap_psis) <- c("psis_loo_ap", class(ap_psis)) assert_psis_loo_ap(ap_psis) ap_psis } #' @export #' @templateVar fn loo_approximate_posterior #' @template function #' @param data,draws,... For the `loo_approximate_posterior.function()` method, #' these are the data, posterior draws, and other arguments to pass to the #' log-likelihood function. See the **Methods (by class)** section below for #' details on how to specify these arguments. #' loo_approximate_posterior.function <- function(x, ..., data = NULL, draws = NULL, log_p = NULL, log_g = NULL, save_psis = FALSE, cores = getOption("mc.cores", 1)) { checkmate::assert_numeric(log_p, len = length(log_g)) checkmate::assert_numeric(log_g, len = length(log_p)) cores <- loo_cores(cores) stopifnot(is.data.frame(data) || is.matrix(data), !is.null(draws)) .llfun <- validate_llfun(x) N <- dim(data)[1] psis_list <- parallel_psis_list(N = N, .loo_i = .loo_ap_i, .llfun = .llfun, data = data, draws = draws, r_eff = 1, # r_eff is ignored save_psis = save_psis, log_p = log_p, log_g = log_g, cores = cores, ...) 
pointwise <- lapply(psis_list, "[[", "pointwise") if (save_psis) { psis_object_list <- lapply(psis_list, "[[", "psis_object") psis_out <- list2importance_sampling(psis_object_list) diagnostics <- psis_out$diagnostics } else { diagnostics_list <- lapply(psis_list, "[[", "diagnostics") diagnostics <- list( pareto_k = psis_apply(diagnostics_list, "pareto_k"), n_eff = psis_apply(diagnostics_list, "n_eff") ) } ap_psis <- importance_sampling_loo_object( pointwise = do.call(rbind, pointwise), diagnostics = diagnostics, dims = c(attr(psis_list[[1]], "S"), N), is_method = "psis", is_object = if (save_psis) psis_out else NULL ) ap_psis$approximate_posterior <- list(log_p = log_p, log_g = log_g) class(ap_psis) <- c("psis_loo_ap", class(ap_psis)) assert_psis_loo_ap(ap_psis) ap_psis } # Function that is passed to the FUN argument of lapply, mclapply, or parLapply # for the loo_approximate_posterior.function method. The arguments and return # value are the same as the ones documented for the user-facing loo_i function. .loo_ap_i <- function(i, llfun, ..., data, draws, log_p, log_g, r_eff = 1, save_psis = FALSE, is_method) { if (is_method != "psis") stop(is_method, " not implemented for aploo.") d_i <- data[i, , drop = FALSE] ll_i <- llfun(data_i = d_i, draws = draws, ...) if (!is.matrix(ll_i)) { ll_i <- as.matrix(ll_i) } psis_out <- ap_psis(log_ratios = -ll_i, log_p = log_p, log_g = log_g, cores = 1) structure( list( pointwise = pointwise_loo_calcs(ll_i, psis_out), diagnostics = psis_out$diagnostics, psis_object = if (save_psis) psis_out else NULL ), S = dim(psis_out)[1], N = 1 ) } assert_psis_loo_ap <- function(x) { checkmate::assert_class(x, "psis_loo_ap") checkmate::assert_names(names(x), must.include = c("estimates", "pointwise", "diagnostics", "psis_object", "approximate_posterior")) checkmate::assert_names(names(x$approximate_posterior), must.include = c("log_p", "log_g")) checkmate::assert_numeric(x$approximate_posterior$log_p, len = length(x$approximate_posterior$log_g), any.missing = FALSE) checkmate::assert_numeric(x$approximate_posterior$log_g, len = length(x$approximate_posterior$log_p), any.missing = FALSE) } loo/R/psislw.R0000644000176200001440000001400013575772017012721 0ustar liggesusers#' Pareto smoothed importance sampling (deprecated, old version) #' #' As of version `2.0.0` this function is **deprecated**. Please use the #' [psis()] function for the new PSIS algorithm. #' #' @export #' @param lw A matrix or vector of log weights. For computing LOO, `lw = #' -log_lik`, the *negative* of an \eqn{S} (simulations) by \eqn{N} (data #' points) pointwise log-likelihood matrix. #' @param wcp The proportion of importance weights to use for the generalized #' Pareto fit. The `100*wcp`\% largest weights are used as the sample #' from which to estimate the parameters of the generalized Pareto #' distribution. #' @param wtrunc For truncating very large weights to \eqn{S}^`wtrunc`. Set #' to zero for no truncation. #' @param cores The number of cores to use for parallelization. This defaults to #' the option `mc.cores` which can be set for an entire R session by #' `options(mc.cores = NUMBER)`, the old option `loo.cores` is now #' deprecated but will be given precedence over `mc.cores` until it is #' removed. **As of version 2.0.0, the default is now 1 core if #' `mc.cores` is not set, but we recommend using as many (or close to as #' many) cores as possible.** #' @param llfun,llargs See [loo.function()]. #' @param ... Ignored when `psislw()` is called directly. 
The `...` is #' only used internally when `psislw()` is called by the [loo()] #' function. #' #' @return A named list with components `lw_smooth` (modified log weights) and #' `pareto_k` (estimated generalized Pareto shape parameter(s) k). #' #' @seealso [pareto-k-diagnostic] for PSIS diagnostics. #' #' @template loo-and-psis-references #' #' @importFrom parallel mclapply makePSOCKcluster stopCluster parLapply #' psislw <- function(lw, wcp = 0.2, wtrunc = 3/4, cores = getOption("mc.cores", 1), llfun = NULL, llargs = NULL, ...) { .Deprecated("psis") cores <- loo_cores(cores) .psis <- function(lw_i) { x <- lw_i - max(lw_i) cutoff <- lw_cutpoint(x, wcp, MIN_CUTOFF) above_cut <- x > cutoff x_body <- x[!above_cut] x_tail <- x[above_cut] tail_len <- length(x_tail) if (tail_len < MIN_TAIL_LENGTH || all(x_tail == x_tail[1])) { if (all(x_tail == x_tail[1])) warning( "All tail values are the same. ", "Weights are truncated but not smoothed.", call. = FALSE ) else if (tail_len < MIN_TAIL_LENGTH) warning( "Too few tail samples to fit generalized Pareto distribution.\n", "Weights are truncated but not smoothed.", call. = FALSE ) x_new <- x k <- Inf } else { # store order of tail samples, fit gPd to the right tail samples, compute # order statistics for the fit, remap back to the original order, join # body and gPd smoothed tail tail_ord <- order(x_tail) exp_cutoff <- exp(cutoff) fit <- gpdfit(exp(x_tail) - exp_cutoff, wip=FALSE, min_grid_pts = 80) k <- fit$k sigma <- fit$sigma prb <- (seq_len(tail_len) - 0.5) / tail_len qq <- qgpd(prb, k, sigma) + exp_cutoff smoothed_tail <- rep.int(0, tail_len) smoothed_tail[tail_ord] <- log(qq) x_new <- x x_new[!above_cut] <- x_body x_new[above_cut] <- smoothed_tail } # truncate (if wtrunc > 0) and renormalize, # return log weights and pareto k lw_new <- lw_normalize(lw_truncate(x_new, wtrunc)) nlist(lw_new, k) } .psis_loop <- function(i) { if (LL_FUN) { ll_i <- llfun(i = i, data = llargs$data[i,, drop=FALSE], draws = llargs$draws) lw_i <- -1 * ll_i } else { lw_i <- lw[, i] ll_i <- -1 * lw_i } psis <- .psis(lw_i) if (FROM_LOO) nlist(lse = logSumExp(ll_i + psis$lw_new), k = psis$k) else psis } # minimal cutoff value. there must be at least 5 log-weights larger than this # in order to fit the gPd to the tail MIN_CUTOFF <- -700 MIN_TAIL_LENGTH <- 5 dots <- list(...) 
FROM_LOO <- if ("COMPUTE_LOOS" %in% names(dots)) dots$COMPUTE_LOOS else FALSE if (!missing(lw)) { if (!is.matrix(lw)) lw <- as.matrix(lw) N <- ncol(lw) LL_FUN <- FALSE } else { if (is.null(llfun) || is.null(llargs)) stop("Either 'lw' or 'llfun' and 'llargs' must be specified.") N <- llargs$N LL_FUN <- TRUE } if (cores == 1) { # don't call functions from parallel package if cores=1 out <- lapply(X = 1:N, FUN = .psis_loop) } else { # parallelize if (.Platform$OS.type != "windows") { out <- mclapply(X = 1:N, FUN = .psis_loop, mc.cores = cores) } else { # nocov start cl <- makePSOCKcluster(cores) on.exit(stopCluster(cl)) out <- parLapply(cl, X = 1:N, fun = .psis_loop) # nocov end } } pareto_k <- vapply(out, "[[", 2L, FUN.VALUE = numeric(1)) psislw_warnings(pareto_k) if (FROM_LOO) { loos <- vapply(out, "[[", 1L, FUN.VALUE = numeric(1)) nlist(loos, pareto_k) } else { funval <- if (LL_FUN) llargs$S else nrow(lw) lw_smooth <- vapply(out, "[[", 1L, FUN.VALUE = numeric(funval)) out <- nlist(lw_smooth, pareto_k) class(out) <- c("psis", "list") return(out) } } # internal ---------------------------------------------------------------- lw_cutpoint <- function(y, wcp, min_cut) { if (min_cut < log(.Machine$double.xmin)) min_cut <- -700 cp <- quantile(y, 1 - wcp, names = FALSE) max(cp, min_cut) } lw_truncate <- function(y, wtrunc) { if (wtrunc == 0) return(y) logS <- log(length(y)) lwtrunc <- wtrunc * logS - logS + logSumExp(y) y[y > lwtrunc] <- lwtrunc y } lw_normalize <- function(y) { y - logSumExp(y) } # warnings about pareto k values ------------------------------------------ psislw_warnings <- function(k) { if (any(k > 0.7)) { .warn( "Some Pareto k diagnostic values are too high. ", .k_help() ) } else if (any(k > 0.5)) { .warn( "Some Pareto k diagnostic values are slightly high. ", .k_help() ) } } loo/R/datasets.R0000644000176200001440000000330314411555606013205 0ustar liggesusers#' Datasets for loo examples and vignettes #' #' Small datasets for use in **loo** examples and vignettes. The `Kline` #' and `milk` datasets are also included in the **rethinking** package #' (McElreath, 2016a), but we include them here as **rethinking** is not #' on CRAN. #' #' @name loo-datasets #' @aliases Kline milk voice voice_loo #' #' @details #' Currently the data sets included are: #' * `Kline`: #' Small dataset from Kline and Boyd (2010) on tool complexity and demography #' in Oceanic islands societies. This data is discussed in detail in #' McElreath (2016a,2016b). [(Link to variable descriptions)](https://www.rdocumentation.org/packages/rethinking/versions/1.59/topics/Kline) #' * `milk`: #' Small dataset from Hinde and Milligan (2011) on primate milk #' composition.This data is discussed in detail in McElreath (2016a,2016b). #' [(Link to variable descriptions)](https://www.rdocumentation.org/packages/rethinking/versions/1.59/topics/milk) #' * `voice`: #' Voice rehabilitation data from Tsanas et al. (2014). #' @references #' Hinde and Milligan. 2011. *Evolutionary Anthropology* 20:9-23. #' #' Kline, M.A. and R. Boyd. 2010. *Proc R Soc B* 277:2559-2564. #' #' McElreath, R. (2016a). rethinking: Statistical Rethinking book package. #' R package version 1.59. #' #' McElreath, R. (2016b). *Statistical rethinking: A Bayesian course with #' examples in R and Stan*. Chapman & Hall/CRC. #' #' A. Tsanas, M.A. Little, C. Fox, L.O. Ramig: Objective automatic assessment of #' rehabilitative speech treatment in Parkinson's disease, IEEE #' Transactions on Neural Systems and Rehabilitation Engineering, Vol. 22, pp. 
#' 181-190, January 2014 #' #' @examples #' str(Kline) #' str(milk) #' NULL loo/R/sysdata.rda0000644000176200001440000071000713261751552013420 0ustar liggesusersBZh91AY&SYXYYw{;>o@a;w}y_yn{Ν<ݾ{4} mk{ͱ[v׺}w>λ]8ڼn{{nr{sv6fj#֩뵧w ;;ۭʽЦoGʼ]ws;{{T[g<)ǭub.v{jKmi^޶{RqUo/{֫ݽڻ-;H2 mZZ:+D+#Y,H`A\`Jg}G^4'8"k1-yklG˃شr-3tLH8.s3 b^KA`b[fbϥ/M-/թi6- QJblHub/@ ]OR;q%$㱲?NI]L[U_ClGU]/@_h-ۅ-n? Ղ5H"K~b+^E{h]Uh3M r#,M$_Ћ{ňyPXٍBuYISO91dž3)PaK`+ZG 54c؃jb7}m,3ivZ6 ]Er@*6~B_H6lgLBElA @c/]H9v#: $ 4(@_]U~aJk`CU$}q_UF4C^ Ui/!Jf H$Ͻh3lƱծ aAlEuHDZ 1q1l=U*+Gm)W敩9! c`6ZH>X G( Ym}l4 cKvkxR6Z&V:gXхa~`vN劺iʄUG֩P+믲pZ;ƴ1P ڥ3hF5-O1% W؃W.ťg{M^X)1dXJЋ#E"W3.Ђ4*Cy AziX f"F=w1rʼni#vLb);Е%4Li+9"F೦b= ėUI-?i%1/XX |BgaT4#i"ă>O0;ւ4mXf_ɱ)|˷ZXX1.JM%hF]b 2Ih_CM?b=mh.Mzi Ա.XV@f c3Ɓ_Qشr 6{BPjzO1]`#HYkb_#A3h\N,6,* kH>v4&Ɩk`o  cAgi/1U˶its}4 zoѦA8}ZQ !a4oa(Vj96 gXvĩ-!n {>4C!/]ֲ .KDŽ,FijL('`x7- g df$B(Bиlh +\U6y" sFEhExw\Z@Yؗ×b3 ~./̏ }4ʄ9օiv7. 6j(ܥ1m{L@vQsR4~,X#}H щQ^aװ\E1*s-?] 36.W*<ƅArdX 7{ȵLhJČj6ѼklO0 +GΎl.98E]^~y-#:>LHa9;h\PwW3`[]o=m;'k 7/<ցJГke %ej2 ["qhx,< g>L~3GfUq_x-.jЅ@t$ʹA@u&F1k;K b04*B4y'<(HeYgo=r_e~w,^_)1`غNEp- qw}88:jq/ˉcAex$Eg6;O艈& Q[RdIa/r:D/7ǑLV)s:BXF6%4ez%ش^abeixGB9 5m";UVeyi#ЎmKKv |)rh=sI9Żb9V5[2/iR/i "4qW _Kt8Bʴ>V[bY}a ֏V'XZKks̲t{LSe?ײFDľ&׊ó@ a[k Ή3j kBߴUYXY\JB K1-fYtLWf$.”kh]JbH^|f0VF )M{0~()F6 8FQEWZx- 冉ר?AYгL9Vi t_P@veS%a2PHDR][\Wb\~,yX5l4 v 4bGm,*c.-ɣf-T +͢D7dKXšŋaivB^G]`Af9jцjyZ7LDH A#FAh/#ՎM֋{Z18Z:N]mV&ς>֒1,=_z7 Ttl AzpMdK@Ÿ3Mdd%Ѵm9;Ve? h= rtхjע1h#YiqhX+ȵ4kߩ%hY]{ XO5Sbue' &+؎Peb%<^h+,Gii_y` ]N?9T֪lfXpXF-P>BZ +HEхIk Ge03ϡy4͈Tƕ6 JTmUs(BF*EA3|55_f5g +K`:f]_ 25ip0&*Sp!X #D8-,kMX9ݿv l9o!bpR_15ع_ v9oJw}w6!:ljoy:@k z+-4NqhZWݤ$Z5]gM FE5gx¬ױy9Zn8@LtƵ/1 )-g2~2q*(ߖCN"bi)4Yٍi"Fg\lFA'5ԺK{VVֲ>~*Y&hڟb8xƴzN3hUWt+LlbU@Rwx *GE20 3fϼsE=Wz F}RX+)hDZӱyUP03i|^M&,Y<.)L K@ ] E#`\ |IRVZ>OQRgX)ub7x]W)010dX_+P{͜%eBi)~L])G?ԵlkFiYh`X0Ph  ,JNb<݋h^ڣ E4Y[$~MRX%gCTj,#gh'[ToVgi[+bY2{HUWמhop,FE03}xJBfef%v7A6xk/OLRUgKyOn}kCE^wim@ n.fDxWbQ qQä?M,Y= ug֑\1\԰ 8iHY|'ŸBF^d wVV{8 2P".T?57H *R 6֣-FM<>K $Gs]íHA;B;&*k :;Kk`t#g#YHTZ8<|UWJ V䞑K= k)1[K %Եb1kN&"^];I@zz8AQhaIi*&04NmhU.~ FYv־ū^[ B~M"i`(+,䘬fPoc`\kXWRıM._q2vy ց|Uwa^U ~[9Ps-",QB ?sEE3Hݵ?#F-vOzU?6 -DƕAɼ-u O2Tq}6](FXGRh;c_M^,æWYeGWm:혱 д*X`]aeUE<ð~5ط4C5^/i]X[X\]12GͮSw< gu4Opshب-0<&~E:`-mma0]ѫ_X-ռsE$l0"Fg;'Eյ;A+2A֠. f㤵f#X'`+kmQȔ*,>ɘfngYYLԤ8= iXlk+5Ohia+ær?.ͱ^\|h =L,bجڧPNۿ,h2%0?ҥVْR-wH-* ޒ8,<^c>&&8+/ԖܘSXM\z,9PkϺab4)Qg氲4{i y}̠|Bj_Ɖ7ղu!EA#h&2:_4H[fe>ju} <3(ߔKcyv.ݔ֌kY:t,Z&94^T[$;s+Mu Z0#ʇ,X~G* e= ZNb$?G%+Nx؋5z/LnKjvu)M~G'Rz] BpWLD`X&^=ga?z_1qx}6ROú2$Ǽ+^vguaY@V>j=9q%앿ON/: Uў>V|[_\R՛y85TlEM>AS W-6LJ9Ņkf1WfbćF-K\^C05X>RlW\Ea]qbdjZ'ٸʋ֫q)QB`i6g]YMd|mBw֬3,>ɥjq sDu3[-}tĈZ7dƴ ia<:6+Կ4Q@][5{U\t뀹kI fb99S >~Lc |LyVY5w}F,ϊjKO{Kؿ,E틜e֡ |.Åq]?srYdhྍW-S9.E>Ls, ۧȖEZ^2;_8nuvIr_Tdajl4y #JZKbp%|@wP"c=S`sYan#UUUx*tH|YRs,,oKgR/9(A?{d [vzW˻AsUzM9-n? olPKF€iW)n'3LFfK/I@}HI V[Q>D@ ea4g4Yٽu75B}o 7hppvOĖtmO4,q7wfb@ 2))%g= |ܴ{/k+濶YdTa ՆHѴ-6>0?*trzsD?j #{揽 ;M^}MiX6*sl<}-Mgök ,>qӦbZYT%k`5oGA}@Z>O k߇BopV(بXG)+>7W"\5jkNϙ8#\W6 ;FX_!T(Ts ]N0׶Rk%)7GXƌ㓐7U6Chc6l3 b@xlTuGR¦23ۓg\rju4V9:G/֜,=3,L\8 )qqw\~8Uh7.ce =BXGzt.Z =miG/sԃBy-_ܩ{H'ڰ;/)ͬ7IQt,!_"FioZW'>›6M-Sz9:6[_eW2fo%Yq[YO j|Wٮ휮ǞΰZVV=Rd Wj{cFMq V]j-cnV /ũn9e0zh*|]r(:j{9 _i©3`ƿ \"l^n:ɵF0H,+곙 cU܀1gj1w`iݝWHq̯zNUIqd3lW :bXa5|+a;e|EWd]mر3b}ɸK컠'@PLƮnjI%vZfw Բbٯg"1e (UO>MȮp*1HKbb?~MY\CBmcZa$:W_"ٱrV3i_mvޖb{O&dy/[jֿSPd:3zV\F2aC/٣å3kPљ6ىaֹyNVa麯I:[< T4flnkT$띳/v߾g~áb, Voc bNL>>T-#ٙ: $*cې'6ؘ=ԞFupxLC˻ H{1vX6p^ߵB0P*[ Tص0ؽlzofYnI.ǁ!g&0پy|lZ((ḱmLVP>3Pe<|W..'  
loo/R/diagnostics.R0000644000176200001440000003722015027034123013677 0ustar liggesusers#' Diagnostics for Pareto smoothed importance sampling (PSIS) #' #' Print a diagnostic table summarizing the estimated Pareto shape parameters #' and PSIS effective sample sizes, find the indexes of observations for which #' the estimated Pareto shape parameter \eqn{k} is larger than some #' `threshold` value, or plot observation indexes vs. diagnostic estimates. #' The **Details** section below provides a brief overview of the #' diagnostics, but we recommend consulting Vehtari, Gelman, and Gabry (2017) #' and Vehtari, Simpson, Gelman, Yao, and Gabry (2024) for full details. #' #' @name pareto-k-diagnostic #' @param x An object created by [loo()] or [psis()]. #' @param threshold For `pareto_k_ids()`, `threshold` is the minimum \eqn{k} #' value to flag (default is a sample size `S` dependent threshold #' `1 - 1 / log10(S)`). For `mcse_loo()`, if any \eqn{k} estimates are #' greater than `threshold` the MCSE estimate is returned as `NA`. #' See **Details** for the motivation behind these defaults. #' #' @details #' #' The reliability and approximate convergence rate of the PSIS-based #' estimates can be assessed using the estimates for the shape #' parameter \eqn{k} of the generalized Pareto distribution. The #' diagnostic threshold for Pareto \eqn{k} depends on sample size #' \eqn{S} (the sample size dependent threshold was introduced by Vehtari #' et al. (2024); before that, fixed thresholds of 0.5 and 0.7 were #' recommended). For simplicity, the `loo` package uses the nominal sample #' size \eqn{S} when computing the sample size specific #' threshold. This provides an optimistic threshold if the effective #' sample size is less than 2200, but if MCMC-ESS > S/2 the difference #' is usually negligible. Thinning of MCMC draws can be used to #' improve the ratio ESS/S. #' #' * If \eqn{k < min(1 - 1 / log10(S), 0.7)}, where \eqn{S} is the #' sample size, the PSIS estimate and the corresponding Monte Carlo #' standard error estimate are reliable. #' #' * If \eqn{1 - 1 / log10(S) <= k < 0.7}, the PSIS estimate and the #' corresponding Monte Carlo standard error estimate are not #' reliable, but increasing the (effective) sample size \eqn{S} above #' 2200 may help (this will increase the sample size specific #' threshold \eqn{1 - 1 / log10(2200) > 0.7} and then the bias specific #' threshold 0.7 dominates). #' #' * If \eqn{0.7 <= k < 1}, the PSIS estimate and the corresponding Monte #' Carlo standard error have large bias and are not reliable. Increasing #' the sample size may reduce the variability in the \eqn{k} estimate, which #' may result in a lower \eqn{k} estimate, too. #' #' * If \eqn{k \geq 1}{k >= 1}, the target distribution is estimated to #' have a non-finite mean. The PSIS estimate and the corresponding Monte #' Carlo standard error are not well defined. Increasing the sample size #' may reduce the variability in the \eqn{k} estimate, which #' may also result in a lower \eqn{k} estimate.
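#'
#' For example, the sample-size-specific part of this threshold can be
#' computed directly from the formula above. This is only an illustrative
#' sketch (the internal helper `ps_khat_threshold()` defined later in this
#' file implements the same rule):
#'
#' ```r
#' khat_threshold <- function(S) min(1 - 1 / log10(S), 0.7)
#' sapply(c(100, 320, 1000, 2200, 10000), khat_threshold)
#' # approximately 0.50, 0.60, 0.67, 0.70, 0.70
#' ```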
#' #' \subsection{What if the estimated tail shape parameter \eqn{k} #' exceeds the diagnostic threshold?}{ Importance sampling is likely to #' work less well if the marginal posterior \eqn{p(\theta^s | y)} and #' LOO posterior \eqn{p(\theta^s | y_{-i})} are very different, which #' is more likely to happen with a non-robust model and highly #' influential observations. If the estimated tail shape parameter #' \eqn{k} exceeds the diagnostic threshold, the user should be #' warned. (Note: If \eqn{k} is greater than the diagnostic threshold #' then WAIC is also likely to fail, but WAIC lacks as accurate #' diagnostic.) When using PSIS in the context of approximate LOO-CV, #' we recommend one of the following actions: #' #' * With some additional computations, it is possible to transform #' the MCMC draws from the posterior distribution to obtain more #' reliable importance sampling estimates. This results in a smaller #' shape parameter \eqn{k}. See [loo_moment_match()] and the #' vignette *Avoiding model refits in leave-one-out cross-validation #' with moment matching* for an example of this. #' #' * Sampling from a leave-one-out mixture distribution (see the #' vignette *Mixture IS leave-one-out cross-validation for #' high-dimensional Bayesian models*), directly from \eqn{p(\theta^s #' | y_{-i})} for the problematic observations \eqn{i}, or using #' \eqn{K}-fold cross-validation (see the vignette *Holdout #' validation and K-fold cross-validation of Stan programs with the #' loo package*) will generally be more stable. #' #' * Using a model that is more robust to anomalous observations will #' generally make approximate LOO-CV more stable. #' #' } #' #' \subsection{Observation influence statistics}{ The estimated shape parameter #' \eqn{k} for each observation can be used as a measure of the observation's #' influence on posterior distribution of the model. These can be obtained with #' `pareto_k_influence_values()`. #' } #' #' \subsection{Effective sample size and error estimates}{ In the case that we #' obtain the samples from the proposal distribution via MCMC the **loo** #' package also computes estimates for the Monte Carlo error and the effective #' sample size for importance sampling, which are more accurate for PSIS than #' for IS and TIS (see Vehtari et al (2024) for details). However, the PSIS #' effective sample size estimate will be #' **over-optimistic when the estimate of \eqn{k} is greater than** #' \eqn{min(1-1/log10(S), 0.7)}, where \eqn{S} is the sample size. #' } #' #' @seealso #' * [psis()] for the implementation of the PSIS algorithm. #' * The [FAQ page](https://mc-stan.org/loo/articles/online-only/faq.html) on #' the __loo__ website for answers to frequently asked questions. #' #' @template loo-and-psis-references #' NULL #' @rdname pareto-k-diagnostic #' @export #' @return `pareto_k_table()` returns an object of class #' `"pareto_k_table"`, which is a matrix with columns `"Count"`, #' `"Proportion"`, and `"Min. n_eff"`, and has its own print method. #' pareto_k_table <- function(x) { k <- pareto_k_values(x) n_eff <- try(psis_n_eff_values(x), silent = TRUE) if (inherits(n_eff, "try-error")) { n_eff <- rep(NA, length(k)) } S <- dim(x)[1] k_threshold <- ps_khat_threshold(S) kcut <- k_cut(k, k_threshold) n_eff[k>k_threshold] <- NA min_n_eff <- min_n_eff_by_k(n_eff, kcut) count <- table(kcut) out <- cbind( Count = count, Proportion = prop.table(count), "Min. 
n_eff" = min_n_eff ) attr(out, "k_threshold") <- k_threshold structure(out, class = c("pareto_k_table", class(out))) } #' @export print.pareto_k_table <- function(x, digits = 1, ...) { count <- x[, "Count"] k_threshold <- attr(x, "k_threshold") if (sum(count[2:3]) == 0) { cat(paste0("\nAll Pareto k estimates are good (k < ", round(k_threshold,2), ").\n")) } else { tab <- cbind( " " = rep("", 3), " " = c("(good)", "(bad)", "(very bad)"), "Count" = .fr(count, 0), "Pct. " = paste0(.fr(100 * x[, "Proportion"], digits), "%"), # Print ESS as n_eff terms has been deprecated "Min. ESS" = round(x[, "Min. n_eff"]) ) tab2 <- rbind(tab) cat("Pareto k diagnostic values:\n") rownames(tab2) <- format(rownames(tab2), justify = "right") print(tab2, quote = FALSE) invisible(x) } } #' @rdname pareto-k-diagnostic #' @export #' @return `pareto_k_ids()` returns an integer vector indicating which #' observations have Pareto \eqn{k} estimates above `threshold`. #' pareto_k_ids <- function(x, threshold = NULL) { if (is.null(threshold)) { S <- dim(x)[1] threshold <- ps_khat_threshold(S) } k <- pareto_k_values(x) which(k > threshold) } #' @rdname pareto-k-diagnostic #' @export #' @return `pareto_k_values()` returns a vector of the estimated Pareto #' \eqn{k} parameters. These represent the reliability of sampling. pareto_k_values <- function(x) { k <- x$diagnostics[["pareto_k"]] if (is.null(k)) { # for compatibility with objects from loo < 2.0.0 k <- x[["pareto_k"]] } if (is.null(k)) { stop("No Pareto k estimates found.", call. = FALSE) } return(k) } #' @rdname pareto-k-diagnostic #' @export #' @return `pareto_k_influence_values()` returns a vector of the estimated Pareto #' \eqn{k} parameters. These represent influence of the observations on the #' model posterior distribution. pareto_k_influence_values <- function(x) { if ("influence_pareto_k" %in% colnames(x$pointwise)) { k <- x$pointwise[,"influence_pareto_k"] } else { stop("No Pareto k influence estimates found.", call. = FALSE) } return(k) } #' @rdname pareto-k-diagnostic #' @export #' @return `psis_n_eff_values()` returns a vector of the estimated PSIS #' effective sample sizes. psis_n_eff_values <- function(x) { n_eff <- x$diagnostics[["n_eff"]] if (is.null(n_eff)) { # Print ESS as n_eff terms has been deprecated stop("No PSIS ESS estimates found.", call. = FALSE) } return(n_eff) } #' @rdname pareto-k-diagnostic #' @export #' @return `mcse_loo()` returns the Monte Carlo standard error (MCSE) #' estimate for PSIS-LOO. MCSE will be NA if any Pareto \eqn{k} values are #' above `threshold`. #' mcse_loo <- function(x, threshold = NULL) { stopifnot(is.psis_loo(x)) S <- dim(x)[1] if (is.null(threshold)) { k_threshold <- ps_khat_threshold(S) } else { k_threshold <- threshold } if (any(pareto_k_values(x) > k_threshold, na.rm = TRUE)) { return(NA) } mc_var <- x$pointwise[, "mcse_elpd_loo"]^2 sqrt(sum(mc_var)) } #' @rdname pareto-k-diagnostic #' @aliases plot.loo #' @export #' @param label_points,... For the `plot()` method, if `label_points` is #' `TRUE` the observation numbers corresponding to any values of \eqn{k} #' greater than the diagnostic threshold will be displayed in the plot. #' Any arguments specified in `...` will be passed to [graphics::text()] #' and can be used to control the appearance of the labels. #' @param diagnostic For the `plot` method, which diagnostic should be #' plotted? The options are `"k"` for Pareto \eqn{k} estimates (the #' default), or `"ESS"` or `"n_eff"` for PSIS effective sample size estimates. 
#' @param main For the `plot()` method, a title for the plot. #' #' @return The `plot()` method is called for its side effect and does not #' return anything. If `x` is the result of a call to [loo()] #' or [psis()] then `plot(x, diagnostic)` produces a plot of #' the estimates of the Pareto shape parameters (`diagnostic = "k"`) or #' estimates of the PSIS effective sample sizes (`diagnostic = "ESS"`). #' plot.psis_loo <- function(x, diagnostic = c("k", "ESS", "n_eff"), ..., label_points = FALSE, main = "PSIS diagnostic plot") { diagnostic <- match.arg(diagnostic) k <- pareto_k_values(x) k[is.na(k)] <- 0 # FIXME when reloo is changed to make NA k values -Inf k_inf <- !is.finite(k) if (any(k_inf)) { warning(signif(100 * mean(k_inf), 2), "% of Pareto k estimates are Inf/NA/NaN and not plotted.") } if (diagnostic == "ESS" || diagnostic == "n_eff") { n_eff <- psis_n_eff_values(x) } else { n_eff <- NULL } S <- dim(x)[1] k_threshold <- ps_khat_threshold(S) plot_diagnostic( k = k, n_eff = n_eff, threshold = k_threshold, ..., label_points = label_points, main = main ) } #' @export #' @noRd #' @rdname pareto-k-diagnostic plot.loo <- plot.psis_loo #' @export #' @rdname pareto-k-diagnostic plot.psis <- function(x, diagnostic = c("k", "ESS", "n_eff"), ..., label_points = FALSE, main = "PSIS diagnostic plot") { plot.psis_loo(x, diagnostic = diagnostic, ..., label_points = label_points, main = main) } # internal ---------------------------------------------------------------- plot_diagnostic <- function(k, n_eff = NULL, threshold = 0.7, ..., label_points = FALSE, main = "PSIS diagnostic plot") { use_n_eff <- !is.null(n_eff) graphics::plot( x = if (use_n_eff) n_eff else k, xlab = "Data point", # Print ESS as n_eff terms has been deprecated ylab = if (use_n_eff) "PSIS ESS" else "Pareto shape k", type = "n", bty = "l", yaxt = "n", main = main ) graphics::axis(side = 2, las = 1) in_range <- function(x, lb_ub) { x >= lb_ub[1L] & x <= lb_ub[2L] } if (!use_n_eff) { krange <- range(k, na.rm = TRUE) breaks <- c(0, threshold, 1) hex_clrs <- c("#C79999", "#7C0000") ltys <- c(3, 2, 1) for (j in seq_along(breaks)) { val <- breaks[j] if (in_range(val, krange)) graphics::abline( h = val, col = ifelse(val == 0, "darkgray", hex_clrs[j - 1]), lty = ltys[j], lwd = 1 ) } } breaks <- c(-Inf, threshold, 1) hex_clrs <- c("#6497b1", "#005b96", "#03396c") clrs <- ifelse( in_range(k, breaks[1:2]), hex_clrs[1], ifelse(in_range(k, breaks[2:3]), hex_clrs[2], hex_clrs[3]) ) if (all(k < threshold) || !label_points) { graphics::points(x = if (use_n_eff) n_eff else k, col = clrs, pch = 3, cex = .6) return(invisible()) } else { graphics::points(x = which(k < threshold), y = if (use_n_eff) n_eff[k < threshold] else k[k < threshold], col = clrs[k < threshold], pch = 3, cex = .6) sel <- !in_range(k, breaks[1:2]) dots <- list(...) txt_args <- c( list( x = seq_along(k)[sel], y = if (use_n_eff) n_eff[sel] else k[sel], labels = seq_along(k)[sel] ), if (length(dots)) dots ) if (!("adj" %in% names(txt_args))) txt_args$adj <- 2 / 3 if (!("cex" %in% names(txt_args))) txt_args$cex <- 0.75 if (!("col" %in% names(txt_args))) txt_args$col <- clrs[sel] do.call(graphics::text, txt_args) } } #' Convert numeric Pareto k values to a factor variable. #' #' @noRd #' @param k Vector of Pareto k estimates. #' @return A factor variable (the same length as k) with 3 levels. 
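#'
#' A small usage sketch (hypothetical values; `threshold = 0.67` is roughly the
#' sample size specific threshold for S = 1000 draws):
#'   k_cut(c(0.1, 0.8, 1.2), threshold = 0.67)
#'   # a factor with values "(-Inf, 0.67]", "(0.67, 1]", "(1, Inf)"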
#' k_cut <- function(k, threshold) { cut( k, breaks = c(-Inf, threshold, 1, Inf), labels = c(paste0("(-Inf, ", round(threshold,2), "]"), paste0("(", round(threshold,2), ", 1]"), "(1, Inf)") ) } #' Calculate the minimum PSIS n_eff within groups defined by Pareto k values #' #' @noRd #' @param n_eff Vector of PSIS n_eff estimates. #' @param kcut Factor returned by the k_cut() function. #' @return Vector of length `nlevels(kcut)` containing the minimum n_eff within #' each k group. If there are no k values in a group the corresponding element #' of the returned vector is NA. min_n_eff_by_k <- function(n_eff, kcut) { n_eff_split <- split(n_eff, f = kcut) n_eff_split <- sapply(n_eff_split, function(x) { # some k groups might be empty. # split gives numeric(0) but replace with NA if (!length(x)) NA else x }) sapply(n_eff_split, min) } #' Pareto-smoothing k-hat threshold #' #' Given sample size S, computes the khat threshold for a reliable Pareto #' smoothed estimate (to have small probability of large error). See #' section 3.2.4, equation (13). Sample sizes 100, 320, 1000, 2200, #' 10000 correspond to thresholds 0.5, 0.6, 0.67, 0.7, 0.75. Although #' with a bigger sample size S we can achieve estimates with small #' probability of large error, it is difficult to get accurate MCSE #' estimates as the bias starts to dominate when k > 0.7 (see Section 3.2.3). #' Thus the sample size dependent k-hat threshold is capped at 0.7. #' @param S sample size #' @param ... unused #' @return threshold #' @noRd ps_khat_threshold <- function(S, ...) { min(1 - 1 / log10(S), 0.7) } loo/R/effective_sample_sizes.R0000644000176200001440000001336415027034070016112 0ustar liggesusers#' Convenience function for computing relative efficiencies #' #' `relative_eff()` computes the MCMC effective sample size divided by #' the total sample size. #' #' @export #' @param x A vector, matrix, 3-D array, or function. See the **Methods (by #' class)** section below for details on specifying `x`, but where #' "log-likelihood" is mentioned replace it with one of the following #' depending on the use case: #' * For use with the [loo()] function, the values in `x` (or generated by #' `x`, if a function) should be **likelihood** values #' (i.e., `exp(log_lik)`), not on the log scale. #' * For generic use with [psis()], the values in `x` should be the reciprocal #' of the importance ratios (i.e., `exp(-log_ratios)`). #' @param chain_id A vector of length `NROW(x)` containing MCMC chain #' indexes for each row of `x` (if a matrix) or each value in #' `x` (if a vector). No `chain_id` is needed if `x` is a 3-D #' array. If there are `C` chains then valid chain indexes are values #' in `1:C`. #' @param cores The number of cores to use for parallelization. #' @return A vector of relative effective sample sizes. #' #' @examples #' LLarr <- example_loglik_array() #' LLmat <- example_loglik_matrix() #' dim(LLarr) #' dim(LLmat) #' #' rel_n_eff_1 <- relative_eff(exp(LLarr)) #' rel_n_eff_2 <- relative_eff(exp(LLmat), chain_id = rep(1:2, each = 500)) #' all.equal(rel_n_eff_1, rel_n_eff_2) #' relative_eff <- function(x, ...) { UseMethod("relative_eff") } #' @export #' @templateVar fn relative_eff #' @template vector #' relative_eff.default <- function(x, chain_id, ...)
{ dim(x) <- c(length(x), 1) class(x) <- "matrix" relative_eff.matrix(x, chain_id) } #' @export #' @templateVar fn relative_eff #' @template matrix #' relative_eff.matrix <- function(x, chain_id, ..., cores = getOption("mc.cores", 1)) { x <- llmatrix_to_array(x, chain_id) relative_eff.array(x, cores = cores) } #' @export #' @templateVar fn relative_eff #' @template array #' relative_eff.array <- function(x, ..., cores = getOption("mc.cores", 1)) { stopifnot(length(dim(x)) == 3) S <- prod(dim(x)[1:2]) # posterior sample size = iter * chains if (cores == 1) { n_eff_vec <- apply(x, 3, posterior::ess_mean) } else { if (!os_is_windows()) { n_eff_list <- parallel::mclapply( mc.cores = cores, X = seq_len(dim(x)[3]), FUN = function(i) posterior::ess_mean(x[, , i, drop = TRUE]) ) } else { cl <- parallel::makePSOCKcluster(cores) on.exit(parallel::stopCluster(cl)) n_eff_list <- parallel::parLapply( cl = cl, X = seq_len(dim(x)[3]), fun = function(i) posterior::ess_mean(x[, , i, drop = TRUE]) ) } n_eff_vec <- unlist(n_eff_list, use.names = FALSE) } return(n_eff_vec / S) } #' @export #' @templateVar fn relative_eff #' @template function #' @param data,draws,... Same as for the [loo()] function method. #' relative_eff.function <- function(x, chain_id, ..., cores = getOption("mc.cores", 1), data = NULL, draws = NULL) { f_i <- validate_llfun(x) # not really an llfun, should return exp(ll) or exp(-ll) N <- dim(data)[1] if (cores == 1) { n_eff_list <- lapply( X = seq_len(N), FUN = function(i) { val_i <- f_i(data_i = data[i, , drop = FALSE], draws = draws, ...) relative_eff.default(as.vector(val_i), chain_id = chain_id, cores = 1) } ) } else { if (!os_is_windows()) { n_eff_list <- parallel::mclapply( X = seq_len(N), FUN = function(i) { val_i <- f_i(data_i = data[i, , drop = FALSE], draws = draws, ...) relative_eff.default(as.vector(val_i), chain_id = chain_id, cores = 1) }, mc.cores = cores ) } else { cl <- parallel::makePSOCKcluster(cores) parallel::clusterExport(cl=cl, varlist=c("draws", "chain_id", "data"), envir=environment()) on.exit(parallel::stopCluster(cl)) n_eff_list <- parallel::parLapply( cl = cl, X = seq_len(N), fun = function(i) { val_i <- f_i(data_i = data[i, , drop = FALSE], draws = draws, ...) relative_eff.default(as.vector(val_i), chain_id = chain_id, cores = 1) } ) } } n_eff_vec <- unlist(n_eff_list, use.names = FALSE) return(n_eff_vec) } #' @export #' @describeIn relative_eff #' If `x` is an object of class `"psis"`, `relative_eff()` simply returns #' the `r_eff` attribute of `x`. relative_eff.importance_sampling <- function(x, ...) { attr(x, "r_eff") } # internal ---------------------------------------------------------------- #' Effective sample size for PSIS #' #' @noRd #' @param w A vector or matrix (one column per observation) of normalized Pareto #' smoothed weights (not log weights). #' @param r_eff Relative effective sample size of `exp(log_lik)` or #' `exp(-log_ratios)`. `r_eff` should be a scalar if `w` is a #' vector and a vector of length `ncol(w)` if `w` is a matrix. #' @return A scalar if `w` is a vector. A vector of length `ncol(w)` #' if `w` is matrix. #' psis_n_eff <- function(w, ...) { UseMethod("psis_n_eff") } #' @export psis_n_eff.default <- function(w, r_eff = NULL, ...) { ss <- sum(w^2) if (is.null(r_eff)) { return(1 / ss) } stopifnot(length(r_eff) == 1) 1 / ss * r_eff } #' @export psis_n_eff.matrix <- function(w, r_eff = NULL, ...) 
{ ss <- colSums(w^2) if (is.null(r_eff)) { return(1 / ss) } if (length(r_eff) != length(ss) && length(r_eff) != 1) { stop("r_eff must have length 1 or ncol(w).", call. = FALSE) } 1 / ss * r_eff } loo/R/pointwise.R0000644000176200001440000000245714566461605013435 0ustar liggesusers#' Convenience function for extracting pointwise estimates #' #' @export #' @param x A `loo` object, for example one returned by [loo()], #' [loo_subsample()], [loo_approximate_posterior()], [loo_moment_match()], etc. #' @param estimate Which pointwise estimate to return. By default all are #' returned. The objects returned by the different functions ([loo()], #' [loo_subsample()], etc.) have slightly different estimates available. #' Typically at a minimum the estimates `elpd_loo`, `looic`, `mcse_elpd_loo`, #' `p_loo`, and `influence_pareto_k` will be available, but there may be #' others. #' @param ... Currently ignored. #' @return A vector of length equal to the number of observations. #' #' @examples #' x <- loo(example_loglik_array()) #' pointwise(x, "elpd_loo") #' pointwise <- function(x, estimate, ...) { UseMethod("pointwise") } #' @rdname pointwise #' @export pointwise.loo <- function(x, estimate, ...) { stopifnot(is.character(estimate), length(estimate) == 1) pw <- x$pointwise if (is.null(pw)) { stop("No pointwise estimates found.", call. = FALSE) } estimates <- colnames(pw) if (!(estimate %in% estimates)) { stop( "'", estimate, "' not found.", " Available estimates are: \n", paste(shQuote(estimates), collapse=", ") ) } pw[, estimate] } loo/R/crps.R0000644000176200001440000001555114753703132012352 0ustar liggesusers#' Continuously ranked probability score #' #' The `crps()` and `scrps()` functions and their `loo_*()` counterparts can be #' used to compute the continuously ranked probability score (CRPS) and scaled #' CRPS (SCRPS) (as defined by Bolin and Wallin, 2023). CRPS is a proper scoring rule, and #' strictly proper when the first moment of the predictive distribution is #' finite. Both can be expressed in terms of samples form the predictive #' distribution. See, for example, a paper by Gneiting and Raftery (2007) #' for a comprehensive discussion on CRPS. #' #' To compute (S)CRPS, the user needs to provide two sets of draws, `x` and #' `x2`, from the predictive distribution. This is due to the fact that formulas #' used to compute CRPS involve an expectation of the absolute difference of `x` #' and `x2`, both having the same distribution. See the `permutations` argument, #' as well as Gneiting and Raftery (2007) for details. #' #' @export #' @param x A `S` by `N` matrix (draws by observations), or a vector of length #' `S` when only single observation is provided in `y`. #' @param x2 Independent draws from the same distribution as draws in `x`. #' Should be of the identical dimension. #' @param y A vector of observations or a single value. #' @param permutations An integer, with default value of 1, specifying how many #' times the expected value of |X - X'| (`|x - x2|`) is computed. The row #' order of `x2` is shuffled as elements `x` and `x2` are typically drawn #' given the same values of parameters. This happens, e.g., when one calls #' `posterior_predict()` twice for a fitted \pkg{rstanarm} or \pkg{brms} #' model. Generating more permutations is expected to decrease the variance of #' the computed expected value. #' @param ... Passed on to [E_loo()] in the `loo_*()` version of these #' functions. #' #' @return A list containing two elements: `estimates` and `pointwise`. 
#' The former reports the estimate and its standard error and the latter the pointwise #' values. Following Bolin & Wallin (2023), a larger value is better. #' #' @examples #' \dontrun{ #' # An example using rstanarm #' library(rstanarm) #' data("kidiq") #' fit <- stan_glm(kid_score ~ mom_hs + mom_iq, data = kidiq) #' ypred1 <- posterior_predict(fit) #' ypred2 <- posterior_predict(fit) #' crps(ypred1, ypred2, y = fit$y) #' loo_crps(ypred1, ypred2, y = fit$y, log_lik = log_lik(fit)) #' } #' #' @references #' Bolin, D., & Wallin, J. (2023). Local scale invariance and robustness of #' proper scoring rules. Statistical Science, 38(1):140-159. #' #' Gneiting, T., & Raftery, A. E. (2007). Strictly Proper Scoring Rules, #' Prediction, and Estimation. Journal of the American Statistical Association, #' 102(477), 359–378. crps <- function(x, ...) { UseMethod("crps") } #' @rdname crps #' @export scrps <- function(x, ...) { UseMethod("scrps") } #' @rdname crps #' @export loo_crps <- function(x, ...) { UseMethod("loo_crps") } #' @rdname crps #' @export loo_scrps <- function(x, ...) { UseMethod("loo_scrps") } #' @rdname crps #' @export crps.matrix <- function(x, x2, y, ..., permutations = 1) { validate_crps_input(x, x2, y) repeats <- replicate(permutations, EXX_compute(x, x2), simplify = FALSE) EXX <- Reduce(`+`, repeats) / permutations EXy <- colMeans(abs(sweep(x, 2, y))) crps_output(.crps_fun(EXX, EXy)) } #' Method for a single data point #' @rdname crps #' @export crps.numeric <- function(x, x2, y, ..., permutations = 1) { stopifnot(length(x) == length(x2), length(y) == 1) crps.matrix(as.matrix(x), as.matrix(x2), y, permutations = permutations) } #' @rdname crps #' @export #' @param log_lik A log-likelihood matrix the same size as `x`. #' @param r_eff An optional vector of relative effective sample size estimates #' containing one element per observation. See [psis()] for details. #' @param cores The number of cores to use for parallelization of [psis()]. #' See [psis()] for details.
loo_crps.matrix <- function(x, x2, y, log_lik, ..., permutations = 1, r_eff = 1, cores = getOption("mc.cores", 1)) { validate_crps_input(x, x2, y, log_lik) repeats <- replicate(permutations, EXX_loo_compute(x, x2, log_lik, r_eff = r_eff, ...), simplify = F) EXX <- Reduce(`+`, repeats) / permutations psis_obj <- psis(-log_lik, r_eff = r_eff, cores = cores) EXy <- E_loo(abs(sweep(x, 2, y)), psis_obj, log_ratios = -log_lik, ...)$value crps_output(.crps_fun(EXX, EXy)) } #' @rdname crps #' @export scrps.matrix <- function(x, x2, y, ..., permutations = 1) { validate_crps_input(x, x2, y) repeats <- replicate(permutations, EXX_compute(x, x2), simplify = F) EXX <- Reduce(`+`, repeats) / permutations EXy <- colMeans(abs(sweep(x, 2, y))) crps_output(.crps_fun(EXX, EXy, scale = TRUE)) } #' @rdname crps #' @export scrps.numeric <- function(x, x2, y, ..., permutations = 1) { stopifnot(length(x) == length(x2), length(y) == 1) scrps.matrix(as.matrix(x), as.matrix(x2), y, permutations) } #' @rdname crps #' @export loo_scrps.matrix <- function( x, x2, y, log_lik, ..., permutations = 1, r_eff = 1, cores = getOption("mc.cores", 1)) { validate_crps_input(x, x2, y, log_lik) repeats <- replicate(permutations, EXX_loo_compute(x, x2, log_lik, r_eff = r_eff, ...), simplify = F) EXX <- Reduce(`+`, repeats) / permutations psis_obj <- psis(-log_lik, r_eff = r_eff, cores = cores) EXy <- E_loo(abs(sweep(x, 2, y)), psis_obj, log_ratios = -log_lik, ...)$value crps_output(.crps_fun(EXX, EXy, scale = TRUE)) } # ------------ Internals ---------------- EXX_compute <- function(x, x2) { S <- nrow(x) colMeans(abs(x - x2[sample(1:S),])) } EXX_loo_compute <- function(x, x2, log_lik, r_eff = 1, ...) { S <- nrow(x) shuffle <- sample (1:S) x2 <- x2[shuffle,] log_lik2 <- log_lik[shuffle,] psis_obj_joint <- psis(-log_lik - log_lik2 , r_eff = r_eff) E_loo(abs(x - x2), psis_obj_joint, log_ratios = -log_lik - log_lik2, ...)$value } #' Function to compute crps and scrps #' @noRd .crps_fun <- function(EXX, EXy, scale = FALSE) { if (scale) return(-EXy/EXX - 0.5 * log(EXX)) 0.5 * EXX - EXy } #' Compute output data for crps functions #' @noRd crps_output <- function(crps_pw) { n <- length(crps_pw) out <- list() out$estimates <- c(mean(crps_pw), sd(crps_pw) / sqrt(n)) names(out$estimates) <- c('Estimate', 'SE') out$pointwise <- crps_pw out } #' Validate input of CRPS functions #' #' Check that predictive draws and observed data are of compatible shape #' @noRd validate_crps_input <- function(x, x2, y, log_lik = NULL) { stopifnot(is.numeric(x), is.numeric(x2), is.numeric(y), identical(dim(x), dim(x2)), ncol(x) == length(y), ifelse(is.null(log_lik), TRUE, identical(dim(log_lik), dim(x))) ) } loo/R/compare.R0000644000176200001440000001220615075016621013021 0ustar liggesusers#' Model comparison (deprecated, old version) #' #' **This function is deprecated**. Please use the new [loo_compare()] function #' instead. #' #' @export #' @param ... At least two objects returned by [loo()] (or [waic()]). #' @param x A list of at least two objects returned by [loo()] (or #' [waic()]). This argument can be used as an alternative to #' specifying the objects in `...`. #' #' @return A vector or matrix with class `'compare.loo'` that has its own #' print method. If exactly two objects are provided in `...` or #' `x`, then the difference in expected predictive accuracy and the #' standard error of the difference are returned. If more than two objects are #' provided then a matrix of summary information is returned (see **Details**). 
#' #' @details #' When comparing two fitted models, we can estimate the difference in their #' expected predictive accuracy by the difference in `elpd_loo` or #' `elpd_waic` (or multiplied by -2, if desired, to be on the #' deviance scale). #' #' *When that difference, `elpd_diff`, is positive then the expected #' predictive accuracy for the second model is higher. A negative #' `elpd_diff` favors the first model.* #' #' When using `compare()` with more than two models, the values in the #' `elpd_diff` and `se_diff` columns of the returned matrix are #' computed by making pairwise comparisons between each model and the model #' with the best ELPD (i.e., the model in the first row). #' Although the `elpd_diff` column is equal to the difference in #' `elpd_loo`, do not expect the `se_diff` column to be equal to the #' the difference in `se_elpd_loo`. #' #' To compute the standard error of the difference in ELPD we use a #' paired estimate to take advantage of the fact that the same set of _N_ #' data points was used to fit both models. These calculations should be most #' useful when _N_ is large, because then non-normality of the #' distribution is not such an issue when estimating the uncertainty in these #' sums. These standard errors, for all their flaws, should give a better #' sense of uncertainty than what is obtained using the current standard #' approach of comparing differences of deviances to a Chi-squared #' distribution, a practice derived for Gaussian linear models or #' asymptotically, and which only applies to nested models in any case. #' #' @template loo-and-psis-references #' #' @examples #' \dontrun{ #' loo1 <- loo(log_lik1) #' loo2 <- loo(log_lik2) #' print(compare(loo1, loo2), digits = 3) #' print(compare(x = list(loo1, loo2))) #' #' waic1 <- waic(log_lik1) #' waic2 <- waic(log_lik2) #' compare(waic1, waic2) #' } #' compare <- function(..., x = list()) { .Deprecated("loo_compare") dots <- list(...) if (length(dots)) { if (length(x)) { stop("If 'x' is specified then '...' should not be specified.", call. = FALSE) } nms <- as.character(match.call(expand.dots = TRUE))[-1L] } else { if (!is.list(x) || !length(x)) { stop("'x' must be a list.", call. = FALSE) } dots <- x nms <- names(dots) if (!length(nms)) { nms <- paste0("model", seq_along(dots)) } } if (!all(sapply(dots, is.loo))) { stop("All inputs should have class 'loo'.") } if (length(dots) <= 1L) { stop("'compare' requires at least two models.") } else if (length(dots) == 2L) { loo1 <- dots[[1]] loo2 <- dots[[2]] comp <- compare_two_models(loo1, loo2) class(comp) <- c(class(comp), "old_compare.loo") return(comp) } else { Ns <- sapply(dots, function(x) nrow(x$pointwise)) if (!all(Ns == Ns[1L])) { stop("Not all models have the same number of data points.", call. 
= FALSE) } x <- sapply(dots, function(x) { est <- x$estimates setNames(c(est), nm = c(rownames(est), paste0("se_", rownames(est))) ) }) colnames(x) <- nms rnms <- rownames(x) comp <- x ord <- order(x[grep("^elpd", rnms), ], decreasing = TRUE) comp <- t(comp)[ord, ] patts <- c("elpd", "p_", "^waic$|^looic$", "^se_waic$|^se_looic$") col_ord <- unlist(sapply(patts, function(p) grep(p, colnames(comp))), use.names = FALSE) comp <- comp[, col_ord] # compute elpd_diff and se_elpd_diff relative to best model rnms <- rownames(comp) diffs <- mapply(elpd_diffs, dots[ord[1]], dots[ord]) elpd_diff <- apply(diffs, 2, sum) se_diff <- apply(diffs, 2, se_elpd_diff) comp <- cbind(elpd_diff = elpd_diff, se_diff = se_diff, comp) rownames(comp) <- rnms class(comp) <- c("compare.loo", class(comp), "old_compare.loo") comp } } # internal ---------------------------------------------------------------- compare_two_models <- function(loo_a, loo_b, return = c("elpd_diff", "se"), check_dims = TRUE) { if (check_dims) { if (dim(loo_a$pointwise)[1] != dim(loo_b$pointwise)[1]) { stop(paste("Models don't have the same number of data points.", "\nFound N_1 =", dim(loo_a$pointwise)[1], "and N_2 =", dim(loo_b$pointwise)[1]), call. = FALSE) } } diffs <- elpd_diffs(loo_a, loo_b) comp <- c(elpd_diff = sum(diffs), se = se_elpd_diff(diffs)) structure(comp, class = "compare.loo") } loo/R/kfold-helpers.R0000644000176200001440000000713314702053164014134 0ustar liggesusers#' Helper functions for K-fold cross-validation #' #' @description These functions can be used to generate indexes for use with #' K-fold cross-validation. See the **Details** section for explanations. #' #' @name kfold-helpers #' @param K The number of folds to use. #' @param N The number of observations in the data. #' @param x A discrete variable of length `N` with at least `K` levels (unique #' values). Will be coerced to a [factor][factor()]. #' #' @return An integer vector of length `N` where each element is an index in `1:K`. #' #' @details #' `kfold_split_random()` splits the data into `K` groups #' of equal size (or roughly equal size). #' #' For a categorical variable `x` `kfold_split_stratified()` #' splits the observations into `K` groups ensuring that relative #' category frequencies are approximately preserved. #' #' For a grouping variable `x`, `kfold_split_grouped()` places #' all observations in `x` from the same group/level together in #' the same fold. The selection of which groups/levels go into which #' fold (relevant when when there are more groups than folds) is #' randomized. 
#' #' @examples #' ids <- kfold_split_random(K = 5, N = 20) #' print(ids) #' table(ids) #' #' #' x <- sample(c(0, 1), size = 200, replace = TRUE, prob = c(0.05, 0.95)) #' table(x) #' ids <- kfold_split_stratified(K = 5, x = x) #' print(ids) #' table(ids, x) #' #' grp <- gl(n = 50, k = 15, labels = state.name) #' length(grp) #' head(table(grp)) #' #' ids_10 <- kfold_split_grouped(K = 10, x = grp) #' (tab_10 <- table(grp, ids_10)) #' colSums(tab_10) #' #' ids_9 <- kfold_split_grouped(K = 9, x = grp) #' (tab_9 <- table(grp, ids_9)) #' colSums(tab_9) #' NULL #' @rdname kfold-helpers #' @export kfold_split_random <- function(K = 10, N = NULL) { stopifnot( !is.null(N), K == as.integer(K), N == as.integer(N), length(K) == 1, length(N) == 1, K > 1, K <= N ) perm <- sample.int(N) idx <- ceiling(seq(from = 1, to = N, length.out = K + 1)) bins <- .bincode(perm, breaks = idx, right = FALSE, include.lowest = TRUE) return(bins) } #' @rdname kfold-helpers #' @export kfold_split_stratified <- function(K = 10, x = NULL) { stopifnot( !is.null(x), K == as.integer(K), length(K) == 1, K > 1, K <= length(x) ) x <- as.integer(as.factor(x)) Nlev <- length(unique(x)) N <- length(x) xids <- numeric() for (l in 1:Nlev) { idx <- which(x == l) if (length(idx) > 1) { xids <- c(xids, sample(idx)) } else { xids <- c(xids, idx) } } bins <- rep(NA, N) bins[xids] <- rep(1:K, ceiling(N/K))[1:N] return(bins) } #' @rdname kfold-helpers #' @export kfold_split_grouped <- function(K = 10, x = NULL) { stopifnot( !is.null(x), K == as.integer(K), length(K) == 1, K > 1, K <= length(x) ) Nlev <- length(unique(x)) if (Nlev < K) { stop("'K' must not be bigger than the number of levels/groups in 'x'.") } x <- as.integer(as.factor(x)) if (Nlev == K) { return(x) } # Otherwise we have Nlev > K S1 <- ceiling(Nlev / K) # number of levels in largest groups of levels N_S2 <- S1 * K - Nlev # number of groups of levels of size S1 - 1 N_S1 <- K - N_S2 # number of groups of levels of size S1 perm <- sample.int(Nlev) # permute group levels brks <- seq(from = S1 + 0.5, by = S1, length.out = N_S1) if (N_S2 > 0) { brks2 <- seq(from = brks[N_S1] + S1 - 1, by = S1 - 1, length.out = N_S2 - 1) brks <- c(brks, brks2) } grps <- findInterval(perm, vec = brks) + 1 # +1 so min is 1 not 0 bins <- rep(NA, length(x)) for (j in perm) { bins[x == j] <- grps[j] } return(bins) } loo/R/loo_model_weights.R0000644000176200001440000003624314702053074015104 0ustar liggesusers#' Model averaging/weighting via stacking or pseudo-BMA weighting #' #' Model averaging via stacking of predictive distributions, pseudo-BMA #' weighting or pseudo-BMA+ weighting with the Bayesian bootstrap. See Yao et #' al. (2018), Vehtari, Gelman, and Gabry (2017), and Vehtari, Simpson, #' Gelman, Yao, and Gabry (2024) for background. #' #' @export #' @param x A list of `"psis_loo"` objects (objects returned by [loo()]) or #' pointwise log-likelihood matrices or , one for each model. If the list #' elements are named the names will be used to label the models in the #' results. Each matrix/object should have dimensions \eqn{S} by \eqn{N}, #' where \eqn{S} is the size of the posterior sample (with all chains merged) #' and \eqn{N} is the number of data points. If `x` is a list of #' log-likelihood matrices then [loo()] is called internally on each matrix. 
#' Currently the `loo_model_weights()` function is not implemented to be used #' with results from K-fold CV, but you can still obtain weights using K-fold #' CV results by calling the `stacking_weights()` or `pseudobma_weights()` #' function directly. #' @param method Either `"stacking"` (the default) or `"pseudobma"`, indicating which method #' to use for obtaining the weights. `"stacking"` refers to stacking of #' predictive distributions and `"pseudobma"` refers to pseudo-BMA+ weighting #' (or plain pseudo-BMA weighting if argument `BB` is `FALSE`). #' @param BB Logical used when `"method"`=`"pseudobma"`. If #' `TRUE` (the default), the Bayesian bootstrap will be used to adjust #' the pseudo-BMA weighting, which is called pseudo-BMA+ weighting. It helps #' regularize the weight away from 0 and 1, so as to reduce the variance. #' @param BB_n For pseudo-BMA+ weighting only, the number of samples to use for #' the Bayesian bootstrap. The default is `BB_n=1000`. #' @param alpha Positive scalar shape parameter in the Dirichlet distribution #' used for the Bayesian bootstrap. The default is `alpha=1`, which #' corresponds to a uniform distribution on the simplex space. #' @param optim_method If `method="stacking"`, a string passed to the `method` #' argument of [stats::constrOptim()] to specify the optimization algorithm. #' The default is `optim_method="BFGS"`, but other options are available (see #' [stats::optim()]). #' @param optim_control If `method="stacking"`, a list of control parameters for #' optimization passed to the `control` argument of [stats::constrOptim()]. #' @param r_eff_list Optionally, a list of relative effective sample size #' estimates for the likelihood `(exp(log_lik))` of each observation in #' each model. See [psis()] and [relative_eff()] helper #' function for computing `r_eff`. If `x` is a list of `"psis_loo"` #' objects then `r_eff_list` is ignored. #' @template cores #' @param ... Unused, except for the generic to pass arguments to individual #' methods. #' #' @return A numeric vector containing one weight for each model. #' #' @details #' `loo_model_weights()` is a wrapper around the `stacking_weights()` and #' `pseudobma_weights()` functions that implements stacking, pseudo-BMA, and #' pseudo-BMA+ weighting for combining multiple predictive distributions. We can #' use approximate or exact leave-one-out cross-validation (LOO-CV) or K-fold CV #' to estimate the expected log predictive density (ELPD). #' #' The stacking method (`method="stacking"`), which is the default for #' `loo_model_weights()`, combines all models by maximizing the leave-one-out #' predictive density of the combination distribution. That is, it finds the #' optimal linear combining weights for maximizing the leave-one-out log score. #' #' The pseudo-BMA method (`method="pseudobma"`) finds the relative weights #' proportional to the ELPD of each model. However, when #' `method="pseudobma"`, the default is to also use the Bayesian bootstrap #' (`BB=TRUE`), which corresponds to the pseudo-BMA+ method. The Bayesian #' bootstrap takes into account the uncertainty of finite data points and #' regularizes the weights away from the extremes of 0 and 1. #' #' In general, we recommend stacking for averaging predictive distributions, #' while pseudo-BMA+ can serve as a computationally easier alternative. 
#' #' @seealso #' * The __loo__ package [vignettes](https://mc-stan.org/loo/articles/), particularly #' [Bayesian Stacking and Pseudo-BMA weights using the __loo__ package](https://mc-stan.org/loo/articles/loo2-weights.html). #' * [loo()] for details on leave-one-out ELPD estimation. #' * [constrOptim()] for the choice of optimization methods and control-parameters. #' * [relative_eff()] for computing `r_eff`. #' #' @template loo-and-psis-references #' @template stacking-references #' #' @examples #' \dontrun{ #' ### Demonstrating usage after fitting models with RStan #' library(rstan) #' #' # generate fake data from N(0,1). #' N <- 100 #' y <- rnorm(N, 0, 1) #' #' # Suppose we have three models: N(-1, sigma), N(0.5, sigma) and N(0.6,sigma). #' stan_code <- " #' data { #' int N; #' vector[N] y; #' real mu_fixed; #' } #' parameters { #' real sigma; #' } #' model { #' sigma ~ exponential(1); #' y ~ normal(mu_fixed, sigma); #' } #' generated quantities { #' vector[N] log_lik; #' for (n in 1:N) log_lik[n] = normal_lpdf(y[n]| mu_fixed, sigma); #' }" #' #' mod <- stan_model(model_code = stan_code) #' fit1 <- sampling(mod, data=list(N=N, y=y, mu_fixed=-1)) #' fit2 <- sampling(mod, data=list(N=N, y=y, mu_fixed=0.5)) #' fit3 <- sampling(mod, data=list(N=N, y=y, mu_fixed=0.6)) #' model_list <- list(fit1, fit2, fit3) #' log_lik_list <- lapply(model_list, extract_log_lik) #' #' # optional but recommended #' r_eff_list <- lapply(model_list, function(x) { #' ll_array <- extract_log_lik(x, merge_chains = FALSE) #' relative_eff(exp(ll_array)) #' }) #' #' # stacking method: #' wts1 <- loo_model_weights( #' log_lik_list, #' method = "stacking", #' r_eff_list = r_eff_list, #' optim_control = list(reltol=1e-10) #' ) #' print(wts1) #' #' # can also pass a list of psis_loo objects to avoid recomputing loo #' loo_list <- lapply(1:length(log_lik_list), function(j) { #' loo(log_lik_list[[j]], r_eff = r_eff_list[[j]]) #' }) #' #' wts2 <- loo_model_weights( #' loo_list, #' method = "stacking", #' optim_control = list(reltol=1e-10) #' ) #' all.equal(wts1, wts2) #' #' # can provide names to be used in the results #' loo_model_weights(setNames(loo_list, c("A", "B", "C"))) #' #' #' # pseudo-BMA+ method: #' set.seed(1414) #' loo_model_weights(loo_list, method = "pseudobma") #' #' # pseudo-BMA method (set BB = FALSE): #' loo_model_weights(loo_list, method = "pseudobma", BB = FALSE) #' #' # calling stacking_weights or pseudobma_weights directly #' lpd1 <- loo(log_lik_list[[1]], r_eff = r_eff_list[[1]])$pointwise[,1] #' lpd2 <- loo(log_lik_list[[2]], r_eff = r_eff_list[[2]])$pointwise[,1] #' lpd3 <- loo(log_lik_list[[3]], r_eff = r_eff_list[[3]])$pointwise[,1] #' stacking_weights(cbind(lpd1, lpd2, lpd3)) #' pseudobma_weights(cbind(lpd1, lpd2, lpd3)) #' pseudobma_weights(cbind(lpd1, lpd2, lpd3), BB = FALSE) #' } #' loo_model_weights <- function(x, ...) 
{ UseMethod("loo_model_weights") } #' @rdname loo_model_weights #' @export #' @export loo_model_weights.default loo_model_weights.default <- function(x, ..., method = c("stacking", "pseudobma"), optim_method = "BFGS", optim_control = list(), BB = TRUE, BB_n = 1000, alpha = 1, r_eff_list = NULL, cores = getOption("mc.cores", 1)) { cores <- loo_cores(cores) method <- match.arg(method) K <- length(x) # number of models if (is.matrix(x[[1]])) { N <- ncol(x[[1]]) # number of data points validate_log_lik_list(x) validate_r_eff_list(r_eff_list, K, N) lpd_point <- matrix(NA, N, K) elpd_loo <- rep(NA, K) for (k in 1:K) { r_eff_k <- r_eff_list[[k]] # possibly NULL log_likelihood <- x[[k]] loo_object <- loo(log_likelihood, r_eff = r_eff_k, cores = cores) lpd_point[, k] <- loo_object$pointwise[, "elpd_loo"] #calculate log(p_k (y_i | y_-i)) elpd_loo[k] <- loo_object$estimates["elpd_loo", "Estimate"] } } else if (is.psis_loo(x[[1]])) { validate_psis_loo_list(x) lpd_point <- do.call(cbind, lapply(x, function(obj) obj$pointwise[, "elpd_loo"])) elpd_loo <- sapply(x, function(obj) obj$estimates["elpd_loo", "Estimate"]) } else { stop("'x' must be a list of matrices or a list of 'psis_loo' objects.") } ## 1) stacking on log score if (method =="stacking") { wts <- stacking_weights( lpd_point = lpd_point, optim_method = optim_method, optim_control = optim_control ) } else { # method =="pseudobma" wts <- pseudobma_weights( lpd_point = lpd_point, BB = BB, BB_n = BB_n, alpha = alpha ) } if (is.matrix(x[[1]])) { if (!is.null(names(x)) && all(nzchar(names(x)))) { wts <- setNames(wts, names(x)) } } else { # list of loo objects wts <- setNames(wts, find_model_names(x)) } wts } #' @rdname loo_model_weights #' @export #' @param lpd_point If calling `stacking_weights()` or `pseudobma_weights()` #' directly, a matrix of pointwise leave-one-out (or K-fold) log likelihoods #' evaluated for different models. It should be a \eqn{N} by \eqn{K} matrix #' where \eqn{N} is sample size and \eqn{K} is the number of models. Each #' column corresponds to one model. These values can be calculated #' approximately using [loo()] or by running exact leave-one-out or K-fold #' cross-validation. 
#' #' @importFrom stats constrOptim #' stacking_weights <- function(lpd_point, optim_method = "BFGS", optim_control = list()) { stopifnot(is.matrix(lpd_point)) N <- nrow(lpd_point) K <- ncol(lpd_point) if (K < 2) { stop("At least two models are required for stacking weights.") } negative_log_score_loo <- function(w) { # objective function: log score stopifnot(length(w) == K - 1) w_full <- c(w, 1 - sum(w)) # avoid over- and underflows using log weights and rowLogSumExps sum <- sum(matrixStats::rowLogSumExps(sweep(lpd_point[1:N,], 2, log(w_full), '+'))) return(-as.numeric(sum)) } gradient <- function(w) { # gradient of the objective function stopifnot(length(w) == K - 1) w_full <- c(w, 1 - sum(w)) grad <- rep(0, K - 1) # avoid over- and underflows using log weights, rowLogSumExps, # and by subtracting the row maximum of lpd_point mlpd <- matrixStats::rowMaxs(lpd_point) for (k in 1:(K - 1)) { grad[k] <- sum((exp(lpd_point[, k] - mlpd) - exp(lpd_point[, K] - mlpd)) / exp(matrixStats::rowLogSumExps(sweep(lpd_point, 2, log(w_full), '+')) - mlpd)) } return(-grad) } ui <- rbind(rep(-1, K - 1), diag(K - 1)) # K-1 simplex constraint matrix ci <- c(-1, rep(0, K - 1)) w <- constrOptim( theta = rep(1 / K, K - 1), f = negative_log_score_loo, grad = gradient, ui = ui, ci = ci, method = optim_method, control = optim_control )$par wts <- structure( c(w, 1 - sum(w)), names = paste0("model", 1:K), class = c("stacking_weights") ) return(wts) } #' @rdname loo_model_weights #' @export #' pseudobma_weights <- function(lpd_point, BB = TRUE, BB_n = 1000, alpha = 1) { stopifnot(is.matrix(lpd_point)) N <- nrow(lpd_point) K <- ncol(lpd_point) if (K < 2) { stop("At least two models are required for pseudo-BMA weights.") } if (!BB) { elpd <- colSums2(lpd_point) uwts <- exp(elpd - max(elpd)) wts <- structure( uwts / sum(uwts), names = paste0("model", 1:K), class = "pseudobma_weights" ) return(wts) } temp <- matrix(NA, BB_n, K) BB_weighting <- dirichlet_rng(BB_n, rep(alpha, N)) for (bb in 1:BB_n) { z_bb <- BB_weighting[bb, ] %*% lpd_point * N uwts <- exp(z_bb - max(z_bb)) temp[bb, ] <- uwts / sum(uwts) } wts <- structure( colMeans(temp), names = paste0("model", 1:K), class = "pseudobma_bb_weights" ) return(wts) } #' Generate dirichlet simulations, rewritten version #' @importFrom stats rgamma #' @noRd dirichlet_rng <- function(n, alpha) { K <- length(alpha) gamma_sim <- matrix(rgamma(K * n, alpha), ncol = K, byrow = TRUE) gamma_sim / rowSums(gamma_sim) } #' @export print.stacking_weights <- function(x, digits = 3, ...) { cat("Method: stacking\n------\n") print_weight_vector(x, digits = digits) } #' @export print.pseudobma_weights <- function(x, digits = 3, ...) { cat("Method: pseudo-BMA\n------\n") print_weight_vector(x, digits = digits) } #' @export print.pseudobma_bb_weights <- function(x, digits = 3, ...) { cat("Method: pseudo-BMA+ with Bayesian bootstrap\n------\n") print_weight_vector(x, digits = digits) } print_weight_vector <- function(x, digits) { z <- cbind(x) colnames(z) <- "weight" print(.fr(z, digits = digits), quote = FALSE) invisible(x) } #' Validate r_eff_list argument if provided #' #' @noRd #' @param r_eff_list User's `r_eff_list` argument #' @param K Required length of `r_eff_list` (number of models). #' @param N Required length of each element of `r_eff_list` (number of data points). #' @return Either throws an error or returns `TRUE` invisibly. 
#' validate_r_eff_list <- function(r_eff_list, K, N) { if (is.null(r_eff_list)) return(invisible(TRUE)) if (length(r_eff_list) != K) { stop("If r_eff_list is specified then it must contain ", "one component for each model being compared.", call. = FALSE) } if (any(sapply(r_eff_list, length) != N)) { stop("Each component of r_eff list must have the same length ", "as the number of columns in the log-likelihood matrix.", call. = FALSE) } invisible(TRUE) } #' Validate log-likelihood list argument #' #' Checks that log-likelihood list has at least 2 elements and that each element #' has the same dimensions. #' #' @noRd #' @param log_lik_list User's list of log-likelihood matrices (the `x` argument #' to loo_model_weights). #' @return Either throws an error or returns `TRUE` invisibly. #' validate_log_lik_list <- function(log_lik_list) { stopifnot(is.list(log_lik_list)) if (length(log_lik_list) < 2) { stop("At least two models are required.", call. = FALSE) } if (length(unique(sapply(log_lik_list, ncol))) != 1 | length(unique(sapply(log_lik_list, nrow))) != 1) { stop("Each log-likelihood matrix must have the same dimensions.", call. = FALSE) } invisible(TRUE) } validate_psis_loo_list <- function(psis_loo_list) { stopifnot(is.list(psis_loo_list)) if (length(psis_loo_list) < 2) { stop("At least two models are required.", call. = FALSE) } if (!all(sapply(psis_loo_list, is.psis_loo))) { stop("List elements must all be 'psis_loo' objects or log-likelihood matrices.") } dims <- sapply(psis_loo_list, dim) if (length(unique(dims[1, ])) != 1 | length(unique(dims[2, ])) != 1) { stop("Each object in the list must have the same dimensions.", call. = FALSE) } invisible(TRUE) } loo/vignettes/0000755000176200001440000000000015122306010013041 5ustar liggesusersloo/vignettes/loo2-mixis.Rmd0000644000176200001440000002224014641333357015532 0ustar liggesusers--- title: "Mixture IS leave-one-out cross-validation for high-dimensional Bayesian models" author: "Luca Silva and Giacomo Zanella" date: "`r Sys.Date()`" output: html_vignette: toc: yes params: EVAL: !r identical(Sys.getenv("NOT_CRAN"), "true") --- ```{r, child="children/SETTINGS-knitr.txt"} ``` ```{r, child="children/SEE-ONLINE.txt", eval = if (isTRUE(exists("params"))) !params$EVAL else TRUE} ``` # Introduction This vignette shows how to perform Bayesian leave-one-out cross-validation (LOO-CV) using the mixture estimators proposed in the paper [Silva and Zanella (2022)](https://arxiv.org/abs/2209.09190). These estimators have shown to be useful in presence of outliers but also, and especially, in high-dimensional settings where the model features many parameters. In these contexts it can happen that a large portion of observations lead to high values of Pareto-$k$ diagnostics and potential instability of PSIS-LOO estimators. For this illustration we consider a high-dimensional Bayesian Logistic regression model applied to the _Voice_ dataset. ## Setup: load packages and set seed ```{r, warnings=FALSE, message=FALSE} library("rstan") library("loo") library("matrixStats") options(mc.cores = parallel::detectCores(), parallel=FALSE) set.seed(24877) ``` ## Model This is the Stan code for a logistic regression model with regularized horseshoe prior. The code includes an if statement to include a code line needed later for the MixIS approach. 
```{r stancode_horseshoe} # Note: some syntax used in this program requires RStan >= 2.26 (or CmdStanR) # To use an older version of RStan change the line declaring `y` to: # int y[N]; stancode_horseshoe <- " data { int N; int P; array[N] int y; matrix [N,P] X; real scale_global; int mixis; } transformed data { real nu_global=1; // degrees of freedom for the half-t priors for tau real nu_local=1; // degrees of freedom for the half-t priors for lambdas // (nu_local = 1 corresponds to the horseshoe) real slab_scale=2;// for the regularized horseshoe real slab_df=100; // for the regularized horseshoe } parameters { vector[P] z; // for non-centered parameterization real tau; // global shrinkage parameter vector [P] lambda; // local shrinkage parameter real caux; } transformed parameters { vector[P] beta; { vector[P] lambda_tilde; // 'truncated' local shrinkage parameter real c = slab_scale * sqrt(caux); // slab scale lambda_tilde = sqrt( c^2 * square(lambda) ./ (c^2 + tau^2*square(lambda))); beta = z .* lambda_tilde*tau; } } model { vector[N] means=X*beta; vector[N] log_lik; target += std_normal_lpdf(z); target += student_t_lpdf(lambda | nu_local, 0, 1); target += student_t_lpdf(tau | nu_global, 0, scale_global); target += inv_gamma_lpdf(caux | 0.5*slab_df, 0.5*slab_df); for (n in 1:N) { log_lik[n]= bernoulli_logit_lpmf(y[n] | means[n]); } target += sum(log_lik); if (mixis) { target += log_sum_exp(-log_lik); } } generated quantities { vector[N] means=X*beta; vector[N] log_lik; for (n in 1:N) { log_lik[n] = bernoulli_logit_lpmf(y[n] | means[n]); } } " ``` ## Dataset The _LSVT Voice Rehabilitation Data Set_ (see [link](https://archive.ics.uci.edu/ml/datasets/LSVT+Voice+Rehabilitation) for details) has $p=312$ covariates and $n=126$ observations with binary response. We construct data list for Stan. ```{r, results='hide', warning=FALSE, message=FALSE, error=FALSE} data(voice) y <- voice$y X <- voice[2:length(voice)] n <- dim(X)[1] p <- dim(X)[2] p0 <- 10 scale_global <- 2*p0/(p-p0)/sqrt(n-1) standata <- list(N = n, P = p, X = as.matrix(X), y = c(y), scale_global = scale_global, mixis = 0) ``` Note that in our prior specification we divide the prior variance by the number of covariates $p$. This is often done in high-dimensional contexts to have a prior variance for the linear predictors $X\beta$ that remains bounded as $p$ increases. ## PSIS estimators and Pareto-$k$ diagnostics LOO-CV computations are challenging in this context due to high-dimensionality of the parameter space. To show that, we compute PSIS-LOO estimators, which require sampling from the posterior distribution, and inspect the associated Pareto-$k$ diagnostics. ```{r, results='hide', warning=FALSE} chains <- 4 n_iter <- 2000 warm_iter <- 1000 stanmodel <- stan_model(model_code = stancode_horseshoe) fit_post <- sampling(stanmodel, data = standata, chains = chains, iter = n_iter, warmup = warm_iter, refresh = 0) loo_post <-loo(fit_post) ``` ```{r} print(loo_post) ``` As we can see the diagnostics signal either "bad" or "very bad" Pareto-$k$ values for roughly $15-30\%$ of the observations which is a significant portion of the dataset. ## Mixture estimators We now compute the mixture estimators proposed in Silva and Zanella (2022). These require to sample from the following mixture of leave-one-out posteriors \begin{equation} q_{mix}(\theta) = \frac{\sum_{i=1}^n p(y_{-i}|\theta)p(\theta)}{\sum_{i=1}^np(y_{-i})}\propto p(\theta|y)\cdot \left(\sum_{i=1}^np(y_i|\theta)^{-1}\right). 
\end{equation} The code to generate a Stan model for the above mixture distribution is the same to the one for the posterior, just enabling one line of code with a _LogSumExp_ contribution to account for the last term in the equation above. ``` if (mixis) { target += log_sum_exp(-log_lik); } ``` We sample from the mixture and collect the log-likelihoods term. ```{r, results='hide', warnings=FALSE} standata$mixis <- 1 fit_mix <- sampling(stanmodel, data = standata, chains = chains, iter = n_iter, warmup = warm_iter, refresh = 0, pars = "log_lik") log_lik_mix <- extract(fit_mix)$log_lik ``` We now compute the mixture estimators, following the numerically stable implementation in Appendix A.2 of [Silva and Zanella (2022)](https://arxiv.org/abs/2209.09190). The code below makes use of the package "matrixStats". ```{r} l_common_mix <- rowLogSumExps(-log_lik_mix) log_weights <- -log_lik_mix - l_common_mix elpd_mixis <- logSumExp(-l_common_mix) - rowLogSumExps(t(log_weights)) ``` ## Comparison with benchmark values obtained with long simulations To evaluate the performance of mixture estimators (MixIS) we also generate _benchmark values_, i.e.\ accurate approximations of the LOO predictives $\{p(y_i|y_{-i})\}_{i=1,\dots,n}$, obtained by brute-force sampling from the leave-one-out posteriors directly, getting $90k$ samples from each and discarding the first $10k$ as warmup. This is computationally heavy, hence we have saved the results and we just load them in the current vignette. ```{r} data(voice_loo) elpd_loo <- voice_loo$elpd_loo ``` We can then compute the root mean squared error (RMSE) of the PSIS and mixture estimators relative to such benchmark values. ```{r} elpd_psis <- loo_post$pointwise[,1] print(paste("RMSE(PSIS) =",round( sqrt(mean((elpd_loo-elpd_psis)^2)) ,2))) print(paste("RMSE(MixIS) =",round( sqrt(mean((elpd_loo-elpd_mixis)^2)) ,2))) ``` Here mixture estimator provides a reduction in RMSE. Note that this value would increase with the number of samples drawn from the posterior and mixture, since in this example the RMSE of MixIS will exhibit a CLT-type decay while the one of PSIS will converge at a slower rate (this can be verified by running the above code with a larger sample size; see also Figure 3 of Silva and Zanella (2022) for analogous results). We then compare the overall ELPD estimates with the brute force one. ```{r} elpd_psis <- loo_post$pointwise[,1] print(paste("ELPD (PSIS)=",round(sum(elpd_psis),2))) print(paste("ELPD (MixIS)=",round(sum(elpd_mixis),2))) print(paste("ELPD (brute force)=",round(sum(elpd_loo),2))) ``` In this example, MixIS provides a more accurate ELPD estimate closer to the brute force estimate, while PSIS severely overestimates the ELPD. Note that low accuracy of the PSIS ELPD estimate is expected in this example given the large number of large Pareto-$k$ values. In this example, the accuracy of MixIS estimate will also improve with bigger MCMC sample size. More generally, mixture estimators can be useful in situations where standard PSIS estimators struggle and return many large Pareto-$k$ values. In these contexts MixIS often provides more accurate LOO-CV and ELPD estimates with a single sampling routine (i.e. with a cost comparable to sampling from the original posterior). ## References Silva L. and Zanella G. (2022). Robust leave-one-out cross-validation for high-dimensional Bayesian models. Preprint at [arXiv:2209.09190](https://arxiv.org/abs/2209.09190) Vehtari A., Gelman A., and Gabry J. (2017). 
Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. *Statistics and Computing*, 27(5), 1413--1432. Preprint at [arXiv:1507.04544](https://arxiv.org/abs/1507.04544) Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. *Journal of Machine Learning Research*, 25(72):1-58. [PDF](https://jmlr.org/papers/v25/19-556.html) loo/vignettes/loo2-non-factorized.Rmd0000644000176200001440000006624714641333357017342 0ustar liggesusers--- title: "Leave-one-out cross-validation for non-factorized models" author: "Aki Vehtari, Paul Bürkner and Jonah Gabry" date: "`r Sys.Date()`" output: html_vignette: toc: yes encoding: "UTF-8" params: EVAL: !r identical(Sys.getenv("NOT_CRAN"), "true") --- ```{r settings, child="children/SETTINGS-knitr.txt"} ``` ```{r more-knitr-ops, include=FALSE} knitr::opts_chunk$set( cache=TRUE, message=FALSE, warning=FALSE ) ``` ```{r, child="children/SEE-ONLINE.txt", eval = if (isTRUE(exists("params"))) !params$EVAL else TRUE} ``` # Introduction When computing ELPD-based LOO-CV for a Bayesian model we need to compute the log leave-one-out predictive densities $\log{p(y_i | y_{-i})}$ for every response value $y_i, \: i = 1, \ldots, N$, where $y_{-i}$ denotes all response values except observation $i$. To obtain $p(y_i | y_{-i})$, we need to have access to the pointwise likelihood $p(y_i\,|\, y_{-i}, \theta)$ and integrate over the model parameters $\theta$: $$ p(y_i\,|\,y_{-i}) = \int p(y_i\,|\, y_{-i}, \theta) \, p(\theta\,|\, y_{-i}) \,d \theta $$ Here, $p(\theta\,|\, y_{-i})$ is the leave-one-out posterior distribution for $\theta$, that is, the posterior distribution for $\theta$ obtained by fitting the model while holding out the $i$th observation (we will later show how refitting the model to data $y_{-i}$ can be avoided). If the observation model is formulated directly as the product of the pointwise observation models, we call it a *factorized* model. In this case, the likelihood is also the product of the pointwise likelihood contributions $p(y_i\,|\, y_{-i}, \theta)$. To better illustrate possible structures of the observation models, we formally divide $\theta$ into two parts, observation-specific latent variables $f = (f_1, \ldots, f_N)$ and hyperparameters $\psi$, so that $p(y_i\,|\, y_{-i}, \theta) = p(y_i\,|\, y_{-i}, f_i, \psi)$. Depending on the model, one of the two parts of $\theta$ may also be empty. In very simple models, such as linear regression models, latent variables are not explicitly presented and response values are conditionally independent given $\psi$, so that $p(y_i\,|\, y_{-i}, f_i, \psi) = p(y_i \,|\, \psi)$. The full likelihood can then be written in the familiar form $$ p(y \,|\, \psi) = \prod_{i=1}^N p(y_i \,|\, \psi), $$ where $y = (y_1, \ldots, y_N)$ denotes the vector of all responses. When the likelihood factorizes this way, the conditional pointwise log-likelihood can be obtained easily by computing $p(y_i\,|\, \psi)$ for each $i$ with computational cost $O(n)$. Yet, there are several reasons why a *non-factorized* observation model may be necessary or preferred. In non-factorized models, the joint likelihood of the response values $p(y \,|\, \theta)$ is not factorized into observation-specific components, but rather given directly as one joint expression. For some models, an analytic factorized formulation is simply not available in which case we speak of a *non-factorizable* model. 
Even in models whose observation model can be factorized in principle, it may still be preferable to use a non-factorized form for reasons of efficiency and numerical stability (Bürkner et al. 2020). Whether a non-factorized model is used by necessity or for efficiency and stability, it comes at the cost of having no direct access to the leave-one-out predictive densities and thus to the overall leave-one-out predictive accuracy. In theory, we can express the observation-specific likelihoods in terms of the joint likelihood via $$ p(y_i \,|\, y_{-i}, \theta) = \frac{p(y \,|\, \theta)}{p(y_{-i} \,|\, \theta)} = \frac{p(y \,|\, \theta)}{\int p(y \,|\, \theta) \, d y_i}, $$ but the expression on the right-hand side may not always have an analytical solution. Computing $\log p(y_i \,|\, y_{-i}, \theta)$ for non-factorized models is therefore often impossible, or at least inefficient and numerically unstable. However, there is a large class of multivariate normal and Student-$t$ models for which efficient analytical solutions are available. More details can be found in our paper about LOO-CV for non-factorized models (Bürkner, Gabry, & Vehtari, 2020), which is available as a preprint on arXiv (https://arxiv.org/abs/1810.10559). # LOO-CV for multivariate normal models In this vignette, we will focus on non-factorized multivariate normal models. Based on the results of Sundararajan and Keerthi (2001), Bürkner et al. (2020) show that, for multivariate normal models with covariance matrix $C$, the LOO predictive mean and standard deviation can be computed as follows: \begin{align} \mu_{\tilde{y},-i} &= y_i-\bar{c}_{ii}^{-1} g_i \nonumber \\ \sigma_{\tilde{y},-i} &= \sqrt{\bar{c}_{ii}^{-1}}, \end{align} where $g_i$ and $\bar{c}_{ii}$ are \begin{align} g_i &= \left[C^{-1} y\right]_i \nonumber \\ \bar{c}_{ii} &= \left[C^{-1}\right]_{ii}. \end{align} Using these results, the log predictive density of the $i$th observation is then computed as $$ \log p(y_i \,|\, y_{-i},\theta) = - \frac{1}{2}\log(2\pi) - \frac{1}{2}\log \sigma^2_{-i} - \frac{1}{2}\frac{(y_i-\mu_{-i})^2}{\sigma^2_{-i}}. $$ Expressing this same equation in terms of $g_i$ and $\bar{c}_{ii}$, the log predictive density becomes: $$ \log p(y_i \,|\, y_{-i},\theta) = - \frac{1}{2}\log(2\pi) + \frac{1}{2}\log \bar{c}_{ii} - \frac{1}{2}\frac{g_i^2}{\bar{c}_{ii}}. $$ (Note that Vehtari et al. (2016) has a typo in the corresponding Equation 34.) From these equations we can now derive a recipe for obtaining the conditional pointwise log-likelihood for _all_ models that can be expressed conditionally in terms of a multivariate normal with invertible covariance matrix $C$. ## Approximate LOO-CV using integrated importance-sampling The above LOO equations for multivariate normal models are conditional on parameters $\theta$. Therefore, to obtain the leave-one-out predictive density $p(y_i \,|\, y_{-i})$ we need to integrate over $\theta$, $$ p(y_i\,|\,y_{-i}) = \int p(y_i\,|\,y_{-i}, \theta) \, p(\theta\,|\,y_{-i}) \,d\theta. $$ Here, $p(\theta\,|\,y_{-i})$ is the leave-one-out posterior distribution for $\theta$, that is, the posterior distribution for $\theta$ obtained by fitting the model while holding out the $i$th observation.
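Before turning to the importance-sampling approximation, it can help to see the conditional LOO equations written out in code. The following is only a minimal sketch (not part of the original analysis): `y`, `mu`, and `C` are placeholder names for the response vector, its mean vector, and the covariance matrix implied by a single fixed draw of the parameters. With `mu = 0` it reproduces the zero-mean formulas above; the SAR example later in the vignette uses the same idea after centering the response.

```{r loo-cond-normal-sketch, eval=FALSE}
# Minimal sketch: conditional LOO means, sds, and log densities for one
# parameter draw of a multivariate normal model y ~ N(mu, C).
# `y`, `mu`, and `C` are placeholders, not objects defined in this vignette.
loo_conditional_normal <- function(y, mu, C) {
  Cinv <- solve(C)                    # C^{-1}
  g <- as.vector(Cinv %*% (y - mu))   # g_i = [C^{-1} (y - mu)]_i
  cbar <- diag(Cinv)                  # cbar_ii = [C^{-1}]_ii
  mu_loo <- y - g / cbar              # LOO predictive means
  sd_loo <- sqrt(1 / cbar)            # LOO predictive standard deviations
  data.frame(
    mu_loo = mu_loo,
    sd_loo = sd_loo,
    loglik = dnorm(y, mu_loo, sd_loo, log = TRUE)  # log p(y_i | y_{-i}, theta)
  )
}
```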
To avoid the cost of sampling from $N$ leave-one-out posteriors, it is possible to take the posterior draws $\theta^{(s)}, \, s=1,\ldots,S$, from the \emph{full} posterior $p(\theta\,|\,y)$, and then approximate the above integral using integrated importance sampling (Vehtari et al., 2016, Section 3.6.1): $$ p(y_i\,|\,y_{-i}) \approx \frac{ \sum_{s=1}^S p(y_i\,|\,y_{-i},\,\theta^{(s)}) \,w_i^{(s)}}{ \sum_{s=1}^S w_i^{(s)}}, $$ where $w_i^{(s)}$ are importance weights. First we compute the raw importance ratios $$ r_i^{(s)} \propto \frac{1}{p(y_i \,|\, y_{-i}, \,\theta^{(s)})}, $$ and then stabilize them using Pareto smoothed importance sampling (PSIS, Vehtari et al., 2019) to obtain the weights $w_i^{(s)}$. The resulting approximation is referred to as PSIS-LOO (Vehtari et al., 2017). ## Exact LOO-CV with re-fitting In order to validate the approximate LOO procedure, and also in order to allow exact computations to be made for a small number of leave-one-out folds for which the Pareto $k$ diagnostic (Vehtari et al., 2024) indicates an unstable approximation, we need to consider how we might do _exact_ leave-one-out CV for a non-factorized model. In the case of a Gaussian process that has the marginalization property, we could just drop the one row and column of $C$ corresponding to the held out observation. This does not hold in general for multivariate normal models, however, and to keep the original prior we may need to maintain the full covariance matrix $C$ even when one of the observations is left out. The solution is to model $y_i$ as a missing observation and estimate it along with all of the other model parameters. For a conditional multivariate normal model, $\log p(y_i\,|\,y_{-i})$ can be computed as follows. First, we model $y_i$ as missing and denote the corresponding parameter $y_i^{\mathrm{mis}}$. Then, we define $$ y_{\mathrm{mis}(i)} = (y_1, \ldots, y_{i-1}, y_i^{\mathrm{mis}}, y_{i+1}, \ldots, y_N) $$ to be the same as the full set of observations $y$, except replacing $y_i$ with the parameter $y_i^{\mathrm{mis}}$. Second, we compute the LOO predictive mean and standard deviation as above, but replace $y$ with $y_{\mathrm{mis}(i)}$ in the computation of $\mu_{\tilde{y},-i}$: $$ \mu_{\tilde{y},-i} = y_{{\mathrm{mis}}(i)}-\bar{c}_{ii}^{-1}g_i, $$ where in this case we have $$ g_i = \left[ C^{-1} y_{\mathrm{mis}(i)} \right]_i. $$ The conditional log predictive density is then computed with the above $\mu_{\tilde{y},-i}$ and the left out observation $y_i$: $$ \log p(y_i\,|\,y_{-i},\theta) = - \frac{1}{2}\log(2\pi) - \frac{1}{2}\log \sigma^2_{\tilde{y},-i} - \frac{1}{2}\frac{(y_i-\mu_{\tilde{y},-i})^2}{\sigma^2_{\tilde{y},-i}}. $$ Finally, the leave-one-out predictive distribution can then be estimated as $$ p(y_i\,|\,y_{-i}) \approx \frac{1}{S} \sum_{s=1}^S p(y_i\,|\,y_{-i}, \theta_{-i}^{(s)}), $$ where $\theta_{-i}^{(s)}$ are draws from the posterior distribution $p(\theta\,|\,y_{\mathrm{mis}(i)})$. # Lagged SAR models A common non-factorized multivariate normal model is the simultaneously autoregressive (SAR) model, which is frequently used for spatially correlated data. The lagged SAR model is defined as $$ y = \rho Wy + \eta + \epsilon $$ or equivalently $$ (I - \rho W)y = \eta + \epsilon, $$ where $\rho$ is the spatial correlation parameter and $W$ is a user-defined weight matrix. The matrix $W$ has entries $w_{ii} = 0$ along the diagonal and the off-diagonal entries $w_{ij}$ are larger when areas $i$ and $j$ are closer to each other.
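To make the definition concrete, the following minimal sketch (not used in the case study below) simulates a single response vector from a lagged SAR model; `eta`, `rho`, `W`, and `sigma` are placeholder names for the predictor term, spatial correlation, weight matrix, and residual standard deviation.

```{r lagsar-simulation-sketch, eval=FALSE}
# Minimal sketch: draw y from a lagged SAR model,
# y = (I - rho * W)^{-1} (eta + eps) with eps ~ N(0, sigma^2 I).
# All arguments are placeholders supplied by the user.
simulate_lagsar <- function(eta, rho, W, sigma) {
  N <- length(eta)
  eps <- rnorm(N, mean = 0, sd = sigma)
  as.vector(solve(diag(N) - rho * W, eta + eps))
}
```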
In a linear model, the predictor term $\eta$ is given by $\eta = X \beta$ with design matrix $X$ and regression coefficients $\beta$. However, since the above equation holds for arbitrary $\eta$, these results are not restricted to linear models. If we have $\epsilon \sim {\mathrm N}(0, \,\sigma^2 I)$, it follows that $$ (I - \rho W)y \sim {\mathrm N}(\eta, \sigma^2 I), $$ which corresponds to the following log PDF coded in **Stan**: ```{r lpdf, eval=FALSE} /** * Normal log-pdf for spatially lagged responses * * @param y Vector of response values. * @param mu Mean parameter vector. * @param sigma Positive scalar residual standard deviation. * @param rho Positive scalar autoregressive parameter. * @param W Spatial weight matrix. * * @return A scalar to be added to the log posterior. */ real normal_lagsar_lpdf(vector y, vector mu, real sigma, real rho, matrix W) { int N = rows(y); real inv_sigma2 = 1 / square(sigma); matrix[N, N] W_tilde = -rho * W; vector[N] half_pred; for (n in 1:N) W_tilde[n,n] += 1; half_pred = W_tilde * (y - mdivide_left(W_tilde, mu)); return 0.5 * log_determinant(crossprod(W_tilde) * inv_sigma2) - 0.5 * dot_self(half_pred) * inv_sigma2; } ``` For the purpose of computing LOO-CV, it makes sense to rewrite the SAR model in slightly different form. Conditional on $\rho$, $\eta$, and $\sigma$, if we write \begin{align} y-(I-\rho W)^{-1}\eta &\sim {\mathrm N}(0, \sigma^2(I-\rho W)^{-1}(I-\rho W)^{-T}), \end{align} or more compactly, with $\widetilde{W}=(I-\rho W)$, \begin{align} y-\widetilde{W}^{-1}\eta &\sim {\mathrm N}(0, \sigma^2(\widetilde{W}^{T}\widetilde{W})^{-1}), \end{align} then this has the same form as the zero mean Gaussian process from above. Accordingly, we can compute the leave-one-out predictive densities with the equations from Sundararajan and Keerthi (2001), replacing $y$ with $(y-\widetilde{W}^{-1}\eta)$ and taking the covariance matrix $C$ to be $\sigma^2(\widetilde{W}^{T}\widetilde{W})^{-1}$. ## Case Study: Neighborhood Crime in Columbus, Ohio In order to demonstrate how to carry out the computations implied by these equations, we will first fit a lagged SAR model to data on crime in 49 different neighborhoods of Columbus, Ohio during the year 1980. The data was originally described in Aneslin (1988) and ships with the **spdep** R package. In addition to the **loo** package, for this analysis we will use the **brms** interface to Stan to generate a Stan program and fit the model, and also the **bayesplot** and **ggplot2** packages for plotting. ```{r setup, cache=FALSE} library("loo") library("brms") library("bayesplot") library("ggplot2") color_scheme_set("brightblue") theme_set(theme_default()) SEED <- 10001 set.seed(SEED) # only sets seed for R (seed for Stan set later) # loads COL.OLD data frame and COL.nb neighbor list data(oldcol, package = "spdep") ``` The three variables in the data set relevant to this example are: * `CRIME`: the number of residential burglaries and vehicle thefts per thousand households in the neighbood * `HOVAL`: housing value in units of $1000 USD * `INC`: household income in units of $1000 USD ```{r data} str(COL.OLD[, c("CRIME", "HOVAL", "INC")]) ``` We will also use the object `COL.nb`, which is a list containing information about which neighborhoods border each other. From this list we will be able to construct the weight matrix to used to help account for the spatial dependency among the observations. 
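As an optional check (the model fitting below does not depend on it), the weight matrix implied by `COL.nb` can be built directly with `spdep::nb2mat()`, the same helper used later when computing the pointwise log-likelihood by hand. This sketch simply inspects its basic properties.

```{r weight-matrix-check, eval=FALSE}
# Optional sketch: row-standardized spatial weight matrix for the 49 neighborhoods
W <- spdep::nb2mat(COL.nb)   # default style = "W" (row-standardized)
dim(W)                       # 49 x 49
all(diag(W) == 0)            # no neighborhood is its own neighbor
summary(rowSums(W))          # rows sum to 1 after row-standardization
```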
### Fit lagged SAR model A model predicting `CRIME` from `INC` and `HOVAL`, while accounting for the spatial dependency via an SAR structure, can be specified in **brms** as follows. ```{r fit, results="hide"} fit <- brm( CRIME ~ INC + HOVAL + sar(COL.nb, type = "lag"), data = COL.OLD, data2 = list(COL.nb = COL.nb), chains = 4, seed = SEED ) ``` The code above fits the model in **Stan** using a log PDF equivalent to the `normal_lagsar_lpdf` function we defined above. In the summary output below we see that both higher income and higher housing value predict lower crime rates in the neighborhood. Moreover, there seems to be substantial spatial correlation between adjacent neighborhoods, as indicated by the posterior distribution of the `lagsar` parameter. ```{r plot-lagsar, message=FALSE} lagsar <- as.matrix(fit, pars = "lagsar") estimates <- quantile(lagsar, probs = c(0.25, 0.5, 0.75)) mcmc_hist(lagsar) + vline_at(estimates, linetype = 2, size = 1) + ggtitle("lagsar: posterior median and 50% central interval") ``` ### Approximate LOO-CV After fitting the model, the next step is to compute the pointwise log-likelihood values needed for approximate LOO-CV. To do this we will use the recipe laid out in the previous sections. ```{r approx} posterior <- as.data.frame(fit) y <- fit$data$CRIME N <- length(y) S <- nrow(posterior) loglik <- yloo <- sdloo <- matrix(nrow = S, ncol = N) for (s in 1:S) { p <- posterior[s, ] eta <- p$b_Intercept + p$b_INC * fit$data$INC + p$b_HOVAL * fit$data$HOVAL W_tilde <- diag(N) - p$lagsar * spdep::nb2mat(COL.nb) Cinv <- t(W_tilde) %*% W_tilde / p$sigma^2 g <- Cinv %*% (y - solve(W_tilde, eta)) cbar <- diag(Cinv) yloo[s, ] <- y - g / cbar sdloo[s, ] <- sqrt(1 / cbar) loglik[s, ] <- dnorm(y, yloo[s, ], sdloo[s, ], log = TRUE) } # use loo for psis smoothing log_ratios <- -loglik psis_result <- psis(log_ratios) ``` The quality of the PSIS-LOO approximation can be investigated graphically by plotting the Pareto-k estimate for each observation. The approximation is robust up to values of $0.7$ (Vehtari et al, 2017, 2024). In the plot below, we see that the fourth observation is problematic and so may reduce the accuracy of the LOO-CV approximation. ```{r plot, cache = FALSE} plot(psis_result, label_points = TRUE) ``` We can also check that the conditional leave-one-out predictive distribution equations work correctly, for instance, using the last posterior draw: ```{r checklast, cache = FALSE} yloo_sub <- yloo[S, ] sdloo_sub <- sdloo[S, ] df <- data.frame( y = y, yloo = yloo_sub, ymin = yloo_sub - sdloo_sub * 2, ymax = yloo_sub + sdloo_sub * 2 ) ggplot(data=df, aes(x = y, y = yloo, ymin = ymin, ymax = ymax)) + geom_errorbar( width = 1, color = "skyblue3", position = position_jitter(width = 0.25) ) + geom_abline(color = "gray30", size = 1.2) + geom_point() ``` Finally, we use PSIS-LOO to approximate the expected log predictive density (ELPD) for new data, which we will validate using exact LOO-CV in the upcoming section. ```{r psisloo} (psis_loo <- loo(loglik)) ``` ### Exact LOO-CV Exact LOO-CV for the above example is somewhat more involved, as we need to re-fit the model $N$ times and each time model the held-out data point as a parameter. First, we create an empty dummy model that we will update below as we loop over the observations. 
```{r fit_dummy, cache = TRUE}
# see help("mi", "brms") for details on the mi() usage
fit_dummy <- brm(
  CRIME | mi() ~ INC + HOVAL + sar(COL.nb, type = "lag"),
  data = COL.OLD,
  data2 = list(COL.nb = COL.nb),
  chains = 0
)
```

Next, we fit the model $N$ times, each time leaving out a single observation and then computing the log predictive density for that observation. For obvious reasons, this takes much longer than the approximation we computed above, but it is necessary in order to validate the approximate LOO-CV method. Thanks to the PSIS-LOO approximation, in general doing these slow exact computations can be avoided.

```{r exact-loo-cv, results="hide", message=FALSE, warning=FALSE, cache = TRUE}
S <- 500
res <- vector("list", N)
loglik <- matrix(nrow = S, ncol = N)
for (i in seq_len(N)) {
  dat_mi <- COL.OLD
  dat_mi$CRIME[i] <- NA
  fit_i <- update(fit_dummy, newdata = dat_mi,
                  # just for vignette
                  chains = 1, iter = S * 2)
  posterior <- as.data.frame(fit_i)
  yloo <- sdloo <- rep(NA, S)
  for (s in seq_len(S)) {
    p <- posterior[s, ]
    y_miss_i <- y
    y_miss_i[i] <- p$Ymi
    eta <- p$b_Intercept + p$b_INC * fit_i$data$INC + p$b_HOVAL * fit_i$data$HOVAL
    W_tilde <- diag(N) - p$lagsar * spdep::nb2mat(COL.nb)
    Cinv <- t(W_tilde) %*% W_tilde / p$sigma^2
    g <- Cinv %*% (y_miss_i - solve(W_tilde, eta))
    cbar <- diag(Cinv)
    yloo[s] <- y_miss_i[i] - g[i] / cbar[i]
    sdloo[s] <- sqrt(1 / cbar[i])
    loglik[s, i] <- dnorm(y[i], yloo[s], sdloo[s], log = TRUE)
  }
  ypred <- rnorm(S, yloo, sdloo)
  res[[i]] <- data.frame(y = c(posterior$Ymi, ypred))
  res[[i]]$type <- rep(c("pp", "loo"), each = S)
  res[[i]]$obs <- i
}
res <- do.call(rbind, res)
```

A first step in the validation of the pointwise predictive density is to compare the distribution of the implied response values for the left-out observation to the distribution of the $y_i^{\mathrm{mis}}$ posterior-predictive values estimated as part of the model. If the pointwise predictive density is correct, the two distributions should match very closely (up to sampling error). In the plot below, we overlay these two distributions for the first four observations and see that they match very closely (as is the case for all $49$ observations in this example).

```{r yplots, cache = FALSE, fig.width=10, out.width="95%", fig.asp = 0.3}
res_sub <- res[res$obs %in% 1:4, ]
ggplot(res_sub, aes(y, fill = type)) +
  geom_density(alpha = 0.6) +
  facet_wrap("obs", scales = "fixed", ncol = 4)
```

In the final step, we compute the ELPD based on the exact LOO-CV and compare it to the approximate PSIS-LOO result computed earlier.

```{r loo_exact, cache=FALSE}
log_mean_exp <- function(x) {
  # more stable than log(mean(exp(x)))
  max_x <- max(x)
  max_x + log(sum(exp(x - max_x))) - log(length(x))
}
exact_elpds <- apply(loglik, 2, log_mean_exp)
exact_elpd <- sum(exact_elpds)
round(exact_elpd, 1)
```

The results of the approximate and exact LOO-CV are similar but not as close as we would expect if there were no problematic observations. We can investigate this issue more closely by plotting the approximate against the exact pointwise ELPD values.
```{r compare, fig.height=5}
df <- data.frame(
  approx_elpd = psis_loo$pointwise[, "elpd_loo"],
  exact_elpd = exact_elpds
)
ggplot(df, aes(x = approx_elpd, y = exact_elpd)) +
  geom_abline(color = "gray30") +
  geom_point(size = 2) +
  geom_point(data = df[4, ], size = 3, color = "red3") +
  xlab("Approximate elpds") +
  ylab("Exact elpds") +
  coord_fixed(xlim = c(-16, -3), ylim = c(-16, -3))
```

In the plot above the fourth data point (the observation flagged as problematic by the PSIS-LOO approximation) is colored in red and is the clear outlier. Otherwise, the correspondence between the exact and approximate values is strong. In fact, summing over the pointwise ELPD values and leaving out the fourth observation yields practically equivalent results for approximate and exact LOO-CV:

```{r pt4}
without_pt_4 <- c(
  approx = sum(psis_loo$pointwise[-4, "elpd_loo"]),
  exact = sum(exact_elpds[-4])
)
round(without_pt_4, 1)
```

From this we can conclude that the difference we found when including *all* observations does not indicate a bug in our implementation of the approximate LOO-CV but rather a violation of its assumptions.

# Working with Stan directly

So far, we have specified the models in brms and only used Stan implicitly behind the scenes. This allowed us to focus on the primary purpose of validating approximate LOO-CV for non-factorized models. However, we would also like to show how everything can be set up in Stan directly. The Stan code brms generates is human readable and so we can use it to learn some of the essential aspects of Stan and the particular model we are implementing. The Stan program below is a slightly modified version of the code extracted via `stancode(fit_dummy)`:

```{r brms-stan-code, eval=FALSE}
// generated with brms 2.2.0
functions {
  /**
   * Normal log-pdf for spatially lagged responses
   *
   * @param y Vector of response values.
   * @param mu Mean parameter vector.
   * @param sigma Positive scalar residual standard deviation.
   * @param rho Positive scalar autoregressive parameter.
   * @param W Spatial weight matrix.
   *
   * @return A scalar to be added to the log posterior.
   */
  real normal_lagsar_lpdf(vector y, vector mu, real sigma, real rho, matrix W) {
    int N = rows(y);
    real inv_sigma2 = 1 / square(sigma);
    matrix[N, N] W_tilde = -rho * W;
    vector[N] half_pred;
    for (n in 1:N) W_tilde[n, n] += 1;
    half_pred = W_tilde * (y - mdivide_left(W_tilde, mu));
    return 0.5 * log_determinant(crossprod(W_tilde) * inv_sigma2) -
      0.5 * dot_self(half_pred) * inv_sigma2;
  }
}
data {
  int N;  // total number of observations
  vector[N] Y;  // response variable
  int Nmi;  // number of missings
  int Jmi[Nmi];  // positions of missings
  int K;  // number of population-level effects
  matrix[N, K] X;  // population-level design matrix
  matrix[N, N] W;  // spatial weight matrix
  int prior_only;  // should the likelihood be ignored?
}
transformed data {
  int Kc = K - 1;
  matrix[N, K - 1] Xc;  // centered version of X
  vector[K - 1] means_X;  // column means of X before centering
  for (i in 2:K) {
    means_X[i - 1] = mean(X[, i]);
    Xc[, i - 1] = X[, i] - means_X[i - 1];
  }
}
parameters {
  vector[Nmi] Ymi;  // estimated missings
  vector[Kc] b;  // population-level effects
  real temp_Intercept;  // temporary intercept
  real sigma;  // residual SD
  real lagsar;  // SAR parameter
}
transformed parameters {
}
model {
  vector[N] Yl = Y;
  vector[N] mu = Xc * b + temp_Intercept;
  Yl[Jmi] = Ymi;
  // priors including all constants
  target += student_t_lpdf(temp_Intercept | 3, 34, 17);
  target += student_t_lpdf(sigma | 3, 0, 17) -
    1 * student_t_lccdf(0 | 3, 0, 17);
  // likelihood including all constants
  if (!prior_only) {
    target += normal_lagsar_lpdf(Yl | mu, sigma, lagsar, W);
  }
}
generated quantities {
  // actual population-level intercept
  real b_Intercept = temp_Intercept - dot_product(means_X, b);
}
```

Here we want to focus on two aspects of the Stan code. First, because there is no built-in function in Stan that calculates the log-likelihood for the lag-SAR model, we define a new `normal_lagsar_lpdf` function in the `functions` block of the Stan program. This is the same function we showed earlier in the vignette and it can be used to compute the log-likelihood in an efficient and numerically stable way. The `_lpdf` suffix used in the function name informs Stan that this is a log probability density function.

Second, this Stan program nicely illustrates how to set up missing value imputation. Instead of just computing the log-likelihood for the observed responses `Y`, we define a new variable `Yl` which is equal to `Y` if the response is observed and equal to `Ymi` if the response is missing. The latter is in turn defined as a parameter and thus estimated along with all other parameters of the model. More details about missing value imputation in Stan can be found in the *Missing Data & Partially Known Parameters* section of the [Stan manual](https://mc-stan.org/users/documentation/index.html).

The Stan code extracted from brms is not only helpful when learning Stan, but can also drastically speed up the specification of models that are not supported by brms. If brms can fit a model similar but not identical to the desired model, we can let brms generate the Stan program for the similar model and then mold it into the program that implements the model we actually want to fit. Rather than calling `stancode()`, which requires an existing fitted model object, we recommend using `make_stancode()` and specifying the `save_model` argument to write the Stan program to a file. The corresponding data can be prepared with `make_standata()` and then manually amended if needed. Once the code and data have been edited, they can be passed to RStan's `stan()` function via the `file` and `data` arguments.

# Conclusion

In summary, we have shown how to set up and validate approximate and exact LOO-CV for non-factorized multivariate normal models using Stan with the **brms** and **loo** packages. Although we focused on the particular example of a spatial SAR model, the presented recipe applies more generally to models that can be expressed in terms of a multivariate normal likelihood.
# References Anselin L. (1988). *Spatial econometrics: methods and models*. Dordrecht: Kluwer Academic. Bürkner P. C., Gabry J., & Vehtari A. (2020). Efficient leave-one-out cross-validation for Bayesian non-factorized normal and Student-t models. *Computational Statistics*, \doi:10.1007/s00180-020-01045-4. [ArXiv preprint](https://arxiv.org/abs/1810.10559). Sundararajan S. & Keerthi S. S. (2001). Predictive approaches for choosing hyperparameters in Gaussian processes. *Neural Computation*, 13(5), 1103--1118. Vehtari A., Mononen T., Tolvanen V., Sivula T., & Winther O. (2016). Bayesian leave-one-out cross-validation approximations for Gaussian latent variable models. *Journal of Machine Learning Research*, 17(103), 1--38. [Online](https://jmlr.org/papers/v17/14-540.html). Vehtari A., Gelman A., & Gabry J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. *Statistics and Computing*, 27(5), 1413--1432. \doi:10.1007/s11222-016-9696-4. [Online](https://link.springer.com/article/10.1007/s11222-016-9696-4). [arXiv preprint arXiv:1507.04544](https://arxiv.org/abs/1507.04544). Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. *Journal of Machine Learning Research*, 25(72):1-58. [PDF](https://jmlr.org/papers/v25/19-556.html) loo/vignettes/loo2-moment-matching.Rmd0000644000176200001440000003061115122274256017466 0ustar liggesusers--- title: "Avoiding model refits in leave-one-out cross-validation with moment matching" author: "Topi Paananen, Paul Bürkner, Aki Vehtari and Jonah Gabry" date: "`r Sys.Date()`" output: html_vignette: toc: yes params: EVAL: !r identical(Sys.getenv("NOT_CRAN"), "true") --- ```{r, child="children/SETTINGS-knitr.txt"} ``` ```{r, child="children/SEE-ONLINE.txt", eval = if (isTRUE(exists("params"))) !params$EVAL else TRUE} ``` # Introduction This vignette demonstrates how to improve the Monte Carlo sampling accuracy of leave-one-out cross-validation with the __loo__ package and Stan. The __loo__ package automatically monitors the sampling accuracy using Pareto $k$ diagnostics for each observation. Here, we present a method for quickly improving the accuracy when the Pareto diagnostics indicate problems. This is done by performing some additional computations using the existing posterior sample. If successful, this will decrease the Pareto $k$ values, making the model assessment more reliable. __loo__ also stores the original Pareto $k$ values with the name `influence_pareto_k` which are not changed. They can be used as a diagnostic of how much each observation influences the posterior distribution. The methodology presented is based on the paper * Paananen, T., Piironen, J., Buerkner, P.-C., Vehtari, A. (2020). Implicitly Adaptive Importance Sampling. [arXiv preprint arXiv:1906.08850](https://arxiv.org/abs/1906.08850). More information about the Pareto $k$ diagnostics is given in the following papers * Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. _Statistics and Computing_. 27(5), 1413--1432. \doi:10.1007/s11222-016-9696-4. Links: [published](https://link.springer.com/article/10.1007/s11222-016-9696-4) | [arXiv preprint](https://arxiv.org/abs/1507.04544). * Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. *Journal of Machine Learning Research*, 25(72):1-58. 
[PDF](https://jmlr.org/papers/v25/19-556.html) # Example: Eradication of Roaches We will use the same example as in the vignette [_Using the loo package (version >= 2.0.0)_](https://mc-stan.org/loo/articles/loo2-example.html). See the demo for a description of the problem and data. We will use the same Poisson regression model as in the case study. ## Coding the Stan model Here is the Stan code for fitting the Poisson regression model, which we will use for modeling the number of roaches. ```{r stancode} # Note: some syntax used in this Stan program requires RStan >= 2.26 (or CmdStanR) # To use an older version of RStan change the line declaring `y` to: int y[N]; stancode <- " data { int K; int N; matrix[N,K] x; array[N] int y; vector[N] offset_; // offset is reserved keyword in Stan so use offset_ real beta_prior_scale; real alpha_prior_scale; } parameters { vector[K] beta; real intercept; } model { y ~ poisson(exp(x * beta + intercept + offset_)); beta ~ normal(0,beta_prior_scale); intercept ~ normal(0,alpha_prior_scale); } generated quantities { vector[N] log_lik; for (n in 1:N) { log_lik[n] = poisson_lpmf(y[n] | exp(x[n] * beta + intercept + offset_[n])); } } " ``` Following the usual approach recommended in [_Writing Stan programs for use with the loo package_](http://mc-stan.org/loo/articles/loo2-with-rstan.html), we compute the log-likelihood for each observation in the `generated quantities` block of the Stan program. ## Setup In addition to __loo__, we load the __rstan__ package for fitting the model, and the __rstanarm__ package for the data. ```{r setup, message=FALSE} library("rstan") library("loo") seed <- 9547 set.seed(seed) ``` ## Fitting the model with RStan Next we fit the model in Stan using the __rstan__ package: ```{r modelfit, message=FALSE} # Prepare data data(roaches, package = "rstanarm") roaches$roach1 <- sqrt(roaches$roach1) y <- roaches$y x <- roaches[, c("roach1", "treatment", "senior")] offset <- log(roaches[, "exposure2"]) n <- dim(x)[1] k <- dim(x)[2] standata <- list( N = n, K = k, x = as.matrix(x), y = y, offset_ = offset, beta_prior_scale = 2.5, alpha_prior_scale = 5.0 ) # Compile stanmodel <- stan_model(model_code = stancode) # Fit model fit <- sampling(stanmodel, data = standata, seed = seed, refresh = 0) print(fit, pars = "beta") ``` Let us now evaluate the predictive performance of the model using `loo()`. ```{r loo1} loo1 <- loo(fit) loo1 ``` The `loo()` function output warnings that there are some observations which are highly influential, and thus the accuracy of importance sampling is compromised as indicated by the large Pareto $k$ diagnostic values (> 0.7). As discussed in the vignette [_Using the loo package (version >= 2.0.0)_](https://mc-stan.org/loo/articles/loo2-example.html), this may be an indication of model misspecification. Despite that, it is still beneficial to be able to evaluate the predictive performance of the model accurately. ## Moment matching correction for importance sampling To improve the accuracy of the `loo()` result above, we could perform leave-one-out cross-validation by explicitly leaving out single observations and refitting the model using MCMC repeatedly. However, the Pareto $k$ diagnostics indicate that there are 19 observations which are problematic. This would require 19 model refits which may require a lot of computation time. Instead of refitting with MCMC, we can perform a faster moment matching correction to the importance sampling for the problematic observations. 
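To see how many refits exact LOO-CV would require here, we can extract the indices of the problematic observations from the existing `loo` object (a small illustrative addition; `loo1` is the object computed above):

```{r high-k-ids, eval=FALSE}
# Observations whose Pareto k exceeds 0.7; each of these would need a
# separate model refit if we did exact LOO-CV instead of moment matching.
high_k <- pareto_k_ids(loo1, threshold = 0.7)
length(high_k)
```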
The moment matching correction can be done with the `loo_moment_match()` function in the __loo__ package, which takes our existing `loo` object as input and modifies it. The moment matching requires some evaluations of the model posterior density. For models fitted with __rstan__, this can be conveniently done by using the existing `stanfit` object.

First, we show how the moment matching can be used for a model fitted using __rstan__. It only requires setting the argument `moment_match` to `TRUE` in the `loo()` function. Optionally, you can also set the argument `k_threshold` which determines the Pareto $k$ threshold, above which moment matching is used. By default, it operates on all observations whose Pareto $k$ value is larger than the sample size ($S$) specific threshold $\min(1 - 1 / \log_{10}(S), 0.7)$ (which is $0.7$ for $S>2200$).

```{r loo_moment_match}
# available in rstan >= 2.21
loo2 <- loo(fit, moment_match = TRUE)
loo2
```

After the moment matching, all observations have the diagnostic Pareto $k$ less than 0.7, meaning that the estimates are now reliable. The total `elpd_loo` estimate also changed from `-5457.8` to `-5478.5`, showing that before moment matching, `loo()` overestimated the predictive performance of the model.

The updated Pareto $k$ values stored in `loo2$diagnostics$pareto_k` are considered algorithmic diagnostic values that indicate the sampling accuracy. The original Pareto $k$ values are stored in `loo2$pointwise[,"influence_pareto_k"]` and these are not modified by the moment matching. These can be considered as diagnostics for how much influence each observation has on the posterior distribution. In addition to the Pareto $k$ diagnostics, moment matching also updates the effective sample size estimates.

# Using `loo_moment_match()` directly

The moment matching can also be performed by explicitly calling the function `loo_moment_match()`. This enables its use also for models that are not using __rstan__ or another package with built-in support for `loo_moment_match()`. To use `loo_moment_match()`, the user must give the model object `x`, the `loo` object, and 5 helper functions as arguments to `loo_moment_match()`. The helper functions are

* `post_draws`
    + A function that takes `x` as the first argument and returns a matrix of posterior draws of the model parameters, `pars`.
* `log_lik_i`
    + A function that takes `x` and `i` and returns a matrix (one column per chain) or a vector (all chains stacked) of log-likelihood draws of the ith observation based on the model `x`. If the draws are obtained using MCMC, the matrix with MCMC chains separated is preferred.
* `unconstrain_pars`
    + A function that takes arguments `x` and `pars`, and returns posterior draws on the unconstrained space based on the posterior draws on the constrained space passed via `pars`.
* `log_prob_upars`
    + A function that takes arguments `x` and `upars`, and returns a matrix of log-posterior density values of the unconstrained posterior draws passed via `upars`.
* `log_lik_i_upars`
    + A function that takes arguments `x`, `upars`, and `i` and returns a vector of log-likelihood draws of the `i`th observation based on the unconstrained posterior draws passed via `upars`.

Next, we show what the helper functions look like for RStan objects, and show an example of using `loo_moment_match()` directly.
For stanfit objects from __rstan__ objects, the functions look like this: ```{r stanfitfuns} # create a named list of draws for use with rstan methods .rstan_relist <- function(x, skeleton) { out <- utils::relist(x, skeleton) for (i in seq_along(skeleton)) { dim(out[[i]]) <- dim(skeleton[[i]]) } out } # rstan helper function to get dims of parameters right .create_skeleton <- function(pars, dims) { out <- lapply(seq_along(pars), function(i) { len_dims <- length(dims[[i]]) if (len_dims < 1) { return(0) } return(array(0, dim = dims[[i]])) }) names(out) <- pars out } # extract original posterior draws post_draws_stanfit <- function(x, ...) { as.matrix(x) } # compute a matrix of log-likelihood values for the ith observation # matrix contains information about the number of MCMC chains log_lik_i_stanfit <- function(x, i, parameter_name = "log_lik", ...) { loo::extract_log_lik(x, parameter_name, merge_chains = FALSE)[,, i] } # transform parameters to the unconstraint space unconstrain_pars_stanfit <- function(x, pars, ...) { skeleton <- .create_skeleton(x@sim$pars_oi, x@par_dims[x@sim$pars_oi]) upars <- apply(pars, 1, FUN = function(theta) { rstan::unconstrain_pars(x, .rstan_relist(theta, skeleton)) }) # for one parameter models if (is.null(dim(upars))) { dim(upars) <- c(1, length(upars)) } t(upars) } # compute log_prob for each posterior draws on the unconstrained space log_prob_upars_stanfit <- function(x, upars, ...) { apply( upars, 1, rstan::log_prob, object = x, adjust_transform = TRUE, gradient = FALSE ) } # compute log_lik values based on the unconstrained parameters log_lik_i_upars_stanfit <- function( x, upars, i, parameter_name = "log_lik", ... ) { S <- nrow(upars) out <- numeric(S) for (s in seq_len(S)) { out[s] <- rstan::constrain_pars(x, upars = upars[s, ])[[parameter_name]][i] } out } ``` Using these function, we can call `loo_moment_match()` to update the existing `loo` object. ```{r loo_moment_match.default, message=FALSE} loo3 <- loo::loo_moment_match.default( x = fit, loo = loo1, post_draws = post_draws_stanfit, log_lik_i = log_lik_i_stanfit, unconstrain_pars = unconstrain_pars_stanfit, log_prob_upars = log_prob_upars_stanfit, log_lik_i_upars = log_lik_i_upars_stanfit ) loo3 ``` As expected, the result is identical to the previous result of `loo2 <- loo(fit, moment_match = TRUE)`. # References Gelman, A., and Hill, J. (2007). *Data Analysis Using Regression and Multilevel Hierarchical Models.* Cambridge University Press. Stan Development Team (2020) _RStan: the R interface to Stan, Version 2.21.1_ https://mc-stan.org Paananen, T., Piironen, J., Buerkner, P.-C., Vehtari, A. (2021). Implicitly adaptive importance sampling. _Statistics and Computing_, 31, 16. \doi:10.1007/s11222-020-09982-2. arXiv preprint arXiv:1906.08850. Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. _Statistics and Computing_. 27(5), 1413--1432. \doi:10.1007/s11222-016-9696-4. Links: [published](https://link.springer.com/article/10.1007/s11222-016-9696-4) | [arXiv preprint](https://arxiv.org/abs/1507.04544). Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. *Journal of Machine Learning Research*, 25(72):1-58. 
[PDF](https://jmlr.org/papers/v25/19-556.html) loo/vignettes/loo2-lfo.Rmd0000644000176200001440000006055114641333357015170 0ustar liggesusers--- title: "Approximate leave-future-out cross-validation for Bayesian time series models" author: "Paul Bürkner, Jonah Gabry, Aki Vehtari" date: "`r Sys.Date()`" output: html_vignette: toc: yes encoding: "UTF-8" params: EVAL: !r identical(Sys.getenv("NOT_CRAN"), "true") --- ```{r settings, child="children/SETTINGS-knitr.txt"} ``` ```{r more-knitr-ops, include=FALSE} knitr::opts_chunk$set( cache = TRUE, message = FALSE, warning = FALSE ) ``` ```{r, child="children/SEE-ONLINE.txt", eval = if (isTRUE(exists("params"))) !params$EVAL else TRUE} ``` ## Introduction One of the most common goals of a time series analysis is to use the observed series to inform predictions for future observations. We will refer to this task of predicting a sequence of $M$ future observations as $M$-step-ahead prediction ($M$-SAP). Fortunately, once we have fit a model and can sample from the posterior predictive distribution, it is straightforward to generate predictions as far into the future as we want. It is also straightforward to evaluate the $M$-SAP performance of a time series model by comparing the predictions to the observed sequence of $M$ future data points once they become available. Unfortunately, we are often in the position of having to use a model to inform decisions _before_ we can collect the future observations required for assessing the predictive performance. If we have many competing models we may also need to first decide which of the models (or which combination of the models) we should rely on for predictions. In these situations the best we can do is to use methods for approximating the expected predictive performance of our models using only the observations of the time series we already have. If there were no time dependence in the data or if the focus is to assess the non-time-dependent part of the model, we could use methods like leave-one-out cross-validation (LOO-CV). For a data set with $N$ observations, we refit the model $N$ times, each time leaving out one of the $N$ observations and assessing how well the model predicts the left-out observation. LOO-CV is very expensive computationally in most realistic settings, but the Pareto smoothed importance sampling (PSIS, Vehtari et al, 2017, 2024) algorithm provided by the *loo* package allows for approximating exact LOO-CV with PSIS-LOO-CV. PSIS-LOO-CV requires only a single fit of the full model and comes with diagnostics for assessing the validity of the approximation. With a time series we can do something similar to LOO-CV but, except in a few cases, it does not make sense to leave out observations one at a time because then we are allowing information from the future to influence predictions of the past (i.e., times $t + 1, t+2, \ldots$ should not be used to predict for time $t$). To apply the idea of cross-validation to the $M$-SAP case, instead of leave-*one*-out cross-validation we need some form of leave-*future*-out cross-validation (LFO-CV). As we will demonstrate in this case study, LFO-CV does not refer to one particular prediction task but rather to various possible cross-validation approaches that all involve some form of prediction for new time series data. 
Like exact LOO-CV, exact LFO-CV requires refitting the model many times to different subsets of the data, which is computationally very costly for most nontrivial examples, in particular for Bayesian analyses where refitting the model means estimating a new posterior distribution rather than a point estimate. Although PSIS-LOO-CV provides an efficient approximation to exact LOO-CV, until now there has not been an analogous approximation to exact LFO-CV that drastically reduces the computational burden while also providing informative diagnostics about the quality of the approximation. In this case study we present PSIS-LFO-CV, an algorithm that typically only requires refitting the time-series model a small number times and will make LFO-CV tractable for many more realistic applications than previously possible. More details can be found in our paper about approximate LFO-CV (Bürkner, Gabry, & Vehtari, 2020), which is available as a preprint on arXiv (https://arxiv.org/abs/1902.06281). ## $M$-step-ahead predictions Assume we have a time series of observations $y = (y_1, y_2, \ldots, y_N)$ and let $L$ be the _minimum_ number of observations from the series that we will require before making predictions for future data. Depending on the application and how informative the data is, it may not be possible to make reasonable predictions for $y_{i+1}$ based on $(y_1, \dots, y_{i})$ until $i$ is large enough so that we can learn enough about the time series to predict future observations. Setting $L=10$, for example, means that we will only assess predictive performance starting with observation $y_{11}$, so that we always have at least 10 previous observations to condition on. In order to assess $M$-SAP performance we would like to compute the predictive densities $$ p(y_{i+1:M} \,|\, y_{1:i}) = p(y_{i+1}, \ldots, y_{i + M} \,|\, y_{1},...,y_{i}) $$ for each $i \in \{L, \ldots, N - M\}$. The quantities $p(y_{i+1:M} \,|\, y_{1:i})$ can be computed with the help of the posterior distribution $p(\theta \,|\, y_{1:i})$ of the parameters $\theta$ conditional on only the first $i$ observations of the time-series: $$ p(y_{i+1:M} \,| \, y_{1:i}) = \int p(y_{i+1:M} \,| \, y_{1:i}, \theta) \, p(\theta\,|\,y_{1:i}) \,d\theta. $$ Having obtained $S$ draws $(\theta_{1:i}^{(1)}, \ldots, \theta_{1:i}^{(S)})$ from the posterior distribution $p(\theta\,|\,y_{1:i})$, we can estimate $p(y_{i+1:M} | y_{1:i})$ as $$ p(y_{i+1:M} \,|\, y_{1:i}) \approx \frac{1}{S}\sum_{s=1}^S p(y_{i+1:M} \,|\, y_{1:i}, \theta_{1:i}^{(s)}). $$ ## Approximate $M$-SAP using importance-sampling {#approximate_MSAP} Unfortunately, the math above makes use of the posterior distributions from many different fits of the model to different subsets of the data. That is, to obtain the predictive density $p(y_{i+1:M} \,|\, y_{1:i})$ requires fitting a model to only the first $i$ data points, and we will need to do this for every value of $i$ under consideration (all $i \in \{L, \ldots, N - M\}$). To reduce the number of models that need to be fit for the purpose of obtaining each of the densities $p(y_{i+1:M} \,|\, y_{1:i})$, we propose the following algorithm. First, we refit the model using the first $L$ observations of the time series and then perform a single exact $M$-step-ahead prediction step for $p(y_{L+1:M} \,|\, y_{1:L})$. Recall that $L$ is the minimum number of observations we have deemed acceptable for making predictions (setting $L=0$ means the first data point will be predicted only based on the prior). 
We define $i^\star = L$ as the current point of refit. Next, starting with $i = i^\star + 1$, we approximate each $p(y_{i+1:M} \,|\, y_{1:i})$ via $$ p(y_{i+1:M} \,|\, y_{1:i}) \approx \frac{ \sum_{s=1}^S w_i^{(s)}\, p(y_{i+1:M} \,|\, y_{1:i}, \theta^{(s)})} { \sum_{s=1}^S w_i^{(s)}}, $$ where $\theta^{(s)} = \theta^{(s)}_{1:i^\star}$ are draws from the posterior distribution based on the first $i^\star$ observations and $w_i^{(s)}$ are the PSIS weights obtained in two steps. First, we compute the raw importance ratios $$ r_i^{(s)} = \frac{f_{1:i}(\theta^{(s)})}{f_{1:i^\star}(\theta^{(s)})} \propto \prod_{j \in (i^\star + 1):i} p(y_j \,|\, y_{1:(j-1)}, \theta^{(s)}), $$ and then stabilize them using PSIS. The function $f_{1:i}$ denotes the posterior distribution based on the first $i$ observations, that is, $f_{1:i} = p(\theta \,|\, y_{1:i})$, with $f_{1:i^\star}$ defined analogously. The index set $(i^\star + 1):i$ indicates all observations which are part of the data for the model $f_{1:i}$ whose predictive performance we are trying to approximate but not for the actually fitted model $f_{1:i^\star}$. The proportional statement arises from the fact that we ignore the normalizing constants $p(y_{1:i})$ and $p(y_{1:i^\star})$ of the compared posteriors, which leads to a self-normalized variant of PSIS (see Vehtari et al, 2017). Continuing with the next observation, we gradually increase $i$ by $1$ (we move forward in time) and repeat the process. At some observation $i$, the variability of the importance ratios $r_i^{(s)}$ will become too large and importance sampling will fail. We will refer to this particular value of $i$ as $i^\star_1$. To identify the value of $i^\star_1$, we check for which value of $i$ does the estimated shape parameter $k$ of the generalized Pareto distribution first cross a certain threshold $\tau$ (Vehtari et al, 2024). Only then do we refit the model using the observations up to $i^\star_1$ and restart the process from there by setting $\theta^{(s)} = \theta^{(s)}_{1:i^\star_1}$ and $i^\star = i^\star_1$ until the next refit. In some cases we may only need to refit once and in other cases we will find a value $i^\star_2$ that requires a second refitting, maybe an $i^\star_3$ that requires a third refitting, and so on. We refit as many times as is required (only when $k > \tau$) until we arrive at observation $i = N - M$. For LOO, assuming posterior sample size is 4000 or larger, we recommend to use a threshold of $\tau = 0.7$ (Vehtari et al, 2017, 2024) and it turns out this is a reasonable threshold for LFO as well (Bürkner et al. 2020). ## Autoregressive models Autoregressive (AR) models are some of the most commonly used time-series models. An AR(p) model ---an autoregressive model of order $p$--- can be defined as $$ y_i = \eta_i + \sum_{k = 1}^p \varphi_k y_{i - k} + \varepsilon_i, $$ where $\eta_i$ is the linear predictor for the $i$th observation, $\phi_k$ are the autoregressive parameters and $\varepsilon_i$ are pairwise independent errors, which are usually assumed to be normally distributed with equal variance $\sigma^2$. The model implies a recursive formula that allows for computing the right-hand side of the above equation for observation $i$ based on the values of the equations for previous observations. 
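As a quick illustration of this recursion (an added sketch using made-up parameter values, not part of the case study below), an AR(2) series can be simulated directly from the definition above:

```{r ar-sim-sketch, eval=FALSE}
# Simulate y_i = eta + phi_1 * y_{i-1} + phi_2 * y_{i-2} + epsilon_i
# with hypothetical values for eta, the AR coefficients, and the error SD.
N_sim <- 200
eta <- 0
phi <- c(0.5, 0.3)
sigma <- 1
y_sim <- numeric(N_sim)
for (i in 3:N_sim) {
  y_sim[i] <- eta + phi[1] * y_sim[i - 1] + phi[2] * y_sim[i - 2] +
    rnorm(1, mean = 0, sd = sigma)
}
```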
## Case Study: Annual measurements of the level of Lake Huron To illustrate the application of PSIS-LFO-CV for estimating expected $M$-SAP performance, we will fit a model for 98 annual measurements of the water level (in feet) of [Lake Huron](https://en.wikipedia.org/wiki/Lake_Huron) from the years 1875--1972. This data set is found in the **datasets** R package, which is installed automatically with **R**. In addition to the **loo** package, for this analysis we will use the **brms** interface to Stan to generate a Stan program and fit the model, and also the **bayesplot** and **ggplot2** packages for plotting. ```{r pkgs, cache=FALSE} library("brms") library("loo") library("bayesplot") library("ggplot2") color_scheme_set("brightblue") theme_set(theme_default()) CHAINS <- 4 SEED <- 5838296 set.seed(SEED) ``` Before fitting a model, we will first put the data into a data frame and then look at the time series. ```{r hurondata} N <- length(LakeHuron) df <- data.frame( y = as.numeric(LakeHuron), year = as.numeric(time(LakeHuron)), time = 1:N ) ggplot(df, aes(x = year, y = y)) + geom_point(size = 1) + labs( y = "Water Level (ft)", x = "Year", title = "Water Level in Lake Huron (1875-1972)" ) ``` The above plot shows rather strong autocorrelation of the time-series as well as some trend towards lower levels for later points in time. We can specify an AR(4) model for these data using the **brms** package as follows: ```{r fit, results = "hide"} fit <- brm( y ~ ar(time, p = 4), data = df, prior = prior(normal(0, 0.5), class = "ar"), control = list(adapt_delta = 0.99), seed = SEED, chains = CHAINS ) ``` The model implied predictions along with the observed values can be plotted, which reveals a rather good fit to the data. ```{r plotpreds, cache = FALSE} preds <- posterior_predict(fit) preds <- cbind( Estimate = colMeans(preds), Q5 = apply(preds, 2, quantile, probs = 0.05), Q95 = apply(preds, 2, quantile, probs = 0.95) ) ggplot(cbind(df, preds), aes(x = year, y = Estimate)) + geom_smooth(aes(ymin = Q5, ymax = Q95), stat = "identity", linewidth = 0.5) + geom_point(aes(y = y)) + labs( y = "Water Level (ft)", x = "Year", title = "Water Level in Lake Huron (1875-1972)", subtitle = "Mean (blue) and 90% predictive intervals (gray) vs. observed data (black)" ) ``` To allow for reasonable predictions of future values, we will require at least $L = 20$ historical observations (20 years) to make predictions. ```{r setL} L <- 20 ``` We first perform approximate leave-one-out cross-validation (LOO-CV) for the purpose of later comparison with exact and approximate LFO-CV for the 1-SAP case. ```{r loo1sap, cache = FALSE} loo_cv <- loo(log_lik(fit)[, (L + 1):N]) print(loo_cv) ``` ## 1-step-ahead predictions leaving out all future values The most basic version of $M$-SAP is 1-SAP, in which we predict only one step ahead. In this case, $y_{i+1:M}$ simplifies to $y_{i}$ and the LFO-CV algorithm becomes considerably simpler than for larger values of $M$. ### Exact 1-step-ahead predictions Before we compute approximate LFO-CV using PSIS we will first compute exact LFO-CV for the 1-SAP case so we can use it as a benchmark later. 
The initial step for the exact computation is to calculate the log-predictive densities by refitting the model many times: ```{r exact_loglik, results="hide"} loglik_exact <- matrix(nrow = ndraws(fit), ncol = N) for (i in L:(N - 1)) { past <- 1:i oos <- i + 1 df_past <- df[past, , drop = FALSE] df_oos <- df[c(past, oos), , drop = FALSE] fit_i <- update(fit, newdata = df_past, recompile = FALSE) loglik_exact[, i + 1] <- log_lik(fit_i, newdata = df_oos, oos = oos)[, oos] } ``` Then we compute the exact expected log predictive density (ELPD): ```{r helpers} # some helper functions we'll use throughout # more stable than log(sum(exp(x))) log_sum_exp <- function(x) { max_x <- max(x) max_x + log(sum(exp(x - max_x))) } # more stable than log(mean(exp(x))) log_mean_exp <- function(x) { log_sum_exp(x) - log(length(x)) } # compute log of raw importance ratios # sums over observations *not* over posterior samples sum_log_ratios <- function(loglik, ids = NULL) { if (!is.null(ids)) loglik <- loglik[, ids, drop = FALSE] rowSums(loglik) } # for printing comparisons later rbind_print <- function(...) { round(rbind(...), digits = 2) } ``` ```{r exact1sap, cache = FALSE} exact_elpds_1sap <- apply(loglik_exact, 2, log_mean_exp) exact_elpd_1sap <- c(ELPD = sum(exact_elpds_1sap[-(1:L)])) rbind_print( "LOO" = loo_cv$estimates["elpd_loo", "Estimate"], "LFO" = exact_elpd_1sap ) ``` We see that the ELPD from LFO-CV for 1-step-ahead predictions is lower than the ELPD estimate from LOO-CV, which should be expected since LOO-CV is making use of more of the time series. That is, since the LFO-CV approach only uses observations from before the left-out data point but LOO-CV uses _all_ data points other than the left-out observation, we should expect to see the larger ELPD from LOO-CV. ### Approximate 1-step-ahead predictions We compute approximate 1-SAP with refit at observations where the Pareto $k$ estimate exceeds the threshold of $0.7$. ```{r setkthresh} k_thres <- 0.7 ``` The code becomes a little bit more involved as compared to the exact LFO-CV. Note that we can compute exact 1-SAP at the refitting points, which comes with no additional computational costs since we had to refit the model anyway. 
```{r refit_loglik, results="hide"} approx_elpds_1sap <- rep(NA, N) # initialize the process for i = L past <- 1:L oos <- L + 1 df_past <- df[past, , drop = FALSE] df_oos <- df[c(past, oos), , drop = FALSE] fit_past <- update(fit, newdata = df_past, recompile = FALSE) loglik <- log_lik(fit_past, newdata = df_oos, oos = oos) approx_elpds_1sap[L + 1] <- log_mean_exp(loglik[, oos]) # iterate over i > L i_refit <- L refits <- L ks <- NULL for (i in (L + 1):(N - 1)) { past <- 1:i oos <- i + 1 df_past <- df[past, , drop = FALSE] df_oos <- df[c(past, oos), , drop = FALSE] loglik <- log_lik(fit_past, newdata = df_oos, oos = oos) logratio <- sum_log_ratios(loglik, (i_refit + 1):i) psis_obj <- suppressWarnings(psis(logratio)) k <- pareto_k_values(psis_obj) ks <- c(ks, k) if (k > k_thres) { # refit the model based on the first i observations i_refit <- i refits <- c(refits, i) fit_past <- update(fit_past, newdata = df_past, recompile = FALSE) loglik <- log_lik(fit_past, newdata = df_oos, oos = oos) approx_elpds_1sap[i + 1] <- log_mean_exp(loglik[, oos]) } else { lw <- weights(psis_obj, normalize = TRUE)[, 1] approx_elpds_1sap[i + 1] <- log_sum_exp(lw + loglik[, oos]) } } ``` We see that the final Pareto-$k$-estimates are mostly well below the threshold and that we only needed to refit the model a few times: ```{r plot_ks} plot_ks <- function(ks, ids, thres = 0.6) { dat_ks <- data.frame(ks = ks, ids = ids) ggplot(dat_ks, aes(x = ids, y = ks)) + geom_point(aes(color = ks > thres), shape = 3, show.legend = FALSE) + geom_hline(yintercept = thres, linetype = 2, color = "red2") + scale_color_manual(values = c("cornflowerblue", "darkblue")) + labs(x = "Data point", y = "Pareto k") + ylim(-0.5, 1.5) } ``` ```{r refitsummary1sap, cache=FALSE} cat("Using threshold ", k_thres, ", model was refit ", length(refits), " times, at observations", refits) plot_ks(ks, (L + 1):(N - 1)) ``` The approximate 1-SAP ELPD is remarkably similar to the exact 1-SAP ELPD computed above, which indicates our algorithm to compute approximate 1-SAP worked well for the present data and model. ```{r lfosummary1sap, cache = FALSE} approx_elpd_1sap <- sum(approx_elpds_1sap, na.rm = TRUE) rbind_print( "approx LFO" = approx_elpd_1sap, "exact LFO" = exact_elpd_1sap ) ``` Plotting exact against approximate predictions, we see that no approximation value deviates far from its exact counterpart, providing further evidence for the good quality of our approximation. ```{r plot1sap, cache = FALSE} dat_elpd <- data.frame( approx_elpd = approx_elpds_1sap, exact_elpd = exact_elpds_1sap ) ggplot(dat_elpd, aes(x = approx_elpd, y = exact_elpd)) + geom_abline(color = "gray30") + geom_point(size = 2) + labs(x = "Approximate ELPDs", y = "Exact ELPDs") ``` We can also look at the maximum difference and average difference between the approximate and exact ELPD calculations, which also indicate a ver close approximation: ```{r diffs1sap, cache=FALSE} max_diff <- with(dat_elpd, max(abs(approx_elpd - exact_elpd), na.rm = TRUE)) mean_diff <- with(dat_elpd, mean(abs(approx_elpd - exact_elpd), na.rm = TRUE)) rbind_print( "Max diff" = round(max_diff, 2), "Mean diff" = round(mean_diff, 3) ) ``` ## $M$-step-ahead predictions leaving out all future values To illustrate the application of $M$-SAP for $M > 1$, we next compute exact and approximate LFO-CV for the 4-SAP case. 
### Exact $M$-step-ahead predictions The necessary steps are the same as for 1-SAP with the exception that the log-density values of interest are now the sums of the log predictive densities of four consecutive observations. Further, the stability of the PSIS approximation actually stays the same for all $M$ as it only depends on the number of observations we leave out, not on the number of observations we predict. ```{r exact_loglikm, results="hide"} M <- 4 loglikm <- matrix(nrow = ndraws(fit), ncol = N) for (i in L:(N - M)) { past <- 1:i oos <- (i + 1):(i + M) df_past <- df[past, , drop = FALSE] df_oos <- df[c(past, oos), , drop = FALSE] fit_past <- update(fit, newdata = df_past, recompile = FALSE) loglik <- log_lik(fit_past, newdata = df_oos, oos = oos) loglikm[, i + 1] <- rowSums(loglik[, oos]) } ``` ```{r exact4sap, cache = FALSE} exact_elpds_4sap <- apply(loglikm, 2, log_mean_exp) (exact_elpd_4sap <- c(ELPD = sum(exact_elpds_4sap, na.rm = TRUE))) ``` ### Approximate $M$-step-ahead predictions Computing the approximate PSIS-LFO-CV for the 4-SAP case is a little bit more involved than the approximate version for the 1-SAP case, although the underlying principles remain the same. ```{r refit_loglikm, results="hide"} approx_elpds_4sap <- rep(NA, N) # initialize the process for i = L past <- 1:L oos <- (L + 1):(L + M) df_past <- df[past, , drop = FALSE] df_oos <- df[c(past, oos), , drop = FALSE] fit_past <- update(fit, newdata = df_past, recompile = FALSE) loglik <- log_lik(fit_past, newdata = df_oos, oos = oos) loglikm <- rowSums(loglik[, oos]) approx_elpds_4sap[L + 1] <- log_mean_exp(loglikm) # iterate over i > L i_refit <- L refits <- L ks <- NULL for (i in (L + 1):(N - M)) { past <- 1:i oos <- (i + 1):(i + M) df_past <- df[past, , drop = FALSE] df_oos <- df[c(past, oos), , drop = FALSE] loglik <- log_lik(fit_past, newdata = df_oos, oos = oos) logratio <- sum_log_ratios(loglik, (i_refit + 1):i) psis_obj <- suppressWarnings(psis(logratio)) k <- pareto_k_values(psis_obj) ks <- c(ks, k) if (k > k_thres) { # refit the model based on the first i observations i_refit <- i refits <- c(refits, i) fit_past <- update(fit_past, newdata = df_past, recompile = FALSE) loglik <- log_lik(fit_past, newdata = df_oos, oos = oos) loglikm <- rowSums(loglik[, oos]) approx_elpds_4sap[i + 1] <- log_mean_exp(loglikm) } else { lw <- weights(psis_obj, normalize = TRUE)[, 1] loglikm <- rowSums(loglik[, oos]) approx_elpds_4sap[i + 1] <- log_sum_exp(lw + loglikm) } } ``` Again, we see that the final Pareto-$k$-estimates are mostly well below the threshold and that we only needed to refit the model a few times: ```{r refitsummary4sap, cache = FALSE} cat("Using threshold ", k_thres, ", model was refit ", length(refits), " times, at observations", refits) plot_ks(ks, (L + 1):(N - M)) ``` The approximate ELPD computed for the 4-SAP case is not as close to its exact counterpart as in the 1-SAP case. In general, the larger $M$, the larger the variation of the approximate ELPD around the exact ELPD. It turns out that the ELPD estimates of AR-models with $M>1$ show particular variation due to their predictions' dependency on other predicted values. In Bürkner et al. (2020) we provide further explanation and simulations for these cases. 
```{r lfosummary4sap, cache = FALSE}
approx_elpd_4sap <- sum(approx_elpds_4sap, na.rm = TRUE)
rbind_print(
  "Approx LFO" = approx_elpd_4sap,
  "Exact LFO" = exact_elpd_4sap
)
```

Plotting exact against approximate pointwise predictions confirms that, for a few specific data points, the approximate predictions underestimate the exact predictions.

```{r plot4sap, cache = FALSE}
dat_elpd_4sap <- data.frame(
  approx_elpd = approx_elpds_4sap,
  exact_elpd = exact_elpds_4sap
)
ggplot(dat_elpd_4sap, aes(x = approx_elpd, y = exact_elpd)) +
  geom_abline(color = "gray30") +
  geom_point(size = 2) +
  labs(x = "Approximate ELPDs", y = "Exact ELPDs")
```

## Conclusion

In this case study we have shown how to carry out exact and approximate leave-future-out cross-validation for $M$-step-ahead prediction tasks. For the data and model used in our example, the PSIS-LFO-CV algorithm provides reasonably stable and accurate results despite requiring far fewer model refits than exact LFO-CV. For more details on approximate LFO-CV, we refer to Bürkner et al. (2020).
## References

Bürkner P. C., Gabry J., & Vehtari A. (2020). Approximate leave-future-out cross-validation for time series models. *Journal of Statistical Computation and Simulation*, 90(14):2499-2523. \doi:10.1080/00949655.2020.1783262. [Online](https://www.tandfonline.com/doi/full/10.1080/00949655.2020.1783262). [arXiv preprint](https://arxiv.org/abs/1902.06281).

Vehtari A., Gelman A., & Gabry J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. *Statistics and Computing*, 27(5), 1413--1432. \doi:10.1007/s11222-016-9696-4. [Online](https://link.springer.com/article/10.1007/s11222-016-9696-4). [arXiv preprint arXiv:1507.04544](https://arxiv.org/abs/1507.04544).

Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. *Journal of Machine Learning Research*, 25(72):1-58. [PDF](https://jmlr.org/papers/v25/19-556.html)
## Appendix ### Appendix: Session information ```{r sessioninfo} sessionInfo() ``` ### Appendix: Licenses * Code © 2018, Paul Bürkner, Jonah Gabry, Aki Vehtari (licensed under BSD-3). * Text © 2018, Paul Bürkner, Jonah Gabry, Aki Vehtari (licensed under CC-BY-NC 4.0). loo/vignettes/loo2-weights.Rmd0000644000176200001440000004003114641333357016051 0ustar liggesusers--- title: "Bayesian Stacking and Pseudo-BMA weights using the loo package" author: "Aki Vehtari and Jonah Gabry" date: "`r Sys.Date()`" output: html_vignette: toc: yes params: EVAL: !r identical(Sys.getenv("NOT_CRAN"), "true") --- ```{r, child="children/SETTINGS-knitr.txt"} ``` ```{r, child="children/SEE-ONLINE.txt", eval = if (isTRUE(exists("params"))) !params$EVAL else TRUE} ``` # Introduction This vignette demonstrates the new functionality in __loo__ v2.0.0 for Bayesian stacking and Pseudo-BMA weighting. In this vignette we can't provide all of the necessary background on this topic, so we encourage readers to refer to the paper * Yao, Y., Vehtari, A., Simpson, D., and Gelman, A. (2018). Using stacking to average Bayesian predictive distributions. In Bayesian Analysis, \doi:10.1214/17-BA1091. [Online](https://projecteuclid.org/euclid.ba/1516093227) which provides important details on the methods demonstrated in this vignette. Here we just quote from the abstract of the paper: > **Abstract**: Bayesian model averaging is flawed in the $\mathcal{M}$-open setting in which the true data-generating process is not one of the candidate models being fit. We take the idea of stacking from the point estimation literature and generalize to the combination of predictive distributions. We extend the utility function to any proper scoring rule and use Pareto smoothed importance sampling to efficiently compute the required leave-one-out posterior distributions. We compare stacking of predictive distributions to several alternatives: stacking of means, Bayesian model averaging (BMA), Pseudo-BMA, and a variant of Pseudo-BMA that is stabilized using the Bayesian bootstrap. Based on simulations and real-data applications, we recommend stacking of predictive distributions, with bootstrapped-Pseudo-BMA as an approximate alternative when computation cost is an issue. Ideally, we would avoid the Bayesian model combination problem by extending the model to include the separate models as special cases, and preferably as a continuous expansion of the model space. For example, instead of model averaging over different covariate combinations, all potentially relevant covariates should be included in a predictive model (for causal analysis more care is needed) and a prior assumption that only some of the covariates are relevant can be presented with regularized horseshoe prior (Piironen and Vehtari, 2017a). For variable selection we recommend projective predictive variable selection (Piironen and Vehtari, 2017a; [__projpred__ package](https://cran.r-project.org/package=projpred)). To demonstrate how to use __loo__ package to compute Bayesian stacking and Pseudo-BMA weights, we repeat two simple model averaging examples from Chapters 6 and 10 of _Statistical Rethinking_ by Richard McElreath. In _Statistical Rethinking_ WAIC is used to form weights which are similar to classical "Akaike weights". Pseudo-BMA weighting using PSIS-LOO for computation is close to these WAIC weights, but named after the Pseudo Bayes Factor by Geisser and Eddy (1979). 
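As a reminder of how such weights are formed (a toy illustration with made-up ELPD values, not results from this vignette), weights of this type are simply normalized exponentiated ELPD estimates:

```{r akaike-type-weights-sketch, eval=FALSE}
# Toy example of Akaike-type weights from hypothetical elpd estimates;
# subtracting the maximum before exponentiating avoids numerical underflow.
elpd <- c(model1 = -219.8, model2 = -217.9, model3 = -216.2, model4 = -210.1)
round(exp(elpd - max(elpd)) / sum(exp(elpd - max(elpd))), 3)
```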
As discussed below, in general we prefer using stacking rather than WAIC weights or the similar pseudo-BMA weights. # Setup In addition to the __loo__ package we will also load the __rstanarm__ package for fitting the models. ```{r setup, message=FALSE} library(rstanarm) library(loo) ``` # Example: Primate milk In _Statistical Rethinking_, McElreath describes the data for the primate milk example as follows: > A popular hypothesis has it that primates with larger brains produce more energetic milk, so that brains can grow quickly. ... The question here is to what extent energy content of milk, measured here by kilocalories, is related to the percent of the brain mass that is neocortex. ... We'll end up needing female body mass as well, to see the masking that hides the relationships among the variables. ```{r data} data(milk) d <- milk[complete.cases(milk),] d$neocortex <- d$neocortex.perc /100 str(d) ``` We repeat the analysis in Chapter 6 of _Statistical Rethinking_ using the following four models (here we use the default weakly informative priors in __rstanarm__, while flat priors were used in _Statistical Rethinking_). ```{r fits, results="hide"} fit1 <- stan_glm(kcal.per.g ~ 1, data = d, seed = 2030) fit2 <- update(fit1, formula = kcal.per.g ~ neocortex) fit3 <- update(fit1, formula = kcal.per.g ~ log(mass)) fit4 <- update(fit1, formula = kcal.per.g ~ neocortex + log(mass)) ``` McElreath uses WAIC for model comparison and averaging, so we'll start by also computing WAIC for these models so we can compare the results to the other options presented later in the vignette. The __loo__ package provides `waic` methods for log-likelihood arrays, matrices and functions. Since we fit our model with rstanarm we can use the `waic` method provided by the __rstanarm__ package (a wrapper around `waic` from the __loo__ package), which allows us to just pass in our fitted model objects instead of first extracting the log-likelihood values. ```{r waic} waic1 <- waic(fit1) waic2 <- waic(fit2) waic3 <- waic(fit3) waic4 <- waic(fit4) waics <- c( waic1$estimates["elpd_waic", 1], waic2$estimates["elpd_waic", 1], waic3$estimates["elpd_waic", 1], waic4$estimates["elpd_waic", 1] ) ``` We get some warnings when computing WAIC for models 3 and 4, indicating that we shouldn't trust the WAIC weights we will compute later. Following the recommendation in the warning, we next use the `loo` methods to compute PSIS-LOO instead. The __loo__ package provides `loo` methods for log-likelihood arrays, matrices, and functions, but since we fit our model with __rstanarm__ we can just pass the fitted model objects directly and __rstanarm__ will extract the needed values to pass to the __loo__ package. (Like __rstanarm__, some other R packages for fitting Stan models, e.g. __brms__, also provide similar methods for interfacing with the __loo__ package.) ```{r loo} # note: the loo function accepts a 'cores' argument that we recommend specifying # when working with bigger datasets loo1 <- loo(fit1) loo2 <- loo(fit2) loo3 <- loo(fit3) loo4 <- loo(fit4) lpd_point <- cbind( loo1$pointwise[,"elpd_loo"], loo2$pointwise[,"elpd_loo"], loo3$pointwise[,"elpd_loo"], loo4$pointwise[,"elpd_loo"] ) ``` With `loo` we don't get any warnings for models 3 and 4, but for illustration of good results, we display the diagnostic details for these models anyway. ```{r print-loo} print(loo3) print(loo4) ``` One benefit of PSIS-LOO over WAIC is better diagnostics. 
Here for both models 3 and 4 all $k<0.7$ and the Monte Carlo SE of `elpd_loo` is 0.1 or less, and we can expect the model comparison to be reliable. Next we compute and compare 1) WAIC weights, 2) Pseudo-BMA weights without Bayesian bootstrap, 3) Pseudo-BMA+ weights with Bayesian bootstrap, and 4) Bayesian stacking weights. ```{r weights} waic_wts <- exp(waics) / sum(exp(waics)) pbma_wts <- pseudobma_weights(lpd_point, BB=FALSE) pbma_BB_wts <- pseudobma_weights(lpd_point) # default is BB=TRUE stacking_wts <- stacking_weights(lpd_point) round(cbind(waic_wts, pbma_wts, pbma_BB_wts, stacking_wts), 2) ``` With all approaches Model 4 with `neocortex` and `log(mass)` gets most of the weight. Based on theory, Pseudo-BMA weights without Bayesian bootstrap should be close to WAIC weights, and we can also see that here. Pseudo-BMA+ weights with Bayesian bootstrap provide more cautious weights further away from 0 and 1 (see Yao et al. (2018) for a discussion of why this can be beneficial and results from related experiments). In this particular example, the Bayesian stacking weights are not much different from the other weights. One of the benefits of stacking is that it manages well if there are many similar models. Consider for example that there could be many irrelevant covariates that when included would produce a similar model to one of the existing models. To emulate this situation here we simply copy the first model a bunch of times, but you can imagine that instead we would have ten alternative models with about the same predictive performance. WAIC weights for such a scenario would be close to the following: ```{r waic_wts_demo} waic_wts_demo <- exp(waics[c(1,1,1,1,1,1,1,1,1,1,2,3,4)]) / sum(exp(waics[c(1,1,1,1,1,1,1,1,1,1,2,3,4)])) round(waic_wts_demo, 3) ``` Notice how much the weight for model 4 is lowered now that more models similar to model 1 (or in this case identical) have been added. Both WAIC weights and Pseudo-BMA approaches first estimate the predictive performance separately for each model and then compute weights based on estimated relative predictive performances. Similar models share similar weights so the weights of other models must be reduced for the total sum of the weights to remain the same. On the other hand, stacking optimizes the weights _jointly_, allowing for the very similar models (in this toy example repeated models) to share their weight while more unique models keep their original weights. In our example we can see this difference clearly: ```{r stacking_weights} stacking_weights(lpd_point[,c(1,1,1,1,1,1,1,1,1,1,2,3,4)]) ``` Using stacking, the weight for the best model stays essentially unchanged. # Example: Oceanic tool complexity Another example we consider is the Kline oceanic tool complexity data, which McElreath describes as follows: >Different historical island populations possessed tool kits of different size. These kits include fish hooks, axes, boats, hand plows, and many other types of tools. A number of theories predict that larger populations will both develop and sustain more complex tool kits. ... It's also suggested that contact rates among populations effectively increases population [sic, probably should be tool kit] size, as it's relevant to technological evolution. We build models predicting the total number of tools given the log population size and the contact rate (high vs. low). 
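Written out, the largest of the models fit below (the one including the interaction) is roughly the following (the coefficient names are ours, chosen just for exposition):

$$
T_i \sim \mathrm{Poisson}(\lambda_i), \qquad
\log \lambda_i = \alpha + \beta_1 \log(\mathrm{population}_i) + \beta_2\, c_i + \beta_3 \log(\mathrm{population}_i)\, c_i,
$$

where $T_i$ is the total number of tools and $c_i$ is the indicator for a high contact rate. The two smaller models drop the interaction term and the contact rate, respectively.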
```{r Kline} data(Kline) d <- Kline d$log_pop <- log(d$population) d$contact_high <- ifelse(d$contact=="high", 1, 0) str(d) ``` We start with a Poisson regression model with the log population size, the contact rate, and an interaction term between them (priors are informative priors as in _Statistical Rethinking_). ```{r fit10, results="hide"} fit10 <- stan_glm( total_tools ~ log_pop + contact_high + log_pop * contact_high, family = poisson(link = "log"), data = d, prior = normal(0, 1, autoscale = FALSE), prior_intercept = normal(0, 100, autoscale = FALSE), seed = 2030 ) ``` Before running other models, we check whether Poisson is good choice as the conditional observation model. ```{r loo10} loo10 <- loo(fit10) print(loo10) ``` We get at least one observation with $k>0.7$ and the estimated effective number of parameters `p_loo` is larger than the total number of parameters in the model. This indicates that Poisson might be too narrow. A negative binomial model might be better, but with so few observations it is not so clear. We can compute LOO more accurately by running Stan again for the leave-one-out folds with high $k$ estimates. When using __rstanarm__ this can be done by specifying the `k_threshold` argument: ```{r loo10-threshold} loo10 <- loo(fit10, k_threshold=0.7) print(loo10) ``` In this case we see that there is not much difference, and thus it is relatively safe to continue. As a comparison we also compute WAIC: ```{r waic10} waic10 <- waic(fit10) print(waic10) ``` The WAIC computation is giving warnings and the estimated ELPD is slightly more optimistic. We recommend using the PSIS-LOO results instead. To assess whether the contact rate and interaction term are useful, we can make a comparison to models without these terms. ```{r contact_high, results="hide"} fit11 <- update(fit10, formula = total_tools ~ log_pop + contact_high) fit12 <- update(fit10, formula = total_tools ~ log_pop) ``` ```{r loo-contact_high} (loo11 <- loo(fit11)) (loo12 <- loo(fit12)) ``` ```{r relo-contact_high} loo11 <- loo(fit11, k_threshold=0.7) loo12 <- loo(fit12, k_threshold=0.7) lpd_point <- cbind( loo10$pointwise[, "elpd_loo"], loo11$pointwise[, "elpd_loo"], loo12$pointwise[, "elpd_loo"] ) ``` For comparison we'll also compute WAIC values for these additional models: ```{r waic-contact_high} waic11 <- waic(fit11) waic12 <- waic(fit12) waics <- c( waic10$estimates["elpd_waic", 1], waic11$estimates["elpd_waic", 1], waic12$estimates["elpd_waic", 1] ) ``` The WAIC computation again gives warnings, and we recommend using PSIS-LOO instead. Finally, we compute 1) WAIC weights, 2) Pseudo-BMA weights without Bayesian bootstrap, 3) Pseudo-BMA+ weights with Bayesian bootstrap, and 4) Bayesian stacking weights. ```{r weights-contact_high} waic_wts <- exp(waics) / sum(exp(waics)) pbma_wts <- pseudobma_weights(lpd_point, BB=FALSE) pbma_BB_wts <- pseudobma_weights(lpd_point) # default is BB=TRUE stacking_wts <- stacking_weights(lpd_point) round(cbind(waic_wts, pbma_wts, pbma_BB_wts, stacking_wts), 2) ``` All weights favor the second model with the log population and the contact rate. WAIC weights and Pseudo-BMA weights (without Bayesian bootstrap) are similar, while Pseudo-BMA+ is more cautious and closer to stacking weights. It may seem surprising that Bayesian stacking is giving zero weight to the first model, but this is likely due to the fact that the estimated effect for the interaction term is close to zero and thus models 1 and 2 give very similar predictions. 
In other words, incorporating the model with the interaction (model 1) into the model average doesn't improve the predictions at all and so model 1 is given a weight of 0. On the other hand, models 2 and 3 are giving slightly different predictions and thus their combination may be slightly better than either alone. This behavior is related to the repeated similar model illustration in the milk example above. # Simpler coding using `loo_model_weights` function Although in the examples above we called the `stacking_weights` and `pseudobma_weights` functions directly, we can also use the `loo_model_weights` wrapper, which takes as its input either a list of pointwise log-likelihood matrices or a list of precomputed loo objects. There are also `loo_model_weights` methods for stanreg objects (fitted model objects from __rstanarm__) as well as fitted model objects from other packages (e.g. __brms__) that do the preparation work for the user (see, e.g., the examples at `help("loo_model_weights", package = "rstanarm")`). ```{r loo_model_weights} # using list of loo objects loo_list <- list(loo10, loo11, loo12) loo_model_weights(loo_list) loo_model_weights(loo_list, method = "pseudobma") loo_model_weights(loo_list, method = "pseudobma", BB = FALSE) ``` # References McElreath, R. (2016). _Statistical rethinking: A Bayesian course with examples in R and Stan_. Chapman & Hall/CRC. http://xcelab.net/rm/statistical-rethinking/ Piironen, J. and Vehtari, A. (2017a). Sparsity information and regularization in the horseshoe and other shrinkage priors. In Electronic Journal of Statistics, 11(2):5018-5051. [Online](https://projecteuclid.org/euclid.ejs/1513306866). Piironen, J. and Vehtari, A. (2017b). Comparison of Bayesian predictive methods for model selection. Statistics and Computing, 27(3):711-735. \doi:10.1007/s11222-016-9649-y. [Online](https://link.springer.com/article/10.1007/s11222-016-9649-y). Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. _Statistics and Computing_. 27(5), 1413--1432. \doi:10.1007/s11222-016-9696-4. [online](https://link.springer.com/article/10.1007/s11222-016-9696-4), [arXiv preprint arXiv:1507.04544](https://arxiv.org/abs/1507.04544). Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. *Journal of Machine Learning Research*, 25(72):1-58. [PDF](https://jmlr.org/papers/v25/19-556.html) Yao, Y., Vehtari, A., Simpson, D., and Gelman, A. (2018). Using stacking to average Bayesian predictive distributions. In Bayesian Analysis, \doi:10.1214/17-BA1091. [Online](https://projecteuclid.org/euclid.ba/1516093227). loo/vignettes/logo.svg0000644000176200001440000001371615076255137014557 0ustar liggesusersloo/vignettes/loo2-with-rstan.Rmd0000644000176200001440000002112514641333357016502 0ustar liggesusers--- title: "Writing Stan programs for use with the loo package" author: "Aki Vehtari and Jonah Gabry" date: "`r Sys.Date()`" output: html_vignette: toc: yes params: EVAL: !r identical(Sys.getenv("NOT_CRAN"), "true") --- ```{r settings, child="children/SETTINGS-knitr.txt"} ``` ```{r, child="children/SEE-ONLINE.txt", eval = if (isTRUE(exists("params"))) !params$EVAL else TRUE} ``` # Introduction This vignette demonstrates how to write a Stan program that computes and stores the pointwise log-likelihood required for using the __loo__ package. The other vignettes included with the package demonstrate additional functionality. 
Some sections from this vignette are excerpted from our papers * Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. _Statistics and Computing_. 27(5), 1413--1432. \doi:10.1007/s11222-016-9696-4. Links: [published](https://link.springer.com/article/10.1007/s11222-016-9696-4) | [arXiv preprint](https://arxiv.org/abs/1507.04544). * Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. *Journal of Machine Learning Research*, 25(72):1-58. [PDF](https://jmlr.org/papers/v25/19-556.html) which provide important background for understanding the methods implemented in the package. # Example: Well water in Bangladesh This example comes from a survey of residents from a small area in Bangladesh that was affected by arsenic in drinking water. Respondents with elevated arsenic levels in their wells were asked if they were interested in getting water from a neighbor's well, and a series of logistic regressions were fit to predict this binary response given various information about the households (Gelman and Hill, 2007). Here we fit a model for the well-switching response given two predictors: the arsenic level of the water in the resident's home, and the distance of the house from the nearest safe well. The sample size in this example is $N=3020$, which is not huge but is large enough that it is important to have a computational method for LOO that is fast for each data point. On the plus side, with such a large dataset, the influence of any given observation is small, and so the computations should be stable. ## Coding the Stan model Here is the Stan code for fitting the logistic regression model, which we save in a file called `logistic.stan`: ``` // Note: some syntax used in this program requires RStan >= 2.26 (or CmdStanR) // To use an older version of RStan change the line declaring `y` to: // int y[N]; data { int N; // number of data points int P; // number of predictors (including intercept) matrix[N,P] X; // predictors (including 1s for intercept) array[N] int y; // binary outcome } parameters { vector[P] beta; } model { beta ~ normal(0, 1); y ~ bernoulli_logit(X * beta); } generated quantities { vector[N] log_lik; for (n in 1:N) { log_lik[n] = bernoulli_logit_lpmf(y[n] | X[n] * beta); } } ``` We have defined the log likelihood as a vector named `log_lik` in the generated quantities block so that the individual terms will be saved by Stan. After running Stan, `log_lik` can be extracted (using the `extract_log_lik` function provided in the **loo** package) as an $S \times N$ matrix, where $S$ is the number of simulations (posterior draws) and $N$ is the number of data points. 
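To sketch how this matrix is used: writing $\ell_{s,i}$ for the entry of `log_lik` in row $s$ (draw) and column $i$ (observation), the PSIS-LOO estimate reported by `loo()` is approximately

$$
\widehat{\mathrm{elpd}}_{\mathrm{loo}} = \sum_{i=1}^N \log
\left( \frac{\sum_{s=1}^S w_i^{(s)} \exp(\ell_{s,i})}{\sum_{s=1}^S w_i^{(s)}} \right),
$$

where the $w_i^{(s)}$ are the Pareto smoothed importance weights for leaving out observation $i$ (see the references above for the details and the diagnostics that accompany this approximation).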
## Fitting the model with RStan Next we fit the model in Stan using the **rstan** package: ```{r, eval=FALSE} library("rstan") # Prepare data url <- "http://stat.columbia.edu/~gelman/arm/examples/arsenic/wells.dat" wells <- read.table(url) wells$dist100 <- with(wells, dist / 100) X <- model.matrix(~ dist100 + arsenic, wells) standata <- list(y = wells$switch, X = X, N = nrow(X), P = ncol(X)) # Fit model fit_1 <- stan("logistic.stan", data = standata) print(fit_1, pars = "beta") ``` ``` mean se_mean sd 2.5% 25% 50% 75% 97.5% n_eff Rhat beta[1] 0.00 0 0.08 -0.16 -0.05 0.00 0.05 0.15 1964 1 beta[2] -0.89 0 0.10 -1.09 -0.96 -0.89 -0.82 -0.68 2048 1 beta[3] 0.46 0 0.04 0.38 0.43 0.46 0.49 0.54 2198 1 ``` ## Computing approximate leave-one-out cross-validation using PSIS-LOO We can then use the **loo** package to compute the efficient PSIS-LOO approximation to exact LOO-CV: ```{r, eval=FALSE} library("loo") # Extract pointwise log-likelihood # using merge_chains=FALSE returns an array, which is easier to # use with relative_eff() log_lik_1 <- extract_log_lik(fit_1, merge_chains = FALSE) # as of loo v2.0.0 we can optionally provide relative effective sample sizes # when calling loo, which allows for better estimates of the PSIS effective # sample sizes and Monte Carlo error r_eff <- relative_eff(exp(log_lik_1), cores = 2) # preferably use more than 2 cores (as many cores as possible) # will use value of 'mc.cores' option if cores is not specified loo_1 <- loo(log_lik_1, r_eff = r_eff, cores = 2) print(loo_1) ``` ``` Computed from 4000 by 3020 log-likelihood matrix Estimate SE elpd_loo -1968.5 15.6 p_loo 3.2 0.1 looic 3937.0 31.2 ------ Monte Carlo SE of elpd_loo is 0.0. MCSE and ESS estimates assume MCMC draws (r_eff in [0.5, 1.3]). All Pareto k estimates are good (k < 0.7). See help('pareto-k-diagnostic') for details. ``` The printed output from the `loo` function shows the estimates $\widehat{\mbox{elpd}}_{\rm loo}$ (expected log predictive density), $\widehat{p}_{\rm loo}$ (effective number of parameters), and ${\rm looic} =-2\, \widehat{\mbox{elpd}}_{\rm loo}$ (the LOO information criterion). The line at the bottom of the printed output provides information about the reliability of the LOO approximation (the interpretation of the $k$ parameter is explained in `help('pareto-k-diagnostic')` and in greater detail in Vehtari, Simpson, Gelman, Yao, and Gabry (2019)). In this case the message tells us that all of the estimates for $k$ are fine. ## Comparing models To compare this model to an alternative model for the same data we can use the `loo_compare` function in the **loo** package. First we'll fit a second model to the well-switching data, using `log(arsenic)` instead of `arsenic` as a predictor: ```{r, eval=FALSE} standata$X[, "arsenic"] <- log(standata$X[, "arsenic"]) fit_2 <- stan(fit = fit_1, data = standata) log_lik_2 <- extract_log_lik(fit_2, merge_chains = FALSE) r_eff_2 <- relative_eff(exp(log_lik_2)) loo_2 <- loo(log_lik_2, r_eff = r_eff_2, cores = 2) print(loo_2) ``` ``` Computed from 4000 by 3020 log-likelihood matrix Estimate SE elpd_loo -1952.3 16.2 p_loo 3.1 0.1 looic 3904.6 32.4 ------ Monte Carlo SE of elpd_loo is 0.0. MCSE and ESS estimates assume MCMC draws (r_eff in [0.4, 1.2]). All Pareto k estimates are good (k < 0.7). See help('pareto-k-diagnostic') for details. 
``` We can now compare the models on LOO using the `loo_compare` function: ```{r, eval=FALSE} # Compare comp <- loo_compare(loo_1, loo_2) ``` This new object, `comp`, contains the estimated difference of expected leave-one-out prediction errors between the two models, along with the standard error: ```{r, eval=FALSE} print(comp) # can set simplify=FALSE for more detailed print output ``` ``` elpd_diff se_diff model2 0.0 0.0 model1 -16.3 4.4 ``` The first column shows the difference in ELPD relative to the model with the largest ELPD. In this case, the difference in `elpd` and its scale relative to the approximate standard error of the difference) indicates a preference for the second model (`model2`). # References Gelman, A., and Hill, J. (2007). *Data Analysis Using Regression and Multilevel Hierarchical Models.* Cambridge University Press. Stan Development Team (2017). _The Stan C++ Library, Version 2.17.0._ https://mc-stan.org/ Stan Development Team (2018) _RStan: the R interface to Stan, Version 2.17.3._ https://mc-stan.org/ Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. _Statistics and Computing_. 27(5), 1413--1432. \doi:10.1007/s11222-016-9696-4. [online](https://link.springer.com/article/10.1007/s11222-016-9696-4), [arXiv preprint arXiv:1507.04544](https://arxiv.org/abs/1507.04544). Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. *Journal of Machine Learning Research*, 25(72):1-58. [PDF](https://jmlr.org/papers/v25/19-556.html) loo/vignettes/loo2-large-data.Rmd0000644000176200001440000005044314641333357016410 0ustar liggesusers--- title: "Using Leave-one-out cross-validation for large data" author: "Mans Magnusson, Paul Bürkner, Aki Vehtari and Jonah Gabry" date: "`r Sys.Date()`" output: html_vignette: toc: yes params: EVAL: !r identical(Sys.getenv("NOT_CRAN"), "true") --- ```{r settings, child="children/SETTINGS-knitr.txt"} ``` ```{r, child="children/SEE-ONLINE.txt", eval = if (isTRUE(exists("params"))) !params$EVAL else TRUE} ``` # Introduction This vignette demonstrates how to do leave-one-out cross-validation for large data using the __loo__ package and Stan. There are two approaches covered: LOO with subsampling and LOO using approximations to posterior distributions. Some sections from this vignette are excerpted from the papers * Magnusson, M., Riis Andersen, M., Jonasson, J. and Vehtari, A. (2020). Leave-One-Out Cross-Validation for Model Comparison in Large Data. Proceedings of the 23rd International Conference on Artificial Intelligence and Statistics (AISTATS), in PMLR 108. [arXiv preprint arXiv:2001.00980](https://arxiv.org/abs/2001.00980). * Magnusson, M., Andersen, M., Jonasson, J. & Vehtari, A. (2019). Bayesian leave-one-out cross-validation for large data. Proceedings of the 36th International Conference on Machine Learning, in PMLR 97:4244-4253 [online](http://proceedings.mlr.press/v97/magnusson19a.html), [arXiv preprint arXiv:1904.10679](https://arxiv.org/abs/1904.10679). * Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. _Statistics and Computing_. 27(5), 1413--1432. \doi:10.1007/s11222-016-9696-4. Links: [published](https://link.springer.com/article/10.1007/s11222-016-9696-4) | [arXiv preprint](https://arxiv.org/abs/1507.04544). * Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. 
*Journal of Machine Learning Research*, 25(72):1-58. [PDF](https://jmlr.org/papers/v25/19-556.html) which provide important background for understanding the methods implemented in the package. # Setup In addition to the __loo__ package, we'll also be using __rstan__: ```{r setup, message=FALSE} library("rstan") library("loo") set.seed(4711) ``` # Example: Well water in Bangladesh We will use the same example as in the vignette [_Writing Stan programs for use with the loo package_](http://mc-stan.org/loo/articles/loo2-with-rstan.html). See that vignette for a description of the problem and data. The sample size in this example is only $N=3020$, which is not large enough to _require_ the special methods for large data described in this vignette, but is sufficient for demonstration purposes in this tutorial. ## Coding the Stan model Here is the Stan code for fitting the logistic regression model, which we save in a file called `logistic.stan`: ``` // Note: some syntax used in this program requires RStan >= 2.26 (or CmdStanR) // To use an older version of RStan change the line declaring `y` to: // int y[N]; data { int N; // number of data points int P; // number of predictors (including intercept) matrix[N,P] X; // predictors (including 1s for intercept) array[N] int y; // binary outcome } parameters { vector[P] beta; } model { beta ~ normal(0, 1); y ~ bernoulli_logit(X * beta); } ``` Importantly, unlike the general approach recommended in [_Writing Stan programs for use with the loo package_](http://mc-stan.org/loo/articles/loo2-with-rstan.html), we do _not_ compute the log-likelihood for each observation in the `generated quantities` block of the Stan program. Here we are assuming we have a large data set (larger than the one we're actually using in this demonstration) and so it is preferable to instead define a function in R to compute the log-likelihood for each data point when needed rather than storing all of the log-likelihood values in memory. The log-likelihood in R can be coded as follows: ```{r llfun_logistic} # we'll add an argument log to toggle whether this is a log-likelihood or # likelihood function. this will be useful later in the vignette. llfun_logistic <- function(data_i, draws, log = TRUE) { x_i <- as.matrix(data_i[, which(grepl(colnames(data_i), pattern = "X")), drop=FALSE]) logit_pred <- draws %*% t(x_i) dbinom(x = data_i$y, size = 1, prob = 1/(1 + exp(-logit_pred)), log = log) } ``` The function `llfun_logistic()` needs to have arguments `data_i` and `draws`. Below we will test that the function is working by using the `loo_i()` function. ## Fitting the model with RStan Next we fit the model in Stan using the **rstan** package: ```{r, eval=FALSE} # Prepare data url <- "http://stat.columbia.edu/~gelman/arm/examples/arsenic/wells.dat" wells <- read.table(url) wells$dist100 <- with(wells, dist / 100) X <- model.matrix(~ dist100 + arsenic, wells) standata <- list(y = wells$switch, X = X, N = nrow(X), P = ncol(X)) # Compile stan_mod <- stan_model("logistic.stan") # Fit model fit_1 <- sampling(stan_mod, data = standata, seed = 4711) print(fit_1, pars = "beta") ``` ``` mean se_mean sd 2.5% 25% 50% 75% 97.5% n_eff Rhat beta[1] 0.00 0 0.08 -0.15 -0.05 0.00 0.06 0.16 1933 1 beta[2] -0.89 0 0.10 -1.09 -0.96 -0.89 -0.82 -0.69 2332 1 beta[3] 0.46 0 0.04 0.38 0.43 0.46 0.49 0.54 2051 1 ``` Before we move on to computing LOO we can now test that the log-likelihood function we wrote is working as it should. 
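As a quick sanity check we can also call the function directly on a single made-up observation with a tiny matrix of fake posterior draws. The data frame and draws below are invented solely for this check; the function only needs a `y` column and predictor columns whose names contain `"X"`:

```{r llfun-check, eval=FALSE}
# one fake observation and 10 fake draws of the 3 regression coefficients
fake_row <- data.frame(y = 1, X1 = 1, X2 = 0.5, X3 = 2)
fake_draws <- matrix(rnorm(10 * 3, 0, 0.1), nrow = 10, ncol = 3)

# should return 10 log-likelihood values (one per draw)
llfun_logistic(data_i = fake_row, draws = fake_draws)
```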
The `loo_i()` function is a helper function that can be used to test a log-likelihood function on a single observation. ```{r, eval=FALSE} # used for draws argument to loo_i parameter_draws_1 <- extract(fit_1)$beta # used for data argument to loo_i stan_df_1 <- as.data.frame(standata) # compute relative efficiency (this is slow and optional but is recommended to allow # for adjusting PSIS effective sample size based on MCMC effective sample size) r_eff <- relative_eff(llfun_logistic, log = FALSE, # relative_eff wants likelihood not log-likelihood values chain_id = rep(1:4, each = 1000), data = stan_df_1, draws = parameter_draws_1, cores = 2) loo_i(i = 1, llfun_logistic, r_eff = r_eff, data = stan_df_1, draws = parameter_draws_1) ``` ``` $pointwise elpd_loo mcse_elpd_loo p_loo looic influence_pareto_k 1 -0.3314552 0.0002887608 0.0003361772 0.6629103 -0.05679886 ... ``` # Approximate LOO-CV using PSIS-LOO and subsampling We can then use the `loo_subsample()` function to compute the efficient PSIS-LOO approximation to exact LOO-CV using subsampling: ```{r, eval=FALSE} set.seed(4711) loo_ss_1 <- loo_subsample( llfun_logistic, observations = 100, # take a subsample of size 100 cores = 2, # these next objects were computed above r_eff = r_eff, draws = parameter_draws_1, data = stan_df_1 ) print(loo_ss_1) ``` ``` Computed from 4000 by 100 subsampled log-likelihood values from 3020 total observations. Estimate SE subsampling SE elpd_loo -1968.5 15.6 0.3 p_loo 3.1 0.1 0.4 looic 3936.9 31.2 0.6 ------ Monte Carlo SE of elpd_loo is 0.0. MCSE and ESS estimates assume MCMC draws (r_eff in [0.9, 1.0]). All Pareto k estimates are good (k < 0.7). See help('pareto-k-diagnostic') for details. ``` The `loo_subsample()` function creates an object of class `psis_loo_ss`, that inherits from `psis_loo, loo` (the classes of regular `loo` objects). The printed output above shows the estimates $\widehat{\mbox{elpd}}_{\rm loo}$ (expected log predictive density), $\widehat{p}_{\rm loo}$ (effective number of parameters), and ${\rm looic} =-2\, \widehat{\mbox{elpd}}_{\rm loo}$ (the LOO information criterion). Unlike when using `loo()`, when using `loo_subsample()` there is an additional column giving the "subsampling SE", which reflects the additional uncertainty due to the subsampling used. The line at the bottom of the printed output provides information about the reliability of the LOO approximation (the interpretation of the $k$ parameter is explained in `help('pareto-k-diagnostic')` and in greater detail in Vehtari, Simpson, Gelman, Yao, and Gabry (2019)). In this case, the message tells us that all of the estimates for $k$ are fine _for this given subsample_. ## Adding additional subsamples If we are not satisfied with the subsample size (i.e., the accuracy) we can simply add more samples until we are satisfied using the `update()` method. ```{r, eval=FALSE} set.seed(4711) loo_ss_1b <- update( loo_ss_1, observations = 200, # subsample 200 instead of 100 r_eff = r_eff, draws = parameter_draws_1, data = stan_df_1 ) print(loo_ss_1b) ``` ``` Computed from 4000 by 200 subsampled log-likelihood values from 3020 total observations. Estimate SE subsampling SE elpd_loo -1968.3 15.6 0.2 p_loo 3.2 0.1 0.4 looic 3936.7 31.2 0.5 ------ Monte Carlo SE of elpd_loo is 0.0. MCSE and ESS estimates assume MCMC draws (r_eff in [0.9, 1.0]). All Pareto k estimates are good (k < 0.7). See help('pareto-k-diagnostic') for details. 
``` ## Specifying estimator and sampling method The performance relies on two components: the estimation method and the approximation used for the elpd. See the documentation for `loo_subsample()` more information on which estimators and approximations are implemented. The default implementation is using the point log predictive density evaluated at the mean of the posterior (`loo_approximation="plpd"`) and the difference estimator (`estimator="diff_srs"`). This combination has a focus on fast inference. But we can easily use other estimators as well as other elpd approximations, for example: ```{r, eval=FALSE} set.seed(4711) loo_ss_1c <- loo_subsample( x = llfun_logistic, r_eff = r_eff, draws = parameter_draws_1, data = stan_df_1, observations = 100, estimator = "hh_pps", # use Hansen-Hurwitz loo_approximation = "lpd", # use lpd instead of plpd loo_approximation_draws = 100, cores = 2 ) print(loo_ss_1c) ``` ``` Computed from 4000 by 100 subsampled log-likelihood values from 3020 total observations. Estimate SE subsampling SE elpd_loo -1968.9 15.4 0.5 p_loo 3.5 0.2 0.5 looic 3937.9 30.7 1.1 ------ Monte Carlo SE of elpd_loo is 0.0. MCSE and ESS estimates assume MCMC draws (r_eff in [0.9, 1.0]). All Pareto k estimates are good (k < 0.7). See help('pareto-k-diagnostic') for details. ``` See the documentation and references for `loo_subsample()` for details on the implemented approximations. # Approximate LOO-CV using PSIS-LOO with posterior approximations Using posterior approximations, such as variational inference and Laplace approximations, can further speed-up LOO-CV for large data. Here we demonstrate using a Laplace approximation in Stan. ```{r, eval=FALSE} fit_laplace <- optimizing(stan_mod, data = standata, draws = 2000, importance_resampling = TRUE) parameter_draws_laplace <- fit_laplace$theta_tilde # draws from approximate posterior log_p <- fit_laplace$log_p # log density of the posterior log_g <- fit_laplace$log_g # log density of the approximation ``` Using the posterior approximation we can then do LOO-CV by correcting for the posterior approximation when we compute the elpd. To do this we use the `loo_approximate_posterior()` function. ```{r, eval=FALSE} set.seed(4711) loo_ap_1 <- loo_approximate_posterior( x = llfun_logistic, draws = parameter_draws_laplace, data = stan_df_1, log_p = log_p, log_g = log_g, cores = 2 ) print(loo_ap_1) ``` The function creates a class, `psis_loo_ap` that inherits from `psis_loo, loo`. ``` Computed from 2000 by 3020 log-likelihood matrix Estimate SE elpd_loo -1968.4 15.6 p_loo 3.2 0.2 looic 3936.8 31.2 ------ Posterior approximation correction used. Monte Carlo SE of elpd_loo is 0.0. MCSE and ESS estimates assume independent draws (r_eff=1). All Pareto k estimates are good (k < 0.7). See help('pareto-k-diagnostic') for details. ``` ## Combining the posterior approximation method with subsampling The posterior approximation correction can also be used together with subsampling: ```{r, eval=FALSE} set.seed(4711) loo_ap_ss_1 <- loo_subsample( x = llfun_logistic, draws = parameter_draws_laplace, data = stan_df_1, log_p = log_p, log_g = log_g, observations = 100, cores = 2 ) print(loo_ap_ss_1) ``` ``` Computed from 2000 by 100 subsampled log-likelihood values from 3020 total observations. Estimate SE subsampling SE elpd_loo -1968.2 15.6 0.4 p_loo 2.9 0.1 0.5 looic 3936.4 31.1 0.8 ------ Posterior approximation correction used. Monte Carlo SE of elpd_loo is 0.0. MCSE and ESS estimates assume independent draws (r_eff=1). 
All Pareto k estimates are good (k < 0.7). See help('pareto-k-diagnostic') for details. ``` The object created is of class `psis_loo_ss`, which inherits from the `psis_loo_ap` class previously described. ## Comparing models To compare this model to an alternative model for the same data we can use the `loo_compare()` function just as we would if using `loo()` instead of `loo_subsample()` or `loo_approximate_posterior()`. First we'll fit a second model to the well-switching data, using `log(arsenic)` instead of `arsenic` as a predictor: ```{r, eval=FALSE} standata$X[, "arsenic"] <- log(standata$X[, "arsenic"]) fit_2 <- sampling(stan_mod, data = standata) parameter_draws_2 <- extract(fit_2)$beta stan_df_2 <- as.data.frame(standata) # recompute subsampling loo for first model for demonstration purposes # compute relative efficiency (this is slow and optional but is recommended to allow # for adjusting PSIS effective sample size based on MCMC effective sample size) r_eff_1 <- relative_eff( llfun_logistic, log = FALSE, # relative_eff wants likelihood not log-likelihood values chain_id = rep(1:4, each = 1000), data = stan_df_1, draws = parameter_draws_1, cores = 2 ) set.seed(4711) loo_ss_1 <- loo_subsample( x = llfun_logistic, r_eff = r_eff_1, draws = parameter_draws_1, data = stan_df_1, observations = 200, cores = 2 ) # compute subsampling loo for a second model (with log-arsenic) r_eff_2 <- relative_eff( llfun_logistic, log = FALSE, # relative_eff wants likelihood not log-likelihood values chain_id = rep(1:4, each = 1000), data = stan_df_2, draws = parameter_draws_2, cores = 2 ) loo_ss_2 <- loo_subsample( x = llfun_logistic, r_eff = r_eff_2, draws = parameter_draws_2, data = stan_df_2, observations = 200, cores = 2 ) print(loo_ss_2) ``` ``` Computed from 4000 by 100 subsampled log-likelihood values from 3020 total observations. Estimate SE subsampling SE elpd_loo -1952.0 16.2 0.2 p_loo 2.6 0.1 0.3 looic 3903.9 32.4 0.4 ------ Monte Carlo SE of elpd_loo is 0.0. MCSE and ESS estimates assume MCMC draws (r_eff in [1.0, 1.1]). All Pareto k estimates are good (k < 0.7). See help('pareto-k-diagnostic') for details. ``` We can now compare the models on LOO using the `loo_compare` function: ```{r, eval=FALSE} # Compare comp <- loo_compare(loo_ss_1, loo_ss_2) print(comp) ``` ``` Warning: Different subsamples in 'model2' and 'model1'. Naive diff SE is used. elpd_diff se_diff subsampling_se_diff model2 0.0 0.0 0.0 model1 16.5 22.5 0.4 ``` This new object `comp` contains the estimated difference of expected leave-one-out prediction errors between the two models, along with the standard error. As the warning indicates, because different subsamples were used the comparison will not take the correlations between different observations into account. Here we see that the naive SE is 22.5 and we cannot see any difference in performance between the models. To force subsampling to use the same observations for each of the models we can simply extract the observations used in `loo_ss_1` and use them in `loo_ss_2` by supplying the `loo_ss_1` object to the `observations` argument. 
```{r, eval=FALSE} loo_ss_2 <- loo_subsample( x = llfun_logistic, r_eff = r_eff_2, draws = parameter_draws_2, data = stan_df_2, observations = loo_ss_1, cores = 2 ) ``` We could also supply the subsampling indices using the `obs_idx()` helper function: ```{r, eval=FALSE} idx <- obs_idx(loo_ss_1) loo_ss_2 <- loo_subsample( x = llfun_logistic, r_eff = r_eff_2, draws = parameter_draws_2, data = stan_df_2, observations = idx, cores = 2 ) ``` ``` Simple random sampling with replacement assumed. ``` This results in a message indicating that we assume these observations to have been sampled with simple random sampling, which is true because we had used the default `"diff_srs"` estimator for `loo_ss_1`. We can now compare the models and estimate the difference based on the same subsampled observations. ```{r, eval=FALSE} comp <- loo_compare(loo_ss_1, loo_ss_2) print(comp) ``` ``` elpd_diff se_diff subsampling_se_diff model2 0.0 0.0 0.0 model1 16.1 4.4 0.1 ``` First, notice that now the `se_diff` is now around 4 (as opposed to 20 when using different subsamples). The first column shows the difference in ELPD relative to the model with the largest ELPD. In this case, the difference in `elpd` and its scale relative to the approximate standard error of the difference) indicates a preference for the second model (`model2`). Since the subsampling uncertainty is so small in this case it can effectively be ignored. If we need larger subsamples we can simply add samples using the `update()` method demonstrated earlier. It is also possible to compare a subsampled loo computation with a full loo object. ```{r, eval=FALSE} # use loo() instead of loo_subsample() to compute full PSIS-LOO for model 2 loo_full_2 <- loo( x = llfun_logistic, r_eff = r_eff_2, draws = parameter_draws_2, data = stan_df_2, cores = 2 ) loo_compare(loo_ss_1, loo_full_2) ``` ``` Estimated elpd_diff using observations included in loo calculations for all models. ``` Because we are comparing a non-subsampled loo calculation to a subsampled calculation we get the message that only the observations that are included in the loo calculations for both `model1` and `model2` are included in the computations for the comparison. ``` elpd_diff se_diff subsampling_se_diff model2 0.0 0.0 0.0 model1 16.3 4.4 0.3 ``` Here we actually see an increase in `subsampling_se_diff`, but this is due to a technical detail not elaborated here. In general, the difference should be better or negligible. # References Gelman, A., and Hill, J. (2007). *Data Analysis Using Regression and Multilevel Hierarchical Models.* Cambridge University Press. Stan Development Team (2017). _The Stan C++ Library, Version 2.17.0._ https://mc-stan.org/ Stan Development Team (2018) _RStan: the R interface to Stan, Version 2.17.3._ https://mc-stan.org/ Magnusson, M., Riis Andersen, M., Jonasson, J. and Vehtari, A. (2020). Leave-One-Out Cross-Validation for Model Comparison in Large Data. Proceedings of the 23rd International Conference on Artificial Intelligence and Statistics (AISTATS), in PMLR 108. [arXiv preprint arXiv:2001.00980](https://arxiv.org/abs/2001.00980). Magnusson, M., Andersen, M., Jonasson, J. & Vehtari, A. (2019). Bayesian leave-one-out cross-validation for large data. Proceedings of the 36th International Conference on Machine Learning, in PMLR 97:4244-4253 [online](http://proceedings.mlr.press/v97/magnusson19a.html), [arXiv preprint arXiv:1904.10679](https://arxiv.org/abs/1904.10679). Vehtari, A., Gelman, A., and Gabry, J. (2017). 
Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. _Statistics and Computing_. 27(5), 1413--1432. \doi:10.1007/s11222-016-9696-4. [online](https://link.springer.com/article/10.1007/s11222-016-9696-4), [arXiv preprint arXiv:1507.04544](https://arxiv.org/abs/1507.04544). Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. *Journal of Machine Learning Research*, 25(72):1-58. [PDF](https://jmlr.org/papers/v25/19-556.html) loo/vignettes/loo2-elpd.Rmd0000644000176200001440000001760115122301125015311 0ustar liggesusers--- title: "Holdout validation and K-fold cross-validation of Stan programs with the loo package" author: "Bruno Nicenboim" date: "`r Sys.Date()`" output: html_vignette: toc: yes params: EVAL: !r identical(Sys.getenv("NOT_CRAN"), "true") --- ```{r, child="children/SETTINGS-knitr.txt"} ``` ```{r, child="children/SEE-ONLINE.txt", eval = if (isTRUE(exists("params"))) !params$EVAL else TRUE} ``` # Introduction This vignette demonstrates how to do holdout validation and K-fold cross-validation with __loo__ for a Stan program. # Example: Eradication of Roaches using holdout validation approach This vignette uses the same example as in the vignettes [_Using the loo package (version >= 2.0.0)_](http://mc-stan.org/loo/articles/loo2-example.html) and [_Avoiding model refits in leave-one-out cross-validation with moment matching_](https://mc-stan.org/loo/articles/loo2-moment-matching.html). ## Coding the Stan model Here is the Stan code for fitting a Poisson regression model: ```{r stancode} # Note: some syntax used in this Stan program requires RStan >= 2.26 (or CmdStanR) # To use an older version of RStan change the line declaring `y` to: int y[N]; stancode <- " data { int K; int N; matrix[N,K] x; array[N] int y; vector[N] offset_; // offset is reserved keyword in Stan so use offset_ real beta_prior_scale; real alpha_prior_scale; } parameters { vector[K] beta; real intercept; } model { y ~ poisson(exp(x * beta + intercept + offset_)); beta ~ normal(0,beta_prior_scale); intercept ~ normal(0,alpha_prior_scale); } generated quantities { vector[N] log_lik; for (n in 1:N) log_lik[n] = poisson_lpmf(y[n] | exp(x[n] * beta + intercept + offset_[n])); } " ``` Following the usual approach recommended in [_Writing Stan programs for use with the loo package_](http://mc-stan.org/loo/articles/loo2-with-rstan.html), we compute the log-likelihood for each observation in the `generated quantities` block of the Stan program. ## Setup In addition to __loo__, we load the __rstan__ package for fitting the model. We will also need the __rstanarm__ package for the data. ```{r setup, message=FALSE} library("rstan") library("loo") seed <- 9547 set.seed(seed) ``` # Holdout validation For this approach, the model is first fit to the "train" data and then is evaluated on the held-out "test" data. 
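In terms of the quantities computed below, the holdout estimate that `elpd()` will return is simply (a sketch, with $S$ the number of posterior draws obtained from the training data):

$$
\widehat{\mathrm{elpd}}_{\mathrm{holdout}} = \sum_{i \in \mathrm{test}} \log
\left( \frac{1}{S} \sum_{s=1}^{S} p\bigl(y_i \mid \theta^{(s)}\bigr) \right),
$$

where $\theta^{(s)}$ are draws from the posterior conditioned only on the training data, so no importance sampling correction is needed.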
## Splitting the data between train and test

The data is divided between train (80% of the data) and test (20%):

```{r modelfit-holdout, message=FALSE}
# Prepare data
data(roaches, package = "rstanarm")
roaches$roach1 <- sqrt(roaches$roach1)
roaches$offset <- log(roaches[,"exposure2"])

# randomly assign 20% of the observations to the test set:
roaches$test <- 0
roaches$test[sample(nrow(roaches), size = round(0.2 * nrow(roaches)))] <- 1

# data to "train" the model
data_train <- list(y = roaches$y[roaches$test == 0],
                   x = as.matrix(roaches[roaches$test == 0, c("roach1", "treatment", "senior")]),
                   N = nrow(roaches[roaches$test == 0,]),
                   K = 3,
                   offset_ = roaches$offset[roaches$test == 0],
                   beta_prior_scale = 2.5,
                   alpha_prior_scale = 5.0)

# data to "test" the model
data_test <- list(y = roaches$y[roaches$test == 1],
                  x = as.matrix(roaches[roaches$test == 1, c("roach1", "treatment", "senior")]),
                  N = nrow(roaches[roaches$test == 1,]),
                  K = 3,
                  offset_ = roaches$offset[roaches$test == 1],
                  beta_prior_scale = 2.5,
                  alpha_prior_scale = 5.0)
```

## Fitting the model with RStan

Next we fit the model to the "train" data in Stan using the __rstan__ package:

```{r fit-train}
# Compile
stanmodel <- stan_model(model_code = stancode)
# Fit model
fit <- sampling(stanmodel, data = data_train, seed = seed, refresh = 0)
```

We recompute the generated quantities using the posterior draws conditional on the training data, but we now pass in the held-out data to get the log predictive densities for the test data. Because we are using independent data, the log predictive density coincides with the log likelihood of the test data.

```{r gen-test}
gen_test <- gqs(stanmodel, draws = as.matrix(fit), data = data_test)
log_pd <- extract_log_lik(gen_test)
```

## Computing holdout elpd:

Now we evaluate the predictive performance of the model on the test data using `elpd()`.

```{r elpd-holdout}
(elpd_holdout <- elpd(log_pd))
```

When one wants to compare different models, the function `loo_compare()` can be used to assess the difference in performance.

# K-fold cross validation

For this approach the data is divided into folds, and each time one fold is held out for testing while the rest of the data is used to fit the model (see Vehtari et al., 2017).

## Splitting the data in folds

We use the data that is already pre-processed and divide it into 10 random folds using `kfold_split_random`:

```{r prepare-folds, message=FALSE}
# Prepare data
roaches$fold <- kfold_split_random(K = 10, N = nrow(roaches))
```

## Fitting and extracting the log pointwise predictive densities for each fold

We now loop over the 10 folds. In each fold we do the following. First, we fit the model to all the observations except the ones belonging to the left-out fold. Second, we compute the log pointwise predictive densities for the left-out fold. Last, we store the predictive densities for the observations of the left-out fold in a matrix. The output of this loop is a matrix of the log pointwise predictive densities of all the observations.
```{r} # Prepare a matrix with the number of post-warmup iterations by number of observations: log_pd_kfold <- matrix(nrow = 4000, ncol = nrow(roaches)) # Loop over the folds for(k in 1:10){ data_train <- list(y = roaches$y[roaches$fold != k], x = as.matrix(roaches[roaches$fold != k, c("roach1", "treatment", "senior")]), N = nrow(roaches[roaches$fold != k,]), K = 3, offset_ = roaches$offset[roaches$fold != k], beta_prior_scale = 2.5, alpha_prior_scale = 5.0 ) data_test <- list(y = roaches$y[roaches$fold == k], x = as.matrix(roaches[roaches$fold == k, c("roach1", "treatment", "senior")]), N = nrow(roaches[roaches$fold == k,]), K = 3, offset_ = roaches$offset[roaches$fold == k], beta_prior_scale = 2.5, alpha_prior_scale = 5.0 ) fit <- sampling(stanmodel, data = data_train, seed = seed, refresh = 0) gen_test <- gqs(stanmodel, draws = as.matrix(fit), data= data_test) log_pd_kfold[, roaches$fold == k] <- extract_log_lik(gen_test) } ``` ## Computing K-fold elpd: Now we evaluate the predictive performance of the model on the 10 folds using `elpd()`. ```{r elpd-kfold} (elpd_kfold <- elpd(log_pd_kfold)) ``` If one wants to compare several models (with `loo_compare`), one should use the same folds for all the different models. # References Gelman, A., and Hill, J. (2007). *Data Analysis Using Regression and Multilevel Hierarchical Models.* Cambridge University Press. Stan Development Team (2020) _RStan: the R interface to Stan, Version 2.21.1_ https://mc-stan.org Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. _Statistics and Computing_. 27(5), 1413--1432. \doi:10.1007/s11222-016-9696-4. Links: [published](https://link.springer.com/article/10.1007/s11222-016-9696-4) | [arXiv preprint](https://arxiv.org/abs/1507.04544). loo/vignettes/children/0000755000176200001440000000000013761524476014662 5ustar liggesusersloo/vignettes/children/SETTINGS-knitr.txt0000644000176200001440000000041613703433563020001 0ustar liggesusers```{r SETTINGS-knitr, include=FALSE} stopifnot(require(knitr)) opts_chunk$set( comment=NA, eval = if (isTRUE(exists("params"))) params$EVAL else FALSE, dev = "png", dpi = 150, fig.asp = 0.618, fig.width = 5, out.width = "60%", fig.align = "center" ) ``` loo/vignettes/children/SEE-ONLINE.txt0000644000176200001440000000016713761524476017025 0ustar liggesusers**NOTE: We recommend viewing the fully rendered version of this vignette online at https://mc-stan.org/loo/articles/** loo/vignettes/loo2-example.Rmd0000644000176200001440000003000615122262764016031 0ustar liggesusers--- title: "Using the loo package (version >= 2.0.0)" author: "Aki Vehtari and Jonah Gabry" date: "`r Sys.Date()`" output: html_vignette: toc: yes params: EVAL: !r identical(Sys.getenv("NOT_CRAN"), "true") --- ```{r, child="children/SETTINGS-knitr.txt"} ``` ```{r, child="children/SEE-ONLINE.txt", eval = if (isTRUE(exists("params"))) !params$EVAL else TRUE} ``` # Introduction This vignette demonstrates how to use the __loo__ package to carry out Pareto smoothed importance-sampling leave-one-out cross-validation (PSIS-LOO) for purposes of model checking and model comparison. In this vignette we can't provide all necessary background information on PSIS-LOO and its diagnostics (Pareto $k$ and effective sample size), so we encourage readers to refer to the following papers for more details: * Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. _Statistics and Computing_. 
27(5), 1413--1432. \doi:10.1007/s11222-016-9696-4. Links: [published](https://link.springer.com/article/10.1007/s11222-016-9696-4) | [preprint arXiv](https://arxiv.org/abs/1507.04544). * Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. *Journal of Machine Learning Research*, 25(72):1-58. [PDF](https://jmlr.org/papers/v25/19-556.html) # Setup In addition to the __loo__ package, we'll also be using __rstanarm__ and __bayesplot__: ```{r setup, message=FALSE} library("rstanarm") library("bayesplot") library("loo") ``` # Example: Poisson vs negative binomial for the roaches dataset ## Background and model fitting The Poisson and negative binomial regression models used below in our example, as well as the `stan_glm` function used to fit the models, are covered in more depth in the __rstanarm__ vignette [_Estimating Generalized Linear Models for Count Data with rstanarm_](http://mc-stan.org/rstanarm/articles/count.html). In the rest of this vignette we will assume the reader is already familiar with these kinds of models. ### Roaches data The example data we'll use comes from Chapter 8.3 of Gelman and Hill (2007). We want to make inferences about the efficacy of a certain pest management system at reducing the number of roaches in urban apartments. Here is how Gelman and Hill describe the experiment and data (pg. 161): > the treatment and control were applied to 160 and 104 apartments, respectively, and the outcome measurement $y_i$ in each apartment $i$ was the number of roaches caught in a set of traps. Different apartments had traps for different numbers of days In addition to an intercept, the regression predictors for the model are `roach1`, the pre-treatment number of roaches (rescaled above to be in units of hundreds), the treatment indicator `treatment`, and a variable indicating whether the apartment is in a building restricted to elderly residents `senior`. Because the number of days for which the roach traps were used is not the same for all apartments in the sample, we use the `offset` argument to specify that `log(exposure2)` should be added to the linear predictor. ```{r data} # the 'roaches' data frame is included with the rstanarm package data(roaches) str(roaches) # rescale to units of hundreds of roaches roaches$roach1 <- roaches$roach1 / 100 ``` ### Fit Poisson model We'll fit a simple Poisson regression model using the `stan_glm` function from the __rstanarm__ package. ```{r count-roaches-mcmc, results="hide"} fit1 <- stan_glm( formula = y ~ roach1 + treatment + senior, offset = log(exposure2), data = roaches, family = poisson(link = "log"), prior = normal(0, 2.5, autoscale = TRUE), prior_intercept = normal(0, 5, autoscale = TRUE), seed = 12345 ) ``` Usually we would also run posterior predictive checks as shown in the __rstanarm__ vignette [Estimating Generalized Linear Models for Count Data with rstanarm](http://mc-stan.org/rstanarm/articles/count.html), but here we focus only on methods provided by the __loo__ package.
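For reference, the model whose predictive performance we will be checking can be written as (with $x_i$ collecting `roach1`, `treatment`, and `senior` for apartment $i$):

$$
y_i \sim \mathrm{Poisson}(\lambda_i), \qquad
\log \lambda_i = \alpha + x_i^\top \beta + \log(\mathrm{exposure2}_i),
$$

where the last term is the offset discussed above.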
## Using the __loo__ package for model checking and comparison

_Although cross-validation is mostly used for model comparison, it is also useful for model checking._

### Computing PSIS-LOO and checking diagnostics

We start by computing PSIS-LOO with the `loo` function. Since we fit our model using __rstanarm__ we can use the `loo` method for `stanreg` objects (fitted model objects from __rstanarm__), which doesn't require us to first extract the pointwise log-likelihood values. If we had written our own Stan program instead of using __rstanarm__ we would pass an array or matrix of log-likelihood values to the `loo` function (see, e.g., `help("loo.array", package = "loo")`). We'll also use the argument `save_psis = TRUE` to save some intermediate results to be re-used later.

```{r loo1}
loo1 <- loo(fit1, save_psis = TRUE)
```

`loo` gives us warnings about the Pareto diagnostics, which indicate that for some observations the leave-one-out posteriors are different enough from the full posterior that importance-sampling is not able to correct the difference. We can see more details by printing the `loo` object.

```{r print-loo1}
print(loo1)
```

The table shows us a summary of the Pareto $k$ diagnostic, which is used to assess the reliability of the estimates. In addition to the proportion of leave-one-out folds with $k$ values in different intervals, the minimum of the effective sample sizes in each category is shown to give an idea of why higher $k$ values are bad. Since we have some $k>1$, we are not able to compute an estimate for the Monte Carlo standard error (SE) of the expected log predictive density (`elpd_loo`) and `NA` is displayed. (Full details on the interpretation of the Pareto $k$ diagnostics are available in the Vehtari, Gelman, and Gabry (2017) and Vehtari, Simpson, Gelman, Yao, and Gabry (2024) papers referenced at the top of this vignette.)

In this case the `elpd_loo` estimate should not be considered reliable. If we had a well-specified model we would expect the estimated effective number of parameters (`p_loo`) to be smaller than or similar to the total number of parameters in the model. Here `p_loo` is almost 300, which is about 70 times the total number of parameters in the model, indicating severe model misspecification.

### Plotting Pareto $k$ diagnostics

Using the `plot` method on our `loo1` object produces a plot of the $k$ values (in the same order as the observations in the dataset used to fit the model) with horizontal lines corresponding to the same categories as in the printed output above.

```{r plot-loo1, out.width = "70%"}
plot(loo1)
```

This plot is useful to quickly see the distribution of $k$ values, but it's often also possible to see structure with respect to data ordering. In our case this is mild, but there seems to be a block of data that is somewhat easier to predict (indices around 90--150). Unfortunately, even for these data points we see some high $k$ values.

### Marginal posterior predictive checks

The `loo` package can be used in combination with the `bayesplot` package for leave-one-out cross-validation marginal posterior predictive checks [Gabry et al (2019)](https://arxiv.org/abs/1709.01449). LOO-PIT values are cumulative probabilities for $y_i$ computed using the LOO marginal predictive distributions $p(y_i|y_{-i})$. For a good model, the distribution of LOO-PIT values should be uniform. In the following QQ-plot the LOO-PIT values for our model (y-axis) are compared to the standard uniform distribution (x-axis).
```{r ppc_loo_pit_overlay}
yrep <- posterior_predict(fit1)

ppc_loo_pit_qq(
  y = roaches$y,
  yrep = yrep,
  lw = weights(loo1$psis_object)
)
```

The excessive number of LOO-PIT values close to 0 indicates that the model is under-dispersed compared to the data, and we should consider a model that allows for greater dispersion.

## Try alternative model with more flexibility

Here we will try [negative binomial](https://en.wikipedia.org/wiki/Negative_binomial_distribution) regression, which is commonly used for overdispersed count data. Unlike the Poisson distribution, the negative binomial distribution allows the conditional mean and variance of $y$ to differ.

```{r count-roaches-negbin, results="hide"}
fit2 <- update(fit1, family = neg_binomial_2)
```

```{r loo2}
loo2 <- loo(fit2, save_psis = TRUE, cores = 2)
print(loo2)
```

```{r plot-loo2}
plot(loo2, label_points = TRUE)
```

Using the `label_points` argument will label any $k$ values larger than the diagnostic threshold with the index of the corresponding data point. These high values are often the result of model misspecification and frequently correspond to data points that would be considered "outliers" in the data and surprising according to the model [Gabry et al (2019)](https://arxiv.org/abs/1709.01449). Unfortunately, while large $k$ values are a useful indicator of model misspecification, small $k$ values are not a guarantee that a model is well-specified.

If there are a small number of problematic $k$ values then we can use a feature in __rstanarm__ that lets us refit the model once for each of these problematic observations. Each time the model is refit, one of the observations with a high $k$ value is omitted and the LOO calculations are performed exactly for that observation. The results are then recombined with the approximate LOO calculations already carried out for the observations without problematic $k$ values:

```{r reloo}
if (any(pareto_k_values(loo2) > 0.7)) {
  loo2 <- loo(fit2, save_psis = TRUE, k_threshold = 0.7)
}
print(loo2)
```

In the print output we can see that the Monte Carlo SE is small compared to the other uncertainties. On the other hand, `p_loo` is about 7 and still a bit higher than the total number of parameters in the model. This indicates that there is almost certainly still some degree of model misspecification, but this is much better than the `p_loo` estimate for the Poisson model.

For further model checking we again examine the LOO-PIT values.

```{r ppc_loo_pit_overlay-negbin}
yrep <- posterior_predict(fit2)
ppc_loo_pit_qq(roaches$y, yrep, lw = weights(loo2$psis_object))
```

The plot for the negative binomial model looks better than the Poisson plot, but we still see that this model is not capturing all of the essential features in the data.

## Comparing the models on expected log predictive density

We can use the `loo_compare` function to compare our two models on expected log predictive density (ELPD) for new data:

```{r loo_compare}
loo_compare(loo1, loo2)
```

The difference in ELPD is much larger than several times the estimated standard error of the difference, again indicating that the negative-binomial model is expected to have better predictive performance than the Poisson model. However, according to the LOO-PIT checks there is still some misspecification, and a reasonable guess is that a hurdle or zero-inflated model would be an improvement (we leave that for another case study).
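As a final practical note, if we want the comparison as numbers rather than printed output, the object returned by `loo_compare` can be indexed like a matrix. A rough normal-approximation interval for the ELPD difference (a sketch, not a formal test) could be computed as:

```{r loo-compare-interval, eval=FALSE}
comp <- loo_compare(loo1, loo2)

# the second row is the model with the lower ELPD (here the Poisson model)
elpd_diff <- comp[2, "elpd_diff"]
se_diff   <- comp[2, "se_diff"]

# crude 95% interval for the ELPD difference relative to the best model
elpd_diff + c(-2, 2) * se_diff
```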
# References Gabry, J., Simpson, D., Vehtari, A., Betancourt, M. and Gelman, A. (2019), Visualization in Bayesian workflow. _J. R. Stat. Soc. A_, 182: 389-402. \doi:10.1111/rssa.12378. ([journal version](https://rss.onlinelibrary.wiley.com/doi/full/10.1111/rssa.12378), [arXiv preprint](https://arxiv.org/abs/1709.01449), [code on GitHub](https://github.com/jgabry/bayes-vis-paper))
Gelman, A. and Hill, J. (2007). _Data Analysis Using Regression and Multilevel/Hierarchical Models._ Cambridge University Press, Cambridge, UK.

Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. _Statistics and Computing_, 27(5), 1413--1432. \doi:10.1007/s11222-016-9696-4. ([online](https://link.springer.com/article/10.1007/s11222-016-9696-4), [arXiv preprint arXiv:1507.04544](https://arxiv.org/abs/1507.04544))

Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. _Journal of Machine Learning Research_, 25(72):1-58. ([PDF](https://jmlr.org/papers/v25/19-556.html))
,Xܲl +(,+0̯tT屆Qqk߹=;6vqbF5j!3Bv^[m#ԷKӤy@T2%IQi yZա  c{a ~ C͚j U.)oR;2ԩH鋤+?S5T˵c}ǝ\Rbck5<ugx w|2=z8zpi't:#v4UoQ^U'hݡ 31wc[e/Ο[M MJULηta00m~(h2L]-^n1 Cǣr)є+5<=[`1%c^ߍGBܽԜ:R |{ X19 FЧ(]^'STN1,R>_%u"3~Kh[ PF$9`caG G 2,̦2skΝI!sYj!MBYtp"׍L&x)S݅'t3dJO]úcGhoM `tn Fbk2u^FV{1dx[GΤu51s47c(6V# ̯$`(f]0p?zLFelz>LBH#ѣe1w”&Bn}Ǿ4tc`!<4 ؆ cV,'fa0K&\@b>vTju^2$!-\ol:߶i&[$grKœ0[^&Tz2tv>mm 2mwJ'^˩E_24in`cBJ,J_!Xsb$>VzW.8 | cm|G{d(tJ[`$NaLO;М :VNME,UxIOD;j'Ob(2HLɣ!dZdl6\fKA_$ߣׅ>"{7Յ9͈?d[b+y_QgqĶS>Sv68XZ z#셎2W#ɺdF>D\="Tٛ=&pumX;>&f]۴0ɘZR HobNH+5H|_3<9:{f, n^vX}8!RRqbNeq}Q&%h5M<,Ez54I`*i@pVA{PEwunm8Wk}L_/w ?mqNYJ DRi@ɘ*E&ҹuPW^sɀ,T.5۔iW!Waq>K|Z?LQ0gt|N}Ȗ熨ν'\sHႸ 6j &ө yG#냼68n[V X~)CA[ %~n~+HwGdV@7"NMڰfK?H .I(,}\GZϐ,* Wm^8p/brTggcG6FE `.es 6B<ۤ5&"Av 1Q'2-3d0W-M4âp:g^b|௞#10~QvE|Q Á˷zN@~vqÝjf7'nnc_ۗ"MQ4ÜokAPzJ,ض`!z{Bi",JM+ۯ[.Ȥ#s_b [?"g7GFCN%C!+aHZLMir&J&z-D&,0&̾!8BEu_?zк\'eW j<?GV!cὮlVק|y6\ }) =KRLe6D0j# 4mBxWeu!u6`0aq|[ {}e,N‡f˴O*.؎K,;`6 @r B!KN5=خD8^"MTp@SG]x@ | گ# n4KwQ+šu>;4~Gü]C+{:ӟ! S&}O & aީmr(_R!EmN!VCE<݅m~8Ε0=i`¹"Bn3~X$lE+s^@MDK&1xc0ődWCM.?'^4.lq q nʫ.&VFKWzWdtN^gmc|{O6/R"ے3'i7F/u~'Gy^@9!?Zw)NEFj狐ڷCo\.Ճk?/($_LqC>0qsdmyV#YۂciŔ(tM BEAZc Pl, }a)5ц-?}->jϦ]^ :+߃_WX(} 2R"|=#bWZВJ/ [Ce Q#t9#3,oq~L`t~V < x1gŽ1i/V=Hu)D˻a4`s(Ld.,{X Z /T ؊͗Sh]8Cw-Bzǎs h]/ \EQWcC>j:Ax\.OEkIm/`F•|ė]>>BurA .1 m6#m+@=xOvW ZԂwS=_ᙰkmJFdګtpF츂׬WW@TkJi(t @N=S&%%ֹp-I wǟho1[9vLz77'< \{[x=T}f2DnRHq67m}l# :.+j6}zv}$h 2Yx8̪yn5x=#v&|=rLĚyHdo6f O^V]`U)NED3%gZVcy}`W c69V x;.q'ɩ&3ܚN)f*$W[h,C,/6S 0g pT @wn> kHUTGd&rJt&іl=x/u- PvZ\KfcīDWθ`t :M;bI4;W%gstŘ#|tLz2i*uoQ5[3ے(hdz3R)x&"£>K]`-Hqk$ϚԮQ UL98`aL,+vn-8.nGtbx`cy$T1-H0Ө"Y͘{\F_ݓ5cl}Rx1N<f6A2_dRa_*%Q༧)}X`l|u$Ewą$1|bԃ6bH-%1jJuov򚝆TftXAUÊ|oPP|83{jQ]D] Qtu!g18aRBNic+}m?\Q4<{5AimWb@`l<0uaV. JZ~,RCZ8(hu6/u?%.<4` :aHU U0Z[W6 M uLT*8m#0H)k&y!cX3 J8u<ܖm Eùքft{v\>JanLk7>oD}j%9!ͨb8B/pNw0Gon,Kp5+RpML&LB\9#@^eCV}{5WI}Rw 1BPv*iphӕeח|9^Q:"U?pE] 󌴒s&'B:n \A`r}J& t5l^m1 z#pTHޓ9|T>CM&2@&1 ?r冘޽3##v[?ǚr3NOb8W9AeڎIh!\cfm[׈iw4 Vk\J14H%L8BO|O$< 6 ^irRsYŢhwU1X(t Ky6(:o=,_^?X_$D@W;LUW]Q0l}S爑IN8?o;ig3{ޏ4s`%n7M 9Pf(st#P}#$ QHD` ȞcI["Vq"Hv&?|Z:F)rjnm{J}佘0Br|qZ-xknZkGbMaCO6xFݕk3+);;{/"#bwΥz!DLe}|gf y*VrدO]<ʁ-0y#TvGnh*:{+5kCB߾':Gv#V4-y\GOr"! G0߬"J/=$?j?ZY ߟАa]@#IQ}='u74VKͤ$ZOŏv f":\KO{I̗gs\UMcҧLN ^]O Xe)-M2d >\jnW{I;ls. 
4XXj}l ^NLFLҢzt]L眎Qur.qu{ *.d4s,AՕHqכZ?U0d4|D&1u%,Fo6]hACwNWWvdס=l e9aRZMwQe@īPa ʬ.D%*|sǪNզ18%U5M)l䅬&,JG >{L-e`kA($޸ D}5rӏiМ_˗,ZdG]6f[cjC X50EDZ᣼G.%%rlgJ4|cmH 5`h_O o 2}aЖX.2wWO3i(W4SrJG9;*˥ЗFKVtr`(Ф7y׼NF*cMT6*/4RǩeH?z(} a8.疎)_XK;<Χ9(L#_Npٱ➈L擁-~>MhQz-\ǂC0 hL;lxep$u(7oxUHR}nʱ:hƖz3vl^%egE _zV@ .A3.;t(*E9D ܯʜ,Z ue)si=5'/0qakJ^Zuɯq BfG SLɾYӯ[1imuݼlmg s61IY/ uxdu{߳)}"6>ء=FUd;jIy,]4#$ז :DXGQ}5mTkfD!l2Ul޴ej4@9[=3{SniHK ~o [9+,?uuPNgF<7BDٻemy`oFM1aJ/h+2nqjR RR wp\Ù.XK ?j}leoßRiS $[Zw+"78c8*$owfBoczqN{'Lxl|4io=nMW"`0͗yʼ ِ[U6>_Eҹm5/Vc9Fh̗^.a|7kRA&~Չ)Ԣk P _vqQMP[rPvϦSJMƔOfJ7GjiiK=A8~cYC@?=P>Jg YB#G ~TKV]E =]^p)  5#my^X-Rm½*1& 499 ~hBD8^>~KձH3a],V5ʂl&*lc,>c3-|Bpy;tĎJou-+HNU3.sNFqxa{޹Җq8a"hBUb62qpڂ]PjK%Wsx6W WȰ†h~.S :^}|, DUB_XTΧEƖYSHJ܇NHɧdG#sζ!ΊȨ0t o(p贍ߐG-P~C[ʕ& a4j_Dx; 7CV3+ZbN-} h*W/H^>ir2$:NV*3h7n)0GP/3A6ŀxQaT)6v7cK}vƳh*9>5 f}  &n}~$@ΊN1\xh}쨸}m`ͅy@?^遇uKL?ғ^!'7#gy$5ص(ylae;^8Ꝑ)< mzPI\qF-:aw~Z0 Cp&tDK;w>qk:OK'~RX-r_ l\(O p"MT m x"aƪDr$Ww#kH$ϵ#M{XoGnu.I]):yLc@^W0D,WYY47fI-˻TjQ!k5\J@7y]6:'!;\ZJWsi-uDwV,L:_TsYE;.4ֶL[n 󿕷$PQz6.\L4Lw{iDQi Gぬs?syZ1ѤՖ3mۛcdLAZIM%Hʍ$OPt}KNZ@rnf4 NM2w8&I;/22c2"5 BEwU{ʧAc S\@+ 1 ‡Y'xY>NwN|0?]QT:k04ckb-c6aَ~u H &c]ʳf~+ =ɮgqX>Y59鍇FG1ލ'v6U`@0r,9u7Bv:n+Dsϟ>y=~,)9q!͒' MrܞIBzClr}SCx~5qKvBU$U ~d/"R"@zعr$tseWF IoHuv̯Fo٧|!W~l2wҖ!fSץw mԺn5ϭLSjXK\(|iiE-pwLFe}Fcp%OͽċP\60I_tQJܞmx`pr 1QN}P*Z1p0쇘 "L@p!g԰ި@,o˰ =F!%{ j.6(b FZ{7aZi-=MZQ8"[8 ,C&DW#!dY=!k}"OPx؂7I7=|Ѫ,JDBP.3qA]QRy_"n+1b`Ζwn)[Uk1l,0aǗ,ŽRNoIM^W! /d L~Օϙ[HX4 G8-,@<'c,fqjhl:PtjJƉpP3u#Qx7 M1\~%Q!k^;> ~@/LdF30l !h_a]a%fw~Nԥ,yBAōWBqJ"+dS{FvU؆*L@1ʊ1sKOJgdz!JdkÕa>d}m^SA@Q T1/ԭuJuBl UFDdmi ǩq 0 P(z}SNȃ mEN< hⓗ_Ar(ءY/H r%rT(|7Qh`]d+vm)\|"" `:"5ufOa8#Syq !jˌ9mRHHBg+=@߅f|,ga % ܄t#oH]:m*0)@^ 8߮afW|r\W&PЋxqz(l(ĎPŊG aG ;B"wӮQԶgڐ=WQG7LtLtV=HhJr~;$1 kZ(NB!ےi#X @G:C nVm߫dFɖέ<_NeL- o`WѰrӫIAQ;BFz!F c; B:opf3Tb0f]5CzӑSG'Ε$bRIUF@49h +:>f sb#iZ \Q5̮6 tZ `)]گZEXCσ Jr&, DE&!c`$nd sMHl.kOTrG g7/¸=9Ek<|3VXv:%Ă:z25-X~`1ӊ<ےњNHqnU۷MGA\'B!< 3"߰Q tmj˓԰j-.oxb [rW/i7siՠ1iaiCPoIA&PG :z&c@Tn6E* |H>_IԂ@bD2vЪX0 DD6L {[9Ȁ@QF4|Bc"ۀU0DH@H>yK_r5) u]ے@#k^k1TMlÔ%oKVW}u_h&5MoWPenŧkfH3wO!7-e@̼=Tc|>[" 0Cv3ʣq5 Ν$e!FSOq>(jkIbp^:)yt(Lpx \qJW3XAsGX Eqa4\{jQ1}ppHҊ~瞱.Y"[` j 鯸/m3ђ0 HDxwP-CqH_}Bo ||tl&k_팫2a]A1.<,f[+ߪ!ɷL=Lt~nvlL"7#HdG.CRtwqܬE8|"F?\D"Vn1 ` և_=JGrwrE( "<DD/t4uƟ-RYIkQvOXw{!@"?%/spj38FtZh7e(?p !M G[BG" Yl/9$H7Ag"$[! h l4Bا&( Ds"& Qե dAbG"Az|mR| ŁN_?Rl$GD;u.u8|0 \͕!9,%p>X$ A0x̬h QH<^UX0u\u8bCl}%,!ĸqЖ­=)`*A J+^{o:mi\]}ʹY"nu2Nؖ mAE_$P^.$GgO|R# !&arfJ^S肑@Ec,t"Р"NFZ<o 3&ph/0a/jr 5>С ~6xݹkotrWo;KisT{֟^xlly̖*X]%7HcW.*!G{fDK¬uT^ATq?1yi;z03~{(QQ)`@w{,ğ2[W3@;wV9ܖRD}vN D6l+sr:A ]F|LHqegpo׻MzO NQ.9Z%< S9vlQVfmғ\4h@@ޜx ƌA%6%Q 0 ]ECLhZՒOtm-{QPujS%ASֿ>)pŽ,ATzDD(.J5OX%LjoD/wStsW2h8z'!aȗ׽󛟡Wys=(‘ >&hy t&Hxщv)HFytDzՖVDpo};75@6s?yP8wɨnSxZW #ãYDs$uȃ%< W=l1Q8n\?8er̓ eSl iEߪ .;$v $ou*ޔ[5US0ko[pB:Eqnqb+_}GpEKzl8u$Fޡ %Hr;LmCNcYĴ_K82ӿ(gdhc6~=^jݝt MT]5e GfDV| O(!ׇKrd`:&")tў3mqwC< +NesͽH%q # aH[#яW5*i.V_5n#異K_t iIN#1B 9>B+~:G;C:V_|ڎcMhLoYhl) =MUm)fDA.Ǭ@ӏ, =qň|Q_4$ek)rbw׵p 'Ro~VYj6ĊnY*9 r",v;! 1tFiPc<Is~ FgLl}4E~;ƼWAv!C?)?+UЫ?aqGIhnqYeV! 
@:q sB xZH<POZ7vL(܂_{IAZ׬@ 42Zb{@<^xWb JH}!mQ (!Ү*T֐D|?'SғZ|8,P "-B8\ *yеI*H@tb (Y *H(o!( vACjdeT=^LT!gv4]`D^ pΤ )x|ӥH(@ՅBC9SZh4+RY݉a3 qDؐ B~Tk[b"LBK^F"q pr͑c$OF ,MZJv޾n J7b=y@׻1"y- "u(P 5CHN4zK[ (u0ue2@51$y vCiƟ=J W4Z-sEY+ROz<Ї AQ# H7[c @Ȼ VP#MO$XG2˘mN @Q.'0@T!#~ՉSU!o16dXq I= ͞B`~RW ]FHG&" IRa4~,%} 4ג8 Y HW|+ːg`÷緜09AiIѧEWk 'f_Ap7"E*wyF"A!=Hȋ率֫L4b ~wWo\Y o;VTPzM@@O*Cm)RV :aO(85`L*$xKJVGnyY E@MU 0 }L9"#eC$ (D > ٧`IΖa~?4>ڧKpd:s@n:# rX%%F|ֆhWГ \/r恥=r۪3r~:=e`ч_)e]^q7mOಐ"*AB6b%0TL3c3X-$l7"aZz@cB!8pXʆ(XC CNfjqn tC(BE ~l ,"ξ ˥Ho~]u;Oe<[i0,yQev[UA']ERz=Noo6Uֵ1sϥYW'( V0[ 0rZYpQj0zvDD,BHL1GZ9RSGY PpPP@(*HC!IQB@9bGfzH >A4C턑˞eq y-5eP7;yIJmhB SS^BQ Qx>?^Tk2]vtB3\2aaR׎ArIjb۪rtb Eh `lNĊrz(}Frܒ)=مhN r@^$Y)"vO #~+D];:?ZAǪ2s#A?UӘLC sb!RE .nҎa/T/l &/Ҧ`ƒXMbEhwxU\k lkC qy3 xaD2μ%OYVD;=p( vȊr}`зHtR Vc{|v@jC}H= ;@P#(GAކܝ gwIX *ܿtb`w$Q&$T`a}U& "!g2v (, B uPH$!3OƬhi$OY=\x!H"x9K'YqݕV2̝Y c\MdƉW~$B7Y sdlGh&(k*"( ݌)Eě/[y1p:+x:Kd&XN'GΟCRbĈ5ʵkoZj$5Q  :Bv82@+?_."A &`f_4s⽘"HlƑ8{aXDüm#cKgE] @]_TMLs# b*e(-^Ƚwm{.obY!|,P_.&@VSƘ[IQ(BnGi)דlj{_d.͋6-1$nb-'!x# 6ÚPRĬ$W 2za\@T`,."q ,bȧۖ>eEk8 $8Xbit^4@_Àf Cgj{R`$`L$X,`ه>P*|hD@K_q|< u:> |J=>dy"UgYXbivqLB;U B[jH,%̂5ALDBVG8fxhzx !5rԗS aW4hGBLHkПANjfH. a̅Ő=t>E!nN&<~=܁ポN}wT0Ǣ+CE5T5Joo?K融6RL^G= w̚{iE 5Nk\ ]bnq[z۴`kA鍶XCw6eFKlޠZh7(iK7>S#[VmOu`,7|6( 2_iWy19ܗ( xc !Ů~= Dg9-RutFtdT-,F۪.c p+ݗX0 . O8HPh|__;N!L! PiR`" OjD z4!Hن(\MҶV!ѻ-jR2O`oA ɸ '90nA\sDy7]7]/S/h @ZO" }x9Ox ɩa(AxV}b?e ?T Ӊ>p/l`^%r^,j1 ` [C0r'Qf3 s EF)w qkd 0@87^A|oi&~Y1xN^]ȇC\(@mE@P0͊Q#Sp9DVA%ۤyč\5I߼xOjۉ6h3v"լ\X;E=> 2T:Ws7B QmZz4qz׏Q@0\H{EГl8)Xmq 0PEy۲Kcsw'sOlok8GD=G]6ۃFvu#=ׅ\xǑ$=XKl%+V%&a x y"hF\L>,7K-F &t!rX &HÊi`DCD$Yk \CE`;>% yE *L@12V[|~*4!#!ҏbor2@bR> jưѴqE7JŒHy?"(G@83DG1Bz@Q++PD7Dgh_GtPWPj#:bgIk9 CF5B@$rq ! TD/E#`%.(ԍj' F H1 @/,\!݉WD!/ЀV y9S Hq4Ɖ1 .p 6Di츺 z"@ #0"(|#85:s5FPbżL>|U#`BDy a ipG:@mëu&=Qm/3S`Ո87ɘT.C< a-QjJQwrw+# γR(giӣ?f=N~CX <Q?$+ qZ*2SnSa {^JRKPb[޻ a*-a prXb@qs"@݃Ȣ!CItCؙL.%b G !q$!>'کsq&WB[H}*`Zs 6sdSg~oth ? ooPgu7Sk{5 i)<)v>h!Ji0=mڑ d@Xpy%,\d rIߗ 2FMBxԷy֨FIys~ _Z*fb#skBQ_R39|b莝=Dnh(,iZj ȅV2!ʈ 7srzA RR C҇~! k?~f=Y"p}ez."_9GIm!)*ab 3e Hބ'r[ci\TJ:^=zM ad0a]A &pM=$M|@ΐ^t'O\EbPln $ڞrG5'It'WN皌MжW:O; E>(FG"^/ ߑ>gP!Ia`5XFy$mm8ZRyjǪ| kǸ!odXeUuC@kMYN0uymZV e ;3 "ېs^S^;ACQ mceKqywrrЭm;E_8[xDy _ 3=amIC:>apE o xXrWts|EI6,2Q]n_9"2EN 'iv^>X 9.o>ezh $ۈ]ig>`QK,[],Xj@`ٻLDZ8^t _hJD|]Al!uJOlf,n|>ei /-_tež/Y7N9Bq&,"6C Ea{MA%>0eKԪۤ9OSߚɿ/6wO_=ڰa -Y#cO6E\+YB #p'c`:nMw+Xcd{i \|_hYիm`3@4^[$(O#qEZ5u%k1H9=>)_}ևC:~znLO.L,< "һkcQCዌ>ZLtJ[3  j{>Bž[ {I.ӎlEo\ +Y·?KQ,$1[S>#wU~oE7Oxdo֫|KԿ>b4,9\I0QPdzBӖ[T.Foؤ4ݐ]Pi2Ź%} $Ȃa tiq= .2| 9jnǬH,b-ɩŨQ j(oT]ivƐ(qw-;zִ=& d8_n#g ǛY$קx=嚭07/=-Vmsp:HڲZGO.׾"ͦPOnRf. >7Щu! ƃaD\=;Ox/2FOv" tso5.B?v\1dm۴[TZ"{+7_yH1"3cM-ڃn4,m(|;5xiL7}@M Af򜅜.ܒ*\'7@ 'R|4_X̏p0CKիf/l}alu{u "PXApMJox;GZ-7eTEo"ݓ}x,-ou?1a˴<<( r,LֵPd5ːvՎ9a"N͋?dy{]W {;͈Uy&bfM9ҽ<Os"vX1/nBny;%>.7"R.G@Z=fa> `Fd xh[;D?曕8(-3{:du$u khI-g~L `i|Y3!s1 aDg[-_V765*firQ/cO/IH_Q-΋\7fhRPljNI;c05$E 谝gs|?A]%L#]AٴklݭZpP2,hnPBoᓏe*L8XEu# hҽ]yt&06 ]~8ϿE<3ڨE `.#MaTҲ)B*UuBMl1T w]x׳Q(*Uy0Qna˕C%zjHr1OR=r\ur ( -[Nlq?}n APrU⯺y :. ?Yu7e w>l{{+xbu(!G&ŏ1RUPE:z|ciQf3s?O|}EV8fTFKgۼqߏTa![~N9g֩½aTSijÍ36QޞlQ{sj! ׬@ =tٙP(:Nqjcԩ)p. 
vQSLna[o=_+5aTw1|Q{~Z&x'E;l@Zs-UW޲&B$I^yRyKⷊq kKSyJBl1Teᩩٵ\{v^Y}wTVo +/o@;cTFpP7$/rz?fiϦ<5וMU\dӉVF/(vij_YK$HAhSLB8"5(),(6޸RF4o-a Z:sۅ +VeJq,sp!ٺ@@}50c(M:槗d(ty5$!ǩO9Bdfd:ɬPRʑת!-VB#-T#Q@U_érf_]K?yW/'lǷ51E(jB B `jȭ7ӱDH_,v{a/O`C\eJ|ee8!P#Ɵ2Zt h5{|[]+2Haazimeܶ%v8U_a]ɀOĭ`'<0 ğd?<4|^Q&%ҋ!ۗQ\-|Y5#){٩Z"oCg8ʈGR7fwA@.LJs ~IQD0DຕwXNYm 7ԷJTաN:|hSRtQ`qA n<( :A„nG>.Q}FӃPV!sQz-HƜdI6H#i ;JvE@ tjm2/Aꪄ 渻€mbQ/SJ†uqt8sn7Ɩb&WHWF>۴?̷ujܫ{tV-mrU;`O2[l3B`]I[5PDϝ8E*^;:^Ӄxe}T!F;׹UKp M"}K5;ޓ%7m+ջ%+mt "BLU}ol cs&BLM +Jucj]6=x̫ Dwƫ3N vޡS1 H֫^'lYK8^JN o$:#?𜰄(xAko| p﯁HCjPeO<3"z" ljadӜeo4Z\Xl٪c4q.//ڑ?0;=Fhۈw+9Rܟ#{t 0.Xs-'u#I`QVT*f,;9.ۜZTY;1'ڂDiʁ֡!K νɡ7iıP6iW!PKƾvذ[t0Yř`FV[hM'9mA:<ʡn0n kzih}h+Gay*j(Q ?$NKپ35{:5 hmo[O*wΪ{ŝd]N$nߋ?i;g~@-{gAnp\V&atN^4߄dSNe>Rr{n9W6iω / rg=iAʖ`Ukݮ="5??."%q`m/&G˹Akƺ^Au!t.γ5ӫJ?At0b^ ՝*[4ѡWs׹%N3UoѰ~JSW`'y]u|:ezЪ{a'!=*ƖH4#pʅKsQ,tH~Ik˭|qjKʱO-U!̨*ssx:X: Tc-]Mc~k|7ȋLJo"ړҔκTa% ,t/a#6щuĕB\֜?@xy)$a=·CֵV&$|bc-?})ԚapTi=ߏKO(STDOØlڥ[[$3>y[FCn[(1bLUA? Tf=ͽ&| C^QC^gTw??%u.ѧ7θal7F{qgt?O?-bK/Sn] \J?4xm[7l+k@UaSvr#/%|YI8rر|nyu/D:n" XXe9}TkI!ohg-*;`ʥ<J/5pp]'Z?wXyDC+Ȏ40Dl/*~@A#|vl{EֵGl.g?[078Z@=|b =;r>e X-*RkVXx(lkrA&18R(Hvϖ4t(-mߑzXv"ƶsd('ՙQ^٥׭9-զZFpM̦kRb#qEf|Yn0f qiLj(%ߜno&b^^%Fo(.ޏ9갑#H{b 9iľK\ 7@A h E; WQbݢ̊If*p8,N)#\(gUYZTO#]"ގaPWƱW5Zə|uϼ!̘<qE{VE5 Ssh/ -Oc]یpz+X 9#򺪧6sdC֤!2Zj$Ykuλd d۹y;%Zg븽oQYB\]_X[tfN%tLk!Gؗk㢞u*% /dZh:gk?ZhĪ!TE[hVJ0Kk;2*YBf"%(a`b{L#^pL`cd*:]}y'#Žp,ïC+9zO" =xcht'0u.&m[6H>v*jr9?ʎw{mמ)\9uzi|r݀z=(6p%Ūra &I8MWeYij7\bAX:G/kii-eQ0` {@(µqfƒh>b2" . & md]7LI͕dBD3$AQ˝4KzeP~-mO4xc7zS3 =ir"".`"'5z;@gpv^8&U0D8q)?@$Cf8am] e >h@X@?QbIҿTK_ ֙܆6#ކYCa8{&+ yó$ -}"_cל2ۉ&0ڒcz;g0mk;Udr=ԺrtSqwkַU'L%pL+p}0$!NLlBfq(ޒvƘXSH>l1}%* 0HvJH&qZ2DžPK\E88JA/+9 ^Wh_,SS/IvqJtQlW(\hΫ}܅oJ5H )H6{4z xUD}hɥeOb{#M\Ua>6k&)BZ1Q-j~:찏nBst|ma D"H[j+Pm(-o/uy6Q3T2]˰t(2U rrop_\-Ŧr,ۘ?D> e2p/x6fa^YAyg4v5~!"4Hh!uzz;fD<PBUu8|K ȥRu4\a5[`r:T aG6= ,22.6hw2$^޹q*iTq&O)\. ?RN˷mfZvdeor22I$O8v۬+Z; _{>kk-.s["Kr;u]9w].Ë́0f;#L\Hc9$D;={q735^[Ph^́ 8"\4i!fT (ŭή-6!(ݭ;z_3a4ퟦ/rԼ9rB!_1$$-?hDjeg;!q cQjI4P%kϑv.4PEvUwгw('<[>4|D=V,Eahd{X,n.u <ШAXJ~ϋ3?S_祁 PEF` oۑZ{:vv) SYBsElZp. x\L~<.,p5e;[aQqee6)1_ʐt! ?Vzwřhו!YUk+ƣ=Cyš#p#,>jZCu҃;k9-ps߈\|9Tr8p@<5AԣE+\F<^(u @yofJᎦ[9Bj~5VֱQ9::| CUύ6KjЂ@!lV\R&))M8#BZ! \f97`X[%DHL0B~!R/J}"ʒI$1tXހcf +Λh0ۣ{^:[[y-@y5)?]mi|Y䦼&5HI` }Y\QI 34M3jzef"0_]uˇHlUe~~eX ڦa$!50c@twJV`;0W=C̕yOv|=ۚ÷4]P]If7L5eRK'%KaQ$܀1t;!I8 5xK*؛ v""n:B>=(A;X|cLdѮ^'{˄o{F\`y1~Ie;$V+^om!]ƿY!\ VsN5o~rVJ[΋Y񘋟n܆mC6nǫk蟥y4废G[䳃+Tq伥t|q- $l-.DW=VAmvQ贞N~Gud-Sx q STy#0S$1+^t X@㯼Ba앭ҹ~mqAq6Hsî߲Z˯HtwqHx_<'(Z(4-t ~ݍچ_\ީɘxrT:aI_+wG47#u60Sy) nJ!0i?}s2Rjp`ݢ:ReeMC:haИ̹}y y@WZS`D. 5 ɐ|lÜX/m)؈xUhBlMaΉvpIW\C !a hTӡ w { 7f͇UKOL\n .a+'а,\^`9Q1::ì\U_N`s} 6xFl`4)zWsП`'z 1_C3gKԅW~$W(Dz81Uͽ1kg ; N+Wf!_a1W4cڵ$m/~si|D㿗eSvV*┯ZʜBdi7n7(|2F 6R2E&oV/6 e6ϋY2b vT -͹g?A^n1`p~tn׶+cn{Gɋ)V ϟ!qt8Tm e|WȵTsGOQܺX9Z&<]7<4<]姚-DB'˰q)/86'|Ԙ /{5nVAOR5 K{JMaW] /T+=rK|Vr<[_y<}ko|tkdx*a/>SV<|R ]H3J+2a)s)k89lG@S!X-P U%{V,~ È$wON'Vꇊ2ؾ!DK$8ld5gASMNw?f\+ѨIRb~O S-m[eay YHn 8bojHC9x7Tz7 ~9Y zr| [ 3Fm1ӎ2y6yC\lZ8)c~` +8Q owl'or8W1HE1L=n.t=x x2T$Frf?{ȦVw1%8{ V}#w=e! w5ϲjQg{D`B]b/:ˢAzJyF&f"4 t&M{=ӶXE P<2 iP'Vg.]'8-2%8q7˹Rc˸9bq/Lb9%QoMG㜄}4mX.5!}'zqi3*q6+T)Sx}Ma,3Hԛz5K(sN:2Rc=:<6w3`qٯҟ+MY4~χUT`ûsl8Ƹ^{ߘ}+Tvr;^-'\JO*/@t11>?ш Z1(;nug!!urP &d駞7 u?[jW{){85y;g?p,#!tK)xeW/E }:z>,D 1TmOO.zW"4@V)IKR.Q`\! PHqd EY?.o>ޞ?b8x@6'1̎;{".7 a]k6%AEOIpi"KulʕJ`}nDC!(n/ު_vlM `hzz&vaض;,;n .e/֡fXAĚ|A^(B1R!K))li / 1o=ʯxj\IP5}(?:a$O5.ќFjZ:R " ˍHco`jPh4*EOPt lIt!%H; =& ޸>{wѲuSj2ίjiq,eWsL%! 
$.osk40cxb"oY>}xu92DWed8wߡn-||*݉ڿEdTBY]o X*QV>PT6@X?aOzQ\ us,sB7xd[)df}zxW檧[H!힮tοNK(ݱu"@8Xl~pjyvƞ~aF,C qZdܖ ໑F`I@7T*7,1K>nΘozpJM|=j 1Iyym~,J= ̥sL7ՠ;+]/CЇLnܞz+_S޿rf %bۥR ƚ8?є$g2Ϝn9G|u0!~D0+&@ l+*+9ɫ'~.S} ]u=|j]tQ|?Z 9oRA0 @ B ^Wl4Ar1Oْ0\t6{G}kr[:c5}ZBL n`G$A;9GO0dA6l,iDti}3`>6Ekza["(wl2z` n:Ai3Ty&3vBxY2ABMp\!Bc]wh8fWi+}#[khe4s,UmFI8tAJ>I*ػb]~CN c@`D@Qՙ$JZҊ? ,ϻSBYD8߲@ o"P _SrL}U?@A{rW_DwGuMby3Kp8#delDP7 Wfjn ӈ~#v8 ш;L3 mu4| f}/ܬ^Rrp?== L~O oҋdaJIhgxvɹB`wZi|3Zx C-ܗki0m?9x9-dĈ1K%xN;~Em.Z/T%1;d,at TYJ*;J>B&=EX)1{)Iы)ǵYM6s8cc0MVkջ126B4Vk:YbkEǎĕ΀=XFvf ts=a %fKdԱS:9yq[P|\o\%fm]yH\]|̯ a>qsK /u-tTr]$z 4 کl0=cUpy {F@~Bqx͐5H\Z B$b@|@fܧo'|ݵH%aW~ִ8ǿ/ #; ._rtv˧$̺<>lT# /Nآ4GVh9{שiʹ JdyrOj5:Z+ =,>|i0J wSӆOZRjEr]s߰A>\ñ$xH+$w}xpz.e`9U=.-pi%7cT[XCH~n4?YVvk:# W޺h/^էko6]yf=3E]vMXfPVB JGPzZJ5vpGlǬ{ dHHr"ueliMr -a|@~R<}9(էv Ϣ9_:!tx_Ķ/l1HGoUtvԨ%%tu0.C LӶ"eB2Hw9@!X @혾Ne}6wXtl UR+y:O+^˙e|}HZߓ! ;Dk4-AP> OzQ}iO^H:UP(<]rۓS5]'gֹ\-!!hw խӳZɊSqH 3X>Jͤ0Jif+0l<7 dFlB+C$Ac.)]Ix 'b:mR Q:%V2R0Ǘ?s ]ȵf3Fȱ[[}s+mN*S;)0?^SHcJ'!ͧ/[G3',)25CydsknZWp/c; 'Jz\qy^'dY+SF 8T^ܝv{U KK;%IOf,Ymp3m eqMO M "װǺDX7Zf.z݂|jH{Ϳ?2.oȥ*.˗{ë<%fϽ[LLj5\:lm#.tyi;pwf{9~?m\mn*Ejv 8bRV^n9[yj9D?F@+\MfVjZ/)od$~N7G xCѺLURC ҅{޲63stsE8ś=/ 5fh;Ɯ)rÞ?',u'Y_S/CKpz( JG.@:  3HA*gV{oҀz iq9Aty8ե7|U@h9Hؾ]@ H9"8WI4 (6RU%1._'c>==54{YQ: L[bθv ~0B ?u(^@<}k4/Cl)*-:ޘu 8a(ِA1q\BJW@55qA&ļSSChO# ONMRTu._K7k3 =(!v/6ZkVô=~z=u}fǡ*%p7EqS,"iOwM^ C,vv:p{?JՊD!]rFUXt-}eWذ!`n%V̐ Mx 8.=3l-# U3*׵R6kAWi\Z/wn8CUI f*@}ª'L @$G F~$9Fklc15-c5Gih6#L{<2C%5hat?BԖHYMrFΧW@_z8nQںlG 90b/u&*Pqw b$U|@,}pA|gM|X -MMeE{{Ow>SkWJ5q`\"YQ!p%%h/ `OC ^-e_̛ axk$Fw;F0*vC.Ƅ^snѽQJNbgP{~l=Ex{1F4\oUM=ll4V"R԰xApXAQsǴSVWjMp}h;7¤ݷoqQr|z!Hα 4rq-asC%$Eأ $2wִIpN$[Œ?;ix[Z9rb83SF{N. q cǮ]Al#/ͱfXlrί\.}f $&>z F; A$8B81u3 1^ZV~Ɵw|.! ?`bi/GnylkBiVE=Vs1жЏ}4:Ea]pkr^-@{h[\ ӨNn/w #_q$RzΟٽ䶞Dr:/#w)=vWw"9|L&sVDdT&eQSȰ ӻ>=..KVFdix"J3 wHGwCԒcVN:4PpEIY:}Up0b^;Fyl.OKT~\5 jc=>9z4{ar^݇ϕ,ppw|Aq-bK)m76z cQ,90B@62W4=|_},w3jĹؾR_+GjliT+vO.ɀPuurpcV^CYUw:wm}ˌX=}T4kB R~uW)yLpl}JFwu*<0{շ|=-Uq9G L8P+S*]ْ”h{ԫ~=e|xLI!b͙Fzmrޚ+ث|ˆ XO='u^k~_ )fY`jۼϩܳY6!vFxr:2izni2a3/V172LַC#Ow53iX8z}/*pһңwj s:(E-usuFTB!*@[/׶ш7qB~UкO-xwb•s^UJDo?~` ӊo.)xݮOށQKw?j֓}H%wB6*R^WڬxOJoxDe$vHNZqo;j( kQt>p0<URXRAݗ̥eMջZo\6Zy\taǨLۥV1׬HPA.Cr/~Na\'sW~z11/k[D`|G4)f]sb]]*̰x$Q7J c?ӱC0DNȞ 4NJhM b79 P ztE2Te}_)l;?D\t>I]yZz0mi?ȑ>6 vc:6^.*~BV${ŐoB422#96$:>&6;ґ ùQL //o._6' gd9Ʀ$Ne>E$)!F\ 7PBqݨa4Vvq^wds,{L(ywKǣ4vns4=gAPaV::׺toLyd&Xfq>v~ާ](u 9aV*7 yyW$a+ԲX K>C&SoTTe,uhvܮۖ+ KvPjH|pyX .g]n[5>]Ó]^6ʃOѳE$uV|7twpr Ỏj"vzW'}:B%[QJ队mbէ}ߏ-~:tO @CzwD$Jh }Rذ/R .%Ծ4$X1ו$gN!nXFh\U${5;;g-Y;jTHO`%wS.˂1 0qo!RP`*y9i$nt* jq睑uahڏikF6ZV^ mcm=['v wGlq =@:5^fQ8AuPEZAţit J'uN[+o$'qX U.5_!GWeo|lo_cq`(؍;3=77 om&` 46 h8`F$~m^ WsW^y0GNl>jAݞx+IyB/70*2emCWqvg0Xڿ;7 7򹻁o@L{&\V5B6sЖY TRqԄJz ' n\J"J&`.TOs@KЗHx<n"9 jUi@(X4v=N3{P\.Wf.i^o{{?H1Uźcu@n, m6X(Ae& prNG:)\}\pL cg-d(p:uj2S9rCse:7&!M*;(`cs,| 9j̊/ҬfBttz2 s|b683km;;Tr]׍CzjWs􇻔?j2g[kΈdeD\nφ¶vD GS̈́W3K*=z@n/?obt(c*i\對O7ZYy6_:rvQɟWypcVEtST:n&=]>5<Tuv{CN]?LH7+5|$ڔM]R>!S0AҍLq׶n:Q͖P}G<^uY/#n(3`4(s;6mu}y:_㶢xۅ!{mMh(s,*nlSLJ.nyQЖj*783!GSʲjخ_Yj|-Wi"zq* Oɽ#h3RoJkKVٹv>Kޏ,~@xWVؕ>4'^7REulob xWBTI|Rm@? UwKi/>?+4wk1iwp 0!ÍqF pcv*1n*^Ac[>ضBpɓ4M׊V_j(%M~9g;z.?y+ۑԚcˆ1@|z(Gm#2Q ٠3)#yn՗%̗cxO>e˄?JWT$Zbt(.Z}U7dk>i fFTATI6_{]̆!5P5C[WsV/DZZz)r%pvzBC [UP.c㑈鮻 kQUm݆!}QGΨXbؐVqME@s]U{D1>,~,v<>#&dti ȄRl=wjCqo?HpIIRAQ+Sm- sRCV>ğnaU?]?C=2A" 9$If0Q` Ւޝk1Ҡ'R׈}YςCaZMSBS #. ~Jt Wgq*FwM Vmj3AdCVqjct\~R=8 5 { PEa. 
2iLqY^ 1Zr~;ƻDOzk^kFvV9CMI b9`CqVƵ~y.C1j5.;V)o`m,/ ϑpuQ_8;,M+׾3Dw_x,Xi0+=MeLa@/p8Wi(SqmT\]P,|Sˆy/α^^ @D9j"+ѥB2>}e?{tJA#Hڻ&CP-hi=^IjG>ɕp1)ycyb&O_4 M=.CZCm()^luT='dBҹVڋQ-)`3{0{yfE1;dVϡ/XB>wS#)[U6=Vㅡ=` LnBI:Q-JTD3 4ةP"g?/nY$~HZ?Fv0غVK į;t ը[8v;溻N1S7qzb ^2+[QˍJ=e$7kƪa}Jy< Q^۫_͆xG^ZLrW,@L}(ůsyQ<{p0ڙd8'lxMV|qEo6-Z!`GmxR)[~!0/hzFO;2{ye[5F [1YkeJ%֖IOWJrM3]C\fE\S!;vl֔ʋ'u\/bANчPh_ e>얚fi)\@?iuB`+gGDwxV/.@ݒjȦ8|>)2C|M}okm-Fu?-fH0 xԻ{l.{STvx'o68BAQ,"&10QB}x)dWvz[]Lށ%6NTeR^?hu%EKY1QU f] 21'*1qg0)Մ֗$;SľDE!pcLqq8u iunoI;AmUxεx:[P1 ܝGhn`] <ҐͽHH߼+? >a?c]b~z=-M}hsij*:@B{ .kCR., 0`r p$M TBX(-H,"k"֎>" }~@`4&-פ/QR Mva[<`Bp0Z#-lJ&G9==;ِwH,äF AD >5Slo„TārL5ásNXT m,J\NV?rB8w [8Lk)7@2#8KSBrP@ZgkWcP8J|o l_+}6[HB8[DMCT"IqHk+kICM wXz.Ș;ƒEf\> W| o?W)Bg6\;:ӡ5 \ʷ5H*bn񥭂 "f仭R=V}r4ݾ8?5AQjC.]&U AQ9qՙTp4slx(֗nY6~9(-؁aH: :iCȮ+lGDjBN/#Sqgd$\0iJCUIP)2ȴۉr)qor]ȡ)O@A$h D@ # Q`(oHKv_Wl^rȇrJ@cKqLG$tyVldsH&{,VnHc&l[G345lPeGw/{@et!M'Ԡ= H(Œ?;@=A-ؽ2Cp$Fյ'>RkXp>0,آd & m˨ӡY d͓^1EP@tGRhlUj Dk(kCsp+6 } 4쉏J@++ 0w,]i_H:0)4sU:E&EY-])#֌vסwb/ YTNa>2X~WĐwEhHן"aM5^@@C6I(:<_]/[Zdb:kLq]#cXiꭲԧ&<֔pA<$q_`@%n]H]L#6&0\K+`0;) 'NI{\ʐov3DA@8L8=͊„kaUAa4.N`(։42(#F|=lQNໜKt dirɛL,x #A db4n'5#^-GlJ@|2 5{|儖! ) G;9]? )7 z1m !Ey*z⸠E' C*RBj${]`4{  }JW A^>5Fy^=_GbJ""Z=TJ쯞P.8Ma532E tIxHW}ԅ"K\ajIWȐ1c ѻ.6A@IH!q4v_ 5gO{nqR4BBxn%X'i_ȄuY͡JۢoɀpT,^;(2A4RA9EЦyHU2'x5m"EAL %ޮUM`Di|O8gz[Q'ª>*ܧ;}εNɄՒA# ~ȨDkcF`XhS]R @Cu滫ma(zAebsgԮ@nqF|HqM0XB6ɽ55r@ ʲHvF@BY*U7檷zsURo>$hgH֒3BM>7`P&P;Jkֲ DTD5k})T$HTiA@@y:,°شk?WWYl=T#.+g+]mY1ýH/Af. ^k#rXЫYT&|X;$x՘q|v;-Wn(9&-

X4*'uFDBNqsx|FԖ:F^";Ƹ\vDI794^KdD̑x0{2rҌT9:n\F[`isX*T!a FZUanݴ Puѯh,oCz l}j5/&^D:d AmHT>K~G7t>kMo_0*9_z;/`# }-;[vG{Ob)v cLĤA732xj.YuYf](`9B7#'A2QX'8uhv51g).2km[`;Ɣ^WFC{ @E;1EXUYYhS1 o ~\(C+cg~Qu9(WM3e‹hX Tv/crNPjw!bMEqVQ&-IC( smԀ0|62D*|!QZ%A)4 (o$}|ӳ]"*5>\$u[O[ ,a Fo0OۧA(i{!_BX B(d'KS1} O[c[P軕)8c?lOa `Haĩ[e l0 &(e(yOM, ' >#WXG96=,o͓T\]WZ h7X8O7,I r2talACژ&( ֠\Bt@OgmO,AƤXEµ>n''|(RKD0?" x.FiXn"`.F^(Gf0ne/Ϊ rQ豈O0V`w:jq4K}2UAb`&x Ebz|U}IY܀ So*HQAԮaI(F?U De[PǏdh !gHwx ||` i eoWČ+,ll,])IO :='7WR B\M1cGI"Feϛ%9u0IQ|c먯'C./Qh_6gyc}na~B&+{/:K.2u2FfO>^I@@RAU)Aܺ叻4wZg,s<sr1ܦy_2kr;~LZ^ tx).͂y}n_eǝ9P,[$ㅳ flӳVFT>'*}jA  ( /J[X!ùj_yGVitMx?L:}JwO 4ccӵs^*#I/vNpQU|uðh}2 VxD1þQT !g+w e7q}ϿXas/ժ.|zݐ`hPY`nS=b } ߍv'rX)S:lX+^].8B/WeZFQ=GCHf++"|lIg #J@<I2ai@ >1J Cr(H:OkDP?̴hk ާ6.dRJn"'V:xrlx>Vҋ5˜;X5d'E)~Ee('|VxyAGW3Ж w;y-1'[@Us$J)C@';,Rub=aOhp*Jx(H*ҬǃjOKEGW;@OCB~5K|Ӎd|, %A<N츪Yr] \Xk6gtT|l\v?6 پΆ3Q?w DAeA"[pGֲ%4NXzRxNĴj S^ҰJ[s? 5x4]Mk(/, vʇLZD[yJD@BO'u'h(!K5A[ 7EUwz:] =c\(;wsgeKIҠbYUԟؽm-G򛝢ϖ`ALdg[1]7!{ ˅mqFI삻&_zea"<"jU:נi=3\cS_?'d":"O絺S{߽uqĴQ|W])KhRN:O6b; 4 i-c_1*)ZU kT^9%Xw! 0J1ZՒox0ΖWZcfH &Q/o v9VoZ2[ `v9s_;m9n\BP6Ik.U]obH[༗yh_j3 B 1_MCCi x"*[y_˃o.Ae?W5_U5-7wD'3hvCQl ?O?= zZn7@X)ƼE*w-y>'-Nsk>BWaZymX>^'L],+%dil dp" r$|uB\ p#* /V˰8S4BJg}}ٴeQ@ pOq^̨8n6]uϝPrk0sXJwm.?]tun%?r zJsAY_oxKN-BGI՝\yj)&Wa xَ" >dՁ,Ht(#[;Ӧ@jhLJN܃|/XMJ++z!ތ]\TRux~HOTC pUO®v֯z[{~`0-۶;qf%ŘN]$20ME:NM_|!*1Y 6YmWI\+& c1V-e;a Т}W5Au>jZ?&^/[뎸izYȝ*w$@OUv됤mBSoYeT1j |TcWhЯc<,` 7rlpSp'FҙFx yx\JY6NQ(VG0`[9 ͊ ÜM@ԅ`BtI \M;;)̗֕ XrPzUI PzkJUʂs,Ayv].~G2&|\ K =1ì!858(~nhkWHd>2Z&>JA<2=]l˓Gbt&Bi$2>ӫ;" Om&6ղ9Ljb˛enf71ENM0?U8&20oG r;L?3lg;ڔke$'gd7y_AR= ΓPk< |I}̩U=@(q(>)c_WG]lQ+,y`Dޤ6P^W+T\!RO!YSޚ(n Q9zoBYoUbgluWl: 58PkyNF(تUfk* h Li%ojŇ7ɼoԩ카!({HRjWRYW(_tasHcgEᴥ0SBY \E=WP:u˝g~\OOk K|""yKʼnZX*9'[\m $#ss "LQfdv$pgPX =ykk{[MVi;%9Ŧ&5á Q:_bTMLm9qQYՈ]&0l ;mM/K`ܨI5!5?Ln9S6Q}+Uhb ,sRWL}CĬR@.ꔈ6(P_Gc-kHnP,Ҍ-$ 3xS޽g֍)U_z05_4 n0]'mJM;X2rʃL@P ^1ۊ| {ӗ9`Ԑ JpiDŽi}& PTn?LC Xٶg9sUm-=0XB@b9QPx&Gz   Wbi5#h$T>.7b|@b!,煉җѓqblc&w. g۵*;3"Af/AfsUώ5+KC\DDx* aHkXkG)"i()kUbӛ@}IH єhc|(dU#wK!R:KOh q3TMҎø,K!=@6V|%Os`f[P.d\, @[D܈ ^ =ƜQ-+5;CUj5/( 44a4APrb ޹RJ\Y !dA{HbT)65ҜU_;TM.xZE ݱ`²^ )hUp=?2vhx%}P@Qg HMprW8:f0:>WV- ,삭-kag_jͯA?YB3?'_\$qks]\>)*ؿp 爜H mnP@~O9MtSka10"/V\ @QC *:Ou,g-Z'vY)b}@pMR"x  d0 0sYsG+ c&)YK 0gp8' iUhwUמHz0Rv2}L.E;j3`ZV(3«LJmݦrʛN#u^-d q>=N;DpozJ<'ޠ|IoVch;=^ɬVڦV_mH4r5tqN#!M,ۛH1\&_&8\H{~GJ؏Jھ) =u/"Ȧ/TGKch O?u."Ո A//wk;~^`•SM;B̍ 8o.X2`:=bcN+l`*(k զ,ԧCF$q-_en!o !MR42eW4|o9都ػ~v*m1v4z'F/l_$ =\-'W!ZGzI2=&Y5"9 n/AyXWH=fZT/3? 
K,1ŕEJn!g_YsZ+|M~!}Mӂ79۴..=5o8`f6 xNm hiU}Q7Ugw>qd]I 'jyCg^5'F]N@u+n}nv5r?/_6k$s)OEg m)C]_(jidqCq sV?٭ʥڒ^9(Mk퀗iZT) lF ™Ua5fӛހ5Ķ88Egh xQpKn8Z-kj[( ""IE~NT4H_H́RƋW 4`+Č7V˾]]$:@_\Wa-ڳP` N=-z7PF=q_/+Nk+ 4eEN[d6u4777<`yO6o`AE.,xA~B  Гx !|,'VL4t43f6ir\(n1^ *_/qʥnC h yr'on _&՟Y yz%\{GT1;A1ִgHڼCR9ʨ=fvH:B:b{S@KyN_ϑ݅w8p{ex1ruqPI=M_}i7ܚ/rwDd)BJZV.zw5^5ŖY0 , zRst8!]]I<{Ih&וO_d^B185G]}VQR.6tvMEYݯT M k|؊]^)H;5Ŵ2zc/Z7N/ݶy=vBPCLUpL?⛈3؉\gǾ<{ A+d._Dn2b]Q+[/@Di |e<P[cj 1LoO;7l x>8 2^2c{/EOtZQK-8dbcj娋=/ҲhǜMNk+וWUV(RwW1q+FUA0- ꨜ j.û.wRj(@nN[^l 9ʜ`jWnӮ]pصv \dk  *'foa;Ь){Qխke0{W8&Ţ˨-Æz۬YH,sc+sCh@;RNm QԱv dos*a_ﲗxZzGCҰZ#ɯwXj0hd uF 2%>FVV1ۑJY%"(c9:RQuEXR1bA~zA#"Z ] ]aK8t6Iͥ^X]r|=*Sr81$iӎP He|J@&>C}{C}jo6CrȰW=Z ?*O)͞5I%ܛ@ҳA{Gl_&hXWWUނN~922d[G)/M > I8DhQ‰kr_Mc8>/kuޝ"Gn7x;uRH.OxFnOIKHw$n |ϷQ=B:̀<)_O*c#$dN|p݋+G ?ˢ(c۳5!$AdT-7z»0|v|k( 0-OV3C憛wY`e jXB/ڋ?-=O1?Qvbsqǔ]4z!c5_C*F'U-H!{Lh蛐Ŋ8y'$'IG_1 %ٱ6)ą5.T[G8wlcgIN!nKV#ᢹڸy߅zПm;'ZSxMIy Su\b&ʊDܨj @3U(HaJ=A|'iCC2:" :m95,!L 37Z78{M&pn%^KbS"lAQ^l+=6<]!\ьaH#0Xo}̱C:C(b;O R}4x,n{}:,Ht9~E5kY7B^ƫƿ@׵g>|^Is,8EX|*:\^ЫR:8H0 -8N>Ow"Fиl6?[z.n<9˻0#]FQØY%QPD(DoFp osh1oVZLv|Yb7 )Mq_'*)\|} ".v о)Uejf(IULLJ@c6s㜃A⅞J`/Xs uDB2zz Q78(ϵYVm[վ} WDzv"d(kh)hMQKd+F~q2%{P`rO? ,+siGkU8יد#dD؊&X CQ~Ee^5j C?w<0l,k~ !tْ%k/5VOABV)M0:lujw1]c3ҩ.va#s4i>ԫ,ky-Zr6V(:Vߧv ]+$m)g r %0[R豎'J.~f] I WYlAEK%찐r B@fjjt.e@Ŗ%v*R@DGi*.RL#a5k,ƭ%L 0v*CsƱq r@C8Sߴ#Ge~qstzVN6B=A I^S/^&yqD`@Y2Ue_ sM4UFXzm9j f3aEPIXq=5,ts7'8)S߅QVAj98wU!2'  i[_ULpWGMZ wPY+C7mi1'<\-GA9bl[ZIuRD_k/4==@\\K⦯ `%׌Í,MDZ+`o2[ńQtRa`#!ך-vc8ѢH@kd|1q5 $׿eB\)T͓Y4*!0Ю]r -i~ ^[ș^B0Q1{trɄĨ8;az9T0ڋغSsFf Ų\ hBp4ߩQV?y)]`?ιlU|bd:~cVt4eV$W&!\k\ +qHTRY+h5OD㸦BwR##i?FUBG!&vA?毿li- I.zY)4zjRԆIS* AZѿcxaw_vf_l*A~(~JV+:>")ˡ!θ4POMIxbߠkz(s\i.9o7@ZFԳC֋8+p+x$V }`'?u6ᲴNq.WmY?´.3Co\^c3M\156B~~X;c=r8֤#mJ9lIXLB$BCy}$9 }Kl<Uh>'2(?eZR:x˕/T*!a |UŌ22-1,wƽ䆄BZn J0^l)mqfeWj>;W=ǻe7mz l!5=5?&6zJ 3UK?l>o4'uH[^Mkx(x\%lI=ذ{h 0y^[GRؗRסC%( ؈Wؑw"P?{V`yAE$8< u=OSx/ׄsi;PpՃf ]7DFdZR(=jGWA**6Y9I>x WRi$>kzQCt}ϳ>pCۺdԍ>cU Gux?`ӆ/n~Q,w9eFeT{rF?A`aCRybx6cNHO }ksSGcD=s2>IUpPˎ>i2My5E!$۠"f8 ̖Y [iN~e(MJTM\ѣ0&8FϠBP.(bA<)@z}$ǞHE{*}R+>!'OBN۞'mDo-;2 LEzbKMQ^?aWSz'Q|m@y4)v$swEo.szhMGrzޛikIc1yxkXV.AsJD6KՆx 9!e>m?~5Mu?JCԞ#%&0i׌a ;Y#M Z?-7zzRko { BIoT{ nAI~AXkM!x/խQ|4_؏W3M7[=2g0v<*&k ;Cydlg5] *b 5ffUxj{[}G5H\#ISm Tƽ\'8W}TTV;deoڕxȇwv^> 4!XDn0.p;aִo yHp 7+Z៊ \Ќѳt@?Mh^ohnN&Cj~W*س[BǪ4f̿xÝN]9bXS \-&zdoE%p_0f`}t?"S|IEO:TqE(7B|܇exro8|- &jqenmzl<9~֞=΁z"VVg(ӹRe[Q]tawmUUL|4AV(%bC]_Ah=;klJO=H>ZӘ3 5Sk+v__XWD^0c810)2n2Yң-|M]/V[ShysUxY̭) jp8mj q0k΄~wWQGG;l$Nd}C*8:`D=]G*[G`W&NUc-Q9=/YY UNe|VD0)rz$IGa -S|)^"ij`h[Agh}rHuS_c/o4YPbZ. 
a^B([ 7muLsΠ48uBᗘa X zqڦY(6kIHΒ-m:yj~.vO3Ą P&}IBv^:#^6sJntnPeyپx>E]87l[VΝ9,qW2 H >r;gWIuҕ^ݍZ-iZ_ϙUutLU͟ʟ9e23v2lSmG0,~ѢC-R0׍[s2=g”Z{V?Cw41; V U(9Îq >P \R1~_cjpG:˅MOB+]r:sMm,o7_9-0L+2EȈ x}uH^xټ^ zaH>Q\6Xg5?|u)hd#,W2  Ο]Ђ7F5JeQFn\D5ĒJ8V8 yqx 0*HM.8h0X6.Ρpx'@hԍ !1ty'e4L: w6Yi!Fqf~֎e]:ںgEOZ\.QӬ*AZrw< 1?lpnm&œOߥ>+L-K ]MU>)σzً0L[A[3 ($P9l!s;9iy0~fgzΥG 5Crt{G1߮4xdV@i~C~vS3 MzlQS.Re*,{غ_1QAĎͳ|H4cRDΙ}egN $b\׳Ski?]ݽnƼL:gY|{j!X=xqaW#Z !o\e#*AqjbRN:֕Y:D{i=UCX, O/"1*"NIpңuV^8$^ڊS?I 2[a!|KOa`4=m.iъ+J1wJ 0\3#Dљpԋ 7MPjjߎEy/Q0U޸sk;l?@}:,sl&PjiS{AdS{_bT2h9z'B?_;F~_2d)m}p 1TKpܙIa z12]u{l s߶9ՃĴ-`|*Ѐuvtc.OVv XXn7jCu|:ǟǣⶠ=YFTW?v#"cݴC䚥3l|[2N[wX&Z5d4ig| U.Ȝ8ڜ6=;Q w0fOS5$ߨGG42jd x :;U,?EƪVlTH9jWy5JB7Ǩ9Cfa'՛}R :Q4,PVvxxc[u֚EGd!{6jI8N*M"^ˏ86;?XTVUԷE|8ZMkN3諂T6.X0c3ł= ZR$~8 +;O^Do9 SuCPe=sqUqlfIs ؏t\Fo\ŠMş}hypM5 syQw 5m'7MJTTN6;x!n0 Q@[wt8Aqg@橢Z`)m;6pCއ2$f\RēQ#HvG<*½oCHmcbj \÷Z(ٲ Fa= 5 U cOfVxCq,O``ר;wX:Q̩q{4D1" d[D};Yó9NVظFCmn_ݝVO:{Y+|[Zyuާq_^"Hm', Fk@#[jȒ$IJ;5/YW0oc0(4r0$AoǤ?/x^V7*m_ſ{sP:kƅKHBA!NϡY&g5GlF?gsY"?c?[-{{]rT* ȉr7-\ 5Ӟ?[JrBFl]Drk?>A}h ڲ4SaWc{ٰU ӑ'?n iaܰcos{VA .kTfu1"-eqB n*?n5?2S˚|irfRPz;l_Ӡ[y9eӔuΰ%o2elttbCjۮΘM(3Kυ8f'=$@͍@Ymfah[?˗A&n%t0Voŕ9ê/`}1->y!7,IHnU&!Btxj! !`4Nr}; \7cq-8az19]ۡd8HhiHe{eWJ e-_Y$wg÷_:xD$B)ȥ-)z l2q VG够GqiI>iVPt exH חlrY#GꬢĊ 캐ey>'*tqt%RP 50˻VWvKyb}0|j.& !صt M?@fr=-E{uod8@nGٰxg/)PwոAR"۬G'>QQ>v7k~/#GI lLeTU?&#~d$YYZ5mE'8:К ss-u%yt%27 mpP ߯nd KFXC%'nP0=t t~ Zm3&w,hA Zq-Mbxs*ØHk2ᑅWDժ6;]sz*vxh8j#oZ"Xo)|5#@M!*dZxB:CEEzyM/OQ6v4PVp!svhW` b8Af/ "6耿 :ùFM..G:ɫtDx >ŭQ%zZ-67q6/7j}İu#OL.,%:xfPO 4ӡ;ɤ$:pxbT Eѻްp=%bM ƅ N!M.) jA1P xqcj7=eoғAvJ $%m.:LqR[E+(97ݑV̘;/E2Ci׊ue-򤞘["Mpo!qPXdi֧.LVHz*&Q+EN2z4ޒ=!iֲYx8P$uU W︷{dAHqR>&5Bum^-SX9zF-Q]_A(.yx'9Sw[oXEx =<:Wp8D 9##y#P^T'X6bJ "}7԰Z<'jRGcH56yBFfu@m j=@tDa\*‘rS %o.вWD v!Oi'Eq/V~xz{yCn_O+{K81u5$RGX-Lg5N`k0㧋"|dI7~ĿgPOQM$>46^_d2N;|G~통 ZtpEUc:(9V'ۤnx+f oMX9ivl~; Ķ3)icB|-awtbʞ)~r8CQlSyc.Zd }lrr{'y^O5 mYD:,uj~ԀtEU^6MF"0liq<_p SA2V2gp{˻Qjč;n1oڳ?|f̲\:~M~x!ϖkۋV/\C+ζJ˝>\EP}?%ijC?_J`9yI()Z! k$$ \[q mKd<&q.L<;{NH|$'_q?;_w:]7Y K]#!,{,3ni6u'0ىǯj_X*l`'4/c(1Hny0s K}a񫅍O>'#""Q /o¿a3# ?) ޜa"VXU biZEXx:6m )4x-ר9SnLc6LE@WV dJ?;jrG:ȭw}ߘLn.:X܂}tCL_iOK(2^ꏱ3GE1ɏFC(yH a{k5/g|?P$UH~i R%JD&~)H-eOO$l=!mF`!u,VEQuxY&7I*G`pu _w2}3I\hj␡ڴ [+8%ݻH , T*]œeͭQyx#5AJ_ë?;AKͨYozJ]gB@4W^s7"OKX!|9DWf܃;#ٲǙ.a9|VN ALm߻;cZxONHԐB:Jc^`$pEIq^AE[U\T 1R4G_CH\2v4 "uGc0k(ᢡ' ߛC*XC7z~3Átk\Gq;ZJCL׉p³-U%W}ug K-EG5#.ԝr=g3WE-hje1`J)ȯs&%Kj%QT E-O'9^)RAUjNbѪ%Zdc-v/7Cuɠ|*.1O/u؂@:is-.[qPE1mѿ/lqslVapWԅ~ث!}ݸ]Fyӫ5g.W+*aig +M js~Gm0lAvBS)j2zBf~YATsI @UDQԜ3rcQ_kR+˜4 .{ڈnݔ߀#,Fxg<^ <$R. 
r5`C+'o#맞enV/u&g9FWbِ_/ڎrsȜF!K.d{g !;=u8.'dkᅿ~r+f~ V;8=Vn߽gp54č(nj5{y͖K:~:Jl]V篶z@ZIʕimKڪ29#^3- Z &O %s̪u ,|B X-IiK/f{WB3F5/2WS /vVvGp  K* {NțC!lOS<xZ]95  ,xyihz'>Ucý\uvǓs`k$Ԍr~0?>70~RytK14P9P9"6ꅙVZU( B/&:Cc X OKغVu,֐|3f]?8t$NnkeAbW-wcտOw𧺰yzNy΃[ 2>0웛5N %?nK1Uq"tjExŇ;PAF/6䔩8 Uǧ 8C8Wqk0ofY\+J#xjm›D;|5ӴZg6o(KElSu, > |"a`鱾:,*+\2lwOæg~g+ {KxirW&G3y 7Ϋ_@d$ы˴JWu#2up"}%ՃWrg>{9+>{R;]bJ8%b%X!x1GKT+ /k}չnyv~\C*3-~kNњ@Yt~٧{|ZfUQ?Jd=[ӣЦqy6H3_= 9pI~5 {l~cnwk dlTT&f ARik1 L!vVlAAS>JbSDC>_|c]^e/̷=/SN ɼB:^r9}8g0xfՅ署:Ǡ,6vD7"0t`VyX 2_k9qWQK:/d8ꓠӏisܗ/ 2Os͒h'9Ot`6C D)(Ԥ,g|>*>QV4]Gݦ-4󸃅 WXBjyع9LW[TwOpݹl5=5 x9E}t"{o=Sʶl bi9KIEx׻c2UykbG+S}%$~(5H!BqgZ-x5XePT7:^_lȋ@=<{ ۄWֵƀc7RgsU/SQ~Wמ2WF>x,)$]7mSztn%HD4' Nk/iV+!0Gږ]w^AP|.m#-y&gCylz!B( hS/5HSO[akQfn'ӧ*+Y],Ppa,j"UU^}ҹЛ*L^8m/%My笰 7\CԆ>bȅvs>2,zԣLhp^OU-E⧅m}R+'؝Gr)򅛌JkŽ6p WePӰbxZT_O hnQ_BK9qܒ馧.ⵃLFY<ݟ f<=X=qw;~5H0aR[E+k>PR[S)>ȮX* c+;Mҝ1w* X۞[#(s2|*|ik^=DnVM=Y"NH.ӆ4p15< 2ruE6;\.K<ҸU5@[!;/tk{y-3ۤ o+6\ٱM [rS{dnHc9zSφ٦=2|r}[t Eu(Tr8V L8C3 xG%DF;u.B812fb>1L$\i&[<Mc(A&/).{⥜Z+DbMIiI]%U)%Syz@!ւ*nC^rGR%j~(jDER4uMy{.1}C:yڱ߿ !#F;YʵYmȴ1Q1 +$}oưǎۋ]Wr^( uz}ɼҘym3L2xN^X7yHE-xX ωѠ?'8.aRla JxVwqsQgH1S k Sq 8Mh-AP}ܚ`Vg5 Oݮ_avLEX(t/m.]3b\ v61XUpI.\F=yFױ0{O/aۍpQհ-/1qL J7]be"aQ'pۅ;UǵQJ B`5N@ XiAB3*) y.?TO~l벥|T];y;vػ%ۍR\^zcgmo?\"&vu18†ul[~4Đ_2vVv ߔB ʃ7)eW\^gt8\X+wa0Fj`(jy 9JBr#`q QnK^6#w+<|=|ĬSG9er+/_A?U55mr=u:\31uvLu5!2G:&ސIH{H,aOTN kEzґ0tEa<Aah\t>WIf,q5.B09mtv$!ߞ (ZV\UtW?dq)ͥw;x3ZH`DԊ+0~9CO?G`1C:s \j䖥Pze%>6ZmcX TwVхѠLCϚx߯+i9Z^ SH{E׉݇z)$GwmU0]#p=.=eJn0.+8O{fhNE9,3EBDxRA] GA͉sjcqd4_\nL0dPB>o2?簙+&UO觡d}ֽ-g7"ZdS(-M]D@p\ƽdh6m 1p]r) z48~7O%'KO`Jxb_+ff'INXnFrb%o#p:v%$" mݤX;= A?o`Tox!mY:=x={v:kNq% ӿ4d.ol1 s{NhlZ]̗[Y=HWVv"rbeZ2,@y+y; xԦ"T`NDWWE5Y?K>)zzB ]gJ-Ta֮.d08<$6 TT>:P72: yI'bbe#\`n;*UԲv>P !7X"Jpe/@[΢J+`PQژGy#|k{WMv~tLxyW\Lo-3M^fl+ $5İ4Q1 Jzߑ G}EnxCM}{Y]\į4Ӑ ,Kƺ8 f+x>dw KpP$WBFsϠ`wF$n "hk+?e l<}wR`9Yxќo]Z3r ^]eqrW[l$Y"vja.{80X^~~bKM5nFvh`G|a BCK{qL;qy7&l}fOo>ͩCqU-n춧\ z,}sh#,Q. Ԗsh8<H8%OIOO-nbA{W?c NpXX9G6Z pQ@'uze#*?%( O2pQg Bڔ䝼MtѴi.=d7c7;ܫQRS-ss%>JVPbZ?C׎snSWTD yͱkk5$ë* foZM L*%6%Ӌ=鱬7 J|ŖݎumX"k+Ges +; Xӑ A*lS'-QC0I~hИU6>?ҙ_z?cl GNIp(%Lyg:JU8u9Z޼bg4Yqby^5Pi4W}4BwNQdxpXCT'́2jE $əA geKtpHkM\6RxH%.<(R] :޶?txpAXF gT@j_{0ANꈷp!] ea31 [+ pצCqØs V6cJ lz +8FE5r>LoSԟ~'Bpˊ5kqzZxc.7:e8|N'0t &]|M0|ruΊ˪W\ZPBׂȈq~tQAHzdG-l0ʎ q$ڿ#zV!J] к*y]|AS?eN61-'}4< Re Fe>8x:9Sp_IQ ' v*[\{vkyv,,E7ʸkd݈tc@=Ieۄ չOEA%QotRgSű߹kogTfY)Zc+ήf"T*GYx)MBndbwx- %P]hfD >-ASCtdw|ʏHӽv)w8ĖT*ubiT\^(tM7OcLrם\0hƳ[%U5$_I"pYTsj| 2mӴ6nq u1 F,j  N9t,$O{9&>w{u?d1kdj8lW/Dݓ/ ,wWBg_}/ޯ@26zzĸB+-{Z^`MtDbutF 1{hAB gH:e/m.fد?3lX, `!Au8fBB1WAw3\e* "[ 3ߺBYӱ\5d S"HvD}s 2Uo8UU&!CNX`산2z['f_!E&l##Ez+ $,A#Zdsܪ͒L@(xȖuL5G,Xyau!R\C=]ɒ "ǜO{|^3 XObS 7e"+$M@KiS? 'AR̄ A珢  ']qv#V;si=AQ eZ|1$HHUፘSmKO~Wp۳LUSkV I"-D IweS>`d#|KOVq|'CLu 4 5! 
loo/data/voice_loo.rda0000644000176200001440000000332614523242160014425 0ustar liggesusersloo/NAMESPACE0000644000176200001440000001121115075016621012261 0ustar liggesusers# Generated by roxygen2: do not edit by hand S3method("$",loo) S3method("[",loo) S3method("[[",loo) S3method(.compute_point_estimate,default) S3method(.compute_point_estimate,matrix) S3method(.ndraws,default) S3method(.ndraws,matrix) S3method(.thin_draws,default) S3method(.thin_draws,matrix) S3method(.thin_draws,numeric) S3method(E_loo,default) S3method(E_loo,matrix) S3method(E_loo_khat,default) S3method(E_loo_khat,matrix) S3method(ap_psis,array) S3method(ap_psis,default) S3method(ap_psis,matrix) S3method(as.psis_loo,psis_loo) S3method(as.psis_loo,psis_loo_ss) S3method(as.psis_loo_ss,psis_loo) S3method(as.psis_loo_ss,psis_loo_ss) S3method(crps,matrix) S3method(crps,numeric) S3method(dim,importance_sampling) S3method(dim,kfold) S3method(dim,loo) S3method(dim,psis_loo) S3method(dim,waic) S3method(elpd,array) S3method(elpd,matrix) S3method(importance_sampling,array) S3method(importance_sampling,default) S3method(importance_sampling,matrix) S3method(loo,"function") S3method(loo,array) S3method(loo,matrix) S3method(loo_approximate_posterior,"function") S3method(loo_approximate_posterior,array) S3method(loo_approximate_posterior,matrix) S3method(loo_compare,default) S3method(loo_compare,psis_loo_ss_list) S3method(loo_crps,matrix) S3method(loo_model_weights,default) S3method(loo_moment_match,default) S3method(loo_predictive_metric,matrix) S3method(loo_scrps,matrix) S3method(loo_subsample,"function") S3method(nobs,psis_loo_ss) S3method(plot,loo) S3method(plot,psis) S3method(plot,psis_loo) S3method(pointwise,loo) S3method(print,compare.loo) S3method(print,compare.loo_ss) S3method(print,importance_sampling) S3method(print,importance_sampling_loo) S3method(print,loo) S3method(print,pareto_k_table) S3method(print,pseudobma_bb_weights) S3method(print,pseudobma_weights) S3method(print,psis) S3method(print,psis_loo) S3method(print,psis_loo_ap) S3method(print,stacking_weights) S3method(print,waic) S3method(print_dims,elpd_generic)
S3method(print_dims,importance_sampling) S3method(print_dims,importance_sampling_loo) S3method(print_dims,kfold) S3method(print_dims,psis_loo) S3method(print_dims,psis_loo_ss) S3method(print_dims,waic) S3method(psis,array) S3method(psis,default) S3method(psis,matrix) S3method(psis_n_eff,default) S3method(psis_n_eff,matrix) S3method(relative_eff,"function") S3method(relative_eff,array) S3method(relative_eff,default) S3method(relative_eff,importance_sampling) S3method(relative_eff,matrix) S3method(scrps,matrix) S3method(scrps,numeric) S3method(sis,array) S3method(sis,default) S3method(sis,matrix) S3method(tis,array) S3method(tis,default) S3method(tis,matrix) S3method(update,psis_loo_ss) S3method(waic,"function") S3method(waic,array) S3method(waic,matrix) S3method(weights,importance_sampling) export(.compute_point_estimate) export(.ndraws) export(.thin_draws) export(E_loo) export(compare) export(crps) export(elpd) export(example_loglik_array) export(example_loglik_matrix) export(extract_log_lik) export(find_model_names) export(gpdfit) export(is.kfold) export(is.loo) export(is.psis) export(is.psis_loo) export(is.sis) export(is.tis) export(is.waic) export(kfold) export(kfold_split_grouped) export(kfold_split_random) export(kfold_split_stratified) export(loo) export(loo.array) export(loo.function) export(loo.matrix) export(loo_approximate_posterior) export(loo_approximate_posterior.array) export(loo_approximate_posterior.function) export(loo_approximate_posterior.matrix) export(loo_compare) export(loo_crps) export(loo_i) export(loo_model_weights) export(loo_model_weights.default) export(loo_moment_match) export(loo_moment_match.default) export(loo_predictive_metric) export(loo_scrps) export(loo_subsample) export(loo_subsample.function) export(mcse_loo) export(nlist) export(obs_idx) export(pareto_k_ids) export(pareto_k_influence_values) export(pareto_k_table) export(pareto_k_values) export(pointwise) export(print_dims) export(pseudobma_weights) export(psis) export(psis_n_eff_values) export(psislw) export(relative_eff) export(scrps) export(sis) export(stacking_weights) export(tis) export(waic) export(waic.array) export(waic.function) export(waic.matrix) export(weights.importance_sampling) importFrom(matrixStats,colLogSumExps) importFrom(matrixStats,colMaxs) importFrom(matrixStats,colSums2) importFrom(matrixStats,colVars) importFrom(matrixStats,logSumExp) importFrom(parallel,makePSOCKcluster) importFrom(parallel,mclapply) importFrom(parallel,parLapply) importFrom(parallel,stopCluster) importFrom(stats,constrOptim) importFrom(stats,nobs) importFrom(stats,qnorm) importFrom(stats,quantile) importFrom(stats,rgamma) importFrom(stats,rnorm) importFrom(stats,sd) importFrom(stats,setNames) importFrom(stats,update) importFrom(stats,var) importFrom(stats,weights) loo/NEWS.md0000644000176200001440000003573215122143602012147 0ustar liggesusers# loo 2.9.0 * Avoid under and overflows in stacking by @avehtari in #273 * Fix `kfold_split_stratified()` when a group has 1 observation by @jgabry in #278 * Fix `plot_diagnostic(..., label_points = TRUE)` by @annariha in #288 * Use testthat 3e and rely on posterior for ESS by @VisruthSK in #289 * Fixed NAs in `psis_r_eff()` by @VisruthSK in #301 * Print unequal sample sizes in loo_compare by @VisruthSK in #307 * New website theme and pkgdown workflow by @VisruthSK in #292 * Small fix in `loo_model_weights()` doc by @avehtari in #276 * loo_moment_match.R: fix doc for default k_threshold by @jgabry in #279 * Update scrps reference and improve doc by @avehtari in #280 * Fix 
url in vignette by @jgabry in #282 * Update stacking citation in inst/CITATION by @jgabry in #284 * Added contribution section. by @VisruthSK in #286 * Update LOO uncertainty paper to use BA doi by @avehtari in #311 * Update documentation for `E_loo()` function by @avehtari in #312 # loo 2.8.0 * make E_loo Pareto-k diagnostic more robust by @avehtari in #251 * update psis paper reference by @avehtari in #252 * update PSIS references in vignettes by @jgabry in #254 * fix loo_moment_match p_loo computation by @avehtari in #257 * fix loo_moment_matching NaN issue by @avehtari in #259 * catch Stan log_prob exceptions inside moment matching by @avehtari in #262 * Fix E_loo_khat error when posterior::pareto_khat returns NA by @jgabry in #264 * update psis ref + some minor typo fixes by @avehtari in #266 * update PSIS ref + link to Nabiximols study for Jacobian correction by @avehtari in #267 * Fix issue with pareto_khat output no longer being a list by @n-kall in #269 * fix equations in loo-glossary by @avehtari in #268 # loo 2.7.0 ### Major changes * __New sample size specific diagnostic threshold for Pareto `k`__. The pre-2022 version of the [PSIS paper](https://arxiv.org/abs/1507.02646) recommended diagnostic thresholds of `k < 0.5 "good"`, `0.5 <= k < 0.7 "ok"`, `0.7 <= k < 1 "bad"`, `k>=1 "very bad"`. The 2022 revision of the PSIS paper now recommends `k < min(1 - 1/log10(S), 0.7) "good"`, `min(1 - 1/log10(S), 0.7) <= k < 1 "bad"`, `k > 1 "very bad"`, where `S` is the sample size. There is now one fewer diagnostic threshold (`"ok"` has been removed), and the most important threshold now depends on the sample size `S`. With sample sizes `100`, `320`, `1000`, `2200`, `10000` the sample size specific part `1 - 1/log10(S)` corresponds to thresholds of `0.5`, `0.6`, `0.67`, `0.7`, `0.75`. Even if the sample size grows, the bias in the PSIS estimate dominates if `0.7 <= k < 1`, and thus the diagnostic threshold for good is capped at `0.7` (if `k > 1`, the mean does not exist and bias is not a valid measure). The new recommended thresholds are based on more careful bias-variance analysis of PSIS based on truncated Pareto sums theory. For those who use the Stan default 4000 posterior draws, the `0.7` threshold will be roughly the same, but there will be fewer warnings as there will be no diagnostic message for `0.5 <= k < 0.7`. Those who use smaller sample sizes may see diagnostic messages with a threshold less than `0.7`, and they can simply increase the sample size to about `2200` to get the threshold to `0.7`. * __No more warnings if the `r_eff` argument is not provided__, and the default is now `r_eff = 1`. The summary print output showing MCSE and ESS now shows diagnostic information on the range of `r_eff`. The change was made to reduce unnecessary warnings. The use of `r_eff` does not change the expected value of `elpd_loo`, `p_loo`, and Pareto `k`, and is needed only to estimate MCSE and ESS. Thus it is better to show the diagnostic information about `r_eff` only when MCSE and ESS values are shown. 
### Other changes * Make Pareto `k` Inf if it is NA by @topipa in #224 * Fix bug in `E_loo()` when type is variance by @jgabry in #226 * `E_loo()` now allows `type="sd"` by @jgabry in #226 * update array syntax in vignettes by @jgabry in #229 * Fix unbalanced knitr backticks by @jgabry in #232 * include cc-by 4.0 license for documentation by @jgabry in #216 * Add order statistic warning by @yannmclatchie in #230 * `pointwise()` convenience function for extracting pointwise estimates by @jgabry in #241 * use new `k` threshold by @avehtari in #235 * simplify `mcse_elpd` using log-normal approximation by @avehtari in #246 * show NA for `n_eff/ESS` if `k > k_threshold` by @avehtari in #248 * improved `E_loo()` Pareto-k diagnostics by @avehtari in #247 * Doc improvement in `loo_subsample.R` by @avehtari in #238 * Fix typo and deprecations in LFO vignette by @jgabry in #244 * Register internal S3 methods by @jgabry in #239 * Avoid R cmd check NOTEs about some internal functions by @jgabry in #240 * fix R cmd check note due to importance_sampling roxygen template by @jgabry in #233 * fix R cmd check notes by @jgabry in #242 # loo 2.6.0 ### New features * New `loo_predictive_metric()` function for computing estimates of leave-one-out predictive metrics: mean absolute error, mean squared error and root mean squared error for continuous predictions, and accuracy and balanced accuracy for binary classification. (#202, @LeeviLindgren) * New functions `crps()`, `scrps()`, `loo_crps()`, and `loo_scrps()` for computing the (scaled) continuously ranked probability score. (#203, @LeeviLindgren) * New vignette "Mixture IS leave-one-out cross-validation for high-dimensional Bayesian models." This is a demonstration of the mixture estimators proposed by [Silva and Zanella (2022)](https://arxiv.org/abs/2209.09190). (#210) ### Bug fixes * Minor fix to model names displayed by `loo_model_weights()` to make them consistent with `loo_compare()`. (#217) # loo 2.5.1 * Fix R CMD check error on M1 Mac # loo 2.5.0 ### Improvements * New [Frequently Asked Questions page](https://mc-stan.org/loo/articles/online-only/faq.html) on the package website. (#143) * Speed improvement from simplifying the normalization when fitting the generalized Pareto distribution. (#187, @sethaxen) * Added parallel likelihood computation to speedup `loo_subsample()` when using posterior approximations. (#171, @kdubovikov) * Switch unit tests from Travis to GitHub Actions. (#164) ### Bug fixes * Fixed a bug causing the normalizing constant of the PSIS (log) weights not to get updated when performing moment matching with `save_psis = TRUE` (#166, @fweber144). # loo 2.4.1 * Fixed issue reported by CRAN where one of the vignettes errored on an M1 Mac due to RStan's dependency on V8. # loo 2.4.0 ### Bug fixes * Fixed a bug in `relative_eff.function()` that caused an error on Windows when using multiple cores. (#152) * Fixed a potential numerical issue in `loo_moment_match()` with `split=TRUE`. (#153) * Fixed potential integer overflow with `loo_moment_match()`. (#155, @ecmerkle) * Fixed `relative_eff()` when used with a `posterior::draws_array`. (#161, @rok-cesnovar) ### New features * New generic function `elpd()` (and methods for matrices and arrays) for computing expected log predictive density of new data or log predictive density of observed data. A new vignette demonstrates using this function when doing K-fold CV with rstan. 
(#159, @bnicenboim) # loo 2.3.1 * Fixed a bug in `loo_moment_match()` that prevented `...` arguments from being used correctly. (#149) # loo 2.3.0 * Added Topi Paananen and Paul Bürkner as coauthors. * New function `loo_moment_match()` (and new vignette), which can be used to update a `loo` object when Pareto k estimates are large. (#130) * The log weights provided by the importance sampling functions `psis()`, `tis()`, and `sis()` no longer have the largest log ratio subtracted from them when returned to the user. This should be less confusing for anyone using the `weights()` method to make an importance sampler. (#112, #146) * MCSE calculation is now deterministic (#116, #147) # loo 2.2.0 * Added Mans Magnusson as a coauthor. * New functions `loo_subsample()` and `loo_approximate_posterior()` (and new vignette) for doing PSIS-LOO with large data. (#113) * Added support for standard importance sampling and truncated importance sampling (functions `sis()` and `tis()`). (#125) * `compare()` now throws a deprecation warning suggesting `loo_compare()`. (#93) * A smaller threshold is used when checking the uniqueness of tail values. (#124) * For WAIC, warnings are only thrown when running `waic()` and not when printing a `waic` object. (#117, @mcol) * Use markdown syntax in roxygen documentation wherever possible. (#108) # loo 2.1.0 * New function `loo_compare()` for model comparison that will eventually replace the existing `compare()` function. (#93) * New vignette on LOO for non-factorizable joint Gaussian models. (#75) * New vignette on "leave-future-out" cross-validation for time series models. (#90) * New glossary page (use `help("loo-glossary")`) with definitions of key terms. (#81) * New `se_diff` column in model comparison results. (#78) * Improved stability of `psis()` when `log_ratios` are very small. (#74) * Allow `r_eff=NA` to suppress warning when specifying `r_eff` is not applicable (i.e., draws not from MCMC). (#72) * Update effective sample size calculations to match RStan's version. (#85) * Naming of k-fold helper functions now matches scikit-learn. (#96) # loo 2.0.0 This is a major release with many changes. Whenever possible we have opted to deprecate rather than remove old functionality, but it is possible that old code that accesses elements inside loo objects by position rather than name may error. * New package documentation website http://mc-stan.org/loo/ with vignettes, function reference, news. * Updated existing vignette and added two new vignettes demonstrating how to use the package. * New function `psis()` replaces `psislw()` (now deprecated). This version implements the improvements to the PSIS algorithm described in the latest version of https://arxiv.org/abs/1507.02646. Additional diagnostic information is now also provided, including PSIS effective sample sizes. * New `weights()` method for extracting smoothed weights from a `psis` object. Arguments `log` and `normalize` control whether the weights are returned on the log scale and whether they are normalized. * Updated the interface for the `loo()` methods to integrate nicely with the new PSIS algorithm. Methods for log-likelihood arrays, matrices, and functions are provided. Several arguments have changed, particularly for the `loo.function` method. The documentation at `help("loo")` has been updated to describe the new behavior. * The structure of the objects returned by the `loo()` function has also changed slightly, as described in the __Value__ section at `help("loo", package = "loo")`. 
* New function `loo_model_weights()` computes weights for model averaging as described in https://arxiv.org/abs/1704.02030. Implemented methods include stacking of predictive distributions, pseudo-BMA weighting or pseudo-BMA+ weighting with the Bayesian bootstrap. * Setting `options(loo.cores=...)` is now deprecated in favor of `options(mc.cores=...)`. For now, if both the `loo.cores` and `mc.cores` options have been set, preference will be given to `loo.cores` until it is removed in a future release. (thanks to @cfhammill) * New functions `example_loglik_array()` and `example_loglik_matrix()` that provide objects to use in examples and tests. * When comparing more than two models with `compare()`, the first column of the output is now the `elpd` difference from the model in the first row. * New helper functions for splitting observations for K-fold CV: `kfold_split_random()`, `kfold_split_balanced()`, `kfold_split_stratified()`. Additional helper functions for implementing K-fold CV will be included in future releases. # loo 1.1.0 * Introduce the `E_loo()` function for computing weighted expectations (means, variances, quantiles). # loo 1.0.0 * `pareto_k_table()` and `pareto_k_ids()` convenience functions for quickly identifying problematic observations * pareto k values now grouped into `(-Inf, 0.5]`, `(0.5, 0.7]`, `(0.7, 1]`, `(1, Inf)` (didn't used to include 0.7) * warning messages are now issued by `psislw()` instead of `print.loo` * `print.loo()` shows a table of pareto k estimates (if any k > 0.7) * Add argument to `compare()` to allow loo objects to be provided in a list rather than in `'...'` * Update references to point to published paper # loo 0.1.6 * GitHub repository moved from @jgabry to @stan-dev * Better error messages from `extract_log_lik()` * Fix example code in vignette (thanks to GitHub user @krz) # loo 0.1.5 * Add warnings if any p_waic estimates are greather than 0.4 * Improve line coverage of tests to 100% * Update references in documentation * Remove model weights from `compare()`. In previous versions of __loo__ model weights were also reported by `compare()`. We have removed the weights because they were based only on the point estimate of the elpd values ignoring the uncertainty. We are currently working on something similar to these weights that also accounts for uncertainty, which will be included in future versions of __loo__. # loo 0.1.4 This update makes it easier for other package authors using __loo__ to write tests that involve running the `loo` function. It also includes minor bug fixes and additional unit tests. Highlights: * Don't call functions from __parallel__ package if `cores=1`. * Return entire vector/matrix of smoothed weights rather than a summary statistic when `psislw` function is called in an interactive session. * Test coverage > 80% # loo 0.1.3 This update provides several important improvements, most notably an alternative method for specifying the pointwise log-likelihood that reduces memory usage and allows for __loo__ to be used with larger datasets. This update also makes it easier to to incorporate __loo__'s functionality into other packages. * Add Ben Goodrich as contributor * S3 generics and `matrix` and `function` methods for both `loo()` and `waic()`. The matrix method provide the same functionality as in previous versions of __loo__ (taking a log-likelihood matrix as the input). 
The function method allows the user to provide a function for computing the log-likelihood from the data and posterior draws (which are also provided by the user). The function method is less memory intensive and should make it possible to use __loo__ for models fit to larger amounts of data than before. * Separate `plot` and `print` methods. `plot` also provides `label_points` argument, which, if `TRUE`, will label any Pareto `k` points greater than 1/2 by the index number of the corresponding observation. The plot method also now warns about `Inf`/`NA`/`NaN` values of `k` that are not shown in the plot. * `compare` now returns model weights and accepts more than two inputs. * Allow setting number of cores using `options(loo.cores = NUMBER)`. # loo 0.1.2 * Updates names in package to reflect name changes in the accompanying paper. # loo 0.1.1 * Better handling of special cases * Deprecates `loo_and_waic` function in favor of separate functions `loo` and `waic` * Deprecates `loo_and_waic_diff`. Use `compare` instead. # loo 0.1.0 * Initial release loo/inst/0000755000176200001440000000000015122306002012007 5ustar liggesusersloo/inst/CITATION0000644000176200001440000000753515100205060013154 0ustar liggesusersyear <- sub("-.*", "", meta$Date) note <- sprintf("R package version %s", meta$Version) authors <- do.call(c, lapply(meta$Author, as.person)) authors <- grep("\\[cre|\\[aut", authors, value = TRUE) bibentry(bibtype = "Misc", title = "loo: Efficient leave-one-out cross-validation and WAIC for Bayesian models", author = authors, year = year, note = note, url = "https://mc-stan.org/loo/", header = "To cite the loo R package:" ) bibentry(bibtype = "Article", title = "Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC", author = c(person("Aki", "Vehtari"), person("Andrew", "Gelman"), person("Jonah", "Gabry")), year = "2017", journal = "Statistics and Computing", volume = 27, issue = 5, pages = "1413--1432", doi = "10.1007/s11222-016-9696-4", header = "To cite the loo paper:" ) bibentry(bibtype = "Article", title = "Uncertainty in Bayesian leave-one-out cross-validation based model comparison", author = c(person("Tuomas", "Sivula"), person("Måns", "Magnusson"), person("Asael Alonzo", "Matamoros"), person("Aki", "Vehtari")), journal = "Bayesian Analysis", year = "2025", note = "doi:10.1214/24-BA1453", header = "To cite when using loo_compare():" ) bibentry(bibtype = "Article", title = "Using stacking to average Bayesian predictive distributions", author = c(person("Yuling", "Yao"), person("Aki", "Vehtari"), person("Daniel", "Simpson"), person("Andrew", "Gelman")), journal = "Bayesian Analysis", year = "2018", volume = 13, issue = 3, pages = "917--1007", doi = "10.1214/17-BA1091", header = "To cite the stacking paper:" ) bibentry( title = "Pareto smoothed importance sampling", bibtype = "Article", author = c( person("Aki", "Vehtari"), person("Daniel", "Simpson"), person("Andrew", "Gelman"), person("Yuling", "Yao"), person("Jonah", "Gabry") ), journal = "Journal of Machine Learning Research", year = 2024, volume = 25, number = 72, pages = "1-58", header = "To cite Pareto-k diagnostics:" ) bibentry( bibtype = "Article", author = c( person(given = "Topi", family = "Paananen"), person(given = "Juho", family = "Piironen"), person(given = "Paul-Christian", family = "Buerkner"), person(given = "Aki", family = "Vehtari") ), title = "Implicitly adaptive importance sampling", journal = "Statistics and Computing", volume = 31, pages = "16", year = 2021, header = "To cite 
moment matching:" ) bibentry( bibtype = "InProceedings", author = c( person(given = "Måns", family = "Magnusson"), person(given = "Michael Riis", family = "Andersen"), person(given = "Johan", family = "Jonasson"), person(given = "Aki", family = "Vehtari") ), title = "Leave-One-Out Cross-Validation for Large Data", booktitle = "Thirty-sixth International Conference on Machine Learning", publisher = "PMLR", volume = "97", pages = "4244-4253", year = 2019, header = "To cite subsampling loo:" ) bibentry( bibtype = "InProceedings", author = c( person(given = "Måns", family = "Magnusson"), person(given = "Michael Riis", family = "Andersen"), person(given = "Johan", family = "Jonasson"), person(given = "Aki", family = "Vehtari") ), title = "Leave-One-Out Cross-Validation for Model Comparison in Large Data", booktitle = "Proceedings of the 23rd International Conference on Artificial Intelligence and Statistics (AISTATS)", publisher = "PMLR", volume = "108", pages = "341-351", year = 2019, header = "To cite subsampling loo:" ) loo/inst/doc/0000755000176200001440000000000015122306002012554 5ustar liggesusersloo/inst/doc/loo2-mixis.html0000644000176200001440000010310015122305335015447 0ustar liggesusers Mixture IS leave-one-out cross-validation for high-dimensional Bayesian models

Mixture IS leave-one-out cross-validation for high-dimensional Bayesian models

Luca Silva and Giacomo Zanella

2025-12-22

Introduction

This vignette shows how to perform Bayesian leave-one-out cross-validation (LOO-CV) using the mixture estimators proposed in Silva and Zanella (2022). These estimators have been shown to be useful in the presence of outliers and, especially, in high-dimensional settings where the model features many parameters. In these contexts a large portion of the observations can lead to high values of the Pareto-\(k\) diagnostic and to potential instability of the PSIS-LOO estimators.

For this illustration we consider a high-dimensional Bayesian logistic regression model applied to the Voice dataset.

Setup: load packages and set seed

library("rstan")
library("loo")
library("matrixStats")
options(mc.cores = parallel::detectCores(), parallel=FALSE)
set.seed(24877)

Model

This is the Stan code for a logistic regression model with a regularized horseshoe prior. The code includes an if statement that enables a line of code needed later for the MixIS approach.

# Note: some syntax used in this program requires RStan >= 2.26 (or CmdStanR)
# To use an older version of RStan change the line declaring `y` to:
#    int<lower=0,upper=1> y[N];
stancode_horseshoe <- "
data {
  int <lower=0> N;
  int <lower=0> P;
  array[N] int <lower=0, upper=1> y;
  matrix [N,P] X;
  real <lower=0> scale_global;
  int <lower=0,upper=1> mixis;
}
transformed data {
  real<lower=1> nu_global=1; // degrees of freedom for the half-t priors for tau
  real<lower=1> nu_local=1;  // degrees of freedom for the half-t priors for lambdas
                             // (nu_local = 1 corresponds to the horseshoe)
  real<lower=0> slab_scale=2;// for the regularized horseshoe
  real<lower=0> slab_df=100; // for the regularized horseshoe
}
parameters {
  vector[P] z;                // for non-centered parameterization
  real <lower=0> tau;         // global shrinkage parameter
  vector <lower=0>[P] lambda; // local shrinkage parameter
  real<lower=0> caux;
}
transformed parameters {
  vector[P] beta;
  { 
    vector[P] lambda_tilde;   // 'truncated' local shrinkage parameter
    real c = slab_scale * sqrt(caux); // slab scale
    lambda_tilde = sqrt( c^2 * square(lambda) ./ (c^2 + tau^2*square(lambda)));
    beta = z .* lambda_tilde*tau;
  }
}
model {
  vector[N] means=X*beta;
  vector[N] log_lik;
  target += std_normal_lpdf(z);
  target += student_t_lpdf(lambda | nu_local, 0, 1);
  target += student_t_lpdf(tau | nu_global, 0, scale_global);
  target += inv_gamma_lpdf(caux | 0.5*slab_df, 0.5*slab_df);
  for (n in 1:N) {
    log_lik[n]= bernoulli_logit_lpmf(y[n] | means[n]);
  }
  target += sum(log_lik);
  if (mixis) {
    target += log_sum_exp(-log_lik);
  }
}
generated quantities {
  vector[N] means=X*beta;
  vector[N] log_lik;
  for (n in 1:N) {
    log_lik[n] = bernoulli_logit_lpmf(y[n] | means[n]);
  }
}
"

Dataset

The LSVT Voice Rehabilitation Data Set (see link for details) has \(p=312\) covariates and \(n=126\) observations with a binary response. We construct the data list for Stan.

data(voice)
y <- voice$y
X <- voice[2:length(voice)]
n <- dim(X)[1]
p <- dim(X)[2]
p0 <- 10
scale_global <- 2*p0/(p-p0)/sqrt(n-1)
standata <- list(N = n, P = p, X = as.matrix(X), y = c(y), scale_global = scale_global, mixis = 0)

Note that in our prior specification we divide the prior variance by the number of covariates \(p\). This is often done in high-dimensional contexts to have a prior variance for the linear predictors \(X\beta\) that remains bounded as \(p\) increases.

PSIS estimators and Pareto-\(k\) diagnostics

LOO-CV computations are challenging in this context due to the high dimensionality of the parameter space. To show this, we compute the PSIS-LOO estimators, which require sampling from the posterior distribution, and inspect the associated Pareto-\(k\) diagnostics.

chains <- 4
n_iter <- 2000
warm_iter <- 1000
stanmodel <- stan_model(model_code = stancode_horseshoe)
fit_post <- sampling(stanmodel, data = standata, chains = chains, iter = n_iter, warmup = warm_iter, refresh = 0)
loo_post <-loo(fit_post)
print(loo_post)

Computed from 4000 by 126 log-likelihood matrix.

         Estimate   SE
elpd_loo    -42.9  7.4
p_loo        24.3  5.5
looic        85.8 14.8
------
MCSE of elpd_loo is NA.
MCSE and ESS estimates assume MCMC draws (r_eff in [0.4, 1.0]).

Pareto k diagnostic values:
                         Count Pct.    Min. ESS
(-Inf, 0.7]   (good)     100   79.4%   240     
   (0.7, 1]   (bad)       17   13.5%   <NA>    
   (1, Inf)   (very bad)   9    7.1%   <NA>    
See help('pareto-k-diagnostic') for details.

As we can see, the diagnostics signal either “bad” or “very bad” Pareto-\(k\) values for roughly \(15-30\%\) of the observations (about \(20\%\) in the run shown above), which is a significant portion of the dataset.
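If we want to see exactly which observations are flagged, the loo package provides helper functions for the Pareto-\(k\) diagnostics. For example (an optional check; output not shown here):

pareto_k_table(loo_post)                                  # counts by diagnostic category
pareto_k_ids(loo_post, threshold = 0.7)                   # indices of observations with k > 0.7
head(sort(pareto_k_values(loo_post), decreasing = TRUE))  # largest k values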

Mixture estimators

We now compute the mixture estimators proposed in Silva and Zanella (2022). These require sampling from the following mixture of leave-one-out posteriors \[\begin{equation} q_{mix}(\theta) = \frac{\sum_{i=1}^n p(y_{-i}|\theta)p(\theta)}{\sum_{i=1}^np(y_{-i})}\propto p(\theta|y)\cdot \left(\sum_{i=1}^np(y_i|\theta)^{-1}\right). \end{equation}\] The Stan model for the above mixture distribution is the same as the one for the posterior, except that one line of code is enabled to add a LogSumExp contribution accounting for the last term in the equation above.

  if (mixis) {
    target += log_sum_exp(-log_lik);
  }

We sample from the mixture and collect the log-likelihood terms.

standata$mixis <- 1
fit_mix <- sampling(stanmodel, data = standata, chains = chains, iter = n_iter, warmup = warm_iter, refresh = 0, pars = "log_lik")
log_lik_mix <- extract(fit_mix)$log_lik

We now compute the mixture estimators, following the numerically stable implementation in Appendix A.2 of Silva and Zanella (2022). The code below makes use of the package “matrixStats”.

l_common_mix <- rowLogSumExps(-log_lik_mix)
log_weights <- -log_lik_mix - l_common_mix
elpd_mixis <- logSumExp(-l_common_mix) - rowLogSumExps(t(log_weights))
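For reference, this is simply the code above written out in notation: with \(\theta^{(s)}\), \(s=1,\dots,S\), denoting the draws from \(q_{mix}\), the estimator computed on the log scale in elpd_mixis is

\[ \widehat{p}(y_i\,|\,y_{-i}) = \frac{\sum_{s=1}^S \left(\sum_{j=1}^n p(y_j\,|\,\theta^{(s)})^{-1}\right)^{-1}}{\sum_{s=1}^S p(y_i\,|\,\theta^{(s)})^{-1} \left(\sum_{j=1}^n p(y_j\,|\,\theta^{(s)})^{-1}\right)^{-1}}, \]

where l_common_mix stores the inner sums on the log scale and log_weights stores the log of the summands in the denominator; see Appendix A.2 of Silva and Zanella (2022) for the derivation.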

Comparison with benchmark values obtained with long simulations

To evaluate the performance of the mixture estimators (MixIS) we also generate benchmark values, i.e. accurate approximations of the LOO predictives \(\{p(y_i|y_{-i})\}_{i=1,\dots,n}\), obtained by sampling from each leave-one-out posterior directly by brute force, drawing \(90k\) samples and discarding the first \(10k\) as warmup. This is computationally heavy, so we have saved the results and simply load them in this vignette.

data(voice_loo)
elpd_loo <- voice_loo$elpd_loo
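For reference, the brute-force benchmark for a single observation \(i\) could be computed along the following lines (a sketch only, not run here because it is computationally heavy; the helper name brute_force_elpd_i and the exact sampler settings are illustrative assumptions):

brute_force_elpd_i <- function(i, iter = 90000, warmup = 10000) {
  standata_i <- standata
  standata_i$mixis <- 0                               # sample from the plain leave-one-out posterior
  standata_i$N <- n - 1
  standata_i$X <- as.matrix(X[-i, ])
  standata_i$y <- c(y)[-i]
  fit_i <- sampling(stanmodel, data = standata_i, chains = 1,
                    iter = iter, warmup = warmup, refresh = 0)
  beta_i <- extract(fit_i)$beta                       # posterior draws of beta (draws x P)
  eta_i <- as.vector(beta_i %*% as.numeric(X[i, ]))   # linear predictor for the held-out point
  loglik_i <- dbinom(c(y)[i], size = 1, prob = plogis(eta_i), log = TRUE)
  logSumExp(loglik_i) - log(length(loglik_i))         # log of the Monte Carlo average of p(y_i | theta)
}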

We can then compute the root mean squared error (RMSE) of the PSIS and mixture estimators relative to such benchmark values.

elpd_psis <- loo_post$pointwise[,1]
print(paste("RMSE(PSIS) =",round( sqrt(mean((elpd_loo-elpd_psis)^2)) ,2)))
[1] "RMSE(PSIS) = 0.06"
print(paste("RMSE(MixIS) =",round( sqrt(mean((elpd_loo-elpd_mixis)^2)) ,2)))
[1] "RMSE(MixIS) = 0.05"

Here the mixture estimator provides a reduction in RMSE. Note that this reduction would increase with the number of samples drawn from the posterior and the mixture, since in this example the RMSE of MixIS exhibits a CLT-type decay while that of PSIS converges at a slower rate (this can be verified by running the above code with a larger sample size, as sketched below; see also Figure 3 of Silva and Zanella (2022) for analogous results).
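A rough version of such a check (a sketch only, not run here; the results are seed-dependent and the iteration count is an arbitrary choice) would refit both models with more iterations and recompute the RMSEs against the same benchmark:

standata$mixis <- 0
fit_post_big <- sampling(stanmodel, data = standata, chains = chains,
                         iter = 10000, warmup = warm_iter, refresh = 0)
loo_post_big <- loo(fit_post_big)
standata$mixis <- 1
fit_mix_big <- sampling(stanmodel, data = standata, chains = chains,
                        iter = 10000, warmup = warm_iter, refresh = 0, pars = "log_lik")
log_lik_mix_big <- extract(fit_mix_big)$log_lik
l_common_big <- rowLogSumExps(-log_lik_mix_big)
elpd_mixis_big <- logSumExp(-l_common_big) -
  rowLogSumExps(t(-log_lik_mix_big - l_common_big))
rmse <- function(est) sqrt(mean((elpd_loo - est)^2))
round(c(PSIS = rmse(loo_post_big$pointwise[, 1]), MixIS = rmse(elpd_mixis_big)), 2)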

We then compare the overall ELPD estimates with the brute force one.

elpd_psis <- loo_post$pointwise[,1]
print(paste("ELPD (PSIS)=",round(sum(elpd_psis),2)))
[1] "ELPD (PSIS)= -42.88"
print(paste("ELPD (MixIS)=",round(sum(elpd_mixis),2)))
[1] "ELPD (MixIS)= -44.58"
print(paste("ELPD (brute force)=",round(sum(elpd_loo),2)))
[1] "ELPD (brute force)= -45.63"

In this example, MixIS provides a more accurate ELPD estimate, closer to the brute-force estimate, while PSIS severely overestimates the ELPD. The low accuracy of the PSIS ELPD estimate is expected here given the large number of large Pareto-\(k\) values. The accuracy of the MixIS estimate will also improve with a larger MCMC sample size.

More generally, mixture estimators can be useful in situations where standard PSIS estimators struggle and return many large Pareto-\(k\) values. In these contexts MixIS often provides more accurate LOO-CV and ELPD estimates with a single sampling routine (i.e. with a cost comparable to sampling from the original posterior).

References

Silva L. and Zanella G. (2022). Robust leave-one-out cross-validation for high-dimensional Bayesian models. Preprint at arXiv:2209.09190

Vehtari A., Gelman A., and Gabry J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. Statistics and Computing, 27(5), 1413–1432. Preprint at arXiv:1507.04544

Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. Journal of Machine Learning Research, 25(72):1-58. PDF

loo/inst/doc/loo2-mixis.Rmd0000644000176200001440000002224014641333357015244 0ustar liggesusers--- title: "Mixture IS leave-one-out cross-validation for high-dimensional Bayesian models" author: "Luca Silva and Giacomo Zanella" date: "`r Sys.Date()`" output: html_vignette: toc: yes params: EVAL: !r identical(Sys.getenv("NOT_CRAN"), "true") --- ```{r, child="children/SETTINGS-knitr.txt"} ``` ```{r, child="children/SEE-ONLINE.txt", eval = if (isTRUE(exists("params"))) !params$EVAL else TRUE} ``` # Introduction This vignette shows how to perform Bayesian leave-one-out cross-validation (LOO-CV) using the mixture estimators proposed in the paper [Silva and Zanella (2022)](https://arxiv.org/abs/2209.09190). These estimators have shown to be useful in presence of outliers but also, and especially, in high-dimensional settings where the model features many parameters. In these contexts it can happen that a large portion of observations lead to high values of Pareto-$k$ diagnostics and potential instability of PSIS-LOO estimators. For this illustration we consider a high-dimensional Bayesian Logistic regression model applied to the _Voice_ dataset. ## Setup: load packages and set seed ```{r, warnings=FALSE, message=FALSE} library("rstan") library("loo") library("matrixStats") options(mc.cores = parallel::detectCores(), parallel=FALSE) set.seed(24877) ``` ## Model This is the Stan code for a logistic regression model with regularized horseshoe prior. The code includes an if statement to include a code line needed later for the MixIS approach. ```{r stancode_horseshoe} # Note: some syntax used in this program requires RStan >= 2.26 (or CmdStanR) # To use an older version of RStan change the line declaring `y` to: # int y[N]; stancode_horseshoe <- " data { int N; int P; array[N] int y; matrix [N,P] X; real scale_global; int mixis; } transformed data { real nu_global=1; // degrees of freedom for the half-t priors for tau real nu_local=1; // degrees of freedom for the half-t priors for lambdas // (nu_local = 1 corresponds to the horseshoe) real slab_scale=2;// for the regularized horseshoe real slab_df=100; // for the regularized horseshoe } parameters { vector[P] z; // for non-centered parameterization real tau; // global shrinkage parameter vector [P] lambda; // local shrinkage parameter real caux; } transformed parameters { vector[P] beta; { vector[P] lambda_tilde; // 'truncated' local shrinkage parameter real c = slab_scale * sqrt(caux); // slab scale lambda_tilde = sqrt( c^2 * square(lambda) ./ (c^2 + tau^2*square(lambda))); beta = z .* lambda_tilde*tau; } } model { vector[N] means=X*beta; vector[N] log_lik; target += std_normal_lpdf(z); target += student_t_lpdf(lambda | nu_local, 0, 1); target += student_t_lpdf(tau | nu_global, 0, scale_global); target += inv_gamma_lpdf(caux | 0.5*slab_df, 0.5*slab_df); for (n in 1:N) { log_lik[n]= bernoulli_logit_lpmf(y[n] | means[n]); } target += sum(log_lik); if (mixis) { target += log_sum_exp(-log_lik); } } generated quantities { vector[N] means=X*beta; vector[N] log_lik; for (n in 1:N) { log_lik[n] = bernoulli_logit_lpmf(y[n] | means[n]); } } " ``` ## Dataset The _LSVT Voice Rehabilitation Data Set_ (see [link](https://archive.ics.uci.edu/ml/datasets/LSVT+Voice+Rehabilitation) for details) has $p=312$ covariates and $n=126$ observations with binary response. We construct data list for Stan. 
```{r, results='hide', warning=FALSE, message=FALSE, error=FALSE} data(voice) y <- voice$y X <- voice[2:length(voice)] n <- dim(X)[1] p <- dim(X)[2] p0 <- 10 scale_global <- 2*p0/(p-p0)/sqrt(n-1) standata <- list(N = n, P = p, X = as.matrix(X), y = c(y), scale_global = scale_global, mixis = 0) ``` Note that in our prior specification we divide the prior variance by the number of covariates $p$. This is often done in high-dimensional contexts to have a prior variance for the linear predictors $X\beta$ that remains bounded as $p$ increases. ## PSIS estimators and Pareto-$k$ diagnostics LOO-CV computations are challenging in this context due to high-dimensionality of the parameter space. To show that, we compute PSIS-LOO estimators, which require sampling from the posterior distribution, and inspect the associated Pareto-$k$ diagnostics. ```{r, results='hide', warning=FALSE} chains <- 4 n_iter <- 2000 warm_iter <- 1000 stanmodel <- stan_model(model_code = stancode_horseshoe) fit_post <- sampling(stanmodel, data = standata, chains = chains, iter = n_iter, warmup = warm_iter, refresh = 0) loo_post <-loo(fit_post) ``` ```{r} print(loo_post) ``` As we can see the diagnostics signal either "bad" or "very bad" Pareto-$k$ values for roughly $15-30\%$ of the observations which is a significant portion of the dataset. ## Mixture estimators We now compute the mixture estimators proposed in Silva and Zanella (2022). These require to sample from the following mixture of leave-one-out posteriors \begin{equation} q_{mix}(\theta) = \frac{\sum_{i=1}^n p(y_{-i}|\theta)p(\theta)}{\sum_{i=1}^np(y_{-i})}\propto p(\theta|y)\cdot \left(\sum_{i=1}^np(y_i|\theta)^{-1}\right). \end{equation} The code to generate a Stan model for the above mixture distribution is the same to the one for the posterior, just enabling one line of code with a _LogSumExp_ contribution to account for the last term in the equation above. ``` if (mixis) { target += log_sum_exp(-log_lik); } ``` We sample from the mixture and collect the log-likelihoods term. ```{r, results='hide', warnings=FALSE} standata$mixis <- 1 fit_mix <- sampling(stanmodel, data = standata, chains = chains, iter = n_iter, warmup = warm_iter, refresh = 0, pars = "log_lik") log_lik_mix <- extract(fit_mix)$log_lik ``` We now compute the mixture estimators, following the numerically stable implementation in Appendix A.2 of [Silva and Zanella (2022)](https://arxiv.org/abs/2209.09190). The code below makes use of the package "matrixStats". ```{r} l_common_mix <- rowLogSumExps(-log_lik_mix) log_weights <- -log_lik_mix - l_common_mix elpd_mixis <- logSumExp(-l_common_mix) - rowLogSumExps(t(log_weights)) ``` ## Comparison with benchmark values obtained with long simulations To evaluate the performance of mixture estimators (MixIS) we also generate _benchmark values_, i.e.\ accurate approximations of the LOO predictives $\{p(y_i|y_{-i})\}_{i=1,\dots,n}$, obtained by brute-force sampling from the leave-one-out posteriors directly, getting $90k$ samples from each and discarding the first $10k$ as warmup. This is computationally heavy, hence we have saved the results and we just load them in the current vignette. ```{r} data(voice_loo) elpd_loo <- voice_loo$elpd_loo ``` We can then compute the root mean squared error (RMSE) of the PSIS and mixture estimators relative to such benchmark values. 
```{r} elpd_psis <- loo_post$pointwise[,1] print(paste("RMSE(PSIS) =",round( sqrt(mean((elpd_loo-elpd_psis)^2)) ,2))) print(paste("RMSE(MixIS) =",round( sqrt(mean((elpd_loo-elpd_mixis)^2)) ,2))) ``` Here mixture estimator provides a reduction in RMSE. Note that this value would increase with the number of samples drawn from the posterior and mixture, since in this example the RMSE of MixIS will exhibit a CLT-type decay while the one of PSIS will converge at a slower rate (this can be verified by running the above code with a larger sample size; see also Figure 3 of Silva and Zanella (2022) for analogous results). We then compare the overall ELPD estimates with the brute force one. ```{r} elpd_psis <- loo_post$pointwise[,1] print(paste("ELPD (PSIS)=",round(sum(elpd_psis),2))) print(paste("ELPD (MixIS)=",round(sum(elpd_mixis),2))) print(paste("ELPD (brute force)=",round(sum(elpd_loo),2))) ``` In this example, MixIS provides a more accurate ELPD estimate closer to the brute force estimate, while PSIS severely overestimates the ELPD. Note that low accuracy of the PSIS ELPD estimate is expected in this example given the large number of large Pareto-$k$ values. In this example, the accuracy of MixIS estimate will also improve with bigger MCMC sample size. More generally, mixture estimators can be useful in situations where standard PSIS estimators struggle and return many large Pareto-$k$ values. In these contexts MixIS often provides more accurate LOO-CV and ELPD estimates with a single sampling routine (i.e. with a cost comparable to sampling from the original posterior). ## References Silva L. and Zanella G. (2022). Robust leave-one-out cross-validation for high-dimensional Bayesian models. Preprint at [arXiv:2209.09190](https://arxiv.org/abs/2209.09190) Vehtari A., Gelman A., and Gabry J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. *Statistics and Computing*, 27(5), 1413--1432. Preprint at [arXiv:1507.04544](https://arxiv.org/abs/1507.04544) Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. *Journal of Machine Learning Research*, 25(72):1-58. [PDF](https://jmlr.org/papers/v25/19-556.html) loo/inst/doc/loo2-non-factorized.Rmd0000644000176200001440000006624714641333357017054 0ustar liggesusers--- title: "Leave-one-out cross-validation for non-factorized models" author: "Aki Vehtari, Paul Bürkner and Jonah Gabry" date: "`r Sys.Date()`" output: html_vignette: toc: yes encoding: "UTF-8" params: EVAL: !r identical(Sys.getenv("NOT_CRAN"), "true") --- ```{r settings, child="children/SETTINGS-knitr.txt"} ``` ```{r more-knitr-ops, include=FALSE} knitr::opts_chunk$set( cache=TRUE, message=FALSE, warning=FALSE ) ``` ```{r, child="children/SEE-ONLINE.txt", eval = if (isTRUE(exists("params"))) !params$EVAL else TRUE} ``` # Introduction When computing ELPD-based LOO-CV for a Bayesian model we need to compute the log leave-one-out predictive densities $\log{p(y_i | y_{-i})}$ for every response value $y_i, \: i = 1, \ldots, N$, where $y_{-i}$ denotes all response values except observation $i$. 
To obtain $p(y_i | y_{-i})$, we need to have access to the pointwise likelihood $p(y_i\,|\, y_{-i}, \theta)$ and integrate over the model parameters $\theta$: $$ p(y_i\,|\,y_{-i}) = \int p(y_i\,|\, y_{-i}, \theta) \, p(\theta\,|\, y_{-i}) \,d \theta $$ Here, $p(\theta\,|\, y_{-i})$ is the leave-one-out posterior distribution for $\theta$, that is, the posterior distribution for $\theta$ obtained by fitting the model while holding out the $i$th observation (we will later show how refitting the model to data $y_{-i}$ can be avoided). If the observation model is formulated directly as the product of the pointwise observation models, we call it a *factorized* model. In this case, the likelihood is also the product of the pointwise likelihood contributions $p(y_i\,|\, y_{-i}, \theta)$. To better illustrate possible structures of the observation models, we formally divide $\theta$ into two parts, observation-specific latent variables $f = (f_1, \ldots, f_N)$ and hyperparameters $\psi$, so that $p(y_i\,|\, y_{-i}, \theta) = p(y_i\,|\, y_{-i}, f_i, \psi)$. Depending on the model, one of the two parts of $\theta$ may also be empty. In very simple models, such as linear regression models, latent variables are not explicitly presented and response values are conditionally independent given $\psi$, so that $p(y_i\,|\, y_{-i}, f_i, \psi) = p(y_i \,|\, \psi)$. The full likelihood can then be written in the familiar form $$ p(y \,|\, \psi) = \prod_{i=1}^N p(y_i \,|\, \psi), $$ where $y = (y_1, \ldots, y_N)$ denotes the vector of all responses. When the likelihood factorizes this way, the conditional pointwise log-likelihood can be obtained easily by computing $p(y_i\,|\, \psi)$ for each $i$ with computational cost $O(n)$. Yet, there are several reasons why a *non-factorized* observation model may be necessary or preferred. In non-factorized models, the joint likelihood of the response values $p(y \,|\, \theta)$ is not factorized into observation-specific components, but rather given directly as one joint expression. For some models, an analytic factorized formulation is simply not available in which case we speak of a *non-factorizable* model. Even in models whose observation model can be factorized in principle, it may still be preferable to use a non-factorized form for reasons of efficiency and numerical stability (Bürkner et al. 2020). Whether a non-factorized model is used by necessity or for efficiency and stability, it comes at the cost of having no direct access to the leave-one-out predictive densities and thus to the overall leave-one-out predictive accuracy. In theory, we can express the observation-specific likelihoods in terms of the joint likelihood via $$ p(y_i \,|\, y_{i-1}, \theta) = \frac{p(y \,|\, \theta)}{p(y_{-i} \,|\, \theta)} = \frac{p(y \,|\, \theta)}{\int p(y \,|\, \theta) \, d y_i}, $$ but the expression on the right-hand side may not always have an analytical solution. Computing $\log p(y_i \,|\, y_{-i}, \theta)$ for non-factorized models is therefore often impossible, or at least inefficient and numerically unstable. However, there is a large class of multivariate normal and Student-$t$ models for which there are efficient analytical solutions available. More details can be found in our paper about LOO-CV for non-factorized models (Bürkner, Gabry, & Vehtari, 2020), which is available as a preprint on arXiv (https://arxiv.org/abs/1810.10559). # LOO-CV for multivariate normal models In this vignette, we will focus on non-factorized multivariate normal models. 
Based on results of Sundararajan and Keerthi (2001), Bürkner et al. (2020) show that, for multivariate normal models with coriance matrix $C$, the LOO predictive mean and standard deviation can be computed as follows: \begin{align} \mu_{\tilde{y},-i} &= y_i-\bar{c}_{ii}^{-1} g_i \nonumber \\ \sigma_{\tilde{y},-i} &= \sqrt{\bar{c}_{ii}^{-1}}, \end{align} where $g_i$ and $\bar{c}_{ii}$ are \begin{align} g_i &= \left[C^{-1} y\right]_i \nonumber \\ \bar{c}_{ii} &= \left[C^{-1}\right]_{ii}. \end{align} Using these results, the log predictive density of the $i$th observation is then computed as $$ \log p(y_i \,|\, y_{-i},\theta) = - \frac{1}{2}\log(2\pi) - \frac{1}{2}\log \sigma^2_{-i} - \frac{1}{2}\frac{(y_i-\mu_{-i})^2}{\sigma^2_{-i}}. $$ Expressing this same equation in terms of $g_i$ and $\bar{c}_{ii}$, the log predictive density becomes: $$ \log p(y_i \,|\, y_{-i},\theta) = - \frac{1}{2}\log(2\pi) + \frac{1}{2}\log \bar{c}_{ii} - \frac{1}{2}\frac{g_i^2}{\bar{c}_{ii}}. $$ (Note that Vehtari et al. (2016) has a typo in the corresponding Equation 34.) From these equations we can now derive a recipe for obtaining the conditional pointwise log-likelihood for _all_ models that can be expressed conditionally in terms of a multivariate normal with invertible covariance matrix $C$. ## Approximate LOO-CV using integrated importance-sampling The above LOO equations for multivariate normal models are conditional on parameters $\theta$. Therefore, to obtain the leave-one-out predictive density $p(y_i \,|\, y_{-i})$ we need to integrate over $\theta$, $$ p(y_i\,|\,y_{-i}) = \int p(y_i\,|\,y_{-i}, \theta) \, p(\theta\,|\,y_{-i}) \,d\theta. $$ Here, $p(\theta\,|\,y_{-i})$ is the leave-one-out posterior distribution for $\theta$, that is, the posterior distribution for $\theta$ obtained by fitting the model while holding out the $i$th observation. To avoid the cost of sampling from $N$ leave-one-out posteriors, it is possible to take the posterior draws $\theta^{(s)}, \, s=1,\ldots,S$, from the \emph{full} posterior $p(\theta\,|\,y)$, and then approximate the above integral using integrated importance sampling (Vehtari et al., 2016, Section 3.6.1): $$ p(y_i\,|\,y_{-i}) \approx \frac{ \sum_{s=1}^S p(y_i\,|\,y_{-i},\,\theta^{(s)}) \,w_i^{(s)}}{ \sum_{s=1}^S w_i^{(s)}}, $$ where $w_i^{(s)}$ are importance weights. First we compute the raw importance ratios $$ r_i^{(s)} \propto \frac{1}{p(y_i \,|\, y_{-i}, \,\theta^{(s)})}, $$ and then stabilize them using Pareto smoothed importance sampling (PSIS, Vehtari et al, 2019) to obtain the weights $w_i^{(s)}$. The resulting approximation is referred to as PSIS-LOO (Vehtari et al, 2017). ## Exact LOO-CV with re-fitting In order to validate the approximate LOO procedure, and also in order to allow exact computations to be made for a small number of leave-one-out folds for which the Pareto $k$ diagnostic (Vehtari et al, 2024) indicates an unstable approximation, we need to consider how we might to do _exact_ leave-one-out CV for a non-factorized model. In the case of a Gaussian process that has the marginalization property, we could just drop the one row and column of $C$ corresponding to the held out out observation. This does not hold in general for multivariate normal models, however, and to keep the original prior we may need to maintain the full covariance matrix $C$ even when one of the observations is left out. The solution is to model $y_i$ as a missing observation and estimate it along with all of the other model parameters. 
For a conditional multivariate normal model, $\log p(y_i\,|\,y_{-i})$ can be computed as follows. First, we model $y_i$ as missing and denote the corresponding parameter $y_i^{\mathrm{mis}}$. Then, we define $$ y_{\mathrm{mis}(i)} = (y_1, \ldots, y_{i-1}, y_i^{\mathrm{mis}}, y_{i+1}, \ldots, y_N). $$ to be the same as the full set of observations $y$, except replacing $y_i$ with the parameter $y_i^{\mathrm{mis}}$. Second, we compute the LOO predictive mean and standard deviations as above, but replace $y$ with $y_{\mathrm{mis}(i)}$ in the computation of $\mu_{\tilde{y},-i}$: $$ \mu_{\tilde{y},-i} = y_{{\mathrm{mis}}(i)}-\bar{c}_{ii}^{-1}g_i, $$ where in this case we have $$ g_i = \left[ C^{-1} y_{\mathrm{mis}(i)} \right]_i. $$ The conditional log predictive density is then computed with the above $\mu_{\tilde{y},-i}$ and the left out observation $y_i$: $$ \log p(y_i\,|\,y_{-i},\theta) = - \frac{1}{2}\log(2\pi) - \frac{1}{2}\log \sigma^2_{\tilde{y},-i} - \frac{1}{2}\frac{(y_i-\mu_{\tilde{y},-i})^2}{\sigma^2_{\tilde{y},-i}}. $$ Finally, the leave-one-out predictive distribution can then be estimated as $$ p(y_i\,|\,y_{-i}) \approx \sum_{s=1}^S p(y_i\,|\,y_{-i}, \theta_{-i}^{(s)}), $$ where $\theta_{-i}^{(s)}$ are draws from the posterior distribution $p(\theta\,|\,y_{\mathrm{mis}(i)})$. # Lagged SAR models A common non-factorized multivariate normal model is the simultaneously autoregressive (SAR) model, which is frequently used for spatially correlated data. The lagged SAR model is defined as $$ y = \rho Wy + \eta + \epsilon $$ or equivalently $$ (I - \rho W)y = \eta + \epsilon, $$ where $\rho$ is the spatial correlation parameter and $W$ is a user-defined weight matrix. The matrix $W$ has entries $w_{ii} = 0$ along the diagonal and the off-diagonal entries $w_{ij}$ are larger when areas $i$ and $j$ are closer to each other. In a linear model, the predictor term $\eta$ is given by $\eta = X \beta$ with design matrix $X$ and regression coefficients $\beta$. However, since the above equation holds for arbitrary $\eta$, these results are not restricted to linear models. If we have $\epsilon \sim {\mathrm N}(0, \,\sigma^2 I)$, it follows that $$ (I - \rho W)y \sim {\mathrm N}(\eta, \sigma^2 I), $$ which corresponds to the following log PDF coded in **Stan**: ```{r lpdf, eval=FALSE} /** * Normal log-pdf for spatially lagged responses * * @param y Vector of response values. * @param mu Mean parameter vector. * @param sigma Positive scalar residual standard deviation. * @param rho Positive scalar autoregressive parameter. * @param W Spatial weight matrix. * * @return A scalar to be added to the log posterior. */ real normal_lagsar_lpdf(vector y, vector mu, real sigma, real rho, matrix W) { int N = rows(y); real inv_sigma2 = 1 / square(sigma); matrix[N, N] W_tilde = -rho * W; vector[N] half_pred; for (n in 1:N) W_tilde[n,n] += 1; half_pred = W_tilde * (y - mdivide_left(W_tilde, mu)); return 0.5 * log_determinant(crossprod(W_tilde) * inv_sigma2) - 0.5 * dot_self(half_pred) * inv_sigma2; } ``` For the purpose of computing LOO-CV, it makes sense to rewrite the SAR model in slightly different form. 
Conditional on $\rho$, $\eta$, and $\sigma$, if we write \begin{align} y-(I-\rho W)^{-1}\eta &\sim {\mathrm N}(0, \sigma^2(I-\rho W)^{-1}(I-\rho W)^{-T}), \end{align} or more compactly, with $\widetilde{W}=(I-\rho W)$, \begin{align} y-\widetilde{W}^{-1}\eta &\sim {\mathrm N}(0, \sigma^2(\widetilde{W}^{T}\widetilde{W})^{-1}), \end{align} then this has the same form as the zero mean Gaussian process from above. Accordingly, we can compute the leave-one-out predictive densities with the equations from Sundararajan and Keerthi (2001), replacing $y$ with $(y-\widetilde{W}^{-1}\eta)$ and taking the covariance matrix $C$ to be $\sigma^2(\widetilde{W}^{T}\widetilde{W})^{-1}$. ## Case Study: Neighborhood Crime in Columbus, Ohio In order to demonstrate how to carry out the computations implied by these equations, we will first fit a lagged SAR model to data on crime in 49 different neighborhoods of Columbus, Ohio during the year 1980. The data was originally described in Aneslin (1988) and ships with the **spdep** R package. In addition to the **loo** package, for this analysis we will use the **brms** interface to Stan to generate a Stan program and fit the model, and also the **bayesplot** and **ggplot2** packages for plotting. ```{r setup, cache=FALSE} library("loo") library("brms") library("bayesplot") library("ggplot2") color_scheme_set("brightblue") theme_set(theme_default()) SEED <- 10001 set.seed(SEED) # only sets seed for R (seed for Stan set later) # loads COL.OLD data frame and COL.nb neighbor list data(oldcol, package = "spdep") ``` The three variables in the data set relevant to this example are: * `CRIME`: the number of residential burglaries and vehicle thefts per thousand households in the neighbood * `HOVAL`: housing value in units of $1000 USD * `INC`: household income in units of $1000 USD ```{r data} str(COL.OLD[, c("CRIME", "HOVAL", "INC")]) ``` We will also use the object `COL.nb`, which is a list containing information about which neighborhoods border each other. From this list we will be able to construct the weight matrix to used to help account for the spatial dependency among the observations. ### Fit lagged SAR model A model predicting `CRIME` from `INC` and `HOVAL`, while accounting for the spatial dependency via an SAR structure, can be specified in **brms** as follows. ```{r fit, results="hide"} fit <- brm( CRIME ~ INC + HOVAL + sar(COL.nb, type = "lag"), data = COL.OLD, data2 = list(COL.nb = COL.nb), chains = 4, seed = SEED ) ``` The code above fits the model in **Stan** using a log PDF equivalent to the `normal_lagsar_lpdf` function we defined above. In the summary output below we see that both higher income and higher housing value predict lower crime rates in the neighborhood. Moreover, there seems to be substantial spatial correlation between adjacent neighborhoods, as indicated by the posterior distribution of the `lagsar` parameter. ```{r plot-lagsar, message=FALSE} lagsar <- as.matrix(fit, pars = "lagsar") estimates <- quantile(lagsar, probs = c(0.25, 0.5, 0.75)) mcmc_hist(lagsar) + vline_at(estimates, linetype = 2, size = 1) + ggtitle("lagsar: posterior median and 50% central interval") ``` ### Approximate LOO-CV After fitting the model, the next step is to compute the pointwise log-likelihood values needed for approximate LOO-CV. To do this we will use the recipe laid out in the previous sections. 
```{r approx} posterior <- as.data.frame(fit) y <- fit$data$CRIME N <- length(y) S <- nrow(posterior) loglik <- yloo <- sdloo <- matrix(nrow = S, ncol = N) for (s in 1:S) { p <- posterior[s, ] eta <- p$b_Intercept + p$b_INC * fit$data$INC + p$b_HOVAL * fit$data$HOVAL W_tilde <- diag(N) - p$lagsar * spdep::nb2mat(COL.nb) Cinv <- t(W_tilde) %*% W_tilde / p$sigma^2 g <- Cinv %*% (y - solve(W_tilde, eta)) cbar <- diag(Cinv) yloo[s, ] <- y - g / cbar sdloo[s, ] <- sqrt(1 / cbar) loglik[s, ] <- dnorm(y, yloo[s, ], sdloo[s, ], log = TRUE) } # use loo for psis smoothing log_ratios <- -loglik psis_result <- psis(log_ratios) ``` The quality of the PSIS-LOO approximation can be investigated graphically by plotting the Pareto-k estimate for each observation. The approximation is robust up to values of $0.7$ (Vehtari et al, 2017, 2024). In the plot below, we see that the fourth observation is problematic and so may reduce the accuracy of the LOO-CV approximation. ```{r plot, cache = FALSE} plot(psis_result, label_points = TRUE) ``` We can also check that the conditional leave-one-out predictive distribution equations work correctly, for instance, using the last posterior draw: ```{r checklast, cache = FALSE} yloo_sub <- yloo[S, ] sdloo_sub <- sdloo[S, ] df <- data.frame( y = y, yloo = yloo_sub, ymin = yloo_sub - sdloo_sub * 2, ymax = yloo_sub + sdloo_sub * 2 ) ggplot(data=df, aes(x = y, y = yloo, ymin = ymin, ymax = ymax)) + geom_errorbar( width = 1, color = "skyblue3", position = position_jitter(width = 0.25) ) + geom_abline(color = "gray30", size = 1.2) + geom_point() ``` Finally, we use PSIS-LOO to approximate the expected log predictive density (ELPD) for new data, which we will validate using exact LOO-CV in the upcoming section. ```{r psisloo} (psis_loo <- loo(loglik)) ``` ### Exact LOO-CV Exact LOO-CV for the above example is somewhat more involved, as we need to re-fit the model $N$ times and each time model the held-out data point as a parameter. First, we create an empty dummy model that we will update below as we loop over the observations. ```{r fit_dummy, cache = TRUE} # see help("mi", "brms") for details on the mi() usage fit_dummy <- brm( CRIME | mi() ~ INC + HOVAL + sar(COL.nb, type = "lag"), data = COL.OLD, data2 = list(COL.nb = COL.nb), chains = 0 ) ``` Next, we fit the model $N$ times, each time leaving out a single observation and then computing the log predictive density for that observation. For obvious reasons, this takes much longer than the approximation we computed above, but it is necessary in order to validate the approximate LOO-CV method. Thanks to the PSIS-LOO approximation, in general doing these slow exact computations can be avoided. 
```{r exact-loo-cv, results="hide", message=FALSE, warning=FALSE, cache = TRUE} S <- 500 res <- vector("list", N) loglik <- matrix(nrow = S, ncol = N) for (i in seq_len(N)) { dat_mi <- COL.OLD dat_mi$CRIME[i] <- NA fit_i <- update(fit_dummy, newdata = dat_mi, # just for vignette chains = 1, iter = S * 2) posterior <- as.data.frame(fit_i) yloo <- sdloo <- rep(NA, S) for (s in seq_len(S)) { p <- posterior[s, ] y_miss_i <- y y_miss_i[i] <- p$Ymi eta <- p$b_Intercept + p$b_INC * fit_i$data$INC + p$b_HOVAL * fit_i$data$HOVAL W_tilde <- diag(N) - p$lagsar * spdep::nb2mat(COL.nb) Cinv <- t(W_tilde) %*% W_tilde / p$sigma^2 g <- Cinv %*% (y_miss_i - solve(W_tilde, eta)) cbar <- diag(Cinv); yloo[s] <- y_miss_i[i] - g[i] / cbar[i] sdloo[s] <- sqrt(1 / cbar[i]) loglik[s, i] <- dnorm(y[i], yloo[s], sdloo[s], log = TRUE) } ypred <- rnorm(S, yloo, sdloo) res[[i]] <- data.frame(y = c(posterior$Ymi, ypred)) res[[i]]$type <- rep(c("pp", "loo"), each = S) res[[i]]$obs <- i } res <- do.call(rbind, res) ``` A first step in the validation of the pointwise predictive density is to compare the distribution of the implied response values for the left-out observation to the distribution of the $y_i^{\mathrm{mis}}$ posterior-predictive values estimated as part of the model. If the pointwise predictive density is correct, the two distributions should match very closely (up to sampling error). In the plot below, we overlay these two distributions for the first four observations and see that they match very closely (as is the case for all $49$ observations of in this example). ```{r yplots, cache = FALSE, fig.width=10, out.width="95%", fig.asp = 0.3} res_sub <- res[res$obs %in% 1:4, ] ggplot(res_sub, aes(y, fill = type)) + geom_density(alpha = 0.6) + facet_wrap("obs", scales = "fixed", ncol = 4) ``` In the final step, we compute the ELPD based on the exact LOO-CV and compare it to the approximate PSIS-LOO result computed earlier. ```{r loo_exact, cache=FALSE} log_mean_exp <- function(x) { # more stable than log(mean(exp(x))) max_x <- max(x) max_x + log(sum(exp(x - max_x))) - log(length(x)) } exact_elpds <- apply(loglik, 2, log_mean_exp) exact_elpd <- sum(exact_elpds) round(exact_elpd, 1) ``` The results of the approximate and exact LOO-CV are similar but not as close as we would expect if there were no problematic observations. We can investigate this issue more closely by plotting the approximate against the exact pointwise ELPD values. ```{r compare, fig.height=5} df <- data.frame( approx_elpd = psis_loo$pointwise[, "elpd_loo"], exact_elpd = exact_elpds ) ggplot(df, aes(x = approx_elpd, y = exact_elpd)) + geom_abline(color = "gray30") + geom_point(size = 2) + geom_point(data = df[4, ], size = 3, color = "red3") + xlab("Approximate elpds") + ylab("Exact elpds") + coord_fixed(xlim = c(-16, -3), ylim = c(-16, -3)) ``` In the plot above the fourth data point ---the observation flagged as problematic by the PSIS-LOO approximation--- is colored in red and is the clear outlier. Otherwise, the correspondence between the exact and approximate values is strong. 
In fact, summing over the pointwise ELPD values and leaving out the fourth observation yields practically equivalent results for approximate and exact LOO-CV: ```{r pt4} without_pt_4 <- c( approx = sum(psis_loo$pointwise[-4, "elpd_loo"]), exact = sum(exact_elpds[-4]) ) round(without_pt_4, 1) ``` From this we can conclude that the difference we found when including *all* observations does not indicate a bug in our implementation of the approximate LOO-CV but rather a violation of its assumptions. # Working with Stan directly So far, we have specified the models in brms and only used Stan implicitely behind the scenes. This allowed us to focus on the primary purpose of validating approximate LOO-CV for non-factorized models. However, we would also like to show how everything can be set up in Stan directly. The Stan code brms generates is human readable and so we can use it to learn some of the essential aspects of Stan and the particular model we are implementing. The Stan program below is a slightly modified version of the code extracted via `stancode(fit_dummy)`: ```{r brms-stan-code, eval=FALSE} // generated with brms 2.2.0 functions { /** * Normal log-pdf for spatially lagged responses * * @param y Vector of response values. * @param mu Mean parameter vector. * @param sigma Positive scalar residual standard deviation. * @param rho Positive scalar autoregressive parameter. * @param W Spatial weight matrix. * * @return A scalar to be added to the log posterior. */ real normal_lagsar_lpdf(vector y, vector mu, real sigma, real rho, matrix W) { int N = rows(y); real inv_sigma2 = 1 / square(sigma); matrix[N, N] W_tilde = -rho * W; vector[N] half_pred; for (n in 1:N) W_tilde[n, n] += 1; half_pred = W_tilde * (y - mdivide_left(W_tilde, mu)); return 0.5 * log_determinant(crossprod(W_tilde) * inv_sigma2) - 0.5 * dot_self(half_pred) * inv_sigma2; } } data { int N; // total number of observations vector[N] Y; // response variable int Nmi; // number of missings int Jmi[Nmi]; // positions of missings int K; // number of population-level effects matrix[N, K] X; // population-level design matrix matrix[N, N] W; // spatial weight matrix int prior_only; // should the likelihood be ignored? } transformed data { int Kc = K - 1; matrix[N, K - 1] Xc; // centered version of X vector[K - 1] means_X; // column means of X before centering for (i in 2:K) { means_X[i - 1] = mean(X[, i]); Xc[, i - 1] = X[, i] - means_X[i - 1]; } } parameters { vector[Nmi] Ymi; // estimated missings vector[Kc] b; // population-level effects real temp_Intercept; // temporary intercept real sigma; // residual SD real lagsar; // SAR parameter } transformed parameters { } model { vector[N] Yl = Y; vector[N] mu = Xc * b + temp_Intercept; Yl[Jmi] = Ymi; // priors including all constants target += student_t_lpdf(temp_Intercept | 3, 34, 17); target += student_t_lpdf(sigma | 3, 0, 17) - 1 * student_t_lccdf(0 | 3, 0, 17); // likelihood including all constants if (!prior_only) { target += normal_lagsar_lpdf(Yl | mu, sigma, lagsar, W); } } generated quantities { // actual population-level intercept real b_Intercept = temp_Intercept - dot_product(means_X, b); } ``` Here we want to focus on two aspects of the Stan code. First, because there is no built-in function in Stan that calculates the log-likelihood for the lag-SAR model, we define a new `normal_lagsar_lpdf` function in the `functions` block of the Stan program. 
This is the same function we showed earlier in the vignette and it can be used to compute the log-likelihood in an efficient and numerically stable way. The `_lpdf` suffix used in the function name informs Stan that this is a log probability density function. Second, this Stan program nicely illustrates how to set up missing value imputation. Instead of just computing the log-likelihood for the observed responses `Y`, we define a new variable `Yl` which is equal to `Y` if the reponse is observed and equal to `Ymi` if the response is missing. The latter is in turn defined as a parameter and thus estimated along with all other paramters of the model. More details about missing value imputation in Stan can be found in the *Missing Data & Partially Known Parameters* section of the [Stan manual](https://mc-stan.org/users/documentation/index.html). The Stan code extracted from brms is not only helpful when learning Stan, but can also drastically speed up the specification of models that are not support by brms. If brms can fit a model similar but not identical to the desired model, we can let brms generate the Stan program for the similar model and then mold it into the program that implements the model we actually want to fit. Rather than calling `stancode()`, which requires an existing fitted model object, we recommend using `make_stancode()` and specifying the `save_model` argument to write the Stan program to a file. The corresponding data can be prepared with `make_standata()` and then manually amended if needed. Once the code and data have been edited, they can be passed to RStan's `stan()` function via the `file` and `data` arguments. # Conclusion In summary, we have shown how to set up and validate approximate and exact LOO-CV for non-factorized multivariate normal models using Stan with the **brms** and **loo** packages. Although we focused on the particular example of a spatial SAR model, the presented recipe applies more generally to models that can be expressed in terms of a multivariate normal likelihood.
# References Anselin L. (1988). *Spatial econometrics: methods and models*. Dordrecht: Kluwer Academic. Bürkner P. C., Gabry J., & Vehtari A. (2020). Efficient leave-one-out cross-validation for Bayesian non-factorized normal and Student-t models. *Computational Statistics*, \doi:10.1007/s00180-020-01045-4. [ArXiv preprint](https://arxiv.org/abs/1810.10559). Sundararajan S. & Keerthi S. S. (2001). Predictive approaches for choosing hyperparameters in Gaussian processes. *Neural Computation*, 13(5), 1103--1118. Vehtari A., Mononen T., Tolvanen V., Sivula T., & Winther O. (2016). Bayesian leave-one-out cross-validation approximations for Gaussian latent variable models. *Journal of Machine Learning Research*, 17(103), 1--38. [Online](https://jmlr.org/papers/v17/14-540.html). Vehtari A., Gelman A., & Gabry J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. *Statistics and Computing*, 27(5), 1413--1432. \doi:10.1007/s11222-016-9696-4. [Online](https://link.springer.com/article/10.1007/s11222-016-9696-4). [arXiv preprint arXiv:1507.04544](https://arxiv.org/abs/1507.04544). Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. *Journal of Machine Learning Research*, 25(72):1-58. [PDF](https://jmlr.org/papers/v25/19-556.html) loo/inst/doc/loo2-mixis.R0000644000176200001440000001047215122305334014714 0ustar liggesusersparams <- list(EVAL = TRUE) ## ----SETTINGS-knitr, include=FALSE-------------------------------------------- stopifnot(require(knitr)) opts_chunk$set( comment=NA, eval = if (isTRUE(exists("params"))) params$EVAL else FALSE, dev = "png", dpi = 150, fig.asp = 0.618, fig.width = 5, out.width = "60%", fig.align = "center" ) ## ----warnings=FALSE, message=FALSE-------------------------------------------- library("rstan") library("loo") library("matrixStats") options(mc.cores = parallel::detectCores(), parallel=FALSE) set.seed(24877) ## ----stancode_horseshoe------------------------------------------------------- # Note: some syntax used in this program requires RStan >= 2.26 (or CmdStanR) # To use an older version of RStan change the line declaring `y` to: # int y[N]; stancode_horseshoe <- " data { int N; int P; array[N] int y; matrix [N,P] X; real scale_global; int mixis; } transformed data { real nu_global=1; // degrees of freedom for the half-t priors for tau real nu_local=1; // degrees of freedom for the half-t priors for lambdas // (nu_local = 1 corresponds to the horseshoe) real slab_scale=2;// for the regularized horseshoe real slab_df=100; // for the regularized horseshoe } parameters { vector[P] z; // for non-centered parameterization real tau; // global shrinkage parameter vector [P] lambda; // local shrinkage parameter real caux; } transformed parameters { vector[P] beta; { vector[P] lambda_tilde; // 'truncated' local shrinkage parameter real c = slab_scale * sqrt(caux); // slab scale lambda_tilde = sqrt( c^2 * square(lambda) ./ (c^2 + tau^2*square(lambda))); beta = z .* lambda_tilde*tau; } } model { vector[N] means=X*beta; vector[N] log_lik; target += std_normal_lpdf(z); target += student_t_lpdf(lambda | nu_local, 0, 1); target += student_t_lpdf(tau | nu_global, 0, scale_global); target += inv_gamma_lpdf(caux | 0.5*slab_df, 0.5*slab_df); for (n in 1:N) { log_lik[n]= bernoulli_logit_lpmf(y[n] | means[n]); } target += sum(log_lik); if (mixis) { target += log_sum_exp(-log_lik); } } generated quantities { vector[N] means=X*beta; vector[N] log_lik; for (n in 1:N) { log_lik[n] = 
bernoulli_logit_lpmf(y[n] | means[n]); } } " ## ----results='hide', warning=FALSE, message=FALSE, error=FALSE---------------- data(voice) y <- voice$y X <- voice[2:length(voice)] n <- dim(X)[1] p <- dim(X)[2] p0 <- 10 scale_global <- 2*p0/(p-p0)/sqrt(n-1) standata <- list(N = n, P = p, X = as.matrix(X), y = c(y), scale_global = scale_global, mixis = 0) ## ----results='hide', warning=FALSE-------------------------------------------- chains <- 4 n_iter <- 2000 warm_iter <- 1000 stanmodel <- stan_model(model_code = stancode_horseshoe) fit_post <- sampling(stanmodel, data = standata, chains = chains, iter = n_iter, warmup = warm_iter, refresh = 0) loo_post <-loo(fit_post) ## ----------------------------------------------------------------------------- print(loo_post) ## ----results='hide', warnings=FALSE------------------------------------------- standata$mixis <- 1 fit_mix <- sampling(stanmodel, data = standata, chains = chains, iter = n_iter, warmup = warm_iter, refresh = 0, pars = "log_lik") log_lik_mix <- extract(fit_mix)$log_lik ## ----------------------------------------------------------------------------- l_common_mix <- rowLogSumExps(-log_lik_mix) log_weights <- -log_lik_mix - l_common_mix elpd_mixis <- logSumExp(-l_common_mix) - rowLogSumExps(t(log_weights)) ## ----------------------------------------------------------------------------- data(voice_loo) elpd_loo <- voice_loo$elpd_loo ## ----------------------------------------------------------------------------- elpd_psis <- loo_post$pointwise[,1] print(paste("RMSE(PSIS) =",round( sqrt(mean((elpd_loo-elpd_psis)^2)) ,2))) print(paste("RMSE(MixIS) =",round( sqrt(mean((elpd_loo-elpd_mixis)^2)) ,2))) ## ----------------------------------------------------------------------------- elpd_psis <- loo_post$pointwise[,1] print(paste("ELPD (PSIS)=",round(sum(elpd_psis),2))) print(paste("ELPD (MixIS)=",round(sum(elpd_mixis),2))) print(paste("ELPD (brute force)=",round(sum(elpd_loo),2))) loo/inst/doc/loo2-weights.R0000644000176200001440000001141615122306001015224 0ustar liggesusersparams <- list(EVAL = TRUE) ## ----SETTINGS-knitr, include=FALSE-------------------------------------------- stopifnot(require(knitr)) opts_chunk$set( comment=NA, eval = if (isTRUE(exists("params"))) params$EVAL else FALSE, dev = "png", dpi = 150, fig.asp = 0.618, fig.width = 5, out.width = "60%", fig.align = "center" ) ## ----setup, message=FALSE----------------------------------------------------- library(rstanarm) library(loo) ## ----data--------------------------------------------------------------------- data(milk) d <- milk[complete.cases(milk),] d$neocortex <- d$neocortex.perc /100 str(d) ## ----fits, results="hide"----------------------------------------------------- fit1 <- stan_glm(kcal.per.g ~ 1, data = d, seed = 2030) fit2 <- update(fit1, formula = kcal.per.g ~ neocortex) fit3 <- update(fit1, formula = kcal.per.g ~ log(mass)) fit4 <- update(fit1, formula = kcal.per.g ~ neocortex + log(mass)) ## ----waic--------------------------------------------------------------------- waic1 <- waic(fit1) waic2 <- waic(fit2) waic3 <- waic(fit3) waic4 <- waic(fit4) waics <- c( waic1$estimates["elpd_waic", 1], waic2$estimates["elpd_waic", 1], waic3$estimates["elpd_waic", 1], waic4$estimates["elpd_waic", 1] ) ## ----loo---------------------------------------------------------------------- # note: the loo function accepts a 'cores' argument that we recommend specifying # when working with bigger datasets loo1 <- loo(fit1) loo2 <- loo(fit2) loo3 <- loo(fit3) loo4 <- loo(fit4) 
lpd_point <- cbind( loo1$pointwise[,"elpd_loo"], loo2$pointwise[,"elpd_loo"], loo3$pointwise[,"elpd_loo"], loo4$pointwise[,"elpd_loo"] ) ## ----print-loo---------------------------------------------------------------- print(loo3) print(loo4) ## ----weights------------------------------------------------------------------ waic_wts <- exp(waics) / sum(exp(waics)) pbma_wts <- pseudobma_weights(lpd_point, BB=FALSE) pbma_BB_wts <- pseudobma_weights(lpd_point) # default is BB=TRUE stacking_wts <- stacking_weights(lpd_point) round(cbind(waic_wts, pbma_wts, pbma_BB_wts, stacking_wts), 2) ## ----waic_wts_demo------------------------------------------------------------ waic_wts_demo <- exp(waics[c(1,1,1,1,1,1,1,1,1,1,2,3,4)]) / sum(exp(waics[c(1,1,1,1,1,1,1,1,1,1,2,3,4)])) round(waic_wts_demo, 3) ## ----stacking_weights--------------------------------------------------------- stacking_weights(lpd_point[,c(1,1,1,1,1,1,1,1,1,1,2,3,4)]) ## ----Kline-------------------------------------------------------------------- data(Kline) d <- Kline d$log_pop <- log(d$population) d$contact_high <- ifelse(d$contact=="high", 1, 0) str(d) ## ----fit10, results="hide"---------------------------------------------------- fit10 <- stan_glm( total_tools ~ log_pop + contact_high + log_pop * contact_high, family = poisson(link = "log"), data = d, prior = normal(0, 1, autoscale = FALSE), prior_intercept = normal(0, 100, autoscale = FALSE), seed = 2030 ) ## ----loo10-------------------------------------------------------------------- loo10 <- loo(fit10) print(loo10) ## ----loo10-threshold---------------------------------------------------------- loo10 <- loo(fit10, k_threshold=0.7) print(loo10) ## ----waic10------------------------------------------------------------------- waic10 <- waic(fit10) print(waic10) ## ----contact_high, results="hide"--------------------------------------------- fit11 <- update(fit10, formula = total_tools ~ log_pop + contact_high) fit12 <- update(fit10, formula = total_tools ~ log_pop) ## ----loo-contact_high--------------------------------------------------------- (loo11 <- loo(fit11)) (loo12 <- loo(fit12)) ## ----relo-contact_high-------------------------------------------------------- loo11 <- loo(fit11, k_threshold=0.7) loo12 <- loo(fit12, k_threshold=0.7) lpd_point <- cbind( loo10$pointwise[, "elpd_loo"], loo11$pointwise[, "elpd_loo"], loo12$pointwise[, "elpd_loo"] ) ## ----waic-contact_high-------------------------------------------------------- waic11 <- waic(fit11) waic12 <- waic(fit12) waics <- c( waic10$estimates["elpd_waic", 1], waic11$estimates["elpd_waic", 1], waic12$estimates["elpd_waic", 1] ) ## ----weights-contact_high----------------------------------------------------- waic_wts <- exp(waics) / sum(exp(waics)) pbma_wts <- pseudobma_weights(lpd_point, BB=FALSE) pbma_BB_wts <- pseudobma_weights(lpd_point) # default is BB=TRUE stacking_wts <- stacking_weights(lpd_point) round(cbind(waic_wts, pbma_wts, pbma_BB_wts, stacking_wts), 2) ## ----loo_model_weights-------------------------------------------------------- # using list of loo objects loo_list <- list(loo10, loo11, loo12) loo_model_weights(loo_list) loo_model_weights(loo_list, method = "pseudobma") loo_model_weights(loo_list, method = "pseudobma", BB = FALSE) loo/inst/doc/loo2-non-factorized.html0000644000176200001440000130067615122305744017266 0ustar liggesusers Leave-one-out cross-validation for non-factorized models

Leave-one-out cross-validation for non-factorized models

Aki Vehtari, Paul Bürkner and Jonah Gabry

2025-12-22

Introduction

When computing ELPD-based LOO-CV for a Bayesian model we need to compute the log leave-one-out predictive densities \(\log{p(y_i | y_{-i})}\) for every response value \(y_i, \: i = 1, \ldots, N\), where \(y_{-i}\) denotes all response values except observation \(i\). To obtain \(p(y_i | y_{-i})\), we need to have access to the pointwise likelihood \(p(y_i\,|\, y_{-i}, \theta)\) and integrate over the model parameters \(\theta\):

\[ p(y_i\,|\,y_{-i}) = \int p(y_i\,|\, y_{-i}, \theta) \, p(\theta\,|\, y_{-i}) \,d \theta \]

Here, \(p(\theta\,|\, y_{-i})\) is the leave-one-out posterior distribution for \(\theta\), that is, the posterior distribution for \(\theta\) obtained by fitting the model while holding out the \(i\)th observation (we will later show how refitting the model to data \(y_{-i}\) can be avoided).

If the observation model is formulated directly as the product of the pointwise observation models, we call it a factorized model. In this case, the likelihood is also the product of the pointwise likelihood contributions \(p(y_i\,|\, y_{-i}, \theta)\). To better illustrate possible structures of the observation models, we formally divide \(\theta\) into two parts, observation-specific latent variables \(f = (f_1, \ldots, f_N)\) and hyperparameters \(\psi\), so that \(p(y_i\,|\, y_{-i}, \theta) = p(y_i\,|\, y_{-i}, f_i, \psi)\). Depending on the model, one of the two parts of \(\theta\) may also be empty. In very simple models, such as linear regression models, latent variables are not explicitly presented and response values are conditionally independent given \(\psi\), so that \(p(y_i\,|\, y_{-i}, f_i, \psi) = p(y_i \,|\, \psi)\). The full likelihood can then be written in the familiar form

\[ p(y \,|\, \psi) = \prod_{i=1}^N p(y_i \,|\, \psi), \]

where \(y = (y_1, \ldots, y_N)\) denotes the vector of all responses. When the likelihood factorizes this way, the conditional pointwise log-likelihood can be obtained easily by computing \(p(y_i\,|\, \psi)\) for each \(i\) with computational cost \(O(n)\).
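For example, for a simple normal linear regression the pointwise computation is just one call to dnorm() per posterior draw. The following self-contained sketch (with simulated data and made-up stand-ins for posterior draws, purely for illustration) produces the \(S \times N\) log-likelihood matrix that could be passed directly to loo():

set.seed(1)
N <- 20; S <- 100
X <- cbind(1, rnorm(N))                 # design matrix with intercept
y <- rnorm(N)                           # response
beta <- matrix(rnorm(S * 2), S, 2)      # stand-in for S posterior draws of beta
sigma <- rexp(S)                        # stand-in for S posterior draws of sigma
log_lik <- matrix(NA_real_, S, N)
for (s in 1:S) {
  log_lik[s, ] <- dnorm(y, mean = X %*% beta[s, ], sd = sigma[s], log = TRUE)
}
# loo::loo(log_lik) would then compute PSIS-LOO from this matrix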

Yet, there are several reasons why a non-factorized observation model may be necessary or preferred. In non-factorized models, the joint likelihood of the response values \(p(y \,|\, \theta)\) is not factorized into observation-specific components, but rather given directly as one joint expression. For some models, an analytic factorized formulation is simply not available in which case we speak of a non-factorizable model. Even in models whose observation model can be factorized in principle, it may still be preferable to use a non-factorized form for reasons of efficiency and numerical stability (Bürkner et al. 2020).

Whether a non-factorized model is used by necessity or for efficiency and stability, it comes at the cost of having no direct access to the leave-one-out predictive densities and thus to the overall leave-one-out predictive accuracy. In theory, we can express the observation-specific likelihoods in terms of the joint likelihood via

\[ p(y_i \,|\, y_{-i}, \theta) = \frac{p(y \,|\, \theta)}{p(y_{-i} \,|\, \theta)} = \frac{p(y \,|\, \theta)}{\int p(y \,|\, \theta) \, d y_i}, \]

but the expression on the right-hand side may not always have an analytical solution. Computing \(\log p(y_i \,|\, y_{-i}, \theta)\) for non-factorized models is therefore often impossible, or at least inefficient and numerically unstable. However, there is a large class of multivariate normal and Student-\(t\) models for which there are efficient analytical solutions available.

More details can be found in our paper about LOO-CV for non-factorized models (Bürkner, Gabry, & Vehtari, 2020), which is available as a preprint on arXiv (https://arxiv.org/abs/1810.10559).

LOO-CV for multivariate normal models

In this vignette, we will focus on non-factorized multivariate normal models. Based on results of Sundararajan and Keerthi (2001), Bürkner et al. (2020) show that, for multivariate normal models with covariance matrix \(C\), the LOO predictive mean and standard deviation can be computed as follows:

\[\begin{align} \mu_{\tilde{y},-i} &= y_i-\bar{c}_{ii}^{-1} g_i \nonumber \\ \sigma_{\tilde{y},-i} &= \sqrt{\bar{c}_{ii}^{-1}}, \end{align}\] where \(g_i\) and \(\bar{c}_{ii}\) are \[\begin{align} g_i &= \left[C^{-1} y\right]_i \nonumber \\ \bar{c}_{ii} &= \left[C^{-1}\right]_{ii}. \end{align}\]

Using these results, the log predictive density of the \(i\)th observation is then computed as

\[ \log p(y_i \,|\, y_{-i},\theta) = - \frac{1}{2}\log(2\pi) - \frac{1}{2}\log \sigma^2_{-i} - \frac{1}{2}\frac{(y_i-\mu_{-i})^2}{\sigma^2_{-i}}. \]

Expressing this same equation in terms of \(g_i\) and \(\bar{c}_{ii}\), the log predictive density becomes:

\[ \log p(y_i \,|\, y_{-i},\theta) = - \frac{1}{2}\log(2\pi) + \frac{1}{2}\log \bar{c}_{ii} - \frac{1}{2}\frac{g_i^2}{\bar{c}_{ii}}. \] (Note that Vehtari et al. (2016) has a typo in the corresponding Equation 34.)

From these equations we can now derive a recipe for obtaining the conditional pointwise log-likelihood for all models that can be expressed conditionally in terms of a multivariate normal with invertible covariance matrix \(C\).
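
As a small illustration of this recipe, the following sketch evaluates the pointwise conditional log predictive densities for a single posterior draw. It takes the centered response (the response minus its conditional mean, so that the multivariate normal has mean zero, as assumed above) and an invertible covariance matrix C; the function name is just a placeholder.

loglik_loo_mvn <- function(y_centered, C) {
  Cinv <- solve(C)
  g <- as.vector(Cinv %*% y_centered)  # g_i = [C^{-1} y]_i
  cbar <- diag(Cinv)                   # cbar_ii = [C^{-1}]_ii
  # log p(y_i | y_-i, theta) expressed in terms of g_i and cbar_ii
  -0.5 * log(2 * pi) + 0.5 * log(cbar) - 0.5 * g^2 / cbar
}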

Approximate LOO-CV using integrated importance-sampling

The above LOO equations for multivariate normal models are conditional on parameters \(\theta\). Therefore, to obtain the leave-one-out predictive density \(p(y_i \,|\, y_{-i})\) we need to integrate over \(\theta\),

\[ p(y_i\,|\,y_{-i}) = \int p(y_i\,|\,y_{-i}, \theta) \, p(\theta\,|\,y_{-i}) \,d\theta. \]

Here, \(p(\theta\,|\,y_{-i})\) is the leave-one-out posterior distribution for \(\theta\), that is, the posterior distribution for \(\theta\) obtained by fitting the model while holding out the \(i\)th observation.

To avoid the cost of sampling from \(N\) leave-one-out posteriors, it is possible to take the posterior draws \(\theta^{(s)}, \, s=1,\ldots,S\), from the posterior \(p(\theta\,|\,y)\), and then approximate the above integral using integrated importance sampling (Vehtari et al., 2016, Section 3.6.1):

\[ p(y_i\,|\,y_{-i}) \approx \frac{ \sum_{s=1}^S p(y_i\,|\,y_{-i},\,\theta^{(s)}) \,w_i^{(s)}}{ \sum_{s=1}^S w_i^{(s)}}, \]

where \(w_i^{(s)}\) are importance weights. First we compute the raw importance ratios

\[ r_i^{(s)} \propto \frac{1}{p(y_i \,|\, y_{-i}, \,\theta^{(s)})}, \]

and then stabilize them using Pareto smoothed importance sampling (PSIS, Vehtari et al, 2024) to obtain the weights \(w_i^{(s)}\). The resulting approximation is referred to as PSIS-LOO (Vehtari et al, 2017).
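
Put together, if loglik is an \(S \times N\) matrix of the conditional log-likelihood values \(\log p(y_i \,|\, y_{-i}, \theta^{(s)})\) computed with the equations above, the PSIS-LOO estimate could be obtained with a sketch like the following (the loo() call used later in the vignette performs essentially these steps):

log_ratios <- -loglik           # raw log importance ratios (up to a constant)
psis_obj <- psis(log_ratios)    # Pareto smoothed importance sampling
lw <- weights(psis_obj)         # smoothed log-weights, normalized per observation
# self-normalized estimate of log p(y_i | y_-i) for each observation
elpd_loo_i <- apply(lw + loglik, 2, function(x) {
  max_x <- max(x)
  max_x + log(sum(exp(x - max_x)))
})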

Exact LOO-CV with re-fitting

In order to validate the approximate LOO procedure, and also in order to allow exact computations to be made for a small number of leave-one-out folds for which the Pareto \(k\) diagnostic (Vehtari et al, 2024) indicates an unstable approximation, we need to consider how we might do exact leave-one-out CV for a non-factorized model. In the case of a Gaussian process that has the marginalization property, we could just drop the one row and column of \(C\) corresponding to the held-out observation. This does not hold in general for multivariate normal models, however, and to keep the original prior we may need to maintain the full covariance matrix \(C\) even when one of the observations is left out.

The solution is to model \(y_i\) as a missing observation and estimate it along with all of the other model parameters. For a conditional multivariate normal model, \(\log p(y_i\,|\,y_{-i})\) can be computed as follows. First, we model \(y_i\) as missing and denote the corresponding parameter \(y_i^{\mathrm{mis}}\). Then, we define

\[ y_{\mathrm{mis}(i)} = (y_1, \ldots, y_{i-1}, y_i^{\mathrm{mis}}, y_{i+1}, \ldots, y_N). \] to be the same as the full set of observations \(y\), except replacing \(y_i\) with the parameter \(y_i^{\mathrm{mis}}\).

Second, we compute the LOO predictive mean and standard deviations as above, but replace \(y\) with \(y_{\mathrm{mis}(i)}\) in the computation of \(\mu_{\tilde{y},-i}\):

\[ \mu_{\tilde{y},-i} = y_{{\mathrm{mis}}(i)}-\bar{c}_{ii}^{-1}g_i, \]

where in this case we have

\[ g_i = \left[ C^{-1} y_{\mathrm{mis}(i)} \right]_i. \]

The conditional log predictive density is then computed with the above \(\mu_{\tilde{y},-i}\) and the left out observation \(y_i\):

\[ \log p(y_i\,|\,y_{-i},\theta) = - \frac{1}{2}\log(2\pi) - \frac{1}{2}\log \sigma^2_{\tilde{y},-i} - \frac{1}{2}\frac{(y_i-\mu_{\tilde{y},-i})^2}{\sigma^2_{\tilde{y},-i}}. \]

Finally, the leave-one-out predictive distribution can then be estimated as

\[ p(y_i\,|\,y_{-i}) \approx \frac{1}{S} \sum_{s=1}^S p(y_i\,|\,y_{-i}, \theta_{-i}^{(s)}), \]

where \(\theta_{-i}^{(s)}\) are draws from the posterior distribution \(p(\theta\,|\,y_{\mathrm{mis}(i)})\).

Lagged SAR models

A common non-factorized multivariate normal model is the simultaneously autoregressive (SAR) model, which is frequently used for spatially correlated data. The lagged SAR model is defined as

\[ y = \rho Wy + \eta + \epsilon \] or equivalently \[ (I - \rho W)y = \eta + \epsilon, \] where \(\rho\) is the spatial correlation parameter and \(W\) is a user-defined weight matrix. The matrix \(W\) has entries \(w_{ii} = 0\) along the diagonal and the off-diagonal entries \(w_{ij}\) are larger when areas \(i\) and \(j\) are closer to each other. In a linear model, the predictor term \(\eta\) is given by \(\eta = X \beta\) with design matrix \(X\) and regression coefficients \(\beta\). However, since the above equation holds for arbitrary \(\eta\), these results are not restricted to linear models.

If we have \(\epsilon \sim {\mathrm N}(0, \,\sigma^2 I)\), it follows that \[ (I - \rho W)y \sim {\mathrm N}(\eta, \sigma^2 I), \] which corresponds to the following log PDF coded in Stan:

/** 
 * Normal log-pdf for spatially lagged responses
 * 
 * @param y Vector of response values.
 * @param mu Mean parameter vector.
 * @param sigma Positive scalar residual standard deviation.
 * @param rho Positive scalar autoregressive parameter.
 * @param W Spatial weight matrix.
 *
 * @return A scalar to be added to the log posterior.
 */
real normal_lagsar_lpdf(vector y, vector mu, real sigma, 
                        real rho, matrix W) {
  int N = rows(y);
  real inv_sigma2 = 1 / square(sigma);
  matrix[N, N] W_tilde = -rho * W;
  vector[N] half_pred;
  
  for (n in 1:N) W_tilde[n,n] += 1;
  
  half_pred = W_tilde * (y - mdivide_left(W_tilde, mu));
  
  return 0.5 * log_determinant(crossprod(W_tilde) * inv_sigma2) -
         0.5 * dot_self(half_pred) * inv_sigma2;
}

For the purpose of computing LOO-CV, it makes sense to rewrite the SAR model in slightly different form. Conditional on \(\rho\), \(\eta\), and \(\sigma\), if we write

\[\begin{align} y-(I-\rho W)^{-1}\eta &\sim {\mathrm N}(0, \sigma^2(I-\rho W)^{-1}(I-\rho W)^{-T}), \end{align}\] or more compactly, with \(\widetilde{W}=(I-\rho W)\), \[\begin{align} y-\widetilde{W}^{-1}\eta &\sim {\mathrm N}(0, \sigma^2(\widetilde{W}^{T}\widetilde{W})^{-1}), \end{align}\]

then this has the same form as the zero mean Gaussian process from above. Accordingly, we can compute the leave-one-out predictive densities with the equations from Sundararajan and Keerthi (2001), replacing \(y\) with \((y-\widetilde{W}^{-1}\eta)\) and taking the covariance matrix \(C\) to be \(\sigma^2(\widetilde{W}^{T}\widetilde{W})^{-1}\).

Case Study: Neighborhood Crime in Columbus, Ohio

In order to demonstrate how to carry out the computations implied by these equations, we will first fit a lagged SAR model to data on crime in 49 different neighborhoods of Columbus, Ohio during the year 1980. The data was originally described in Anselin (1988) and ships with the spdep R package.

In addition to the loo package, for this analysis we will use the brms interface to Stan to generate a Stan program and fit the model, and also the bayesplot and ggplot2 packages for plotting.

library("loo")
library("brms")
library("bayesplot")
library("ggplot2")
color_scheme_set("brightblue")
theme_set(theme_default())


SEED <- 10001 
set.seed(SEED) # only sets seed for R (seed for Stan set later)

# loads COL.OLD data frame and COL.nb neighbor list
data(oldcol, package = "spdep") 

The three variables in the data set relevant to this example are:

  • CRIME: the number of residential burglaries and vehicle thefts per thousand households in the neighborhood
  • HOVAL: housing value in units of $1000 USD
  • INC: household income in units of $1000 USD
str(COL.OLD[, c("CRIME", "HOVAL", "INC")])
'data.frame':   49 obs. of  3 variables:
 $ CRIME: num  18.802 32.388 38.426 0.178 15.726 ...
 $ HOVAL: num  44.6 33.2 37.1 75 80.5 ...
 $ INC  : num  21.23 4.48 11.34 8.44 19.53 ...

We will also use the object COL.nb, which is a list containing information about which neighborhoods border each other. From this list we will be able to construct the weight matrix used to help account for the spatial dependency among the observations.
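
The weight matrix itself can be built from the neighbor list with the spdep package (nb2mat() row-standardizes the weights by default); this small check is not part of the original analysis:

W <- spdep::nb2mat(COL.nb)
dim(W)              # 49 x 49
all(diag(W) == 0)   # no self-neighbors, so the diagonal is zero as required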

Fit lagged SAR model

A model predicting CRIME from INC and HOVAL, while accounting for the spatial dependency via an SAR structure, can be specified in brms as follows.

fit <- brm(
  CRIME ~ INC + HOVAL + sar(COL.nb, type = "lag"), 
  data = COL.OLD,
  data2 = list(COL.nb = COL.nb),
  chains = 4,
  seed = SEED
)

The code above fits the model in Stan using a log PDF equivalent to the normal_lagsar_lpdf function we defined above. In the summary output below we see that both higher income and higher housing value predict lower crime rates in the neighborhood. Moreover, there seems to be substantial spatial correlation between adjacent neighborhoods, as indicated by the posterior distribution of the lagsar parameter.

lagsar <- as.matrix(fit, pars = "lagsar")
estimates <- quantile(lagsar, probs = c(0.25, 0.5, 0.75))
mcmc_hist(lagsar) + 
  vline_at(estimates, linetype = 2, size = 1) +
  ggtitle("lagsar: posterior median and 50% central interval")

Approximate LOO-CV

After fitting the model, the next step is to compute the pointwise log-likelihood values needed for approximate LOO-CV. To do this we will use the recipe laid out in the previous sections.

posterior <- as.data.frame(fit)
y <- fit$data$CRIME
N <- length(y)
S <- nrow(posterior)
loglik <- yloo <- sdloo <- matrix(nrow = S, ncol = N)

for (s in 1:S) {
  p <- posterior[s, ]
  eta <- p$b_Intercept + p$b_INC * fit$data$INC + p$b_HOVAL * fit$data$HOVAL
  W_tilde <- diag(N) - p$lagsar * spdep::nb2mat(COL.nb)
  Cinv <- t(W_tilde) %*% W_tilde / p$sigma^2
  g <- Cinv %*% (y - solve(W_tilde, eta))
  cbar <- diag(Cinv)
  yloo[s, ] <- y - g / cbar
  sdloo[s, ] <- sqrt(1 / cbar)
  loglik[s, ] <- dnorm(y, yloo[s, ], sdloo[s, ], log = TRUE)
}

# use loo for psis smoothing
log_ratios <- -loglik
psis_result <- psis(log_ratios)

The quality of the PSIS-LOO approximation can be investigated graphically by plotting the Pareto-k estimate for each observation. The approximation is robust up to values of \(0.7\) (Vehtari et al, 2017, 2024). In the plot below, we see that the fourth observation is problematic and so may reduce the accuracy of the LOO-CV approximation.

plot(psis_result, label_points = TRUE)
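
The indices of the flagged observations can also be listed directly using loo's diagnostic helpers, for example:

pareto_k_ids(psis_result, threshold = 0.7)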

We can also check that the conditional leave-one-out predictive distribution equations work correctly, for instance, using the last posterior draw:

yloo_sub <- yloo[S, ]
sdloo_sub <- sdloo[S, ]
df <- data.frame(
  y = y, 
  yloo = yloo_sub,
  ymin = yloo_sub - sdloo_sub * 2,
  ymax = yloo_sub + sdloo_sub * 2
)
ggplot(data=df, aes(x = y, y = yloo, ymin = ymin, ymax = ymax)) +
  geom_errorbar(
    width = 1, 
    color = "skyblue3", 
    position = position_jitter(width = 0.25)
  ) +
  geom_abline(color = "gray30", size = 1.2) +
  geom_point()

Finally, we use PSIS-LOO to approximate the expected log predictive density (ELPD) for new data, which we will validate using exact LOO-CV in the upcoming section.

(psis_loo <- loo(loglik))

Computed from 4000 by 49 log-likelihood matrix.

         Estimate   SE
elpd_loo   -186.8 10.7
p_loo         8.0  5.0
looic       373.7 21.4
------
MCSE of elpd_loo is NA.
MCSE and ESS estimates assume independent draws (r_eff=1).

Pareto k diagnostic values:
                         Count Pct.    Min. ESS
(-Inf, 0.7]   (good)     48    98.0%   651     
   (0.7, 1]   (bad)       0     0.0%   <NA>    
   (1, Inf)   (very bad)  1     2.0%   <NA>    
See help('pareto-k-diagnostic') for details.
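
As the printed note indicates, the MCSE and ESS estimates above assume independent draws. Since the draws come from four MCMC chains, we could also supply relative efficiencies. The sketch below assumes the rows of loglik are ordered by chain, with four chains of equal length:

chain_id <- rep(1:4, each = S / 4)
r_eff <- relative_eff(exp(loglik), chain_id = chain_id)
psis_loo_r_eff <- loo(loglik, r_eff = r_eff)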

Exact LOO-CV

Exact LOO-CV for the above example is somewhat more involved, as we need to re-fit the model \(N\) times and each time model the held-out data point as a parameter. First, we create an empty dummy model that we will update below as we loop over the observations.

# see help("mi", "brms") for details on the mi() usage
fit_dummy <- brm(
  CRIME | mi() ~ INC + HOVAL + sar(COL.nb, type = "lag"), 
  data = COL.OLD,
  data2 = list(COL.nb = COL.nb),
  chains = 0
)

Next, we fit the model \(N\) times, each time leaving out a single observation and then computing the log predictive density for that observation. For obvious reasons, this takes much longer than the approximation we computed above, but it is necessary in order to validate the approximate LOO-CV method. Thanks to the PSIS-LOO approximation, in general doing these slow exact computations can be avoided.

S <- 500
res <- vector("list", N)
loglik <- matrix(nrow = S, ncol = N)
for (i in seq_len(N)) {
  dat_mi <- COL.OLD
  dat_mi$CRIME[i] <- NA
  fit_i <- update(fit_dummy, newdata = dat_mi, 
                  # just for vignette
                  chains = 1, iter = S * 2)
  posterior <- as.data.frame(fit_i)
  yloo <- sdloo <- rep(NA, S)
  for (s in seq_len(S)) {
    p <- posterior[s, ]
    y_miss_i <- y
    y_miss_i[i] <- p$Ymi
    eta <- p$b_Intercept + p$b_INC * fit_i$data$INC + p$b_HOVAL * fit_i$data$HOVAL
    W_tilde <- diag(N) - p$lagsar * spdep::nb2mat(COL.nb)
    Cinv <- t(W_tilde) %*% W_tilde / p$sigma^2
    g <- Cinv %*% (y_miss_i - solve(W_tilde, eta))
    cbar <- diag(Cinv)
    yloo[s] <- y_miss_i[i] - g[i] / cbar[i]
    sdloo[s] <- sqrt(1 / cbar[i])
    loglik[s, i] <- dnorm(y[i], yloo[s], sdloo[s], log = TRUE)
  }
  ypred <- rnorm(S, yloo, sdloo)
  res[[i]] <- data.frame(y = c(posterior$Ymi, ypred))
  res[[i]]$type <- rep(c("pp", "loo"), each = S)
  res[[i]]$obs <- i
}
res <- do.call(rbind, res)

A first step in the validation of the pointwise predictive density is to compare the distribution of the implied response values for the left-out observation to the distribution of the \(y_i^{\mathrm{mis}}\) posterior-predictive values estimated as part of the model. If the pointwise predictive density is correct, the two distributions should match very closely (up to sampling error). In the plot below, we overlay these two distributions for the first four observations and see that they match very closely (as is the case for all \(49\) observations in this example).

res_sub <- res[res$obs %in% 1:4, ]
ggplot(res_sub, aes(y, fill = type)) +
  geom_density(alpha = 0.6) +
  facet_wrap("obs", scales = "fixed", ncol = 4)

In the final step, we compute the ELPD based on the exact LOO-CV and compare it to the approximate PSIS-LOO result computed earlier.

log_mean_exp <- function(x) {
  # more stable than log(mean(exp(x)))
  max_x <- max(x)
  max_x + log(sum(exp(x - max_x))) - log(length(x))
}
exact_elpds <- apply(loglik, 2, log_mean_exp)
exact_elpd <- sum(exact_elpds)
round(exact_elpd, 1)
[1] -188.9
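
For a quick side-by-side look at the totals, we can place the exact result next to the PSIS-LOO estimate obtained earlier:

round(c(approx = psis_loo$estimates["elpd_loo", "Estimate"], exact = exact_elpd), 1)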

The results of the approximate and exact LOO-CV are similar but not as close as we would expect if there were no problematic observations. We can investigate this issue more closely by plotting the approximate against the exact pointwise ELPD values.

df <- data.frame(
  approx_elpd = psis_loo$pointwise[, "elpd_loo"],
  exact_elpd = exact_elpds
)
ggplot(df, aes(x = approx_elpd, y = exact_elpd)) +
  geom_abline(color = "gray30") +
  geom_point(size = 2) +
  geom_point(data = df[4, ], size = 3, color = "red3") +
  xlab("Approximate elpds") +
  ylab("Exact elpds") +
  coord_fixed(xlim = c(-16, -3), ylim = c(-16, -3))

In the plot above, the fourth data point (the observation flagged as problematic by the PSIS-LOO approximation) is colored in red and is the clear outlier. Otherwise, the correspondence between the exact and approximate values is strong. In fact, summing over the pointwise ELPD values and leaving out the fourth observation yields practically equivalent results for approximate and exact LOO-CV:

without_pt_4 <- c(
  approx = sum(psis_loo$pointwise[-4, "elpd_loo"]),
  exact = sum(exact_elpds[-4])  
)
round(without_pt_4, 1)
approx  exact 
-173.2 -173.1 

From this we can conclude that the difference we found when including all observations does not indicate a bug in our implementation of the approximate LOO-CV but rather a violation of its assumptions.

Working with Stan directly

So far, we have specified the models in brms and only used Stan implicitly behind the scenes. This allowed us to focus on the primary purpose of validating approximate LOO-CV for non-factorized models. However, we would also like to show how everything can be set up in Stan directly. The Stan code brms generates is human readable and so we can use it to learn some of the essential aspects of Stan and the particular model we are implementing. The Stan program below is a slightly modified version of the code extracted via stancode(fit_dummy):

// generated with brms 2.2.0
functions {
/** 
 * Normal log-pdf for spatially lagged responses
 * 
 * @param y Vector of response values.
 * @param mu Mean parameter vector.
 * @param sigma Positive scalar residual standard deviation.
 * @param rho Positive scalar autoregressive parameter.
 * @param W Spatial weight matrix.
 *
 * @return A scalar to be added to the log posterior.
 */
  real normal_lagsar_lpdf(vector y, vector mu, real sigma,
                          real rho, matrix W) {
    int N = rows(y);
    real inv_sigma2 = 1 / square(sigma);
    matrix[N, N] W_tilde = -rho * W;
    vector[N] half_pred;
    for (n in 1:N) W_tilde[n, n] += 1;
    half_pred = W_tilde * (y - mdivide_left(W_tilde, mu));
    return 0.5 * log_determinant(crossprod(W_tilde) * inv_sigma2) -
           0.5 * dot_self(half_pred) * inv_sigma2;
  }
}
data {
  int<lower=1> N;  // total number of observations
  vector[N] Y;  // response variable
  int<lower=0> Nmi;  // number of missings
  int<lower=1> Jmi[Nmi];  // positions of missings
  int<lower=1> K;  // number of population-level effects
  matrix[N, K] X;  // population-level design matrix
  matrix[N, N] W;  // spatial weight matrix
  int prior_only;  // should the likelihood be ignored?
}
transformed data {
  int Kc = K - 1;
  matrix[N, K - 1] Xc;  // centered version of X
  vector[K - 1] means_X;  // column means of X before centering
  for (i in 2:K) {
    means_X[i - 1] = mean(X[, i]);
    Xc[, i - 1] = X[, i] - means_X[i - 1];
  }
}
parameters {
  vector[Nmi] Ymi;  // estimated missings
  vector[Kc] b;  // population-level effects
  real temp_Intercept;  // temporary intercept
  real<lower=0> sigma;  // residual SD
  real<lower=0,upper=1> lagsar;  // SAR parameter
}
transformed parameters {
}
model {
  vector[N] Yl = Y;
  vector[N] mu = Xc * b + temp_Intercept;
  Yl[Jmi] = Ymi;
  // priors including all constants
  target += student_t_lpdf(temp_Intercept | 3, 34, 17);
  target += student_t_lpdf(sigma | 3, 0, 17)
    - 1 * student_t_lccdf(0 | 3, 0, 17);
  // likelihood including all constants
  if (!prior_only) {
    target += normal_lagsar_lpdf(Yl | mu, sigma, lagsar, W);
  }
}
generated quantities {
  // actual population-level intercept
  real b_Intercept = temp_Intercept - dot_product(means_X, b);
}

Here we want to focus on two aspects of the Stan code. First, because there is no built-in function in Stan that calculates the log-likelihood for the lag-SAR model, we define a new normal_lagsar_lpdf function in the functions block of the Stan program. This is the same function we showed earlier in the vignette and it can be used to compute the log-likelihood in an efficient and numerically stable way. The _lpdf suffix used in the function name informs Stan that this is a log probability density function.

Second, this Stan program nicely illustrates how to set up missing value imputation. Instead of just computing the log-likelihood for the observed responses Y, we define a new variable Yl which is equal to Y if the response is observed and equal to Ymi if the response is missing. The latter is in turn defined as a parameter and thus estimated along with all other parameters of the model. More details about missing value imputation in Stan can be found in the Missing Data & Partially Known Parameters section of the Stan manual.

The Stan code extracted from brms is not only helpful when learning Stan, but can also drastically speed up the specification of models that are not supported by brms. If brms can fit a model similar but not identical to the desired model, we can let brms generate the Stan program for the similar model and then mold it into the program that implements the model we actually want to fit. Rather than calling stancode(), which requires an existing fitted model object, we recommend using make_stancode() and specifying the save_model argument to write the Stan program to a file. The corresponding data can be prepared with make_standata() and then manually amended if needed. Once the code and data have been edited, they can be passed to RStan’s stan() function via the file and data arguments.
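
A sketch of that workflow for the model used in this vignette might look as follows; the file name is arbitrary and the generated code and data would typically be edited before fitting:

scode <- make_stancode(
  CRIME | mi() ~ INC + HOVAL + sar(COL.nb, type = "lag"),
  data = COL.OLD,
  data2 = list(COL.nb = COL.nb),
  save_model = "sar_model.stan"
)
sdata <- make_standata(
  CRIME | mi() ~ INC + HOVAL + sar(COL.nb, type = "lag"),
  data = COL.OLD,
  data2 = list(COL.nb = COL.nb)
)
# after editing sar_model.stan (and sdata, if needed):
fit_stan <- rstan::stan(file = "sar_model.stan", data = sdata)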

Conclusion

In summary, we have shown how to set up and validate approximate and exact LOO-CV for non-factorized multivariate normal models using Stan with the brms and loo packages. Although we focused on the particular example of a spatial SAR model, the presented recipe applies more generally to models that can be expressed in terms of a multivariate normal likelihood.


References

Anselin L. (1988). Spatial econometrics: methods and models. Dordrecht: Kluwer Academic.

Bürkner P. C., Gabry J., & Vehtari A. (2020). Efficient leave-one-out cross-validation for Bayesian non-factorized normal and Student-t models. Computational Statistics, doi:10.1007/s00180-020-01045-4. ArXiv preprint.

Sundararajan S. & Keerthi S. S. (2001). Predictive approaches for choosing hyperparameters in Gaussian processes. Neural Computation, 13(5), 1103–1118.

Vehtari A., Mononen T., Tolvanen V., Sivula T., & Winther O. (2016). Bayesian leave-one-out cross-validation approximations for Gaussian latent variable models. Journal of Machine Learning Research, 17(103), 1–38. Online.

Vehtari A., Gelman A., & Gabry J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. Statistics and Computing, 27(5), 1413–1432. doi:10.1007/s11222-016-9696-4. Online. arXiv preprint arXiv:1507.04544.

Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. Journal of Machine Learning Research, 25(72):1-58. PDF

loo/inst/doc/loo2-moment-matching.html0000644000176200001440000012621215122305466017423 0ustar liggesusers Avoiding model refits in leave-one-out cross-validation with moment matching

Avoiding model refits in leave-one-out cross-validation with moment matching

Topi Paananen, Paul Bürkner, Aki Vehtari and Jonah Gabry

2025-12-22

Introduction

This vignette demonstrates how to improve the Monte Carlo sampling accuracy of leave-one-out cross-validation with the loo package and Stan. The loo package automatically monitors the sampling accuracy using Pareto \(k\) diagnostics for each observation. Here, we present a method for quickly improving the accuracy when the Pareto diagnostics indicate problems. This is done by performing some additional computations using the existing posterior sample. If successful, this will decrease the Pareto \(k\) values, making the model assessment more reliable. loo also stores the original Pareto \(k\) values with the name influence_pareto_k which are not changed. They can be used as a diagnostic of how much each observation influences the posterior distribution.

The methodology presented is based on the paper

  • Paananen, T., Piironen, J., Buerkner, P.-C., Vehtari, A. (2021). Implicitly adaptive importance sampling. Statistics and Computing, 31, 16. doi:10.1007/s11222-020-09982-2. arXiv preprint arXiv:1906.08850.

More information about the Pareto \(k\) diagnostics is given in the following papers

  • Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. Statistics and Computing. 27(5), 1413–1432. doi:10.1007/s11222-016-9696-4. Links: published | arXiv preprint.

  • Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. Journal of Machine Learning Research, 25(72):1-58. PDF

Example: Eradication of Roaches

We will use the same example as in the vignette Using the loo package (version >= 2.0.0). See the demo for a description of the problem and data. We will use the same Poisson regression model as in the case study.

Coding the Stan model

Here is the Stan code for fitting the Poisson regression model, which we will use for modeling the number of roaches.

# Note: some syntax used in this Stan program requires RStan >= 2.26 (or CmdStanR)
# To use an older version of RStan change the line declaring `y` to: int y[N];
stancode <- "
data {
  int<lower=1> K;
  int<lower=1> N;
  matrix[N,K] x;
  array[N] int y;
  vector[N] offset_; // offset is reserved keyword in Stan so use offset_

  real beta_prior_scale;
  real alpha_prior_scale;
}
parameters {
  vector[K] beta;
  real intercept;
}
model {
  y ~ poisson(exp(x * beta + intercept + offset_));
  beta ~ normal(0,beta_prior_scale);
  intercept ~ normal(0,alpha_prior_scale);
}
generated quantities {
  vector[N] log_lik;
  for (n in 1:N) {
    log_lik[n] = poisson_lpmf(y[n] | exp(x[n] * beta + intercept + offset_[n]));
  }
}
"

Following the usual approach recommended in Writing Stan programs for use with the loo package, we compute the log-likelihood for each observation in the generated quantities block of the Stan program.
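
Once the model has been fitted (below), the log_lik variable defined in generated quantities can be extracted and passed to loo() manually; this sketch is roughly what the loo() method for stanfit objects does for us:

LL <- loo::extract_log_lik(fit, parameter_name = "log_lik", merge_chains = FALSE)
r_eff <- loo::relative_eff(exp(LL))
loo_manual <- loo::loo(LL, r_eff = r_eff)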

Setup

In addition to loo, we load the rstan package for fitting the model, and the rstanarm package for the data.

library("rstan")
library("loo")
seed <- 9547
set.seed(seed)

Fitting the model with RStan

Next we fit the model in Stan using the rstan package:

# Prepare data
data(roaches, package = "rstanarm")
roaches$roach1 <- sqrt(roaches$roach1)
y <- roaches$y
x <- roaches[, c("roach1", "treatment", "senior")]
offset <- log(roaches[, "exposure2"])
n <- dim(x)[1]
k <- dim(x)[2]

standata <- list(
  N = n,
  K = k,
  x = as.matrix(x),
  y = y,
  offset_ = offset,
  beta_prior_scale = 2.5,
  alpha_prior_scale = 5.0
)

# Compile
stanmodel <- stan_model(model_code = stancode)

# Fit model
fit <- sampling(stanmodel, data = standata, seed = seed, refresh = 0)
print(fit, pars = "beta")
Inference for Stan model: anon_model.
4 chains, each with iter=2000; warmup=1000; thin=1; 
post-warmup draws per chain=1000, total post-warmup draws=4000.

         mean se_mean   sd  2.5%   25%   50%   75% 97.5% n_eff Rhat
beta[1]  0.16       0 0.00  0.16  0.16  0.16  0.16  0.16  2802    1
beta[2] -0.57       0 0.02 -0.62 -0.59 -0.57 -0.55 -0.52  2399    1
beta[3] -0.32       0 0.03 -0.38 -0.34 -0.32 -0.29 -0.25  2561    1

Samples were drawn using NUTS(diag_e) at Mon Dec 22 11:16:41 2025.
For each parameter, n_eff is a crude measure of effective sample size,
and Rhat is the potential scale reduction factor on split chains (at 
convergence, Rhat=1).

Let us now evaluate the predictive performance of the model using loo().

loo1 <- loo(fit)
Replacing NAs in `r_eff` with 1s
Warning: Some Pareto k diagnostic values are too high. See help('pareto-k-diagnostic') for details.
loo1

Computed from 4000 by 262 log-likelihood matrix.

         Estimate     SE
elpd_loo  -5460.9  695.3
p_loo       259.4   56.8
looic     10921.8 1390.5
------
MCSE of elpd_loo is NA.
MCSE and ESS estimates assume MCMC draws (r_eff in [0.5, 1.0]).

Pareto k diagnostic values:
                         Count Pct.    Min. ESS
(-Inf, 0.7]   (good)     242   92.4%   185     
   (0.7, 1]   (bad)       12    4.6%   <NA>    
   (1, Inf)   (very bad)   8    3.1%   <NA>    
See help('pareto-k-diagnostic') for details.

The loo() function output warnings that there are some observations which are highly influential, and thus the accuracy of importance sampling is compromised as indicated by the large Pareto \(k\) diagnostic values (> 0.7). As discussed in the vignette Using the loo package (version >= 2.0.0), this may be an indication of model misspecification. Despite that, it is still beneficial to be able to evaluate the predictive performance of the model accurately.

Moment matching correction for importance sampling

To improve the accuracy of the loo() result above, we could perform leave-one-out cross-validation by explicitly leaving out single observations and refitting the model using MCMC repeatedly. However, the Pareto \(k\) diagnostics above indicate that there are 20 problematic observations, so this would require 20 model refits, which may take a lot of computation time.

Instead of refitting with MCMC, we can perform a faster moment matching correction to the importance sampling for the problematic observations. This can be done with the loo_moment_match() function in the loo package, which takes our existing loo object as input and modifies it. The moment matching requires some evaluations of the model posterior density. For models fitted with rstan, this can be conveniently done by using the existing stanfit object.

First, we show how the moment matching can be used for a model fitted using rstan. It only requires setting the argument moment_match to TRUE in the loo() function. Optionally, you can also set the argument k_threshold which determines the Pareto \(k\) threshold, above which moment matching is used. By default, it operates on all observations whose Pareto \(k\) value is larger than the sample size (\(S\)) specific threshold \(\min(1 - 1 / \log_{10}(S), 0.7)\) (which is \(0.7\) for \(S>2200\)).
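
For the \(S = 4000\) posterior draws used here, this default threshold works out to \(0.7\):

S <- 4000
min(1 - 1 / log10(S), 0.7)  # 1 - 1/log10(4000) is about 0.72, so the threshold is 0.7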

# available in rstan >= 2.21
loo2 <- loo(fit, moment_match = TRUE)
Replacing NAs in `r_eff` with 1s
loo2

Computed from 4000 by 262 log-likelihood matrix.

         Estimate     SE
elpd_loo  -5477.5  699.9
p_loo       276.0   63.3
looic     10955.0 1399.7
------
MCSE of elpd_loo is NA.
MCSE and ESS estimates assume MCMC draws (r_eff in [0.5, 1.0]).

All Pareto k estimates are good (k < 0.7).
See help('pareto-k-diagnostic') for details.

After the moment matching, all observations have the diagnostic Pareto \(k\) less than 0.7, meaning that the estimates are now reliable. The total elpd_loo estimate also changed from -5460.9 to -5477.5, showing that before moment matching, loo() overestimated the predictive performance of the model.

The updated Pareto \(k\) values stored in loo2$diagnostics$pareto_k are considered algorithmic diagnostic values that indicate the sampling accuracy. The original Pareto \(k\) values are stored in loo2$pointwise[,"influence_pareto_k"] and these are not modified by the moment matching. These can be considered as diagnostics for how big influence each observation has on the posterior distribution. In addition to the Pareto \(k\) diagnostics, moment matching also updates the effective sample size estimates.
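
A quick way to see the effect of the moment matching is to plot the unchanged influence Pareto \(k\) values against the updated algorithmic values (a small sketch using base R graphics):

k_influence <- loo2$pointwise[, "influence_pareto_k"]
k_algorithmic <- loo2$diagnostics$pareto_k
plot(k_influence, k_algorithmic,
     xlab = "Influence Pareto k", ylab = "Pareto k after moment matching")
abline(h = 0.7, v = 0.7, lty = 2)  # diagnostic threshold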

Using loo_moment_match() directly

The moment matching can also be performed by explicitly calling the function loo_moment_match(). This enables its use for models that were not fitted with rstan or another package with built-in support for loo_moment_match(). To use loo_moment_match(), the user must supply the model object x, the loo object, and five helper functions as arguments. The helper functions are

  • post_draws
    • A function that takes x as the first argument and returns a matrix of posterior draws of the model parameters, pars.
  • log_lik_i
    • A function that takes x and i and returns a matrix (one column per chain) or a vector (all chains stacked) of log-likelihood draws of the ith observation based on the model x. If the draws are obtained using MCMC, the matrix with MCMC chains separated is preferred.
  • unconstrain_pars
    • A function that takes arguments x and pars, and returns posterior draws on the unconstrained space based on the posterior draws on the constrained space passed via pars.
  • log_prob_upars
    • A function that takes arguments x and upars, and returns a matrix of log-posterior density values of the unconstrained posterior draws passed via upars.
  • log_lik_i_upars
    • A function that takes arguments x, upars, and i and returns a vector of log-likelihood draws of the ith observation based on the unconstrained posterior draws passed via upars.

Next, we show what the helper functions look like for RStan objects, and give an example of using loo_moment_match() directly. For stanfit objects from rstan, the functions look like this:

# create a named list of draws for use with rstan methods
.rstan_relist <- function(x, skeleton) {
  out <- utils::relist(x, skeleton)
  for (i in seq_along(skeleton)) {
    dim(out[[i]]) <- dim(skeleton[[i]])
  }
  out
}

# rstan helper function to get dims of parameters right
.create_skeleton <- function(pars, dims) {
  out <- lapply(seq_along(pars), function(i) {
    len_dims <- length(dims[[i]])
    if (len_dims < 1) {
      return(0)
    }
    return(array(0, dim = dims[[i]]))
  })
  names(out) <- pars
  out
}

# extract original posterior draws
post_draws_stanfit <- function(x, ...) {
  as.matrix(x)
}

# compute a matrix of log-likelihood values for the ith observation
# matrix contains information about the number of MCMC chains
log_lik_i_stanfit <- function(x, i, parameter_name = "log_lik", ...) {
  loo::extract_log_lik(x, parameter_name, merge_chains = FALSE)[,, i]
}

# transform parameters to the unconstrained space
unconstrain_pars_stanfit <- function(x, pars, ...) {
  skeleton <- .create_skeleton(x@sim$pars_oi, x@par_dims[x@sim$pars_oi])
  upars <- apply(pars, 1, FUN = function(theta) {
    rstan::unconstrain_pars(x, .rstan_relist(theta, skeleton))
  })
  # for one parameter models
  if (is.null(dim(upars))) {
    dim(upars) <- c(1, length(upars))
  }
  t(upars)
}

# compute log_prob for each posterior draw on the unconstrained space
log_prob_upars_stanfit <- function(x, upars, ...) {
  apply(
    upars,
    1,
    rstan::log_prob,
    object = x,
    adjust_transform = TRUE,
    gradient = FALSE
  )
}

# compute log_lik values based on the unconstrained parameters
log_lik_i_upars_stanfit <- function(
  x,
  upars,
  i,
  parameter_name = "log_lik",
  ...
) {
  S <- nrow(upars)
  out <- numeric(S)
  for (s in seq_len(S)) {
    out[s] <- rstan::constrain_pars(x, upars = upars[s, ])[[parameter_name]][i]
  }
  out
}

Using these functions, we can call loo_moment_match() to update the existing loo object.

loo3 <- loo::loo_moment_match.default(
  x = fit,
  loo = loo1,
  post_draws = post_draws_stanfit,
  log_lik_i = log_lik_i_stanfit,
  unconstrain_pars = unconstrain_pars_stanfit,
  log_prob_upars = log_prob_upars_stanfit,
  log_lik_i_upars = log_lik_i_upars_stanfit
)
loo3

Computed from 4000 by 262 log-likelihood matrix.

         Estimate     SE
elpd_loo  -5477.5  699.9
p_loo       276.0   63.3
looic     10955.0 1399.7
------
MCSE of elpd_loo is NA.
MCSE and ESS estimates assume MCMC draws (r_eff in [0.5, 1.0]).

All Pareto k estimates are good (k < 0.7).
See help('pareto-k-diagnostic') for details.

As expected, the result is identical to the previous result of loo2 <- loo(fit, moment_match = TRUE).
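
If desired, this can be verified programmatically, for example:

all.equal(loo2$estimates, loo3$estimates)
all.equal(pareto_k_values(loo2), pareto_k_values(loo3))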

References

Gelman, A., and Hill, J. (2007). Data Analysis Using Regression and Multilevel Hierarchical Models. Cambridge University Press.

Stan Development Team (2020) RStan: the R interface to Stan, Version 2.21.1 https://mc-stan.org

Paananen, T., Piironen, J., Buerkner, P.-C., Vehtari, A. (2021). Implicitly adaptive importance sampling. Statistics and Computing, 31, 16. doi:10.1007/s11222-020-09982-2. arXiv preprint arXiv:1906.08850.

Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. Statistics and Computing. 27(5), 1413–1432. doi:10.1007/s11222-016-9696-4. Links: published | arXiv preprint.

Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. Journal of Machine Learning Research, 25(72):1-58. PDF

loo/inst/doc/loo2-moment-matching.Rmd0000644000176200001440000003061115122274256017200 0ustar liggesusers--- title: "Avoiding model refits in leave-one-out cross-validation with moment matching" author: "Topi Paananen, Paul Bürkner, Aki Vehtari and Jonah Gabry" date: "`r Sys.Date()`" output: html_vignette: toc: yes params: EVAL: !r identical(Sys.getenv("NOT_CRAN"), "true") --- ```{r, child="children/SETTINGS-knitr.txt"} ``` ```{r, child="children/SEE-ONLINE.txt", eval = if (isTRUE(exists("params"))) !params$EVAL else TRUE} ``` # Introduction This vignette demonstrates how to improve the Monte Carlo sampling accuracy of leave-one-out cross-validation with the __loo__ package and Stan. The __loo__ package automatically monitors the sampling accuracy using Pareto $k$ diagnostics for each observation. Here, we present a method for quickly improving the accuracy when the Pareto diagnostics indicate problems. This is done by performing some additional computations using the existing posterior sample. If successful, this will decrease the Pareto $k$ values, making the model assessment more reliable. __loo__ also stores the original Pareto $k$ values with the name `influence_pareto_k` which are not changed. They can be used as a diagnostic of how much each observation influences the posterior distribution. The methodology presented is based on the paper * Paananen, T., Piironen, J., Buerkner, P.-C., Vehtari, A. (2020). Implicitly Adaptive Importance Sampling. [arXiv preprint arXiv:1906.08850](https://arxiv.org/abs/1906.08850). More information about the Pareto $k$ diagnostics is given in the following papers * Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. _Statistics and Computing_. 27(5), 1413--1432. \doi:10.1007/s11222-016-9696-4. Links: [published](https://link.springer.com/article/10.1007/s11222-016-9696-4) | [arXiv preprint](https://arxiv.org/abs/1507.04544). * Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. *Journal of Machine Learning Research*, 25(72):1-58. [PDF](https://jmlr.org/papers/v25/19-556.html) # Example: Eradication of Roaches We will use the same example as in the vignette [_Using the loo package (version >= 2.0.0)_](https://mc-stan.org/loo/articles/loo2-example.html). See the demo for a description of the problem and data. We will use the same Poisson regression model as in the case study. ## Coding the Stan model Here is the Stan code for fitting the Poisson regression model, which we will use for modeling the number of roaches. 
```{r stancode} # Note: some syntax used in this Stan program requires RStan >= 2.26 (or CmdStanR) # To use an older version of RStan change the line declaring `y` to: int y[N]; stancode <- " data { int K; int N; matrix[N,K] x; array[N] int y; vector[N] offset_; // offset is reserved keyword in Stan so use offset_ real beta_prior_scale; real alpha_prior_scale; } parameters { vector[K] beta; real intercept; } model { y ~ poisson(exp(x * beta + intercept + offset_)); beta ~ normal(0,beta_prior_scale); intercept ~ normal(0,alpha_prior_scale); } generated quantities { vector[N] log_lik; for (n in 1:N) { log_lik[n] = poisson_lpmf(y[n] | exp(x[n] * beta + intercept + offset_[n])); } } " ``` Following the usual approach recommended in [_Writing Stan programs for use with the loo package_](http://mc-stan.org/loo/articles/loo2-with-rstan.html), we compute the log-likelihood for each observation in the `generated quantities` block of the Stan program. ## Setup In addition to __loo__, we load the __rstan__ package for fitting the model, and the __rstanarm__ package for the data. ```{r setup, message=FALSE} library("rstan") library("loo") seed <- 9547 set.seed(seed) ``` ## Fitting the model with RStan Next we fit the model in Stan using the __rstan__ package: ```{r modelfit, message=FALSE} # Prepare data data(roaches, package = "rstanarm") roaches$roach1 <- sqrt(roaches$roach1) y <- roaches$y x <- roaches[, c("roach1", "treatment", "senior")] offset <- log(roaches[, "exposure2"]) n <- dim(x)[1] k <- dim(x)[2] standata <- list( N = n, K = k, x = as.matrix(x), y = y, offset_ = offset, beta_prior_scale = 2.5, alpha_prior_scale = 5.0 ) # Compile stanmodel <- stan_model(model_code = stancode) # Fit model fit <- sampling(stanmodel, data = standata, seed = seed, refresh = 0) print(fit, pars = "beta") ``` Let us now evaluate the predictive performance of the model using `loo()`. ```{r loo1} loo1 <- loo(fit) loo1 ``` The `loo()` function output warnings that there are some observations which are highly influential, and thus the accuracy of importance sampling is compromised as indicated by the large Pareto $k$ diagnostic values (> 0.7). As discussed in the vignette [_Using the loo package (version >= 2.0.0)_](https://mc-stan.org/loo/articles/loo2-example.html), this may be an indication of model misspecification. Despite that, it is still beneficial to be able to evaluate the predictive performance of the model accurately. ## Moment matching correction for importance sampling To improve the accuracy of the `loo()` result above, we could perform leave-one-out cross-validation by explicitly leaving out single observations and refitting the model using MCMC repeatedly. However, the Pareto $k$ diagnostics indicate that there are 19 observations which are problematic. This would require 19 model refits which may require a lot of computation time. Instead of refitting with MCMC, we can perform a faster moment matching correction to the importance sampling for the problematic observations. This can be done with the `loo_moment_match()` function in the __loo__ package, which takes our existing `loo` object as input and modifies it. The moment matching requires some evaluations of the model posterior density. For models fitted with __rstan__, this can be conveniently done by using the existing `stanfit` object. First, we show how the moment matching can be used for a model fitted using __rstan__. It only requires setting the argument `moment_match` to `TRUE` in the `loo()` function. 
Optionally, you can also set the argument `k_threshold` which determines the Pareto $k$ threshold, above which moment matching is used. By default, it operates on all observations whose Pareto $k$ value is larger than the sample size ($S$) specific threshold $\min(1 - 1 / \log_{10}(S), 0.7)$ (which is $0.7$ for $S>2200$). ```{r loo_moment_match} # available in rstan >= 2.21 loo2 <- loo(fit, moment_match = TRUE) loo2 ``` After the moment matching, all observations have the diagnostic Pareto $k$ less than 0.7, meaning that the estimates are now reliable. The total `elpd_loo` estimate also changed from `-5457.8` to `-5478.5`, showing that before moment matching, `loo()` overestimated the predictive performance of the model. The updated Pareto $k$ values stored in `loo2$diagnostics$pareto_k` are considered algorithmic diagnostic values that indicate the sampling accuracy. The original Pareto $k$ values are stored in `loo2$pointwise[,"influence_pareto_k"]` and these are not modified by the moment matching. These can be considered as diagnostics for how big influence each observation has on the posterior distribution. In addition to the Pareto $k$ diagnostics, moment matching also updates the effective sample size estimates. # Using `loo_moment_match()` directly The moment matching can also be performed by explicitly calling the function `loo_moment_match()`. This enables its use also for models that are not using __rstan__ or another package with built-in support for `loo_moment_match()`. To use `loo_moment_match()`, the user must give the model object `x`, the `loo` object, and 5 helper functions as arguments to `loo_moment_match()`. The helper functions are * `post_draws` + A function the takes `x` as the first argument and returns a matrix of posterior draws of the model parameters, `pars`. * `log_lik_i` + A function that takes `x` and `i` and returns a matrix (one column per chain) or a vector (all chains stacked) of log-likeliood draws of the ith observation based on the model `x`. If the draws are obtained using MCMC, the matrix with MCMC chains separated is preferred. * `unconstrain_pars` + A function that takes arguments `x` and `pars`, and returns posterior draws on the unconstrained space based on the posterior draws on the constrained space passed via `pars`. * `log_prob_upars` + A function that takes arguments `x` and `upars`, and returns a matrix of log-posterior density values of the unconstrained posterior draws passed via `upars`. * `log_lik_i_upars` + A function that takes arguments `x`, `upars`, and `i` and returns a vector of log-likelihood draws of the `i`th observation based on the unconstrained posterior draws passed via `upars`. Next, we show how the helper functions look like for RStan objects, and show an example of using `loo_moment_match()` directly. For stanfit objects from __rstan__ objects, the functions look like this: ```{r stanfitfuns} # create a named list of draws for use with rstan methods .rstan_relist <- function(x, skeleton) { out <- utils::relist(x, skeleton) for (i in seq_along(skeleton)) { dim(out[[i]]) <- dim(skeleton[[i]]) } out } # rstan helper function to get dims of parameters right .create_skeleton <- function(pars, dims) { out <- lapply(seq_along(pars), function(i) { len_dims <- length(dims[[i]]) if (len_dims < 1) { return(0) } return(array(0, dim = dims[[i]])) }) names(out) <- pars out } # extract original posterior draws post_draws_stanfit <- function(x, ...) 
{ as.matrix(x) } # compute a matrix of log-likelihood values for the ith observation # matrix contains information about the number of MCMC chains log_lik_i_stanfit <- function(x, i, parameter_name = "log_lik", ...) { loo::extract_log_lik(x, parameter_name, merge_chains = FALSE)[,, i] } # transform parameters to the unconstraint space unconstrain_pars_stanfit <- function(x, pars, ...) { skeleton <- .create_skeleton(x@sim$pars_oi, x@par_dims[x@sim$pars_oi]) upars <- apply(pars, 1, FUN = function(theta) { rstan::unconstrain_pars(x, .rstan_relist(theta, skeleton)) }) # for one parameter models if (is.null(dim(upars))) { dim(upars) <- c(1, length(upars)) } t(upars) } # compute log_prob for each posterior draws on the unconstrained space log_prob_upars_stanfit <- function(x, upars, ...) { apply( upars, 1, rstan::log_prob, object = x, adjust_transform = TRUE, gradient = FALSE ) } # compute log_lik values based on the unconstrained parameters log_lik_i_upars_stanfit <- function( x, upars, i, parameter_name = "log_lik", ... ) { S <- nrow(upars) out <- numeric(S) for (s in seq_len(S)) { out[s] <- rstan::constrain_pars(x, upars = upars[s, ])[[parameter_name]][i] } out } ``` Using these function, we can call `loo_moment_match()` to update the existing `loo` object. ```{r loo_moment_match.default, message=FALSE} loo3 <- loo::loo_moment_match.default( x = fit, loo = loo1, post_draws = post_draws_stanfit, log_lik_i = log_lik_i_stanfit, unconstrain_pars = unconstrain_pars_stanfit, log_prob_upars = log_prob_upars_stanfit, log_lik_i_upars = log_lik_i_upars_stanfit ) loo3 ``` As expected, the result is identical to the previous result of `loo2 <- loo(fit, moment_match = TRUE)`. # References Gelman, A., and Hill, J. (2007). *Data Analysis Using Regression and Multilevel Hierarchical Models.* Cambridge University Press. Stan Development Team (2020) _RStan: the R interface to Stan, Version 2.21.1_ https://mc-stan.org Paananen, T., Piironen, J., Buerkner, P.-C., Vehtari, A. (2021). Implicitly adaptive importance sampling. _Statistics and Computing_, 31, 16. \doi:10.1007/s11222-020-09982-2. arXiv preprint arXiv:1906.08850. Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. _Statistics and Computing_. 27(5), 1413--1432. \doi:10.1007/s11222-016-9696-4. Links: [published](https://link.springer.com/article/10.1007/s11222-016-9696-4) | [arXiv preprint](https://arxiv.org/abs/1507.04544). Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. *Journal of Machine Learning Research*, 25(72):1-58. [PDF](https://jmlr.org/papers/v25/19-556.html) loo/inst/doc/loo2-lfo.Rmd0000644000176200001440000006055114641333357014702 0ustar liggesusers--- title: "Approximate leave-future-out cross-validation for Bayesian time series models" author: "Paul Bürkner, Jonah Gabry, Aki Vehtari" date: "`r Sys.Date()`" output: html_vignette: toc: yes encoding: "UTF-8" params: EVAL: !r identical(Sys.getenv("NOT_CRAN"), "true") --- ```{r settings, child="children/SETTINGS-knitr.txt"} ``` ```{r more-knitr-ops, include=FALSE} knitr::opts_chunk$set( cache = TRUE, message = FALSE, warning = FALSE ) ``` ```{r, child="children/SEE-ONLINE.txt", eval = if (isTRUE(exists("params"))) !params$EVAL else TRUE} ``` ## Introduction One of the most common goals of a time series analysis is to use the observed series to inform predictions for future observations. 
We will refer to this task of predicting a sequence of $M$ future observations as $M$-step-ahead prediction ($M$-SAP). Fortunately, once we have fit a model and can sample from the posterior predictive distribution, it is straightforward to generate predictions as far into the future as we want. It is also straightforward to evaluate the $M$-SAP performance of a time series model by comparing the predictions to the observed sequence of $M$ future data points once they become available. Unfortunately, we are often in the position of having to use a model to inform decisions _before_ we can collect the future observations required for assessing the predictive performance. If we have many competing models we may also need to first decide which of the models (or which combination of the models) we should rely on for predictions. In these situations the best we can do is to use methods for approximating the expected predictive performance of our models using only the observations of the time series we already have. If there were no time dependence in the data or if the focus is to assess the non-time-dependent part of the model, we could use methods like leave-one-out cross-validation (LOO-CV). For a data set with $N$ observations, we refit the model $N$ times, each time leaving out one of the $N$ observations and assessing how well the model predicts the left-out observation. LOO-CV is very expensive computationally in most realistic settings, but the Pareto smoothed importance sampling (PSIS, Vehtari et al, 2017, 2024) algorithm provided by the *loo* package allows for approximating exact LOO-CV with PSIS-LOO-CV. PSIS-LOO-CV requires only a single fit of the full model and comes with diagnostics for assessing the validity of the approximation. With a time series we can do something similar to LOO-CV but, except in a few cases, it does not make sense to leave out observations one at a time because then we are allowing information from the future to influence predictions of the past (i.e., times $t + 1, t+2, \ldots$ should not be used to predict for time $t$). To apply the idea of cross-validation to the $M$-SAP case, instead of leave-*one*-out cross-validation we need some form of leave-*future*-out cross-validation (LFO-CV). As we will demonstrate in this case study, LFO-CV does not refer to one particular prediction task but rather to various possible cross-validation approaches that all involve some form of prediction for new time series data. Like exact LOO-CV, exact LFO-CV requires refitting the model many times to different subsets of the data, which is computationally very costly for most nontrivial examples, in particular for Bayesian analyses where refitting the model means estimating a new posterior distribution rather than a point estimate. Although PSIS-LOO-CV provides an efficient approximation to exact LOO-CV, until now there has not been an analogous approximation to exact LFO-CV that drastically reduces the computational burden while also providing informative diagnostics about the quality of the approximation. In this case study we present PSIS-LFO-CV, an algorithm that typically only requires refitting the time-series model a small number times and will make LFO-CV tractable for many more realistic applications than previously possible. More details can be found in our paper about approximate LFO-CV (Bürkner, Gabry, & Vehtari, 2020), which is available as a preprint on arXiv (https://arxiv.org/abs/1902.06281). 
## $M$-step-ahead predictions Assume we have a time series of observations $y = (y_1, y_2, \ldots, y_N)$ and let $L$ be the _minimum_ number of observations from the series that we will require before making predictions for future data. Depending on the application and how informative the data is, it may not be possible to make reasonable predictions for $y_{i+1}$ based on $(y_1, \dots, y_{i})$ until $i$ is large enough so that we can learn enough about the time series to predict future observations. Setting $L=10$, for example, means that we will only assess predictive performance starting with observation $y_{11}$, so that we always have at least 10 previous observations to condition on. In order to assess $M$-SAP performance we would like to compute the predictive densities $$ p(y_{i+1:M} \,|\, y_{1:i}) = p(y_{i+1}, \ldots, y_{i + M} \,|\, y_{1},...,y_{i}) $$ for each $i \in \{L, \ldots, N - M\}$. The quantities $p(y_{i+1:M} \,|\, y_{1:i})$ can be computed with the help of the posterior distribution $p(\theta \,|\, y_{1:i})$ of the parameters $\theta$ conditional on only the first $i$ observations of the time-series: $$ p(y_{i+1:M} \,| \, y_{1:i}) = \int p(y_{i+1:M} \,| \, y_{1:i}, \theta) \, p(\theta\,|\,y_{1:i}) \,d\theta. $$ Having obtained $S$ draws $(\theta_{1:i}^{(1)}, \ldots, \theta_{1:i}^{(S)})$ from the posterior distribution $p(\theta\,|\,y_{1:i})$, we can estimate $p(y_{i+1:M} | y_{1:i})$ as $$ p(y_{i+1:M} \,|\, y_{1:i}) \approx \frac{1}{S}\sum_{s=1}^S p(y_{i+1:M} \,|\, y_{1:i}, \theta_{1:i}^{(s)}). $$ ## Approximate $M$-SAP using importance-sampling {#approximate_MSAP} Unfortunately, the math above makes use of the posterior distributions from many different fits of the model to different subsets of the data. That is, to obtain the predictive density $p(y_{i+1:M} \,|\, y_{1:i})$ requires fitting a model to only the first $i$ data points, and we will need to do this for every value of $i$ under consideration (all $i \in \{L, \ldots, N - M\}$). To reduce the number of models that need to be fit for the purpose of obtaining each of the densities $p(y_{i+1:M} \,|\, y_{1:i})$, we propose the following algorithm. First, we refit the model using the first $L$ observations of the time series and then perform a single exact $M$-step-ahead prediction step for $p(y_{L+1:M} \,|\, y_{1:L})$. Recall that $L$ is the minimum number of observations we have deemed acceptable for making predictions (setting $L=0$ means the first data point will be predicted only based on the prior). We define $i^\star = L$ as the current point of refit. Next, starting with $i = i^\star + 1$, we approximate each $p(y_{i+1:M} \,|\, y_{1:i})$ via $$ p(y_{i+1:M} \,|\, y_{1:i}) \approx \frac{ \sum_{s=1}^S w_i^{(s)}\, p(y_{i+1:M} \,|\, y_{1:i}, \theta^{(s)})} { \sum_{s=1}^S w_i^{(s)}}, $$ where $\theta^{(s)} = \theta^{(s)}_{1:i^\star}$ are draws from the posterior distribution based on the first $i^\star$ observations and $w_i^{(s)}$ are the PSIS weights obtained in two steps. First, we compute the raw importance ratios $$ r_i^{(s)} = \frac{f_{1:i}(\theta^{(s)})}{f_{1:i^\star}(\theta^{(s)})} \propto \prod_{j \in (i^\star + 1):i} p(y_j \,|\, y_{1:(j-1)}, \theta^{(s)}), $$ and then stabilize them using PSIS. The function $f_{1:i}$ denotes the posterior distribution based on the first $i$ observations, that is, $f_{1:i} = p(\theta \,|\, y_{1:i})$, with $f_{1:i^\star}$ defined analogously. 
The index set $(i^\star + 1):i$ indicates all observations which are part of the data for the model $f_{1:i}$ whose predictive performance we are trying to approximate but not for the actually fitted model $f_{1:i^\star}$. The proportionality statement arises from the fact that we ignore the normalizing constants $p(y_{1:i})$ and $p(y_{1:i^\star})$ of the compared posteriors, which leads to a self-normalized variant of PSIS (see Vehtari et al, 2017).

Continuing with the next observation, we gradually increase $i$ by $1$ (we move forward in time) and repeat the process. At some observation $i$, the variability of the importance ratios $r_i^{(s)}$ will become too large and importance sampling will fail. We will refer to this particular value of $i$ as $i^\star_1$. To identify the value of $i^\star_1$, we check for which value of $i$ the estimated shape parameter $k$ of the generalized Pareto distribution first crosses a certain threshold $\tau$ (Vehtari et al, 2024). Only then do we refit the model using the observations up to $i^\star_1$ and restart the process from there by setting $\theta^{(s)} = \theta^{(s)}_{1:i^\star_1}$ and $i^\star = i^\star_1$ until the next refit. In some cases we may only need to refit once and in other cases we will find a value $i^\star_2$ that requires a second refitting, maybe an $i^\star_3$ that requires a third refitting, and so on. We refit as many times as is required (only when $k > \tau$) until we arrive at observation $i = N - M$. For LOO, assuming the posterior sample size is 4000 or larger, we recommend using a threshold of $\tau = 0.7$ (Vehtari et al, 2017, 2024), and it turns out this is a reasonable threshold for LFO as well (Bürkner et al. 2020).

## Autoregressive models

Autoregressive (AR) models are some of the most commonly used time-series models. An AR(p) model (an autoregressive model of order $p$) can be defined as
$$
y_i = \eta_i + \sum_{k = 1}^p \varphi_k y_{i - k} + \varepsilon_i,
$$
where $\eta_i$ is the linear predictor for the $i$th observation, $\varphi_k$ are the autoregressive parameters, and $\varepsilon_i$ are pairwise independent errors, which are usually assumed to be normally distributed with equal variance $\sigma^2$. The model implies a recursive formula that allows for computing the right-hand side of the above equation for observation $i$ based on the values of the equations for previous observations.

## Case Study: Annual measurements of the level of Lake Huron

To illustrate the application of PSIS-LFO-CV for estimating expected $M$-SAP performance, we will fit a model for 98 annual measurements of the water level (in feet) of [Lake Huron](https://en.wikipedia.org/wiki/Lake_Huron) from the years 1875--1972. This data set is found in the **datasets** R package, which is installed automatically with **R**. In addition to the **loo** package, for this analysis we will use the **brms** interface to Stan to generate a Stan program and fit the model, and also the **bayesplot** and **ggplot2** packages for plotting.

```{r pkgs, cache=FALSE}
library("brms")
library("loo")
library("bayesplot")
library("ggplot2")
color_scheme_set("brightblue")
theme_set(theme_default())

CHAINS <- 4
SEED <- 5838296
set.seed(SEED)
```

Before fitting a model, we will first put the data into a data frame and then look at the time series.
```{r hurondata}
N <- length(LakeHuron)
df <- data.frame(
  y = as.numeric(LakeHuron),
  year = as.numeric(time(LakeHuron)),
  time = 1:N
)

ggplot(df, aes(x = year, y = y)) +
  geom_point(size = 1) +
  labs(
    y = "Water Level (ft)",
    x = "Year",
    title = "Water Level in Lake Huron (1875-1972)"
  )
```

The above plot shows rather strong autocorrelation of the time-series as well as some trend towards lower levels for later points in time. We can specify an AR(4) model for these data using the **brms** package as follows:

```{r fit, results = "hide"}
fit <- brm(
  y ~ ar(time, p = 4),
  data = df,
  prior = prior(normal(0, 0.5), class = "ar"),
  control = list(adapt_delta = 0.99),
  seed = SEED,
  chains = CHAINS
)
```

The model-implied predictions along with the observed values can be plotted, which reveals a rather good fit to the data.

```{r plotpreds, cache = FALSE}
preds <- posterior_predict(fit)
preds <- cbind(
  Estimate = colMeans(preds),
  Q5 = apply(preds, 2, quantile, probs = 0.05),
  Q95 = apply(preds, 2, quantile, probs = 0.95)
)

ggplot(cbind(df, preds), aes(x = year, y = Estimate)) +
  geom_smooth(aes(ymin = Q5, ymax = Q95), stat = "identity", linewidth = 0.5) +
  geom_point(aes(y = y)) +
  labs(
    y = "Water Level (ft)",
    x = "Year",
    title = "Water Level in Lake Huron (1875-1972)",
    subtitle = "Mean (blue) and 90% predictive intervals (gray) vs. observed data (black)"
  )
```

To allow for reasonable predictions of future values, we will require at least $L = 20$ historical observations (20 years) before making predictions.

```{r setL}
L <- 20
```

We first perform approximate leave-one-out cross-validation (LOO-CV) for the purpose of later comparison with exact and approximate LFO-CV for the 1-SAP case.

```{r loo1sap, cache = FALSE}
loo_cv <- loo(log_lik(fit)[, (L + 1):N])
print(loo_cv)
```

## 1-step-ahead predictions leaving out all future values

The most basic version of $M$-SAP is 1-SAP, in which we predict only one step ahead. In this case, $y_{i+1:M}$ simplifies to $y_{i+1}$ and the LFO-CV algorithm becomes considerably simpler than for larger values of $M$.

### Exact 1-step-ahead predictions

Before we compute approximate LFO-CV using PSIS we will first compute exact LFO-CV for the 1-SAP case so we can use it as a benchmark later. The initial step for the exact computation is to calculate the log-predictive densities by refitting the model many times:

```{r exact_loglik, results="hide"}
loglik_exact <- matrix(nrow = ndraws(fit), ncol = N)
for (i in L:(N - 1)) {
  past <- 1:i
  oos <- i + 1
  df_past <- df[past, , drop = FALSE]
  df_oos <- df[c(past, oos), , drop = FALSE]
  fit_i <- update(fit, newdata = df_past, recompile = FALSE)
  loglik_exact[, i + 1] <- log_lik(fit_i, newdata = df_oos, oos = oos)[, oos]
}
```

Then we compute the exact expected log predictive density (ELPD):

```{r helpers}
# some helper functions we'll use throughout

# more stable than log(sum(exp(x)))
log_sum_exp <- function(x) {
  max_x <- max(x)
  max_x + log(sum(exp(x - max_x)))
}

# more stable than log(mean(exp(x)))
log_mean_exp <- function(x) {
  log_sum_exp(x) - log(length(x))
}

# compute log of raw importance ratios
# sums over observations *not* over posterior samples
sum_log_ratios <- function(loglik, ids = NULL) {
  if (!is.null(ids)) loglik <- loglik[, ids, drop = FALSE]
  rowSums(loglik)
}

# for printing comparisons later
rbind_print <- function(...)
{
  round(rbind(...), digits = 2)
}
```

```{r exact1sap, cache = FALSE}
exact_elpds_1sap <- apply(loglik_exact, 2, log_mean_exp)
exact_elpd_1sap <- c(ELPD = sum(exact_elpds_1sap[-(1:L)]))

rbind_print(
  "LOO" = loo_cv$estimates["elpd_loo", "Estimate"],
  "LFO" = exact_elpd_1sap
)
```

We see that the ELPD from LFO-CV for 1-step-ahead predictions is lower than the ELPD estimate from LOO-CV, which should be expected since LOO-CV is making use of more of the time series. That is, since the LFO-CV approach only uses observations from before the left-out data point but LOO-CV uses _all_ data points other than the left-out observation, we should expect to see the larger ELPD from LOO-CV.

### Approximate 1-step-ahead predictions

We compute approximate 1-SAP, refitting at observations where the Pareto $k$ estimate exceeds the threshold of $0.7$.

```{r setkthresh}
k_thres <- 0.7
```

The code becomes a little more involved compared to the exact LFO-CV. Note that we can compute exact 1-SAP at the refitting points, which comes at no additional computational cost since we had to refit the model anyway.

```{r refit_loglik, results="hide"}
approx_elpds_1sap <- rep(NA, N)

# initialize the process for i = L
past <- 1:L
oos <- L + 1
df_past <- df[past, , drop = FALSE]
df_oos <- df[c(past, oos), , drop = FALSE]
fit_past <- update(fit, newdata = df_past, recompile = FALSE)
loglik <- log_lik(fit_past, newdata = df_oos, oos = oos)
approx_elpds_1sap[L + 1] <- log_mean_exp(loglik[, oos])

# iterate over i > L
i_refit <- L
refits <- L
ks <- NULL
for (i in (L + 1):(N - 1)) {
  past <- 1:i
  oos <- i + 1
  df_past <- df[past, , drop = FALSE]
  df_oos <- df[c(past, oos), , drop = FALSE]
  loglik <- log_lik(fit_past, newdata = df_oos, oos = oos)
  logratio <- sum_log_ratios(loglik, (i_refit + 1):i)
  psis_obj <- suppressWarnings(psis(logratio))
  k <- pareto_k_values(psis_obj)
  ks <- c(ks, k)
  if (k > k_thres) {
    # refit the model based on the first i observations
    i_refit <- i
    refits <- c(refits, i)
    fit_past <- update(fit_past, newdata = df_past, recompile = FALSE)
    loglik <- log_lik(fit_past, newdata = df_oos, oos = oos)
    approx_elpds_1sap[i + 1] <- log_mean_exp(loglik[, oos])
  } else {
    lw <- weights(psis_obj, normalize = TRUE)[, 1]
    approx_elpds_1sap[i + 1] <- log_sum_exp(lw + loglik[, oos])
  }
}
```

We see that the final Pareto $k$ estimates are mostly well below the threshold and that we only needed to refit the model a few times:

```{r plot_ks}
plot_ks <- function(ks, ids, thres = 0.6) {
  dat_ks <- data.frame(ks = ks, ids = ids)
  ggplot(dat_ks, aes(x = ids, y = ks)) +
    geom_point(aes(color = ks > thres), shape = 3, show.legend = FALSE) +
    geom_hline(yintercept = thres, linetype = 2, color = "red2") +
    scale_color_manual(values = c("cornflowerblue", "darkblue")) +
    labs(x = "Data point", y = "Pareto k") +
    ylim(-0.5, 1.5)
}
```

```{r refitsummary1sap, cache=FALSE}
cat("Using threshold ", k_thres, ", model was refit ", length(refits),
    " times, at observations", refits)
plot_ks(ks, (L + 1):(N - 1), thres = k_thres)
```

The approximate 1-SAP ELPD is remarkably similar to the exact 1-SAP ELPD computed above, which indicates that our algorithm for computing approximate 1-SAP worked well for the present data and model.
```{r lfosummary1sap, cache = FALSE}
approx_elpd_1sap <- sum(approx_elpds_1sap, na.rm = TRUE)
rbind_print(
  "approx LFO" = approx_elpd_1sap,
  "exact LFO" = exact_elpd_1sap
)
```

Plotting exact against approximate predictions, we see that no approximation value deviates far from its exact counterpart, providing further evidence for the good quality of our approximation.

```{r plot1sap, cache = FALSE}
dat_elpd <- data.frame(
  approx_elpd = approx_elpds_1sap,
  exact_elpd = exact_elpds_1sap
)

ggplot(dat_elpd, aes(x = approx_elpd, y = exact_elpd)) +
  geom_abline(color = "gray30") +
  geom_point(size = 2) +
  labs(x = "Approximate ELPDs", y = "Exact ELPDs")
```

We can also look at the maximum difference and average difference between the approximate and exact ELPD calculations, which also indicate a very close approximation:

```{r diffs1sap, cache=FALSE}
max_diff <- with(dat_elpd, max(abs(approx_elpd - exact_elpd), na.rm = TRUE))
mean_diff <- with(dat_elpd, mean(abs(approx_elpd - exact_elpd), na.rm = TRUE))
rbind_print(
  "Max diff" = round(max_diff, 2),
  "Mean diff" = round(mean_diff, 3)
)
```

## $M$-step-ahead predictions leaving out all future values

To illustrate the application of $M$-SAP for $M > 1$, we next compute exact and approximate LFO-CV for the 4-SAP case.

### Exact $M$-step-ahead predictions

The necessary steps are the same as for 1-SAP with the exception that the log-density values of interest are now the sums of the log predictive densities of four consecutive observations. Further, the stability of the PSIS approximation actually stays the same for all $M$ as it only depends on the number of observations we leave out, not on the number of observations we predict.

```{r exact_loglikm, results="hide"}
M <- 4
loglikm <- matrix(nrow = ndraws(fit), ncol = N)
for (i in L:(N - M)) {
  past <- 1:i
  oos <- (i + 1):(i + M)
  df_past <- df[past, , drop = FALSE]
  df_oos <- df[c(past, oos), , drop = FALSE]
  fit_past <- update(fit, newdata = df_past, recompile = FALSE)
  loglik <- log_lik(fit_past, newdata = df_oos, oos = oos)
  loglikm[, i + 1] <- rowSums(loglik[, oos])
}
```

```{r exact4sap, cache = FALSE}
exact_elpds_4sap <- apply(loglikm, 2, log_mean_exp)
(exact_elpd_4sap <- c(ELPD = sum(exact_elpds_4sap, na.rm = TRUE)))
```

### Approximate $M$-step-ahead predictions

Computing the approximate PSIS-LFO-CV for the 4-SAP case is a little bit more involved than the approximate version for the 1-SAP case, although the underlying principles remain the same.
```{r refit_loglikm, results="hide"}
approx_elpds_4sap <- rep(NA, N)

# initialize the process for i = L
past <- 1:L
oos <- (L + 1):(L + M)
df_past <- df[past, , drop = FALSE]
df_oos <- df[c(past, oos), , drop = FALSE]
fit_past <- update(fit, newdata = df_past, recompile = FALSE)
loglik <- log_lik(fit_past, newdata = df_oos, oos = oos)
loglikm <- rowSums(loglik[, oos])
approx_elpds_4sap[L + 1] <- log_mean_exp(loglikm)

# iterate over i > L
i_refit <- L
refits <- L
ks <- NULL
for (i in (L + 1):(N - M)) {
  past <- 1:i
  oos <- (i + 1):(i + M)
  df_past <- df[past, , drop = FALSE]
  df_oos <- df[c(past, oos), , drop = FALSE]
  loglik <- log_lik(fit_past, newdata = df_oos, oos = oos)
  logratio <- sum_log_ratios(loglik, (i_refit + 1):i)
  psis_obj <- suppressWarnings(psis(logratio))
  k <- pareto_k_values(psis_obj)
  ks <- c(ks, k)
  if (k > k_thres) {
    # refit the model based on the first i observations
    i_refit <- i
    refits <- c(refits, i)
    fit_past <- update(fit_past, newdata = df_past, recompile = FALSE)
    loglik <- log_lik(fit_past, newdata = df_oos, oos = oos)
    loglikm <- rowSums(loglik[, oos])
    approx_elpds_4sap[i + 1] <- log_mean_exp(loglikm)
  } else {
    lw <- weights(psis_obj, normalize = TRUE)[, 1]
    loglikm <- rowSums(loglik[, oos])
    approx_elpds_4sap[i + 1] <- log_sum_exp(lw + loglikm)
  }
}
```

Again, we see that the final Pareto $k$ estimates are mostly well below the threshold and that we only needed to refit the model a few times:

```{r refitsummary4sap, cache = FALSE}
cat("Using threshold ", k_thres, ", model was refit ", length(refits),
    " times, at observations", refits)
plot_ks(ks, (L + 1):(N - M), thres = k_thres)
```

The approximate ELPD computed for the 4-SAP case is not as close to its exact counterpart as in the 1-SAP case. In general, the larger $M$, the larger the variation of the approximate ELPD around the exact ELPD. It turns out that the ELPD estimates of AR models with $M>1$ show particular variation due to their predictions' dependency on other predicted values. In Bürkner et al. (2020) we provide further explanation and simulations for these cases.

```{r lfosummary4sap, cache = FALSE}
approx_elpd_4sap <- sum(approx_elpds_4sap, na.rm = TRUE)
rbind_print(
  "Approx LFO" = approx_elpd_4sap,
  "Exact LFO" = exact_elpd_4sap
)
```

Plotting exact against approximate pointwise predictions confirms that, for a few specific data points, the approximate predictions underestimate the exact predictions.

```{r plot4sap, cache = FALSE}
dat_elpd_4sap <- data.frame(
  approx_elpd = approx_elpds_4sap,
  exact_elpd = exact_elpds_4sap
)

ggplot(dat_elpd_4sap, aes(x = approx_elpd, y = exact_elpd)) +
  geom_abline(color = "gray30") +
  geom_point(size = 2) +
  labs(x = "Approximate ELPDs", y = "Exact ELPDs")
```

## Conclusion

In this case study we have shown how to carry out exact and approximate leave-future-out cross-validation for $M$-step-ahead prediction tasks. For the data and model used in our example, the PSIS-LFO-CV algorithm provides reasonably stable and accurate results despite not requiring us to refit the model nearly as many times as exact LFO-CV. For more details on approximate LFO-CV, we refer to Bürkner et al. (2020).
## References

Bürkner P. C., Gabry J., & Vehtari A. (2020). Approximate leave-future-out cross-validation for time series models. *Journal of Statistical Computation and Simulation*, 90(14):2499-2523. \doi:10.1080/00949655.2020.1783262. [Online](https://www.tandfonline.com/doi/full/10.1080/00949655.2020.1783262). [arXiv preprint](https://arxiv.org/abs/1902.06281).

Vehtari A., Gelman A., & Gabry J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. *Statistics and Computing*, 27(5), 1413--1432. \doi:10.1007/s11222-016-9696-4. [Online](https://link.springer.com/article/10.1007/s11222-016-9696-4). [arXiv preprint arXiv:1507.04544](https://arxiv.org/abs/1507.04544).

Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. *Journal of Machine Learning Research*, 25(72):1-58. [PDF](https://jmlr.org/papers/v25/19-556.html)
## Appendix ### Appendix: Session information ```{r sessioninfo} sessionInfo() ``` ### Appendix: Licenses * Code © 2018, Paul Bürkner, Jonah Gabry, Aki Vehtari (licensed under BSD-3). * Text © 2018, Paul Bürkner, Jonah Gabry, Aki Vehtari (licensed under CC-BY-NC 4.0). loo/inst/doc/loo2-elpd.R0000644000176200001440000001117715122301475014514 0ustar liggesusersparams <- list(EVAL = TRUE) ## ----SETTINGS-knitr, include=FALSE-------------------------------------------- stopifnot(require(knitr)) opts_chunk$set( comment=NA, eval = if (isTRUE(exists("params"))) params$EVAL else FALSE, dev = "png", dpi = 150, fig.asp = 0.618, fig.width = 5, out.width = "60%", fig.align = "center" ) ## ----stancode----------------------------------------------------------------- # Note: some syntax used in this Stan program requires RStan >= 2.26 (or CmdStanR) # To use an older version of RStan change the line declaring `y` to: int y[N]; stancode <- " data { int K; int N; matrix[N,K] x; array[N] int y; vector[N] offset_; // offset is reserved keyword in Stan so use offset_ real beta_prior_scale; real alpha_prior_scale; } parameters { vector[K] beta; real intercept; } model { y ~ poisson(exp(x * beta + intercept + offset_)); beta ~ normal(0,beta_prior_scale); intercept ~ normal(0,alpha_prior_scale); } generated quantities { vector[N] log_lik; for (n in 1:N) log_lik[n] = poisson_lpmf(y[n] | exp(x[n] * beta + intercept + offset_[n])); } " ## ----setup, message=FALSE----------------------------------------------------- library("rstan") library("loo") seed <- 9547 set.seed(seed) ## ----modelfit-holdout, message=FALSE------------------------------------------ # Prepare data data(roaches, package = "rstanarm") roaches$roach1 <- sqrt(roaches$roach1) roaches$offset <- log(roaches[,"exposure2"]) # 20% of the data goes to the test set: roaches$test <- 0 roaches$test[sample(.2 * seq_len(nrow(roaches)))] <- 1 # data to "train" the model data_train <- list(y = roaches$y[roaches$test == 0], x = as.matrix(roaches[roaches$test == 0, c("roach1", "treatment", "senior")]), N = nrow(roaches[roaches$test == 0,]), K = 3, offset_ = roaches$offset[roaches$test == 0], beta_prior_scale = 2.5, alpha_prior_scale = 5.0 ) # data to "test" the model data_test <- list(y = roaches$y[roaches$test == 1], x = as.matrix(roaches[roaches$test == 1, c("roach1", "treatment", "senior")]), N = nrow(roaches[roaches$test == 1,]), K = 3, offset_ = roaches$offset[roaches$test == 1], beta_prior_scale = 2.5, alpha_prior_scale = 5.0 ) ## ----fit-train---------------------------------------------------------------- # Compile stanmodel <- stan_model(model_code = stancode) # Fit model fit <- sampling(stanmodel, data = data_train, seed = seed, refresh = 0) ## ----gen-test----------------------------------------------------------------- gen_test <- gqs(stanmodel, draws = as.matrix(fit), data= data_test) log_pd <- extract_log_lik(gen_test) ## ----elpd-holdout------------------------------------------------------------- (elpd_holdout <- elpd(log_pd)) ## ----prepare-folds, message=FALSE--------------------------------------------- # Prepare data roaches$fold <- kfold_split_random(K = 10, N = nrow(roaches)) ## ----------------------------------------------------------------------------- # Prepare a matrix with the number of post-warmup iterations by number of observations: log_pd_kfold <- matrix(nrow = 4000, ncol = nrow(roaches)) # Loop over the folds for(k in 1:10){ data_train <- list(y = roaches$y[roaches$fold != k], x = as.matrix(roaches[roaches$fold != k, c("roach1", 
"treatment", "senior")]), N = nrow(roaches[roaches$fold != k,]), K = 3, offset_ = roaches$offset[roaches$fold != k], beta_prior_scale = 2.5, alpha_prior_scale = 5.0 ) data_test <- list(y = roaches$y[roaches$fold == k], x = as.matrix(roaches[roaches$fold == k, c("roach1", "treatment", "senior")]), N = nrow(roaches[roaches$fold == k,]), K = 3, offset_ = roaches$offset[roaches$fold == k], beta_prior_scale = 2.5, alpha_prior_scale = 5.0 ) fit <- sampling(stanmodel, data = data_train, seed = seed, refresh = 0) gen_test <- gqs(stanmodel, draws = as.matrix(fit), data= data_test) log_pd_kfold[, roaches$fold == k] <- extract_log_lik(gen_test) } ## ----elpd-kfold--------------------------------------------------------------- (elpd_kfold <- elpd(log_pd_kfold)) loo/inst/doc/loo2-elpd.html0000644000176200001440000010235615122301476015260 0ustar liggesusers Holdout validation and K-fold cross-validation of Stan programs with the loo package

Holdout validation and K-fold cross-validation of Stan programs with the loo package

Bruno Nicenboim

2025-12-22

Introduction

This vignette demonstrates how to do holdout validation and K-fold cross-validation with loo for a Stan program.

Example: Eradication of Roaches using holdout validation approach

This vignette uses the same example as in the vignettes Using the loo package (version >= 2.0.0) and Avoiding model refits in leave-one-out cross-validation with moment matching.

Coding the Stan model

Here is the Stan code for fitting a Poisson regression model:

# Note: some syntax used in this Stan program requires RStan >= 2.26 (or CmdStanR)
# To use an older version of RStan change the line declaring `y` to: int y[N];
stancode <- "
data {
  int<lower=1> K;
  int<lower=1> N;
  matrix[N,K] x;
  array[N] int y;
  vector[N] offset_; // offset is reserved keyword in Stan so use offset_

  real beta_prior_scale;
  real alpha_prior_scale;
}
parameters {
  vector[K] beta;
  real intercept;
}
model {
  y ~ poisson(exp(x * beta + intercept + offset_));
  beta ~ normal(0,beta_prior_scale);
  intercept ~ normal(0,alpha_prior_scale);
}
generated quantities {
  vector[N] log_lik;
  for (n in 1:N)
    log_lik[n] = poisson_lpmf(y[n] | exp(x[n] * beta + intercept + offset_[n]));
}
"

Following the usual approach recommended in Writing Stan programs for use with the loo package, we compute the log-likelihood for each observation in the generated quantities block of the Stan program.

Setup

In addition to loo, we load the rstan package for fitting the model. We will also need the rstanarm package for the data.

library("rstan")
Warning: package 'StanHeaders' was built under R version 4.4.3
library("loo")
seed <- 9547
set.seed(seed)

Holdout validation

For this approach, the model is first fit to the “train” data and then is evaluated on the held-out “test” data.

Splitting the data between train and test

The data is divided between train (80% of the data) and test (20%):

# Prepare data
data(roaches, package = "rstanarm")
roaches$roach1 <- sqrt(roaches$roach1)
roaches$offset <- log(roaches[,"exposure2"])
# 20% of the data goes to the test set:
roaches$test <- 0
roaches$test[sample(.2 * seq_len(nrow(roaches)))] <- 1
# data to "train" the model
data_train <- list(y = roaches$y[roaches$test == 0],
                   x = as.matrix(roaches[roaches$test == 0,
                                         c("roach1", "treatment", "senior")]),
                   N = nrow(roaches[roaches$test == 0,]),
                   K = 3,
                   offset_ = roaches$offset[roaches$test == 0],
                   beta_prior_scale = 2.5,
                   alpha_prior_scale = 5.0
                   )
# data to "test" the model
data_test <- list(y = roaches$y[roaches$test == 1],
                   x = as.matrix(roaches[roaches$test == 1,
                                         c("roach1", "treatment", "senior")]),
                   N = nrow(roaches[roaches$test == 1,]),
                   K = 3,
                   offset_ = roaches$offset[roaches$test == 1],
                   beta_prior_scale = 2.5,
                   alpha_prior_scale = 5.0
                   )

Fitting the model with RStan

Next we fit the model to the “train” data in Stan using the rstan package:

# Compile
stanmodel <- stan_model(model_code = stancode)
# Fit model
fit <- sampling(stanmodel, data = data_train, seed = seed, refresh = 0)

We recompute the generated quantities using the posterior draws conditional on the training data, but we now pass in the held-out data to get the log predictive densities for the test data. Because we are using independent data, the log predictive density coincides with the log likelihood of the test data.

gen_test <- gqs(stanmodel, draws = as.matrix(fit), data= data_test)
log_pd <- extract_log_lik(gen_test)

Computing holdout elpd:

Now we evaluate the predictive performance of the model on the test data using elpd().

(elpd_holdout <- elpd(log_pd))

Computed from 4000 by 52 log-likelihood matrix using the generic elpd function

     Estimate    SE
elpd  -1734.5 288.3
ic     3468.9 576.6

When one wants to compare different models, the function loo_compare() can be used to assess the difference in performance.
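For example, if a second model had been fit and evaluated on the same held-out observations, giving a hypothetical matrix log_pd_2 of log predictive densities (not computed in this vignette), the comparison would be a one-liner:

elpd_holdout_2 <- elpd(log_pd_2)  # log_pd_2 is hypothetical, from a second model
loo_compare(elpd_holdout, elpd_holdout_2)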

K-fold cross validation

For this approach the data is divided into folds, and each time one fold is tested while the rest of the data is used to fit the model (see Vehtari et al., 2017).

Splitting the data in folds

We use the data that is already pre-processed and divide it into 10 random folds using kfold_split_random.

# Prepare data
roaches$fold <- kfold_split_random(K = 10, N = nrow(roaches))

Fitting and extracting the log pointwise predictive densities for each fold

We now loop over the 10 folds. In each fold we do the following. First, we fit the model to all the observations except the ones belonging to the left-out fold. Second, we compute the log pointwise predictive densities for the left-out fold. Last, we store the predictive density for the observations of the left-out fold in a matrix. The output of this loop is a matrix of the log pointwise predictive densities of all the observations.

# Prepare a matrix with the number of post-warmup iterations by number of observations:
log_pd_kfold <- matrix(nrow = 4000, ncol = nrow(roaches))
# Loop over the folds
for(k in 1:10){
  data_train <- list(y = roaches$y[roaches$fold != k],
                   x = as.matrix(roaches[roaches$fold != k,
                                         c("roach1", "treatment", "senior")]),
                   N = nrow(roaches[roaches$fold != k,]),
                   K = 3,
                   offset_ = roaches$offset[roaches$fold != k],
                   beta_prior_scale = 2.5,
                   alpha_prior_scale = 5.0
                   )
  data_test <- list(y = roaches$y[roaches$fold == k],
                   x = as.matrix(roaches[roaches$fold == k,
                                         c("roach1", "treatment", "senior")]),
                   N = nrow(roaches[roaches$fold == k,]),
                   K = 3,
                   offset_ = roaches$offset[roaches$fold == k],
                   beta_prior_scale = 2.5,
                   alpha_prior_scale = 5.0
                   )
  fit <- sampling(stanmodel, data = data_train, seed = seed, refresh = 0)
  gen_test <- gqs(stanmodel, draws = as.matrix(fit), data= data_test)
  log_pd_kfold[, roaches$fold == k] <- extract_log_lik(gen_test)
}

Computing K-fold elpd:

Now we evaluate the predictive performance of the model on the 10 folds using elpd().

(elpd_kfold <- elpd(log_pd_kfold))

Computed from 4000 by 262 log-likelihood matrix using the generic elpd function

     Estimate     SE
elpd  -5558.1  729.0
ic    11116.2 1457.9

If one wants to compare several models (with loo_compare), one should use the same folds for all the different models.
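As a sketch, if the loop above were repeated for a second (hypothetical) Stan model using the very same roaches$fold assignments, producing a matrix log_pd_kfold_2, the models could then be compared with:

elpd_kfold_2 <- elpd(log_pd_kfold_2)  # log_pd_kfold_2 is hypothetical, from a second model
loo_compare(elpd_kfold, elpd_kfold_2)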

References

Gelman, A., and Hill, J. (2007). Data Analysis Using Regression and Multilevel Hierarchical Models. Cambridge University Press.

Stan Development Team (2020) RStan: the R interface to Stan, Version 2.21.1 https://mc-stan.org

Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. Statistics and Computing. 27(5), 1413–1432. doi:10.1007/s11222-016-9696-4. Links: published | arXiv preprint.

loo/inst/doc/loo2-example.html0000644000176200001440000103214415122301513015755 0ustar liggesusers Using the loo package (version >= 2.0.0)

Using the loo package (version >= 2.0.0)

Aki Vehtari and Jonah Gabry

2025-12-22

Introduction

This vignette demonstrates how to use the loo package to carry out Pareto smoothed importance-sampling leave-one-out cross-validation (PSIS-LOO) for purposes of model checking and model comparison.

In this vignette we can’t provide all necessary background information on PSIS-LOO and its diagnostics (Pareto \(k\) and effective sample size), so we encourage readers to refer to the following papers for more details:

  • Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. Statistics and Computing. 27(5), 1413–1432. doi:10.1007/s11222-016-9696-4. Links: published | preprint arXiv.

  • Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. Journal of Machine Learning Research, 25(72):1-58. PDF

Setup

In addition to the loo package, we’ll also be using rstanarm and bayesplot:

library("rstanarm")
library("bayesplot")
library("loo")

Example: Poisson vs negative binomial for the roaches dataset

Background and model fitting

The Poisson and negative binomial regression models used below in our example, as well as the stan_glm function used to fit the models, are covered in more depth in the rstanarm vignette Estimating Generalized Linear Models for Count Data with rstanarm. In the rest of this vignette we will assume the reader is already familiar with these kinds of models.

Roaches data

The example data we’ll use comes from Chapter 8.3 of Gelman and Hill (2007). We want to make inferences about the efficacy of a certain pest management system at reducing the number of roaches in urban apartments. Here is how Gelman and Hill describe the experiment and data (pg. 161):

the treatment and control were applied to 160 and 104 apartments, respectively, and the outcome measurement \(y_i\) in each apartment \(i\) was the number of roaches caught in a set of traps. Different apartments had traps for different numbers of days

In addition to an intercept, the regression predictors for the model are roach1, the pre-treatment number of roaches (rescaled above to be in units of hundreds), the treatment indicator treatment, and a variable indicating whether the apartment is in a building restricted to elderly residents senior. Because the number of days for which the roach traps were used is not the same for all apartments in the sample, we use the offset argument to specify that log(exposure2) should be added to the linear predictor.

# the 'roaches' data frame is included with the rstanarm package
data(roaches)
str(roaches)
'data.frame':   262 obs. of  5 variables:
 $ y        : int  153 127 7 7 0 0 73 24 2 2 ...
 $ roach1   : num  308 331.25 1.67 3 2 ...
 $ treatment: int  1 1 1 1 1 1 1 1 0 0 ...
 $ senior   : int  0 0 0 0 0 0 0 0 0 0 ...
 $ exposure2: num  0.8 0.6 1 1 1.14 ...
# rescale to units of hundreds of roaches
roaches$roach1 <- roaches$roach1 / 100

Fit Poisson model

We’ll fit a simple Poisson regression model using the stan_glm function from the rstanarm package.

fit1 <-
  stan_glm(
    formula = y ~ roach1 + treatment + senior,
    offset = log(exposure2),
    data = roaches,
    family = poisson(link = "log"),
    prior = normal(0, 2.5, autoscale = TRUE),
    prior_intercept = normal(0, 5, autoscale = TRUE),
    seed = 12345
  )

Usually we would also run posterior predictive checks as shown in the rstanarm vignette Estimating Generalized Linear Models for Count Data with rstanarm, but here we focus only on methods provided by the loo package.
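For reference, such a check takes only one line with rstanarm and bayesplot; a minimal sketch (not run here) is:

pp_check(fit1)  # compare draws from the posterior predictive distribution to the observed y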


Using the loo package for model checking and comparison

Although cross-validation is mostly used for model comparison, it is also useful for model checking.

Computing PSIS-LOO and checking diagnostics

We start by computing PSIS-LOO with the loo function. Since we fit our model using rstanarm we can use the loo method for stanreg objects (fitted model objects from rstanarm), which doesn’t require us to first extract the pointwise log-likelihood values. If we had written our own Stan program instead of using rstanarm we would pass an array or matrix of log-likelihood values to the loo function (see, e.g. help("loo.array", package = "loo")). We’ll also use the argument save_psis = TRUE to save some intermediate results to be re-used later.
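As an aside, if we had written our own Stan program, the corresponding calls might look roughly like the following sketch (assuming a hypothetical stanfit object with a log_lik quantity in generated quantities); the actual call for our rstanarm fit follows below.

log_lik_array <- extract_log_lik(stanfit, merge_chains = FALSE)  # stanfit is hypothetical here
r_eff <- relative_eff(exp(log_lik_array))
loo_manual <- loo(log_lik_array, r_eff = r_eff, save_psis = TRUE)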

loo1 <- loo(fit1, save_psis = TRUE)
Replacing NAs in `r_eff` with 1s
Warning: Found 17 observations with a pareto_k > 0.7. With this many problematic observations we recommend calling 'kfold' with argument 'K=10' to perform 10-fold cross-validation rather than LOO.

loo gives us warnings about the Pareto diagnostics, which indicate that for some observations the leave-one-out posteriors are different enough from the full posterior that importance-sampling is not able to correct the difference. We can see more details by printing the loo object.

print(loo1)

Computed from 4000 by 262 log-likelihood matrix.

         Estimate     SE
elpd_loo  -6247.5  727.9
p_loo       292.1   73.3
looic     12495.0 1455.7
------
MCSE of elpd_loo is NA.
MCSE and ESS estimates assume MCMC draws (r_eff in [0.5, 1.2]).

Pareto k diagnostic values:
                         Count Pct.    Min. ESS
(-Inf, 0.7]   (good)     245   93.5%   84      
   (0.7, 1]   (bad)        8    3.1%   <NA>    
   (1, Inf)   (very bad)   9    3.4%   <NA>    
See help('pareto-k-diagnostic') for details.

The table shows us a summary of the Pareto \(k\) diagnostic, which is used to assess the reliability of the estimates. In addition to the proportion of leave-one-out folds with \(k\) values in different intervals, the minimum of the effective sample sizes in that category is shown to give an idea of why higher \(k\) values are bad. Since we have some \(k>1\), we are not able to compute an estimate for the Monte Carlo standard error (SE) of the expected log predictive density (elpd_loo), and NA is displayed. (Full details on the interpretation of the Pareto \(k\) diagnostics are available in the Vehtari, Gelman, and Gabry (2017) and Vehtari, Simpson, Gelman, Yao, and Gabry (2024) papers referenced at the top of this vignette.)
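These diagnostics can also be accessed programmatically instead of being read off the printed summary; for example (a small sketch using helper functions from the loo package):

pareto_k_table(loo1)                 # the same diagnostic table as in the printed output
pareto_k_ids(loo1, threshold = 0.7)  # indices of the observations with k > 0.7
mcse_loo(loo1)                       # NA here because some k values exceed 0.7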

In this case the elpd_loo estimate should not be considered reliable. If we had a well-specified model we would expect the estimated effective number of parameters (p_loo) to be smaller than or similar to the total number of parameters in the model. Here p_loo is almost 300, which is about 70 times the total number of parameters in the model, indicating severe model misspecification.

Plotting Pareto \(k\) diagnostics

Using the plot method on our loo1 object produces a plot of the \(k\) values (in the same order as the observations in the dataset used to fit the model) with horizontal lines corresponding to the same categories as in the printed output above.

plot(loo1)

This plot is useful to quickly see the distribution of \(k\) values, but it’s often also possible to see structure with respect to data ordering. In our case this is mild, but there seems to be a block of data that is somewhat easier to predict (indices around 90–150). Unfortunately even for these data points we see some high \(k\) values.

Marginal posterior predictive checks

The loo package can be used in combination with the bayesplot package for leave-one-out cross-validation marginal posterior predictive checks (Gabry et al, 2019). LOO-PIT values are cumulative probabilities for \(y_i\) computed using the LOO marginal predictive distributions \(p(y_i|y_{-i})\). For a good model, the distribution of LOO-PIT values should be uniform. In the following QQ-plot the LOO-PIT values for our model (y-axis) are compared to the standard uniform distribution (x-axis).

yrep <- posterior_predict(fit1)

ppc_loo_pit_qq(
  y = roaches$y,
  yrep = yrep,
  lw = weights(loo1$psis_object)
)
Some PIT values larger than 1! Largest:  1 
Rounding PIT > 1 to 1.
Warning in .loo_pit(y = y, yrep = object, lw = lw):

The excessive number of LOO-PIT values close to 0 indicates that the model is under-dispersed compared to the data, and we should consider a model that allows for greater dispersion.

Try alternative model with more flexibility

Here we will try negative binomial regression, which is commonly used for overdispersed count data.
Unlike the Poisson distribution, the negative binomial distribution allows the conditional mean and variance of \(y\) to differ.

fit2 <- update(fit1, family = neg_binomial_2)
loo2 <- loo(fit2, save_psis = TRUE, cores = 2)
Warning: Found 1 observation(s) with a pareto_k > 0.7. We recommend calling 'loo' again with argument 'k_threshold = 0.7' in order to calculate the ELPD without the assumption that these observations are negligible. This will refit the model 1 times to compute the ELPDs for the problematic observations directly.
print(loo2)

Computed from 4000 by 262 log-likelihood matrix.

         Estimate   SE
elpd_loo   -895.6 37.8
p_loo         6.7  2.7
looic      1791.3 75.5
------
MCSE of elpd_loo is NA.
MCSE and ESS estimates assume MCMC draws (r_eff in [0.7, 1.4]).

Pareto k diagnostic values:
                         Count Pct.    Min. ESS
(-Inf, 0.7]   (good)     261   99.6%   378     
   (0.7, 1]   (bad)        1    0.4%   <NA>    
   (1, Inf)   (very bad)   0    0.0%   <NA>    
See help('pareto-k-diagnostic') for details.
plot(loo2, label_points = TRUE)

Using the label_points argument will label any \(k\) values larger than the diagnostic threshold with the index of the corresponding data point. These high values are often the result of model misspecification and frequently correspond to data points that would be considered “outliers” in the data and surprising according to the model (Gabry et al, 2019). Unfortunately, while large \(k\) values are a useful indicator of model misspecification, small \(k\) values are not a guarantee that a model is well-specified.

If there are a small number of problematic \(k\) values then we can use a feature in rstanarm that lets us refit the model once for each of these problematic observations. Each time the model is refit, one of the observations with a high \(k\) value is omitted and the LOO calculations are performed exactly for that observation. The results are then recombined with the approximate LOO calculations already carried out for the observations without problematic \(k\) values:

if (any(pareto_k_values(loo2) > 0.7)) {
  loo2 <- loo(fit2, save_psis = TRUE, k_threshold = 0.7)
}
1 problematic observation(s) found.
Model will be refit 1 times.

Fitting model 1 out of 1 (leaving out observation 93)
print(loo2)

Computed from 4000 by 262 log-likelihood matrix.

         Estimate   SE
elpd_loo   -895.5 37.7
p_loo         6.6  2.6
looic      1791.1 75.4
------
MCSE of elpd_loo is 0.2.
MCSE and ESS estimates assume MCMC draws (r_eff in [0.7, 1.4]).

All Pareto k estimates are good (k < 0.7).
See help('pareto-k-diagnostic') for details.

In the print output we can see that the Monte Carlo SE is small compared to the other uncertainties.

On the other hand, p_loo is about 7 and still a bit higher than the total number of parameters in the model. This indicates that there is almost certainly still some degree of model misspecification, but this is much better than the p_loo estimate for the Poisson model.

For further model checking we again examine the LOO-PIT values.

yrep <- posterior_predict(fit2)
ppc_loo_pit_qq(roaches$y, yrep, lw = weights(loo2$psis_object))

The plot for the negative binomial model looks better than the Poisson plot, but we still see that this model is not capturing all of the essential features in the data.

Comparing the models on expected log predictive density

We can use the loo_compare function to compare our two models on expected log predictive density (ELPD) for new data:

loo_compare(loo1, loo2)
     elpd_diff se_diff
fit2     0.0       0.0
fit1 -5352.0     709.2

The difference in ELPD is much larger than several times the estimated standard error of the difference, again indicating that the negative-binomial model is expected to have better predictive performance than the Poisson model. However, according to the LOO-PIT checks there is still some misspecification, and a reasonable guess is that a hurdle or zero-inflated model would be an improvement (we leave that for another case study).
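One way to make this explicit is to look at the ratio of the difference to its standard error, keeping in mind that this is only a rough scale and not a formal test:

comp <- loo_compare(loo1, loo2)
comp["fit1", "elpd_diff"] / comp["fit1", "se_diff"]  # many standard errors below zero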


References

Gabry, J., Simpson, D., Vehtari, A., Betancourt, M. and Gelman, A. (2019), Visualization in Bayesian workflow. J. R. Stat. Soc. A, 182: 389-402. doi:10.1111/rssa.12378. (journal version, arXiv preprint, code on GitHub)

Gelman, A. and Hill, J. (2007). Data Analysis Using Regression and Multilevel/Hierarchical Models. Cambridge University Press, Cambridge, UK.

Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. Statistics and Computing. 27(5), 1413–1432. doi:10.1007/s11222-016-9696-4. online, arXiv preprint arXiv:1507.04544.

Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. Journal of Machine Learning Research, 25(72):1-58. PDF

loo/inst/doc/loo2-weights.Rmd0000644000176200001440000004003114641333357015563 0ustar liggesusers--- title: "Bayesian Stacking and Pseudo-BMA weights using the loo package" author: "Aki Vehtari and Jonah Gabry" date: "`r Sys.Date()`" output: html_vignette: toc: yes params: EVAL: !r identical(Sys.getenv("NOT_CRAN"), "true") --- ```{r, child="children/SETTINGS-knitr.txt"} ``` ```{r, child="children/SEE-ONLINE.txt", eval = if (isTRUE(exists("params"))) !params$EVAL else TRUE} ``` # Introduction This vignette demonstrates the new functionality in __loo__ v2.0.0 for Bayesian stacking and Pseudo-BMA weighting. In this vignette we can't provide all of the necessary background on this topic, so we encourage readers to refer to the paper * Yao, Y., Vehtari, A., Simpson, D., and Gelman, A. (2018). Using stacking to average Bayesian predictive distributions. In Bayesian Analysis, \doi:10.1214/17-BA1091. [Online](https://projecteuclid.org/euclid.ba/1516093227) which provides important details on the methods demonstrated in this vignette. Here we just quote from the abstract of the paper: > **Abstract**: Bayesian model averaging is flawed in the $\mathcal{M}$-open setting in which the true data-generating process is not one of the candidate models being fit. We take the idea of stacking from the point estimation literature and generalize to the combination of predictive distributions. We extend the utility function to any proper scoring rule and use Pareto smoothed importance sampling to efficiently compute the required leave-one-out posterior distributions. We compare stacking of predictive distributions to several alternatives: stacking of means, Bayesian model averaging (BMA), Pseudo-BMA, and a variant of Pseudo-BMA that is stabilized using the Bayesian bootstrap. Based on simulations and real-data applications, we recommend stacking of predictive distributions, with bootstrapped-Pseudo-BMA as an approximate alternative when computation cost is an issue. Ideally, we would avoid the Bayesian model combination problem by extending the model to include the separate models as special cases, and preferably as a continuous expansion of the model space. For example, instead of model averaging over different covariate combinations, all potentially relevant covariates should be included in a predictive model (for causal analysis more care is needed) and a prior assumption that only some of the covariates are relevant can be presented with regularized horseshoe prior (Piironen and Vehtari, 2017a). For variable selection we recommend projective predictive variable selection (Piironen and Vehtari, 2017a; [__projpred__ package](https://cran.r-project.org/package=projpred)). To demonstrate how to use __loo__ package to compute Bayesian stacking and Pseudo-BMA weights, we repeat two simple model averaging examples from Chapters 6 and 10 of _Statistical Rethinking_ by Richard McElreath. In _Statistical Rethinking_ WAIC is used to form weights which are similar to classical "Akaike weights". Pseudo-BMA weighting using PSIS-LOO for computation is close to these WAIC weights, but named after the Pseudo Bayes Factor by Geisser and Eddy (1979). As discussed below, in general we prefer using stacking rather than WAIC weights or the similar pseudo-BMA weights. # Setup In addition to the __loo__ package we will also load the __rstanarm__ package for fitting the models. 
```{r setup, message=FALSE} library(rstanarm) library(loo) ``` # Example: Primate milk In _Statistical Rethinking_, McElreath describes the data for the primate milk example as follows: > A popular hypothesis has it that primates with larger brains produce more energetic milk, so that brains can grow quickly. ... The question here is to what extent energy content of milk, measured here by kilocalories, is related to the percent of the brain mass that is neocortex. ... We'll end up needing female body mass as well, to see the masking that hides the relationships among the variables. ```{r data} data(milk) d <- milk[complete.cases(milk),] d$neocortex <- d$neocortex.perc /100 str(d) ``` We repeat the analysis in Chapter 6 of _Statistical Rethinking_ using the following four models (here we use the default weakly informative priors in __rstanarm__, while flat priors were used in _Statistical Rethinking_). ```{r fits, results="hide"} fit1 <- stan_glm(kcal.per.g ~ 1, data = d, seed = 2030) fit2 <- update(fit1, formula = kcal.per.g ~ neocortex) fit3 <- update(fit1, formula = kcal.per.g ~ log(mass)) fit4 <- update(fit1, formula = kcal.per.g ~ neocortex + log(mass)) ``` McElreath uses WAIC for model comparison and averaging, so we'll start by also computing WAIC for these models so we can compare the results to the other options presented later in the vignette. The __loo__ package provides `waic` methods for log-likelihood arrays, matrices and functions. Since we fit our model with rstanarm we can use the `waic` method provided by the __rstanarm__ package (a wrapper around `waic` from the __loo__ package), which allows us to just pass in our fitted model objects instead of first extracting the log-likelihood values. ```{r waic} waic1 <- waic(fit1) waic2 <- waic(fit2) waic3 <- waic(fit3) waic4 <- waic(fit4) waics <- c( waic1$estimates["elpd_waic", 1], waic2$estimates["elpd_waic", 1], waic3$estimates["elpd_waic", 1], waic4$estimates["elpd_waic", 1] ) ``` We get some warnings when computing WAIC for models 3 and 4, indicating that we shouldn't trust the WAIC weights we will compute later. Following the recommendation in the warning, we next use the `loo` methods to compute PSIS-LOO instead. The __loo__ package provides `loo` methods for log-likelihood arrays, matrices, and functions, but since we fit our model with __rstanarm__ we can just pass the fitted model objects directly and __rstanarm__ will extract the needed values to pass to the __loo__ package. (Like __rstanarm__, some other R packages for fitting Stan models, e.g. __brms__, also provide similar methods for interfacing with the __loo__ package.) ```{r loo} # note: the loo function accepts a 'cores' argument that we recommend specifying # when working with bigger datasets loo1 <- loo(fit1) loo2 <- loo(fit2) loo3 <- loo(fit3) loo4 <- loo(fit4) lpd_point <- cbind( loo1$pointwise[,"elpd_loo"], loo2$pointwise[,"elpd_loo"], loo3$pointwise[,"elpd_loo"], loo4$pointwise[,"elpd_loo"] ) ``` With `loo` we don't get any warnings for models 3 and 4, but for illustration of good results, we display the diagnostic details for these models anyway. ```{r print-loo} print(loo3) print(loo4) ``` One benefit of PSIS-LOO over WAIC is better diagnostics. Here for both models 3 and 4 all $k<0.7$ and the Monte Carlo SE of `elpd_loo` is 0.1 or less, and we can expect the model comparison to be reliable. 
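If you want to check these diagnostics programmatically instead of reading the printed output, a minimal sketch (using the Pareto $k$ and Monte Carlo SE helpers from __loo__) looks like this:

```{r diag-check}
# largest Pareto k and Monte Carlo SE of elpd_loo for models 3 and 4
max(pareto_k_values(loo3))
max(pareto_k_values(loo4))
mcse_loo(loo3)
mcse_loo(loo4)
```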
Next we compute and compare 1) WAIC weights, 2) Pseudo-BMA weights without Bayesian bootstrap, 3) Pseudo-BMA+ weights with Bayesian bootstrap, and 4) Bayesian stacking weights. ```{r weights} waic_wts <- exp(waics) / sum(exp(waics)) pbma_wts <- pseudobma_weights(lpd_point, BB=FALSE) pbma_BB_wts <- pseudobma_weights(lpd_point) # default is BB=TRUE stacking_wts <- stacking_weights(lpd_point) round(cbind(waic_wts, pbma_wts, pbma_BB_wts, stacking_wts), 2) ``` With all approaches Model 4 with `neocortex` and `log(mass)` gets most of the weight. Based on theory, Pseudo-BMA weights without Bayesian bootstrap should be close to WAIC weights, and we can also see that here. Pseudo-BMA+ weights with Bayesian bootstrap provide more cautious weights further away from 0 and 1 (see Yao et al. (2018) for a discussion of why this can be beneficial and results from related experiments). In this particular example, the Bayesian stacking weights are not much different from the other weights. One of the benefits of stacking is that it manages well if there are many similar models. Consider for example that there could be many irrelevant covariates that when included would produce a similar model to one of the existing models. To emulate this situation here we simply copy the first model a bunch of times, but you can imagine that instead we would have ten alternative models with about the same predictive performance. WAIC weights for such a scenario would be close to the following: ```{r waic_wts_demo} waic_wts_demo <- exp(waics[c(1,1,1,1,1,1,1,1,1,1,2,3,4)]) / sum(exp(waics[c(1,1,1,1,1,1,1,1,1,1,2,3,4)])) round(waic_wts_demo, 3) ``` Notice how much the weight for model 4 is lowered now that more models similar to model 1 (or in this case identical) have been added. Both WAIC weights and Pseudo-BMA approaches first estimate the predictive performance separately for each model and then compute weights based on estimated relative predictive performances. Similar models share similar weights so the weights of other models must be reduced for the total sum of the weights to remain the same. On the other hand, stacking optimizes the weights _jointly_, allowing for the very similar models (in this toy example repeated models) to share their weight while more unique models keep their original weights. In our example we can see this difference clearly: ```{r stacking_weights} stacking_weights(lpd_point[,c(1,1,1,1,1,1,1,1,1,1,2,3,4)]) ``` Using stacking, the weight for the best model stays essentially unchanged. # Example: Oceanic tool complexity Another example we consider is the Kline oceanic tool complexity data, which McElreath describes as follows: >Different historical island populations possessed tool kits of different size. These kits include fish hooks, axes, boats, hand plows, and many other types of tools. A number of theories predict that larger populations will both develop and sustain more complex tool kits. ... It's also suggested that contact rates among populations effectively increases population [sic, probably should be tool kit] size, as it's relevant to technological evolution. We build models predicting the total number of tools given the log population size and the contact rate (high vs. low). 
```{r Kline} data(Kline) d <- Kline d$log_pop <- log(d$population) d$contact_high <- ifelse(d$contact=="high", 1, 0) str(d) ``` We start with a Poisson regression model with the log population size, the contact rate, and an interaction term between them (priors are informative priors as in _Statistical Rethinking_). ```{r fit10, results="hide"} fit10 <- stan_glm( total_tools ~ log_pop + contact_high + log_pop * contact_high, family = poisson(link = "log"), data = d, prior = normal(0, 1, autoscale = FALSE), prior_intercept = normal(0, 100, autoscale = FALSE), seed = 2030 ) ``` Before running other models, we check whether Poisson is good choice as the conditional observation model. ```{r loo10} loo10 <- loo(fit10) print(loo10) ``` We get at least one observation with $k>0.7$ and the estimated effective number of parameters `p_loo` is larger than the total number of parameters in the model. This indicates that Poisson might be too narrow. A negative binomial model might be better, but with so few observations it is not so clear. We can compute LOO more accurately by running Stan again for the leave-one-out folds with high $k$ estimates. When using __rstanarm__ this can be done by specifying the `k_threshold` argument: ```{r loo10-threshold} loo10 <- loo(fit10, k_threshold=0.7) print(loo10) ``` In this case we see that there is not much difference, and thus it is relatively safe to continue. As a comparison we also compute WAIC: ```{r waic10} waic10 <- waic(fit10) print(waic10) ``` The WAIC computation is giving warnings and the estimated ELPD is slightly more optimistic. We recommend using the PSIS-LOO results instead. To assess whether the contact rate and interaction term are useful, we can make a comparison to models without these terms. ```{r contact_high, results="hide"} fit11 <- update(fit10, formula = total_tools ~ log_pop + contact_high) fit12 <- update(fit10, formula = total_tools ~ log_pop) ``` ```{r loo-contact_high} (loo11 <- loo(fit11)) (loo12 <- loo(fit12)) ``` ```{r relo-contact_high} loo11 <- loo(fit11, k_threshold=0.7) loo12 <- loo(fit12, k_threshold=0.7) lpd_point <- cbind( loo10$pointwise[, "elpd_loo"], loo11$pointwise[, "elpd_loo"], loo12$pointwise[, "elpd_loo"] ) ``` For comparison we'll also compute WAIC values for these additional models: ```{r waic-contact_high} waic11 <- waic(fit11) waic12 <- waic(fit12) waics <- c( waic10$estimates["elpd_waic", 1], waic11$estimates["elpd_waic", 1], waic12$estimates["elpd_waic", 1] ) ``` The WAIC computation again gives warnings, and we recommend using PSIS-LOO instead. Finally, we compute 1) WAIC weights, 2) Pseudo-BMA weights without Bayesian bootstrap, 3) Pseudo-BMA+ weights with Bayesian bootstrap, and 4) Bayesian stacking weights. ```{r weights-contact_high} waic_wts <- exp(waics) / sum(exp(waics)) pbma_wts <- pseudobma_weights(lpd_point, BB=FALSE) pbma_BB_wts <- pseudobma_weights(lpd_point) # default is BB=TRUE stacking_wts <- stacking_weights(lpd_point) round(cbind(waic_wts, pbma_wts, pbma_BB_wts, stacking_wts), 2) ``` All weights favor the second model with the log population and the contact rate. WAIC weights and Pseudo-BMA weights (without Bayesian bootstrap) are similar, while Pseudo-BMA+ is more cautious and closer to stacking weights. It may seem surprising that Bayesian stacking is giving zero weight to the first model, but this is likely due to the fact that the estimated effect for the interaction term is close to zero and thus models 1 and 2 give very similar predictions. 
In other words, incorporating the model with the interaction (model 1) into the model average doesn't improve the predictions at all and so model 1 is given a weight of 0. On the other hand, models 2 and 3 are giving slightly different predictions and thus their combination may be slightly better than either alone. This behavior is related to the repeated similar model illustration in the milk example above. # Simpler coding using `loo_model_weights` function Although in the examples above we called the `stacking_weights` and `pseudobma_weights` functions directly, we can also use the `loo_model_weights` wrapper, which takes as its input either a list of pointwise log-likelihood matrices or a list of precomputed loo objects. There are also `loo_model_weights` methods for stanreg objects (fitted model objects from __rstanarm__) as well as fitted model objects from other packages (e.g. __brms__) that do the preparation work for the user (see, e.g., the examples at `help("loo_model_weights", package = "rstanarm")`). ```{r loo_model_weights} # using list of loo objects loo_list <- list(loo10, loo11, loo12) loo_model_weights(loo_list) loo_model_weights(loo_list, method = "pseudobma") loo_model_weights(loo_list, method = "pseudobma", BB = FALSE) ``` # References McElreath, R. (2016). _Statistical rethinking: A Bayesian course with examples in R and Stan_. Chapman & Hall/CRC. http://xcelab.net/rm/statistical-rethinking/ Piironen, J. and Vehtari, A. (2017a). Sparsity information and regularization in the horseshoe and other shrinkage priors. In Electronic Journal of Statistics, 11(2):5018-5051. [Online](https://projecteuclid.org/euclid.ejs/1513306866). Piironen, J. and Vehtari, A. (2017b). Comparison of Bayesian predictive methods for model selection. Statistics and Computing, 27(3):711-735. \doi:10.1007/s11222-016-9649-y. [Online](https://link.springer.com/article/10.1007/s11222-016-9649-y). Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. _Statistics and Computing_. 27(5), 1413--1432. \doi:10.1007/s11222-016-9696-4. [online](https://link.springer.com/article/10.1007/s11222-016-9696-4), [arXiv preprint arXiv:1507.04544](https://arxiv.org/abs/1507.04544). Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. *Journal of Machine Learning Research*, 25(72):1-58. [PDF](https://jmlr.org/papers/v25/19-556.html) Yao, Y., Vehtari, A., Simpson, D., and Gelman, A. (2018). Using stacking to average Bayesian predictive distributions. In Bayesian Analysis, \doi:10.1214/17-BA1091. [Online](https://projecteuclid.org/euclid.ba/1516093227). loo/inst/doc/loo2-large-data.R0000644000176200001440000001567515122301514015572 0ustar liggesusersparams <- list(EVAL = TRUE) ## ----SETTINGS-knitr, include=FALSE-------------------------------------------- stopifnot(require(knitr)) opts_chunk$set( comment=NA, eval = if (isTRUE(exists("params"))) params$EVAL else FALSE, dev = "png", dpi = 150, fig.asp = 0.618, fig.width = 5, out.width = "60%", fig.align = "center" ) ## ----setup, message=FALSE----------------------------------------------------- library("rstan") library("loo") set.seed(4711) ## ----llfun_logistic----------------------------------------------------------- # we'll add an argument log to toggle whether this is a log-likelihood or # likelihood function. this will be useful later in the vignette. 
llfun_logistic <- function(data_i, draws, log = TRUE) { x_i <- as.matrix(data_i[, which(grepl(colnames(data_i), pattern = "X")), drop=FALSE]) logit_pred <- draws %*% t(x_i) dbinom(x = data_i$y, size = 1, prob = 1/(1 + exp(-logit_pred)), log = log) } ## ----eval=FALSE--------------------------------------------------------------- # # Prepare data # url <- "http://stat.columbia.edu/~gelman/arm/examples/arsenic/wells.dat" # wells <- read.table(url) # wells$dist100 <- with(wells, dist / 100) # X <- model.matrix(~ dist100 + arsenic, wells) # standata <- list(y = wells$switch, X = X, N = nrow(X), P = ncol(X)) # # # Compile # stan_mod <- stan_model("logistic.stan") # # # Fit model # fit_1 <- sampling(stan_mod, data = standata, seed = 4711) # print(fit_1, pars = "beta") ## ----eval=FALSE--------------------------------------------------------------- # # used for draws argument to loo_i # parameter_draws_1 <- extract(fit_1)$beta # # # used for data argument to loo_i # stan_df_1 <- as.data.frame(standata) # # # compute relative efficiency (this is slow and optional but is recommended to allow # # for adjusting PSIS effective sample size based on MCMC effective sample size) # r_eff <- relative_eff(llfun_logistic, # log = FALSE, # relative_eff wants likelihood not log-likelihood values # chain_id = rep(1:4, each = 1000), # data = stan_df_1, # draws = parameter_draws_1, # cores = 2) # # loo_i(i = 1, llfun_logistic, r_eff = r_eff, data = stan_df_1, draws = parameter_draws_1) ## ----eval=FALSE--------------------------------------------------------------- # set.seed(4711) # loo_ss_1 <- # loo_subsample( # llfun_logistic, # observations = 100, # take a subsample of size 100 # cores = 2, # # these next objects were computed above # r_eff = r_eff, # draws = parameter_draws_1, # data = stan_df_1 # ) # print(loo_ss_1) ## ----eval=FALSE--------------------------------------------------------------- # set.seed(4711) # loo_ss_1b <- # update( # loo_ss_1, # observations = 200, # subsample 200 instead of 100 # r_eff = r_eff, # draws = parameter_draws_1, # data = stan_df_1 # ) # print(loo_ss_1b) ## ----eval=FALSE--------------------------------------------------------------- # set.seed(4711) # loo_ss_1c <- # loo_subsample( # x = llfun_logistic, # r_eff = r_eff, # draws = parameter_draws_1, # data = stan_df_1, # observations = 100, # estimator = "hh_pps", # use Hansen-Hurwitz # loo_approximation = "lpd", # use lpd instead of plpd # loo_approximation_draws = 100, # cores = 2 # ) # print(loo_ss_1c) ## ----eval=FALSE--------------------------------------------------------------- # fit_laplace <- optimizing(stan_mod, data = standata, draws = 2000, # importance_resampling = TRUE) # parameter_draws_laplace <- fit_laplace$theta_tilde # draws from approximate posterior # log_p <- fit_laplace$log_p # log density of the posterior # log_g <- fit_laplace$log_g # log density of the approximation ## ----eval=FALSE--------------------------------------------------------------- # set.seed(4711) # loo_ap_1 <- # loo_approximate_posterior( # x = llfun_logistic, # draws = parameter_draws_laplace, # data = stan_df_1, # log_p = log_p, # log_g = log_g, # cores = 2 # ) # print(loo_ap_1) ## ----eval=FALSE--------------------------------------------------------------- # set.seed(4711) # loo_ap_ss_1 <- # loo_subsample( # x = llfun_logistic, # draws = parameter_draws_laplace, # data = stan_df_1, # log_p = log_p, # log_g = log_g, # observations = 100, # cores = 2 # ) # print(loo_ap_ss_1) ## 
----eval=FALSE--------------------------------------------------------------- # standata$X[, "arsenic"] <- log(standata$X[, "arsenic"]) # fit_2 <- sampling(stan_mod, data = standata) # parameter_draws_2 <- extract(fit_2)$beta # stan_df_2 <- as.data.frame(standata) # # # recompute subsampling loo for first model for demonstration purposes # # # compute relative efficiency (this is slow and optional but is recommended to allow # # for adjusting PSIS effective sample size based on MCMC effective sample size) # r_eff_1 <- relative_eff( # llfun_logistic, # log = FALSE, # relative_eff wants likelihood not log-likelihood values # chain_id = rep(1:4, each = 1000), # data = stan_df_1, # draws = parameter_draws_1, # cores = 2 # ) # # set.seed(4711) # loo_ss_1 <- loo_subsample( # x = llfun_logistic, # r_eff = r_eff_1, # draws = parameter_draws_1, # data = stan_df_1, # observations = 200, # cores = 2 # ) # # # compute subsampling loo for a second model (with log-arsenic) # # r_eff_2 <- relative_eff( # llfun_logistic, # log = FALSE, # relative_eff wants likelihood not log-likelihood values # chain_id = rep(1:4, each = 1000), # data = stan_df_2, # draws = parameter_draws_2, # cores = 2 # ) # loo_ss_2 <- loo_subsample( # x = llfun_logistic, # r_eff = r_eff_2, # draws = parameter_draws_2, # data = stan_df_2, # observations = 200, # cores = 2 # ) # # print(loo_ss_2) ## ----eval=FALSE--------------------------------------------------------------- # # Compare # comp <- loo_compare(loo_ss_1, loo_ss_2) # print(comp) ## ----eval=FALSE--------------------------------------------------------------- # loo_ss_2 <- # loo_subsample( # x = llfun_logistic, # r_eff = r_eff_2, # draws = parameter_draws_2, # data = stan_df_2, # observations = loo_ss_1, # cores = 2 # ) ## ----eval=FALSE--------------------------------------------------------------- # idx <- obs_idx(loo_ss_1) # loo_ss_2 <- loo_subsample( # x = llfun_logistic, # r_eff = r_eff_2, # draws = parameter_draws_2, # data = stan_df_2, # observations = idx, # cores = 2 # ) ## ----eval=FALSE--------------------------------------------------------------- # comp <- loo_compare(loo_ss_1, loo_ss_2) # print(comp) ## ----eval=FALSE--------------------------------------------------------------- # # use loo() instead of loo_subsample() to compute full PSIS-LOO for model 2 # loo_full_2 <- loo( # x = llfun_logistic, # r_eff = r_eff_2, # draws = parameter_draws_2, # data = stan_df_2, # cores = 2 # ) # loo_compare(loo_ss_1, loo_full_2) loo/inst/doc/loo2-with-rstan.html0000644000176200001440000006103115122306002016414 0ustar liggesusers Writing Stan programs for use with the loo package

Writing Stan programs for use with the loo package

Aki Vehtari and Jonah Gabry

2025-12-22

Introduction

This vignette demonstrates how to write a Stan program that computes and stores the pointwise log-likelihood required for using the loo package. The other vignettes included with the package demonstrate additional functionality.

Some sections from this vignette are excerpted from our papers

  • Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. Statistics and Computing. 27(5), 1413–1432. doi:10.1007/s11222-016-9696-4. Links: published | arXiv preprint.

  • Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. Journal of Machine Learning Research, 25(72):1-58. PDF

which provide important background for understanding the methods implemented in the package.

Example: Well water in Bangladesh

This example comes from a survey of residents from a small area in Bangladesh that was affected by arsenic in drinking water. Respondents with elevated arsenic levels in their wells were asked if they were interested in getting water from a neighbor’s well, and a series of logistic regressions were fit to predict this binary response given various information about the households (Gelman and Hill, 2007). Here we fit a model for the well-switching response given two predictors: the arsenic level of the water in the resident’s home, and the distance of the house from the nearest safe well.

The sample size in this example is \(N=3020\), which is not huge but is large enough that it is important to have a computational method for LOO that is fast for each data point. On the plus side, with such a large dataset, the influence of any given observation is small, and so the computations should be stable.

Coding the Stan model

Here is the Stan code for fitting the logistic regression model, which we save in a file called logistic.stan:

// Note: some syntax used in this program requires RStan >= 2.26 (or CmdStanR)
// To use an older version of RStan change the line declaring `y` to:
//    int<lower=0,upper=1> y[N];
data {
  int<lower=0> N;                   // number of data points
  int<lower=0> P;                   // number of predictors (including intercept)
  matrix[N,P] X;                    // predictors (including 1s for intercept)
  array[N] int<lower=0,upper=1> y;  // binary outcome
}
parameters {
  vector[P] beta;
}
model {
  beta ~ normal(0, 1);
  y ~ bernoulli_logit(X * beta);
}
generated quantities {
  vector[N] log_lik;
  for (n in 1:N) {
    log_lik[n] = bernoulli_logit_lpmf(y[n] | X[n] * beta);
  }
}

We have defined the log likelihood as a vector named log_lik in the generated quantities block so that the individual terms will be saved by Stan. After running Stan, log_lik can be extracted (using the extract_log_lik function provided in the loo package) as an \(S \times N\) matrix, where \(S\) is the number of simulations (posterior draws) and \(N\) is the number of data points.

Fitting the model with RStan

Next we fit the model in Stan using the rstan package:

library("rstan")

# Prepare data 
url <- "http://stat.columbia.edu/~gelman/arm/examples/arsenic/wells.dat"
wells <- read.table(url)
wells$dist100 <- with(wells, dist / 100)
X <- model.matrix(~ dist100 + arsenic, wells)
standata <- list(y = wells$switch, X = X, N = nrow(X), P = ncol(X))

# Fit model
fit_1 <- stan("logistic.stan", data = standata)
print(fit_1, pars = "beta")
         mean se_mean   sd  2.5%   25%   50%   75% 97.5% n_eff Rhat
beta[1]  0.00       0 0.08 -0.16 -0.05  0.00  0.05  0.15  1964    1
beta[2] -0.89       0 0.10 -1.09 -0.96 -0.89 -0.82 -0.68  2048    1
beta[3]  0.46       0 0.04  0.38  0.43  0.46  0.49  0.54  2198    1

Computing approximate leave-one-out cross-validation using PSIS-LOO

We can then use the loo package to compute the efficient PSIS-LOO approximation to exact LOO-CV:

library("loo")

# Extract pointwise log-likelihood
# using merge_chains=FALSE returns an array, which is easier to 
# use with relative_eff()
log_lik_1 <- extract_log_lik(fit_1, merge_chains = FALSE)

# as of loo v2.0.0 we can optionally provide relative effective sample sizes
# when calling loo, which allows for better estimates of the PSIS effective
# sample sizes and Monte Carlo error
r_eff <- relative_eff(exp(log_lik_1), cores = 2) 

# preferably use more than 2 cores (as many cores as possible)
# will use value of 'mc.cores' option if cores is not specified
loo_1 <- loo(log_lik_1, r_eff = r_eff, cores = 2)
print(loo_1)
Computed from 4000 by 3020 log-likelihood matrix

         Estimate   SE
elpd_loo  -1968.5 15.6
p_loo         3.2  0.1
looic      3937.0 31.2
------
Monte Carlo SE of elpd_loo is 0.0.
MCSE and ESS estimates assume MCMC draws (r_eff in [0.5, 1.3]).

All Pareto k estimates are good (k < 0.7).
See help('pareto-k-diagnostic') for details.

The printed output from the loo function shows the estimates \(\widehat{\mbox{elpd}}_{\rm loo}\) (expected log predictive density), \(\widehat{p}_{\rm loo}\) (effective number of parameters), and \({\rm looic} =-2\, \widehat{\mbox{elpd}}_{\rm loo}\) (the LOO information criterion).

The line at the bottom of the printed output provides information about the reliability of the LOO approximation (the interpretation of the \(k\) parameter is explained in help('pareto-k-diagnostic') and in greater detail in Vehtari, Simpson, Gelman, Yao, and Gabry (2024)). In this case the message tells us that all of the estimates for \(k\) are fine.
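For example (a minimal sketch reusing the loo_1 object created above), the estimates and the individual Pareto \(k\) values can be inspected directly with functions from the loo package:

loo_1$estimates                    # matrix with rows elpd_loo, p_loo, looic
head(pareto_k_values(loo_1))       # one Pareto k estimate per observation
pareto_k_table(loo_1)              # counts of k estimates per diagnostic category
plot(loo_1, label_points = FALSE)  # diagnostic plot of the k estimates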

Comparing models

To compare this model to an alternative model for the same data we can use the loo_compare function in the loo package. First we’ll fit a second model to the well-switching data, using log(arsenic) instead of arsenic as a predictor:

standata$X[, "arsenic"] <- log(standata$X[, "arsenic"])
fit_2 <- stan(fit = fit_1, data = standata) 

log_lik_2 <- extract_log_lik(fit_2, merge_chains = FALSE)
r_eff_2 <- relative_eff(exp(log_lik_2))
loo_2 <- loo(log_lik_2, r_eff = r_eff_2, cores = 2)
print(loo_2)
Computed from 4000 by 3020 log-likelihood matrix

         Estimate   SE
elpd_loo  -1952.3 16.2
p_loo         3.1  0.1
looic      3904.6 32.4
------
Monte Carlo SE of elpd_loo is 0.0.
MCSE and ESS estimates assume MCMC draws (r_eff in [0.4, 1.2]).

All Pareto k estimates are good (k < 0.7).
See help('pareto-k-diagnostic') for details.

We can now compare the models on LOO using the loo_compare function:

# Compare
comp <- loo_compare(loo_1, loo_2)

This new object, comp, contains the estimated difference of expected leave-one-out prediction errors between the two models, along with the standard error:

print(comp) # can set simplify=FALSE for more detailed print output
       elpd_diff se_diff
model2   0.0       0.0  
model1 -16.3       4.4  

The first column shows the difference in ELPD relative to the model with the largest ELPD. In this case, the difference in elpd (and its scale relative to the approximate standard error of the difference) indicates a preference for the second model (model2).
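As a rough additional sketch (treating the difference as approximately normal, which is only a crude summary), an interval for the ELPD difference can be formed directly from the comp object:

elpd_diff <- comp["model1", "elpd_diff"]
se_diff   <- comp["model1", "se_diff"]
# approximate 95% interval for elpd(model1) - elpd(model2)
elpd_diff + c(-2, 2) * se_diff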

References

Gelman, A., and Hill, J. (2007). Data Analysis Using Regression and Multilevel Hierarchical Models. Cambridge University Press.

Stan Development Team (2017). The Stan C++ Library, Version 2.17.0. https://mc-stan.org/

Stan Development Team (2018) RStan: the R interface to Stan, Version 2.17.3. https://mc-stan.org/

Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. Statistics and Computing. 27(5), 1413–1432. doi:10.1007/s11222-016-9696-4. online, arXiv preprint arXiv:1507.04544.

Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. Journal of Machine Learning Research, 25(72):1-58. PDF

loo/inst/doc/loo2-with-rstan.Rmd0000644000176200001440000002112514641333357016214 0ustar liggesusers--- title: "Writing Stan programs for use with the loo package" author: "Aki Vehtari and Jonah Gabry" date: "`r Sys.Date()`" output: html_vignette: toc: yes params: EVAL: !r identical(Sys.getenv("NOT_CRAN"), "true") --- ```{r settings, child="children/SETTINGS-knitr.txt"} ``` ```{r, child="children/SEE-ONLINE.txt", eval = if (isTRUE(exists("params"))) !params$EVAL else TRUE} ``` # Introduction This vignette demonstrates how to write a Stan program that computes and stores the pointwise log-likelihood required for using the __loo__ package. The other vignettes included with the package demonstrate additional functionality. Some sections from this vignette are excerpted from our papers * Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. _Statistics and Computing_. 27(5), 1413--1432. \doi:10.1007/s11222-016-9696-4. Links: [published](https://link.springer.com/article/10.1007/s11222-016-9696-4) | [arXiv preprint](https://arxiv.org/abs/1507.04544). * Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. *Journal of Machine Learning Research*, 25(72):1-58. [PDF](https://jmlr.org/papers/v25/19-556.html) which provide important background for understanding the methods implemented in the package. # Example: Well water in Bangladesh This example comes from a survey of residents from a small area in Bangladesh that was affected by arsenic in drinking water. Respondents with elevated arsenic levels in their wells were asked if they were interested in getting water from a neighbor's well, and a series of logistic regressions were fit to predict this binary response given various information about the households (Gelman and Hill, 2007). Here we fit a model for the well-switching response given two predictors: the arsenic level of the water in the resident's home, and the distance of the house from the nearest safe well. The sample size in this example is $N=3020$, which is not huge but is large enough that it is important to have a computational method for LOO that is fast for each data point. On the plus side, with such a large dataset, the influence of any given observation is small, and so the computations should be stable. ## Coding the Stan model Here is the Stan code for fitting the logistic regression model, which we save in a file called `logistic.stan`: ``` // Note: some syntax used in this program requires RStan >= 2.26 (or CmdStanR) // To use an older version of RStan change the line declaring `y` to: // int y[N]; data { int N; // number of data points int P; // number of predictors (including intercept) matrix[N,P] X; // predictors (including 1s for intercept) array[N] int y; // binary outcome } parameters { vector[P] beta; } model { beta ~ normal(0, 1); y ~ bernoulli_logit(X * beta); } generated quantities { vector[N] log_lik; for (n in 1:N) { log_lik[n] = bernoulli_logit_lpmf(y[n] | X[n] * beta); } } ``` We have defined the log likelihood as a vector named `log_lik` in the generated quantities block so that the individual terms will be saved by Stan. After running Stan, `log_lik` can be extracted (using the `extract_log_lik` function provided in the **loo** package) as an $S \times N$ matrix, where $S$ is the number of simulations (posterior draws) and $N$ is the number of data points. 
## Fitting the model with RStan Next we fit the model in Stan using the **rstan** package: ```{r, eval=FALSE} library("rstan") # Prepare data url <- "http://stat.columbia.edu/~gelman/arm/examples/arsenic/wells.dat" wells <- read.table(url) wells$dist100 <- with(wells, dist / 100) X <- model.matrix(~ dist100 + arsenic, wells) standata <- list(y = wells$switch, X = X, N = nrow(X), P = ncol(X)) # Fit model fit_1 <- stan("logistic.stan", data = standata) print(fit_1, pars = "beta") ``` ``` mean se_mean sd 2.5% 25% 50% 75% 97.5% n_eff Rhat beta[1] 0.00 0 0.08 -0.16 -0.05 0.00 0.05 0.15 1964 1 beta[2] -0.89 0 0.10 -1.09 -0.96 -0.89 -0.82 -0.68 2048 1 beta[3] 0.46 0 0.04 0.38 0.43 0.46 0.49 0.54 2198 1 ``` ## Computing approximate leave-one-out cross-validation using PSIS-LOO We can then use the **loo** package to compute the efficient PSIS-LOO approximation to exact LOO-CV: ```{r, eval=FALSE} library("loo") # Extract pointwise log-likelihood # using merge_chains=FALSE returns an array, which is easier to # use with relative_eff() log_lik_1 <- extract_log_lik(fit_1, merge_chains = FALSE) # as of loo v2.0.0 we can optionally provide relative effective sample sizes # when calling loo, which allows for better estimates of the PSIS effective # sample sizes and Monte Carlo error r_eff <- relative_eff(exp(log_lik_1), cores = 2) # preferably use more than 2 cores (as many cores as possible) # will use value of 'mc.cores' option if cores is not specified loo_1 <- loo(log_lik_1, r_eff = r_eff, cores = 2) print(loo_1) ``` ``` Computed from 4000 by 3020 log-likelihood matrix Estimate SE elpd_loo -1968.5 15.6 p_loo 3.2 0.1 looic 3937.0 31.2 ------ Monte Carlo SE of elpd_loo is 0.0. MCSE and ESS estimates assume MCMC draws (r_eff in [0.5, 1.3]). All Pareto k estimates are good (k < 0.7). See help('pareto-k-diagnostic') for details. ``` The printed output from the `loo` function shows the estimates $\widehat{\mbox{elpd}}_{\rm loo}$ (expected log predictive density), $\widehat{p}_{\rm loo}$ (effective number of parameters), and ${\rm looic} =-2\, \widehat{\mbox{elpd}}_{\rm loo}$ (the LOO information criterion). The line at the bottom of the printed output provides information about the reliability of the LOO approximation (the interpretation of the $k$ parameter is explained in `help('pareto-k-diagnostic')` and in greater detail in Vehtari, Simpson, Gelman, Yao, and Gabry (2019)). In this case the message tells us that all of the estimates for $k$ are fine. ## Comparing models To compare this model to an alternative model for the same data we can use the `loo_compare` function in the **loo** package. First we'll fit a second model to the well-switching data, using `log(arsenic)` instead of `arsenic` as a predictor: ```{r, eval=FALSE} standata$X[, "arsenic"] <- log(standata$X[, "arsenic"]) fit_2 <- stan(fit = fit_1, data = standata) log_lik_2 <- extract_log_lik(fit_2, merge_chains = FALSE) r_eff_2 <- relative_eff(exp(log_lik_2)) loo_2 <- loo(log_lik_2, r_eff = r_eff_2, cores = 2) print(loo_2) ``` ``` Computed from 4000 by 3020 log-likelihood matrix Estimate SE elpd_loo -1952.3 16.2 p_loo 3.1 0.1 looic 3904.6 32.4 ------ Monte Carlo SE of elpd_loo is 0.0. MCSE and ESS estimates assume MCMC draws (r_eff in [0.4, 1.2]). All Pareto k estimates are good (k < 0.7). See help('pareto-k-diagnostic') for details. 
``` We can now compare the models on LOO using the `loo_compare` function: ```{r, eval=FALSE} # Compare comp <- loo_compare(loo_1, loo_2) ``` This new object, `comp`, contains the estimated difference of expected leave-one-out prediction errors between the two models, along with the standard error: ```{r, eval=FALSE} print(comp) # can set simplify=FALSE for more detailed print output ``` ``` elpd_diff se_diff model2 0.0 0.0 model1 -16.3 4.4 ``` The first column shows the difference in ELPD relative to the model with the largest ELPD. In this case, the difference in `elpd` and its scale relative to the approximate standard error of the difference) indicates a preference for the second model (`model2`). # References Gelman, A., and Hill, J. (2007). *Data Analysis Using Regression and Multilevel Hierarchical Models.* Cambridge University Press. Stan Development Team (2017). _The Stan C++ Library, Version 2.17.0._ https://mc-stan.org/ Stan Development Team (2018) _RStan: the R interface to Stan, Version 2.17.3._ https://mc-stan.org/ Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. _Statistics and Computing_. 27(5), 1413--1432. \doi:10.1007/s11222-016-9696-4. [online](https://link.springer.com/article/10.1007/s11222-016-9696-4), [arXiv preprint arXiv:1507.04544](https://arxiv.org/abs/1507.04544). Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. *Journal of Machine Learning Research*, 25(72):1-58. [PDF](https://jmlr.org/papers/v25/19-556.html) loo/inst/doc/loo2-large-data.Rmd0000644000176200001440000005044314641333357016122 0ustar liggesusers--- title: "Using Leave-one-out cross-validation for large data" author: "Mans Magnusson, Paul Bürkner, Aki Vehtari and Jonah Gabry" date: "`r Sys.Date()`" output: html_vignette: toc: yes params: EVAL: !r identical(Sys.getenv("NOT_CRAN"), "true") --- ```{r settings, child="children/SETTINGS-knitr.txt"} ``` ```{r, child="children/SEE-ONLINE.txt", eval = if (isTRUE(exists("params"))) !params$EVAL else TRUE} ``` # Introduction This vignette demonstrates how to do leave-one-out cross-validation for large data using the __loo__ package and Stan. There are two approaches covered: LOO with subsampling and LOO using approximations to posterior distributions. Some sections from this vignette are excerpted from the papers * Magnusson, M., Riis Andersen, M., Jonasson, J. and Vehtari, A. (2020). Leave-One-Out Cross-Validation for Model Comparison in Large Data. Proceedings of the 23rd International Conference on Artificial Intelligence and Statistics (AISTATS), in PMLR 108. [arXiv preprint arXiv:2001.00980](https://arxiv.org/abs/2001.00980). * Magnusson, M., Andersen, M., Jonasson, J. & Vehtari, A. (2019). Bayesian leave-one-out cross-validation for large data. Proceedings of the 36th International Conference on Machine Learning, in PMLR 97:4244-4253 [online](http://proceedings.mlr.press/v97/magnusson19a.html), [arXiv preprint arXiv:1904.10679](https://arxiv.org/abs/1904.10679). * Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. _Statistics and Computing_. 27(5), 1413--1432. \doi:10.1007/s11222-016-9696-4. Links: [published](https://link.springer.com/article/10.1007/s11222-016-9696-4) | [arXiv preprint](https://arxiv.org/abs/1507.04544). * Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. 
*Journal of Machine Learning Research*, 25(72):1-58. [PDF](https://jmlr.org/papers/v25/19-556.html) which provide important background for understanding the methods implemented in the package. # Setup In addition to the __loo__ package, we'll also be using __rstan__: ```{r setup, message=FALSE} library("rstan") library("loo") set.seed(4711) ``` # Example: Well water in Bangladesh We will use the same example as in the vignette [_Writing Stan programs for use with the loo package_](http://mc-stan.org/loo/articles/loo2-with-rstan.html). See that vignette for a description of the problem and data. The sample size in this example is only $N=3020$, which is not large enough to _require_ the special methods for large data described in this vignette, but is sufficient for demonstration purposes in this tutorial. ## Coding the Stan model Here is the Stan code for fitting the logistic regression model, which we save in a file called `logistic.stan`: ``` // Note: some syntax used in this program requires RStan >= 2.26 (or CmdStanR) // To use an older version of RStan change the line declaring `y` to: // int y[N]; data { int N; // number of data points int P; // number of predictors (including intercept) matrix[N,P] X; // predictors (including 1s for intercept) array[N] int y; // binary outcome } parameters { vector[P] beta; } model { beta ~ normal(0, 1); y ~ bernoulli_logit(X * beta); } ``` Importantly, unlike the general approach recommended in [_Writing Stan programs for use with the loo package_](http://mc-stan.org/loo/articles/loo2-with-rstan.html), we do _not_ compute the log-likelihood for each observation in the `generated quantities` block of the Stan program. Here we are assuming we have a large data set (larger than the one we're actually using in this demonstration) and so it is preferable to instead define a function in R to compute the log-likelihood for each data point when needed rather than storing all of the log-likelihood values in memory. The log-likelihood in R can be coded as follows: ```{r llfun_logistic} # we'll add an argument log to toggle whether this is a log-likelihood or # likelihood function. this will be useful later in the vignette. llfun_logistic <- function(data_i, draws, log = TRUE) { x_i <- as.matrix(data_i[, which(grepl(colnames(data_i), pattern = "X")), drop=FALSE]) logit_pred <- draws %*% t(x_i) dbinom(x = data_i$y, size = 1, prob = 1/(1 + exp(-logit_pred)), log = log) } ``` The function `llfun_logistic()` needs to have arguments `data_i` and `draws`. Below we will test that the function is working by using the `loo_i()` function. ## Fitting the model with RStan Next we fit the model in Stan using the **rstan** package: ```{r, eval=FALSE} # Prepare data url <- "http://stat.columbia.edu/~gelman/arm/examples/arsenic/wells.dat" wells <- read.table(url) wells$dist100 <- with(wells, dist / 100) X <- model.matrix(~ dist100 + arsenic, wells) standata <- list(y = wells$switch, X = X, N = nrow(X), P = ncol(X)) # Compile stan_mod <- stan_model("logistic.stan") # Fit model fit_1 <- sampling(stan_mod, data = standata, seed = 4711) print(fit_1, pars = "beta") ``` ``` mean se_mean sd 2.5% 25% 50% 75% 97.5% n_eff Rhat beta[1] 0.00 0 0.08 -0.15 -0.05 0.00 0.06 0.16 1933 1 beta[2] -0.89 0 0.10 -1.09 -0.96 -0.89 -0.82 -0.69 2332 1 beta[3] 0.46 0 0.04 0.38 0.43 0.46 0.49 0.54 2051 1 ``` Before we move on to computing LOO we can now test that the log-likelihood function we wrote is working as it should. 
The `loo_i()` function is a helper function that can be used to test a log-likelihood function on a single observation. ```{r, eval=FALSE} # used for draws argument to loo_i parameter_draws_1 <- extract(fit_1)$beta # used for data argument to loo_i stan_df_1 <- as.data.frame(standata) # compute relative efficiency (this is slow and optional but is recommended to allow # for adjusting PSIS effective sample size based on MCMC effective sample size) r_eff <- relative_eff(llfun_logistic, log = FALSE, # relative_eff wants likelihood not log-likelihood values chain_id = rep(1:4, each = 1000), data = stan_df_1, draws = parameter_draws_1, cores = 2) loo_i(i = 1, llfun_logistic, r_eff = r_eff, data = stan_df_1, draws = parameter_draws_1) ``` ``` $pointwise elpd_loo mcse_elpd_loo p_loo looic influence_pareto_k 1 -0.3314552 0.0002887608 0.0003361772 0.6629103 -0.05679886 ... ``` # Approximate LOO-CV using PSIS-LOO and subsampling We can then use the `loo_subsample()` function to compute the efficient PSIS-LOO approximation to exact LOO-CV using subsampling: ```{r, eval=FALSE} set.seed(4711) loo_ss_1 <- loo_subsample( llfun_logistic, observations = 100, # take a subsample of size 100 cores = 2, # these next objects were computed above r_eff = r_eff, draws = parameter_draws_1, data = stan_df_1 ) print(loo_ss_1) ``` ``` Computed from 4000 by 100 subsampled log-likelihood values from 3020 total observations. Estimate SE subsampling SE elpd_loo -1968.5 15.6 0.3 p_loo 3.1 0.1 0.4 looic 3936.9 31.2 0.6 ------ Monte Carlo SE of elpd_loo is 0.0. MCSE and ESS estimates assume MCMC draws (r_eff in [0.9, 1.0]). All Pareto k estimates are good (k < 0.7). See help('pareto-k-diagnostic') for details. ``` The `loo_subsample()` function creates an object of class `psis_loo_ss`, that inherits from `psis_loo, loo` (the classes of regular `loo` objects). The printed output above shows the estimates $\widehat{\mbox{elpd}}_{\rm loo}$ (expected log predictive density), $\widehat{p}_{\rm loo}$ (effective number of parameters), and ${\rm looic} =-2\, \widehat{\mbox{elpd}}_{\rm loo}$ (the LOO information criterion). Unlike when using `loo()`, when using `loo_subsample()` there is an additional column giving the "subsampling SE", which reflects the additional uncertainty due to the subsampling used. The line at the bottom of the printed output provides information about the reliability of the LOO approximation (the interpretation of the $k$ parameter is explained in `help('pareto-k-diagnostic')` and in greater detail in Vehtari, Simpson, Gelman, Yao, and Gabry (2019)). In this case, the message tells us that all of the estimates for $k$ are fine _for this given subsample_. ## Adding additional subsamples If we are not satisfied with the subsample size (i.e., the accuracy) we can simply add more samples until we are satisfied using the `update()` method. ```{r, eval=FALSE} set.seed(4711) loo_ss_1b <- update( loo_ss_1, observations = 200, # subsample 200 instead of 100 r_eff = r_eff, draws = parameter_draws_1, data = stan_df_1 ) print(loo_ss_1b) ``` ``` Computed from 4000 by 200 subsampled log-likelihood values from 3020 total observations. Estimate SE subsampling SE elpd_loo -1968.3 15.6 0.2 p_loo 3.2 0.1 0.4 looic 3936.7 31.2 0.5 ------ Monte Carlo SE of elpd_loo is 0.0. MCSE and ESS estimates assume MCMC draws (r_eff in [0.9, 1.0]). All Pareto k estimates are good (k < 0.7). See help('pareto-k-diagnostic') for details. 
``` ## Specifying estimator and sampling method The performance relies on two components: the estimation method and the approximation used for the elpd. See the documentation for `loo_subsample()` more information on which estimators and approximations are implemented. The default implementation is using the point log predictive density evaluated at the mean of the posterior (`loo_approximation="plpd"`) and the difference estimator (`estimator="diff_srs"`). This combination has a focus on fast inference. But we can easily use other estimators as well as other elpd approximations, for example: ```{r, eval=FALSE} set.seed(4711) loo_ss_1c <- loo_subsample( x = llfun_logistic, r_eff = r_eff, draws = parameter_draws_1, data = stan_df_1, observations = 100, estimator = "hh_pps", # use Hansen-Hurwitz loo_approximation = "lpd", # use lpd instead of plpd loo_approximation_draws = 100, cores = 2 ) print(loo_ss_1c) ``` ``` Computed from 4000 by 100 subsampled log-likelihood values from 3020 total observations. Estimate SE subsampling SE elpd_loo -1968.9 15.4 0.5 p_loo 3.5 0.2 0.5 looic 3937.9 30.7 1.1 ------ Monte Carlo SE of elpd_loo is 0.0. MCSE and ESS estimates assume MCMC draws (r_eff in [0.9, 1.0]). All Pareto k estimates are good (k < 0.7). See help('pareto-k-diagnostic') for details. ``` See the documentation and references for `loo_subsample()` for details on the implemented approximations. # Approximate LOO-CV using PSIS-LOO with posterior approximations Using posterior approximations, such as variational inference and Laplace approximations, can further speed-up LOO-CV for large data. Here we demonstrate using a Laplace approximation in Stan. ```{r, eval=FALSE} fit_laplace <- optimizing(stan_mod, data = standata, draws = 2000, importance_resampling = TRUE) parameter_draws_laplace <- fit_laplace$theta_tilde # draws from approximate posterior log_p <- fit_laplace$log_p # log density of the posterior log_g <- fit_laplace$log_g # log density of the approximation ``` Using the posterior approximation we can then do LOO-CV by correcting for the posterior approximation when we compute the elpd. To do this we use the `loo_approximate_posterior()` function. ```{r, eval=FALSE} set.seed(4711) loo_ap_1 <- loo_approximate_posterior( x = llfun_logistic, draws = parameter_draws_laplace, data = stan_df_1, log_p = log_p, log_g = log_g, cores = 2 ) print(loo_ap_1) ``` The function creates a class, `psis_loo_ap` that inherits from `psis_loo, loo`. ``` Computed from 2000 by 3020 log-likelihood matrix Estimate SE elpd_loo -1968.4 15.6 p_loo 3.2 0.2 looic 3936.8 31.2 ------ Posterior approximation correction used. Monte Carlo SE of elpd_loo is 0.0. MCSE and ESS estimates assume independent draws (r_eff=1). All Pareto k estimates are good (k < 0.7). See help('pareto-k-diagnostic') for details. ``` ## Combining the posterior approximation method with subsampling The posterior approximation correction can also be used together with subsampling: ```{r, eval=FALSE} set.seed(4711) loo_ap_ss_1 <- loo_subsample( x = llfun_logistic, draws = parameter_draws_laplace, data = stan_df_1, log_p = log_p, log_g = log_g, observations = 100, cores = 2 ) print(loo_ap_ss_1) ``` ``` Computed from 2000 by 100 subsampled log-likelihood values from 3020 total observations. Estimate SE subsampling SE elpd_loo -1968.2 15.6 0.4 p_loo 2.9 0.1 0.5 looic 3936.4 31.1 0.8 ------ Posterior approximation correction used. Monte Carlo SE of elpd_loo is 0.0. MCSE and ESS estimates assume independent draws (r_eff=1). 
All Pareto k estimates are good (k < 0.7). See help('pareto-k-diagnostic') for details. ``` The object created is of class `psis_loo_ss`, which inherits from the `psis_loo_ap` class previously described. ## Comparing models To compare this model to an alternative model for the same data we can use the `loo_compare()` function just as we would if using `loo()` instead of `loo_subsample()` or `loo_approximate_posterior()`. First we'll fit a second model to the well-switching data, using `log(arsenic)` instead of `arsenic` as a predictor: ```{r, eval=FALSE} standata$X[, "arsenic"] <- log(standata$X[, "arsenic"]) fit_2 <- sampling(stan_mod, data = standata) parameter_draws_2 <- extract(fit_2)$beta stan_df_2 <- as.data.frame(standata) # recompute subsampling loo for first model for demonstration purposes # compute relative efficiency (this is slow and optional but is recommended to allow # for adjusting PSIS effective sample size based on MCMC effective sample size) r_eff_1 <- relative_eff( llfun_logistic, log = FALSE, # relative_eff wants likelihood not log-likelihood values chain_id = rep(1:4, each = 1000), data = stan_df_1, draws = parameter_draws_1, cores = 2 ) set.seed(4711) loo_ss_1 <- loo_subsample( x = llfun_logistic, r_eff = r_eff_1, draws = parameter_draws_1, data = stan_df_1, observations = 200, cores = 2 ) # compute subsampling loo for a second model (with log-arsenic) r_eff_2 <- relative_eff( llfun_logistic, log = FALSE, # relative_eff wants likelihood not log-likelihood values chain_id = rep(1:4, each = 1000), data = stan_df_2, draws = parameter_draws_2, cores = 2 ) loo_ss_2 <- loo_subsample( x = llfun_logistic, r_eff = r_eff_2, draws = parameter_draws_2, data = stan_df_2, observations = 200, cores = 2 ) print(loo_ss_2) ``` ``` Computed from 4000 by 100 subsampled log-likelihood values from 3020 total observations. Estimate SE subsampling SE elpd_loo -1952.0 16.2 0.2 p_loo 2.6 0.1 0.3 looic 3903.9 32.4 0.4 ------ Monte Carlo SE of elpd_loo is 0.0. MCSE and ESS estimates assume MCMC draws (r_eff in [1.0, 1.1]). All Pareto k estimates are good (k < 0.7). See help('pareto-k-diagnostic') for details. ``` We can now compare the models on LOO using the `loo_compare` function: ```{r, eval=FALSE} # Compare comp <- loo_compare(loo_ss_1, loo_ss_2) print(comp) ``` ``` Warning: Different subsamples in 'model2' and 'model1'. Naive diff SE is used. elpd_diff se_diff subsampling_se_diff model2 0.0 0.0 0.0 model1 16.5 22.5 0.4 ``` This new object `comp` contains the estimated difference of expected leave-one-out prediction errors between the two models, along with the standard error. As the warning indicates, because different subsamples were used the comparison will not take the correlations between different observations into account. Here we see that the naive SE is 22.5 and we cannot see any difference in performance between the models. To force subsampling to use the same observations for each of the models we can simply extract the observations used in `loo_ss_1` and use them in `loo_ss_2` by supplying the `loo_ss_1` object to the `observations` argument. 
```{r, eval=FALSE} loo_ss_2 <- loo_subsample( x = llfun_logistic, r_eff = r_eff_2, draws = parameter_draws_2, data = stan_df_2, observations = loo_ss_1, cores = 2 ) ``` We could also supply the subsampling indices using the `obs_idx()` helper function: ```{r, eval=FALSE} idx <- obs_idx(loo_ss_1) loo_ss_2 <- loo_subsample( x = llfun_logistic, r_eff = r_eff_2, draws = parameter_draws_2, data = stan_df_2, observations = idx, cores = 2 ) ``` ``` Simple random sampling with replacement assumed. ``` This results in a message indicating that we assume these observations to have been sampled with simple random sampling, which is true because we had used the default `"diff_srs"` estimator for `loo_ss_1`. We can now compare the models and estimate the difference based on the same subsampled observations. ```{r, eval=FALSE} comp <- loo_compare(loo_ss_1, loo_ss_2) print(comp) ``` ``` elpd_diff se_diff subsampling_se_diff model2 0.0 0.0 0.0 model1 16.1 4.4 0.1 ``` First, notice that now the `se_diff` is now around 4 (as opposed to 20 when using different subsamples). The first column shows the difference in ELPD relative to the model with the largest ELPD. In this case, the difference in `elpd` and its scale relative to the approximate standard error of the difference) indicates a preference for the second model (`model2`). Since the subsampling uncertainty is so small in this case it can effectively be ignored. If we need larger subsamples we can simply add samples using the `update()` method demonstrated earlier. It is also possible to compare a subsampled loo computation with a full loo object. ```{r, eval=FALSE} # use loo() instead of loo_subsample() to compute full PSIS-LOO for model 2 loo_full_2 <- loo( x = llfun_logistic, r_eff = r_eff_2, draws = parameter_draws_2, data = stan_df_2, cores = 2 ) loo_compare(loo_ss_1, loo_full_2) ``` ``` Estimated elpd_diff using observations included in loo calculations for all models. ``` Because we are comparing a non-subsampled loo calculation to a subsampled calculation we get the message that only the observations that are included in the loo calculations for both `model1` and `model2` are included in the computations for the comparison. ``` elpd_diff se_diff subsampling_se_diff model2 0.0 0.0 0.0 model1 16.3 4.4 0.3 ``` Here we actually see an increase in `subsampling_se_diff`, but this is due to a technical detail not elaborated here. In general, the difference should be better or negligible. # References Gelman, A., and Hill, J. (2007). *Data Analysis Using Regression and Multilevel Hierarchical Models.* Cambridge University Press. Stan Development Team (2017). _The Stan C++ Library, Version 2.17.0._ https://mc-stan.org/ Stan Development Team (2018) _RStan: the R interface to Stan, Version 2.17.3._ https://mc-stan.org/ Magnusson, M., Riis Andersen, M., Jonasson, J. and Vehtari, A. (2020). Leave-One-Out Cross-Validation for Model Comparison in Large Data. Proceedings of the 23rd International Conference on Artificial Intelligence and Statistics (AISTATS), in PMLR 108. [arXiv preprint arXiv:2001.00980](https://arxiv.org/abs/2001.00980). Magnusson, M., Andersen, M., Jonasson, J. & Vehtari, A. (2019). Bayesian leave-one-out cross-validation for large data. Proceedings of the 36th International Conference on Machine Learning, in PMLR 97:4244-4253 [online](http://proceedings.mlr.press/v97/magnusson19a.html), [arXiv preprint arXiv:1904.10679](https://arxiv.org/abs/1904.10679). Vehtari, A., Gelman, A., and Gabry, J. (2017). 
Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. _Statistics and Computing_. 27(5), 1413--1432. \doi:10.1007/s11222-016-9696-4. [online](https://link.springer.com/article/10.1007/s11222-016-9696-4), [arXiv preprint arXiv:1507.04544](https://arxiv.org/abs/1507.04544). Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. *Journal of Machine Learning Research*, 25(72):1-58. [PDF](https://jmlr.org/papers/v25/19-556.html) loo/inst/doc/loo2-lfo.html0000644000176200001440000155357015122305175015125 0ustar liggesusers Approximate leave-future-out cross-validation for Bayesian time series models

Approximate leave-future-out cross-validation for Bayesian time series models

Paul Bürkner, Jonah Gabry, Aki Vehtari

2025-12-22

Introduction

One of the most common goals of a time series analysis is to use the observed series to inform predictions for future observations. We will refer to this task of predicting a sequence of \(M\) future observations as \(M\)-step-ahead prediction (\(M\)-SAP). Fortunately, once we have fit a model and can sample from the posterior predictive distribution, it is straightforward to generate predictions as far into the future as we want. It is also straightforward to evaluate the \(M\)-SAP performance of a time series model by comparing the predictions to the observed sequence of \(M\) future data points once they become available.

Unfortunately, we are often in the position of having to use a model to inform decisions before we can collect the future observations required for assessing the predictive performance. If we have many competing models we may also need to first decide which of the models (or which combination of the models) we should rely on for predictions. In these situations the best we can do is to use methods for approximating the expected predictive performance of our models using only the observations of the time series we already have.

If there were no time dependence in the data or if the focus is to assess the non-time-dependent part of the model, we could use methods like leave-one-out cross-validation (LOO-CV). For a data set with \(N\) observations, we refit the model \(N\) times, each time leaving out one of the \(N\) observations and assessing how well the model predicts the left-out observation. LOO-CV is very expensive computationally in most realistic settings, but the Pareto smoothed importance sampling (PSIS, Vehtari et al, 2017, 2024) algorithm provided by the loo package allows for approximating exact LOO-CV with PSIS-LOO-CV. PSIS-LOO-CV requires only a single fit of the full model and comes with diagnostics for assessing the validity of the approximation.

With a time series we can do something similar to LOO-CV but, except in a few cases, it does not make sense to leave out observations one at a time because then we are allowing information from the future to influence predictions of the past (i.e., times \(t + 1, t+2, \ldots\) should not be used to predict for time \(t\)). To apply the idea of cross-validation to the \(M\)-SAP case, instead of leave-one-out cross-validation we need some form of leave-future-out cross-validation (LFO-CV). As we will demonstrate in this case study, LFO-CV does not refer to one particular prediction task but rather to various possible cross-validation approaches that all involve some form of prediction for new time series data. Like exact LOO-CV, exact LFO-CV requires refitting the model many times to different subsets of the data, which is computationally very costly for most nontrivial examples, in particular for Bayesian analyses where refitting the model means estimating a new posterior distribution rather than a point estimate.

Although PSIS-LOO-CV provides an efficient approximation to exact LOO-CV, until now there has not been an analogous approximation to exact LFO-CV that drastically reduces the computational burden while also providing informative diagnostics about the quality of the approximation. In this case study we present PSIS-LFO-CV, an algorithm that typically only requires refitting the time-series model a small number times and will make LFO-CV tractable for many more realistic applications than previously possible.

More details can be found in our paper about approximate LFO-CV (Bürkner, Gabry, & Vehtari, 2020), which is available as a preprint on arXiv (https://arxiv.org/abs/1902.06281).

\(M\)-step-ahead predictions

Assume we have a time series of observations \(y = (y_1, y_2, \ldots, y_N)\) and let \(L\) be the minimum number of observations from the series that we will require before making predictions for future data. Depending on the application and how informative the data is, it may not be possible to make reasonable predictions for \(y_{i+1}\) based on \((y_1, \dots, y_{i})\) until \(i\) is large enough so that we can learn enough about the time series to predict future observations. Setting \(L=10\), for example, means that we will only assess predictive performance starting with observation \(y_{11}\), so that we always have at least 10 previous observations to condition on.

In order to assess \(M\)-SAP performance we would like to compute the predictive densities

\[ p(y_{i+1:M} \,|\, y_{1:i}) = p(y_{i+1}, \ldots, y_{i + M} \,|\, y_{1},...,y_{i}) \]

for each \(i \in \{L, \ldots, N - M\}\). The quantities \(p(y_{i+1:M} \,|\, y_{1:i})\) can be computed with the help of the posterior distribution \(p(\theta \,|\, y_{1:i})\) of the parameters \(\theta\) conditional on only the first \(i\) observations of the time-series:

\[ p(y_{i+1:M} \,| \, y_{1:i}) = \int p(y_{i+1:M} \,| \, y_{1:i}, \theta) \, p(\theta\,|\,y_{1:i}) \,d\theta. \]

Having obtained \(S\) draws \((\theta_{1:i}^{(1)}, \ldots, \theta_{1:i}^{(S)})\) from the posterior distribution \(p(\theta\,|\,y_{1:i})\), we can estimate \(p(y_{i+1:M} | y_{1:i})\) as

\[ p(y_{i+1:M} \,|\, y_{1:i}) \approx \frac{1}{S}\sum_{s=1}^S p(y_{i+1:M} \,|\, y_{1:i}, \theta_{1:i}^{(s)}). \]
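As a minimal sketch of this estimate (assuming loglik_future is a hypothetical \(S \times M\) matrix whose entry \((s, j)\) is \(\log p(y_{i+j} \,|\, y_{1:i+j-1}, \theta_{1:i}^{(s)})\)), the computation on the log scale is:

joint_loglik <- rowSums(loglik_future)   # joint log density of the M future values per draw
max_jl <- max(joint_loglik)
log_pred_density <- max_jl + log(mean(exp(joint_loglik - max_jl)))  # stable log-mean-exp

This is the same log-mean-exp computation performed later in this case study via the log_mean_exp() helper function.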

Approximate \(M\)-SAP using importance-sampling

Unfortunately, the math above makes use of the posterior distributions from many different fits of the model to different subsets of the data. That is, to obtain the predictive density \(p(y_{i+1:M} \,|\, y_{1:i})\) requires fitting a model to only the first \(i\) data points, and we will need to do this for every value of \(i\) under consideration (all \(i \in \{L, \ldots, N - M\}\)).

To reduce the number of models that need to be fit for the purpose of obtaining each of the densities \(p(y_{i+1:M} \,|\, y_{1:i})\), we propose the following algorithm. First, we refit the model using the first \(L\) observations of the time series and then perform a single exact \(M\)-step-ahead prediction step for \(p(y_{L+1:M} \,|\, y_{1:L})\). Recall that \(L\) is the minimum number of observations we have deemed acceptable for making predictions (setting \(L=0\) means the first data point will be predicted only based on the prior). We define \(i^\star = L\) as the current point of refit. Next, starting with \(i = i^\star + 1\), we approximate each \(p(y_{i+1:M} \,|\, y_{1:i})\) via

\[ p(y_{i+1:M} \,|\, y_{1:i}) \approx \frac{ \sum_{s=1}^S w_i^{(s)}\, p(y_{i+1:M} \,|\, y_{1:i}, \theta^{(s)})} { \sum_{s=1}^S w_i^{(s)}}, \]

where \(\theta^{(s)} = \theta^{(s)}_{1:i^\star}\) are draws from the posterior distribution based on the first \(i^\star\) observations and \(w_i^{(s)}\) are the PSIS weights obtained in two steps. First, we compute the raw importance ratios

\[ r_i^{(s)} = \frac{f_{1:i}(\theta^{(s)})}{f_{1:i^\star}(\theta^{(s)})} \propto \prod_{j \in (i^\star + 1):i} p(y_j \,|\, y_{1:(j-1)}, \theta^{(s)}), \]

and then stabilize them using PSIS. The function \(f_{1:i}\) denotes the posterior distribution based on the first \(i\) observations, that is, \(f_{1:i} = p(\theta \,|\, y_{1:i})\), with \(f_{1:i^\star}\) defined analogously. The index set \((i^\star + 1):i\) indicates all observations which are part of the data for the model \(f_{1:i}\) whose predictive performance we are trying to approximate but not for the actually fitted model \(f_{1:i^\star}\). The proportional statement arises from the fact that we ignore the normalizing constants \(p(y_{1:i})\) and \(p(y_{1:i^\star})\) of the compared posteriors, which leads to a self-normalized variant of PSIS (see Vehtari et al, 2017).

Continuing with the next observation, we gradually increase \(i\) by \(1\) (we move forward in time) and repeat the process. At some observation \(i\), the variability of the importance ratios \(r_i^{(s)}\) will become too large and importance sampling will fail. We will refer to this particular value of \(i\) as \(i^\star_1\). To identify \(i^\star_1\), we check at which value of \(i\) the estimated shape parameter \(k\) of the generalized Pareto distribution first exceeds a certain threshold \(\tau\) (Vehtari et al, 2024). Only then do we refit the model using the observations up to \(i^\star_1\) and restart the process from there by setting \(\theta^{(s)} = \theta^{(s)}_{1:i^\star_1}\) and \(i^\star = i^\star_1\) until the next refit.

In some cases we may only need to refit once and in other cases we will find a value \(i^\star_2\) that requires a second refitting, maybe an \(i^\star_3\) that requires a third refitting, and so on. We refit as many times as required (only when \(k > \tau\)) until we arrive at observation \(i = N - M\). For LOO, assuming the posterior sample size is 4000 or larger, we recommend using a threshold of \(\tau = 0.7\) (Vehtari et al, 2017, 2024), and it turns out this is a reasonable threshold for LFO as well (Bürkner et al. 2020).
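Schematically, the check at a single observation \(i\) looks as follows (a sketch only: loglik stands for a draws-by-observations log-likelihood matrix evaluated under the model last refit at observation i_star, and the complete 1-SAP implementation is given later in this case study):

log_ratios <- rowSums(loglik[, (i_star + 1):i, drop = FALSE])  # raw log importance ratios
psis_obj <- psis(log_ratios)
k <- pareto_k_values(psis_obj)
if (k > 0.7) {
  # k exceeds the threshold: refit using observations 1:i and set i_star to i
} else {
  # k is acceptable: reuse the current fit with the smoothed weights from psis_obj
}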

Autoregressive models

Autoregressive (AR) models are some of the most commonly used time-series models. An AR(p) model —an autoregressive model of order \(p\)— can be defined as

\[ y_i = \eta_i + \sum_{k = 1}^p \varphi_k y_{i - k} + \varepsilon_i, \]

where \(\eta_i\) is the linear predictor for the \(i\)th observation, \(\varphi_k\) are the autoregressive parameters and \(\varepsilon_i\) are pairwise independent errors, which are usually assumed to be normally distributed with equal variance \(\sigma^2\). The model implies a recursive formula that allows for computing the right-hand side of the above equation for observation \(i\) based on its values for the previous observations.
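For intuition, here is a small standalone sketch (not part of the Lake Huron analysis below) that applies this recursion directly to simulate an AR(2) series with \(\eta_i = 0\):

set.seed(123)
n <- 200
phi <- c(0.6, 0.2)   # autoregressive coefficients
sigma <- 1
y_sim <- numeric(n)  # initial values y_sim[1:2] are left at 0
for (i in 3:n) {
  y_sim[i] <- sum(phi * y_sim[(i - 1):(i - 2)]) + rnorm(1, mean = 0, sd = sigma)
}
# the built-in arima.sim() gives an equivalent simulation:
# y_sim2 <- arima.sim(model = list(ar = phi), n = n, sd = sigma)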

Case Study: Annual measurements of the level of Lake Huron

To illustrate the application of PSIS-LFO-CV for estimating expected \(M\)-SAP performance, we will fit a model for 98 annual measurements of the water level (in feet) of Lake Huron from the years 1875–1972. This data set is found in the datasets R package, which is installed automatically with R.

In addition to the loo package, for this analysis we will use the brms interface to Stan to generate a Stan program and fit the model, and also the bayesplot and ggplot2 packages for plotting.

library("brms")
library("loo")
library("bayesplot")
library("ggplot2")
color_scheme_set("brightblue")
theme_set(theme_default())

CHAINS <- 4
SEED <- 5838296
set.seed(SEED)

Before fitting a model, we will first put the data into a data frame and then look at the time series.

N <- length(LakeHuron)
df <- data.frame(
  y = as.numeric(LakeHuron),
  year = as.numeric(time(LakeHuron)),
  time = 1:N
)

ggplot(df, aes(x = year, y = y)) + 
  geom_point(size = 1) +
  labs(
    y = "Water Level (ft)", 
    x = "Year",
    title = "Water Level in Lake Huron (1875-1972)"
  ) 

The above plot shows rather strong autocorrelation of the time-series as well as some trend towards lower levels for later points in time.

We can specify an AR(4) model for these data using the brms package as follows:

fit <- brm(
  y ~ ar(time, p = 4), 
  data = df, 
  prior = prior(normal(0, 0.5), class = "ar"),
  control = list(adapt_delta = 0.99), 
  seed = SEED, 
  chains = CHAINS
)

The model-implied predictions along with the observed values can be plotted, which reveals a rather good fit to the data.

preds <- posterior_predict(fit)
preds <- cbind(
  Estimate = colMeans(preds), 
  Q5 = apply(preds, 2, quantile, probs = 0.05),
  Q95 = apply(preds, 2, quantile, probs = 0.95)
)

ggplot(cbind(df, preds), aes(x = year, y = Estimate)) +
  geom_smooth(aes(ymin = Q5, ymax = Q95), stat = "identity", linewidth = 0.5) +
  geom_point(aes(y = y)) + 
  labs(
    y = "Water Level (ft)", 
    x = "Year",
    title = "Water Level in Lake Huron (1875-1972)",
    subtitle = "Mean (blue) and 90% predictive intervals (gray) vs. observed data (black)"
  ) 

To allow for reasonable predictions of future values, we will require at least \(L = 20\) historical observations (20 years) to make predictions.

L <- 20

We first perform approximate leave-one-out cross-validation (LOO-CV) for the purpose of later comparison with exact and approximate LFO-CV for the 1-SAP case.

loo_cv <- loo(log_lik(fit)[, (L + 1):N])
print(loo_cv)

Computed from 4000 by 78 log-likelihood matrix.

         Estimate   SE
elpd_loo    -88.6  6.4
p_loo         4.8  1.0
looic       177.3 12.8
------
MCSE of elpd_loo is 0.0.
MCSE and ESS estimates assume independent draws (r_eff=1).

All Pareto k estimates are good (k < 0.7).
See help('pareto-k-diagnostic') for details.

1-step-ahead predictions leaving out all future values

The most basic version of \(M\)-SAP is 1-SAP, in which we predict only one step ahead. In this case, \(y_{i+1:M}\) simplifies to \(y_{i+1}\) and the LFO-CV algorithm becomes considerably simpler than for larger values of \(M\).

Exact 1-step-ahead predictions

Before we compute approximate LFO-CV using PSIS we will first compute exact LFO-CV for the 1-SAP case so we can use it as a benchmark later. The initial step for the exact computation is to calculate the log-predictive densities by refitting the model many times:

loglik_exact <- matrix(nrow = ndraws(fit), ncol = N)
for (i in L:(N - 1)) {
  past <- 1:i
  oos <- i + 1
  df_past <- df[past, , drop = FALSE]
  df_oos <- df[c(past, oos), , drop = FALSE]
  fit_i <- update(fit, newdata = df_past, recompile = FALSE)
  loglik_exact[, i + 1] <- log_lik(fit_i, newdata = df_oos, oos = oos)[, oos]
}

Then we compute the exact expected log predictive density (ELPD):

# some helper functions we'll use throughout

# more stable than log(sum(exp(x))) 
log_sum_exp <- function(x) {
  max_x <- max(x)  
  max_x + log(sum(exp(x - max_x)))
}

# more stable than log(mean(exp(x)))
log_mean_exp <- function(x) {
  log_sum_exp(x) - log(length(x))
}

# compute log of raw importance ratios
# sums over observations *not* over posterior samples
sum_log_ratios <- function(loglik, ids = NULL) {
  if (!is.null(ids)) loglik <- loglik[, ids, drop = FALSE]
  rowSums(loglik)
}

# for printing comparisons later
rbind_print <- function(...) {
  round(rbind(...), digits = 2)
}
exact_elpds_1sap <- apply(loglik_exact, 2, log_mean_exp)
exact_elpd_1sap <- c(ELPD = sum(exact_elpds_1sap[-(1:L)]))

rbind_print(
  "LOO" = loo_cv$estimates["elpd_loo", "Estimate"],
  "LFO" = exact_elpd_1sap
)
      ELPD
LOO -88.64
LFO -92.43

We see that the ELPD from LFO-CV for 1-step-ahead predictions is lower than the ELPD estimate from LOO-CV, which should be expected since LOO-CV is making use of more of the time series. That is, since the LFO-CV approach only uses observations from before the left-out data point but LOO-CV uses all data points other than the left-out observation, we should expect to see the larger ELPD from LOO-CV.

Approximate 1-step-ahead predictions

We compute approximate 1-SAP, refitting the model only at observations where the Pareto \(k\) estimate exceeds the threshold of \(0.7\).

k_thres <- 0.7

The code becomes a little more involved than for exact LFO-CV. Note that we can compute exact 1-SAP at the refitting points at no additional computational cost, since the model has to be refit at those points anyway.
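
To sketch what the loop below does (again in our notation; see Bürkner et al. (2020) for details): if the model was last refit using data up to observation \(i^\star \le i\), the available posterior draws \(\theta^{(s)}\) come from \(p(\theta \mid y_{1:i^\star})\) rather than \(p(\theta \mid y_{1:i})\), so we reweight them using raw importance ratios with

\[
\log r^{(s)} = \sum_{j = i^\star + 1}^{i} \log p(y_j \mid y_{1:(j-1)}, \theta^{(s)}),
\]

which is what sum_log_ratios(loglik, (i_refit + 1):i) returns. These ratios are then smoothed with PSIS, and the approximate 1-SAP term becomes

\[
\log \hat{p}(y_{i+1} \mid y_{1:i}) \approx \log \sum_{s=1}^{S} w^{(s)} \, p(y_{i+1} \mid y_{1:i}, \theta^{(s)}),
\]

where the \(w^{(s)}\) are the normalized PSIS weights; in the code this corresponds to log_sum_exp(lw + loglik[, oos]), since lw contains the log weights.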

approx_elpds_1sap <- rep(NA, N)

# initialize the process for i = L
past <- 1:L
oos <- L + 1
df_past <- df[past, , drop = FALSE]
df_oos <- df[c(past, oos), , drop = FALSE]
fit_past <- update(fit, newdata = df_past, recompile = FALSE)
loglik <- log_lik(fit_past, newdata = df_oos, oos = oos)
approx_elpds_1sap[L + 1] <- log_mean_exp(loglik[, oos])

# iterate over i > L
i_refit <- L
refits <- L
ks <- NULL
for (i in (L + 1):(N - 1)) {
  past <- 1:i
  oos <- i + 1
  df_past <- df[past, , drop = FALSE]
  df_oos <- df[c(past, oos), , drop = FALSE]
  loglik <- log_lik(fit_past, newdata = df_oos, oos = oos)
  
  logratio <- sum_log_ratios(loglik, (i_refit + 1):i)
  psis_obj <- suppressWarnings(psis(logratio))
  k <- pareto_k_values(psis_obj)
  ks <- c(ks, k)
  if (k > k_thres) {
    # refit the model based on the first i observations
    i_refit <- i
    refits <- c(refits, i)
    fit_past <- update(fit_past, newdata = df_past, recompile = FALSE)
    loglik <- log_lik(fit_past, newdata = df_oos, oos = oos)
    approx_elpds_1sap[i + 1] <- log_mean_exp(loglik[, oos])
  } else {
    lw <- weights(psis_obj, normalize = TRUE)[, 1]
    approx_elpds_1sap[i + 1] <- log_sum_exp(lw + loglik[, oos])
  }
} 

We see that the final Pareto \(k\) estimates are mostly well below the threshold and that we only needed to refit the model a few times:

# plot the Pareto k estimate for each data point
# (note: the dashed reference line defaults to thres = 0.6, which is slightly
# lower than the refit threshold k_thres = 0.7 used above)
plot_ks <- function(ks, ids, thres = 0.6) {
  dat_ks <- data.frame(ks = ks, ids = ids)
  ggplot(dat_ks, aes(x = ids, y = ks)) + 
    geom_point(aes(color = ks > thres), shape = 3, show.legend = FALSE) + 
    geom_hline(yintercept = thres, linetype = 2, color = "red2") + 
    scale_color_manual(values = c("cornflowerblue", "darkblue")) + 
    labs(x = "Data point", y = "Pareto k") + 
    ylim(-0.5, 1.5)
}
cat("Using threshold ", k_thres, 
    ", model was refit ", length(refits), 
    " times, at observations", refits)
Using threshold  0.7 , model was refit  2  times, at observations 20 57
plot_ks(ks, (L + 1):(N - 1))

The approximate 1-SAP ELPD is remarkably similar to the exact 1-SAP ELPD computed above, which indicates that our algorithm for computing approximate 1-SAP worked well for the present data and model.

approx_elpd_1sap <- sum(approx_elpds_1sap, na.rm = TRUE)
rbind_print(
  "approx LFO" = approx_elpd_1sap,
  "exact LFO" = exact_elpd_1sap
)
             ELPD
approx LFO -92.98
exact LFO  -92.43

Plotting the exact against the approximate predictions, we see that no approximate value deviates far from its exact counterpart, providing further evidence for the good quality of the approximation.

dat_elpd <- data.frame(
  approx_elpd = approx_elpds_1sap,
  exact_elpd = exact_elpds_1sap
)

ggplot(dat_elpd, aes(x = approx_elpd, y = exact_elpd)) +
  geom_abline(color = "gray30") +
  geom_point(size = 2) +
  labs(x = "Approximate ELPDs", y = "Exact ELPDs")

We can also look at the maximum and mean absolute differences between the approximate and exact ELPD calculations, which also indicate a very close approximation:

max_diff <- with(dat_elpd, max(abs(approx_elpd - exact_elpd), na.rm = TRUE))
mean_diff <- with(dat_elpd, mean(abs(approx_elpd - exact_elpd), na.rm = TRUE))

rbind_print(
  "Max diff" = round(max_diff, 2), 
  "Mean diff" =  round(mean_diff, 3)
)
          [,1]
Max diff  0.42
Mean diff 0.02

\(M\)-step-ahead predictions leaving out all future values

To illustrate the application of \(M\)-SAP for \(M > 1\), we next compute exact and approximate LFO-CV for the 4-SAP case.

Exact \(M\)-step-ahead predictions

The necessary steps are the same as for 1-SAP, except that the log-density values of interest are now the sums of the log predictive densities of four consecutive observations. Further, the stability of the PSIS approximation stays the same for all \(M\), as it depends only on the number of observations we leave out, not on the number of observations we predict.
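
In our notation, the exact \(M\)-SAP ELPD computed below is

\[
\text{elpd}_{\text{LFO}}^{M\text{-SAP}} = \sum_{i=L}^{N-M} \log \hat{p}(y_{(i+1):(i+M)} \mid y_{1:i}),
\qquad
\hat{p}(y_{(i+1):(i+M)} \mid y_{1:i}) = \frac{1}{S} \sum_{s=1}^{S} \prod_{m=1}^{M} p(y_{i+m} \mid y_{1:(i+m-1)}, \theta^{(s)}),
\]

so the only change relative to 1-SAP is that, within each posterior draw, the pointwise log-likelihoods of the \(M\) future observations are summed first, which is what rowSums(loglik[, oos]) does in the code.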

M <- 4
loglikm <- matrix(nrow = ndraws(fit), ncol = N)
for (i in L:(N - M)) {
  past <- 1:i
  oos <- (i + 1):(i + M)
  df_past <- df[past, , drop = FALSE]
  df_oos <- df[c(past, oos), , drop = FALSE]
  fit_past <- update(fit, newdata = df_past, recompile = FALSE)
  loglik <- log_lik(fit_past, newdata = df_oos, oos = oos)
  loglikm[, i + 1] <- rowSums(loglik[, oos])
}
exact_elpds_4sap <- apply(loglikm, 2, log_mean_exp)
(exact_elpd_4sap <- c(ELPD = sum(exact_elpds_4sap, na.rm = TRUE)))
     ELPD 
-404.8864 

Approximate \(M\)-step-ahead predictions

Computing the approximate PSIS-LFO-CV for the 4-SAP case is a little bit more involved than the approximate version for the 1-SAP case, although the underlying principles remain the same.
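
The key point (again a sketch in our notation) is that the PSIS weights are constructed from exactly the same log-ratios as in the 1-SAP case, because they only correct for conditioning on \(y_{1:i^\star}\) instead of \(y_{1:i}\); only the quantity being averaged changes:

\[
\hat{p}(y_{(i+1):(i+M)} \mid y_{1:i}) \approx \sum_{s=1}^{S} w^{(s)} \prod_{m=1}^{M} p(y_{i+m} \mid y_{1:(i+m-1)}, \theta^{(s)}),
\]

whose logarithm is computed in the code below as log_sum_exp(lw + loglikm), with loglikm holding the row sums of the pointwise log-likelihoods.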

approx_elpds_4sap <- rep(NA, N)

# initialize the process for i = L
past <- 1:L
oos <- (L + 1):(L + M)
df_past <- df[past, , drop = FALSE]
df_oos <- df[c(past, oos), , drop = FALSE]
fit_past <- update(fit, newdata = df_past, recompile = FALSE)
loglik <- log_lik(fit_past, newdata = df_oos, oos = oos)
loglikm <- rowSums(loglik[, oos])
approx_elpds_4sap[L + 1] <- log_mean_exp(loglikm)

# iterate over i > L
i_refit <- L
refits <- L
ks <- NULL
for (i in (L + 1):(N - M)) {
  past <- 1:i
  oos <- (i + 1):(i + M)
  df_past <- df[past, , drop = FALSE]
  df_oos <- df[c(past, oos), , drop = FALSE]
  loglik <- log_lik(fit_past, newdata = df_oos, oos = oos)
  
  logratio <- sum_log_ratios(loglik, (i_refit + 1):i)
  psis_obj <- suppressWarnings(psis(logratio))
  k <- pareto_k_values(psis_obj)
  ks <- c(ks, k)
  if (k > k_thres) {
    # refit the model based on the first i observations
    i_refit <- i
    refits <- c(refits, i)
    fit_past <- update(fit_past, newdata = df_past, recompile = FALSE)
    loglik <- log_lik(fit_past, newdata = df_oos, oos = oos)
    loglikm <- rowSums(loglik[, oos])
    approx_elpds_4sap[i + 1] <- log_mean_exp(loglikm)
  } else {
    lw <- weights(psis_obj, normalize = TRUE)[, 1]
    loglikm <- rowSums(loglik[, oos])
    approx_elpds_4sap[i + 1] <- log_sum_exp(lw + loglikm)
  }
} 

Again, we see that the final Pareto \(k\) estimates are mostly well below the threshold and that we only needed to refit the model a few times:

cat("Using threshold ", k_thres, 
    ", model was refit ", length(refits), 
    " times, at observations", refits)
Using threshold  0.7 , model was refit  2  times, at observations 20 55
plot_ks(ks, (L + 1):(N - M))

The approximate ELPD computed for the 4-SAP case is not as close to its exact counterpart as in the 1-SAP case. In general, the larger \(M\), the larger the variation of the approximate ELPD around the exact ELPD. The ELPD estimates of AR models with \(M > 1\) show particularly high variation because each prediction depends on other predicted values. In Bürkner et al. (2020) we provide further explanation and simulations for these cases.

approx_elpd_4sap <- sum(approx_elpds_4sap, na.rm = TRUE)
rbind_print(
  "Approx LFO" = approx_elpd_4sap,
  "Exact LFO" = exact_elpd_4sap
)
              ELPD
Approx LFO -408.49
Exact LFO  -404.89

Plotting exact against approximate pointwise predictions confirms that, for a few specific data points, the approximate predictions underestimate the exact predictions.

dat_elpd_4sap <- data.frame(
  approx_elpd = approx_elpds_4sap,
  exact_elpd = exact_elpds_4sap
)

ggplot(dat_elpd_4sap, aes(x = approx_elpd, y = exact_elpd)) +
  geom_abline(color = "gray30") +
  geom_point(size = 2) +
  labs(x = "Approximate ELPDs", y = "Exact ELPDs")

Conclusion

In this case study we have shown how to carry out exact and approximate leave-future-out cross-validation for \(M\)-step-ahead prediction tasks. For the data and model used in our example, the PSIS-LFO-CV algorithm provides reasonably stable and accurate results while requiring far fewer model refits than exact LFO-CV. For more details on approximate LFO-CV, we refer to Bürkner et al. (2020).


References

Bürkner P. C., Gabry J., & Vehtari A. (2020). Approximate leave-future-out cross-validation for time series models. Journal of Statistical Computation and Simulation, 90(14):2499-2523. doi:10.1080/00949655.2020.1783262.

Vehtari A., Gelman A., & Gabry J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. Statistics and Computing, 27(5), 1413–1432. doi:10.1007/s11222-016-9696-4. arXiv preprint arXiv:1507.04544.

Vehtari A., Simpson D., Gelman A., Yao Y., & Gabry J. (2024). Pareto smoothed importance sampling. Journal of Machine Learning Research, 25(72):1-58.


Appendix

Appendix: Session information

sessionInfo()
R version 4.4.2 (2024-10-31)
Platform: x86_64-apple-darwin20
Running under: macOS Sequoia 15.4.1

Matrix products: default
BLAS:   /Library/Frameworks/R.framework/Versions/4.4-x86_64/Resources/lib/libRblas.0.dylib 
LAPACK: /Library/Frameworks/R.framework/Versions/4.4-x86_64/Resources/lib/libRlapack.dylib;  LAPACK version 3.12.0

locale:
[1] C/en_US.UTF-8/en_US.UTF-8/C/en_US.UTF-8/en_US.UTF-8

time zone: America/Denver
tzcode source: internal

attached base packages:
[1] stats     graphics  grDevices utils     datasets  methods   base     

other attached packages:
[1] ggplot2_4.0.0           brms_2.23.1             bayesplot_1.14.0.9000  
[4] rstanarm_2.32.1         Rcpp_1.1.0              loo_2.9.0              
[7] rstan_2.36.0.9000       StanHeaders_2.36.0.9000 knitr_1.50             

loaded via a namespace (and not attached):
  [1] gridExtra_2.3        inline_0.3.19        sandwich_3.1-1      
  [4] rlang_1.1.6          magrittr_2.0.3       multcomp_1.4-26     
  [7] matrixStats_1.5.0    compiler_4.4.2       callr_3.7.6         
 [10] vctrs_0.6.5          reshape2_1.4.4       stringr_1.5.1       
 [13] pkgconfig_2.0.3      fastmap_1.2.0        backports_1.5.0     
 [16] labeling_0.4.3       threejs_0.3.3        promises_1.3.3      
 [19] rmarkdown_2.29       markdown_1.13        ps_1.9.1            
 [22] nloptr_2.1.0         xfun_0.53            cachem_1.1.0        
 [25] jsonlite_2.0.0       later_1.4.3          parallel_4.4.2      
 [28] R6_2.6.1             dygraphs_1.1.1.6     bslib_0.9.0         
 [31] stringi_1.8.7        RColorBrewer_1.1-3   boot_1.3-31         
 [34] jquerylib_0.1.4      estimability_1.5.1   zoo_1.8-14          
 [37] base64enc_0.1-3      httpuv_1.6.16        Matrix_1.7-1        
 [40] splines_4.4.2        igraph_2.1.4         tidyselect_1.2.1    
 [43] abind_1.4-8          yaml_2.3.10          codetools_0.2-20    
 [46] miniUI_0.1.1.1       curl_7.0.0           processx_3.8.6      
 [49] pkgbuild_1.4.8       lattice_0.22-6       tibble_3.3.0        
 [52] plyr_1.8.9           shiny_1.11.1         withr_3.0.2         
 [55] bridgesampling_1.1-2 S7_0.2.0             posterior_1.6.1     
 [58] coda_0.19-4.1        evaluate_1.0.4       survival_3.7-0      
 [61] RcppParallel_5.1.10  xts_0.14.1           pillar_1.11.0       
 [64] tensorA_0.36.2.1     checkmate_2.3.3      DT_0.33             
 [67] stats4_4.4.2         shinyjs_2.1.0        distributional_0.5.0
 [70] generics_0.1.4       rstantools_2.5.0     scales_1.4.0        
 [73] minqa_1.2.7          gtools_3.9.5         xtable_1.8-4        
 [76] glue_1.8.0           emmeans_1.10.2       tools_4.4.2         
 [79] shinystan_2.7.0      lme4_1.1-35.4        colourpicker_1.3.0  
 [82] mvtnorm_1.3-3        grid_4.4.2           QuickJSR_1.2.2      
 [85] crosstalk_1.2.1      nlme_3.1-166         cli_3.6.5           
 [88] Brobdingnag_1.2-9    dplyr_1.1.4          V8_4.4.2            
 [91] gtable_0.3.6         sass_0.4.10          digest_0.6.37       
 [94] TH.data_1.1-2        htmlwidgets_1.6.4    farver_2.1.2        
 [97] htmltools_0.5.8.1    lifecycle_1.0.4      mime_0.13           
[100] shinythemes_1.2.0    MASS_7.3-61         

Appendix: Licenses

  • Code © 2018, Paul Bürkner, Jonah Gabry, Aki Vehtari (licensed under BSD-3).
  • Text © 2018, Paul Bürkner, Jonah Gabry, Aki Vehtari (licensed under CC-BY-NC 4.0).
loo/inst/doc/loo2-lfo.R0000644000176200001440000002253215122305174014345 0ustar liggesusersparams <- list(EVAL = TRUE) ## ----SETTINGS-knitr, include=FALSE-------------------------------------------- stopifnot(require(knitr)) opts_chunk$set( comment=NA, eval = if (isTRUE(exists("params"))) params$EVAL else FALSE, dev = "png", dpi = 150, fig.asp = 0.618, fig.width = 5, out.width = "60%", fig.align = "center" ) ## ----more-knitr-ops, include=FALSE-------------------------------------------- knitr::opts_chunk$set( cache = TRUE, message = FALSE, warning = FALSE ) ## ----pkgs, cache=FALSE-------------------------------------------------------- library("brms") library("loo") library("bayesplot") library("ggplot2") color_scheme_set("brightblue") theme_set(theme_default()) CHAINS <- 4 SEED <- 5838296 set.seed(SEED) ## ----hurondata---------------------------------------------------------------- N <- length(LakeHuron) df <- data.frame( y = as.numeric(LakeHuron), year = as.numeric(time(LakeHuron)), time = 1:N ) ggplot(df, aes(x = year, y = y)) + geom_point(size = 1) + labs( y = "Water Level (ft)", x = "Year", title = "Water Level in Lake Huron (1875-1972)" ) ## ----fit, results = "hide"---------------------------------------------------- fit <- brm( y ~ ar(time, p = 4), data = df, prior = prior(normal(0, 0.5), class = "ar"), control = list(adapt_delta = 0.99), seed = SEED, chains = CHAINS ) ## ----plotpreds, cache = FALSE------------------------------------------------- preds <- posterior_predict(fit) preds <- cbind( Estimate = colMeans(preds), Q5 = apply(preds, 2, quantile, probs = 0.05), Q95 = apply(preds, 2, quantile, probs = 0.95) ) ggplot(cbind(df, preds), aes(x = year, y = Estimate)) + geom_smooth(aes(ymin = Q5, ymax = Q95), stat = "identity", linewidth = 0.5) + geom_point(aes(y = y)) + labs( y = "Water Level (ft)", x = "Year", title = "Water Level in Lake Huron (1875-1972)", subtitle = "Mean (blue) and 90% predictive intervals (gray) vs. observed data (black)" ) ## ----setL--------------------------------------------------------------------- L <- 20 ## ----loo1sap, cache = FALSE--------------------------------------------------- loo_cv <- loo(log_lik(fit)[, (L + 1):N]) print(loo_cv) ## ----exact_loglik, results="hide"--------------------------------------------- loglik_exact <- matrix(nrow = ndraws(fit), ncol = N) for (i in L:(N - 1)) { past <- 1:i oos <- i + 1 df_past <- df[past, , drop = FALSE] df_oos <- df[c(past, oos), , drop = FALSE] fit_i <- update(fit, newdata = df_past, recompile = FALSE) loglik_exact[, i + 1] <- log_lik(fit_i, newdata = df_oos, oos = oos)[, oos] } ## ----helpers------------------------------------------------------------------ # some helper functions we'll use throughout # more stable than log(sum(exp(x))) log_sum_exp <- function(x) { max_x <- max(x) max_x + log(sum(exp(x - max_x))) } # more stable than log(mean(exp(x))) log_mean_exp <- function(x) { log_sum_exp(x) - log(length(x)) } # compute log of raw importance ratios # sums over observations *not* over posterior samples sum_log_ratios <- function(loglik, ids = NULL) { if (!is.null(ids)) loglik <- loglik[, ids, drop = FALSE] rowSums(loglik) } # for printing comparisons later rbind_print <- function(...) 
{ round(rbind(...), digits = 2) } ## ----exact1sap, cache = FALSE------------------------------------------------- exact_elpds_1sap <- apply(loglik_exact, 2, log_mean_exp) exact_elpd_1sap <- c(ELPD = sum(exact_elpds_1sap[-(1:L)])) rbind_print( "LOO" = loo_cv$estimates["elpd_loo", "Estimate"], "LFO" = exact_elpd_1sap ) ## ----setkthresh--------------------------------------------------------------- k_thres <- 0.7 ## ----refit_loglik, results="hide"--------------------------------------------- approx_elpds_1sap <- rep(NA, N) # initialize the process for i = L past <- 1:L oos <- L + 1 df_past <- df[past, , drop = FALSE] df_oos <- df[c(past, oos), , drop = FALSE] fit_past <- update(fit, newdata = df_past, recompile = FALSE) loglik <- log_lik(fit_past, newdata = df_oos, oos = oos) approx_elpds_1sap[L + 1] <- log_mean_exp(loglik[, oos]) # iterate over i > L i_refit <- L refits <- L ks <- NULL for (i in (L + 1):(N - 1)) { past <- 1:i oos <- i + 1 df_past <- df[past, , drop = FALSE] df_oos <- df[c(past, oos), , drop = FALSE] loglik <- log_lik(fit_past, newdata = df_oos, oos = oos) logratio <- sum_log_ratios(loglik, (i_refit + 1):i) psis_obj <- suppressWarnings(psis(logratio)) k <- pareto_k_values(psis_obj) ks <- c(ks, k) if (k > k_thres) { # refit the model based on the first i observations i_refit <- i refits <- c(refits, i) fit_past <- update(fit_past, newdata = df_past, recompile = FALSE) loglik <- log_lik(fit_past, newdata = df_oos, oos = oos) approx_elpds_1sap[i + 1] <- log_mean_exp(loglik[, oos]) } else { lw <- weights(psis_obj, normalize = TRUE)[, 1] approx_elpds_1sap[i + 1] <- log_sum_exp(lw + loglik[, oos]) } } ## ----plot_ks------------------------------------------------------------------ plot_ks <- function(ks, ids, thres = 0.6) { dat_ks <- data.frame(ks = ks, ids = ids) ggplot(dat_ks, aes(x = ids, y = ks)) + geom_point(aes(color = ks > thres), shape = 3, show.legend = FALSE) + geom_hline(yintercept = thres, linetype = 2, color = "red2") + scale_color_manual(values = c("cornflowerblue", "darkblue")) + labs(x = "Data point", y = "Pareto k") + ylim(-0.5, 1.5) } ## ----refitsummary1sap, cache=FALSE-------------------------------------------- cat("Using threshold ", k_thres, ", model was refit ", length(refits), " times, at observations", refits) plot_ks(ks, (L + 1):(N - 1)) ## ----lfosummary1sap, cache = FALSE-------------------------------------------- approx_elpd_1sap <- sum(approx_elpds_1sap, na.rm = TRUE) rbind_print( "approx LFO" = approx_elpd_1sap, "exact LFO" = exact_elpd_1sap ) ## ----plot1sap, cache = FALSE-------------------------------------------------- dat_elpd <- data.frame( approx_elpd = approx_elpds_1sap, exact_elpd = exact_elpds_1sap ) ggplot(dat_elpd, aes(x = approx_elpd, y = exact_elpd)) + geom_abline(color = "gray30") + geom_point(size = 2) + labs(x = "Approximate ELPDs", y = "Exact ELPDs") ## ----diffs1sap, cache=FALSE--------------------------------------------------- max_diff <- with(dat_elpd, max(abs(approx_elpd - exact_elpd), na.rm = TRUE)) mean_diff <- with(dat_elpd, mean(abs(approx_elpd - exact_elpd), na.rm = TRUE)) rbind_print( "Max diff" = round(max_diff, 2), "Mean diff" = round(mean_diff, 3) ) ## ----exact_loglikm, results="hide"-------------------------------------------- M <- 4 loglikm <- matrix(nrow = ndraws(fit), ncol = N) for (i in L:(N - M)) { past <- 1:i oos <- (i + 1):(i + M) df_past <- df[past, , drop = FALSE] df_oos <- df[c(past, oos), , drop = FALSE] fit_past <- update(fit, newdata = df_past, recompile = FALSE) loglik <- log_lik(fit_past, newdata = 
df_oos, oos = oos) loglikm[, i + 1] <- rowSums(loglik[, oos]) } ## ----exact4sap, cache = FALSE------------------------------------------------- exact_elpds_4sap <- apply(loglikm, 2, log_mean_exp) (exact_elpd_4sap <- c(ELPD = sum(exact_elpds_4sap, na.rm = TRUE))) ## ----refit_loglikm, results="hide"-------------------------------------------- approx_elpds_4sap <- rep(NA, N) # initialize the process for i = L past <- 1:L oos <- (L + 1):(L + M) df_past <- df[past, , drop = FALSE] df_oos <- df[c(past, oos), , drop = FALSE] fit_past <- update(fit, newdata = df_past, recompile = FALSE) loglik <- log_lik(fit_past, newdata = df_oos, oos = oos) loglikm <- rowSums(loglik[, oos]) approx_elpds_4sap[L + 1] <- log_mean_exp(loglikm) # iterate over i > L i_refit <- L refits <- L ks <- NULL for (i in (L + 1):(N - M)) { past <- 1:i oos <- (i + 1):(i + M) df_past <- df[past, , drop = FALSE] df_oos <- df[c(past, oos), , drop = FALSE] loglik <- log_lik(fit_past, newdata = df_oos, oos = oos) logratio <- sum_log_ratios(loglik, (i_refit + 1):i) psis_obj <- suppressWarnings(psis(logratio)) k <- pareto_k_values(psis_obj) ks <- c(ks, k) if (k > k_thres) { # refit the model based on the first i observations i_refit <- i refits <- c(refits, i) fit_past <- update(fit_past, newdata = df_past, recompile = FALSE) loglik <- log_lik(fit_past, newdata = df_oos, oos = oos) loglikm <- rowSums(loglik[, oos]) approx_elpds_4sap[i + 1] <- log_mean_exp(loglikm) } else { lw <- weights(psis_obj, normalize = TRUE)[, 1] loglikm <- rowSums(loglik[, oos]) approx_elpds_4sap[i + 1] <- log_sum_exp(lw + loglikm) } } ## ----refitsummary4sap, cache = FALSE------------------------------------------ cat("Using threshold ", k_thres, ", model was refit ", length(refits), " times, at observations", refits) plot_ks(ks, (L + 1):(N - M)) ## ----lfosummary4sap, cache = FALSE-------------------------------------------- approx_elpd_4sap <- sum(approx_elpds_4sap, na.rm = TRUE) rbind_print( "Approx LFO" = approx_elpd_4sap, "Exact LFO" = exact_elpd_4sap ) ## ----plot4sap, cache = FALSE-------------------------------------------------- dat_elpd_4sap <- data.frame( approx_elpd = approx_elpds_4sap, exact_elpd = exact_elpds_4sap ) ggplot(dat_elpd_4sap, aes(x = approx_elpd, y = exact_elpd)) + geom_abline(color = "gray30") + geom_point(size = 2) + labs(x = "Approximate ELPDs", y = "Exact ELPDs") ## ----sessioninfo-------------------------------------------------------------- sessionInfo() loo/inst/doc/loo2-elpd.Rmd0000644000176200001440000001760115122301125015023 0ustar liggesusers--- title: "Holdout validation and K-fold cross-validation of Stan programs with the loo package" author: "Bruno Nicenboim" date: "`r Sys.Date()`" output: html_vignette: toc: yes params: EVAL: !r identical(Sys.getenv("NOT_CRAN"), "true") --- ```{r, child="children/SETTINGS-knitr.txt"} ``` ```{r, child="children/SEE-ONLINE.txt", eval = if (isTRUE(exists("params"))) !params$EVAL else TRUE} ``` # Introduction This vignette demonstrates how to do holdout validation and K-fold cross-validation with __loo__ for a Stan program. # Example: Eradication of Roaches using holdout validation approach This vignette uses the same example as in the vignettes [_Using the loo package (version >= 2.0.0)_](http://mc-stan.org/loo/articles/loo2-example.html) and [_Avoiding model refits in leave-one-out cross-validation with moment matching_](https://mc-stan.org/loo/articles/loo2-moment-matching.html). 
## Coding the Stan model Here is the Stan code for fitting a Poisson regression model: ```{r stancode} # Note: some syntax used in this Stan program requires RStan >= 2.26 (or CmdStanR) # To use an older version of RStan change the line declaring `y` to: int y[N]; stancode <- " data { int K; int N; matrix[N,K] x; array[N] int y; vector[N] offset_; // offset is reserved keyword in Stan so use offset_ real beta_prior_scale; real alpha_prior_scale; } parameters { vector[K] beta; real intercept; } model { y ~ poisson(exp(x * beta + intercept + offset_)); beta ~ normal(0,beta_prior_scale); intercept ~ normal(0,alpha_prior_scale); } generated quantities { vector[N] log_lik; for (n in 1:N) log_lik[n] = poisson_lpmf(y[n] | exp(x[n] * beta + intercept + offset_[n])); } " ``` Following the usual approach recommended in [_Writing Stan programs for use with the loo package_](http://mc-stan.org/loo/articles/loo2-with-rstan.html), we compute the log-likelihood for each observation in the `generated quantities` block of the Stan program. ## Setup In addition to __loo__, we load the __rstan__ package for fitting the model. We will also need the __rstanarm__ package for the data. ```{r setup, message=FALSE} library("rstan") library("loo") seed <- 9547 set.seed(seed) ``` # Holdout validation For this approach, the model is first fit to the "train" data and then is evaluated on the held-out "test" data. ## Splitting the data between train and test The data is divided between train (80% of the data) and test (20%): ```{r modelfit-holdout, message=FALSE} # Prepare data data(roaches, package = "rstanarm") roaches$roach1 <- sqrt(roaches$roach1) roaches$offset <- log(roaches[,"exposure2"]) # 20% of the data goes to the test set: roaches$test <- 0 roaches$test[sample(.2 * seq_len(nrow(roaches)))] <- 1 # data to "train" the model data_train <- list(y = roaches$y[roaches$test == 0], x = as.matrix(roaches[roaches$test == 0, c("roach1", "treatment", "senior")]), N = nrow(roaches[roaches$test == 0,]), K = 3, offset_ = roaches$offset[roaches$test == 0], beta_prior_scale = 2.5, alpha_prior_scale = 5.0 ) # data to "test" the model data_test <- list(y = roaches$y[roaches$test == 1], x = as.matrix(roaches[roaches$test == 1, c("roach1", "treatment", "senior")]), N = nrow(roaches[roaches$test == 1,]), K = 3, offset_ = roaches$offset[roaches$test == 1], beta_prior_scale = 2.5, alpha_prior_scale = 5.0 ) ``` ## Fitting the model with RStan Next we fit the model to the "test" data in Stan using the __rstan__ package: ```{r fit-train} # Compile stanmodel <- stan_model(model_code = stancode) # Fit model fit <- sampling(stanmodel, data = data_train, seed = seed, refresh = 0) ``` We recompute the generated quantities using the posterior draws conditional on the training data, but we now pass in the held-out data to get the log predictive densities for the test data. Because we are using independent data, the log predictive density coincides with the log likelihood of the test data. ```{r gen-test} gen_test <- gqs(stanmodel, draws = as.matrix(fit), data= data_test) log_pd <- extract_log_lik(gen_test) ``` ## Computing holdout elpd: Now we evaluate the predictive performance of the model on the test data using `elpd()`. ```{r elpd-holdout} (elpd_holdout <- elpd(log_pd)) ``` When one wants to compare different models, the function `loo_compare()` can be used to assess the difference in performance. 
# K-fold cross validation For this approach the data is divided into folds, and each time one fold is tested while the rest of the data is used to fit the model (see Vehtari et al., 2017). ## Splitting the data in folds We use the data that is already pre-processed and we divide it in 10 random folds using `kfold_split_random` ```{r prepare-folds, message=FALSE} # Prepare data roaches$fold <- kfold_split_random(K = 10, N = nrow(roaches)) ``` ## Fitting and extracting the log pointwise predictive densities for each fold We now loop over the 10 folds. In each fold we do the following. First, we fit the model to all the observations except the ones belonging to the left-out fold. Second, we compute the log pointwise predictive densities for the left-out fold. Last, we store the predictive density for the observations of the left-out fold in a matrix. The output of this loop is a matrix of the log pointwise predictive densities of all the observations. ```{r} # Prepare a matrix with the number of post-warmup iterations by number of observations: log_pd_kfold <- matrix(nrow = 4000, ncol = nrow(roaches)) # Loop over the folds for(k in 1:10){ data_train <- list(y = roaches$y[roaches$fold != k], x = as.matrix(roaches[roaches$fold != k, c("roach1", "treatment", "senior")]), N = nrow(roaches[roaches$fold != k,]), K = 3, offset_ = roaches$offset[roaches$fold != k], beta_prior_scale = 2.5, alpha_prior_scale = 5.0 ) data_test <- list(y = roaches$y[roaches$fold == k], x = as.matrix(roaches[roaches$fold == k, c("roach1", "treatment", "senior")]), N = nrow(roaches[roaches$fold == k,]), K = 3, offset_ = roaches$offset[roaches$fold == k], beta_prior_scale = 2.5, alpha_prior_scale = 5.0 ) fit <- sampling(stanmodel, data = data_train, seed = seed, refresh = 0) gen_test <- gqs(stanmodel, draws = as.matrix(fit), data= data_test) log_pd_kfold[, roaches$fold == k] <- extract_log_lik(gen_test) } ``` ## Computing K-fold elpd: Now we evaluate the predictive performance of the model on the 10 folds using `elpd()`. ```{r elpd-kfold} (elpd_kfold <- elpd(log_pd_kfold)) ``` If one wants to compare several models (with `loo_compare`), one should use the same folds for all the different models. # References Gelman, A., and Hill, J. (2007). *Data Analysis Using Regression and Multilevel Hierarchical Models.* Cambridge University Press. Stan Development Team (2020) _RStan: the R interface to Stan, Version 2.21.1_ https://mc-stan.org Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. _Statistics and Computing_. 27(5), 1413--1432. \doi:10.1007/s11222-016-9696-4. Links: [published](https://link.springer.com/article/10.1007/s11222-016-9696-4) | [arXiv preprint](https://arxiv.org/abs/1507.04544). loo/inst/doc/loo2-weights.html0000644000176200001440000014244615122306001015777 0ustar liggesusers Bayesian Stacking and Pseudo-BMA weights using the loo package

Bayesian Stacking and Pseudo-BMA weights using the loo package

Aki Vehtari and Jonah Gabry

2025-12-22

Introduction

This vignette demonstrates the new functionality in loo v2.0.0 for Bayesian stacking and Pseudo-BMA weighting. We can't provide all of the necessary background on this topic here, so we encourage readers to refer to the paper

  • Yao, Y., Vehtari, A., Simpson, D., and Gelman, A. (2018). Using stacking to average Bayesian predictive distributions. Bayesian Analysis, doi:10.1214/17-BA1091.

which provides important details on the methods demonstrated in this vignette. Here we just quote from the abstract of the paper:

Abstract: Bayesian model averaging is flawed in the \(\mathcal{M}\)-open setting in which the true data-generating process is not one of the candidate models being fit. We take the idea of stacking from the point estimation literature and generalize to the combination of predictive distributions. We extend the utility function to any proper scoring rule and use Pareto smoothed importance sampling to efficiently compute the required leave-one-out posterior distributions. We compare stacking of predictive distributions to several alternatives: stacking of means, Bayesian model averaging (BMA), Pseudo-BMA, and a variant of Pseudo-BMA that is stabilized using the Bayesian bootstrap. Based on simulations and real-data applications, we recommend stacking of predictive distributions, with bootstrapped-Pseudo-BMA as an approximate alternative when computation cost is an issue.

Ideally, we would avoid the Bayesian model combination problem by extending the model to include the separate models as special cases, and preferably as a continuous expansion of the model space. For example, instead of model averaging over different covariate combinations, all potentially relevant covariates should be included in a predictive model (for causal analysis more care is needed) and a prior assumption that only some of the covariates are relevant can be expressed with the regularized horseshoe prior (Piironen and Vehtari, 2017a). For variable selection we recommend projective predictive variable selection (Piironen and Vehtari, 2017b; projpred package).

To demonstrate how to use loo package to compute Bayesian stacking and Pseudo-BMA weights, we repeat two simple model averaging examples from Chapters 6 and 10 of Statistical Rethinking by Richard McElreath. In Statistical Rethinking WAIC is used to form weights which are similar to classical “Akaike weights”. Pseudo-BMA weighting using PSIS-LOO for computation is close to these WAIC weights, but named after the Pseudo Bayes Factor by Geisser and Eddy (1979). As discussed below, in general we prefer using stacking rather than WAIC weights or the similar pseudo-BMA weights.

Setup

In addition to the loo package we will also load the rstanarm package for fitting the models.

library(rstanarm)
library(loo)

Example: Primate milk

In Statistical Rethinking, McElreath describes the data for the primate milk example as follows:

A popular hypothesis has it that primates with larger brains produce more energetic milk, so that brains can grow quickly. … The question here is to what extent energy content of milk, measured here by kilocalories, is related to the percent of the brain mass that is neocortex. … We’ll end up needing female body mass as well, to see the masking that hides the relationships among the variables.

data(milk)
d <- milk[complete.cases(milk),]
d$neocortex <- d$neocortex.perc /100
str(d)
'data.frame':   17 obs. of  9 variables:
 $ clade         : Factor w/ 4 levels "Ape","New World Monkey",..: 4 2 2 2 2 2 2 2 3 3 ...
 $ species       : Factor w/ 29 levels "A palliata","Alouatta seniculus",..: 11 2 1 6 27 5 3 4 21 19 ...
 $ kcal.per.g    : num  0.49 0.47 0.56 0.89 0.92 0.8 0.46 0.71 0.68 0.97 ...
 $ perc.fat      : num  16.6 21.2 29.7 53.4 50.6 ...
 $ perc.protein  : num  15.4 23.6 23.5 15.8 22.3 ...
 $ perc.lactose  : num  68 55.2 46.9 30.8 27.1 ...
 $ mass          : num  1.95 5.25 5.37 2.51 0.68 0.12 0.47 0.32 1.55 3.24 ...
 $ neocortex.perc: num  55.2 64.5 64.5 67.6 68.8 ...
 $ neocortex     : num  0.552 0.645 0.645 0.676 0.688 ...

We repeat the analysis in Chapter 6 of Statistical Rethinking using the following four models (here we use the default weakly informative priors in rstanarm, while flat priors were used in Statistical Rethinking).

fit1 <- stan_glm(kcal.per.g ~ 1, data = d, seed = 2030)
fit2 <- update(fit1, formula = kcal.per.g ~ neocortex)
fit3 <- update(fit1, formula = kcal.per.g ~ log(mass))
fit4 <- update(fit1, formula = kcal.per.g ~ neocortex + log(mass))

McElreath uses WAIC for model comparison and averaging, so we’ll start by also computing WAIC for these models so we can compare the results to the other options presented later in the vignette. The loo package provides waic methods for log-likelihood arrays, matrices and functions. Since we fit our model with rstanarm we can use the waic method provided by the rstanarm package (a wrapper around waic from the loo package), which allows us to just pass in our fitted model objects instead of first extracting the log-likelihood values.

waic1 <- waic(fit1)
waic2 <- waic(fit2)
waic3 <- waic(fit3)
Warning: 
1 (5.9%) p_waic estimates greater than 0.4. We recommend trying loo instead.
waic4 <- waic(fit4)
Warning: 
2 (11.8%) p_waic estimates greater than 0.4. We recommend trying loo instead.
waics <- c(
  waic1$estimates["elpd_waic", 1],
  waic2$estimates["elpd_waic", 1],
  waic3$estimates["elpd_waic", 1],
  waic4$estimates["elpd_waic", 1]
)

We get some warnings when computing WAIC for models 3 and 4, indicating that we shouldn’t trust the WAIC weights we will compute later. Following the recommendation in the warning, we next use the loo methods to compute PSIS-LOO instead. The loo package provides loo methods for log-likelihood arrays, matrices, and functions, but since we fit our model with rstanarm we can just pass the fitted model objects directly and rstanarm will extract the needed values to pass to the loo package. (Like rstanarm, some other R packages for fitting Stan models, e.g. brms, also provide similar methods for interfacing with the loo package.)

# note: the loo function accepts a 'cores' argument that we recommend specifying
# when working with bigger datasets

loo1 <- loo(fit1)
loo2 <- loo(fit2)
loo3 <- loo(fit3)
loo4 <- loo(fit4)
lpd_point <- cbind(
  loo1$pointwise[,"elpd_loo"], 
  loo2$pointwise[,"elpd_loo"],
  loo3$pointwise[,"elpd_loo"], 
  loo4$pointwise[,"elpd_loo"]
)

With loo we don’t get any warnings for models 3 and 4, but for illustration of good results, we display the diagnostic details for these models anyway.

print(loo3)

Computed from 4000 by 17 log-likelihood matrix.

         Estimate  SE
elpd_loo      4.5 2.3
p_loo         2.1 0.5
looic        -9.1 4.6
------
MCSE of elpd_loo is 0.0.
MCSE and ESS estimates assume MCMC draws (r_eff in [0.5, 1.0]).

All Pareto k estimates are good (k < 0.7).
See help('pareto-k-diagnostic') for details.
print(loo4)

Computed from 4000 by 17 log-likelihood matrix.

         Estimate  SE
elpd_loo      8.4 2.8
p_loo         3.3 0.9
looic       -16.8 5.5
------
MCSE of elpd_loo is 0.1.
MCSE and ESS estimates assume MCMC draws (r_eff in [0.4, 1.0]).

All Pareto k estimates are good (k < 0.7).
See help('pareto-k-diagnostic') for details.

One benefit of PSIS-LOO over WAIC is better diagnostics. Here for both models 3 and 4 all \(k<0.7\) and the Monte Carlo SE of elpd_loo is 0.1 or less, and we can expect the model comparison to be reliable.

Next we compute and compare 1) WAIC weights, 2) Pseudo-BMA weights without Bayesian bootstrap, 3) Pseudo-BMA+ weights with Bayesian bootstrap, and 4) Bayesian stacking weights.
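
As a rough sketch of what these methods compute (see Yao et al. (2018) for precise definitions): WAIC and Pseudo-BMA weights are normalized exponentiated ELPD estimates, whereas stacking chooses the weights by maximizing the combined leave-one-out log predictive density over the simplex \(S_K\),

\[
w_k^{\text{pseudo-BMA}} = \frac{\exp(\widehat{\text{elpd}}_k)}{\sum_{j=1}^{K} \exp(\widehat{\text{elpd}}_j)},
\qquad
w^{\text{stacking}} = \arg\max_{w \in S_K} \sum_{i=1}^{n} \log \sum_{k=1}^{K} w_k \, \hat{p}(y_i \mid y_{-i}, M_k),
\]

where \(\hat{p}(y_i \mid y_{-i}, M_k)\) is the PSIS-LOO estimate of the leave-one-out predictive density for model \(M_k\) (the exponentiated pointwise elpd_loo values in lpd_point). Pseudo-BMA+ additionally accounts for the uncertainty in the \(\widehat{\text{elpd}}_k\) via the Bayesian bootstrap.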

waic_wts <- exp(waics) / sum(exp(waics))
pbma_wts <- pseudobma_weights(lpd_point, BB=FALSE)
pbma_BB_wts <- pseudobma_weights(lpd_point) # default is BB=TRUE
stacking_wts <- stacking_weights(lpd_point)
round(cbind(waic_wts, pbma_wts, pbma_BB_wts, stacking_wts), 2)
       waic_wts pbma_wts pbma_BB_wts stacking_wts
model1     0.01     0.02        0.07         0.01
model2     0.01     0.01        0.04         0.00
model3     0.02     0.02        0.04         0.00
model4     0.96     0.95        0.85         0.99

With all approaches Model 4 with neocortex and log(mass) gets most of the weight. Based on theory, Pseudo-BMA weights without Bayesian bootstrap should be close to WAIC weights, and we can also see that here. Pseudo-BMA+ weights with Bayesian bootstrap provide more cautious weights further away from 0 and 1 (see Yao et al. (2018) for a discussion of why this can be beneficial and results from related experiments). In this particular example, the Bayesian stacking weights are not much different from the other weights.

One of the benefits of stacking is that it handles the case of many similar models well. Consider, for example, that there could be many irrelevant covariates that, when included, would produce a model similar to one of the existing models. To emulate this situation here we simply copy the first model a bunch of times, but you can imagine that instead we would have ten alternative models with about the same predictive performance. WAIC weights for such a scenario would be close to the following:

waic_wts_demo <- 
  exp(waics[c(1,1,1,1,1,1,1,1,1,1,2,3,4)]) /
  sum(exp(waics[c(1,1,1,1,1,1,1,1,1,1,2,3,4)]))
round(waic_wts_demo, 3)
 [1] 0.013 0.013 0.013 0.013 0.013 0.013 0.013 0.013 0.013 0.013 0.006 0.016
[13] 0.847

Notice how much the weight for model 4 is lowered now that more models similar to model 1 (or in this case identical) have been added. Both WAIC weights and Pseudo-BMA approaches first estimate the predictive performance separately for each model and then compute weights based on estimated relative predictive performances. Similar models share similar weights so the weights of other models must be reduced for the total sum of the weights to remain the same.

On the other hand, stacking optimizes the weights jointly, allowing for the very similar models (in this toy example repeated models) to share their weight while more unique models keep their original weights. In our example we can see this difference clearly:

stacking_weights(lpd_point[,c(1,1,1,1,1,1,1,1,1,1,2,3,4)])
Method: stacking
------
        weight
model1  0.001 
model2  0.001 
model3  0.001 
model4  0.001 
model5  0.001 
model6  0.001 
model7  0.001 
model8  0.001 
model9  0.001 
model10 0.001 
model11 0.000 
model12 0.000 
model13 0.987 

Using stacking, the weight for the best model stays essentially unchanged.

Example: Oceanic tool complexity

Another example we consider is the Kline oceanic tool complexity data, which McElreath describes as follows:

Different historical island populations possessed tool kits of different size. These kits include fish hooks, axes, boats, hand plows, and many other types of tools. A number of theories predict that larger populations will both develop and sustain more complex tool kits. … It’s also suggested that contact rates among populations effectively increases population [sic, probably should be tool kit] size, as it’s relevant to technological evolution.

We build models predicting the total number of tools given the log population size and the contact rate (high vs. low).

data(Kline)
d <- Kline
d$log_pop <- log(d$population)
d$contact_high <- ifelse(d$contact=="high", 1, 0)
str(d)
'data.frame':   10 obs. of  7 variables:
 $ culture     : Factor w/ 10 levels "Chuuk","Hawaii",..: 4 7 6 10 3 9 1 5 8 2
 $ population  : int  1100 1500 3600 4791 7400 8000 9200 13000 17500 275000
 $ contact     : Factor w/ 2 levels "high","low": 2 2 2 1 1 1 1 2 1 2
 $ total_tools : int  13 22 24 43 33 19 40 28 55 71
 $ mean_TU     : num  3.2 4.7 4 5 5 4 3.8 6.6 5.4 6.6
 $ log_pop     : num  7 7.31 8.19 8.47 8.91 ...
 $ contact_high: num  0 0 0 1 1 1 1 0 1 0

We start with a Poisson regression model with the log population size, the contact rate, and an interaction term between them (priors are informative priors as in Statistical Rethinking).

fit10 <-
  stan_glm(
    total_tools ~ log_pop + contact_high + log_pop * contact_high,
    family = poisson(link = "log"),
    data = d,
    prior = normal(0, 1, autoscale = FALSE),
    prior_intercept = normal(0, 100, autoscale = FALSE),
    seed = 2030
  )

Before running other models, we check whether Poisson is good choice as the conditional observation model.

loo10 <- loo(fit10)
print(loo10)

Computed from 4000 by 10 log-likelihood matrix.

         Estimate   SE
elpd_loo    -40.2  5.9
p_loo         5.0  1.7
looic        80.5 11.9
------
MCSE of elpd_loo is 0.1.
MCSE and ESS estimates assume MCMC draws (r_eff in [0.5, 0.7]).

All Pareto k estimates are good (k < 0.7).
See help('pareto-k-diagnostic') for details.

Although all Pareto \(k\) estimates are below 0.7 here, the estimated effective number of parameters p_loo is larger than the total number of parameters in the model, which indicates that the Poisson observation model might be too narrow. A negative binomial model might be better, but with so few observations it is not so clear.

We can compute LOO more accurately by running Stan again for the leave-one-out folds with high \(k\) estimates. When using rstanarm this can be done by specifying the k_threshold argument:

loo10 <- loo(fit10, k_threshold=0.7)
All pareto_k estimates below user-specified threshold of 0.7. 
Returning loo object.
print(loo10)

Computed from 4000 by 10 log-likelihood matrix.

         Estimate   SE
elpd_loo    -40.2  5.9
p_loo         5.0  1.7
looic        80.5 11.9
------
MCSE of elpd_loo is 0.1.
MCSE and ESS estimates assume MCMC draws (r_eff in [0.5, 0.7]).

All Pareto k estimates are good (k < 0.7).
See help('pareto-k-diagnostic') for details.

In this case we see that there is not much difference, and thus it is relatively safe to continue.

As a comparison we also compute WAIC:

waic10 <- waic(fit10)
Warning: 
3 (30.0%) p_waic estimates greater than 0.4. We recommend trying loo instead.
print(waic10)

Computed from 4000 by 10 log-likelihood matrix.

          Estimate   SE
elpd_waic    -39.9  5.9
p_waic         4.7  1.7
waic          79.8 11.8

3 (30.0%) p_waic estimates greater than 0.4. We recommend trying loo instead. 

The WAIC computation is giving warnings and the estimated ELPD is slightly more optimistic. We recommend using the PSIS-LOO results instead.

To assess whether the contact rate and interaction term are useful, we can make a comparison to models without these terms.

fit11 <- update(fit10, formula = total_tools ~ log_pop + contact_high)
fit12 <- update(fit10, formula = total_tools ~ log_pop)
(loo11 <- loo(fit11))

Computed from 4000 by 10 log-likelihood matrix.

         Estimate   SE
elpd_loo    -39.7  5.8
p_loo         4.4  1.6
looic        79.4 11.6
------
MCSE of elpd_loo is 0.1.
MCSE and ESS estimates assume MCMC draws (r_eff in [0.5, 1.0]).

All Pareto k estimates are good (k < 0.7).
See help('pareto-k-diagnostic') for details.
(loo12 <- loo(fit12))
Warning: Found 1 observation(s) with a pareto_k > 0.7. We recommend calling 'loo' again with argument 'k_threshold = 0.7' in order to calculate the ELPD without the assumption that these observations are negligible. This will refit the model 1 times to compute the ELPDs for the problematic observations directly.

Computed from 4000 by 10 log-likelihood matrix.

         Estimate  SE
elpd_loo    -42.5 4.7
p_loo         4.1 1.1
looic        85.0 9.4
------
MCSE of elpd_loo is NA.
MCSE and ESS estimates assume MCMC draws (r_eff in [0.4, 0.6]).

Pareto k diagnostic values:
                         Count Pct.    Min. ESS
(-Inf, 0.7]   (good)     9     90.0%   653     
   (0.7, 1]   (bad)      1     10.0%   <NA>    
   (1, Inf)   (very bad) 0      0.0%   <NA>    
See help('pareto-k-diagnostic') for details.
loo11 <- loo(fit11, k_threshold=0.7)
All pareto_k estimates below user-specified threshold of 0.7. 
Returning loo object.
loo12 <- loo(fit12, k_threshold=0.7)
1 problematic observation(s) found.
Model will be refit 1 times.

Fitting model 1 out of 1 (leaving out observation 10)
lpd_point <- cbind(
  loo10$pointwise[, "elpd_loo"], 
  loo11$pointwise[, "elpd_loo"], 
  loo12$pointwise[, "elpd_loo"]
)

For comparison we’ll also compute WAIC values for these additional models:

waic11 <- waic(fit11)
Warning: 
3 (30.0%) p_waic estimates greater than 0.4. We recommend trying loo instead.
waic12 <- waic(fit12)
Warning: 
5 (50.0%) p_waic estimates greater than 0.4. We recommend trying loo instead.
waics <- c(
  waic10$estimates["elpd_waic", 1], 
  waic11$estimates["elpd_waic", 1], 
  waic12$estimates["elpd_waic", 1]
)

The WAIC computation again gives warnings, and we recommend using PSIS-LOO instead.

Finally, we compute 1) WAIC weights, 2) Pseudo-BMA weights without Bayesian bootstrap, 3) Pseudo-BMA+ weights with Bayesian bootstrap, and 4) Bayesian stacking weights.

waic_wts <- exp(waics) / sum(exp(waics))
pbma_wts <- pseudobma_weights(lpd_point, BB=FALSE)
pbma_BB_wts <- pseudobma_weights(lpd_point) # default is BB=TRUE
stacking_wts <- stacking_weights(lpd_point)
round(cbind(waic_wts, pbma_wts, pbma_BB_wts, stacking_wts), 2)
       waic_wts pbma_wts pbma_BB_wts stacking_wts
model1     0.38     0.36        0.31          0.0
model2     0.58     0.63        0.53          0.8
model3     0.04     0.02        0.16          0.2

All weights favor the second model with the log population and the contact rate. WAIC weights and Pseudo-BMA weights (without Bayesian bootstrap) are similar, while Pseudo-BMA+ is more cautious and closer to stacking weights.

It may seem surprising that Bayesian stacking is giving zero weight to the first model, but this is likely due to the fact that the estimated effect for the interaction term is close to zero and thus models 1 and 2 give very similar predictions. In other words, incorporating the model with the interaction (model 1) into the model average doesn’t improve the predictions at all and so model 1 is given a weight of 0. On the other hand, models 2 and 3 are giving slightly different predictions and thus their combination may be slightly better than either alone. This behavior is related to the repeated similar model illustration in the milk example above.

Simpler coding using loo_model_weights function

Although in the examples above we called the stacking_weights and pseudobma_weights functions directly, we can also use the loo_model_weights wrapper, which takes as its input either a list of pointwise log-likelihood matrices or a list of precomputed loo objects. There are also loo_model_weights methods for stanreg objects (fitted model objects from rstanarm) as well as fitted model objects from other packages (e.g. brms) that do the preparation work for the user (see, e.g., the examples at help("loo_model_weights", package = "rstanarm")).

# using list of loo objects
loo_list <- list(loo10, loo11, loo12)
loo_model_weights(loo_list)
Method: stacking
------
      weight
fit10 0.000 
fit11 0.802 
fit12 0.198 
loo_model_weights(loo_list, method = "pseudobma")
Method: pseudo-BMA+ with Bayesian bootstrap
------
      weight
fit10 0.310 
fit11 0.539 
fit12 0.151 
loo_model_weights(loo_list, method = "pseudobma", BB = FALSE)
Method: pseudo-BMA
------
      weight
fit10 0.356 
fit11 0.629 
fit12 0.015 

References

McElreath, R. (2016). Statistical rethinking: A Bayesian course with examples in R and Stan. Chapman & Hall/CRC. http://xcelab.net/rm/statistical-rethinking/

Piironen, J. and Vehtari, A. (2017a). Sparsity information and regularization in the horseshoe and other shrinkage priors. Electronic Journal of Statistics, 11(2):5018-5051.

Piironen, J. and Vehtari, A. (2017b). Comparison of Bayesian predictive methods for model selection. Statistics and Computing, 27(3):711-735. doi:10.1007/s11222-016-9649-y.

Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. Statistics and Computing, 27(5), 1413–1432. doi:10.1007/s11222-016-9696-4. arXiv preprint arXiv:1507.04544.

Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. Journal of Machine Learning Research, 25(72):1-58.

Yao, Y., Vehtari, A., Simpson, D., and Gelman, A. (2018). Using stacking to average Bayesian predictive distributions. Bayesian Analysis, doi:10.1214/17-BA1091.

loo/inst/doc/loo2-example.R0000644000176200001440000000462415122301513015213 0ustar liggesusersparams <- list(EVAL = TRUE) ## ----SETTINGS-knitr, include=FALSE-------------------------------------------- stopifnot(require(knitr)) opts_chunk$set( comment=NA, eval = if (isTRUE(exists("params"))) params$EVAL else FALSE, dev = "png", dpi = 150, fig.asp = 0.618, fig.width = 5, out.width = "60%", fig.align = "center" ) ## ----setup, message=FALSE----------------------------------------------------- library("rstanarm") library("bayesplot") library("loo") ## ----data--------------------------------------------------------------------- # the 'roaches' data frame is included with the rstanarm package data(roaches) str(roaches) # rescale to units of hundreds of roaches roaches$roach1 <- roaches$roach1 / 100 ## ----count-roaches-mcmc, results="hide"--------------------------------------- fit1 <- stan_glm( formula = y ~ roach1 + treatment + senior, offset = log(exposure2), data = roaches, family = poisson(link = "log"), prior = normal(0, 2.5, autoscale = TRUE), prior_intercept = normal(0, 5, autoscale = TRUE), seed = 12345 ) ## ----loo1--------------------------------------------------------------------- loo1 <- loo(fit1, save_psis = TRUE) ## ----print-loo1--------------------------------------------------------------- print(loo1) ## ----plot-loo1, out.width = "70%"--------------------------------------------- plot(loo1) ## ----ppc_loo_pit_overlay------------------------------------------------------ yrep <- posterior_predict(fit1) ppc_loo_pit_qq( y = roaches$y, yrep = yrep, lw = weights(loo1$psis_object) ) ## ----count-roaches-negbin, results="hide"------------------------------------- fit2 <- update(fit1, family = neg_binomial_2) ## ----loo2--------------------------------------------------------------------- loo2 <- loo(fit2, save_psis = TRUE, cores = 2) print(loo2) ## ----plot-loo2---------------------------------------------------------------- plot(loo2, label_points = TRUE) ## ----reloo-------------------------------------------------------------------- if (any(pareto_k_values(loo2) > 0.7)) { loo2 <- loo(fit2, save_psis = TRUE, k_threshold = 0.7) } print(loo2) ## ----ppc_loo_pit_overlay-negbin----------------------------------------------- yrep <- posterior_predict(fit2) ppc_loo_pit_qq(roaches$y, yrep, lw = weights(loo2$psis_object)) ## ----loo_compare-------------------------------------------------------------- loo_compare(loo1, loo2) loo/inst/doc/loo2-with-rstan.R0000644000176200001440000000423715122306001015655 0ustar liggesusersparams <- list(EVAL = TRUE) ## ----SETTINGS-knitr, include=FALSE-------------------------------------------- stopifnot(require(knitr)) opts_chunk$set( comment=NA, eval = if (isTRUE(exists("params"))) params$EVAL else FALSE, dev = "png", dpi = 150, fig.asp = 0.618, fig.width = 5, out.width = "60%", fig.align = "center" ) ## ----eval=FALSE--------------------------------------------------------------- # library("rstan") # # # Prepare data # url <- "http://stat.columbia.edu/~gelman/arm/examples/arsenic/wells.dat" # wells <- read.table(url) # wells$dist100 <- with(wells, dist / 100) # X <- model.matrix(~ dist100 + arsenic, wells) # standata <- list(y = wells$switch, X = X, N = nrow(X), P = ncol(X)) # # # Fit model # fit_1 <- stan("logistic.stan", data = standata) # print(fit_1, pars = "beta") ## ----eval=FALSE--------------------------------------------------------------- # library("loo") # # # Extract pointwise log-likelihood # # using merge_chains=FALSE returns an 
array, which is easier to # # use with relative_eff() # log_lik_1 <- extract_log_lik(fit_1, merge_chains = FALSE) # # # as of loo v2.0.0 we can optionally provide relative effective sample sizes # # when calling loo, which allows for better estimates of the PSIS effective # # sample sizes and Monte Carlo error # r_eff <- relative_eff(exp(log_lik_1), cores = 2) # # # preferably use more than 2 cores (as many cores as possible) # # will use value of 'mc.cores' option if cores is not specified # loo_1 <- loo(log_lik_1, r_eff = r_eff, cores = 2) # print(loo_1) ## ----eval=FALSE--------------------------------------------------------------- # standata$X[, "arsenic"] <- log(standata$X[, "arsenic"]) # fit_2 <- stan(fit = fit_1, data = standata) # # log_lik_2 <- extract_log_lik(fit_2, merge_chains = FALSE) # r_eff_2 <- relative_eff(exp(log_lik_2)) # loo_2 <- loo(log_lik_2, r_eff = r_eff_2, cores = 2) # print(loo_2) ## ----eval=FALSE--------------------------------------------------------------- # # Compare # comp <- loo_compare(loo_1, loo_2) ## ----eval=FALSE--------------------------------------------------------------- # print(comp) # can set simplify=FALSE for more detailed print output loo/inst/doc/loo2-moment-matching.R0000644000176200001440000001055015122305465016654 0ustar liggesusersparams <- list(EVAL = TRUE) ## ----SETTINGS-knitr, include=FALSE-------------------------------------------- stopifnot(require(knitr)) opts_chunk$set( comment=NA, eval = if (isTRUE(exists("params"))) params$EVAL else FALSE, dev = "png", dpi = 150, fig.asp = 0.618, fig.width = 5, out.width = "60%", fig.align = "center" ) ## ----stancode----------------------------------------------------------------- # Note: some syntax used in this Stan program requires RStan >= 2.26 (or CmdStanR) # To use an older version of RStan change the line declaring `y` to: int y[N]; stancode <- " data { int K; int N; matrix[N,K] x; array[N] int y; vector[N] offset_; // offset is reserved keyword in Stan so use offset_ real beta_prior_scale; real alpha_prior_scale; } parameters { vector[K] beta; real intercept; } model { y ~ poisson(exp(x * beta + intercept + offset_)); beta ~ normal(0,beta_prior_scale); intercept ~ normal(0,alpha_prior_scale); } generated quantities { vector[N] log_lik; for (n in 1:N) { log_lik[n] = poisson_lpmf(y[n] | exp(x[n] * beta + intercept + offset_[n])); } } " ## ----setup, message=FALSE----------------------------------------------------- library("rstan") library("loo") seed <- 9547 set.seed(seed) ## ----modelfit, message=FALSE-------------------------------------------------- # Prepare data data(roaches, package = "rstanarm") roaches$roach1 <- sqrt(roaches$roach1) y <- roaches$y x <- roaches[, c("roach1", "treatment", "senior")] offset <- log(roaches[, "exposure2"]) n <- dim(x)[1] k <- dim(x)[2] standata <- list( N = n, K = k, x = as.matrix(x), y = y, offset_ = offset, beta_prior_scale = 2.5, alpha_prior_scale = 5.0 ) # Compile stanmodel <- stan_model(model_code = stancode) # Fit model fit <- sampling(stanmodel, data = standata, seed = seed, refresh = 0) print(fit, pars = "beta") ## ----loo1--------------------------------------------------------------------- loo1 <- loo(fit) loo1 ## ----loo_moment_match--------------------------------------------------------- # available in rstan >= 2.21 loo2 <- loo(fit, moment_match = TRUE) loo2 ## ----stanfitfuns-------------------------------------------------------------- # create a named list of draws for use with rstan methods .rstan_relist <- function(x, skeleton) { 
out <- utils::relist(x, skeleton) for (i in seq_along(skeleton)) { dim(out[[i]]) <- dim(skeleton[[i]]) } out } # rstan helper function to get dims of parameters right .create_skeleton <- function(pars, dims) { out <- lapply(seq_along(pars), function(i) { len_dims <- length(dims[[i]]) if (len_dims < 1) { return(0) } return(array(0, dim = dims[[i]])) }) names(out) <- pars out } # extract original posterior draws post_draws_stanfit <- function(x, ...) { as.matrix(x) } # compute a matrix of log-likelihood values for the ith observation # matrix contains information about the number of MCMC chains log_lik_i_stanfit <- function(x, i, parameter_name = "log_lik", ...) { loo::extract_log_lik(x, parameter_name, merge_chains = FALSE)[,, i] } # transform parameters to the unconstraint space unconstrain_pars_stanfit <- function(x, pars, ...) { skeleton <- .create_skeleton(x@sim$pars_oi, x@par_dims[x@sim$pars_oi]) upars <- apply(pars, 1, FUN = function(theta) { rstan::unconstrain_pars(x, .rstan_relist(theta, skeleton)) }) # for one parameter models if (is.null(dim(upars))) { dim(upars) <- c(1, length(upars)) } t(upars) } # compute log_prob for each posterior draws on the unconstrained space log_prob_upars_stanfit <- function(x, upars, ...) { apply( upars, 1, rstan::log_prob, object = x, adjust_transform = TRUE, gradient = FALSE ) } # compute log_lik values based on the unconstrained parameters log_lik_i_upars_stanfit <- function( x, upars, i, parameter_name = "log_lik", ... ) { S <- nrow(upars) out <- numeric(S) for (s in seq_len(S)) { out[s] <- rstan::constrain_pars(x, upars = upars[s, ])[[parameter_name]][i] } out } ## ----loo_moment_match.default, message=FALSE---------------------------------- loo3 <- loo::loo_moment_match.default( x = fit, loo = loo1, post_draws = post_draws_stanfit, log_lik_i = log_lik_i_stanfit, unconstrain_pars = unconstrain_pars_stanfit, log_prob_upars = log_prob_upars_stanfit, log_lik_i_upars = log_lik_i_upars_stanfit ) loo3 loo/inst/doc/loo2-non-factorized.R0000644000176200001440000002141515122305743016510 0ustar liggesusersparams <- list(EVAL = TRUE) ## ----SETTINGS-knitr, include=FALSE-------------------------------------------- stopifnot(require(knitr)) opts_chunk$set( comment=NA, eval = if (isTRUE(exists("params"))) params$EVAL else FALSE, dev = "png", dpi = 150, fig.asp = 0.618, fig.width = 5, out.width = "60%", fig.align = "center" ) ## ----more-knitr-ops, include=FALSE-------------------------------------------- knitr::opts_chunk$set( cache=TRUE, message=FALSE, warning=FALSE ) ## ----lpdf, eval=FALSE--------------------------------------------------------- # /** # * Normal log-pdf for spatially lagged responses # * # * @param y Vector of response values. # * @param mu Mean parameter vector. # * @param sigma Positive scalar residual standard deviation. # * @param rho Positive scalar autoregressive parameter. # * @param W Spatial weight matrix. # * # * @return A scalar to be added to the log posterior. 
# */ # real normal_lagsar_lpdf(vector y, vector mu, real sigma, # real rho, matrix W) { # int N = rows(y); # real inv_sigma2 = 1 / square(sigma); # matrix[N, N] W_tilde = -rho * W; # vector[N] half_pred; # # for (n in 1:N) W_tilde[n,n] += 1; # # half_pred = W_tilde * (y - mdivide_left(W_tilde, mu)); # # return 0.5 * log_determinant(crossprod(W_tilde) * inv_sigma2) - # 0.5 * dot_self(half_pred) * inv_sigma2; # } ## ----setup, cache=FALSE------------------------------------------------------- library("loo") library("brms") library("bayesplot") library("ggplot2") color_scheme_set("brightblue") theme_set(theme_default()) SEED <- 10001 set.seed(SEED) # only sets seed for R (seed for Stan set later) # loads COL.OLD data frame and COL.nb neighbor list data(oldcol, package = "spdep") ## ----data--------------------------------------------------------------------- str(COL.OLD[, c("CRIME", "HOVAL", "INC")]) ## ----fit, results="hide"------------------------------------------------------ fit <- brm( CRIME ~ INC + HOVAL + sar(COL.nb, type = "lag"), data = COL.OLD, data2 = list(COL.nb = COL.nb), chains = 4, seed = SEED ) ## ----plot-lagsar, message=FALSE----------------------------------------------- lagsar <- as.matrix(fit, pars = "lagsar") estimates <- quantile(lagsar, probs = c(0.25, 0.5, 0.75)) mcmc_hist(lagsar) + vline_at(estimates, linetype = 2, size = 1) + ggtitle("lagsar: posterior median and 50% central interval") ## ----approx------------------------------------------------------------------- posterior <- as.data.frame(fit) y <- fit$data$CRIME N <- length(y) S <- nrow(posterior) loglik <- yloo <- sdloo <- matrix(nrow = S, ncol = N) for (s in 1:S) { p <- posterior[s, ] eta <- p$b_Intercept + p$b_INC * fit$data$INC + p$b_HOVAL * fit$data$HOVAL W_tilde <- diag(N) - p$lagsar * spdep::nb2mat(COL.nb) Cinv <- t(W_tilde) %*% W_tilde / p$sigma^2 g <- Cinv %*% (y - solve(W_tilde, eta)) cbar <- diag(Cinv) yloo[s, ] <- y - g / cbar sdloo[s, ] <- sqrt(1 / cbar) loglik[s, ] <- dnorm(y, yloo[s, ], sdloo[s, ], log = TRUE) } # use loo for psis smoothing log_ratios <- -loglik psis_result <- psis(log_ratios) ## ----plot, cache = FALSE------------------------------------------------------ plot(psis_result, label_points = TRUE) ## ----checklast, cache = FALSE------------------------------------------------- yloo_sub <- yloo[S, ] sdloo_sub <- sdloo[S, ] df <- data.frame( y = y, yloo = yloo_sub, ymin = yloo_sub - sdloo_sub * 2, ymax = yloo_sub + sdloo_sub * 2 ) ggplot(data=df, aes(x = y, y = yloo, ymin = ymin, ymax = ymax)) + geom_errorbar( width = 1, color = "skyblue3", position = position_jitter(width = 0.25) ) + geom_abline(color = "gray30", size = 1.2) + geom_point() ## ----psisloo------------------------------------------------------------------ (psis_loo <- loo(loglik)) ## ----fit_dummy, cache = TRUE-------------------------------------------------- # see help("mi", "brms") for details on the mi() usage fit_dummy <- brm( CRIME | mi() ~ INC + HOVAL + sar(COL.nb, type = "lag"), data = COL.OLD, data2 = list(COL.nb = COL.nb), chains = 0 ) ## ----exact-loo-cv, results="hide", message=FALSE, warning=FALSE, cache = TRUE---- S <- 500 res <- vector("list", N) loglik <- matrix(nrow = S, ncol = N) for (i in seq_len(N)) { dat_mi <- COL.OLD dat_mi$CRIME[i] <- NA fit_i <- update(fit_dummy, newdata = dat_mi, # just for vignette chains = 1, iter = S * 2) posterior <- as.data.frame(fit_i) yloo <- sdloo <- rep(NA, S) for (s in seq_len(S)) { p <- posterior[s, ] y_miss_i <- y y_miss_i[i] <- p$Ymi eta <- p$b_Intercept + p$b_INC * 
fit_i$data$INC + p$b_HOVAL * fit_i$data$HOVAL W_tilde <- diag(N) - p$lagsar * spdep::nb2mat(COL.nb) Cinv <- t(W_tilde) %*% W_tilde / p$sigma^2 g <- Cinv %*% (y_miss_i - solve(W_tilde, eta)) cbar <- diag(Cinv); yloo[s] <- y_miss_i[i] - g[i] / cbar[i] sdloo[s] <- sqrt(1 / cbar[i]) loglik[s, i] <- dnorm(y[i], yloo[s], sdloo[s], log = TRUE) } ypred <- rnorm(S, yloo, sdloo) res[[i]] <- data.frame(y = c(posterior$Ymi, ypred)) res[[i]]$type <- rep(c("pp", "loo"), each = S) res[[i]]$obs <- i } res <- do.call(rbind, res) ## ----yplots, cache = FALSE, fig.width=10, out.width="95%", fig.asp = 0.3------ res_sub <- res[res$obs %in% 1:4, ] ggplot(res_sub, aes(y, fill = type)) + geom_density(alpha = 0.6) + facet_wrap("obs", scales = "fixed", ncol = 4) ## ----loo_exact, cache=FALSE--------------------------------------------------- log_mean_exp <- function(x) { # more stable than log(mean(exp(x))) max_x <- max(x) max_x + log(sum(exp(x - max_x))) - log(length(x)) } exact_elpds <- apply(loglik, 2, log_mean_exp) exact_elpd <- sum(exact_elpds) round(exact_elpd, 1) ## ----compare, fig.height=5---------------------------------------------------- df <- data.frame( approx_elpd = psis_loo$pointwise[, "elpd_loo"], exact_elpd = exact_elpds ) ggplot(df, aes(x = approx_elpd, y = exact_elpd)) + geom_abline(color = "gray30") + geom_point(size = 2) + geom_point(data = df[4, ], size = 3, color = "red3") + xlab("Approximate elpds") + ylab("Exact elpds") + coord_fixed(xlim = c(-16, -3), ylim = c(-16, -3)) ## ----pt4---------------------------------------------------------------------- without_pt_4 <- c( approx = sum(psis_loo$pointwise[-4, "elpd_loo"]), exact = sum(exact_elpds[-4]) ) round(without_pt_4, 1) ## ----brms-stan-code, eval=FALSE----------------------------------------------- # // generated with brms 2.2.0 # functions { # /** # * Normal log-pdf for spatially lagged responses # * # * @param y Vector of response values. # * @param mu Mean parameter vector. # * @param sigma Positive scalar residual standard deviation. # * @param rho Positive scalar autoregressive parameter. # * @param W Spatial weight matrix. # * # * @return A scalar to be added to the log posterior. # */ # real normal_lagsar_lpdf(vector y, vector mu, real sigma, # real rho, matrix W) { # int N = rows(y); # real inv_sigma2 = 1 / square(sigma); # matrix[N, N] W_tilde = -rho * W; # vector[N] half_pred; # for (n in 1:N) W_tilde[n, n] += 1; # half_pred = W_tilde * (y - mdivide_left(W_tilde, mu)); # return 0.5 * log_determinant(crossprod(W_tilde) * inv_sigma2) - # 0.5 * dot_self(half_pred) * inv_sigma2; # } # } # data { # int N; // total number of observations # vector[N] Y; // response variable # int Nmi; // number of missings # int Jmi[Nmi]; // positions of missings # int K; // number of population-level effects # matrix[N, K] X; // population-level design matrix # matrix[N, N] W; // spatial weight matrix # int prior_only; // should the likelihood be ignored? 
# } # transformed data { # int Kc = K - 1; # matrix[N, K - 1] Xc; // centered version of X # vector[K - 1] means_X; // column means of X before centering # for (i in 2:K) { # means_X[i - 1] = mean(X[, i]); # Xc[, i - 1] = X[, i] - means_X[i - 1]; # } # } # parameters { # vector[Nmi] Ymi; // estimated missings # vector[Kc] b; // population-level effects # real temp_Intercept; // temporary intercept # real sigma; // residual SD # real lagsar; // SAR parameter # } # transformed parameters { # } # model { # vector[N] Yl = Y; # vector[N] mu = Xc * b + temp_Intercept; # Yl[Jmi] = Ymi; # // priors including all constants # target += student_t_lpdf(temp_Intercept | 3, 34, 17); # target += student_t_lpdf(sigma | 3, 0, 17) # - 1 * student_t_lccdf(0 | 3, 0, 17); # // likelihood including all constants # if (!prior_only) { # target += normal_lagsar_lpdf(Yl | mu, sigma, lagsar, W); # } # } # generated quantities { # // actual population-level intercept # real b_Intercept = temp_Intercept - dot_product(means_X, b); # } loo/inst/doc/loo2-example.Rmd0000644000176200001440000003000615122262764015543 0ustar liggesusers--- title: "Using the loo package (version >= 2.0.0)" author: "Aki Vehtari and Jonah Gabry" date: "`r Sys.Date()`" output: html_vignette: toc: yes params: EVAL: !r identical(Sys.getenv("NOT_CRAN"), "true") --- ```{r, child="children/SETTINGS-knitr.txt"} ``` ```{r, child="children/SEE-ONLINE.txt", eval = if (isTRUE(exists("params"))) !params$EVAL else TRUE} ``` # Introduction This vignette demonstrates how to use the __loo__ package to carry out Pareto smoothed importance-sampling leave-one-out cross-validation (PSIS-LOO) for purposes of model checking and model comparison. In this vignette we can't provide all necessary background information on PSIS-LOO and its diagnostics (Pareto $k$ and effective sample size), so we encourage readers to refer to the following papers for more details: * Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. _Statistics and Computing_. 27(5), 1413--1432. \doi:10.1007/s11222-016-9696-4. Links: [published](https://link.springer.com/article/10.1007/s11222-016-9696-4) | [preprint arXiv](https://arxiv.org/abs/1507.04544). * Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. *Journal of Machine Learning Research*, 25(72):1-58. [PDF](https://jmlr.org/papers/v25/19-556.html) # Setup In addition to the __loo__ package, we'll also be using __rstanarm__ and __bayesplot__: ```{r setup, message=FALSE} library("rstanarm") library("bayesplot") library("loo") ``` # Example: Poisson vs negative binomial for the roaches dataset ## Background and model fitting The Poisson and negative binomial regression models used below in our example, as well as the `stan_glm` function used to fit the models, are covered in more depth in the __rstanarm__ vignette [_Estimating Generalized Linear Models for Count Data with rstanarm_](http://mc-stan.org/rstanarm/articles/count.html). In the rest of this vignette we will assume the reader is already familiar with these kinds of models. ### Roaches data The example data we'll use comes from Chapter 8.3 of Gelman and Hill (2007). We want to make inferences about the efficacy of a certain pest management system at reducing the number of roaches in urban apartments. Here is how Gelman and Hill describe the experiment and data (pg. 
161): > the treatment and control were applied to 160 and 104 apartments, respectively, and the outcome measurement $y_i$ in each apartment $i$ was the number of roaches caught in a set of traps. Different apartments had traps for different numbers of days In addition to an intercept, the regression predictors for the model are `roach1`, the pre-treatment number of roaches (rescaled above to be in units of hundreds), the treatment indicator `treatment`, and a variable indicating whether the apartment is in a building restricted to elderly residents `senior`. Because the number of days for which the roach traps were used is not the same for all apartments in the sample, we use the `offset` argument to specify that `log(exposure2)` should be added to the linear predictor. ```{r data} # the 'roaches' data frame is included with the rstanarm package data(roaches) str(roaches) # rescale to units of hundreds of roaches roaches$roach1 <- roaches$roach1 / 100 ``` ### Fit Poisson model We'll fit a simple Poisson regression model using the `stan_glm` function from the __rstanarm__ package. ```{r count-roaches-mcmc, results="hide"} fit1 <- stan_glm( formula = y ~ roach1 + treatment + senior, offset = log(exposure2), data = roaches, family = poisson(link = "log"), prior = normal(0, 2.5, autoscale = TRUE), prior_intercept = normal(0, 5, autoscale = TRUE), seed = 12345 ) ``` Usually we would also run posterior predictive checks as shown in the __rstanarm__ vignette [Estimating Generalized Linear Models for Count Data with rstanarm](http://mc-stan.org/rstanarm/articles/count.html), but here we focus only on methods provided by the __loo__ package.
## Using the __loo__ package for model checking and comparison _Although cross-validation is mostly used for model comparison, it is also useful for model checking._ ### Computing PSIS-LOO and checking diagnostics We start by computing PSIS-LOO with the `loo` function. Since we fit our model using __rstanarm__ we can use the `loo` method for `stanreg` objects (fitted model objects from __rstanarm__), which doesn't require us to first extract the pointwise log-likelihood values. If we had written our own Stan program instead of using __rstanarm__ we would pass an array or matrix of log-likelihood values to the `loo` function (see, e.g. `help("loo.array", package = "loo")`). We'll also use the argument `save_psis = TRUE` to save some intermediate results to be re-used later. ```{r loo1} loo1 <- loo(fit1, save_psis = TRUE) ``` `loo` gives us warnings about the Pareto diagnostics, which indicate that for some observations the leave-one-out posteriors are different enough from the full posterior that importance-sampling is not able to correct the difference. We can see more details by printing the `loo` object. ```{r print-loo1} print(loo1) ``` The table shows us a summary of Pareto $k$ diagnostic, which is used to assess the reliability of the estimates. In addition to the proportion of leave-one-out folds with $k$ values in different intervals, the minimum of the effective sample sizes in that category is shown to give idea why higher $k$ values are bad. Since we have some $k>1$, we are not able to compute an estimate for the Monte Carlo standard error (SE) of the expected log predictive density (`elpd_loo`) and `NA` is displayed. (Full details on the interpretation of the Pareto $k$ diagnostics are available in the Vehtari, Gelman, and Gabry (2017) and Vehtari, Simpson, Gelman, Yao, and Gabry (2024) papers referenced at the top of this vignette.) In this case the `elpd_loo` estimate should not be considered reliable. If we had a well-specified model we would expect the estimated effective number of parameters (`p_loo`) to be smaller than or similar to the total number of parameters in the model. Here `p_loo` is almost 300, which is about 70 times the total number of parameters in the model, indicating severe model misspecification. ### Plotting Pareto $k$ diagnostics Using the `plot` method on our `loo1` object produces a plot of the $k$ values (in the same order as the observations in the dataset used to fit the model) with horizontal lines corresponding to the same categories as in the printed output above. ```{r plot-loo1, out.width = "70%"} plot(loo1) ``` This plot is useful to quickly see the distribution of $k$ values, but it's often also possible to see structure with respect to data ordering. In our case this is mild, but there seems to be a block of data that is somewhat easier to predict (indices around 90--150). Unfortunately even for these data points we see some high $k$ values. ### Marginal posterior predictive checks The `loo` package can be used in combination with the `bayesplot` package for leave-one-out cross-validation marginal posterior predictive checks [Gabry et al (2018)](https://arxiv.org/abs/1709.01449). LOO-PIT values are cumulative probabilities for $y_i$ computed using the LOO marginal predictive distributions $p(y_i|y_{-i})$. For a good model, the distribution of LOO-PIT values should be uniform. In the following QQ-plot the LOO-PIT values for our model (y-axi) is compared to standard uniform distribution (x-axis). 
```{r ppc_loo_pit_overlay} yrep <- posterior_predict(fit1) ppc_loo_pit_qq( y = roaches$y, yrep = yrep, lw = weights(loo1$psis_object) ) ``` The excessive number of LOO-PIT values close to 0 indicates that the model is under-dispersed compared to the data, and we should consider a model that allows for greater dispersion. ## Try alternative model with more flexibility Here we will try [negative binomial](https://en.wikipedia.org/wiki/Negative_binomial_distribution) regression, which is commonly used for overdispersed count data. Unlike the Poisson distribution, the negative binomial distribution allows the conditional mean and variance of $y$ to differ. ```{r count-roaches-negbin, results="hide"} fit2 <- update(fit1, family = neg_binomial_2) ``` ```{r loo2} loo2 <- loo(fit2, save_psis = TRUE, cores = 2) print(loo2) ``` ```{r plot-loo2} plot(loo2, label_points = TRUE) ``` Using the `label_points` argument will label any $k$ values larger than the diagnostic threshold with the index of the corresponding data point. These high values are often the result of model misspecification and frequently correspond to data points that would be considered ``outliers'' in the data and surprising according to the model [Gabry et al (2019)](https://arxiv.org/abs/1709.01449). Unfortunately, while large $k$ values are a useful indicator of model misspecification, small $k$ values are not a guarantee that a model is well-specified. If there are a small number of problematic $k$ values then we can use a feature in __rstanarm__ that lets us refit the model once for each of these problematic observations. Each time the model is refit, one of the observations with a high $k$ value is omitted and the LOO calculations are performed exactly for that observation. The results are then recombined with the approximate LOO calculations already carried out for the observations without problematic $k$ values: ```{r reloo} if (any(pareto_k_values(loo2) > 0.7)) { loo2 <- loo(fit2, save_psis = TRUE, k_threshold = 0.7) } print(loo2) ``` In the print output we can see that the Monte Carlo SE is small compared to the other uncertainties. On the other hand, `p_loo` is about 7 and still a bit higher than the total number of parameters in the model. This indicates that there is almost certainly still some degree of model misspecification, but this is much better than the `p_loo` estimate for the Poisson model. For further model checking we again examine the LOO-PIT values. ```{r ppc_loo_pit_overlay-negbin} yrep <- posterior_predict(fit2) ppc_loo_pit_qq(roaches$y, yrep, lw = weights(loo2$psis_object)) ``` The plot for the negative binomial model looks better than the Poisson plot, but we still see that this model is not capturing all of the essential features in the data. ## Comparing the models on expected log predictive density We can use the `loo_compare` function to compare our two models on expected log predictive density (ELPD) for new data: ```{r loo_compare} loo_compare(loo1, loo2) ``` The difference in ELPD is much larger than several times the estimated standard error of the difference again indicating that the negative-binomial model is xpected to have better predictive performance than the Poisson model. However, according to the LOO-PIT checks there is still some misspecification, and a reasonable guess is that a hurdle or zero-inflated model would be an improvement (we leave that for another case study).
# References Gabry, J., Simpson, D., Vehtari, A., Betancourt, M. and Gelman, A. (2019), Visualization in Bayesian workflow. _J. R. Stat. Soc. A_, 182: 389-402. \doi:10.1111/rssa.12378. ([journal version](https://rss.onlinelibrary.wiley.com/doi/full/10.1111/rssa.12378), [arXiv preprint](https://arxiv.org/abs/1709.01449), [code on GitHub](https://github.com/jgabry/bayes-vis-paper)) Gelman, A. and Hill, J. (2007). _Data Analysis Using Regression and Multilevel/Hierarchical Models._ Cambridge University Press, Cambridge, UK. Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. _Statistics and Computing_. 27(5), 1413--1432. \doi:10.1007/s11222-016-9696-4. [online](https://link.springer.com/article/10.1007/s11222-016-9696-4), [arXiv preprint arXiv:1507.04544](https://arxiv.org/abs/1507.04544). Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. *Journal of Machine Learning Research*, 25(72):1-58. [PDF](https://jmlr.org/papers/v25/19-556.html) loo/inst/doc/loo2-large-data.html0000644000176200001440000015652115122301514016331 0ustar liggesusers Using Leave-one-out cross-validation for large data

Using Leave-one-out cross-validation for large data

Mans Magnusson, Paul Bürkner, Aki Vehtari and Jonah Gabry

2025-12-22

Introduction

This vignette demonstrates how to do leave-one-out cross-validation for large data using the loo package and Stan. Two approaches are covered: LOO with subsampling and LOO using approximations to the posterior distribution. Some sections of this vignette are excerpted from the following papers

  • Magnusson, M., Riis Andersen, M., Jonasson, J. and Vehtari, A. (2020). Leave-One-Out Cross-Validation for Model Comparison in Large Data. Proceedings of the 23rd International Conference on Artificial Intelligence and Statistics (AISTATS), in PMLR 108. arXiv preprint arXiv:2001.00980.

  • Magnusson, M., Andersen, M., Jonasson, J. & Vehtari, A. (2019). Bayesian leave-one-out cross-validation for large data. Proceedings of the 36th International Conference on Machine Learning, in PMLR 97:4244-4253 online, arXiv preprint arXiv:1904.10679.

  • Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. Statistics and Computing. 27(5), 1413–1432. doi:10.1007/s11222-016-9696-4. Links: published | arXiv preprint.

  • Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. Journal of Machine Learning Research, 25(72):1-58. PDF

which provide important background for understanding the methods implemented in the package.

Setup

In addition to the loo package, we’ll also be using rstan:

library("rstan")
library("loo")
set.seed(4711)

Example: Well water in Bangladesh

We will use the same example as in the vignette Writing Stan programs for use with the loo package. See that vignette for a description of the problem and data.

The sample size in this example is only \(N=3020\), which is not large enough to require the special methods for large data described in this vignette, but is sufficient for demonstration purposes in this tutorial.

Coding the Stan model

Here is the Stan code for fitting the logistic regression model, which we save in a file called logistic.stan:

// Note: some syntax used in this program requires RStan >= 2.26 (or CmdStanR)
// To use an older version of RStan change the line declaring `y` to:
//    int<lower=0,upper=1> y[N];
data {
  int<lower=0> N;             // number of data points
  int<lower=0> P;             // number of predictors (including intercept)
  matrix[N,P] X;              // predictors (including 1s for intercept)
  array[N] int<lower=0,upper=1> y;  // binary outcome
}
parameters {
  vector[P] beta;
}
model {
  beta ~ normal(0, 1);
  y ~ bernoulli_logit(X * beta);
}

Importantly, unlike the general approach recommended in Writing Stan programs for use with the loo package, we do not compute the log-likelihood for each observation in the generated quantities block of the Stan program. Here we are assuming we have a large data set (larger than the one we’re actually using in this demonstration) and so it is preferable to instead define a function in R to compute the log-likelihood for each data point when needed rather than storing all of the log-likelihood values in memory.

The log-likelihood in R can be coded as follows:

# we'll add an argument log to toggle whether this is a log-likelihood or 
# likelihood function. this will be useful later in the vignette.
llfun_logistic <- function(data_i, draws, log = TRUE) {
  x_i <- as.matrix(data_i[, which(grepl(colnames(data_i), pattern = "X")), drop=FALSE])
  logit_pred <- draws %*% t(x_i)
  dbinom(x = data_i$y, size = 1, prob = 1/(1 + exp(-logit_pred)), log = log)
}

The function llfun_logistic() needs to have arguments data_i and draws. Below we will test that the function is working by using the loo_i() function.

Fitting the model with RStan

Next we fit the model in Stan using the rstan package:

# Prepare data
url <- "http://stat.columbia.edu/~gelman/arm/examples/arsenic/wells.dat"
wells <- read.table(url)
wells$dist100 <- with(wells, dist / 100)
X <- model.matrix(~ dist100 + arsenic, wells)
standata <- list(y = wells$switch, X = X, N = nrow(X), P = ncol(X))

# Compile
stan_mod <- stan_model("logistic.stan")

# Fit model
fit_1 <- sampling(stan_mod, data = standata, seed = 4711)
print(fit_1, pars = "beta")
         mean se_mean   sd  2.5%   25%   50%   75% 97.5% n_eff Rhat
beta[1]  0.00       0 0.08 -0.15 -0.05  0.00  0.06  0.16  1933    1
beta[2] -0.89       0 0.10 -1.09 -0.96 -0.89 -0.82 -0.69  2332    1
beta[3]  0.46       0 0.04  0.38  0.43  0.46  0.49  0.54  2051    1

Before we move on to computing LOO we can now test that the log-likelihood function we wrote is working as it should. The loo_i() function is a helper function that can be used to test a log-likelihood function on a single observation.

# used for draws argument to loo_i
parameter_draws_1 <- extract(fit_1)$beta

# used for data argument to loo_i
stan_df_1 <- as.data.frame(standata)

# compute relative efficiency (this is slow and optional but is recommended to allow 
# for adjusting PSIS effective sample size based on MCMC effective sample size)
r_eff <- relative_eff(llfun_logistic, 
                      log = FALSE, # relative_eff wants likelihood not log-likelihood values
                      chain_id = rep(1:4, each = 1000), 
                      data = stan_df_1, 
                      draws = parameter_draws_1, 
                      cores = 2)

loo_i(i = 1, llfun_logistic, r_eff = r_eff, data = stan_df_1, draws = parameter_draws_1)
$pointwise
    elpd_loo mcse_elpd_loo        p_loo     looic influence_pareto_k
1 -0.3314552  0.0002887608 0.0003361772 0.6629103        -0.05679886
...

Approximate LOO-CV using PSIS-LOO and subsampling

We can then use the loo_subsample() function to compute the efficient PSIS-LOO approximation to exact LOO-CV using subsampling:

set.seed(4711)
loo_ss_1 <-
  loo_subsample(
    llfun_logistic,
    observations = 100, # take a subsample of size 100
    cores = 2,
    # these next objects were computed above
    r_eff = r_eff, 
    draws = parameter_draws_1,
    data = stan_df_1
  )
print(loo_ss_1)
Computed from 4000 by 100 subsampled log-likelihood
values from 3020 total observations.

         Estimate   SE subsampling SE
elpd_loo  -1968.5 15.6            0.3
p_loo         3.1  0.1            0.4
looic      3936.9 31.2            0.6
------
Monte Carlo SE of elpd_loo is 0.0.
MCSE and ESS estimates assume MCMC draws (r_eff in [0.9, 1.0]).

All Pareto k estimates are good (k < 0.7).
See help('pareto-k-diagnostic') for details.

The loo_subsample() function creates an object of class psis_loo_ss, which inherits from psis_loo and loo (the classes of regular loo objects).

The printed output above shows the estimates \(\widehat{\mbox{elpd}}_{\rm loo}\) (expected log predictive density), \(\widehat{p}_{\rm loo}\) (effective number of parameters), and \({\rm looic} =-2\, \widehat{\mbox{elpd}}_{\rm loo}\) (the LOO information criterion). Unlike when using loo(), when using loo_subsample() there is an additional column giving the “subsampling SE”, which reflects the additional uncertainty due to the subsampling used.

The line at the bottom of the printed output provides information about the reliability of the LOO approximation (the interpretation of the \(k\) parameter is explained in help('pareto-k-diagnostic') and in greater detail in Vehtari, Simpson, Gelman, Yao, and Gabry (2024)). In this case, the message tells us that all of the estimates for \(k\) are fine for this subsample.
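
If we want to work with these quantities programmatically rather than reading them off the printed summary, the components can be extracted from the returned object. The sketch below is illustrative and assumes the usual loo accessors behave for subsampled objects as they do for regular loo objects (which they inherit from):

loo_ss_1$estimates          # matrix with Estimate, SE and subsampling SE columns
pareto_k_values(loo_ss_1)   # one Pareto k estimate per subsampled observation
nobs(loo_ss_1)              # number of observations currently in the subsample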

Adding additional subsamples

If we are not satisfied with the subsample size (i.e., the accuracy), we can use the update() method to add more subsampled observations until we are satisfied.

set.seed(4711)
loo_ss_1b <-
  update(
    loo_ss_1,
    observations = 200, # subsample 200 instead of 100
    r_eff = r_eff,
    draws = parameter_draws_1,
    data = stan_df_1
  ) 
print(loo_ss_1b)
Computed from 4000 by 200 subsampled log-likelihood
values from 3020 total observations.

         Estimate   SE subsampling SE
elpd_loo  -1968.3 15.6            0.2
p_loo         3.2  0.1            0.4
looic      3936.7 31.2            0.5
------
Monte Carlo SE of elpd_loo is 0.0.
MCSE and ESS estimates assume MCMC draws (r_eff in [0.9, 1.0]).

All Pareto k estimates are good (k < 0.7).
See help('pareto-k-diagnostic') for details.

Specifying estimator and sampling method

The performance of the subsampling approach relies on two components: the estimation method and the approximation used for the elpd. See the documentation for loo_subsample() for more information on which estimators and approximations are implemented. The default implementation uses the point log predictive density evaluated at the mean of the posterior (loo_approximation="plpd") together with the difference estimator (estimator="diff_srs"). This combination prioritizes fast computation, but we can easily use other estimators as well as other elpd approximations, for example:

set.seed(4711)
loo_ss_1c <-
  loo_subsample(
    x = llfun_logistic,
    r_eff = r_eff,
    draws = parameter_draws_1,
    data = stan_df_1,
    observations = 100,
    estimator = "hh_pps", # use Hansen-Hurwitz
    loo_approximation = "lpd", # use lpd instead of plpd
    loo_approximation_draws = 100,
    cores = 2
  )
print(loo_ss_1c)
Computed from 4000 by 100 subsampled log-likelihood
values from 3020 total observations.

         Estimate   SE subsampling SE
elpd_loo  -1968.9 15.4            0.5
p_loo         3.5  0.2            0.5
looic      3937.9 30.7            1.1
------
Monte Carlo SE of elpd_loo is 0.0.
MCSE and ESS estimates assume MCMC draws (r_eff in [0.9, 1.0]).

All Pareto k estimates are good (k < 0.7).
See help('pareto-k-diagnostic') for details.

See the documentation and references for loo_subsample() for details on the implemented approximations.
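
As a quick, informal check of how much the estimator and approximation choices matter here, we can put the elpd rows of the two estimates tables side by side. This is a minimal sketch using the objects created above; it assumes the standard loo estimates matrix layout (rows elpd_loo/p_loo/looic, columns Estimate/SE/subsampling SE):

rbind(
  plpd_diff_srs = loo_ss_1$estimates["elpd_loo", ],
  lpd_hh_pps    = loo_ss_1c$estimates["elpd_loo", ]
)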

Approximate LOO-CV using PSIS-LOO with posterior approximations

Using posterior approximations, such as variational inference and Laplace approximations, can further speed up LOO-CV for large data. Here we demonstrate this using a Laplace approximation in Stan.

fit_laplace <- optimizing(stan_mod, data = standata, draws = 2000, 
                          importance_resampling = TRUE)
parameter_draws_laplace <- fit_laplace$theta_tilde # draws from approximate posterior
log_p <- fit_laplace$log_p # log density of the posterior
log_g <- fit_laplace$log_g # log density of the approximation

Using the posterior approximation we can then do LOO-CV by correcting for the posterior approximation when we compute the elpd. To do this we use the loo_approximate_posterior() function.

set.seed(4711)
loo_ap_1 <-
  loo_approximate_posterior(
    x = llfun_logistic,
    draws = parameter_draws_laplace,
    data = stan_df_1,
    log_p = log_p,
    log_g = log_g,
    cores = 2
  )
print(loo_ap_1)

The function creates an object of class psis_loo_ap, which inherits from psis_loo and loo.

Computed from 2000 by 3020 log-likelihood matrix

         Estimate   SE
elpd_loo  -1968.4 15.6
p_loo         3.2  0.2
looic      3936.8 31.2
------
Posterior approximation correction used.
Monte Carlo SE of elpd_loo is 0.0.
MCSE and ESS estimates assume independent draws (r_eff=1).

All Pareto k estimates are good (k < 0.7).
See help('pareto-k-diagnostic') for details.

Combining the posterior approximation method with subsampling

The posterior approximation correction can also be used together with subsampling:

set.seed(4711)
loo_ap_ss_1 <-
  loo_subsample(
    x = llfun_logistic,
    draws = parameter_draws_laplace,
    data = stan_df_1,
    log_p = log_p,
    log_g = log_g,
    observations = 100,
    cores = 2
  )
print(loo_ap_ss_1)
Computed from 2000 by 100 subsampled log-likelihood
values from 3020 total observations.

         Estimate   SE subsampling SE
elpd_loo  -1968.2 15.6            0.4
p_loo         2.9  0.1            0.5
looic      3936.4 31.1            0.8
------
Posterior approximation correction used.
Monte Carlo SE of elpd_loo is 0.0.
MCSE and ESS estimates assume independent draws (r_eff=1).

All Pareto k estimates are good (k < 0.7).
See help('pareto-k-diagnostic') for details.

The object created is of class psis_loo_ss, which inherits from the psis_loo_ap class previously described.
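
A quick way to see this inheritance is to inspect the class attribute directly (a trivial sketch; the exact ordering of the class vector is an implementation detail):

class(loo_ap_ss_1)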

Comparing models

To compare this model to an alternative model for the same data we can use the loo_compare() function just as we would if using loo() instead of loo_subsample() or loo_approximate_posterior(). First we’ll fit a second model to the well-switching data, using log(arsenic) instead of arsenic as a predictor:

standata$X[, "arsenic"] <- log(standata$X[, "arsenic"])
fit_2 <- sampling(stan_mod, data = standata) 
parameter_draws_2 <- extract(fit_2)$beta
stan_df_2 <- as.data.frame(standata)

# recompute subsampling loo for first model for demonstration purposes

# compute relative efficiency (this is slow and optional but is recommended to allow 
# for adjusting PSIS effective sample size based on MCMC effective sample size)
r_eff_1 <- relative_eff(
  llfun_logistic,
  log = FALSE, # relative_eff wants likelihood not log-likelihood values
  chain_id = rep(1:4, each = 1000),
  data = stan_df_1,
  draws = parameter_draws_1,
  cores = 2
)

set.seed(4711)
loo_ss_1 <- loo_subsample(
  x = llfun_logistic,
  r_eff = r_eff_1,
  draws = parameter_draws_1,
  data = stan_df_1,
  observations = 200,
  cores = 2
)

# compute subsampling loo for a second model (with log-arsenic)

r_eff_2 <- relative_eff(
  llfun_logistic,
  log = FALSE, # relative_eff wants likelihood not log-likelihood values
  chain_id = rep(1:4, each = 1000),
  data = stan_df_2,
  draws = parameter_draws_2,
  cores = 2
)
loo_ss_2 <- loo_subsample(
  x = llfun_logistic,
  r_eff = r_eff_2, 
  draws = parameter_draws_2,
  data = stan_df_2,
  observations = 200,
  cores = 2
)

print(loo_ss_2)
Computed from 4000 by 200 subsampled log-likelihood
values from 3020 total observations.

         Estimate   SE subsampling SE
elpd_loo  -1952.0 16.2            0.2
p_loo         2.6  0.1            0.3
looic      3903.9 32.4            0.4
------
Monte Carlo SE of elpd_loo is 0.0.
MCSE and ESS estimates assume MCMC draws (r_eff in [1.0, 1.1]).

All Pareto k estimates are good (k < 0.7).
See help('pareto-k-diagnostic') for details.

We can now compare the models on LOO using the loo_compare function:

# Compare
comp <- loo_compare(loo_ss_1, loo_ss_2)
print(comp)
Warning: Different subsamples in 'model2' and 'model1'. Naive diff SE is used.

       elpd_diff se_diff subsampling_se_diff
model2  0.0       0.0     0.0               
model1 16.5      22.5     0.4               

This new object, comp, contains the estimated difference in expected leave-one-out predictive performance (elpd) between the two models, along with the standard error. As the warning indicates, because different subsamples were used for the two models, the comparison cannot be based on paired pointwise differences and therefore ignores the correlation between the models' pointwise elpd values. Here we see that the naive SE is 22.5, so we cannot detect any difference in performance between the models.

To force subsampling to use the same observations for each of the models we can simply extract the observations used in loo_ss_1 and use them in loo_ss_2 by supplying the loo_ss_1 object to the observations argument.

loo_ss_2 <-
  loo_subsample(
    x = llfun_logistic,
    r_eff = r_eff_2,
    draws = parameter_draws_2,
    data = stan_df_2,
    observations = loo_ss_1,
    cores = 2
  )

We could also supply the subsampling indices using the obs_idx() helper function:

idx <- obs_idx(loo_ss_1)
loo_ss_2 <- loo_subsample(
  x = llfun_logistic,
  r_eff = r_eff_2, 
  draws = parameter_draws_2,
  data = stan_df_2,
  observations = idx,
  cores = 2
)
Simple random sampling with replacement assumed.

This results in a message indicating that we assume these observations to have been sampled with simple random sampling, which is true because we had used the default "diff_srs" estimator for loo_ss_1.
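
As a quick sanity check (a minimal sketch using the obs_idx() helper introduced above), we can confirm that the two objects are now based on the same set of observation indices:

setequal(obs_idx(loo_ss_1), obs_idx(loo_ss_2))  # should be TRUE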

We can now compare the models and estimate the difference based on the same subsampled observations.

comp <- loo_compare(loo_ss_1, loo_ss_2)
print(comp) 
       elpd_diff se_diff subsampling_se_diff
model2  0.0       0.0     0.0               
model1 16.1       4.4     0.1               

First, notice that the se_diff is now around 4 (as opposed to over 20 when using different subsamples). The first column shows the difference in ELPD relative to the model with the largest ELPD. In this case, the difference in elpd (and its size relative to the approximate standard error of the difference) indicates a preference for the second model (model2). Since the subsampling uncertainty is so small in this case it can effectively be ignored. If we need larger subsamples we can simply add observations using the update() method demonstrated earlier.
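
For example, one might grow both subsamples to a common, larger set of observations before re-running the comparison. This is only a sketch (the subsample size of 600 is arbitrary, and it assumes that, as for loo_subsample(), the updated loo_ss_1 object can be passed as the observations argument so that both models use identical subsamples):

loo_ss_1 <- update(loo_ss_1, observations = 600,
                   r_eff = r_eff_1, draws = parameter_draws_1, data = stan_df_1)
loo_ss_2 <- update(loo_ss_2, observations = loo_ss_1,
                   r_eff = r_eff_2, draws = parameter_draws_2, data = stan_df_2)
loo_compare(loo_ss_1, loo_ss_2)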

It is also possible to compare a subsampled loo computation with a full loo object.

# use loo() instead of loo_subsample() to compute full PSIS-LOO for model 2
loo_full_2 <- loo(
  x = llfun_logistic,
  r_eff = r_eff_2,
  draws = parameter_draws_2,
  data = stan_df_2,
  cores = 2
)
loo_compare(loo_ss_1, loo_full_2)
Estimated elpd_diff using observations included in loo calculations for all models.

Because we are comparing a non-subsampled loo calculation to a subsampled one, we get the message that only the observations included in the loo calculations for both model1 and model2 are used in the comparison.

       elpd_diff se_diff subsampling_se_diff
model2  0.0       0.0     0.0               
model1 16.3       4.4     0.3   

Here we actually see an increase in subsampling_se_diff, but this is due to a technical detail not elaborated on here. In general, the effect of including the full loo object in the comparison should be beneficial or negligible.

References

Gelman, A., and Hill, J. (2007). Data Analysis Using Regression and Multilevel Hierarchical Models. Cambridge University Press.

Stan Development Team (2017). The Stan C++ Library, Version 2.17.0. https://mc-stan.org/

Stan Development Team (2018) RStan: the R interface to Stan, Version 2.17.3. https://mc-stan.org/

Magnusson, M., Riis Andersen, M., Jonasson, J. and Vehtari, A. (2020). Leave-One-Out Cross-Validation for Model Comparison in Large Data. Proceedings of the 23rd International Conference on Artificial Intelligence and Statistics (AISTATS), in PMLR 108. arXiv preprint arXiv:2001.00980.

Magnusson, M., Andersen, M., Jonasson, J. & Vehtari, A. (2019). Bayesian leave-one-out cross-validation for large data. Proceedings of the 36th International Conference on Machine Learning, in PMLR 97:4244-4253 online, arXiv preprint arXiv:1904.10679.

Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. Statistics and Computing. 27(5), 1413–1432. doi:10.1007/s11222-016-9696-4. online, arXiv preprint arXiv:1507.04544.

Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. Journal of Machine Learning Research, 25(72):1-58. PDF

loo/build/0000755000176200001440000000000015122306002012131 5ustar liggesusersloo/build/vignette.rds0000644000176200001440000000112415122306002014466 0ustar liggesusersTMs0U@JQn:rqk:LCUccMe#)ĿFXr"5pAv:>vp%=\ R *;Rfo9++~pYP*W簒Ϲ U%H̦xd*qa͚EeQq7)s%)Hr$h1I^c$Zi5֬4Nmsъl > !/M!QXK+M]oi[Ѱs.@>@SvnK4CK0>1kAQ:041ENBL5r63$NOW#(~zxzp,F\u05Q*k*$p ؖιhg ˏٹj[ *Y뀧p7SDZ͎d\X,+J3L_~:loo/build/partial.rdb0000644000176200001440000000007515122301353014264 0ustar liggesusersb```b`aab`b1g``d`aҬy@D?M7loo/man/0000755000176200001440000000000015122306004011607 5ustar liggesusersloo/man/elpd.Rd0000644000176200001440000000301714411555606013041 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/elpd.R \name{elpd} \alias{elpd} \alias{elpd.array} \alias{elpd.matrix} \title{Generic (expected) log-predictive density} \usage{ elpd(x, ...) \method{elpd}{array}(x, ...) \method{elpd}{matrix}(x, ...) } \arguments{ \item{x}{A log-likelihood array or matrix. The \strong{Methods (by class)} section, below, has detailed descriptions of how to specify the inputs for each method.} \item{...}{Currently ignored.} } \description{ The \code{elpd()} methods for arrays and matrices can compute the expected log pointwise predictive density for a new dataset or the log pointwise predictive density of the observed data (an overestimate of the elpd). } \details{ The \code{elpd()} function is an S3 generic and methods are provided for 3-D pointwise log-likelihood arrays and matrices. } \section{Methods (by class)}{ \itemize{ \item \code{elpd(array)}: An \eqn{I} by \eqn{C} by \eqn{N} array, where \eqn{I} is the number of MCMC iterations per chain, \eqn{C} is the number of chains, and \eqn{N} is the number of data points. \item \code{elpd(matrix)}: An \eqn{S} by \eqn{N} matrix, where \eqn{S} is the size of the posterior sample (with all chains merged) and \eqn{N} is the number of data points. }} \examples{ # Calculate the lpd of the observed data LLarr <- example_loglik_array() elpd(LLarr) } \seealso{ The vignette \emph{Holdout validation and K-fold cross-validation of Stan programs with the loo package} for demonstrations of using the \code{elpd()} methods. } loo/man/importance_sampling.Rd0000644000176200001440000000610414566461605016156 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/importance_sampling.R \name{importance_sampling} \alias{importance_sampling} \alias{importance_sampling.array} \alias{importance_sampling.matrix} \alias{importance_sampling.default} \title{A parent class for different importance sampling methods.} \usage{ importance_sampling(log_ratios, method, ...) \method{importance_sampling}{array}( log_ratios, method, ..., r_eff = 1, cores = getOption("mc.cores", 1) ) \method{importance_sampling}{matrix}( log_ratios, method, ..., r_eff = 1, cores = getOption("mc.cores", 1) ) \method{importance_sampling}{default}(log_ratios, method, ..., r_eff = 1) } \arguments{ \item{log_ratios}{An array, matrix, or vector of importance ratios on the log scale (for PSIS-LOO these are \emph{negative} log-likelihood values). See the \strong{Methods (by class)} section below for a detailed description of how to specify the inputs for each method.} \item{method}{The importance sampling method to use. The following methods are implemented: \itemize{ \item \code{\link[=psis]{"psis"}}: Pareto-Smoothed Importance Sampling (PSIS). Default method. 
\item \code{\link[=tis]{"tis"}}: Truncated Importance Sampling (TIS) with truncation at \code{sqrt(S)}, where \code{S} is the number of posterior draws. \item \code{\link[=sis]{"sis"}}: Standard Importance Sampling (SIS). }} \item{...}{Arguments passed on to the various methods.} \item{r_eff}{Vector of relative effective sample size estimates containing one element per observation. The values provided should be the relative effective sample sizes of \code{1/exp(log_ratios)} (i.e., \code{1/ratios}). This is related to the relative efficiency of estimating the normalizing term in self-normalizing importance sampling. If \code{r_eff} is not provided then the reported PSIS effective sample sizes and Monte Carlo error estimates can be over-optimistic. If the posterior draws are (near) independent then \code{r_eff=1} can be used. \code{r_eff} has to be a scalar (same value is used for all observations) or a vector with length equal to the number of observations. The default value is 1. See the \code{\link[=relative_eff]{relative_eff()}} helper function for computing \code{r_eff}.} \item{cores}{The number of cores to use for parallelization. This defaults to the option \code{mc.cores} which can be set for an entire R session by \code{options(mc.cores = NUMBER)}. The old option \code{loo.cores} is now deprecated but will be given precedence over \code{mc.cores} until \code{loo.cores} is removed in a future release. \strong{As of version 2.0.0 the default is now 1 core if \code{mc.cores} is not set}, but we recommend using as many (or close to as many) cores as possible. \itemize{ \item Note for Windows 10 users: it is \strong{strongly} \href{https://github.com/stan-dev/loo/issues/94}{recommended} to avoid using the \code{.Rprofile} file to set \code{mc.cores} (using the \code{cores} argument or setting \code{mc.cores} interactively or in a script is fine). }} } \description{ A parent class for different importance sampling methods. } loo/man/dot-ndraws.Rd0000644000176200001440000000136514566461605014211 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/loo_subsample.R \name{.ndraws} \alias{.ndraws} \alias{.ndraws.matrix} \alias{.ndraws.default} \title{The number of posterior draws in a draws object.} \usage{ .ndraws(x) \method{.ndraws}{matrix}(x) \method{.ndraws}{default}(x) } \arguments{ \item{x}{A draws object with posterior draws.} } \value{ An integer with the number of draws. } \description{ The number of posterior draws in a draws object. } \details{ This is a generic function to return the total number of draws from an arbitrary draws objects. The function is internal and should only be used by developers to enable \code{\link[=loo_subsample]{loo_subsample()}} for arbitrary draws objects. } \keyword{internal} loo/man/E_loo.Rd0000644000176200001440000001101015100712414013126 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/E_loo.R \name{E_loo} \alias{E_loo} \alias{E_loo.default} \alias{E_loo.matrix} \title{Compute weighted expectations} \usage{ E_loo(x, psis_object, ...) 
\method{E_loo}{default}( x, psis_object, ..., type = c("mean", "variance", "sd", "quantile"), probs = NULL, log_ratios = NULL ) \method{E_loo}{matrix}( x, psis_object, ..., type = c("mean", "variance", "sd", "quantile"), probs = NULL, log_ratios = NULL ) } \arguments{ \item{x}{A numeric vector or matrix.} \item{psis_object}{An object returned by \code{\link[=psis]{psis()}}.} \item{...}{Arguments passed to individual methods.} \item{type}{The type of expectation to compute. The options are \code{"mean"}, \code{"variance"}, \code{"sd"}, and \code{"quantile"}.} \item{probs}{For computing quantiles, a vector of probabilities.} \item{log_ratios}{Optionally, a vector or matrix (the same dimensions as \code{x}) of raw (not smoothed) log ratios. If working with log-likelihood values, the log ratios are the \strong{negative} of those values. If \code{log_ratios} is specified we are able to compute more accurate \link[=pareto-k-diagnostic]{Pareto k} diagnostics specific to \code{E_loo()}.} } \value{ A named list with the following components: \describe{ \item{\code{value}}{ The result of the computation. For the matrix method, \code{value} is a vector with \code{ncol(x)} elements, with one exception: when \code{type="quantile"} and multiple values are specified in \code{probs} the \code{value} component of the returned object is a \code{length(probs)} by \code{ncol(x)} matrix. For the default/vector method the \code{value} component is scalar, with one exception: when \code{type="quantile"} and multiple values are specified in \code{probs} the \code{value} component is a vector with \code{length(probs)} elements. } \item{\code{pareto_k}}{ Function-specific diagnostic. For the matrix method it will be a vector of length \code{ncol(x)} containing estimates of the shape parameter \eqn{k} of the generalized Pareto distribution. For the default/vector method, the estimate is a scalar. If \code{log_ratios} is not specified when calling \code{E_loo()}, the smoothed log-weights are used to estimate Pareto-k's, which may produce optimistic estimates. For \code{type="mean"}, \code{type="var"}, and \code{type="sd"}, the returned Pareto-k is usually the maximum of the Pareto-k's for the left and right tail of \eqn{hr} and the right tail of \eqn{r}, where \eqn{r} is the importance ratio and \eqn{h=x} for \code{type="mean"} and \eqn{h=x^2} for \code{type="var"} and \code{type="sd"}. If \eqn{h} is binary, constant, or not finite, or if \code{type="quantile"}, the returned Pareto-k is the Pareto-k for the right tail of \eqn{r}. } } } \description{ The \code{E_loo()} function computes weighted expectations (means, variances, quantiles) using the importance weights obtained from the \link[=psis]{PSIS} smoothing procedure. The expectations estimated by the \code{E_loo()} function assume that the PSIS approximation is working well. \strong{A small \link[=pareto-k-diagnostic]{Pareto k} estimate is necessary, but not sufficient, for \code{E_loo()} to give reliable estimates}. If the \code{log_ratios} argument is provided, \code{E_loo()} also computes a function specific Pareto k diagnostic, which must also be small for a reliable estimate. See more details below. 
} \examples{ \donttest{ if (requireNamespace("rstanarm", quietly = TRUE)) { # Use rstanarm package to quickly fit a model and get both a log-likelihood # matrix and draws from the posterior predictive distribution library("rstanarm") # data from help("lm") ctl <- c(4.17,5.58,5.18,6.11,4.50,4.61,5.17,4.53,5.33,5.14) trt <- c(4.81,4.17,4.41,3.59,5.87,3.83,6.03,4.89,4.32,4.69) d <- data.frame( weight = c(ctl, trt), group = gl(2, 10, 20, labels = c("Ctl","Trt")) ) fit <- stan_glm(weight ~ group, data = d, refresh = 0) yrep <- posterior_predict(fit) dim(yrep) log_ratios <- -1 * log_lik(fit) dim(log_ratios) r_eff <- relative_eff(exp(-log_ratios), chain_id = rep(1:4, each = 1000)) psis_object <- psis(log_ratios, r_eff = r_eff, cores = 2) E_loo(yrep, psis_object, type = "mean") E_loo(yrep, psis_object, type = "var") E_loo(yrep, psis_object, type = "sd") E_loo(yrep, psis_object, type = "quantile", probs = 0.5) # median E_loo(yrep, psis_object, type = "quantile", probs = c(0.1, 0.9)) # We can get more accurate Pareto k diagnostic if we also provide # the log_ratios argument E_loo(yrep, psis_object, type = "mean", log_ratios = log_ratios) } } } loo/man/kfold-helpers.Rd0000644000176200001440000000365613575772017014674 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/kfold-helpers.R \name{kfold-helpers} \alias{kfold-helpers} \alias{kfold_split_random} \alias{kfold_split_stratified} \alias{kfold_split_grouped} \title{Helper functions for K-fold cross-validation} \usage{ kfold_split_random(K = 10, N = NULL) kfold_split_stratified(K = 10, x = NULL) kfold_split_grouped(K = 10, x = NULL) } \arguments{ \item{K}{The number of folds to use.} \item{N}{The number of observations in the data.} \item{x}{A discrete variable of length \code{N} with at least \code{K} levels (unique values). Will be coerced to a \link[=factor]{factor}.} } \value{ An integer vector of length \code{N} where each element is an index in \code{1:K}. } \description{ These functions can be used to generate indexes for use with K-fold cross-validation. See the \strong{Details} section for explanations. } \details{ \code{kfold_split_random()} splits the data into \code{K} groups of equal size (or roughly equal size). For a categorical variable \code{x} \code{kfold_split_stratified()} splits the observations into \code{K} groups ensuring that relative category frequencies are approximately preserved. For a grouping variable \code{x}, \code{kfold_split_grouped()} places all observations in \code{x} from the same group/level together in the same fold. The selection of which groups/levels go into which fold (relevant when when there are more groups than folds) is randomized. 
} \examples{ ids <- kfold_split_random(K = 5, N = 20) print(ids) table(ids) x <- sample(c(0, 1), size = 200, replace = TRUE, prob = c(0.05, 0.95)) table(x) ids <- kfold_split_stratified(K = 5, x = x) print(ids) table(ids, x) grp <- gl(n = 50, k = 15, labels = state.name) length(grp) head(table(grp)) ids_10 <- kfold_split_grouped(K = 10, x = grp) (tab_10 <- table(grp, ids_10)) colSums(tab_10) ids_9 <- kfold_split_grouped(K = 9, x = grp) (tab_9 <- table(grp, ids_9)) colSums(tab_9) } loo/man/extract_log_lik.Rd0000644000176200001440000000461313762013700015262 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/extract_log_lik.R \name{extract_log_lik} \alias{extract_log_lik} \title{Extract pointwise log-likelihood from a Stan model} \usage{ extract_log_lik(stanfit, parameter_name = "log_lik", merge_chains = TRUE) } \arguments{ \item{stanfit}{A \code{stanfit} object (\pkg{rstan} package).} \item{parameter_name}{A character string naming the parameter (or generated quantity) in the Stan model corresponding to the log-likelihood.} \item{merge_chains}{If \code{TRUE} (the default), all Markov chains are merged together (i.e., stacked) and a matrix is returned. If \code{FALSE} they are kept separate and an array is returned.} } \value{ If \code{merge_chains=TRUE}, an \eqn{S} by \eqn{N} matrix of (post-warmup) extracted draws, where \eqn{S} is the size of the posterior sample and \eqn{N} is the number of data points. If \code{merge_chains=FALSE}, an \eqn{I} by \eqn{C} by \eqn{N} array, where \eqn{I \times C = S}{I * C = S}. } \description{ Convenience function for extracting the pointwise log-likelihood matrix or array from a \code{stanfit} object from the \pkg{rstan} package. Note: recent versions of \pkg{rstan} now include a \code{loo()} method for \code{stanfit} objects that handles this internally. } \details{ Stan does not automatically compute and store the log-likelihood. It is up to the user to incorporate it into the Stan program if it is to be extracted after fitting the model. In a Stan model, the pointwise log likelihood can be coded as a vector in the transformed parameters block (and then summed up in the model block) or it can be coded entirely in the generated quantities block. We recommend using the generated quantities block so that the computations are carried out only once per iteration rather than once per HMC leapfrog step. For example, the following is the \verb{generated quantities} block for computing and saving the log-likelihood for a linear regression model with \code{N} data points, outcome \code{y}, predictor matrix \code{X}, coefficients \code{beta}, and standard deviation \code{sigma}: \verb{vector[N] log_lik;} \code{for (n in 1:N) log_lik[n] = normal_lpdf(y[n] | X[n, ] * beta, sigma);} } \references{ Stan Development Team (2017). The Stan C++ Library, Version 2.16.0. \url{https://mc-stan.org/} Stan Development Team (2017). RStan: the R interface to Stan, Version 2.16.1. \url{https://mc-stan.org/} } loo/man/loo-glossary.Rd0000644000176200001440000002166215122076370014551 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/loo-glossary.R \name{loo-glossary} \alias{loo-glossary} \title{LOO package glossary} \description{ The pages provides definitions to key terms. Also see the \href{https://mc-stan.org/loo/articles/online-only/faq.html}{FAQ page} on the \strong{loo} website for answers to frequently asked questions. Note: VGG2017 refers to Vehtari, Gelman, and Gabry (2017). 
See \strong{References}, below. } \section{ELPD and \code{elpd_loo}}{ The ELPD is the theoretical expected log pointwise predictive density for a new dataset (Eq 1 in VGG2017), which can be estimated, e.g., using cross-validation. \code{elpd_loo} is the Bayesian LOO estimate of the expected log pointwise predictive density (Eq 4 in VGG2017) and is a sum of N individual pointwise log predictive densities. Probability densities can be smaller or larger than 1, and thus log predictive densities can be negative or positive. For simplicity the ELPD acronym is used also for expected log pointwise predictive probabilities for discrete models. Probabilities are always equal or less than 1, and thus log predictive probabilities are 0 or negative. } \section{Standard error of \code{elpd_loo}}{ As \code{elpd_loo} is defined as the sum of N independent components (Eq 4 in VGG2017), we can compute the standard error by using the standard deviation of the N components and multiplying by \code{sqrt(N)} (Eq 23 in VGG2017). This standard error is a coarse description of our uncertainty about the predictive performance for unknown future data. When N is small or there is severe model misspecification, the current SE estimate is overoptimistic and the actual SE can even be twice as large. Even for moderate N, when the SE estimate is an accurate estimate for the scale, it ignores the skewness. When making model comparisons, the SE of the component-wise (pairwise) differences should be used instead (see the \code{se_diff} section below and Eq 24 in VGG2017). Sivula et al. (2022) discuss the conditions when the normal approximation used for SE and \code{se_diff} is good. } \section{Monte Carlo SE of elpd_loo}{ The Monte Carlo standard error is the estimate for the computational accuracy of MCMC and importance sampling used to compute \code{elpd_loo}. Usually this is negligible compared to the standard describing the uncertainty due to finite number of observations (Eq 23 in VGG2017). } \section{\code{p_loo} (effective number of parameters)}{ \code{p_loo} is the difference between \code{elpd_loo} and the non-cross-validated log posterior predictive density. It describes how much more difficult it is to predict future data than the observed data. Asymptotically under certain regularity conditions, \code{p_loo} can be interpreted as the \emph{effective number of parameters}. In well behaving cases \code{p_loo < N} and \code{p_loo < p}, where \code{p} is the total number of parameters in the model. \code{p_loo > N} or \code{p_loo > p} indicates that the model has very weak predictive capability and may indicate a severe model misspecification. See below for more on interpreting \code{p_loo} when there are warnings about high Pareto k diagnostic values. } \section{Pareto k estimates}{ The Pareto \eqn{k} estimate is a diagnostic for Pareto smoothed importance sampling (PSIS), which is used to compute components of \code{elpd_loo}. In importance-sampling LOO the full posterior distribution is used as the proposal distribution. The Pareto k diagnostic estimates how far an individual leave-one-out distribution is from the full distribution. If leaving out an observation changes the posterior too much then importance sampling is not able to give a reliable estimate. Pareto smoothing stabilizes importance sampling and guarantees a finite variance estimate at the cost of some bias. 
The diagnostic threshold for Pareto \eqn{k} depends on sample size \eqn{S} (the sample-size-dependent threshold was introduced by Vehtari et al., 2024, and before that fixed thresholds of 0.5 and 0.7 were recommended). For simplicity, the \code{loo} package uses the nominal sample size \eqn{S} when computing the sample-size-specific threshold. This provides an optimistic threshold if the effective sample size is less than 2200, but even then, if ESS/S > 1/2, the difference is usually negligible. Thinning of MCMC draws can be used to improve the ratio ESS/S. \itemize{ \item If \eqn{k < \min(1 - 1 / \log_{10}(S), 0.7)}{k < min(1 - 1/log10(S), 0.7)}, where \eqn{S} is the sample size, the PSIS estimate and the corresponding Monte Carlo standard error estimate are reliable. \item If \eqn{1 - 1 / \log_{10}(S) \leq k < 0.7}{1 - 1/log10(S) <= k < 0.7}, the PSIS estimate and the corresponding Monte Carlo standard error estimate are not reliable, but increasing the (effective) sample size \eqn{S} above 2200 may help (this will increase the sample-size-specific threshold, since \eqn{1 - 1 / \log_{10}(2200) > 0.7}{1 - 1/log10(2200) > 0.7}, and then the bias-specific threshold 0.7 dominates). \item If \eqn{0.7 \leq k < 1}{0.7 <= k < 1}, the PSIS estimate and the corresponding Monte Carlo standard error have large bias and are not reliable. Increasing the sample size may reduce the variability in the \eqn{k} estimate, which may also result in a lower \eqn{k} estimate. \item If \eqn{k \geq 1}{k >= 1}, the target distribution is estimated to have non-finite mean. The PSIS estimate and the corresponding Monte Carlo standard error are not well defined. Increasing the sample size may reduce the variability in the \eqn{k} estimate, which may also result in a lower \eqn{k} estimate. } Pareto \eqn{k} is also useful as a measure of influence of an observation. Highly influential observations have high \eqn{k} values. Very high \eqn{k} values often indicate model misspecification, outliers, or mistakes in data processing. See Section 6 of Gabry et al. (2019) for an example. \subsection{Interpreting \code{p_loo} when Pareto \code{k} is large}{ If \eqn{k > 0.7}, then we can also look at the \code{p_loo} estimate for some additional information about the problem: \itemize{ \item If \verb{p_loo << p} (the total number of parameters in the model), then the model is likely to be misspecified. Posterior predictive checks (PPCs) are then likely to also detect the problem. Try using an overdispersed model, or add more structural information (nonlinearity, mixture model, etc.). \item If \code{p_loo < p} and the number of parameters \code{p} is relatively large compared to the number of observations (e.g., \code{p>N/5}), it is likely that the model is so flexible or the population prior so weak that it is difficult to predict the left-out observation (even for the true model). This happens, for example, in the simulated 8 schools (in VGG2017), random effect models with a few observations per random effect, and Gaussian processes and spatial models with short correlation lengths. \item If \code{p_loo > p}, then the model is likely to be badly misspecified. If the number of parameters \verb{p<N/5} (more accurately, we should count the number of observations influencing each parameter, since in hierarchical models some groups may have few observations and other groups many), it is possible that PPCs won't detect the problem. } } } \section{elpd_diff}{ \code{elpd_diff} is the difference in \code{elpd_loo} for two models. If more than two models are compared, the difference is computed relative to the model with the highest \code{elpd_loo}. 
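As a schematic sketch (here \code{loo1} and \code{loo2} are assumed to be existing \code{"psis_loo"} objects for two models fit to the same data; they are placeholders, not objects created elsewhere on this page):

\preformatted{# loo1 and loo2 are assumed "psis_loo" objects for two models
comp <- loo_compare(loo1, loo2)
print(comp)
# the first row is the model with the highest elpd_loo (elpd_diff = 0);
# subsequent rows report elpd_diff and se_diff relative to that model
}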
} \section{se_diff}{ The standard error of component-wise differences of elpd_loo (Eq 24 in VGG2017) between two models. This SE is \emph{smaller} than the SE for individual models due to correlation (i.e., if some observations are easier and some more difficult to predict for all models). } \references{ Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. \emph{Statistics and Computing}. 27(5), 1413--1432. doi:10.1007/s11222-016-9696-4 (\href{https://link.springer.com/article/10.1007/s11222-016-9696-4}{journal version}, \href{https://arxiv.org/abs/1507.04544}{preprint arXiv:1507.04544}). Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. \emph{Journal of Machine Learning Research}, 25(72):1-58. \href{https://jmlr.org/papers/v25/19-556.html}{PDF} Sivula, T, Magnusson, M., Matamoros A. A., and Vehtari, A. (2025). Uncertainty in Bayesian leave-one-out cross-validation based model comparison. \emph{Bayesian Analysis}. \doi{10.1214/25-BA1569}. Gabry, J. , Simpson, D. , Vehtari, A. , Betancourt, M. and Gelman, A. (2019), Visualization in Bayesian workflow. \emph{J. R. Stat. Soc. A}, 182: 389-402. doi:10.1111/rssa.12378 (\href{https://rss.onlinelibrary.wiley.com/doi/full/10.1111/rssa.12378}{journal version}, \href{https://arxiv.org/abs/1709.01449}{preprint arXiv:1709.01449}, \href{https://github.com/jgabry/bayes-vis-paper}{code on GitHub}) } loo/man/old-extractors.Rd0000644000176200001440000000126213267637066015100 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/loo.R \name{old-extractors} \alias{old-extractors} \alias{[.loo} \alias{[[.loo} \alias{$.loo} \title{Extractor methods} \usage{ \method{[}{loo}(x, i) \method{[[}{loo}(x, i, exact = TRUE) \method{$}{loo}(x, name) } \arguments{ \item{x, i, exact, name}{See \link{Extract}.} } \description{ These are only defined in order to deprecate with a warning (rather than remove and break backwards compatibility) the old way of accessing the point estimates in a \code{"psis_loo"} or \code{"psis"} object. The new way as of v2.0.0 is to get them from the \code{"estimates"} component of the object. } \keyword{internal} loo/man/compare.Rd0000644000176200001440000000652315075016621013544 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/compare.R \name{compare} \alias{compare} \title{Model comparison (deprecated, old version)} \usage{ compare(..., x = list()) } \arguments{ \item{...}{At least two objects returned by \code{\link[=loo]{loo()}} (or \code{\link[=waic]{waic()}}).} \item{x}{A list of at least two objects returned by \code{\link[=loo]{loo()}} (or \code{\link[=waic]{waic()}}). This argument can be used as an alternative to specifying the objects in \code{...}.} } \value{ A vector or matrix with class \code{'compare.loo'} that has its own print method. If exactly two objects are provided in \code{...} or \code{x}, then the difference in expected predictive accuracy and the standard error of the difference are returned. If more than two objects are provided then a matrix of summary information is returned (see \strong{Details}). } \description{ \strong{This function is deprecated}. Please use the new \code{\link[=loo_compare]{loo_compare()}} function instead. 
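As a sketch of the migration (with \code{loo1} and \code{loo2} denoting \code{"loo"} objects as in the \strong{Examples} below):

\preformatted{# old, deprecated call:
# compare(loo1, loo2)
# replacement:
loo_compare(loo1, loo2)
}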
} \details{ When comparing two fitted models, we can estimate the difference in their expected predictive accuracy by the difference in \code{elpd_loo} or \code{elpd_waic} (or multiplied by -2, if desired, to be on the deviance scale). \emph{When that difference, \code{elpd_diff}, is positive then the expected predictive accuracy for the second model is higher. A negative \code{elpd_diff} favors the first model.} When using \code{compare()} with more than two models, the values in the \code{elpd_diff} and \code{se_diff} columns of the returned matrix are computed by making pairwise comparisons between each model and the model with the best ELPD (i.e., the model in the first row). Although the \code{elpd_diff} column is equal to the difference in \code{elpd_loo}, do not expect the \code{se_diff} column to be equal to the the difference in \code{se_elpd_loo}. To compute the standard error of the difference in ELPD we use a paired estimate to take advantage of the fact that the same set of \emph{N} data points was used to fit both models. These calculations should be most useful when \emph{N} is large, because then non-normality of the distribution is not such an issue when estimating the uncertainty in these sums. These standard errors, for all their flaws, should give a better sense of uncertainty than what is obtained using the current standard approach of comparing differences of deviances to a Chi-squared distribution, a practice derived for Gaussian linear models or asymptotically, and which only applies to nested models in any case. } \examples{ \dontrun{ loo1 <- loo(log_lik1) loo2 <- loo(log_lik2) print(compare(loo1, loo2), digits = 3) print(compare(x = list(loo1, loo2))) waic1 <- waic(log_lik1) waic2 <- waic(log_lik2) compare(waic1, waic2) } } \references{ Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. \emph{Statistics and Computing}. 27(5), 1413--1432. doi:10.1007/s11222-016-9696-4 (\href{https://link.springer.com/article/10.1007/s11222-016-9696-4}{journal version}, \href{https://arxiv.org/abs/1507.04544}{preprint arXiv:1507.04544}). Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. \emph{Journal of Machine Learning Research}, 25(72):1-58. \href{https://jmlr.org/papers/v25/19-556.html}{PDF} } loo/man/waic.Rd0000644000176200001440000001234314641333357013044 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/waic.R \name{waic} \alias{waic} \alias{waic.array} \alias{waic.matrix} \alias{waic.function} \alias{is.waic} \title{Widely applicable information criterion (WAIC)} \usage{ waic(x, ...) \method{waic}{array}(x, ...) \method{waic}{matrix}(x, ...) \method{waic}{`function`}(x, ..., data = NULL, draws = NULL) is.waic(x) } \arguments{ \item{x}{A log-likelihood array, matrix, or function. The \strong{Methods (by class)} section, below, has detailed descriptions of how to specify the inputs for each method.} \item{draws, data, ...}{For the function method only. See the \strong{Methods (by class)} section below for details on these arguments.} } \value{ A named list (of class \code{c("waic", "loo")}) with components: \describe{ \item{\code{estimates}}{ A matrix with two columns (\code{"Estimate"}, \code{"SE"}) and three rows (\code{"elpd_waic"}, \code{"p_waic"}, \code{"waic"}). 
This contains point estimates and standard errors of the expected log pointwise predictive density (\code{elpd_waic}), the effective number of parameters (\code{p_waic}) and the information criterion \code{waic} (which is just \code{-2 * elpd_waic}, i.e., converted to deviance scale). } \item{\code{pointwise}}{ A matrix with three columns (and number of rows equal to the number of observations) containing the pointwise contributions of each of the above measures (\code{elpd_waic}, \code{p_waic}, \code{waic}). } } } \description{ The \code{waic()} methods can be used to compute WAIC from the pointwise log-likelihood. However, we recommend LOO-CV using PSIS (as implemented by the \code{\link[=loo]{loo()}} function) because PSIS provides useful diagnostics as well as effective sample size and Monte Carlo estimates. } \section{Methods (by class)}{ \itemize{ \item \code{waic(array)}: An \eqn{I} by \eqn{C} by \eqn{N} array, where \eqn{I} is the number of MCMC iterations per chain, \eqn{C} is the number of chains, and \eqn{N} is the number of data points. \item \code{waic(matrix)}: An \eqn{S} by \eqn{N} matrix, where \eqn{S} is the size of the posterior sample (with all chains merged) and \eqn{N} is the number of data points. \item \code{waic(`function`)}: A function \code{f()} that takes arguments \code{data_i} and \code{draws} and returns a vector containing the log-likelihood for a single observation \code{i} evaluated at each posterior draw. The function should be written such that, for each observation \code{i} in \code{1:N}, evaluating \if{html}{\out{
}}\preformatted{f(data_i = data[i,, drop=FALSE], draws = draws) }\if{html}{\out{
}} results in a vector of length \code{S} (size of posterior sample). The log-likelihood function can also have additional arguments but \code{data_i} and \code{draws} are required. If using the function method then the arguments \code{data} and \code{draws} must also be specified in the call to \code{loo()}: \itemize{ \item \code{data}: A data frame or matrix containing the data (e.g. observed outcome and predictors) needed to compute the pointwise log-likelihood. For each observation \code{i}, the \code{i}th row of \code{data} will be passed to the \code{data_i} argument of the log-likelihood function. \item \code{draws}: An object containing the posterior draws for any parameters needed to compute the pointwise log-likelihood. Unlike \code{data}, which is indexed by observation, for each observation the entire object \code{draws} will be passed to the \code{draws} argument of the log-likelihood function. \item The \code{...} can be used if your log-likelihood function takes additional arguments. These arguments are used like the \code{draws} argument in that they are recycled for each observation. } }} \examples{ ### Array and matrix methods LLarr <- example_loglik_array() dim(LLarr) LLmat <- example_loglik_matrix() dim(LLmat) waic_arr <- waic(LLarr) waic_mat <- waic(LLmat) identical(waic_arr, waic_mat) \dontrun{ log_lik1 <- extract_log_lik(stanfit1) log_lik2 <- extract_log_lik(stanfit2) (waic1 <- waic(log_lik1)) (waic2 <- waic(log_lik2)) print(compare(waic1, waic2), digits = 2) } } \references{ Watanabe, S. (2010). Asymptotic equivalence of Bayes cross validation and widely application information criterion in singular learning theory. \emph{Journal of Machine Learning Research} \strong{11}, 3571-3594. Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. \emph{Statistics and Computing}. 27(5), 1413--1432. doi:10.1007/s11222-016-9696-4 (\href{https://link.springer.com/article/10.1007/s11222-016-9696-4}{journal version}, \href{https://arxiv.org/abs/1507.04544}{preprint arXiv:1507.04544}). Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. \emph{Journal of Machine Learning Research}, 25(72):1-58. \href{https://jmlr.org/papers/v25/19-556.html}{PDF} } \seealso{ \itemize{ \item The \strong{loo} package \href{https://mc-stan.org/loo/articles/}{vignettes} and Vehtari, Gelman, and Gabry (2017) and Vehtari, Simpson, Gelman, Yao, and Gabry (2024) for more details on why we prefer \code{loo()} to \code{waic()}. \item \code{\link[=loo_compare]{loo_compare()}} for comparing models on approximate LOO-CV or WAIC. } } loo/man/ap_psis.Rd0000644000176200001440000000441714411555606013560 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/psis_approximate_posterior.R \name{ap_psis} \alias{ap_psis} \alias{ap_psis.array} \alias{ap_psis.matrix} \alias{ap_psis.default} \title{Pareto smoothed importance sampling (PSIS) using approximate posteriors} \usage{ ap_psis(log_ratios, log_p, log_g, ...) \method{ap_psis}{array}(log_ratios, log_p, log_g, ..., cores = getOption("mc.cores", 1)) \method{ap_psis}{matrix}(log_ratios, log_p, log_g, ..., cores = getOption("mc.cores", 1)) \method{ap_psis}{default}(log_ratios, log_p, log_g, ...) } \arguments{ \item{log_ratios}{The log-likelihood ratios (ie -log_liks)} \item{log_p}{The log-posterior (target) evaluated at S samples from the proposal distribution (g). 
A vector of length S.} \item{log_g}{The log-density (proposal) evaluated at S samples from the proposal distribution (g). A vector of length S.} \item{...}{Currently not in use.} \item{cores}{The number of cores to use for parallelization. This defaults to the option \code{mc.cores} which can be set for an entire R session by \code{options(mc.cores = NUMBER)}. The old option \code{loo.cores} is now deprecated but will be given precedence over \code{mc.cores} until \code{loo.cores} is removed in a future release. \strong{As of version 2.0.0 the default is now 1 core if \code{mc.cores} is not set}, but we recommend using as many (or close to as many) cores as possible. \itemize{ \item Note for Windows 10 users: it is \strong{strongly} \href{https://github.com/stan-dev/loo/issues/94}{recommended} to avoid using the \code{.Rprofile} file to set \code{mc.cores} (using the \code{cores} argument or setting \code{mc.cores} interactively or in a script is fine). }} } \description{ Pareto smoothed importance sampling (PSIS) using approximate posteriors } \section{Methods (by class)}{ \itemize{ \item \code{ap_psis(array)}: An \eqn{I} by \eqn{C} by \eqn{N} array, where \eqn{I} is the number of MCMC iterations per chain, \eqn{C} is the number of chains, and \eqn{N} is the number of data points. \item \code{ap_psis(matrix)}: An \eqn{S} by \eqn{N} matrix, where \eqn{S} is the size of the posterior sample (with all chains merged) and \eqn{N} is the number of data points. \item \code{ap_psis(default)}: A vector of length \eqn{S} (posterior sample size). }} loo/man/loo-package.Rd0000644000176200001440000001363115122067200014266 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/loo-package.R \docType{package} \name{loo-package} \alias{loo-package} \title{Efficient LOO-CV and WAIC for Bayesian models} \description{ \if{html}{ \figure{stanlogo.png}{options: width="50" alt="mc-stan.org"} } \emph{Stan Development Team} This package implements the methods described in Vehtari, Gelman, and Gabry (2017), Vehtari, Simpson, Gelman, Yao, and Gabry (2024), and Yao et al. (2018). To get started see the \strong{loo} package \href{https://mc-stan.org/loo/articles/index.html}{vignettes}, the \code{\link[=loo]{loo()}} function for efficient approximate leave-one-out cross-validation (LOO-CV), the \code{\link[=psis]{psis()}} function for the Pareto smoothed importance sampling (PSIS) algorithm, or \code{\link[=loo_model_weights]{loo_model_weights()}} for an implementation of Bayesian stacking of predictive distributions from multiple models. } \details{ Leave-one-out cross-validation (LOO-CV) and the widely applicable information criterion (WAIC) are methods for estimating pointwise out-of-sample prediction accuracy from a fitted Bayesian model using the log-likelihood evaluated at the posterior simulations of the parameter values. LOO-CV and WAIC have various advantages over simpler estimates of predictive error such as AIC and DIC but are less used in practice because they involve additional computational steps. This package implements the fast and stable computations for approximate LOO-CV laid out in Vehtari, Gelman, and Gabry (2017). From existing posterior simulation draws, we compute LOO-CV using Pareto smoothed importance sampling (PSIS; Vehtari, Simpson, Gelman, Yao, and Gabry, 2024), a new procedure for stabilizing and diagnosing importance weights. 
As a byproduct of our calculations, we also obtain approximate standard errors for estimated predictive errors and for comparing of predictive errors between two models. We recommend PSIS-LOO-CV instead of WAIC, because PSIS provides useful diagnostics and effective sample size and Monte Carlo standard error estimates. } \references{ Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. \emph{Statistics and Computing}. 27(5), 1413--1432. doi:10.1007/s11222-016-9696-4 (\href{https://link.springer.com/article/10.1007/s11222-016-9696-4}{journal version}, \href{https://arxiv.org/abs/1507.04544}{preprint arXiv:1507.04544}). Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. \emph{Journal of Machine Learning Research}, 25(72):1-58. \href{https://jmlr.org/papers/v25/19-556.html}{PDF} Yao, Y., Vehtari, A., Simpson, D., and Gelman, A. (2018) Using stacking to average Bayesian predictive distributions. \emph{Bayesian Analysis}, advance publication, doi:10.1214/17-BA1091. (\href{https://projecteuclid.org/euclid.ba/1516093227}{online}). Magnusson, M., Riis Andersen, M., Jonasson, J. and Vehtari, A. (2019). Leave-One-Out Cross-Validation for Large Data. In \emph{Thirty-sixth International Conference on Machine Learning}, PMLR 97:4244-4253. Magnusson, M., Riis Andersen, M., Jonasson, J. and Vehtari, A. (2020). Leave-One-Out Cross-Validation for Model Comparison in Large Data. In \emph{Proceedings of the 23rd International Conference on Artificial Intelligence and Statistics (AISTATS)}, PMLR 108:341-351. Epifani, I., MacEachern, S. N., and Peruggia, M. (2008). Case-deletion importance sampling estimators: Central limit theorems and related results. \emph{Electronic Journal of Statistics} \strong{2}, 774-806. Gelfand, A. E. (1996). Model determination using sampling-based methods. In \emph{Markov Chain Monte Carlo in Practice}, ed. W. R. Gilks, S. Richardson, D. J. Spiegelhalter, 145-162. London: Chapman and Hall. Gelfand, A. E., Dey, D. K., and Chang, H. (1992). Model determination using predictive distributions with implementation via sampling-based methods. In \emph{Bayesian Statistics 4}, ed. J. M. Bernardo, J. O. Berger, A. P. Dawid, and A. F. M. Smith, 147-167. Oxford University Press. Gelman, A., Hwang, J., and Vehtari, A. (2014). Understanding predictive information criteria for Bayesian models. \emph{Statistics and Computing} \strong{24}, 997-1016. Ionides, E. L. (2008). Truncated importance sampling. \emph{Journal of Computational and Graphical Statistics} \strong{17}, 295-311. Koopman, S. J., Shephard, N., and Creal, D. (2009). Testing the assumptions behind importance sampling. \emph{Journal of Econometrics} \strong{149}, 2-11. Peruggia, M. (1997). On the variability of case-deletion importance sampling weights in the Bayesian linear model. \emph{Journal of the American Statistical Association} \strong{92}, 199-207. Stan Development Team (2017). The Stan C++ Library, Version 2.17.0. \url{https://mc-stan.org}. Stan Development Team (2018). RStan: the R interface to Stan, Version 2.17.3. \url{https://mc-stan.org}. Watanabe, S. (2010). Asymptotic equivalence of Bayes cross validation and widely application information criterion in singular learning theory. \emph{Journal of Machine Learning Research} \strong{11}, 3571-3594. Zhang, J., and Stephens, M. A. (2009). A new and efficient estimation method for the generalized Pareto distribution. 
\emph{Technometrics} \strong{51}, 316-325. } \seealso{ Useful links: \itemize{ \item \url{https://mc-stan.org/loo/} \item \url{https://discourse.mc-stan.org} \item Report bugs at \url{https://github.com/stan-dev/loo/issues} } } \author{ \strong{Maintainer}: Jonah Gabry \email{jgabry@gmail.com} Authors: \itemize{ \item Aki Vehtari \email{Aki.Vehtari@aalto.fi} \item Måns Magnusson \item Yuling Yao \item Paul-Christian Bürkner \item Topi Paananen \item Andrew Gelman } Other contributors: \itemize{ \item Ben Goodrich [contributor] \item Juho Piironen [contributor] \item Bruno Nicenboim [contributor] \item Leevi Lindgren [contributor] \item Visruth Srimath Kandali [contributor] } } loo/man/obs_idx.Rd0000644000176200001440000000123013575772017013546 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/loo_subsample.R \name{obs_idx} \alias{obs_idx} \title{Get observation indices used in subsampling} \usage{ obs_idx(x, rep = TRUE) } \arguments{ \item{x}{A \code{psis_loo_ss} object.} \item{rep}{If sampling with replacement is used, an observation can have multiple samples and these are then repeated in the returned object if \code{rep=TRUE} (e.g., a vector \code{c(1,1,2)} indicates that observation 1 has been subampled two times). If \code{rep=FALSE} only the unique indices are returned.} } \value{ An integer vector. } \description{ Get observation indices used in subsampling } loo/man/loo_subsample.Rd0000644000176200001440000002267214641333357014773 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/loo_subsample.R \name{loo_subsample} \alias{loo_subsample} \alias{loo_subsample.function} \title{Efficient approximate leave-one-out cross-validation (LOO) using subsampling, so that less costly and more approximate computation is made for all LOO-fold, and more costly and accurate computations are made only for m}}\preformatted{f(data_i = data[i,, drop=FALSE], draws = draws) }\if{html}{\out{}} results in a vector of length \code{S} (size of posterior sample). The log-likelihood function can also have additional arguments but \code{data_i} and \code{draws} are required. If using the function method then the arguments \code{data} and \code{draws} must also be specified in the call to \code{loo()}: \itemize{ \item \code{data}: A data frame or matrix containing the data (e.g. observed outcome and predictors) needed to compute the pointwise log-likelihood. For each observation \code{i}, the \code{i}th row of \code{data} will be passed to the \code{data_i} argument of the log-likelihood function. \item \code{draws}: An object containing the posterior draws for any parameters needed to compute the pointwise log-likelihood. Unlike \code{data}, which is indexed by observation, for each observation the entire object \code{draws} will be passed to the \code{draws} argument of the log-likelihood function. \item The \code{...} can be used if your log-likelihood function takes additional arguments. These arguments are used like the \code{draws} argument in that they are recycled for each observation. } }} \references{ Magnusson, M., Riis Andersen, M., Jonasson, J. and Vehtari, A. (2019). Leave-One-Out Cross-Validation for Large Data. In \emph{Thirty-sixth International Conference on Machine Learning}, PMLR 97:4244-4253. Magnusson, M., Riis Andersen, M., Jonasson, J. and Vehtari, A. (2020). Leave-One-Out Cross-Validation for Model Comparison in Large Data. 
In \emph{Proceedings of the 23rd International Conference on Artificial Intelligence and Statistics (AISTATS)}, PMLR 108:341-351. } \seealso{ \code{\link[=loo]{loo()}}, \code{\link[=psis]{psis()}}, \code{\link[=loo_compare]{loo_compare()}} } loo/man/loo-datasets.Rd0000644000176200001440000000346614411555606014524 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/datasets.R \name{loo-datasets} \alias{loo-datasets} \alias{Kline} \alias{milk} \alias{voice} \alias{voice_loo} \title{Datasets for loo examples and vignettes} \description{ Small datasets for use in \strong{loo} examples and vignettes. The \code{Kline} and \code{milk} datasets are also included in the \strong{rethinking} package (McElreath, 2016a), but we include them here as \strong{rethinking} is not on CRAN. } \details{ Currently the data sets included are: \itemize{ \item \code{Kline}: Small dataset from Kline and Boyd (2010) on tool complexity and demography in Oceanic islands societies. This data is discussed in detail in McElreath (2016a,2016b). \href{https://www.rdocumentation.org/packages/rethinking/versions/1.59/topics/Kline}{(Link to variable descriptions)} \item \code{milk}: Small dataset from Hinde and Milligan (2011) on primate milk composition.This data is discussed in detail in McElreath (2016a,2016b). \href{https://www.rdocumentation.org/packages/rethinking/versions/1.59/topics/milk}{(Link to variable descriptions)} \item \code{voice}: Voice rehabilitation data from Tsanas et al. (2014). } } \examples{ str(Kline) str(milk) } \references{ Hinde and Milligan. 2011. \emph{Evolutionary Anthropology} 20:9-23. Kline, M.A. and R. Boyd. 2010. \emph{Proc R Soc B} 277:2559-2564. McElreath, R. (2016a). rethinking: Statistical Rethinking book package. R package version 1.59. McElreath, R. (2016b). \emph{Statistical rethinking: A Bayesian course with examples in R and Stan}. Chapman & Hall/CRC. A. Tsanas, M.A. Little, C. Fox, L.O. Ramig: Objective automatic assessment of rehabilitative speech treatment in Parkinson's disease, IEEE Transactions on Neural Systems and Rehabilitation Engineering, Vol. 22, pp. 181-190, January 2014 } loo/man/print_dims.Rd0000644000176200001440000000162013575772017014272 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/print.R \name{print_dims} \alias{print_dims} \alias{print_dims.importance_sampling} \alias{print_dims.psis_loo} \alias{print_dims.importance_sampling_loo} \alias{print_dims.waic} \alias{print_dims.kfold} \alias{print_dims.psis_loo_ss} \title{Print dimensions of log-likelihood or log-weights matrix} \usage{ print_dims(x, ...) \method{print_dims}{importance_sampling}(x, ...) \method{print_dims}{psis_loo}(x, ...) \method{print_dims}{importance_sampling_loo}(x, ...) \method{print_dims}{waic}(x, ...) \method{print_dims}{kfold}(x, ...) \method{print_dims}{psis_loo_ss}(x, ...) } \arguments{ \item{x}{The object returned by \code{\link[=psis]{psis()}}, \code{\link[=loo]{loo()}}, or \code{\link[=waic]{waic()}}.} \item{...}{Ignored.} } \description{ Print dimensions of log-likelihood or log-weights matrix } \keyword{internal} loo/man/gpdfit.Rd0000644000176200001440000000325213575772017013402 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/gpdfit.R \name{gpdfit} \alias{gpdfit} \title{Estimate parameters of the Generalized Pareto distribution} \usage{ gpdfit(x, wip = TRUE, min_grid_pts = 30, sort_x = TRUE) } \arguments{ \item{x}{A numeric vector. 
The sample from which to estimate the parameters.} \item{wip}{Logical indicating whether to adjust \eqn{k} based on a weakly informative Gaussian prior centered on 0.5. Defaults to \code{TRUE}.} \item{min_grid_pts}{The minimum number of grid points used in the fitting algorithm. The actual number used is \code{min_grid_pts + floor(sqrt(length(x)))}.} \item{sort_x}{If \code{TRUE} (the default), the first step in the fitting algorithm is to sort the elements of \code{x}. If \code{x} is already sorted in ascending order then \code{sort_x} can be set to \code{FALSE} to skip the initial sorting step.} } \value{ A named list with components \code{k} and \code{sigma}. } \description{ Given a sample \eqn{x}, Estimate the parameters \eqn{k} and \eqn{\sigma} of the generalized Pareto distribution (GPD), assuming the location parameter is 0. By default the fit uses a prior for \eqn{k}, which will stabilize estimates for very small sample sizes (and low effective sample sizes in the case of MCMC samples). The weakly informative prior is a Gaussian prior centered at 0.5. } \details{ Here the parameter \eqn{k} is the negative of \eqn{k} in Zhang & Stephens (2009). } \references{ Zhang, J., and Stephens, M. A. (2009). A new and efficient estimation method for the generalized Pareto distribution. \emph{Technometrics} \strong{51}, 316-325. } \seealso{ \code{\link[=psis]{psis()}}, \link{pareto-k-diagnostic} } loo/man/sis.Rd0000644000176200001440000001224514641333357012720 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/sis.R \name{sis} \alias{sis} \alias{sis.array} \alias{sis.matrix} \alias{sis.default} \title{Standard importance sampling (SIS)} \usage{ sis(log_ratios, ...) \method{sis}{array}(log_ratios, ..., r_eff = NULL, cores = getOption("mc.cores", 1)) \method{sis}{matrix}(log_ratios, ..., r_eff = NULL, cores = getOption("mc.cores", 1)) \method{sis}{default}(log_ratios, ..., r_eff = NULL) } \arguments{ \item{log_ratios}{An array, matrix, or vector of importance ratios on the log scale (for Importance sampling LOO, these are \emph{negative} log-likelihood values). See the \strong{Methods (by class)} section below for a detailed description of how to specify the inputs for each method.} \item{...}{Arguments passed on to the various methods.} \item{r_eff}{Vector of relative effective sample size estimates containing one element per observation. The values provided should be the relative effective sample sizes of \code{1/exp(log_ratios)} (i.e., \code{1/ratios}). This is related to the relative efficiency of estimating the normalizing term in self-normalizing importance sampling. See the \code{\link[=relative_eff]{relative_eff()}} helper function for computing \code{r_eff}. If using \code{psis} with draws of the \code{log_ratios} not obtained from MCMC then the warning message thrown when not specifying \code{r_eff} can be disabled by setting \code{r_eff} to \code{NA}.} \item{cores}{The number of cores to use for parallelization. This defaults to the option \code{mc.cores} which can be set for an entire R session by \code{options(mc.cores = NUMBER)}. The old option \code{loo.cores} is now deprecated but will be given precedence over \code{mc.cores} until \code{loo.cores} is removed in a future release. \strong{As of version 2.0.0 the default is now 1 core if \code{mc.cores} is not set}, but we recommend using as many (or close to as many) cores as possible. 
\itemize{ \item Note for Windows 10 users: it is \strong{strongly} \href{https://github.com/stan-dev/loo/issues/94}{recommended} to avoid using the \code{.Rprofile} file to set \code{mc.cores} (using the \code{cores} argument or setting \code{mc.cores} interactively or in a script is fine). }} } \value{ The \code{sis()} methods return an object of class \code{"sis"}, which is a named list with the following components: \describe{ \item{\code{log_weights}}{ Vector or matrix of smoothed but \emph{unnormalized} log weights. To get normalized weights use the \code{\link[=weights.importance_sampling]{weights()}} method provided for objects of class \code{sis}. } \item{\code{diagnostics}}{ A named list containing one vector: \itemize{ \item \code{pareto_k}: Not used in \code{sis}, all set to 0. \item \code{n_eff}: effective sample size estimates. } } } Objects of class \code{"sis"} also have the following \link[=attributes]{attributes}: \describe{ \item{\code{norm_const_log}}{ Vector of precomputed values of \code{colLogSumExps(log_weights)} that are used internally by the \code{weights} method to normalize the log weights. } \item{\code{r_eff}}{ If specified, the user's \code{r_eff} argument. } \item{\code{tail_len}}{ Not used for \code{sis}. } \item{\code{dims}}{ Integer vector of length 2 containing \code{S} (posterior sample size) and \code{N} (number of observations). } \item{\code{method}}{ Method used for importance sampling, here \code{sis}. } } } \description{ Implementation of standard importance sampling (SIS). } \section{Methods (by class)}{ \itemize{ \item \code{sis(array)}: An \eqn{I} by \eqn{C} by \eqn{N} array, where \eqn{I} is the number of MCMC iterations per chain, \eqn{C} is the number of chains, and \eqn{N} is the number of data points. \item \code{sis(matrix)}: An \eqn{S} by \eqn{N} matrix, where \eqn{S} is the size of the posterior sample (with all chains merged) and \eqn{N} is the number of data points. \item \code{sis(default)}: A vector of length \eqn{S} (posterior sample size). }} \examples{ log_ratios <- -1 * example_loglik_array() r_eff <- relative_eff(exp(-log_ratios)) sis_result <- sis(log_ratios, r_eff = r_eff) str(sis_result) # extract smoothed weights lw <- weights(sis_result) # default args are log=TRUE, normalize=TRUE ulw <- weights(sis_result, normalize=FALSE) # unnormalized log-weights w <- weights(sis_result, log=FALSE) # normalized weights (not log-weights) uw <- weights(sis_result, log=FALSE, normalize = FALSE) # unnormalized weights } \references{ Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. \emph{Statistics and Computing}. 27(5), 1413--1432. doi:10.1007/s11222-016-9696-4 (\href{https://link.springer.com/article/10.1007/s11222-016-9696-4}{journal version}, \href{https://arxiv.org/abs/1507.04544}{preprint arXiv:1507.04544}). Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. \emph{Journal of Machine Learning Research}, 25(72):1-58. \href{https://jmlr.org/papers/v25/19-556.html}{PDF} } \seealso{ \itemize{ \item \code{\link[=psis]{psis()}} for approximate LOO-CV using PSIS. \item \code{\link[=loo]{loo()}} for approximate LOO-CV. \item \link{pareto-k-diagnostic} for PSIS diagnostics. 
} } loo/man/find_model_names.Rd0000644000176200001440000000070715100712211015362 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/loo_compare.R \name{find_model_names} \alias{find_model_names} \title{Find the model names associated with \code{"loo"} objects} \usage{ find_model_names(x) } \arguments{ \item{x}{List of \code{"loo"} objects.} } \value{ Character vector of model names the same length as \code{x.} } \description{ Find the model names associated with \code{"loo"} objects } \keyword{internal} loo/man/nobs.psis_loo_ss.Rd0000644000176200001440000000065013575772017015420 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/loo_subsample.R \name{nobs.psis_loo_ss} \alias{nobs.psis_loo_ss} \title{The number of observations in a \code{psis_loo_ss} object.} \usage{ \method{nobs}{psis_loo_ss}(object, ...) } \arguments{ \item{object}{a \code{psis_loo_ss} object.} \item{...}{Currently unused.} } \description{ The number of observations in a \code{psis_loo_ss} object. } loo/man/tis.Rd0000644000176200001440000001176314566461605012731 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/tis.R \name{tis} \alias{tis} \alias{tis.array} \alias{tis.matrix} \alias{tis.default} \title{Truncated importance sampling (TIS)} \usage{ tis(log_ratios, ...) \method{tis}{array}(log_ratios, ..., r_eff = 1, cores = getOption("mc.cores", 1)) \method{tis}{matrix}(log_ratios, ..., r_eff = 1, cores = getOption("mc.cores", 1)) \method{tis}{default}(log_ratios, ..., r_eff = 1) } \arguments{ \item{log_ratios}{An array, matrix, or vector of importance ratios on the log scale (for Importance sampling LOO, these are \emph{negative} log-likelihood values). See the \strong{Methods (by class)} section below for a detailed description of how to specify the inputs for each method.} \item{...}{Arguments passed on to the various methods.} \item{r_eff}{Vector of relative effective sample size estimates containing one element per observation. The values provided should be the relative effective sample sizes of \code{1/exp(log_ratios)} (i.e., \code{1/ratios}). This is related to the relative efficiency of estimating the normalizing term in self-normalizing importance sampling. If \code{r_eff} is not provided then the reported (T)IS effective sample sizes and Monte Carlo error estimates can be over-optimistic. If the posterior draws are (near) independent then \code{r_eff=1} can be used. \code{r_eff} has to be a scalar (same value is used for all observations) or a vector with length equal to the number of observations. The default value is 1. See the \code{\link[=relative_eff]{relative_eff()}} helper function for computing \code{r_eff}.} \item{cores}{The number of cores to use for parallelization. This defaults to the option \code{mc.cores} which can be set for an entire R session by \code{options(mc.cores = NUMBER)}. The old option \code{loo.cores} is now deprecated but will be given precedence over \code{mc.cores} until \code{loo.cores} is removed in a future release. \strong{As of version 2.0.0 the default is now 1 core if \code{mc.cores} is not set}, but we recommend using as many (or close to as many) cores as possible. 
\itemize{ \item Note for Windows 10 users: it is \strong{strongly} \href{https://github.com/stan-dev/loo/issues/94}{recommended} to avoid using the \code{.Rprofile} file to set \code{mc.cores} (using the \code{cores} argument or setting \code{mc.cores} interactively or in a script is fine). }} } \value{ The \code{tis()} methods return an object of class \code{"tis"}, which is a named list with the following components: \describe{ \item{\code{log_weights}}{ Vector or matrix of smoothed (and truncated) but \emph{unnormalized} log weights. To get normalized weights use the \code{\link[=weights.importance_sampling]{weights()}} method provided for objects of class \code{tis}. } \item{\code{diagnostics}}{ A named list containing one vector: \itemize{ \item \code{pareto_k}: Not used in \code{tis}, all set to 0. \item \code{n_eff}: Effective sample size estimates. } } } Objects of class \code{"tis"} also have the following \link[=attributes]{attributes}: \describe{ \item{\code{norm_const_log}}{ Vector of precomputed values of \code{colLogSumExps(log_weights)} that are used internally by the \code{\link[=weights]{weights()}}method to normalize the log weights. } \item{\code{r_eff}}{ If specified, the user's \code{r_eff} argument. } \item{\code{tail_len}}{ Not used for \code{tis}. } \item{\code{dims}}{ Integer vector of length 2 containing \code{S} (posterior sample size) and \code{N} (number of observations). } \item{\code{method}}{ Method used for importance sampling, here \code{tis}. } } } \description{ Implementation of truncated (self-normalized) importance sampling (TIS), truncated at S^(1/2) as recommended by Ionides (2008). } \section{Methods (by class)}{ \itemize{ \item \code{tis(array)}: An \eqn{I} by \eqn{C} by \eqn{N} array, where \eqn{I} is the number of MCMC iterations per chain, \eqn{C} is the number of chains, and \eqn{N} is the number of data points. \item \code{tis(matrix)}: An \eqn{S} by \eqn{N} matrix, where \eqn{S} is the size of the posterior sample (with all chains merged) and \eqn{N} is the number of data points. \item \code{tis(default)}: A vector of length \eqn{S} (posterior sample size). }} \examples{ log_ratios <- -1 * example_loglik_array() r_eff <- relative_eff(exp(-log_ratios)) tis_result <- tis(log_ratios, r_eff = r_eff) str(tis_result) # extract smoothed weights lw <- weights(tis_result) # default args are log=TRUE, normalize=TRUE ulw <- weights(tis_result, normalize=FALSE) # unnormalized log-weights w <- weights(tis_result, log=FALSE) # normalized weights (not log-weights) uw <- weights(tis_result, log=FALSE, normalize = FALSE) # unnormalized weights } \references{ Ionides, Edward L. (2008). Truncated importance sampling. \emph{Journal of Computational and Graphical Statistics} 17(2): 295--311. } \seealso{ \itemize{ \item \code{\link[=psis]{psis()}} for approximate LOO-CV using PSIS. \item \code{\link[=loo]{loo()}} for approximate LOO-CV. \item \link{pareto-k-diagnostic} for PSIS diagnostics. } } loo/man/loo_approximate_posterior.Rd0000644000176200001440000001400314641333357017424 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/loo_approximate_posterior.R \name{loo_approximate_posterior} \alias{loo_approximate_posterior} \alias{loo_approximate_posterior.array} \alias{loo_approximate_posterior.matrix} \alias{loo_approximate_posterior.function} \title{Efficient approximate leave-one-out cross-validation (LOO) for posterior approximations} \usage{ loo_approximate_posterior(x, log_p, log_g, ...) 
\method{loo_approximate_posterior}{array}( x, log_p, log_g, ..., save_psis = FALSE, cores = getOption("mc.cores", 1) ) \method{loo_approximate_posterior}{matrix}( x, log_p, log_g, ..., save_psis = FALSE, cores = getOption("mc.cores", 1) ) \method{loo_approximate_posterior}{`function`}( x, ..., data = NULL, draws = NULL, log_p = NULL, log_g = NULL, save_psis = FALSE, cores = getOption("mc.cores", 1) ) } \arguments{ \item{x}{A log-likelihood array, matrix, or function. The \strong{Methods (by class)} section, below, has detailed descriptions of how to specify the inputs for each method.} \item{log_p}{The log-posterior (target) evaluated at S samples from the proposal distribution (g). A vector of length S.} \item{log_g}{The log-density (proposal) evaluated at S samples from the proposal distribution (g). A vector of length S.} \item{save_psis}{Should the \code{"psis"} object created internally by \code{loo_approximate_posterior()} be saved in the returned object? See \code{\link[=loo]{loo()}} for details.} \item{cores}{The number of cores to use for parallelization. This defaults to the option \code{mc.cores} which can be set for an entire R session by \code{options(mc.cores = NUMBER)}. The old option \code{loo.cores} is now deprecated but will be given precedence over \code{mc.cores} until \code{loo.cores} is removed in a future release. \strong{As of version 2.0.0 the default is now 1 core if \code{mc.cores} is not set}, but we recommend using as many (or close to as many) cores as possible. \itemize{ \item Note for Windows 10 users: it is \strong{strongly} \href{https://github.com/stan-dev/loo/issues/94}{recommended} to avoid using the \code{.Rprofile} file to set \code{mc.cores} (using the \code{cores} argument or setting \code{mc.cores} interactively or in a script is fine). }} \item{data, draws, ...}{For the \code{loo_approximate_posterior.function()} method, these are the data, posterior draws, and other arguments to pass to the log-likelihood function. See the \strong{Methods (by class)} section below for details on how to specify these arguments.} } \value{ The \code{loo_approximate_posterior()} methods return a named list with class \code{c("psis_loo_ap", "psis_loo", "loo")}. It has the same structure as the objects returned by \code{\link[=loo]{loo()}} but with the additional slot: \describe{ \item{\code{posterior_approximation}}{ A list with two vectors, \code{log_p} and \code{log_g} of the same length containing the posterior density and the approximation density for the individual draws. } } } \description{ Efficient approximate leave-one-out cross-validation (LOO) for posterior approximations } \details{ The \code{loo_approximate_posterior()} function is an S3 generic and methods are provided for 3-D pointwise log-likelihood arrays, pointwise log-likelihood matrices, and log-likelihood functions. The implementation works for posterior approximations where it is possible to compute the log density for the posterior approximation. } \section{Methods (by class)}{ \itemize{ \item \code{loo_approximate_posterior(array)}: An \eqn{I} by \eqn{C} by \eqn{N} array, where \eqn{I} is the number of MCMC iterations per chain, \eqn{C} is the number of chains, and \eqn{N} is the number of data points. \item \code{loo_approximate_posterior(matrix)}: An \eqn{S} by \eqn{N} matrix, where \eqn{S} is the size of the posterior sample (with all chains merged) and \eqn{N} is the number of data points. 
\item \code{loo_approximate_posterior(`function`)}: A function \code{f()} that takes arguments \code{data_i} and \code{draws} and returns a vector containing the log-likelihood for a single observation \code{i} evaluated at each posterior draw. The function should be written such that, for each observation \code{i} in \code{1:N}, evaluating \if{html}{\out{
}}\preformatted{f(data_i = data[i,, drop=FALSE], draws = draws) }\if{html}{\out{
}} results in a vector of length \code{S} (size of posterior sample). The log-likelihood function can also have additional arguments but \code{data_i} and \code{draws} are required. If using the function method then the arguments \code{data} and \code{draws} must also be specified in the call to \code{loo()}: \itemize{ \item \code{data}: A data frame or matrix containing the data (e.g. observed outcome and predictors) needed to compute the pointwise log-likelihood. For each observation \code{i}, the \code{i}th row of \code{data} will be passed to the \code{data_i} argument of the log-likelihood function. \item \code{draws}: An object containing the posterior draws for any parameters needed to compute the pointwise log-likelihood. Unlike \code{data}, which is indexed by observation, for each observation the entire object \code{draws} will be passed to the \code{draws} argument of the log-likelihood function. \item The \code{...} can be used if your log-likelihood function takes additional arguments. These arguments are used like the \code{draws} argument in that they are recycled for each observation. } }} \references{ Magnusson, M., Riis Andersen, M., Jonasson, J. and Vehtari, A. (2019). Leave-One-Out Cross-Validation for Large Data. In \emph{Thirty-sixth International Conference on Machine Learning}, PMLR 97:4244-4253. Magnusson, M., Riis Andersen, M., Jonasson, J. and Vehtari, A. (2020). Leave-One-Out Cross-Validation for Model Comparison in Large Data. In \emph{Proceedings of the 23rd International Conference on Artificial Intelligence and Statistics (AISTATS)}, PMLR 108:341-351. } \seealso{ \code{\link[=loo]{loo()}}, \code{\link[=psis]{psis()}}, \code{\link[=loo_compare]{loo_compare()}} } loo/man/nlist.Rd0000644000176200001440000000137513575772017013262 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/helpers.R \name{nlist} \alias{nlist} \title{Named lists} \usage{ nlist(...) } \arguments{ \item{...}{Objects to include in the list.} } \value{ A named list. } \description{ Create a named list using specified names or, if names are omitted, using the names of the objects in the list. The code \code{list(a = a, b = b)} becomes \code{nlist(a,b)} and \code{list(a = a, b = 2)} becomes \code{nlist(a, b = 2)}, etc. } \examples{ # All variables already defined a <- rnorm(100) b <- mat.or.vec(10, 3) nlist(a,b) # Define some variables in the call and take the rest from the environment nlist(a, b, veggies = c("lettuce", "spinach"), fruits = c("banana", "papaya")) } \keyword{internal} loo/man/psis.Rd0000644000176200001440000001416214641333357013100 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/psis.R, R/sis.R, R/tis.R \name{psis} \alias{psis} \alias{psis.array} \alias{psis.matrix} \alias{psis.default} \alias{is.psis} \alias{is.sis} \alias{is.tis} \title{Pareto smoothed importance sampling (PSIS)} \usage{ psis(log_ratios, ...) \method{psis}{array}(log_ratios, ..., r_eff = 1, cores = getOption("mc.cores", 1)) \method{psis}{matrix}(log_ratios, ..., r_eff = 1, cores = getOption("mc.cores", 1)) \method{psis}{default}(log_ratios, ..., r_eff = 1) is.psis(x) is.sis(x) is.tis(x) } \arguments{ \item{log_ratios}{An array, matrix, or vector of importance ratios on the log scale (for PSIS-LOO these are \emph{negative} log-likelihood values). 
See the \strong{Methods (by class)} section below for a detailed description of how to specify the inputs for each method.} \item{...}{Arguments passed on to the various methods.} \item{r_eff}{Vector of relative effective sample size estimates containing one element per observation. The values provided should be the relative effective sample sizes of \code{1/exp(log_ratios)} (i.e., \code{1/ratios}). This is related to the relative efficiency of estimating the normalizing term in self-normalizing importance sampling. If \code{r_eff} is not provided then the reported PSIS effective sample sizes and Monte Carlo error estimates can be over-optimistic. If the posterior draws are (near) independent then \code{r_eff=1} can be used. \code{r_eff} has to be a scalar (same value is used for all observations) or a vector with length equal to the number of observations. The default value is 1. See the \code{\link[=relative_eff]{relative_eff()}} helper function for computing \code{r_eff}.} \item{cores}{The number of cores to use for parallelization. This defaults to the option \code{mc.cores} which can be set for an entire R session by \code{options(mc.cores = NUMBER)}. The old option \code{loo.cores} is now deprecated but will be given precedence over \code{mc.cores} until \code{loo.cores} is removed in a future release. \strong{As of version 2.0.0 the default is now 1 core if \code{mc.cores} is not set}, but we recommend using as many (or close to as many) cores as possible. \itemize{ \item Note for Windows 10 users: it is \strong{strongly} \href{https://github.com/stan-dev/loo/issues/94}{recommended} to avoid using the \code{.Rprofile} file to set \code{mc.cores} (using the \code{cores} argument or setting \code{mc.cores} interactively or in a script is fine). }} \item{x}{For \code{is.psis()}, an object to check.} } \value{ The \code{psis()} methods return an object of class \code{"psis"}, which is a named list with the following components: \describe{ \item{\code{log_weights}}{ Vector or matrix of smoothed (and truncated) but \emph{unnormalized} log weights. To get normalized weights use the \code{\link[=weights.importance_sampling]{weights()}} method provided for objects of class \code{"psis"}. } \item{\code{diagnostics}}{ A named list containing two vectors: \itemize{ \item \code{pareto_k}: Estimates of the shape parameter \eqn{k} of the generalized Pareto distribution. See the \link{pareto-k-diagnostic} page for details. \item \code{n_eff}: PSIS effective sample size estimates. } } } Objects of class \code{"psis"} also have the following \link[=attributes]{attributes}: \describe{ \item{\code{norm_const_log}}{ Vector of precomputed values of \code{colLogSumExps(log_weights)} that are used internally by the \code{weights} method to normalize the log weights. } \item{\code{tail_len}}{ Vector of tail lengths used for fitting the generalized Pareto distribution. } \item{\code{r_eff}}{ If specified, the user's \code{r_eff} argument. } \item{\code{dims}}{ Integer vector of length 2 containing \code{S} (posterior sample size) and \code{N} (number of observations). } \item{\code{method}}{ Method used for importance sampling, here \code{psis}. } } } \description{ Implementation of Pareto smoothed importance sampling (PSIS), a method for stabilizing importance ratios. The version of PSIS implemented here corresponds to the algorithm presented in Vehtari, Simpson, Gelman, Yao, and Gabry (2024). For PSIS diagnostics see the \link{pareto-k-diagnostic} page. 
} \section{Methods (by class)}{ \itemize{ \item \code{psis(array)}: An \eqn{I} by \eqn{C} by \eqn{N} array, where \eqn{I} is the number of MCMC iterations per chain, \eqn{C} is the number of chains, and \eqn{N} is the number of data points. \item \code{psis(matrix)}: An \eqn{S} by \eqn{N} matrix, where \eqn{S} is the size of the posterior sample (with all chains merged) and \eqn{N} is the number of data points. \item \code{psis(default)}: A vector of length \eqn{S} (posterior sample size). }} \examples{ log_ratios <- -1 * example_loglik_array() r_eff <- relative_eff(exp(-log_ratios)) psis_result <- psis(log_ratios, r_eff = r_eff) str(psis_result) plot(psis_result) # extract smoothed weights lw <- weights(psis_result) # default args are log=TRUE, normalize=TRUE ulw <- weights(psis_result, normalize=FALSE) # unnormalized log-weights w <- weights(psis_result, log=FALSE) # normalized weights (not log-weights) uw <- weights(psis_result, log=FALSE, normalize = FALSE) # unnormalized weights } \references{ Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. \emph{Statistics and Computing}. 27(5), 1413--1432. doi:10.1007/s11222-016-9696-4 (\href{https://link.springer.com/article/10.1007/s11222-016-9696-4}{journal version}, \href{https://arxiv.org/abs/1507.04544}{preprint arXiv:1507.04544}). Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. \emph{Journal of Machine Learning Research}, 25(72):1-58. \href{https://jmlr.org/papers/v25/19-556.html}{PDF} } \seealso{ \itemize{ \item \code{\link[=loo]{loo()}} for approximate LOO-CV using PSIS. \item \link{pareto-k-diagnostic} for PSIS diagnostics. \item The \strong{loo} package \href{https://mc-stan.org/loo/articles/index.html}{vignettes} for demonstrations. \item The \href{https://mc-stan.org/loo/articles/online-only/faq.html}{FAQ page} on the \strong{loo} website for answers to frequently asked questions. } } loo/man/update.psis_loo_ss.Rd0000644000176200001440000001214714566461605015744 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/loo_subsample.R \name{update.psis_loo_ss} \alias{update.psis_loo_ss} \title{Update \code{psis_loo_ss} objects} \usage{ \method{update}{psis_loo_ss}( object, ..., data = NULL, draws = NULL, observations = NULL, r_eff = 1, cores = getOption("mc.cores", 1), loo_approximation = NULL, loo_approximation_draws = NULL, llgrad = NULL, llhess = NULL ) } \arguments{ \item{object}{A \code{psis_loo_ss} object to update.} \item{...}{Currently not used.} \item{data, draws}{See \code{\link[=loo_subsample.function]{loo_subsample.function()}}.} \item{observations}{The subsample observations to use. The argument can take four (4) types of arguments: \itemize{ \item \code{NULL} to use all observations. The algorithm then just uses standard \code{loo()} or \code{loo_approximate_posterior()}. \item A single integer to specify the number of observations to be subsampled. \item A vector of integers to provide the indices used to subset the data. \emph{These observations need to be subsampled with the same scheme as given by the \code{estimator} argument}. \item A \code{psis_loo_ss} object to use the same observations that were used in a previous call to \code{loo_subsample()}. }} \item{r_eff}{Vector of relative effective sample size estimates for the likelihood (\code{exp(log_lik)}) of each observation. 
This is related to the relative efficiency of estimating the normalizing term in self-normalized importance sampling when using posterior draws obtained with MCMC. If MCMC draws are used and \code{r_eff} is not provided then the reported PSIS effective sample sizes and Monte Carlo error estimates can be over-optimistic. If the posterior draws are (near) independent then \code{r_eff=1} can be used. \code{r_eff} has to be a scalar (same value is used for all observations) or a vector with length equal to the number of observations. The default value is 1. See the \code{\link[=relative_eff]{relative_eff()}} helper functions for help computing \code{r_eff}.} \item{cores}{The number of cores to use for parallelization. This defaults to the option \code{mc.cores} which can be set for an entire R session by \code{options(mc.cores = NUMBER)}. The old option \code{loo.cores} is now deprecated but will be given precedence over \code{mc.cores} until \code{loo.cores} is removed in a future release. \strong{As of version 2.0.0 the default is now 1 core if \code{mc.cores} is not set}, but we recommend using as many (or close to as many) cores as possible. \itemize{ \item Note for Windows 10 users: it is \strong{strongly} \href{https://github.com/stan-dev/loo/issues/94}{recommended} to avoid using the \code{.Rprofile} file to set \code{mc.cores} (using the \code{cores} argument or setting \code{mc.cores} interactively or in a script is fine). }} \item{loo_approximation}{What type of approximation of the loo_i's should be used? The default is \code{"plpd"} (the log predictive density using the posterior expectation). There are six different methods implemented to approximate loo_i's (see the references for more details): \itemize{ \item \code{"plpd"}: uses the lpd based on point estimates (i.e., \eqn{p(y_i|\hat{\theta})}). \item \code{"lpd"}: uses the lpds (i,e., \eqn{p(y_i|y)}). \item \code{"tis"}: uses truncated importance sampling to approximate PSIS-LOO. \item \code{"waic"}: uses waic (i.e., \eqn{p(y_i|y) - p_{waic}}). \item \code{"waic_grad_marginal"}: uses waic approximation using first order delta method and posterior marginal variances to approximate \eqn{p_{waic}} (ie. \eqn{p(y_i|\hat{\theta})}-p_waic_grad_marginal). Requires gradient of likelihood function. \item \code{"waic_grad"}: uses waic approximation using first order delta method and posterior covariance to approximate \eqn{p_{waic}} (ie. \eqn{p(y_i|\hat{\theta})}-p_waic_grad). Requires gradient of likelihood function. \item \code{"waic_hess"}: uses waic approximation using second order delta method and posterior covariance to approximate \eqn{p_{waic}} (ie. \eqn{p(y_i|\hat{\theta})}-p_waic_grad). Requires gradient and Hessian of likelihood function. } As point estimates of \eqn{\hat{\theta}}, the posterior expectations of the parameters are used.} \item{loo_approximation_draws}{The number of posterior draws used when integrating over the posterior. This is used if \code{loo_approximation} is set to \code{"lpd"}, \code{"waic"}, or \code{"tis"}.} \item{llgrad}{The gradient of the log-likelihood. This is only used when \code{loo_approximation} is \code{"waic_grad"}, \code{"waic_grad_marginal"}, or \code{"waic_hess"}. The default is \code{NULL}.} \item{llhess}{The Hessian of the log-likelihood. This is only used with \code{loo_approximation = "waic_hess"}. The default is \code{NULL}.} } \value{ A \code{psis_loo_ss} object. 
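For instance, a hedged usage sketch (the objects \code{loo_ss_1}, \code{llfun}, \code{data}, and \code{draws} below are assumed to come from an earlier \code{loo_subsample()} call and are not defined here):
# loo_ss_2 <- update(loo_ss_1, data = data, draws = draws, observations = 200)      # grow the subsample to 200 observations
# loo_ss_3 <- update(loo_ss_1, data = data, draws = draws, observations = loo_ss_2)  # reuse exactly the observations used by loo_ss_2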
} \description{ Update \code{psis_loo_ss} objects } \details{ If \code{observations} is updated and a vector of indices or a \code{psis_loo_ss} object is supplied, the updated object will have exactly the observations indicated by the vector or \code{psis_loo_ss} object. If a single integer is supplied, new observations will be sampled to reach the supplied sample size. } loo/man/figures/0000755000176200001440000000000015076255137013275 5ustar liggesusersloo/man/figures/logo.svg0000644000176200001440000001371615076255137014766 0ustar liggesusersloo/man/pareto-k-diagnostic.Rd0000644000176200001440000002144014641333357015763 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/diagnostics.R \name{pareto-k-diagnostic} \alias{pareto-k-diagnostic} \alias{pareto_k_table} \alias{pareto_k_ids} \alias{pareto_k_values} \alias{pareto_k_influence_values} \alias{psis_n_eff_values} \alias{mcse_loo} \alias{plot.psis_loo} \alias{plot.loo} \alias{plot.psis} \title{Diagnostics for Pareto smoothed importance sampling (PSIS)} \usage{ pareto_k_table(x) pareto_k_ids(x, threshold = NULL) pareto_k_values(x) pareto_k_influence_values(x) psis_n_eff_values(x) mcse_loo(x, threshold = NULL) \method{plot}{psis_loo}( x, diagnostic = c("k", "ESS", "n_eff"), ..., label_points = FALSE, main = "PSIS diagnostic plot" ) \method{plot}{psis}( x, diagnostic = c("k", "ESS", "n_eff"), ..., label_points = FALSE, main = "PSIS diagnostic plot" ) } \arguments{ \item{x}{An object created by \code{\link[=loo]{loo()}} or \code{\link[=psis]{psis()}}.} \item{threshold}{For \code{pareto_k_ids()}, \code{threshold} is the minimum \eqn{k} value to flag (default is a sample size \code{S} dependent threshold \code{1 - 1 / log10(S)}). For \code{mcse_loo()}, if any \eqn{k} estimates are greater than \code{threshold} the MCSE estimate is returned as \code{NA}. See \strong{Details} for the motivation behind these defaults.} \item{diagnostic}{For the \code{plot} method, which diagnostic should be plotted? The options are \code{"k"} for Pareto \eqn{k} estimates (the default), or \code{"ESS"} or \code{"n_eff"} for PSIS effective sample size estimates.} \item{label_points, ...}{For the \code{plot()} method, if \code{label_points} is \code{TRUE} the observation numbers corresponding to any values of \eqn{k} greater than the diagnostic threshold will be displayed in the plot. Any arguments specified in \code{...} will be passed to \code{\link[graphics:text]{graphics::text()}} and can be used to control the appearance of the labels.} \item{main}{For the \code{plot()} method, a title for the plot.} } \value{ \code{pareto_k_table()} returns an object of class \code{"pareto_k_table"}, which is a matrix with columns \code{"Count"}, \code{"Proportion"}, and \code{"Min. n_eff"}, and has its own print method. \code{pareto_k_ids()} returns an integer vector indicating which observations have Pareto \eqn{k} estimates above \code{threshold}. \code{pareto_k_values()} returns a vector of the estimated Pareto \eqn{k} parameters. These represent the reliability of sampling. \code{pareto_k_influence_values()} returns a vector of the estimated Pareto \eqn{k} parameters. These represent the influence of the observations on the model posterior distribution. \code{psis_n_eff_values()} returns a vector of the estimated PSIS effective sample sizes. \code{mcse_loo()} returns the Monte Carlo standard error (MCSE) estimate for PSIS-LOO. MCSE will be NA if any Pareto \eqn{k} values are above \code{threshold}.
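For example, an illustrative sketch (not part of the original reference) applying the accessors above to a \code{loo()} result:
LLarr <- example_loglik_array()
loo1 <- loo(LLarr, r_eff = relative_eff(exp(LLarr)))
pareto_k_table(loo1)                  # counts and proportions by diagnostic category
pareto_k_ids(loo1, threshold = 0.7)   # indexes of flagged observations (if any)
head(pareto_k_values(loo1))
head(psis_n_eff_values(loo1))
mcse_loo(loo1)                        # NA if any k exceeds the threshold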
The \code{plot()} method is called for its side effect and does not return anything. If \code{x} is the result of a call to \code{\link[=loo]{loo()}} or \code{\link[=psis]{psis()}} then \code{plot(x, diagnostic)} produces a plot of the estimates of the Pareto shape parameters (\code{diagnostic = "k"}) or estimates of the PSIS effective sample sizes (\code{diagnostic = "ESS"}). } \description{ Print a diagnostic table summarizing the estimated Pareto shape parameters and PSIS effective sample sizes, find the indexes of observations for which the estimated Pareto shape parameter \eqn{k} is larger than some \code{threshold} value, or plot observation indexes vs. diagnostic estimates. The \strong{Details} section below provides a brief overview of the diagnostics, but we recommend consulting Vehtari, Gelman, and Gabry (2017) and Vehtari, Simpson, Gelman, Yao, and Gabry (2024) for full details. } \details{ The reliability and approximate convergence rate of the PSIS-based estimates can be assessed using the estimates for the shape parameter \eqn{k} of the generalized Pareto distribution. The diagnostic threshold for Pareto \eqn{k} depends on sample size \eqn{S} (sample size dependent threshold was introduced by Vehtari et al. (2024), and before that fixed thresholds of 0.5 and 0.7 were recommended). For simplicity, the \code{loo} package uses the nominal sample size \eqn{S} when computing the sample size specific threshold. This provides an optimistic threshold if the effective sample size is less than 2200, but if MCMC-ESS > S/2 the difference is usually negligible. Thinning of MCMC draws can be used to improve the ratio ESS/S. \itemize{ \item If \eqn{k < min(1 - 1 / log10(S), 0.7)}, where \eqn{S} is the sample size, the PSIS estimate and the corresponding Monte Carlo standard error estimate are reliable. \item If \eqn{1 - 1 / log10(S) <= k < 0.7}, the PSIS estimate and the corresponding Monte Carlo standard error estimate are not reliable, but increasing the (effective) sample size \eqn{S} above 2200 may help (this will increase the sample size specific threshold \eqn{(1-1/log10(2200))>0.7} and then the bias specific threshold 0.7 dominates). \item If \eqn{0.7 <= k < 1}, the PSIS estimate and the corresponding Monte Carlo standard error have large bias and are not reliable. Increasing the sample size may reduce the variability in the \eqn{k} estimate, which may result in a lower \eqn{k} estimate, too. \item If \eqn{k \geq 1}{k >= 1}, the target distribution is estimated to have a non-finite mean. The PSIS estimate and the corresponding Monte Carlo standard error are not well defined. Increasing the sample size may reduce the variability in the \eqn{k} estimate, which may also result in a lower \eqn{k} estimate. } \subsection{What if the estimated tail shape parameter \eqn{k} exceeds the diagnostic threshold?}{ Importance sampling is likely to work less well if the marginal posterior \eqn{p(\theta^s | y)} and LOO posterior \eqn{p(\theta^s | y_{-i})} are very different, which is more likely to happen with a non-robust model and highly influential observations. If the estimated tail shape parameter \eqn{k} exceeds the diagnostic threshold, the user should be warned. (Note: If \eqn{k} is greater than the diagnostic threshold then WAIC is also likely to fail, but WAIC lacks an equally accurate diagnostic.)
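The sample-size-dependent threshold used above is easy to compute directly; a small numerical illustration:
S <- 1000
min(1 - 1 / log10(S), 0.7)      # 0.667: the sample-size-specific threshold applies
min(1 - 1 / log10(2200), 0.7)   # 0.7: above roughly S = 2200 the 0.7 cap dominates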
When using PSIS in the context of approximate LOO-CV, we recommend one of the following actions: \itemize{ \item With some additional computations, it is possible to transform the MCMC draws from the posterior distribution to obtain more reliable importance sampling estimates. This results in a smaller shape parameter \eqn{k}. See \code{\link[=loo_moment_match]{loo_moment_match()}} and the vignette \emph{Avoiding model refits in leave-one-out cross-validation with moment matching} for an example of this. \item Sampling from a leave-one-out mixture distribution (see the vignette \emph{Mixture IS leave-one-out cross-validation for high-dimensional Bayesian models}), directly from \eqn{p(\theta^s | y_{-i})} for the problematic observations \eqn{i}, or using \eqn{K}-fold cross-validation (see the vignette \emph{Holdout validation and K-fold cross-validation of Stan programs with the loo package}) will generally be more stable. \item Using a model that is more robust to anomalous observations will generally make approximate LOO-CV more stable. } } \subsection{Observation influence statistics}{ The estimated shape parameter \eqn{k} for each observation can be used as a measure of the observation's influence on posterior distribution of the model. These can be obtained with \code{pareto_k_influence_values()}. } \subsection{Effective sample size and error estimates}{ In the case that we obtain the samples from the proposal distribution via MCMC the \strong{loo} package also computes estimates for the Monte Carlo error and the effective sample size for importance sampling, which are more accurate for PSIS than for IS and TIS (see Vehtari et al (2024) for details). However, the PSIS effective sample size estimate will be \strong{over-optimistic when the estimate of \eqn{k} is greater than} \eqn{min(1-1/log10(S), 0.7)}, where \eqn{S} is the sample size. } } \references{ Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. \emph{Statistics and Computing}. 27(5), 1413--1432. doi:10.1007/s11222-016-9696-4 (\href{https://link.springer.com/article/10.1007/s11222-016-9696-4}{journal version}, \href{https://arxiv.org/abs/1507.04544}{preprint arXiv:1507.04544}). Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. \emph{Journal of Machine Learning Research}, 25(72):1-58. \href{https://jmlr.org/papers/v25/19-556.html}{PDF} } \seealso{ \itemize{ \item \code{\link[=psis]{psis()}} for the implementation of the PSIS algorithm. \item The \href{https://mc-stan.org/loo/articles/online-only/faq.html}{FAQ page} on the \strong{loo} website for answers to frequently asked questions. } } loo/man/dot-compute_point_estimate.Rd0000644000176200001440000000156514566461605017475 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/loo_subsample.R \name{.compute_point_estimate} \alias{.compute_point_estimate} \alias{.compute_point_estimate.matrix} \alias{.compute_point_estimate.default} \title{Compute a point estimate from a draws object} \usage{ .compute_point_estimate(draws) \method{.compute_point_estimate}{matrix}(draws) \method{.compute_point_estimate}{default}(draws) } \arguments{ \item{draws}{A draws object with draws from the posterior.} } \value{ A 1 by P matrix with point estimates from a draws object. } \description{ Compute a point estimate from a draws object } \details{ This is a generic function to compute point estimates from draws objects. 
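As a purely hypothetical sketch (the class and method below are not part of the package), a developer could register a method for a custom draws class; returning the posterior means as a 1 by P matrix matches the documented return value, and using column means is an assumption consistent with the point estimates described for \code{loo_subsample()}:
.compute_point_estimate.my_draws <- function(draws) {
  m <- colMeans(as.matrix(draws))                        # posterior expectation of each parameter (assumed point estimate)
  matrix(m, nrow = 1, dimnames = list(NULL, names(m)))   # 1 x P matrix
}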
The function is internal and should only be used by developers to enable \code{\link[=loo_subsample]{loo_subsample()}} for arbitrary draws objects. } \keyword{internal} loo/man/loo_predictive_metric.Rd0000644000176200001440000000657614566461605016512 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/loo_predictive_metric.R \name{loo_predictive_metric} \alias{loo_predictive_metric} \alias{loo_predictive_metric.matrix} \title{Estimate leave-one-out predictive performance..} \usage{ loo_predictive_metric(x, ...) \method{loo_predictive_metric}{matrix}( x, y, log_lik, ..., metric = c("mae", "rmse", "mse", "acc", "balanced_acc"), r_eff = 1, cores = getOption("mc.cores", 1) ) } \arguments{ \item{x}{A numeric matrix of predictions.} \item{...}{Additional arguments passed on to \code{\link[=E_loo]{E_loo()}}} \item{y}{A numeric vector of observations. Length should be equal to the number of rows in \code{x}.} \item{log_lik}{A matrix of pointwise log-likelihoods. Should be of same dimension as \code{x}.} \item{metric}{The type of predictive metric to be used. Currently supported options are \code{"mae"}, \code{"rmse"} and \code{"mse"} for regression and for binary classification \code{"acc"} and \code{"balanced_acc"}. \describe{ \item{\code{"mae"}}{ Mean absolute error. } \item{\code{"mse"}}{ Mean squared error. } \item{\code{"rmse"}}{ Root mean squared error, given by as the square root of \code{MSE}. } \item{\code{"acc"}}{ The proportion of predictions indicating the correct outcome. } \item{\code{"balanced_acc"}}{ Balanced accuracy is given by the average of true positive and true negative rates. } }} \item{r_eff}{A Vector of relative effective sample size estimates containing one element per observation. See \code{\link[=psis]{psis()}} for more details.} \item{cores}{The number of cores to use for parallelization of \verb{[psis()]}. See \code{\link[=psis]{psis()}} for details.} } \value{ A list with the following components: \describe{ \item{\code{estimate}}{ Estimate of the given metric. } \item{\code{se}}{ Standard error of the estimate. } } } \description{ The \code{loo_predictive_metric()} function computes estimates of leave-one-out predictive metrics given a set of predictions and observations. Currently supported metrics are mean absolute error, mean squared error and root mean squared error for continuous predictions and accuracy and balanced accuracy for binary classification. Predictions are passed on to the \code{\link[=E_loo]{E_loo()}} function, so this function assumes that the PSIS approximation is working well. 
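A self-contained sketch with simulated draws (an illustration under simple normal-model assumptions, separate from the \strong{Examples} section below):
set.seed(1)
S <- 1000; N <- 25
y <- rnorm(N, mean = 1)
mu_draws <- rnorm(S, mean = mean(y), sd = 1 / sqrt(N))   # rough posterior draws for the mean
x <- matrix(mu_draws, nrow = S, ncol = N)                # predictions, S by N
log_lik <- sapply(y, function(y_i) dnorm(y_i, mean = mu_draws, sd = 1, log = TRUE))
loo_predictive_metric(x, y = y, log_lik = log_lik, metric = "mae", r_eff = 1)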
} \examples{ \donttest{ if (requireNamespace("rstanarm", quietly = TRUE)) { # Use rstanarm package to quickly fit a model and get both a log-likelihood # matrix and draws from the posterior predictive distribution library("rstanarm") # data from help("lm") ctl <- c(4.17,5.58,5.18,6.11,4.50,4.61,5.17,4.53,5.33,5.14) trt <- c(4.81,4.17,4.41,3.59,5.87,3.83,6.03,4.89,4.32,4.69) d <- data.frame( weight = c(ctl, trt), group = gl(2, 10, 20, labels = c("Ctl","Trt")) ) fit <- stan_glm(weight ~ group, data = d, refresh = 0) ll <- log_lik(fit) r_eff <- relative_eff(exp(-ll), chain_id = rep(1:4, each = 1000)) mu_pred <- posterior_epred(fit) # Leave-one-out mean absolute error of predictions mae <- loo_predictive_metric(x = mu_pred, y = d$weight, log_lik = ll, pred_error = 'mae', r_eff = r_eff) # Leave-one-out 90\%-quantile of mean absolute error mae_90q <- loo_predictive_metric(x = mu_pred, y = d$weight, log_lik = ll, pred_error = 'mae', r_eff = r_eff, type = 'quantile', probs = 0.9) } } } loo/man/parallel_psis_list.Rd0000644000176200001440000000652014566461605016012 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/loo.R \name{parallel_psis_list} \alias{parallel_psis_list} \alias{parallel_importance_sampling_list} \title{Parallel psis list computations} \usage{ parallel_psis_list( N, .loo_i, .llfun, data, draws, r_eff, save_psis, cores, ... ) parallel_importance_sampling_list( N, .loo_i, .llfun, data, draws, r_eff, save_psis, cores, method, ... ) } \arguments{ \item{N}{The total number of observations (i.e. \code{nrow(data)}).} \item{.loo_i}{The function used to compute individual loo contributions.} \item{.llfun}{See \code{llfun} in \code{\link[=loo.function]{loo.function()}}.} \item{data, draws, ...}{For the \code{loo.function()} method and the \code{loo_i()} function, these are the data, posterior draws, and other arguments to pass to the log-likelihood function. See the \strong{Methods (by class)} section below for details on how to specify these arguments.} \item{r_eff}{Vector of relative effective sample size estimates for the likelihood (\code{exp(log_lik)}) of each observation. This is related to the relative efficiency of estimating the normalizing term in self-normalized importance sampling when using posterior draws obtained with MCMC. If MCMC draws are used and \code{r_eff} is not provided then the reported PSIS effective sample sizes and Monte Carlo error estimates can be over-optimistic. If the posterior draws are (near) independent then \code{r_eff=1} can be used. \code{r_eff} has to be a scalar (same value is used for all observations) or a vector with length equal to the number of observations. The default value is 1. See the \code{\link[=relative_eff]{relative_eff()}} helper functions for help computing \code{r_eff}.} \item{save_psis}{Should the \code{psis} object created internally by \code{loo()} be saved in the returned object? The \code{loo()} function calls \code{\link[=psis]{psis()}} internally but by default discards the (potentially large) \code{psis} object after using it to compute the LOO-CV summaries. Setting \code{save_psis=TRUE} will add a \code{psis_object} component to the list returned by \code{loo}. This is useful if you plan to use the \code{\link[=E_loo]{E_loo()}} function to compute weighted expectations after running \code{loo}. Several functions in the \pkg{bayesplot} package also accept \code{psis} objects.} \item{cores}{The number of cores to use for parallelization. 
This defaults to the option \code{mc.cores} which can be set for an entire R session by \code{options(mc.cores = NUMBER)}. The old option \code{loo.cores} is now deprecated but will be given precedence over \code{mc.cores} until \code{loo.cores} is removed in a future release. \strong{As of version 2.0.0 the default is now 1 core if \code{mc.cores} is not set}, but we recommend using as many (or close to as many) cores as possible. \itemize{ \item Note for Windows 10 users: it is \strong{strongly} \href{https://github.com/stan-dev/loo/issues/94}{recommended} to avoid using the \code{.Rprofile} file to set \code{mc.cores} (using the \code{cores} argument or setting \code{mc.cores} interactively or in a script is fine). }} \item{method}{See \code{is_method} for \code{\link[=loo]{loo()}}} } \description{ Parallel psis list computations } \details{ Refactored function to handle parallel computations for psis_list } \keyword{internal} loo/man/loo_moment_match.Rd0000644000176200001440000001123114703765333015442 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/loo_moment_matching.R \name{loo_moment_match} \alias{loo_moment_match} \alias{loo_moment_match.default} \title{Moment matching for efficient approximate leave-one-out cross-validation (LOO)} \usage{ loo_moment_match(x, ...) \method{loo_moment_match}{default}( x, loo, post_draws, log_lik_i, unconstrain_pars, log_prob_upars, log_lik_i_upars, max_iters = 30L, k_threshold = NULL, split = TRUE, cov = TRUE, cores = getOption("mc.cores", 1), ... ) } \arguments{ \item{x}{A fitted model object.} \item{...}{Further arguments passed to the custom functions documented above.} \item{loo}{A loo object to be modified.} \item{post_draws}{A function that takes \code{x} as the first argument and returns a matrix of posterior draws of the model parameters.} \item{log_lik_i}{A function that takes \code{x} and \code{i} and returns a matrix (one column per chain) or a vector (all chains stacked) of log-likelihood draws of the \code{i}th observation based on the model \code{x}. If the draws are obtained using MCMC, the matrix with MCMC chains separated is preferred.} \item{unconstrain_pars}{A function that takes arguments \code{x} and \code{pars} and returns posterior draws on the unconstrained space based on the posterior draws on the constrained space passed via \code{pars}.} \item{log_prob_upars}{A function that takes arguments \code{x} and \code{upars} and returns a matrix of log-posterior density values of the unconstrained posterior draws passed via \code{upars}.} \item{log_lik_i_upars}{A function that takes arguments \code{x}, \code{upars}, and \code{i} and returns a vector of log-likelihood draws of the \code{i}th observation based on the unconstrained posterior draws passed via \code{upars}.} \item{max_iters}{Maximum number of moment matching iterations. Usually this does not need to be modified. If the maximum number of iterations is reached, there will be a warning, and increasing \code{max_iters} may improve accuracy.} \item{k_threshold}{Threshold value for Pareto k values above which the moment matching algorithm is used. The default value is \code{min(1 - 1/log10(S), 0.7)}, where \code{S} is the sample size.} \item{split}{Logical; Indicate whether to do the split transformation or not at the end of moment matching for each LOO fold.} \item{cov}{Logical; Indicate whether to match the covariance matrix of the samples or not.
If \code{FALSE}, only the mean and marginal variances are matched.} \item{cores}{The number of cores to use for parallelization. This defaults to the option \code{mc.cores} which can be set for an entire R session by \code{options(mc.cores = NUMBER)}. The old option \code{loo.cores} is now deprecated but will be given precedence over \code{mc.cores} until \code{loo.cores} is removed in a future release. \strong{As of version 2.0.0 the default is now 1 core if \code{mc.cores} is not set}, but we recommend using as many (or close to as many) cores as possible. \itemize{ \item Note for Windows 10 users: it is \strong{strongly} \href{https://github.com/stan-dev/loo/issues/94}{recommended} to avoid using the \code{.Rprofile} file to set \code{mc.cores} (using the \code{cores} argument or setting \code{mc.cores} interactively or in a script is fine). }} } \value{ The \code{loo_moment_match()} methods return an updated \code{loo} object. The structure of the updated \code{loo} object is similar, but the method also stores the original Pareto k diagnostic values in the diagnostics field. } \description{ Moment matching algorithm for updating a loo object when Pareto k estimates are large. } \details{ The \code{loo_moment_match()} function is an S3 generic and we provide a default method that takes as arguments user-specified functions \code{post_draws}, \code{log_lik_i}, \code{unconstrain_pars}, \code{log_prob_upars}, and \code{log_lik_i_upars}. All of these functions should take \code{...}. as an argument in addition to those specified for each function. } \section{Methods (by class)}{ \itemize{ \item \code{loo_moment_match(default)}: A default method that takes as arguments a user-specified model object \code{x}, a \code{loo} object and user-specified functions \code{post_draws}, \code{log_lik_i}, \code{unconstrain_pars}, \code{log_prob_upars}, and \code{log_lik_i_upars}. }} \examples{ # See the vignette for loo_moment_match() } \references{ Paananen, T., Piironen, J., Buerkner, P.-C., Vehtari, A. (2021). Implicitly adaptive importance sampling. \emph{Statistics and Computing}, 31, 16. doi:10.1007/s11222-020-09982-2. arXiv preprint arXiv:1906.08850. } \seealso{ \code{\link[=loo]{loo()}}, \code{\link[=loo_moment_match_split]{loo_moment_match_split()}} } loo/man/weights.importance_sampling.Rd0000644000176200001440000000177013701164066017622 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/importance_sampling.R \name{weights.importance_sampling} \alias{weights.importance_sampling} \title{Extract importance sampling weights} \usage{ \method{weights}{importance_sampling}(object, ..., log = TRUE, normalize = TRUE) } \arguments{ \item{object}{An object returned by \code{\link[=psis]{psis()}}, \code{\link[=tis]{tis()}}, or \code{\link[=sis]{sis()}}.} \item{...}{Ignored.} \item{log}{Should the weights be returned on the log scale? Defaults to \code{TRUE}.} \item{normalize}{Should the weights be normalized? Defaults to \code{TRUE}.} } \value{ The \code{weights()} method returns an object with the same dimensions as the \code{log_weights} component of \code{object}. The \code{normalize} and \code{log} arguments control whether the returned weights are normalized and whether or not to return them on the log scale. 
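A small self-contained check of the normalization (illustrative sketch):
log_ratios <- -1 * example_loglik_array()
psis_result <- psis(log_ratios, r_eff = relative_eff(exp(-log_ratios)))
w <- weights(psis_result, log = FALSE)   # normalized weights (not log)
range(colSums(w))                        # each observation's weights sum to (approximately) 1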
} \description{ Extract importance sampling weights } \examples{ # See the examples at help("psis") } loo/man/loo_moment_match_split.Rd0000644000176200001440000000720214214417101016640 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/split_moment_matching.R \name{loo_moment_match_split} \alias{loo_moment_match_split} \title{Split moment matching for efficient approximate leave-one-out cross-validation (LOO)} \usage{ loo_moment_match_split( x, upars, cov, total_shift, total_scaling, total_mapping, i, log_prob_upars, log_lik_i_upars, r_eff_i, cores, is_method, ... ) } \arguments{ \item{x}{A fitted model object.} \item{upars}{A matrix containing the model parameters in unconstrained space where they can have any real value.} \item{cov}{Logical; Indicate whether to match the covariance matrix of the samples or not. If \code{FALSE}, only the mean and marginal variances are matched.} \item{total_shift}{A vector representing the total shift made by the moment matching algorithm.} \item{total_scaling}{A vector representing the total scaling of marginal variance made by the moment matching algorithm.} \item{total_mapping}{A vector representing the total covariance transformation made by the moment matching algorithm.} \item{i}{Observation index.} \item{log_prob_upars}{A function that takes arguments \code{x} and \code{upars} and returns a matrix of log-posterior density values of the unconstrained posterior draws passed via \code{upars}.} \item{log_lik_i_upars}{A function that takes arguments \code{x}, \code{upars}, and \code{i} and returns a vector of log-likelihood draws of the \code{i}th observation based on the unconstrained posterior draws passed via \code{upars}.} \item{r_eff_i}{MCMC relative effective sample size of the \code{i}th log-likelihood draws.} \item{cores}{The number of cores to use for parallelization. This defaults to the option \code{mc.cores} which can be set for an entire R session by \code{options(mc.cores = NUMBER)}. The old option \code{loo.cores} is now deprecated but will be given precedence over \code{mc.cores} until \code{loo.cores} is removed in a future release. \strong{As of version 2.0.0 the default is now 1 core if \code{mc.cores} is not set}, but we recommend using as many (or close to as many) cores as possible. \itemize{ \item Note for Windows 10 users: it is \strong{strongly} \href{https://github.com/stan-dev/loo/issues/94}{recommended} to avoid using the \code{.Rprofile} file to set \code{mc.cores} (using the \code{cores} argument or setting \code{mc.cores} interactively or in a script is fine). }} \item{is_method}{The importance sampling method to use. The following methods are implemented: \itemize{ \item \code{\link[=psis]{"psis"}}: Pareto-Smoothed Importance Sampling (PSIS). Default method. \item \code{\link[=tis]{"tis"}}: Truncated Importance Sampling (TIS) with truncation at \code{sqrt(S)}, where \code{S} is the number of posterior draws. \item \code{\link[=sis]{"sis"}}: Standard Importance Sampling (SIS). }} \item{...}{Further arguments passed to the custom functions documented above.} } \value{ A list containing the updated log-importance weights and log-likelihood values. Also returns the updated MCMC effective sample size and the integrand-specific log-importance weights. } \description{ A function that computes the split moment matching importance sampling loo.
Takes in the moment matching total transformation, transforms only half of the draws, and computes a single elpd using multiple importance sampling. } \references{ Paananen, T., Piironen, J., Buerkner, P.-C., Vehtari, A. (2021). Implicitly adaptive importance sampling. \emph{Statistics and Computing}, 31, 16. doi:10.1007/s11222-020-09982-2. arXiv preprint arXiv:1906.08850. } \seealso{ \code{\link[=loo]{loo()}}, \code{\link[=loo_moment_match]{loo_moment_match()}} } loo/man/example_loglik_array.Rd0000644000176200001440000000203013575772017016310 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/example_log_lik_array.R \name{example_loglik_array} \alias{example_loglik_array} \alias{example_loglik_matrix} \title{Objects to use in examples and tests} \usage{ example_loglik_array() example_loglik_matrix() } \value{ \code{example_loglik_array()} returns a 500 (draws) x 2 (chains) x 32 (observations) pointwise log-likelihood array. \code{example_loglik_matrix()} returns the same pointwise log-likelihood values as \code{example_loglik_array()} but reshaped into a 1000 (draws*chains) x 32 (observations) matrix. } \description{ Example pointwise log-likelihood objects to use in demonstrations and tests. See the \strong{Value} and \strong{Examples} sections below. } \examples{ LLarr <- example_loglik_array() (dim_arr <- dim(LLarr)) LLmat <- example_loglik_matrix() (dim_mat <- dim(LLmat)) all.equal(dim_mat[1], dim_arr[1] * dim_arr[2]) all.equal(dim_mat[2], dim_arr[3]) all.equal(LLarr[, 1, ], LLmat[1:500, ]) all.equal(LLarr[, 2, ], LLmat[501:1000, ]) } loo/man/dot-thin_draws.Rd0000644000176200001440000000170514566461605015053 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/loo_subsample.R \name{.thin_draws} \alias{.thin_draws} \alias{.thin_draws.matrix} \alias{.thin_draws.numeric} \alias{.thin_draws.default} \title{Thin a draws object} \usage{ .thin_draws(draws, loo_approximation_draws) \method{.thin_draws}{matrix}(draws, loo_approximation_draws) \method{.thin_draws}{numeric}(draws, loo_approximation_draws) \method{.thin_draws}{default}(draws, loo_approximation_draws) } \arguments{ \item{draws}{A draws object with posterior draws.} \item{loo_approximation_draws}{The number of posterior draws to return (ie after thinning).} } \value{ A thinned draws object. } \description{ Thin a draws object } \details{ This is a generic function to thin draws from arbitrary draws objects. The function is internal and should only be used by developers to enable \code{\link[=loo_subsample]{loo_subsample()}} for arbitrary draws objects. } \keyword{internal} loo/man/loo_model_weights.Rd0000644000176200001440000002322614703765333015630 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/loo_model_weights.R \name{loo_model_weights} \alias{loo_model_weights} \alias{loo_model_weights.default} \alias{stacking_weights} \alias{pseudobma_weights} \title{Model averaging/weighting via stacking or pseudo-BMA weighting} \usage{ loo_model_weights(x, ...) 
\method{loo_model_weights}{default}( x, ..., method = c("stacking", "pseudobma"), optim_method = "BFGS", optim_control = list(), BB = TRUE, BB_n = 1000, alpha = 1, r_eff_list = NULL, cores = getOption("mc.cores", 1) ) stacking_weights(lpd_point, optim_method = "BFGS", optim_control = list()) pseudobma_weights(lpd_point, BB = TRUE, BB_n = 1000, alpha = 1) } \arguments{ \item{x}{A list of \code{"psis_loo"} objects (objects returned by \code{\link[=loo]{loo()}}) or pointwise log-likelihood matrices or , one for each model. If the list elements are named the names will be used to label the models in the results. Each matrix/object should have dimensions \eqn{S} by \eqn{N}, where \eqn{S} is the size of the posterior sample (with all chains merged) and \eqn{N} is the number of data points. If \code{x} is a list of log-likelihood matrices then \code{\link[=loo]{loo()}} is called internally on each matrix. Currently the \code{loo_model_weights()} function is not implemented to be used with results from K-fold CV, but you can still obtain weights using K-fold CV results by calling the \code{stacking_weights()} or \code{pseudobma_weights()} function directly.} \item{...}{Unused, except for the generic to pass arguments to individual methods.} \item{method}{Either \code{"stacking"} (the default) or \code{"pseudobma"}, indicating which method to use for obtaining the weights. \code{"stacking"} refers to stacking of predictive distributions and \code{"pseudobma"} refers to pseudo-BMA+ weighting (or plain pseudo-BMA weighting if argument \code{BB} is \code{FALSE}).} \item{optim_method}{If \code{method="stacking"}, a string passed to the \code{method} argument of \code{\link[stats:constrOptim]{stats::constrOptim()}} to specify the optimization algorithm. The default is \code{optim_method="BFGS"}, but other options are available (see \code{\link[stats:optim]{stats::optim()}}).} \item{optim_control}{If \code{method="stacking"}, a list of control parameters for optimization passed to the \code{control} argument of \code{\link[stats:constrOptim]{stats::constrOptim()}}.} \item{BB}{Logical used when \code{"method"}=\code{"pseudobma"}. If \code{TRUE} (the default), the Bayesian bootstrap will be used to adjust the pseudo-BMA weighting, which is called pseudo-BMA+ weighting. It helps regularize the weight away from 0 and 1, so as to reduce the variance.} \item{BB_n}{For pseudo-BMA+ weighting only, the number of samples to use for the Bayesian bootstrap. The default is \code{BB_n=1000}.} \item{alpha}{Positive scalar shape parameter in the Dirichlet distribution used for the Bayesian bootstrap. The default is \code{alpha=1}, which corresponds to a uniform distribution on the simplex space.} \item{r_eff_list}{Optionally, a list of relative effective sample size estimates for the likelihood \code{(exp(log_lik))} of each observation in each model. See \code{\link[=psis]{psis()}} and \code{\link[=relative_eff]{relative_eff()}} helper function for computing \code{r_eff}. If \code{x} is a list of \code{"psis_loo"} objects then \code{r_eff_list} is ignored.} \item{cores}{The number of cores to use for parallelization. This defaults to the option \code{mc.cores} which can be set for an entire R session by \code{options(mc.cores = NUMBER)}. The old option \code{loo.cores} is now deprecated but will be given precedence over \code{mc.cores} until \code{loo.cores} is removed in a future release. 
\strong{As of version 2.0.0 the default is now 1 core if \code{mc.cores} is not set}, but we recommend using as many (or close to as many) cores as possible. \itemize{ \item Note for Windows 10 users: it is \strong{strongly} \href{https://github.com/stan-dev/loo/issues/94}{recommended} to avoid using the \code{.Rprofile} file to set \code{mc.cores} (using the \code{cores} argument or setting \code{mc.cores} interactively or in a script is fine). }} \item{lpd_point}{If calling \code{stacking_weights()} or \code{pseudobma_weights()} directly, a matrix of pointwise leave-one-out (or K-fold) log likelihoods evaluated for different models. It should be a \eqn{N} by \eqn{K} matrix where \eqn{N} is sample size and \eqn{K} is the number of models. Each column corresponds to one model. These values can be calculated approximately using \code{\link[=loo]{loo()}} or by running exact leave-one-out or K-fold cross-validation.} } \value{ A numeric vector containing one weight for each model. } \description{ Model averaging via stacking of predictive distributions, pseudo-BMA weighting or pseudo-BMA+ weighting with the Bayesian bootstrap. See Yao et al. (2018), Vehtari, Gelman, and Gabry (2017), and Vehtari, Simpson, Gelman, Yao, and Gabry (2024) for background. } \details{ \code{loo_model_weights()} is a wrapper around the \code{stacking_weights()} and \code{pseudobma_weights()} functions that implements stacking, pseudo-BMA, and pseudo-BMA+ weighting for combining multiple predictive distributions. We can use approximate or exact leave-one-out cross-validation (LOO-CV) or K-fold CV to estimate the expected log predictive density (ELPD). The stacking method (\code{method="stacking"}), which is the default for \code{loo_model_weights()}, combines all models by maximizing the leave-one-out predictive density of the combination distribution. That is, it finds the optimal linear combining weights for maximizing the leave-one-out log score. The pseudo-BMA method (\code{method="pseudobma"}) finds the relative weights proportional to the ELPD of each model. However, when \code{method="pseudobma"}, the default is to also use the Bayesian bootstrap (\code{BB=TRUE}), which corresponds to the pseudo-BMA+ method. The Bayesian bootstrap takes into account the uncertainty of finite data points and regularizes the weights away from the extremes of 0 and 1. In general, we recommend stacking for averaging predictive distributions, while pseudo-BMA+ can serve as a computationally easier alternative. } \examples{ \dontrun{ ### Demonstrating usage after fitting models with RStan library(rstan) # generate fake data from N(0,1). N <- 100 y <- rnorm(N, 0, 1) # Suppose we have three models: N(-1, sigma), N(0.5, sigma) and N(0.6,sigma). 
stan_code <- " data { int N; vector[N] y; real mu_fixed; } parameters { real sigma; } model { sigma ~ exponential(1); y ~ normal(mu_fixed, sigma); } generated quantities { vector[N] log_lik; for (n in 1:N) log_lik[n] = normal_lpdf(y[n]| mu_fixed, sigma); }" mod <- stan_model(model_code = stan_code) fit1 <- sampling(mod, data=list(N=N, y=y, mu_fixed=-1)) fit2 <- sampling(mod, data=list(N=N, y=y, mu_fixed=0.5)) fit3 <- sampling(mod, data=list(N=N, y=y, mu_fixed=0.6)) model_list <- list(fit1, fit2, fit3) log_lik_list <- lapply(model_list, extract_log_lik) # optional but recommended r_eff_list <- lapply(model_list, function(x) { ll_array <- extract_log_lik(x, merge_chains = FALSE) relative_eff(exp(ll_array)) }) # stacking method: wts1 <- loo_model_weights( log_lik_list, method = "stacking", r_eff_list = r_eff_list, optim_control = list(reltol=1e-10) ) print(wts1) # can also pass a list of psis_loo objects to avoid recomputing loo loo_list <- lapply(1:length(log_lik_list), function(j) { loo(log_lik_list[[j]], r_eff = r_eff_list[[j]]) }) wts2 <- loo_model_weights( loo_list, method = "stacking", optim_control = list(reltol=1e-10) ) all.equal(wts1, wts2) # can provide names to be used in the results loo_model_weights(setNames(loo_list, c("A", "B", "C"))) # pseudo-BMA+ method: set.seed(1414) loo_model_weights(loo_list, method = "pseudobma") # pseudo-BMA method (set BB = FALSE): loo_model_weights(loo_list, method = "pseudobma", BB = FALSE) # calling stacking_weights or pseudobma_weights directly lpd1 <- loo(log_lik_list[[1]], r_eff = r_eff_list[[1]])$pointwise[,1] lpd2 <- loo(log_lik_list[[2]], r_eff = r_eff_list[[2]])$pointwise[,1] lpd3 <- loo(log_lik_list[[3]], r_eff = r_eff_list[[3]])$pointwise[,1] stacking_weights(cbind(lpd1, lpd2, lpd3)) pseudobma_weights(cbind(lpd1, lpd2, lpd3)) pseudobma_weights(cbind(lpd1, lpd2, lpd3), BB = FALSE) } } \references{ Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. \emph{Statistics and Computing}. 27(5), 1413--1432. doi:10.1007/s11222-016-9696-4 (\href{https://link.springer.com/article/10.1007/s11222-016-9696-4}{journal version}, \href{https://arxiv.org/abs/1507.04544}{preprint arXiv:1507.04544}). Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. \emph{Journal of Machine Learning Research}, 25(72):1-58. \href{https://jmlr.org/papers/v25/19-556.html}{PDF} Yao, Y., Vehtari, A., Simpson, D., and Gelman, A. (2018) Using stacking to average Bayesian predictive distributions. \emph{Bayesian Analysis}, advance publication, doi:10.1214/17-BA1091. (\href{https://projecteuclid.org/euclid.ba/1516093227}{online}). } \seealso{ \itemize{ \item The \strong{loo} package \href{https://mc-stan.org/loo/articles/}{vignettes}, particularly \href{https://mc-stan.org/loo/articles/loo2-weights.html}{Bayesian Stacking and Pseudo-BMA weights using the \strong{loo} package}. \item \code{\link[=loo]{loo()}} for details on leave-one-out ELPD estimation. \item \code{\link[=constrOptim]{constrOptim()}} for the choice of optimization methods and control-parameters. \item \code{\link[=relative_eff]{relative_eff()}} for computing \code{r_eff}. 
} } loo/man/kfold-generic.Rd0000644000176200001440000000317513575772017014642 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/kfold-generic.R \name{kfold-generic} \alias{kfold-generic} \alias{kfold} \alias{is.kfold} \title{Generic function for K-fold cross-validation for developers} \usage{ kfold(x, ...) is.kfold(x) } \arguments{ \item{x}{A fitted model object.} \item{...}{Arguments to pass to specific methods.} } \value{ For developers defining a \code{kfold()} method for a class \code{"foo"}, the \code{kfold.foo()} function should return a list with class \code{c("kfold", "loo")} with at least the following named elements: \itemize{ \item \code{"estimates"}: A \verb{1x2} matrix containing the ELPD estimate and its standard error. The matrix must have row name "\code{elpd_kfold}" and column names \code{"Estimate"} and \code{"SE"}. \item \code{"pointwise"}: A \code{Nx1} matrix with column name \code{"elpd_kfold"} containing the pointwise contributions for each data point. } It is important for the object to have at least these classes and components so that it is compatible with other functions like \code{\link[=loo_compare]{loo_compare()}} and \code{print()} methods. } \description{ For developers of Bayesian modeling packages, \strong{loo} includes a generic function \code{kfold()} so that methods may be defined for K-fold CV without name conflicts between packages. See, for example, the \code{kfold()} methods in the \strong{rstanarm} and \strong{brms} packages. The \strong{Value} section below describes the objects that \code{kfold()} methods should return in order to be compatible with \code{\link[=loo_compare]{loo_compare()}} and the \strong{loo} package print methods. } loo/man/psis_approximate_posterior.Rd0000644000176200001440000000641314641333357017617 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/psis_approximate_posterior.R \name{psis_approximate_posterior} \alias{psis_approximate_posterior} \title{Diagnostics for Laplace and ADVI approximations and Laplace-loo and ADVI-loo} \usage{ psis_approximate_posterior( log_p = NULL, log_g = NULL, log_liks = NULL, cores, save_psis, ..., log_q = NULL ) } \arguments{ \item{log_p}{The log-posterior (target) evaluated at S samples from the proposal distribution (g). A vector of length S.} \item{log_g}{The log-density (proposal) evaluated at S samples from the proposal distribution (g). A vector of length S.} \item{log_liks}{A log-likelihood matrix of size S * N, where N is the number of observations and S is the number of samples from q. See \code{\link[=loo.matrix]{loo.matrix()}} for details. Default is \code{NULL}. Then only the posterior is evaluated using the k_hat diagnostic.} \item{cores}{The number of cores to use for parallelization. This defaults to the option \code{mc.cores} which can be set for an entire R session by \code{options(mc.cores = NUMBER)}. The old option \code{loo.cores} is now deprecated but will be given precedence over \code{mc.cores} until \code{loo.cores} is removed in a future release. \strong{As of version 2.0.0 the default is now 1 core if \code{mc.cores} is not set}, but we recommend using as many (or close to as many) cores as possible. 
\itemize{ \item Note for Windows 10 users: it is \strong{strongly} \href{https://github.com/stan-dev/loo/issues/94}{recommended} to avoid using the \code{.Rprofile} file to set \code{mc.cores} (using the \code{cores} argument or setting \code{mc.cores} interactively or in a script is fine). }} \item{save_psis}{Should the \code{psis} object created internally by \code{loo()} be saved in the returned object? The \code{loo()} function calls \code{\link[=psis]{psis()}} internally but by default discards the (potentially large) \code{psis} object after using it to compute the LOO-CV summaries. Setting \code{save_psis=TRUE} will add a \code{psis_object} component to the list returned by \code{loo}. This is useful if you plan to use the \code{\link[=E_loo]{E_loo()}} function to compute weighted expectations after running \code{loo}. Several functions in the \pkg{bayesplot} package also accept \code{psis} objects.} \item{log_q}{Deprecated argument name (the same as log_g).} } \value{ If log likelihoods are supplied, the function returns a \code{"loo"} object, otherwise the function returns a \code{"psis"} object. } \description{ Diagnostics for Laplace and ADVI approximations and Laplace-loo and ADVI-loo } \references{ Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. \emph{Statistics and Computing}. 27(5), 1413--1432. doi:10.1007/s11222-016-9696-4 (\href{https://link.springer.com/article/10.1007/s11222-016-9696-4}{journal version}, \href{https://arxiv.org/abs/1507.04544}{preprint arXiv:1507.04544}). Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. \emph{Journal of Machine Learning Research}, 25(72):1-58. \href{https://jmlr.org/papers/v25/19-556.html}{PDF} } \seealso{ \code{\link[=loo]{loo()}} and \code{\link[=psis]{psis()}} } \keyword{internal} loo/man/pointwise.Rd0000644000176200001440000000233514566461605014146 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/pointwise.R \name{pointwise} \alias{pointwise} \alias{pointwise.loo} \title{Convenience function for extracting pointwise estimates} \usage{ pointwise(x, estimate, ...) \method{pointwise}{loo}(x, estimate, ...) } \arguments{ \item{x}{A \code{loo} object, for example one returned by \code{\link[=loo]{loo()}}, \code{\link[=loo_subsample]{loo_subsample()}}, \code{\link[=loo_approximate_posterior]{loo_approximate_posterior()}}, \code{\link[=loo_moment_match]{loo_moment_match()}}, etc.} \item{estimate}{Which pointwise estimate to return. By default all are returned. The objects returned by the different functions (\code{\link[=loo]{loo()}}, \code{\link[=loo_subsample]{loo_subsample()}}, etc.) have slightly different estimates available. Typically at a minimum the estimates \code{elpd_loo}, \code{looic}, \code{mcse_elpd_loo}, \code{p_loo}, and \code{influence_pareto_k} will be available, but there may be others.} \item{...}{Currently ignored.} } \value{ A vector of length equal to the number of observations. 
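For example (illustrative), the other estimates named above can be extracted the same way:
x <- loo(example_loglik_array())
head(pointwise(x, "p_loo"))
head(pointwise(x, "influence_pareto_k"))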
} \description{ Convenience function for extracting pointwise estimates } \examples{ x <- loo(example_loglik_array()) pointwise(x, "elpd_loo") } loo/man/psislw.Rd0000644000176200001440000000476014641333357013446 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/psislw.R \name{psislw} \alias{psislw} \title{Pareto smoothed importance sampling (deprecated, old version)} \usage{ psislw( lw, wcp = 0.2, wtrunc = 3/4, cores = getOption("mc.cores", 1), llfun = NULL, llargs = NULL, ... ) } \arguments{ \item{lw}{A matrix or vector of log weights. For computing LOO, \code{lw = -log_lik}, the \emph{negative} of an \eqn{S} (simulations) by \eqn{N} (data points) pointwise log-likelihood matrix.} \item{wcp}{The proportion of importance weights to use for the generalized Pareto fit. The \code{100*wcp}\\% largest weights are used as the sample from which to estimate the parameters of the generalized Pareto distribution.} \item{wtrunc}{For truncating very large weights to \eqn{S}^\code{wtrunc}. Set to zero for no truncation.} \item{cores}{The number of cores to use for parallelization. This defaults to the option \code{mc.cores} which can be set for an entire R session by \code{options(mc.cores = NUMBER)}, the old option \code{loo.cores} is now deprecated but will be given precedence over \code{mc.cores} until it is removed. \strong{As of version 2.0.0, the default is now 1 core if \code{mc.cores} is not set, but we recommend using as many (or close to as many) cores as possible.}} \item{llfun, llargs}{See \code{\link[=loo.function]{loo.function()}}.} \item{...}{Ignored when \code{psislw()} is called directly. The \code{...} is only used internally when \code{psislw()} is called by the \code{\link[=loo]{loo()}} function.} } \value{ A named list with components \code{lw_smooth} (modified log weights) and \code{pareto_k} (estimated generalized Pareto shape parameter(s) k). } \description{ As of version \verb{2.0.0} this function is \strong{deprecated}. Please use the \code{\link[=psis]{psis()}} function for the new PSIS algorithm. } \references{ Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. \emph{Statistics and Computing}. 27(5), 1413--1432. doi:10.1007/s11222-016-9696-4 (\href{https://link.springer.com/article/10.1007/s11222-016-9696-4}{journal version}, \href{https://arxiv.org/abs/1507.04544}{preprint arXiv:1507.04544}). Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. \emph{Journal of Machine Learning Research}, 25(72):1-58. \href{https://jmlr.org/papers/v25/19-556.html}{PDF} } \seealso{ \link{pareto-k-diagnostic} for PSIS diagnostics. } loo/man/loo_compare.Rd0000644000176200001440000001300415122076122014400 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/loo_compare.R, R/loo_compare.psis_loo_ss_list.R \name{loo_compare} \alias{loo_compare} \alias{loo_compare.default} \alias{print.compare.loo} \alias{print.compare.loo_ss} \title{Model comparison} \usage{ loo_compare(x, ...) \method{loo_compare}{default}(x, ...) \method{print}{compare.loo}(x, ..., digits = 1, simplify = TRUE) \method{print}{compare.loo_ss}(x, ..., digits = 1, simplify = TRUE) } \arguments{ \item{x}{An object of class \code{"loo"} or a list of such objects. If a list is used then the list names will be used as the model names in the output. 
See \strong{Examples}.} \item{...}{Additional objects of class \code{"loo"}, if not passed in as a single list.} \item{digits}{For the print method only, the number of digits to use when printing.} \item{simplify}{For the print method only, should only the essential columns of the summary matrix be printed? The entire matrix is always returned, but by default only the most important columns are printed.} } \value{ A matrix with class \code{"compare.loo"} that has its own print method. See the \strong{Details} section. } \description{ Compare fitted models based on \link[=loo-glossary]{ELPD}. By default the print method shows only the most important information. Use \code{print(..., simplify=FALSE)} to print a more detailed summary. } \details{ When comparing two fitted models, we can estimate the difference in their expected predictive accuracy by the difference in \code{\link[=loo-glossary]{elpd_loo}} or \code{elpd_waic} (or multiplied by \eqn{-2}, if desired, to be on the deviance scale). When using \code{loo_compare()}, the returned matrix will have one row per model and several columns of estimates. The values in the \code{\link[=loo-glossary]{elpd_diff}} and \code{\link[=loo-glossary]{se_diff}} columns of the returned matrix are computed by making pairwise comparisons between each model and the model with the largest ELPD (the model in the first row). For this reason the \code{elpd_diff} column will always have the value \code{0} in the first row (i.e., the difference between the preferred model and itself) and negative values in subsequent rows for the remaining models. To compute the standard error of the difference in \link[=loo-glossary]{ELPD} --- which should not be expected to equal the difference of the standard errors --- we use a paired estimate to take advantage of the fact that the same set of \eqn{N} data points was used to fit both models. These calculations should be most useful when \eqn{N} is large, because then non-normality of the distribution is not such an issue when estimating the uncertainty in these sums. These standard errors, for all their flaws, should give a better sense of uncertainty than what is obtained using the current standard approach of comparing differences of deviances to a Chi-squared distribution, a practice derived for Gaussian linear models or asymptotically, and which only applies to nested models in any case. Sivula et al. (2022) discuss the conditions when the normal approximation used for SE and \code{se_diff} is good. If more than \eqn{11} models are compared, we internally recompute the model differences using the median model by ELPD as the baseline model. We then estimate whether the differences in predictive performance are potentially due to chance as described by McLatchie and Vehtari (2023). This will flag a warning if it is deemed that there is a risk of over-fitting due to the selection process. In that case users are recommended to avoid model selection based on LOO-CV, and instead to favor model averaging/stacking or projection predictive inference. } \examples{ # very artificial example, just for demonstration! 
LL <- example_loglik_array() loo1 <- loo(LL) # should be worst model when compared loo2 <- loo(LL + 1) # should be second best model when compared loo3 <- loo(LL + 2) # should be best model when compared comp <- loo_compare(loo1, loo2, loo3) print(comp, digits = 2) # show more details with simplify=FALSE # (will be the same for all models in this artificial example) print(comp, simplify = FALSE, digits = 3) # can use a list of objects with custom names # will use apple, banana, and cherry, as the names in the output loo_compare(list("apple" = loo1, "banana" = loo2, "cherry" = loo3)) \dontrun{ # works for waic (and kfold) too loo_compare(waic(LL), waic(LL - 10)) } } \references{ Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. \emph{Statistics and Computing}. 27(5), 1413--1432. doi:10.1007/s11222-016-9696-4 (\href{https://link.springer.com/article/10.1007/s11222-016-9696-4}{journal version}, \href{https://arxiv.org/abs/1507.04544}{preprint arXiv:1507.04544}). Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. \emph{Journal of Machine Learning Research}, 25(72):1-58. \href{https://jmlr.org/papers/v25/19-556.html}{PDF} Sivula, T, Magnusson, M., Matamoros A. A., and Vehtari, A. (2025). Uncertainty in Bayesian leave-one-out cross-validation based model comparison. \emph{Bayesian Analysis}. \doi{10.1214/25-BA1569} McLatchie, Y., and Vehtari, A. (2024). Efficient estimation and correction of selection-induced bias with order statistics. \emph{Statistics and Computing}. 34(132). \doi{10.1007/s11222-024-10442-4} } \seealso{ \itemize{ \item The \href{https://mc-stan.org/loo/articles/online-only/faq.html}{FAQ page} on the \strong{loo} website for answers to frequently asked questions. } } loo/man/print.loo.Rd0000644000176200001440000000263013701164066014036 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/print.R \name{print.loo} \alias{print.loo} \alias{print.waic} \alias{print.psis_loo} \alias{print.importance_sampling_loo} \alias{print.psis_loo_ap} \alias{print.psis} \alias{print.importance_sampling} \title{Print methods} \usage{ \method{print}{loo}(x, digits = 1, ...) \method{print}{waic}(x, digits = 1, ...) \method{print}{psis_loo}(x, digits = 1, plot_k = FALSE, ...) \method{print}{importance_sampling_loo}(x, digits = 1, plot_k = FALSE, ...) \method{print}{psis_loo_ap}(x, digits = 1, plot_k = FALSE, ...) \method{print}{psis}(x, digits = 1, plot_k = FALSE, ...) \method{print}{importance_sampling}(x, digits = 1, plot_k = FALSE, ...) } \arguments{ \item{x}{An object returned by \code{\link[=loo]{loo()}}, \code{\link[=psis]{psis()}}, or \code{\link[=waic]{waic()}}.} \item{digits}{An integer passed to \code{\link[base:Round]{base::round()}}.} \item{...}{Arguments passed to \code{\link[=plot.psis_loo]{plot.psis_loo()}} if \code{plot_k} is \code{TRUE}.} \item{plot_k}{Logical. If \code{TRUE} the estimates of the Pareto shape parameter \eqn{k} are plotted. Ignored if \code{x} was generated by \code{\link[=waic]{waic()}}. To just plot \eqn{k} without printing use the \link[=pareto-k-diagnostic]{plot()} method for 'loo' objects.} } \value{ \code{x}, invisibly. 
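A brief usage sketch (illustrative, using the example objects shipped with the package):
loo1 <- loo(example_loglik_array())
print(loo1, digits = 2)
print(loo1, plot_k = TRUE)   # also plots the Pareto k estimates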
} \description{ Print methods } \seealso{ \link{pareto-k-diagnostic} }
loo/man/crps.Rd0000644000176200001440000000757115074562565013075 0ustar liggesusers% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/crps.R
\name{crps} \alias{crps} \alias{scrps} \alias{loo_crps} \alias{loo_scrps} \alias{crps.matrix} \alias{crps.numeric} \alias{loo_crps.matrix} \alias{scrps.matrix} \alias{scrps.numeric} \alias{loo_scrps.matrix}
\title{Continuously ranked probability score}
\usage{ crps(x, ...) scrps(x, ...) loo_crps(x, ...) loo_scrps(x, ...) \method{crps}{matrix}(x, x2, y, ..., permutations = 1) \method{crps}{numeric}(x, x2, y, ..., permutations = 1) \method{loo_crps}{matrix}( x, x2, y, log_lik, ..., permutations = 1, r_eff = 1, cores = getOption("mc.cores", 1) ) \method{scrps}{matrix}(x, x2, y, ..., permutations = 1) \method{scrps}{numeric}(x, x2, y, ..., permutations = 1) \method{loo_scrps}{matrix}( x, x2, y, log_lik, ..., permutations = 1, r_eff = 1, cores = getOption("mc.cores", 1) ) }
\arguments{ \item{x}{An \code{S} by \code{N} matrix (draws by observations), or a vector of length \code{S} when only a single observation is provided in \code{y}.} \item{...}{Passed on to \code{\link[=E_loo]{E_loo()}} in the \verb{loo_*()} version of these functions.} \item{x2}{Independent draws from the same distribution as the draws in \code{x}. Should have the same dimensions as \code{x}.} \item{y}{A vector of observations or a single value.} \item{permutations}{An integer, with default value of 1, specifying how many times the expected value of |X - X'| (\verb{|x - x2|}) is computed. The row order of \code{x2} is shuffled because the elements of \code{x} and \code{x2} are typically drawn given the same parameter values. This happens, e.g., when one calls \code{posterior_predict()} twice for a fitted \pkg{rstanarm} or \pkg{brms} model. Generating more permutations is expected to decrease the variance of the computed expected value.} \item{log_lik}{A log-likelihood matrix the same size as \code{x}.} \item{r_eff}{An optional vector of relative effective sample size estimates containing one element per observation. See \code{\link[=psis]{psis()}} for details.} \item{cores}{The number of cores to use for parallelization of \verb{[psis()]}. See \code{\link[=psis]{psis()}} for details.} }
\value{ A list containing two elements: \code{estimates} and \code{pointwise}. The former reports the estimate and standard error, the latter the pointwise values. Following Bolin & Wallin (2023), a larger value is better. }
\description{ The \code{crps()} and \code{scrps()} functions and their \verb{loo_*()} counterparts can be used to compute the continuously ranked probability score (CRPS) and scaled CRPS (SCRPS) (as defined by Bolin and Wallin, 2023). CRPS is a proper scoring rule, and strictly proper when the first moment of the predictive distribution is finite. Both can be expressed in terms of samples from the predictive distribution. See, for example, Gneiting and Raftery (2007) for a comprehensive discussion of CRPS. }
\details{ To compute (S)CRPS, the user needs to provide two sets of draws, \code{x} and \code{x2}, from the predictive distribution. This is because the formulas used to compute CRPS involve an expectation of the absolute difference between \code{x} and \code{x2}, both having the same distribution. See the \code{permutations} argument, as well as Gneiting and Raftery (2007), for details.
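As a minimal sketch (illustrative only; \code{crps_one} is not a function in the package and this is not the package's internal implementation), the sample-based quantity computed for a single observation \code{y}, in the larger-is-better orientation described under \strong{Value}, can be written as
\preformatted{## illustrative helper, assuming draws x and x2 and a single observed value y
crps_one <- function(x, x2, y) {
  0.5 * mean(abs(x - x2)) - mean(abs(x - y))
}
}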
} \examples{
\dontrun{
# An example using rstanarm
library(rstanarm)
data("kidiq")
fit <- stan_glm(kid_score ~ mom_hs + mom_iq, data = kidiq)
ypred1 <- posterior_predict(fit)
ypred2 <- posterior_predict(fit)
crps(ypred1, ypred2, y = fit$y)
loo_crps(ypred1, ypred2, y = fit$y, log_lik = log_lik(fit))
}
}
\references{ Bolin, D., & Wallin, J. (2023). Local scale invariance and robustness of proper scoring rules. Statistical Science, 38(1):140-159. Gneiting, T., & Raftery, A. E. (2007). Strictly Proper Scoring Rules, Prediction, and Estimation. Journal of the American Statistical Association, 102(477), 359–378. }
loo/man/relative_eff.Rd0000644000176200001440000001073514411555606014553 0ustar liggesusers% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/effective_sample_sizes.R
\name{relative_eff} \alias{relative_eff} \alias{relative_eff.default} \alias{relative_eff.matrix} \alias{relative_eff.array} \alias{relative_eff.function} \alias{relative_eff.importance_sampling}
\title{Convenience function for computing relative efficiencies}
\usage{ relative_eff(x, ...) \method{relative_eff}{default}(x, chain_id, ...) \method{relative_eff}{matrix}(x, chain_id, ..., cores = getOption("mc.cores", 1)) \method{relative_eff}{array}(x, ..., cores = getOption("mc.cores", 1)) \method{relative_eff}{`function`}( x, chain_id, ..., cores = getOption("mc.cores", 1), data = NULL, draws = NULL ) \method{relative_eff}{importance_sampling}(x, ...) }
\arguments{ \item{x}{A vector, matrix, 3-D array, or function. See the \strong{Methods (by class)} section below for details on specifying \code{x}, but where "log-likelihood" is mentioned replace it with one of the following depending on the use case: \itemize{ \item For use with the \code{\link[=loo]{loo()}} function, the values in \code{x} (or generated by \code{x}, if a function) should be \strong{likelihood} values (i.e., \code{exp(log_lik)}), not on the log scale. \item For generic use with \code{\link[=psis]{psis()}}, the values in \code{x} should be the reciprocal of the importance ratios (i.e., \code{exp(-log_ratios)}). }} \item{chain_id}{A vector of length \code{NROW(x)} containing MCMC chain indexes for each row of \code{x} (if a matrix) or each value in \code{x} (if a vector). No \code{chain_id} is needed if \code{x} is a 3-D array. If there are \code{C} chains then valid chain indexes are values in \code{1:C}.} \item{cores}{The number of cores to use for parallelization.} \item{data, draws, ...}{Same as for the \code{\link[=loo]{loo()}} function method.} }
\value{ A vector of relative effective sample sizes. }
\description{ \code{relative_eff()} computes the MCMC effective sample size divided by the total sample size. }
\section{Methods (by class)}{ \itemize{ \item \code{relative_eff(default)}: A vector of length \eqn{S} (posterior sample size). \item \code{relative_eff(matrix)}: An \eqn{S} by \eqn{N} matrix, where \eqn{S} is the size of the posterior sample (with all chains merged) and \eqn{N} is the number of data points. \item \code{relative_eff(array)}: An \eqn{I} by \eqn{C} by \eqn{N} array, where \eqn{I} is the number of MCMC iterations per chain, \eqn{C} is the number of chains, and \eqn{N} is the number of data points. \item \code{relative_eff(`function`)}: A function \code{f()} that takes arguments \code{data_i} and \code{draws} and returns a vector containing the log-likelihood for a single observation \code{i} evaluated at each posterior draw.
The function should be written such that, for each observation \code{i} in \code{1:N}, evaluating \if{html}{\out{
}}\preformatted{f(data_i = data[i,, drop=FALSE], draws = draws) }\if{html}{\out{
}} results in a vector of length \code{S} (size of posterior sample). The log-likelihood function can also have additional arguments but \code{data_i} and \code{draws} are required. If using the function method then the arguments \code{data} and \code{draws} must also be specified in the call to \code{loo()}: \itemize{ \item \code{data}: A data frame or matrix containing the data (e.g. observed outcome and predictors) needed to compute the pointwise log-likelihood. For each observation \code{i}, the \code{i}th row of \code{data} will be passed to the \code{data_i} argument of the log-likelihood function. \item \code{draws}: An object containing the posterior draws for any parameters needed to compute the pointwise log-likelihood. Unlike \code{data}, which is indexed by observation, for each observation the entire object \code{draws} will be passed to the \code{draws} argument of the log-likelihood function. \item The \code{...} can be used if your log-likelihood function takes additional arguments. These arguments are used like the \code{draws} argument in that they are recycled for each observation. } \item \code{relative_eff(importance_sampling)}: If \code{x} is an object of class \code{"psis"}, \code{relative_eff()} simply returns the \code{r_eff} attribute of \code{x}. }} \examples{ LLarr <- example_loglik_array() LLmat <- example_loglik_matrix() dim(LLarr) dim(LLmat) rel_n_eff_1 <- relative_eff(exp(LLarr)) rel_n_eff_2 <- relative_eff(exp(LLmat), chain_id = rep(1:2, each = 500)) all.equal(rel_n_eff_1, rel_n_eff_2) } loo/man/loo.Rd0000644000176200001440000003334614641333357012720 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/loo.R \name{loo} \alias{loo} \alias{loo.array} \alias{loo.matrix} \alias{loo.function} \alias{loo_i} \alias{is.loo} \alias{is.psis_loo} \title{Efficient approximate leave-one-out cross-validation (LOO)} \usage{ loo(x, ...) \method{loo}{array}( x, ..., r_eff = 1, save_psis = FALSE, cores = getOption("mc.cores", 1), is_method = c("psis", "tis", "sis") ) \method{loo}{matrix}( x, ..., r_eff = 1, save_psis = FALSE, cores = getOption("mc.cores", 1), is_method = c("psis", "tis", "sis") ) \method{loo}{`function`}( x, ..., data = NULL, draws = NULL, r_eff = 1, save_psis = FALSE, cores = getOption("mc.cores", 1), is_method = c("psis", "tis", "sis") ) loo_i(i, llfun, ..., data = NULL, draws = NULL, r_eff = 1, is_method = "psis") is.loo(x) is.psis_loo(x) } \arguments{ \item{x}{A log-likelihood array, matrix, or function. The \strong{Methods (by class)} section, below, has detailed descriptions of how to specify the inputs for each method.} \item{r_eff}{Vector of relative effective sample size estimates for the likelihood (\code{exp(log_lik)}) of each observation. This is related to the relative efficiency of estimating the normalizing term in self-normalized importance sampling when using posterior draws obtained with MCMC. If MCMC draws are used and \code{r_eff} is not provided then the reported PSIS effective sample sizes and Monte Carlo error estimates can be over-optimistic. If the posterior draws are (near) independent then \code{r_eff=1} can be used. \code{r_eff} has to be a scalar (same value is used for all observations) or a vector with length equal to the number of observations. The default value is 1. See the \code{\link[=relative_eff]{relative_eff()}} helper functions for help computing \code{r_eff}.} \item{save_psis}{Should the \code{psis} object created internally by \code{loo()} be saved in the returned object? 
The \code{loo()} function calls \code{\link[=psis]{psis()}} internally but by default discards the (potentially large) \code{psis} object after using it to compute the LOO-CV summaries. Setting \code{save_psis=TRUE} will add a \code{psis_object} component to the list returned by \code{loo}. This is useful if you plan to use the \code{\link[=E_loo]{E_loo()}} function to compute weighted expectations after running \code{loo}. Several functions in the \pkg{bayesplot} package also accept \code{psis} objects.}
\item{cores}{The number of cores to use for parallelization. This defaults to the option \code{mc.cores} which can be set for an entire R session by \code{options(mc.cores = NUMBER)}. The old option \code{loo.cores} is now deprecated but will be given precedence over \code{mc.cores} until \code{loo.cores} is removed in a future release. \strong{As of version 2.0.0 the default is now 1 core if \code{mc.cores} is not set}, but we recommend using as many (or close to as many) cores as possible. \itemize{ \item Note for Windows 10 users: it is \strong{strongly} \href{https://github.com/stan-dev/loo/issues/94}{recommended} to avoid using the \code{.Rprofile} file to set \code{mc.cores} (using the \code{cores} argument or setting \code{mc.cores} interactively or in a script is fine). }}
\item{is_method}{The importance sampling method to use. The following methods are implemented: \itemize{ \item \code{\link[=psis]{"psis"}}: Pareto-Smoothed Importance Sampling (PSIS). Default method. \item \code{\link[=tis]{"tis"}}: Truncated Importance Sampling (TIS) with truncation at \code{sqrt(S)}, where \code{S} is the number of posterior draws. \item \code{\link[=sis]{"sis"}}: Standard Importance Sampling (SIS). }}
\item{data, draws, ...}{For the \code{loo.function()} method and the \code{loo_i()} function, these are the data, posterior draws, and other arguments to pass to the log-likelihood function. See the \strong{Methods (by class)} section below for details on how to specify these arguments.}
\item{i}{For \code{loo_i()}, an integer in \code{1:N}.}
\item{llfun}{For \code{loo_i()}, the same as \code{x} for the \code{loo.function()} method. A log-likelihood function as described in the \strong{Methods (by class)} section.} }
\value{ The \code{loo()} methods return a named list with class \code{c("psis_loo", "loo")} and components: \describe{ \item{\code{estimates}}{ A matrix with two columns (\code{Estimate}, \code{SE}) and three rows (\code{elpd_loo}, \code{p_loo}, \code{looic}). This contains point estimates and standard errors of the expected log pointwise predictive density (\code{\link[=loo-glossary]{elpd_loo}}), the effective number of parameters (\code{\link[=loo-glossary]{p_loo}}) and the LOO information criterion \code{looic} (which is just \code{-2 * elpd_loo}, i.e., converted to deviance scale). } \item{\code{pointwise}}{ A matrix with five columns (and number of rows equal to the number of observations) containing the pointwise contributions of the measures (\code{elpd_loo}, \code{mcse_elpd_loo}, \code{p_loo}, \code{looic}, \code{influence_pareto_k}). In addition to the three measures in \code{estimates}, we also report pointwise values of the Monte Carlo standard error of \code{\link[=loo-glossary]{elpd_loo}} (\code{\link[=loo-glossary]{mcse_elpd_loo}}), and statistics describing the influence of each observation on the posterior distribution (\code{influence_pareto_k}).
These are the estimates of the shape parameter \eqn{k} of the generalized Pareto fit to the importance ratios for each leave-one-out distribution (see the \link{pareto-k-diagnostic} page for details). } \item{\code{diagnostics}}{ A named list containing two vectors: \itemize{ \item \code{pareto_k}: Importance sampling reliability diagnostics. By default, these are equal to the \code{influence_pareto_k} in \code{pointwise}. Some algorithms can improve importance sampling reliability and modify these diagnostics. See the \link{pareto-k-diagnostic} page for details. \item \code{n_eff}: PSIS effective sample size estimates. } } \item{\code{psis_object}}{ This component will be \code{NULL} unless the \code{save_psis} argument is set to \code{TRUE} when calling \code{loo()}. In that case \code{psis_object} will be the object of class \code{"psis"} that is created when the \code{loo()} function calls \code{\link[=psis]{psis()}} internally to do the PSIS procedure. } } The \code{loo_i()} function returns a named list with components \code{pointwise} and \code{diagnostics}. These components have the same structure as the \code{pointwise} and \code{diagnostics} components of the object returned by \code{loo()} except they contain results for only a single observation. } \description{ The \code{loo()} methods for arrays, matrices, and functions compute PSIS-LOO CV, efficient approximate leave-one-out (LOO) cross-validation for Bayesian models using Pareto smoothed importance sampling (\link[=psis]{PSIS}). This is an implementation of the methods described in Vehtari, Gelman, and Gabry (2017) and Vehtari, Simpson, Gelman, Yao, and Gabry (2024). The \code{loo_i()} function enables testing log-likelihood functions for use with the \code{loo.function()} method. } \details{ The \code{loo()} function is an S3 generic and methods are provided for 3-D pointwise log-likelihood arrays, pointwise log-likelihood matrices, and log-likelihood functions. The array and matrix methods are the most convenient, but for models fit to very large datasets the \code{loo.function()} method is more memory efficient and may be preferable. } \section{Methods (by class)}{ \itemize{ \item \code{loo(array)}: An \eqn{I} by \eqn{C} by \eqn{N} array, where \eqn{I} is the number of MCMC iterations per chain, \eqn{C} is the number of chains, and \eqn{N} is the number of data points. \item \code{loo(matrix)}: An \eqn{S} by \eqn{N} matrix, where \eqn{S} is the size of the posterior sample (with all chains merged) and \eqn{N} is the number of data points. \item \code{loo(`function`)}: A function \code{f()} that takes arguments \code{data_i} and \code{draws} and returns a vector containing the log-likelihood for a single observation \code{i} evaluated at each posterior draw. The function should be written such that, for each observation \code{i} in \code{1:N}, evaluating \if{html}{\out{
}}\preformatted{f(data_i = data[i,, drop=FALSE], draws = draws) }\if{html}{\out{
}} results in a vector of length \code{S} (size of posterior sample). The log-likelihood function can also have additional arguments but \code{data_i} and \code{draws} are required. If using the function method then the arguments \code{data} and \code{draws} must also be specified in the call to \code{loo()}: \itemize{ \item \code{data}: A data frame or matrix containing the data (e.g. observed outcome and predictors) needed to compute the pointwise log-likelihood. For each observation \code{i}, the \code{i}th row of \code{data} will be passed to the \code{data_i} argument of the log-likelihood function. \item \code{draws}: An object containing the posterior draws for any parameters needed to compute the pointwise log-likelihood. Unlike \code{data}, which is indexed by observation, for each observation the entire object \code{draws} will be passed to the \code{draws} argument of the log-likelihood function. \item The \code{...} can be used if your log-likelihood function takes additional arguments. These arguments are used like the \code{draws} argument in that they are recycled for each observation. } }} \section{Defining \code{loo()} methods in a package}{ Package developers can define \code{loo()} methods for fitted models objects. See the example \code{loo.stanfit()} method in the \strong{Examples} section below for an example of defining a method that calls \code{loo.array()}. The \code{loo.stanreg()} method in the \strong{rstanarm} package is an example of defining a method that calls \code{loo.function()}. } \examples{ ### Array and matrix methods (using example objects included with loo package) # Array method LLarr <- example_loglik_array() rel_n_eff <- relative_eff(exp(LLarr)) loo(LLarr, r_eff = rel_n_eff, cores = 2) # Matrix method LLmat <- example_loglik_matrix() rel_n_eff <- relative_eff(exp(LLmat), chain_id = rep(1:2, each = 500)) loo(LLmat, r_eff = rel_n_eff, cores = 2) ### Using log-likelihood function instead of array or matrix set.seed(124) # Simulate data and draw from posterior N <- 50; K <- 10; S <- 100; a0 <- 3; b0 <- 2 p <- rbeta(1, a0, b0) y <- rbinom(N, size = K, prob = p) a <- a0 + sum(y); b <- b0 + N * K - sum(y) fake_posterior <- as.matrix(rbeta(S, a, b)) dim(fake_posterior) # S x 1 fake_data <- data.frame(y,K) dim(fake_data) # N x 2 llfun <- function(data_i, draws) { # each time called internally within loo the arguments will be equal to: # data_i: ith row of fake_data (fake_data[i,, drop=FALSE]) # draws: entire fake_posterior matrix dbinom(data_i$y, size = data_i$K, prob = draws, log = TRUE) } # Use the loo_i function to check that llfun works on a single observation # before running on all obs. For example, using the 3rd obs in the data: loo_3 <- loo_i(i = 3, llfun = llfun, data = fake_data, draws = fake_posterior) print(loo_3$pointwise[, "elpd_loo"]) # Use loo.function method (default r_eff=1 is used as this posterior not obtained via MCMC) loo_with_fn <- loo(llfun, draws = fake_posterior, data = fake_data) # If we look at the elpd_loo contribution from the 3rd obs it should be the # same as what we got above with the loo_i function and i=3: print(loo_with_fn$pointwise[3, "elpd_loo"]) print(loo_3$pointwise[, "elpd_loo"]) # Check that the loo.matrix method gives same answer as loo.function method log_lik_matrix <- sapply(1:N, function(i) { llfun(data_i = fake_data[i,, drop=FALSE], draws = fake_posterior) }) loo_with_mat <- loo(log_lik_matrix) all.equal(loo_with_mat$estimates, loo_with_fn$estimates) # should be TRUE! 
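# Added sketch (not part of the original examples): the "diagnostics" component
# described under Value can be inspected with the pareto-k helper functions;
# the 0.7 threshold below is just an illustrative cutoff.
pareto_k_table(loo_with_fn)
head(pareto_k_values(loo_with_fn))
pareto_k_ids(loo_with_fn, threshold = 0.7)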
\dontrun{
### For package developers: defining loo methods
# An example of a possible loo method for 'stanfit' objects (rstan package).
# A similar method is included in the rstan package.
# In order for users to be able to call loo(stanfit) instead of
# loo.stanfit(stanfit) the NAMESPACE needs to be handled appropriately
# (roxygen2 and devtools packages are good for that).
#
loo.stanfit <- function(x, pars = "log_lik", ..., save_psis = FALSE, cores = getOption("mc.cores", 1)) {
  stopifnot(length(pars) == 1L)
  LLarray <- loo::extract_log_lik(stanfit = x, parameter_name = pars, merge_chains = FALSE)
  r_eff <- loo::relative_eff(x = exp(LLarray), cores = cores)
  loo::loo.array(LLarray, r_eff = r_eff, cores = cores, save_psis = save_psis)
}
}
}
\references{ Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluation using leave-one-out cross-validation and WAIC. \emph{Statistics and Computing}. 27(5), 1413--1432. doi:10.1007/s11222-016-9696-4 (\href{https://link.springer.com/article/10.1007/s11222-016-9696-4}{journal version}, \href{https://arxiv.org/abs/1507.04544}{preprint arXiv:1507.04544}). Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024). Pareto smoothed importance sampling. \emph{Journal of Machine Learning Research}, 25(72):1-58. \href{https://jmlr.org/papers/v25/19-556.html}{PDF} }
\seealso{ \itemize{ \item The \strong{loo} package \href{https://mc-stan.org/loo/articles/index.html}{vignettes} for demonstrations. \item The \href{https://mc-stan.org/loo/articles/online-only/faq.html}{FAQ page} on the \strong{loo} website for answers to frequently asked questions. \item \code{\link[=psis]{psis()}} for the underlying Pareto Smoothed Importance Sampling (PSIS) procedure used in the LOO-CV approximation. \item \link{pareto-k-diagnostic} for convenience functions for looking at diagnostics. \item \code{\link[=loo_compare]{loo_compare()}} for model comparison. } }
loo/DESCRIPTION0000644000176200001440000000506215122444652012561 0ustar liggesusersType: Package
Package: loo
Title: Efficient Leave-One-Out Cross-Validation and WAIC for Bayesian Models
Version: 2.9.0
Date: 2025-12-22
Authors@R: c( person("Aki", "Vehtari", email = "Aki.Vehtari@aalto.fi", role = "aut"), person("Jonah", "Gabry", email = "jgabry@gmail.com", role = c("cre", "aut")), person("Måns", "Magnusson", role = "aut"), person("Yuling", "Yao", role = "aut"), person("Paul-Christian", "Bürkner", role = "aut"), person("Topi", "Paananen", role = "aut"), person("Andrew", "Gelman", role = "aut"), person("Ben", "Goodrich", role = "ctb"), person("Juho", "Piironen", role = "ctb"), person("Bruno", "Nicenboim", role = "ctb"), person("Leevi", "Lindgren", role = "ctb"), person("Visruth", "Srimath Kandali", role = "ctb") )
Maintainer: Jonah Gabry <jgabry@gmail.com>
Description: Efficient approximate leave-one-out cross-validation (LOO) for Bayesian models fit using Markov chain Monte Carlo, as described in Vehtari, Gelman, and Gabry (2017) <doi:10.1007/s11222-016-9696-4>. The approximation uses Pareto smoothed importance sampling (PSIS), a new procedure for regularizing importance weights. As a byproduct of the calculations, we also obtain approximate standard errors for estimated predictive errors and for the comparison of predictive errors between models. The package also provides methods for using stacking and other model weighting techniques to average Bayesian predictive distributions.
License: GPL (>= 3) URL: https://mc-stan.org/loo/, https://discourse.mc-stan.org BugReports: https://github.com/stan-dev/loo/issues Depends: R (>= 3.1.2) Imports: checkmate, matrixStats (>= 0.52), parallel, posterior (>= 1.5.0), stats Suggests: bayesplot (>= 1.7.0), brms (>= 2.10.0), ggplot2, graphics, knitr, rmarkdown, rstan, rstanarm (>= 2.19.0), rstantools, spdep, testthat (>= 3.0) VignetteBuilder: knitr Config/testthat/edition: 3 Config/testthat/parallel: true Config/testthat/start-first: loo_subsampling_cases, loo_subsampling Encoding: UTF-8 LazyData: TRUE RoxygenNote: 7.3.3 SystemRequirements: pandoc (>= 1.12.3), pandoc-citeproc NeedsCompilation: no Packaged: 2025-12-22 18:20:57 UTC; jgabry Author: Aki Vehtari [aut], Jonah Gabry [cre, aut], Måns Magnusson [aut], Yuling Yao [aut], Paul-Christian Bürkner [aut], Topi Paananen [aut], Andrew Gelman [aut], Ben Goodrich [ctb], Juho Piironen [ctb], Bruno Nicenboim [ctb], Leevi Lindgren [ctb], Visruth Srimath Kandali [ctb] Repository: CRAN Date/Publication: 2025-12-23 07:50:02 UTC
