maxLik/0000755000175100001440000000000015124517754011530 5ustar hornikusersmaxLik/tests/0000755000175100001440000000000014077525067012674 5ustar hornikusersmaxLik/tests/constraints.Rout.save0000644000175100001440000004257514077525067017070 0ustar hornikusers R version 3.5.3 (2019-03-11) -- "Great Truth" Copyright (C) 2019 The R Foundation for Statistical Computing Platform: x86_64-pc-linux-gnu (64-bit) R is free software and comes with ABSOLUTELY NO WARRANTY. You are welcome to redistribute it under certain conditions. Type 'license()' or 'licence()' for distribution details. R is a collaborative project with many contributors. Type 'contributors()' for more information and 'citation()' on how to cite R or R packages in publications. Type 'demo()' for some demos, 'help()' for on-line help, or 'help.start()' for an HTML browser interface to help. Type 'q()' to quit R. > ### Various tests for constrained optimization > ### > options(digits=4) > > ### -------------------- Normal mixture likelihood, no additional parameters -------------------- > ### param = c(rho, mean1, mean2) > ### > ### X = N(mean1) w/Pr rho > ### X = N(mean2) w/Pr 1-rho > ### > logLikMix <- function(param) { + ## a single likelihood value + rho <- param[1] + if(rho < 0 || rho > 1) + return(NA) + mu1 <- param[2] + mu2 <- param[3] + ll <- log(rho*dnorm(x - mu1) + (1 - rho)*dnorm(x - mu2)) + ll <- sum(ll) + ll + } > > gradLikMix <- function(param) { + rho <- param[1] + if(rho < 0 || rho > 1) + return(NA) + mu1 <- param[2] + mu2 <- param[3] + f1 <- dnorm(x - mu1) + f2 <- dnorm(x - mu2) + L <- rho*f1 + (1 - rho)*f2 + g <- matrix(0, length(x), 3) + g[,1] <- (f1 - f2)/L + g[,2] <- rho*(x - mu1)*f1/L + g[,3] <- (1 - rho)*(x - mu2)*f2/L + colSums(g) + g + } > > hessLikMix <- function(param) { + rho <- param[1] + if(rho < 0 || rho > 1) + return(NA) + mu1 <- param[2] + mu2 <- param[3] + f1 <- dnorm(x - mu1) + f2 <- dnorm(x - mu2) + L <- rho*f1 + (1 - rho)*f2 + dldrho <- (f1 - f2)/L + dldmu1 <- rho*(x - mu1)*f1/L + dldmu2 <- (1 - rho)*(x - mu2)*f2/L + h <- matrix(0, 3, 3) + h[1,1] <- -sum(dldrho*(f1 - f2)/L) + h[2,1] <- h[1,2] <- sum((x - mu1)*f1/L - dldmu1*dldrho) + h[3,1] <- h[1,3] <- sum(-(x - mu2)*f2/L - dldmu2*dldrho) + h[2,2] <- sum(rho*(-f1 + (x - mu1)^2*f1)/L - dldmu1^2) + h[2,3] <- h[3,2] <- -sum(dldmu1*dldmu2) + h[3,3] <- sum((1 - rho)*(-f2 + (x - mu2)^2*f2)/L - dldmu2^2) + h + } > > logLikMixInd <- function(param) { + ## individual obs-wise likelihood values + rho <- param[1] + if(rho < 0 || rho > 1) + return(NA) + mu1 <- param[2] + mu2 <- param[3] + ll <- log(rho*dnorm(x - mu1) + (1 - rho)*dnorm(x - mu2)) + ll <- sum(ll) + ll + } > > gradLikMixInd <- function(param) { + rho <- param[1] + if(rho < 0 || rho > 1) + return(NA) + mu1 <- param[2] + mu2 <- param[3] + f1 <- dnorm(x - mu1) + f2 <- dnorm(x - mu2) + L <- rho*f1 + (1 - rho)*f2 + g <- matrix(0, length(x), 3) + g[,1] <- (f1 - f2)/L + g[,2] <- rho*(x - mu1)*f1/L + g[,3] <- (1 - rho)*(x - mu2)*f2/L + colSums(g) + g + } > > ### -------------------------- > library(maxLik) Loading required package: miscTools Please cite the 'maxLik' package as: Henningsen, Arne and Toomet, Ott (2011). maxLik: A package for maximum likelihood estimation in R. Computational Statistics 26(3), 443-458. DOI 10.1007/s00180-010-0217-1. 
If you have questions, suggestions, or comments regarding the 'maxLik' package, please use a forum or 'tracker' at maxLik's R-Forge site: https://r-forge.r-project.org/projects/maxlik/ > ## mixed normal > set.seed(1) > N <- 100 > x <- c(rnorm(N, mean=-1), rnorm(N, mean=1)) > > ## ---------- INEQUALITY CONSTRAINTS ----------- > ## First test inequality constraints, numeric/analytical gradients > ## Inequality constraints: rho < 0.5, mu1 < -0.1, mu2 > 0.1 > A <- matrix(c(-1, 0, 0, + 0, -1, 0, + 0, 0, 1), 3, 3, byrow=TRUE) > B <- c(0.5, 0.1, 0.1) > start <- c(0.4, 0, 0.9) > ineqCon <- list(ineqA=A, ineqB=B) > ## analytic gradient > cat("Inequality constraints, analytic gradient & Hessian\n") Inequality constraints, analytic gradient & Hessian > a <- maxLik(logLikMix, grad=gradLikMix, hess=hessLikMix, + start=start, + constraints=ineqCon) > all.equal(coef(a), c(0.5, -1, 1), tolerance=0.1) [1] "Mean relative difference: 0.1624" > # TRUE: relative tolerance 0.045 > ## No analytic gradient > cat("Inequality constraints, numeric gradient & Hessian\n") Inequality constraints, numeric gradient & Hessian > a <- maxLik(logLikMix, + start=start, + constraints=ineqCon) > all.equal(coef(a), c(0.5, -1, 1), tolerance=0.1) [1] "Mean relative difference: 0.2547" > # should be close to the true values, but N is too small > ## NR method with inequality constraints > try( maxLik(logLikMix, start = start, constraints = ineqCon, method = "NR" ) ) Error in maxRoutine(fn = logLik, grad = grad, hess = hess, start = start, : Inequality constraints not implemented for maxNR > # Error in maxRoutine(fn = logLik, grad = grad, hess = hess, start = start, : > # Inequality constraints not implemented for maxNR > > ## BHHH method with inequality constraints > try( maxLik(logLikMix, start = start, constraints = ineqCon, method = "BHHH" ) ) Error in maxNR(fn = fn, grad = grad, hess = hess, start = start, finalHessian = finalHessian, : Inequality constraints not implemented for maxNR > # Error in maxNR(fn = fn, grad = grad, hess = hess, start = start, finalHessian = finalHessian, : > # Inequality constraints not implemented for maxNR > > ## ---------- EQUALITY CONSTRAINTS ----------------- > cat("Test for equality constraints mu1 + 2*mu2 = 0\n") Test for equality constraints mu1 + 2*mu2 = 0 > A <- matrix(c(0, 1, 2), 1, 3) > B <- 0 > eqCon <- list( eqA = A, eqB = B ) > ## default, numeric gradient > mlEq <- maxLik(logLikMix, start = start, constraints = eqCon, tol=0) > # only rely on gradient stopping condition > all.equal(coef(mlEq), c(0.33, -1.45, 0.72), tolerance=0.01, scale=1) [1] "Mean absolute difference: 0.1777" > ## default, individual likelihood > mlEqInd <- maxLik(logLikMixInd, start = start, constraints = eqCon, tol=0) > # only rely on gradient stopping condition > all.equal(coef(mlEq), coef(mlEqInd), tol=1e-4) [1] TRUE > ## default, analytic gradient > mlEqG <- maxLik(logLikMix, grad=gradLikMix, + start = start, constraints = eqCon ) > all.equal(coef(mlEq), coef(mlEqG), tolerance=1e-4) [1] TRUE > ## default, analytic gradient, individual likelihood > mlEqGInd <- maxLik(logLikMixInd, grad=gradLikMixInd, + start = start, constraints = eqCon ) > all.equal(coef(mlEqG), coef(mlEqGInd), tolerance=1e-4) [1] TRUE > ## default, analytic Hessian > mlEqH <- maxLik(logLikMix, grad=gradLikMix, hess=hessLikMix, + start=start, + constraints=eqCon) > all.equal(coef(mlEqG), coef(mlEqH), tolerance=1e-4) [1] TRUE > > > ## BFGS, numeric gradient > eqBFGS <- maxLik(logLikMix, + start=start, method="bfgs", + constraints=eqCon, + 
SUMTRho0=1) > all.equal(coef(eqBFGS), c(0.33, -1.45, 0.72), tolerance=0.01, scale=1) [1] "Mean absolute difference: 0.1777" > > ## BHHH, analytic gradient (numeric does not converge?) > eqBHHH <- maxLik(logLikMix, gradLikMix, + start=start, method="bhhh", + constraints=eqCon, + SUMTRho0=1) > all.equal(coef(eqBFGS), coef(eqBHHH), tol=1e-4) [1] "Mean relative difference: 0.003536" > > > ### ------------------ Now test additional parameters for the function ---- > ### similar mixture as above but rho is give as an extra parameter > ### > logLikMix2 <- function(param, rho) { + mu1 <- param[1] + mu2 <- param[2] + ll <- log(rho*dnorm(x - mu1) + (1 - rho)*dnorm(x - mu2)) + # ll <- sum(ll) + ll + } > > gradLikMix2 <- function(param, rho) { + mu1 <- param[1] + mu2 <- param[2] + f1 <- dnorm(x - mu1) + f2 <- dnorm(x - mu2) + L <- rho*f1 + (1 - rho)*f2 + g <- matrix(0, length(x), 2) + g[,1] <- rho*(x - mu1)*f1/L + g[,2] <- (1 - rho)*(x - mu2)*f2/L + # colSums(g) + g + } > > hessLikMix2 <- function(param, rho) { + mu1 <- param[1] + mu2 <- param[2] + f1 <- dnorm(x - mu1) + f2 <- dnorm(x - mu2) + L <- rho*f1 + (1 - rho)*f2 + dldrho <- (f1 - f2)/L + dldmu1 <- rho*(x - mu1)*f1/L + dldmu2 <- (1 - rho)*(x - mu2)*f2/L + h <- matrix(0, 2, 2) + h[1,1] <- sum(rho*(-f1 + (x - mu1)^2*f1)/L - dldmu1^2) + h[1,2] <- h[2,1] <- -sum(dldmu1*dldmu2) + h[2,2] <- sum((1 - rho)*(-f2 + (x - mu2)^2*f2)/L - dldmu2^2) + h + } > > ## ---------- Equality constraints & extra parameters ------------ > A <- matrix(c(1, 2), 1, 2) > B <- 0 > start <- c(0, 1) > ## We run only a few iterations as we want to test correct handling > ## of parameters, not the final value. We also avoid any > ## debug information > iterlim <- 3 > cat("Test for extra parameters for the function\n") Test for extra parameters for the function > ## NR, numeric gradient > cat("Newton-Raphson, numeric gradient\n") Newton-Raphson, numeric gradient > a <- maxLik(logLikMix2, + start=start, method="nr", + constraints=list(eqA=A, eqB=B), + iterlim=iterlim, SUMTRho0=1, rho=0.5) > all.equal(coef(a), c(-1.36, 0.68), tol=0.01) [1] "Mean relative difference: 0.3619" > ## NR, numeric hessian > a <- maxLik(logLikMix2, gradLikMix2, + start=start, method="nr", + constraints=list(eqA=A, eqB=B), + iterlim=iterlim, SUMTRho0=1, rho=0.5) > all.equal(coef(a), c(-1.36, 0.68), tol=0.01) [1] "Mean relative difference: 0.3619" > ## nr, analytic hessian > a <- maxLik(logLikMix2, gradLikMix2, hessLikMix2, + start=start, method="nr", + constraints=list(eqA=A, eqB=B), + iterlim=iterlim, SUMTRho0=1, rho=0.5) > all.equal(coef(a), c(-1.36, 0.68), tol=0.01) [1] "Mean relative difference: 0.3619" > ## BHHH > cat("BHHH, analytic gradient, numeric Hessian\n") BHHH, analytic gradient, numeric Hessian > a <- maxLik(logLikMix2, gradLikMix2, + start=start, method="bhhh", + constraints=list(eqA=A, eqB=B), + iterlim=iterlim, SUMTRho0=1, rho=0.5) > all.equal(coef(a), c(-1.36, 0.68), tol=0.01) [1] "Mean relative difference: 0.3512" > ## BHHH, analytic > a <- maxLik(logLikMix2, gradLikMix2, + start=start, method="bhhh", + constraints=list(eqA=A, eqB=B), + iterlim=iterlim, SUMTRho0=1, rho=0.5) > all.equal(coef(a), c(-1.36, 0.68), tol=0.01) [1] "Mean relative difference: 0.3512" > ## bfgs, no analytic gradient > a <- maxLik(logLikMix2, + start=start, method="bfgs", + constraints=list(eqA=A, eqB=B), + iterlim=iterlim, SUMTRho0=1, rho=0.5) > all.equal(coef(a), c(-1.36, 0.68), tol=0.01) [1] "Mean relative difference: 0.3381" > ## bfgs, analytic gradient > a <- maxLik(logLikMix2, + start=start, method="bfgs", + 
constraints=list(eqA=A, eqB=B), + iterlim=iterlim, SUMTRho0=1, rho=0.5) > all.equal(coef(a), c(-1.36, 0.68), tol=0.01) [1] "Mean relative difference: 0.3381" > ## SANN, analytic gradient > a <- maxLik(logLikMix2, gradLikMix2, + start=start, method="SANN", + constraints=list(eqA=A, eqB=B), + iterlim=iterlim, SUMTRho0=1, rho=0.5) Warning message: In (function (fn, grad = NULL, hess = NULL, start, maxRoutine, constraints, : problem in imposing equality constraints: the constraints are not satisfied (barrier value = 0.00173566161904632). Try setting 'SUMTTol' to 0 > all.equal(coef(a), c(-1.36, 0.68), tol=0.01) [1] "Mean relative difference: 0.2285" > ## NM, numeric > a <- maxLik(logLikMix2, + start=start, method="nm", + constraints=list(eqA=A, eqB=B), + iterlim=100, + # use more iters for NM + SUMTRho0=1, rho=0.5) > all.equal(coef(a), c(-1.36, 0.68), tol=0.01) [1] "Mean relative difference: 0.3621" > > ## -------------------- NR, multiple constraints -------------------- > f <- function(theta) exp(-theta %*% theta) > # test quadratic function > ## constraints: > ## theta1 + theta3 = 1 > ## theta1 + theta2 = 1 > A <- matrix(c(1, 0, 1, + 1, 1, 0), 2, 3, byrow=TRUE) > B <- c(-1, -1) > cat("NR, multiple constraints\n") NR, multiple constraints > a <- maxNR(f, start=c(1,1.1,2), constraints=list(eqA=A, eqB=B)) > theta <- coef(a) > all.equal(c(theta[1] + theta[3], theta[1] + theta[2]), c(1,1), tolerance=1e-4) [1] TRUE > ## Error handling for equality constraints > A <- matrix(c(1, 1), 1, 2) > B <- -1 > cat("Error handling: ncol(A) != lengths(start)\n") Error handling: ncol(A) != lengths(start) > try(a <- maxNR(f, start=c(1, 2, 3), constraints=list(eqA=A, eqB=B))) Error in sumt(fn = function (theta) : Equality constraint matrix A must have the same number of columns as the parameter length (currently 2 and 3) > # ncol(A) != length(start) > A <- matrix(c(1, 1), 1, 2) > B <- c(-1, 2) > try(a <- maxNR(f, start=c(1, 2), constraints=list(eqA=A, eqB=B))) Error in sumt(fn = function (theta) : Equality constraint matrix A must have the same number of rows as the matrix B (currently 1 and 2) > # nrow(A) != nrow(B) > ## > ## -------------- inequality constraints & extra paramters ---------------- > ## > ## mu1 < 1 > ## mu2 > -1 > A <- matrix(c(-1, 0, + 0, 1), 2,2, byrow=TRUE) > B <- c(1,1) > start <- c(0.8, 0.9) > ## > inEGrad <- maxLik(logLikMix2, gradLikMix2, + start=start, method="bfgs", + constraints=list(ineqA=A, ineqB=B), + rho=0.5) > all.equal(coef(inEGrad), c(-0.98, 1.12), tol=0.01) [1] "Mean relative difference: 0.2716" > ## > inE <- maxLik(logLikMix2, + start=start, method="bfgs", + constraints=list(ineqA=A, ineqB=B), + rho=0.5) > all.equal(coef(inEGrad), coef(inE), tol=1e-4) [1] TRUE > ## > inENM <- maxLik(logLikMix2, gradLikMix2, + start=start, method="nm", + constraints=list(ineqA=A, ineqB=B), + rho=0.5) > all.equal(coef(inEGrad), coef(inENM), tol=1e-3) [1] TRUE > # this is further off than gradient-based methods > ## ---------- test vector B for inequality -------------- > ## mu1 < 1 > ## mu2 > 2 > A <- matrix(c(-1, 0, + 0, 1), 2,2, byrow=TRUE) > B1 <- c(1,-2) > a <- maxLik(logLikMix2, gradLikMix2, + start=c(0.5, 2.5), method="bfgs", + constraints=list(ineqA=A, ineqB=B1), + rho=0.5) > theta <- coef(a) > all.equal(c(theta[1] < 1, theta[2] > 2), c(TRUE, TRUE)) [1] TRUE > # components should be larger than > # (-1, -2) > > ## > ## ---- ERROR HANDLING: insert wrong A and B forms ---- > ## > A2 <- c(-1, 0, 0, 1) > try(maxLik(logLikMix2, gradLikMix2, + start=start, method="bfgs", + 
constraints=list(ineqA=A2, ineqB=B), + print.level=1, rho=0.5) + ) Error in maxOptim(fn = fn, grad = grad, hess = hess, start = start, method = "BFGS", : Inequality constraint A must be a matrix Current dimension > # should explain that matrix needed > A2 <- matrix(c(-1, 0, 0, 1), 1, 4) > try(maxLik(logLikMix2, gradLikMix2, + start=start, method="bfgs", + constraints=list(ineqA=A2, ineqB=B), + print.level=1, rho=0.5) + ) Error in maxOptim(fn = fn, grad = grad, hess = hess, start = start, method = "BFGS", : Inequality constraint A must have the same number of columns as length of the parameter. Currently 4 and 2. > # should explain that wrong matrix > # dimension > B2 <- 1:3 > try(maxLik(logLikMix2, gradLikMix2, + start=start, method="bfgs", + constraints=list(ineqA=A, ineqB=B2), + print.level=1, rho=0.5) + ) Error in maxOptim(fn = fn, grad = grad, hess = hess, start = start, method = "BFGS", : Inequality constraints A and B suggest different number of constraints: 2 and 3 > # A & B do not match > cat("A & B do not match\n") A & B do not match > B2 <- matrix(1,2,2) > try(maxLik(logLikMix2, gradLikMix2, + start=start, method="bfgs", + constraints=list(ineqA=A, ineqB=B2), + print.level=1, rho=0.5) + ) Error in maxOptim(fn = fn, grad = grad, hess = hess, start = start, method = "BFGS", : Inequality constraint B must be a vector (or Nx1 matrix). Currently 2 columns > # B must be a vector > > ## ---- fixed parameters with constrained optimization ----- > ## Thanks to Bob Loos for finding this error. > ## Optimize 3D hat with one parameter fixed (== 2D hat). > ## Add an equality constraint on that > cat("Constraints + fixed parameters\n") Constraints + fixed parameters > hat3 <- function(param) { + ## Hat function. Hessian negative definite if sqrt(x^2 + y^2) < 0.5 + x <- param[1] + y <- param[2] + z <- param[3] + exp(-x^2-y^2-z^2) + } > sv <- c(1,1,1) > ## constraints: x + y + z >= 2.5 > A <- matrix(c(x=1,y=1,z=1), 1, 3) > B <- -2.5 > constraints <- list(ineqA=A, ineqB=B) > res <- maxBFGS(hat3, start=sv, constraints=constraints, fixed=3, + iterlim=3) > all.equal(coef(res), c(0.770, 0.770, 1), tol=0.01) [1] TRUE > > proc.time() user system elapsed 1.676 0.329 1.571 maxLik/tests/finalHessian.Rout.save0000644000175100001440000002665314077525067017124 0ustar hornikusers R version 4.0.3 (2020-10-10) -- "Bunny-Wunnies Freak Out" Copyright (C) 2020 The R Foundation for Statistical Computing Platform: x86_64-pc-linux-gnu (64-bit) R is free software and comes with ABSOLUTELY NO WARRANTY. You are welcome to redistribute it under certain conditions. Type 'license()' or 'licence()' for distribution details. R is a collaborative project with many contributors. Type 'contributors()' for more information and 'citation()' on how to cite R or R packages in publications. Type 'demo()' for some demos, 'help()' for on-line help, or 'help.start()' for an HTML browser interface to help. Type 'q()' to quit R. > ### Test the 'finalHessian' argument of optimization routines > > library(maxLik) Loading required package: miscTools Please cite the 'maxLik' package as: Henningsen, Arne and Toomet, Ott (2011). maxLik: A package for maximum likelihood estimation in R. Computational Statistics 26(3), 443-458. DOI 10.1007/s00180-010-0217-1. 
If you have questions, suggestions, or comments regarding the 'maxLik' package, please use a forum or 'tracker' at maxLik's R-Forge site: https://r-forge.r-project.org/projects/maxlik/ > set.seed( 4 ) > > # log-likelihood function, gradient, and Hessian for 1-parameter case (exponential distribution) > ll1i <- function(theta) { + if(!all(theta > 0)) + return(NA) + log(theta) - theta*t + } > ll1 <- function(theta) sum( log(theta) - theta*t ) > gr1i <- function(theta) 1/theta - t > gr1 <- function(theta) sum( 1/theta - t ) > hs1 <- function(theta) -100/theta^2 > t <- rexp( 100, 2 ) > > ## the same functions for 2-variable case (normal distribution) > ll2 <- function( param ) { + ## log likelihood function + mu <- param[ 1 ] + sigma <- param[ 2 ] + if(!(sigma > 0)) + return(NA) + # to avoid warnings in the output + N <- length( x ) + llValue <- -0.5 * N * log( 2 * pi ) - N * log( sigma ) - + 0.5 * sum( ( x - mu )^2 / sigma^2 ) + return( llValue ) + } > > ## log likelihood function (individual observations) > ll2i <- function( param ) { + mu <- param[ 1 ] + sigma <- param[ 2 ] + if(!(sigma > 0)) + return(NA) + # to avoid warnings in the output + llValues <- -0.5 * log( 2 * pi ) - log( sigma ) - + 0.5 * ( x - mu )^2 / sigma^2 + return( llValues ) + } > > gr2 <- function( param ) { + ## function to calculate analytical gradients + mu <- param[ 1 ] + sigma <- param[ 2 ] + N <- length( x ) + llGrad <- c( sum( ( x - mu ) / sigma^2 ), + - N / sigma + sum( ( x - mu )^2 / sigma^3 ) ) + return( llGrad ) + } > > ## function to calculate analytical gradients (individual observations) > gr2i <- function( param ) { + mu <- param[ 1 ] + sigma <- param[ 2 ] + llGrads <- cbind( ( x - mu ) / sigma^2, + - 1 / sigma + ( x - mu )^2 / sigma^3 ) + return( llGrads ) + } > > ## function to calculate analytical Hessians > hs2 <- function( param ) { + mu <- param[ 1 ] + sigma <- param[ 2 ] + N <- length( x ) + llHess <- matrix( c( + N * ( - 1 / sigma^2 ), + sum( - 2 * ( x - mu ) / sigma^3 ), + sum( - 2 * ( x - mu ) / sigma^3 ), + N / sigma^2 + sum( - 3 * ( x - mu )^2 / sigma^4 ) ), + nrow = 2, ncol = 2 ) + return( llHess ) + } > x <- rnorm(100, 1, 2) > > > ## NR > # Estimate with only function values (single parameter) > a <- maxLik( ll1i, gr1i, start = 1, method = "NR" ) > summary(a ) -------------------------------------------- Maximum Likelihood estimation Newton-Raphson maximisation, 5 iterations Return code 1: gradient close to zero (gradtol) Log-Likelihood: -25.05386 1 free parameters Estimates: Estimate Std. error t value Pr(> t) [1,] 2.1159 0.2116 10 <2e-16 *** --- Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1 -------------------------------------------- > b <- maxLik( ll1i, gr1i, start = 1, method = "NR", finalHessian="bhhh") > # should issue a warning as BHHH not possible > summary(b ) -------------------------------------------- Maximum Likelihood estimation Newton-Raphson maximisation, 5 iterations Return code 1: gradient close to zero (gradtol) Log-Likelihood: -25.05386 1 free parameters Estimates: Estimate Std. error t value Pr(> t) [1,] 2.1159 0.2145 9.863 <2e-16 *** --- Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 
0.1 ' ' 1 -------------------------------------------- > c <- maxLik( ll1i, gr1i, start = 1, method = "NR", finalHessian=FALSE) > summary(c) -------------------------------------------- Maximum Likelihood estimation Newton-Raphson maximisation, 5 iterations Return code 1: gradient close to zero (gradtol) Log-Likelihood: -25.05386 1 free parameters Estimates: Estimate t value Pr(> t) [1,] 2.116 NA NA -------------------------------------------- > ## (vector parameter) > a <- maxLik( ll2, gr2, start = c(0,1), method = "NR" ) > summary(a ) -------------------------------------------- Maximum Likelihood estimation Newton-Raphson maximisation, 7 iterations Return code 1: gradient close to zero (gradtol) Log-Likelihood: -212.7524 2 free parameters Estimates: Estimate Std. error t value Pr(> t) [1,] 0.8532 0.2031 4.201 2.66e-05 *** [2,] 2.0311 0.1436 14.142 < 2e-16 *** --- Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1 -------------------------------------------- > b <- maxLik( ll2, gr2, start = c(0,1), method = "NR", finalHessian="bhhh") Warning message: In maxNRCompute(fn = function (theta, fnOrig, gradOrig = NULL, hessOrig = NULL, : For computing the final Hessian by 'BHHH' method, the log-likelihood or gradient must be supplied by observations > # should issue a warning as BHHH not possible > summary(b ) -------------------------------------------- Maximum Likelihood estimation Newton-Raphson maximisation, 7 iterations Return code 1: gradient close to zero (gradtol) Log-Likelihood: -212.7524 2 free parameters Estimates: Estimate t value Pr(> t) [1,] 0.8532 NA NA [2,] 2.0311 NA NA -------------------------------------------- > c <- maxLik( ll2, gr2, start = c(0,1), method = "NR", finalHessian=FALSE) > summary(c) -------------------------------------------- Maximum Likelihood estimation Newton-Raphson maximisation, 7 iterations Return code 1: gradient close to zero (gradtol) Log-Likelihood: -212.7524 2 free parameters Estimates: Estimate t value Pr(> t) [1,] 0.8532 NA NA [2,] 2.0311 NA NA -------------------------------------------- > > ## BFGSR > # Estimate with only function values (single parameter) > a <- maxLik( ll1i, gr1i, start = 1, method = "BFGSR" ) > summary(a ) -------------------------------------------- Maximum Likelihood estimation BFGSR maximization, 26 iterations Return code 2: successive function values within tolerance limit (tol) Log-Likelihood: -25.05386 1 free parameters Estimates: Estimate Std. error t value Pr(> t) [1,] 2.1159 0.2116 10 <2e-16 *** --- Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1 -------------------------------------------- > b <- maxLik( ll1i, gr1i, start = 1, method = "BFGSR", finalHessian="bhhh") > # should issue a warning as BHHH not possible > summary(b ) -------------------------------------------- Maximum Likelihood estimation BFGSR maximization, 26 iterations Return code 2: successive function values within tolerance limit (tol) Log-Likelihood: -25.05386 1 free parameters Estimates: Estimate Std. error t value Pr(> t) [1,] 2.1159 0.2145 9.863 <2e-16 *** --- Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 
0.1 ' ' 1 -------------------------------------------- > c <- maxLik( ll1i, gr1i, start = 1, method = "BFGSR", finalHessian=FALSE) > summary(c) -------------------------------------------- Maximum Likelihood estimation BFGSR maximization, 26 iterations Return code 2: successive function values within tolerance limit (tol) Log-Likelihood: -25.05386 1 free parameters Estimates: Estimate t value Pr(> t) [1,] 2.116 NA NA -------------------------------------------- > # Estimate with only function values (vector parameter) > a <- maxLik( ll2, gr2, start = c(0,1), method = "BFGSR" ) > summary(a ) -------------------------------------------- Maximum Likelihood estimation BFGSR maximization, 22 iterations Return code 2: successive function values within tolerance limit (tol) Log-Likelihood: -212.7524 2 free parameters Estimates: Estimate Std. error t value Pr(> t) [1,] 0.8528 0.2031 4.199 2.68e-05 *** [2,] 2.0309 0.1436 14.144 < 2e-16 *** --- Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1 -------------------------------------------- > b <- maxLik( ll2, gr2, start = c(0,1), method = "BFGSR", finalHessian="bhhh") Warning message: In maxBFGSRCompute(fn = function (theta, fnOrig, gradOrig = NULL, : For computing the final Hessian by 'BHHH' method, the log-likelihood or gradient must be supplied by observations > # should issue a warning as BHHH not possible > summary(b ) -------------------------------------------- Maximum Likelihood estimation BFGSR maximization, 22 iterations Return code 2: successive function values within tolerance limit (tol) Log-Likelihood: -212.7524 2 free parameters Estimates: Estimate t value Pr(> t) [1,] 0.8528 NA NA [2,] 2.0309 NA NA -------------------------------------------- > c <- maxLik( ll2, gr2, start = c(0,1), method = "BFGSR", finalHessian=FALSE) > summary(c) -------------------------------------------- Maximum Likelihood estimation BFGSR maximization, 22 iterations Return code 2: successive function values within tolerance limit (tol) Log-Likelihood: -212.7524 2 free parameters Estimates: Estimate t value Pr(> t) [1,] 0.8528 NA NA [2,] 2.0309 NA NA -------------------------------------------- > > > ### Nelder-Mead > ## Individual observations only > b <- maxLik( ll2i, start = c(0,1), method = "NM", finalHessian="bhhh") > summary(b) -------------------------------------------- Maximum Likelihood estimation Nelder-Mead maximization, 63 iterations Return code 0: successful convergence Log-Likelihood: -212.7524 2 free parameters Estimates: Estimate Std. error t value Pr(> t) [1,] 0.8530 0.2032 4.199 2.69e-05 *** [2,] 2.0312 0.1670 12.163 < 2e-16 *** --- Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 
0.1 ' ' 1 -------------------------------------------- > ## Individual observations, summed gradient > b <- maxLik( ll2i, gr2, start = c(0,1), method = "NM", finalHessian="bhhh") Warning message: In maxOptim(fn = fn, grad = grad, hess = hess, start = start, method = "Nelder-Mead", : For computing the final Hessian by 'BHHH' method, the log-likelihood or gradient must be supplied by observations > # should issue a warning as BHHH not selected > # (yes, could do it based on individual likelihood and numeric gradient) > summary(b) -------------------------------------------- Maximum Likelihood estimation Nelder-Mead maximization, 63 iterations Return code 0: successful convergence Log-Likelihood: -212.7524 2 free parameters Estimates: Estimate t value Pr(> t) [1,] 0.853 NA NA [2,] 2.031 NA NA -------------------------------------------- > > proc.time() user system elapsed 0.436 0.020 0.447 maxLik/tests/tinytest.R0000644000175100001440000000015614077525067014704 0ustar hornikusers### Run tinytest tests if(requireNamespace("tinytest", quietly=TRUE)) { tinytest::test_package("maxLik") } maxLik/tests/finalHessian.R0000644000175100001440000000772014077525067015431 0ustar hornikusers### Test the 'finalHessian' argument of optimization routines library(maxLik) set.seed( 4 ) # log-likelihood function, gradient, and Hessian for 1-parameter case (exponential distribution) ll1i <- function(theta) { if(!all(theta > 0)) return(NA) log(theta) - theta*t } ll1 <- function(theta) sum( log(theta) - theta*t ) gr1i <- function(theta) 1/theta - t gr1 <- function(theta) sum( 1/theta - t ) hs1 <- function(theta) -100/theta^2 t <- rexp( 100, 2 ) ## the same functions for 2-variable case (normal distribution) ll2 <- function( param ) { ## log likelihood function mu <- param[ 1 ] sigma <- param[ 2 ] if(!(sigma > 0)) return(NA) # to avoid warnings in the output N <- length( x ) llValue <- -0.5 * N * log( 2 * pi ) - N * log( sigma ) - 0.5 * sum( ( x - mu )^2 / sigma^2 ) return( llValue ) } ## log likelihood function (individual observations) ll2i <- function( param ) { mu <- param[ 1 ] sigma <- param[ 2 ] if(!(sigma > 0)) return(NA) # to avoid warnings in the output llValues <- -0.5 * log( 2 * pi ) - log( sigma ) - 0.5 * ( x - mu )^2 / sigma^2 return( llValues ) } gr2 <- function( param ) { ## function to calculate analytical gradients mu <- param[ 1 ] sigma <- param[ 2 ] N <- length( x ) llGrad <- c( sum( ( x - mu ) / sigma^2 ), - N / sigma + sum( ( x - mu )^2 / sigma^3 ) ) return( llGrad ) } ## function to calculate analytical gradients (individual observations) gr2i <- function( param ) { mu <- param[ 1 ] sigma <- param[ 2 ] llGrads <- cbind( ( x - mu ) / sigma^2, - 1 / sigma + ( x - mu )^2 / sigma^3 ) return( llGrads ) } ## function to calculate analytical Hessians hs2 <- function( param ) { mu <- param[ 1 ] sigma <- param[ 2 ] N <- length( x ) llHess <- matrix( c( N * ( - 1 / sigma^2 ), sum( - 2 * ( x - mu ) / sigma^3 ), sum( - 2 * ( x - mu ) / sigma^3 ), N / sigma^2 + sum( - 3 * ( x - mu )^2 / sigma^4 ) ), nrow = 2, ncol = 2 ) return( llHess ) } x <- rnorm(100, 1, 2) ## NR # Estimate with only function values (single parameter) a <- maxLik( ll1i, gr1i, start = 1, method = "NR" ) summary(a ) b <- maxLik( ll1i, gr1i, start = 1, method = "NR", finalHessian="bhhh") # should issue a warning as BHHH not possible summary(b ) c <- maxLik( ll1i, gr1i, start = 1, method = "NR", finalHessian=FALSE) summary(c) ## (vector parameter) a <- maxLik( ll2, gr2, start = c(0,1), method = "NR" ) summary(a ) b <- maxLik( ll2, gr2, start 
= c(0,1), method = "NR", finalHessian="bhhh") # should issue a warning as BHHH not possible summary(b ) c <- maxLik( ll2, gr2, start = c(0,1), method = "NR", finalHessian=FALSE) summary(c) ## BFGSR # Estimate with only function values (single parameter) a <- maxLik( ll1i, gr1i, start = 1, method = "BFGSR" ) summary(a ) b <- maxLik( ll1i, gr1i, start = 1, method = "BFGSR", finalHessian="bhhh") # should issue a warning as BHHH not possible summary(b ) c <- maxLik( ll1i, gr1i, start = 1, method = "BFGSR", finalHessian=FALSE) summary(c) # Estimate with only function values (vector parameter) a <- maxLik( ll2, gr2, start = c(0,1), method = "BFGSR" ) summary(a ) b <- maxLik( ll2, gr2, start = c(0,1), method = "BFGSR", finalHessian="bhhh") # should issue a warning as BHHH not possible summary(b ) c <- maxLik( ll2, gr2, start = c(0,1), method = "BFGSR", finalHessian=FALSE) summary(c) ### Nelder-Mead ## Individual observations only b <- maxLik( ll2i, start = c(0,1), method = "NM", finalHessian="bhhh") summary(b) ## Individual observations, summed gradient b <- maxLik( ll2i, gr2, start = c(0,1), method = "NM", finalHessian="bhhh") # should issue a warning as BHHH not selected # (yes, could do it based on individual likelihood and numeric gradient) summary(b) maxLik/tests/constraints.R0000644000175100001440000003177714077525067015405 0ustar hornikusers### Various tests for constrained optimization ### options(digits=4) ### -------------------- Normal mixture likelihood, no additional parameters -------------------- ### param = c(rho, mean1, mean2) ### ### X = N(mean1) w/Pr rho ### X = N(mean2) w/Pr 1-rho ### logLikMix <- function(param) { ## a single likelihood value rho <- param[1] if(rho < 0 || rho > 1) return(NA) mu1 <- param[2] mu2 <- param[3] ll <- log(rho*dnorm(x - mu1) + (1 - rho)*dnorm(x - mu2)) ll <- sum(ll) ll } gradLikMix <- function(param) { rho <- param[1] if(rho < 0 || rho > 1) return(NA) mu1 <- param[2] mu2 <- param[3] f1 <- dnorm(x - mu1) f2 <- dnorm(x - mu2) L <- rho*f1 + (1 - rho)*f2 g <- matrix(0, length(x), 3) g[,1] <- (f1 - f2)/L g[,2] <- rho*(x - mu1)*f1/L g[,3] <- (1 - rho)*(x - mu2)*f2/L colSums(g) g } hessLikMix <- function(param) { rho <- param[1] if(rho < 0 || rho > 1) return(NA) mu1 <- param[2] mu2 <- param[3] f1 <- dnorm(x - mu1) f2 <- dnorm(x - mu2) L <- rho*f1 + (1 - rho)*f2 dldrho <- (f1 - f2)/L dldmu1 <- rho*(x - mu1)*f1/L dldmu2 <- (1 - rho)*(x - mu2)*f2/L h <- matrix(0, 3, 3) h[1,1] <- -sum(dldrho*(f1 - f2)/L) h[2,1] <- h[1,2] <- sum((x - mu1)*f1/L - dldmu1*dldrho) h[3,1] <- h[1,3] <- sum(-(x - mu2)*f2/L - dldmu2*dldrho) h[2,2] <- sum(rho*(-f1 + (x - mu1)^2*f1)/L - dldmu1^2) h[2,3] <- h[3,2] <- -sum(dldmu1*dldmu2) h[3,3] <- sum((1 - rho)*(-f2 + (x - mu2)^2*f2)/L - dldmu2^2) h } logLikMixInd <- function(param) { ## individual obs-wise likelihood values rho <- param[1] if(rho < 0 || rho > 1) return(NA) mu1 <- param[2] mu2 <- param[3] ll <- log(rho*dnorm(x - mu1) + (1 - rho)*dnorm(x - mu2)) ll <- sum(ll) ll } gradLikMixInd <- function(param) { rho <- param[1] if(rho < 0 || rho > 1) return(NA) mu1 <- param[2] mu2 <- param[3] f1 <- dnorm(x - mu1) f2 <- dnorm(x - mu2) L <- rho*f1 + (1 - rho)*f2 g <- matrix(0, length(x), 3) g[,1] <- (f1 - f2)/L g[,2] <- rho*(x - mu1)*f1/L g[,3] <- (1 - rho)*(x - mu2)*f2/L colSums(g) g } ### -------------------------- library(maxLik) ## mixed normal set.seed(1) N <- 100 x <- c(rnorm(N, mean=-1), rnorm(N, mean=1)) ## ---------- INEQUALITY CONSTRAINTS ----------- ## First test inequality constraints, numeric/analytical gradients ## Inequality 
constraints: rho < 0.5, mu1 < -0.1, mu2 > 0.1 A <- matrix(c(-1, 0, 0, 0, -1, 0, 0, 0, 1), 3, 3, byrow=TRUE) B <- c(0.5, 0.1, 0.1) start <- c(0.4, 0, 0.9) ineqCon <- list(ineqA=A, ineqB=B) ## analytic gradient cat("Inequality constraints, analytic gradient & Hessian\n") a <- maxLik(logLikMix, grad=gradLikMix, hess=hessLikMix, start=start, constraints=ineqCon) all.equal(coef(a), c(0.5, -1, 1), tolerance=0.1) # TRUE: relative tolerance 0.045 ## No analytic gradient cat("Inequality constraints, numeric gradient & Hessian\n") a <- maxLik(logLikMix, start=start, constraints=ineqCon) all.equal(coef(a), c(0.5, -1, 1), tolerance=0.1) # should be close to the true values, but N is too small ## NR method with inequality constraints try( maxLik(logLikMix, start = start, constraints = ineqCon, method = "NR" ) ) # Error in maxRoutine(fn = logLik, grad = grad, hess = hess, start = start, : # Inequality constraints not implemented for maxNR ## BHHH method with inequality constraints try( maxLik(logLikMix, start = start, constraints = ineqCon, method = "BHHH" ) ) # Error in maxNR(fn = fn, grad = grad, hess = hess, start = start, finalHessian = finalHessian, : # Inequality constraints not implemented for maxNR ## ---------- EQUALITY CONSTRAINTS ----------------- cat("Test for equality constraints mu1 + 2*mu2 = 0\n") A <- matrix(c(0, 1, 2), 1, 3) B <- 0 eqCon <- list( eqA = A, eqB = B ) ## default, numeric gradient mlEq <- maxLik(logLikMix, start = start, constraints = eqCon, tol=0) # only rely on gradient stopping condition all.equal(coef(mlEq), c(0.33, -1.45, 0.72), tolerance=0.01, scale=1) ## default, individual likelihood mlEqInd <- maxLik(logLikMixInd, start = start, constraints = eqCon, tol=0) # only rely on gradient stopping condition all.equal(coef(mlEq), coef(mlEqInd), tol=1e-4) ## default, analytic gradient mlEqG <- maxLik(logLikMix, grad=gradLikMix, start = start, constraints = eqCon ) all.equal(coef(mlEq), coef(mlEqG), tolerance=1e-4) ## default, analytic gradient, individual likelihood mlEqGInd <- maxLik(logLikMixInd, grad=gradLikMixInd, start = start, constraints = eqCon ) all.equal(coef(mlEqG), coef(mlEqGInd), tolerance=1e-4) ## default, analytic Hessian mlEqH <- maxLik(logLikMix, grad=gradLikMix, hess=hessLikMix, start=start, constraints=eqCon) all.equal(coef(mlEqG), coef(mlEqH), tolerance=1e-4) ## BFGS, numeric gradient eqBFGS <- maxLik(logLikMix, start=start, method="bfgs", constraints=eqCon, SUMTRho0=1) all.equal(coef(eqBFGS), c(0.33, -1.45, 0.72), tolerance=0.01, scale=1) ## BHHH, analytic gradient (numeric does not converge?) 
eqBHHH <- maxLik(logLikMix, gradLikMix, start=start, method="bhhh", constraints=eqCon, SUMTRho0=1) all.equal(coef(eqBFGS), coef(eqBHHH), tol=1e-4) ### ------------------ Now test additional parameters for the function ---- ### similar mixture as above but rho is give as an extra parameter ### logLikMix2 <- function(param, rho) { mu1 <- param[1] mu2 <- param[2] ll <- log(rho*dnorm(x - mu1) + (1 - rho)*dnorm(x - mu2)) # ll <- sum(ll) ll } gradLikMix2 <- function(param, rho) { mu1 <- param[1] mu2 <- param[2] f1 <- dnorm(x - mu1) f2 <- dnorm(x - mu2) L <- rho*f1 + (1 - rho)*f2 g <- matrix(0, length(x), 2) g[,1] <- rho*(x - mu1)*f1/L g[,2] <- (1 - rho)*(x - mu2)*f2/L # colSums(g) g } hessLikMix2 <- function(param, rho) { mu1 <- param[1] mu2 <- param[2] f1 <- dnorm(x - mu1) f2 <- dnorm(x - mu2) L <- rho*f1 + (1 - rho)*f2 dldrho <- (f1 - f2)/L dldmu1 <- rho*(x - mu1)*f1/L dldmu2 <- (1 - rho)*(x - mu2)*f2/L h <- matrix(0, 2, 2) h[1,1] <- sum(rho*(-f1 + (x - mu1)^2*f1)/L - dldmu1^2) h[1,2] <- h[2,1] <- -sum(dldmu1*dldmu2) h[2,2] <- sum((1 - rho)*(-f2 + (x - mu2)^2*f2)/L - dldmu2^2) h } ## ---------- Equality constraints & extra parameters ------------ A <- matrix(c(1, 2), 1, 2) B <- 0 start <- c(0, 1) ## We run only a few iterations as we want to test correct handling ## of parameters, not the final value. We also avoid any ## debug information iterlim <- 3 cat("Test for extra parameters for the function\n") ## NR, numeric gradient cat("Newton-Raphson, numeric gradient\n") a <- maxLik(logLikMix2, start=start, method="nr", constraints=list(eqA=A, eqB=B), iterlim=iterlim, SUMTRho0=1, rho=0.5) all.equal(coef(a), c(-1.36, 0.68), tol=0.01) ## NR, numeric hessian a <- maxLik(logLikMix2, gradLikMix2, start=start, method="nr", constraints=list(eqA=A, eqB=B), iterlim=iterlim, SUMTRho0=1, rho=0.5) all.equal(coef(a), c(-1.36, 0.68), tol=0.01) ## nr, analytic hessian a <- maxLik(logLikMix2, gradLikMix2, hessLikMix2, start=start, method="nr", constraints=list(eqA=A, eqB=B), iterlim=iterlim, SUMTRho0=1, rho=0.5) all.equal(coef(a), c(-1.36, 0.68), tol=0.01) ## BHHH cat("BHHH, analytic gradient, numeric Hessian\n") a <- maxLik(logLikMix2, gradLikMix2, start=start, method="bhhh", constraints=list(eqA=A, eqB=B), iterlim=iterlim, SUMTRho0=1, rho=0.5) all.equal(coef(a), c(-1.36, 0.68), tol=0.01) ## BHHH, analytic a <- maxLik(logLikMix2, gradLikMix2, start=start, method="bhhh", constraints=list(eqA=A, eqB=B), iterlim=iterlim, SUMTRho0=1, rho=0.5) all.equal(coef(a), c(-1.36, 0.68), tol=0.01) ## bfgs, no analytic gradient a <- maxLik(logLikMix2, start=start, method="bfgs", constraints=list(eqA=A, eqB=B), iterlim=iterlim, SUMTRho0=1, rho=0.5) all.equal(coef(a), c(-1.36, 0.68), tol=0.01) ## bfgs, analytic gradient a <- maxLik(logLikMix2, start=start, method="bfgs", constraints=list(eqA=A, eqB=B), iterlim=iterlim, SUMTRho0=1, rho=0.5) all.equal(coef(a), c(-1.36, 0.68), tol=0.01) ## SANN, analytic gradient a <- maxLik(logLikMix2, gradLikMix2, start=start, method="SANN", constraints=list(eqA=A, eqB=B), iterlim=iterlim, SUMTRho0=1, rho=0.5) all.equal(coef(a), c(-1.36, 0.68), tol=0.01) ## NM, numeric a <- maxLik(logLikMix2, start=start, method="nm", constraints=list(eqA=A, eqB=B), iterlim=100, # use more iters for NM SUMTRho0=1, rho=0.5) all.equal(coef(a), c(-1.36, 0.68), tol=0.01) ## -------------------- NR, multiple constraints -------------------- f <- function(theta) exp(-theta %*% theta) # test quadratic function ## constraints: ## theta1 + theta3 = 1 ## theta1 + theta2 = 1 A <- matrix(c(1, 0, 1, 1, 1, 0), 2, 3, 
byrow=TRUE) B <- c(-1, -1) cat("NR, multiple constraints\n") a <- maxNR(f, start=c(1,1.1,2), constraints=list(eqA=A, eqB=B)) theta <- coef(a) all.equal(c(theta[1] + theta[3], theta[1] + theta[2]), c(1,1), tolerance=1e-4) ## Error handling for equality constraints A <- matrix(c(1, 1), 1, 2) B <- -1 cat("Error handling: ncol(A) != lengths(start)\n") try(a <- maxNR(f, start=c(1, 2, 3), constraints=list(eqA=A, eqB=B))) # ncol(A) != length(start) A <- matrix(c(1, 1), 1, 2) B <- c(-1, 2) try(a <- maxNR(f, start=c(1, 2), constraints=list(eqA=A, eqB=B))) # nrow(A) != nrow(B) ## ## -------------- inequality constraints & extra paramters ---------------- ## ## mu1 < 1 ## mu2 > -1 A <- matrix(c(-1, 0, 0, 1), 2,2, byrow=TRUE) B <- c(1,1) start <- c(0.8, 0.9) ## inEGrad <- maxLik(logLikMix2, gradLikMix2, start=start, method="bfgs", constraints=list(ineqA=A, ineqB=B), rho=0.5) all.equal(coef(inEGrad), c(-0.98, 1.12), tol=0.01) ## inE <- maxLik(logLikMix2, start=start, method="bfgs", constraints=list(ineqA=A, ineqB=B), rho=0.5) all.equal(coef(inEGrad), coef(inE), tol=1e-4) ## inENM <- maxLik(logLikMix2, gradLikMix2, start=start, method="nm", constraints=list(ineqA=A, ineqB=B), rho=0.5) all.equal(coef(inEGrad), coef(inENM), tol=1e-3) # this is further off than gradient-based methods ## ---------- test vector B for inequality -------------- ## mu1 < 1 ## mu2 > 2 A <- matrix(c(-1, 0, 0, 1), 2,2, byrow=TRUE) B1 <- c(1,-2) a <- maxLik(logLikMix2, gradLikMix2, start=c(0.5, 2.5), method="bfgs", constraints=list(ineqA=A, ineqB=B1), rho=0.5) theta <- coef(a) all.equal(c(theta[1] < 1, theta[2] > 2), c(TRUE, TRUE)) # components should be larger than # (-1, -2) ## ## ---- ERROR HANDLING: insert wrong A and B forms ---- ## A2 <- c(-1, 0, 0, 1) try(maxLik(logLikMix2, gradLikMix2, start=start, method="bfgs", constraints=list(ineqA=A2, ineqB=B), print.level=1, rho=0.5) ) # should explain that matrix needed A2 <- matrix(c(-1, 0, 0, 1), 1, 4) try(maxLik(logLikMix2, gradLikMix2, start=start, method="bfgs", constraints=list(ineqA=A2, ineqB=B), print.level=1, rho=0.5) ) # should explain that wrong matrix # dimension B2 <- 1:3 try(maxLik(logLikMix2, gradLikMix2, start=start, method="bfgs", constraints=list(ineqA=A, ineqB=B2), print.level=1, rho=0.5) ) # A & B do not match cat("A & B do not match\n") B2 <- matrix(1,2,2) try(maxLik(logLikMix2, gradLikMix2, start=start, method="bfgs", constraints=list(ineqA=A, ineqB=B2), print.level=1, rho=0.5) ) # B must be a vector ## ---- fixed parameters with constrained optimization ----- ## Thanks to Bob Loos for finding this error. ## Optimize 3D hat with one parameter fixed (== 2D hat). ## Add an equality constraint on that cat("Constraints + fixed parameters\n") hat3 <- function(param) { ## Hat function. Hessian negative definite if sqrt(x^2 + y^2) < 0.5 x <- param[1] y <- param[2] z <- param[3] exp(-x^2-y^2-z^2) } sv <- c(1,1,1) ## constraints: x + y + z >= 2.5 A <- matrix(c(x=1,y=1,z=1), 1, 3) B <- -2.5 constraints <- list(ineqA=A, ineqB=B) res <- maxBFGS(hat3, start=sv, constraints=constraints, fixed=3, iterlim=3) all.equal(coef(res), c(0.770, 0.770, 1), tol=0.01) maxLik/tests/numericGradient.Rout.save0000644000175100001440000000330314077525067017623 0ustar hornikusers R version 3.0.1 (2013-05-16) -- "Good Sport" Copyright (C) 2013 The R Foundation for Statistical Computing Platform: x86_64-pc-linux-gnu (64-bit) R is free software and comes with ABSOLUTELY NO WARRANTY. You are welcome to redistribute it under certain conditions. 
Type 'license()' or 'licence()' for distribution details. R is a collaborative project with many contributors. Type 'contributors()' for more information and 'citation()' on how to cite R or R packages in publications. Type 'demo()' for some demos, 'help()' for on-line help, or 'help.start()' for an HTML browser interface to help. Type 'q()' to quit R. > > ### test numeric methods, in particular handling of unequal > ### function lengths > library(maxLik) Loading required package: miscTools Please cite the 'maxLik' package as: Henningsen, Arne and Toomet, Ott (2011). maxLik: A package for maximum likelihood estimation in R. Computational Statistics 26(3), 443-458. DOI 10.1007/s00180-010-0217-1. If you have questions, suggestions, or comments regarding the 'maxLik' package, please use a forum or 'tracker' at maxLik's R-Forge site: https://r-forge.r-project.org/projects/maxlik/ > > f <- function(x) { + if(x[1] <= 0) + return(NA) + # support of x[1] is (0, Inf) + return(c(log(x[1]),x[2])) + } > > ng <- numericGradient(f, c(0.01,1), eps=0.1) Warning message: In numericGradient(f, c(0.01, 1), eps = 0.1) : Function value at -0.04 1.00 = NA (length 1) does not conform with the length at original value 2 Component 1 set to NA > > nh <- try(numericHessian(f, t0=c(0.01,1), eps=0.1)) There were 13 warnings (use warnings() to see them) > > proc.time() user system elapsed 0.188 0.016 0.192 maxLik/tests/numericGradient.R0000644000175100001440000000052414077525067016140 0ustar hornikusers ### test numeric methods, in particular handling of unequal ### function lengths library(maxLik) f <- function(x) { if(x[1] <= 0) return(NA) # support of x[1] is (0, Inf) return(c(log(x[1]),x[2])) } ng <- numericGradient(f, c(0.01,1), eps=0.1) nh <- try(numericHessian(f, t0=c(0.01,1), eps=0.1)) maxLik/tests/BFGSR.R0000644000175100001440000000357714077525067013676 0ustar hornikusers### BFGSR-related tests ## 1. Test maximization algorithm for convex regions ## ## Optimize quadratic form t(D) %*% W %*% D with p.d. weight matrix ## (ie unbounded problems). ## All solutions should go to large values with a message about successful convergence set.seed(0) options(digits=4) quadForm <- function(D) { C <- seq(1, N) return( - t(D - C) %*% W %*% ( D - C) ) } N <- 3 # 3-dimensional case ## a) test quadratic function t(D) %*% D library(maxLik) W <- diag(N) D <- rep(1/N, N) res <- maxBFGSR(quadForm, start=D) all.equal(coef(res), 1:3, tolerance=1e-4) all.equal(gradient(res), rep(0,3), tolerance=1e-3) all.equal(nIter(res) < 100, TRUE) all.equal(returnCode(res) < 4, TRUE) ## Next, optimize hat function in non-concave region. Does not work well. hat <- function(param) { ## Hat function. Hessian negative definite if sqrt(x^2 + y^2) < 0.5 x <- param[1] y <- param[2] exp(-(x-2)^2 - (y-2)^2) } hatNC <- maxBFGSR(hat, start=c(1,1), tol=0, reltol=0) all.equal(coef(hatNC), rep(2,2), tolerance=1e-4) all.equal(gradient(hatNC), rep(0,2), tolerance=1e-3) all.equal(nIter(hatNC) < 100, TRUE) all.equal(returnCode(hatNC) < 4, TRUE) ## Test BFGSR with fixed parameters and equality constraints ## Optimize 3D hat with one parameter fixed (== 2D hat). ## Add an equality constraint on that hat3 <- function(param) { ## Hat function. 
Hessian negative definite if sqrt((x-2)^2 + (y-2)^2) < 0.5 x <- param[1] y <- param[2] z <- param[3] exp(-(x-2)^2-(y-2)^2-(z-2)^2) } sv <- c(x=1,y=1,z=1) ## constraints: x + y + z = 8 A <- matrix(c(1,1,1), 1, 3) B <- -8 constraints <- list(eqA=A, eqB=B) hat3CF <- maxBFGSR(hat3, start=sv, constraints=constraints, fixed=3) all.equal(coef(hat3CF), c(x=3.5, y=3.5, z=1), tolerance=1e-4) all.equal(nIter(hat3CF) < 100, TRUE) all.equal(returnCode(hat3CF) < 4, TRUE) all.equal(sum(coef(hat3CF)), 8, tolerance=1e-4) maxLik/tests/BFGSR.Rout.save0000644000175100001440000000631414077525067015353 0ustar hornikusers R version 3.6.0 (2019-04-26) -- "Planting of a Tree" Copyright (C) 2019 The R Foundation for Statistical Computing Platform: x86_64-pc-linux-gnu (64-bit) R is free software and comes with ABSOLUTELY NO WARRANTY. You are welcome to redistribute it under certain conditions. Type 'license()' or 'licence()' for distribution details. R is a collaborative project with many contributors. Type 'contributors()' for more information and 'citation()' on how to cite R or R packages in publications. Type 'demo()' for some demos, 'help()' for on-line help, or 'help.start()' for an HTML browser interface to help. Type 'q()' to quit R. > ### BFGSR-related tests > > ## 1. Test maximization algorithm for convex regions > ## > ## Optimize quadratic form t(D) %*% W %*% D with p.d. weight matrix > ## (ie unbounded problems). > ## All solutions should go to large values with a message about successful convergence > set.seed(0) > options(digits=4) > quadForm <- function(D) { + C <- seq(1, N) + return( - t(D - C) %*% W %*% ( D - C) ) + } > N <- 3 > # 3-dimensional case > ## a) test quadratic function t(D) %*% D > library(maxLik) Loading required package: miscTools Please cite the 'maxLik' package as: Henningsen, Arne and Toomet, Ott (2011). maxLik: A package for maximum likelihood estimation in R. Computational Statistics 26(3), 443-458. DOI 10.1007/s00180-010-0217-1. If you have questions, suggestions, or comments regarding the 'maxLik' package, please use a forum or 'tracker' at maxLik's R-Forge site: https://r-forge.r-project.org/projects/maxlik/ > W <- diag(N) > D <- rep(1/N, N) > res <- maxBFGSR(quadForm, start=D) > all.equal(coef(res), 1:3, tolerance=1e-4) [1] TRUE > all.equal(gradient(res), rep(0,3), tolerance=1e-3) [1] TRUE > all.equal(nIter(res) < 100, TRUE) [1] TRUE > all.equal(returnCode(res) < 4, TRUE) [1] TRUE > > ## Next, optimize hat function in non-concave region. Does not work well. > hat <- function(param) { + ## Hat function. Hessian negative definite if sqrt(x^2 + y^2) < 0.5 + x <- param[1] + y <- param[2] + exp(-(x-2)^2 - (y-2)^2) + } > > hatNC <- maxBFGSR(hat, start=c(1,1), tol=0, reltol=0) > all.equal(coef(hatNC), rep(2,2), tolerance=1e-4) [1] TRUE > all.equal(gradient(hatNC), rep(0,2), tolerance=1e-3) [1] TRUE > all.equal(nIter(hatNC) < 100, TRUE) [1] TRUE > all.equal(returnCode(hatNC) < 4, TRUE) [1] TRUE > > ## Test BFGSR with fixed parameters and equality constraints > ## Optimize 3D hat with one parameter fixed (== 2D hat). > ## Add an equality constraint on that > hat3 <- function(param) { + ## Hat function. 
Hessian negative definite if sqrt((x-2)^2 + (y-2)^2) < 0.5 + x <- param[1] + y <- param[2] + z <- param[3] + exp(-(x-2)^2-(y-2)^2-(z-2)^2) + } > sv <- c(x=1,y=1,z=1) > ## constraints: x + y + z = 8 > A <- matrix(c(1,1,1), 1, 3) > B <- -8 > constraints <- list(eqA=A, eqB=B) > hat3CF <- maxBFGSR(hat3, start=sv, constraints=constraints, fixed=3) > all.equal(coef(hat3CF), c(x=3.5, y=3.5, z=1), tolerance=1e-4) [1] TRUE > all.equal(nIter(hat3CF) < 100, TRUE) [1] TRUE > all.equal(returnCode(hat3CF) < 4, TRUE) [1] TRUE > all.equal(sum(coef(hat3CF)), 8, tolerance=1e-4) [1] TRUE > > proc.time() user system elapsed 0.562 0.560 0.338 maxLik/MD50000644000175100001440000001620015124517754012037 0ustar hornikusersd2f36df1df159a46417dc2e9597c38a0 *DESCRIPTION 111e2bb5ed9300c47b52b11a72c6839e *NAMESPACE b1e0337b88240a2a98bf57333278ab3d *NEWS 07512e0403e60dbe4c310f66880b4fb0 *R/05-classes.R 08296268a86bd7e22e055f73a666ed67 *R/10-MaxControl_class.R a9f1662e6dabd302ab25fafb6fead56c *R/20-maxControl.R c24d12178e12dc7a13df319bfbc65368 *R/25-addControlList.R 495172f78b9e46d0169fe8e40f65f6c8 *R/30-addControlDddot.R 23151b49bc8cdafc5378098818605dc8 *R/AIC.R 616d4b76bd50cdff58e1b31249581c2a *R/activePar.R 6b999dafa9bdf5880be41146752383ae *R/addFixedPar.R 1cb94fb786cf735b89302987f2f45ece *R/bread.maxLik.R 628fd12f511412a5431211ebc242b33b *R/callWithoutArgs.R b281b27bd439a07982fc1363b2c95e44 *R/callWithoutSumt.R f6b11464c98bd3662e2b4aae72410ee9 *R/checkBhhhGrad.R e5e6a17510e2f5b56254f573cc03d43e *R/checkFuncArgs.R 7446aa5174844bfb8b0c530d4b0292da *R/coef.maxLik.R a5a97856a454fa21a85d28c0638a0c1f *R/compareDerivatives.R f3c12fa85c5c5fb1f6e4b9ca564536b1 *R/condiNumber.R 59725aaae98563f4590529586dc1921b *R/confint.maxLik.R 16ce669188a2349f588a121fc48f85b9 *R/constrOptim2.R 804efa9a9611766159532fa1ba2535b7 *R/estfun.maxLik.R 1d0a677cc7248ab9651c2bec1d3316d6 *R/fnSubset.R 9732841388f169abd62c06b2dc886baf *R/gradient.R 3db7b1048b55148dd123d70e61c8d733 *R/headDots.R 64d17ba17bea697ac45de7f5ec616c47 *R/hessian.R 21446ccb89769a76daad63220a2f2640 *R/logLik.maxLik.R 4e136956778600d4bcdc8b0b3d52d876 *R/logLikAttr.R 1e691a97c6eb732e7267ebf51942426a *R/logLikFunc.R 110d44648be69c6a208bd84d6bb29ef9 *R/logLikGrad.R 770537e5bbde56b8e3590b68bca2d033 *R/logLikHess.R b45d21b3666c4b778868fe1c902df8e6 *R/maxAdam.R 4a93edf08f36c69063511a11c81f4a05 *R/maxBFGS.R 002a26be9f64bbf092e7e33a1805a53c *R/maxBFGSR.R 9447998fb601ccd499919c8796882000 *R/maxBFGSRCompute.R fe5933fbf427415757cac7cccc7ed277 *R/maxBHHH.R dc26c765189aab53893ef20871587498 *R/maxCG.R fbd13bdfa6f2dc09a1a22bd6ee8569ba *R/maxLik.R b71d12473ef6a49f903b66a2aa2fb8b1 *R/maxNM.R 962f17ef6572a024e2bb64c32a0961bc *R/maxNR.R c4632c52075b7f1504bb80e59eecd042 *R/maxNRCompute.R af12951f1f661072362aa23df9a0b16a *R/maxOptim.R 63c770407939aefb4bfd99529cf92517 *R/maxSANN.R ff2d179d6daeb89e49c6122713c30107 *R/maxSGA.R 47d4f0a4a4564a349d468723b56aefd8 *R/maxSGACompute.R 64abe1be44c5c8f770fe8af7e2f57bca *R/maxValue.R c80a65acb972edefc36fb40ed9fc127b *R/maximMessage.R 999df96b4d40ca70de26151b9f04d63e *R/maximType.R f4e22fe11b2dcb40e338b2f19dbfbde8 *R/nIter.R aa6c83d64ee8b14b3b5934fca4e1cf90 *R/nObs.R 1dc47c8109c34ff37a0c657d1cc82419 *R/nParam.R 76c96796242b1f29c693fff5946344d4 *R/numericGradient.R fd6764e2955ed5802e9d5c02dcc8cd17 *R/numericHessian.R 2f8dcbd5c37ed9702f45baed2bc79b7d *R/objectiveFn.R 32ee3ff9f876da5b606fea22ee11c3ce *R/observationGradient.R d6b8d3df6587f6fe54074c51e40c1dcf *R/openParam.R 0db0a207aa820ecd499edc72f953ef1e *R/prepareFixed.R f681ec2a71708e712ea0c3841ecc9711 
*R/print.maxLik.R eb71f0f87deded32d343f3838e844c49 *R/printRowColLimits.R 3ec0b9003101ec88c863870f2d30e6bb *R/returnCode.R 69d1f09210d746bb394c6dd9bfa680bc *R/returnMessage.R 0ad6b3077e2e22603a12300892089655 *R/showMaxControl.R 39305dd91a5f62f3b5da99daee4bfa7f *R/stdEr.maxLik.R a355236eada2891f64bc25ad3937aba7 *R/storedParameters.R e696f2804d0a702a70a5f9666caf61ee *R/storedValues.R 1fe4bf86e3e4b60e8460780b4142d567 *R/sumGradients.R b482c4c3a7a159b72916328d7ed1ef2a *R/summary.maxLik.R c8eb0796e44b16d1224d56d12a532288 *R/summary.maxim.R e6734c7b9e086764cd181d3213d668ce *R/sumt.R fd6d0b337a47860570a58d9584fe146e *R/tidyMethods.R 7f8d32f62e006396f5dab695599f9037 *R/vcov.maxLik.R 68795c50173f11bb4b55f764cea06751 *R/zzz.R 76e123f5dcc8dee07678412adc0a2a84 *build/partial.rdb e3b238d4f2d248d45414bcfa10dd55da *build/vignette.rds 5d7b3dab1f63435b6769508825f9e829 *inst/CITATION 04940345afaff3ea5cf2b68e8ab3a9e8 *inst/doc/intro-to-maximum-likelihood.R a1ca41b6ee4853ab70480b92916ceddc *inst/doc/intro-to-maximum-likelihood.Rnw 1998702da277fc2ed86988fdf5134cd4 *inst/doc/intro-to-maximum-likelihood.pdf 1d29293bd50b9db3de7c4d10061a0dac *inst/doc/stochastic-gradient-maxLik.R 77a22f458067cb82babeca70de304851 *inst/doc/stochastic-gradient-maxLik.Rnw 7babcc7639d9efcebf0a3565f23ce913 *inst/doc/stochastic-gradient-maxLik.pdf 6159ce7b0178bc8d0a201642d8ad2bb0 *inst/doc/using-maxlik.R c8e525273570da2c5e6d7b9605f445c5 *inst/doc/using-maxlik.Rnw 0dd2e8e19ccff571ecdd7e12c1856b7f *inst/doc/using-maxlik.pdf f569138ad20796e8a2f4589f8ee8ed2c *inst/tinytest/test-basic.R 910334fb9160bb263386bcb59f471615 *inst/tinytest/test-maxControl.R 9bca2fe2c1a2c40104a088f1a6de2260 *inst/tinytest/test-maxSG.R edcb435c99d3b127b13cd18177dc93f0 *inst/tinytest/test-methods.R fdcd59987029bf9017113aaa16ca9659 *inst/tinytest/test-optimizers.R 133dc8408ac5caab3d8ca4e21d9a7c2b *inst/tinytest/test-parameters.R 472202e09a86012ac5ec3a08ecff1dcd *man/activePar.Rd d5c48ed9e4983ebae6a5d9f9c53136c7 *man/bread.maxLik.Rd 45b84f03f0c60f456444e163360fee34 *man/compareDerivatives.Rd 789962ea4222142cd8f51f51172c79ac *man/condiNumber.Rd 65bf0d37c2054eb707194f98846a432c *man/confint.maxLik.Rd 812c056426e615573fe71f21ca484374 *man/fnSubset.Rd e57b580051b91705923cb2eef891c83b *man/gradient.Rd 4cb76c1ed5f2dc2508d1010893089052 *man/hessian.Rd 0c14b964346dbc38fb5a13009bcbd85c *man/logLik.maxLik.Rd 2a6b97101ae7e49f852bc19a2762a36d *man/maxBFGS.Rd 3e69da935c298f34961e0e5c40405b9e *man/maxControl.Rd 3649275859d0c46a5bb9ecdb1fc9ce59 *man/maxLik-internal.Rd 0cdf8c75edd5da491dd355e1339f0e57 *man/maxLik-methods.Rd 4879bb28b424061901b63104d2d95c89 *man/maxLik-package.Rd 0641331cba5492be390b0715f27efa40 *man/maxLik.Rd 54562c66ad418a38e08220e98b39ddf5 *man/maxNR.Rd e225ef8eafd05364957af6d7026dd317 *man/maxSGA.Rd f76b141dc0d11b6e22d9391a1f82f905 *man/maxValue.Rd 35f8dbfb6a4aefdeede7ce8f3095f441 *man/maximType.Rd 22d3768a69246b071d441f4130db0a8a *man/nIter.Rd a231757d606ddb9355c3fb09107fad6d *man/nObs.Rd 3bd8e330c2ed25ec1ef6eb89474a40bb *man/nParam.Rd 3552f1fe651edc3af0638c1ec4493b1b *man/numericGradient.Rd 4bf4ed2639941e261d783a1625f9958e *man/objectiveFn.Rd efade94935dfb91192c52aec7655a68b *man/reexports.Rd 353b9db4026a3bbb073eb918e1422f5d *man/returnCode.Rd 8046210012af544d6987ac1f35fbb67f *man/storedValues.Rd 0e302b5c44fb4c66365ff606f5c758fd *man/summary.maxLik.Rd 416c56ea557a1c2933d86c0daa3c17ac *man/summary.maxim.Rd e39688cb93ce2092adca8dcb79617203 *man/sumt.Rd 322d257b978770eb56ff86558ab34f63 *man/tidy.maxLik.Rd c9832e82bd584e5837f81a676c5e877b *man/vcov.maxLik.Rd 
b2d527d3c65311cbb6e3e035a81b4268 *tests/BFGSR.R 7b1651cf9375022a41f4084af5270f70 *tests/BFGSR.Rout.save 29c954c33db65e9105745799a996cb8d *tests/constraints.R 76f00749d5657d785a124bb8df9d50e5 *tests/constraints.Rout.save 70db31a194f4a57154ea8d6ca822dc52 *tests/finalHessian.R 22066f4a839377d0a775a0d33d8825d9 *tests/finalHessian.Rout.save c1bbe611737d8fb90e93a7bac3b1be7a *tests/numericGradient.R da073867a297c038b711fda70344d2b7 *tests/numericGradient.Rout.save 03c1442d4a9c62351f781935c265f2e5 *tests/tinytest.R a1ca41b6ee4853ab70480b92916ceddc *vignettes/intro-to-maximum-likelihood.Rnw 991278afedabd4e7507305b345a73c2c *vignettes/maxlik.bib db26618cc7773afcaf2fae96043b1673 *vignettes/probability-density.asy e5cee16df2fac32daf93cce4a359f204 *vignettes/probability-density.pdf 77a22f458067cb82babeca70de304851 *vignettes/stochastic-gradient-maxLik.Rnw c8e525273570da2c5e6d7b9605f445c5 *vignettes/using-maxlik.Rnw maxLik/R/0000755000175100001440000000000014077525067011733 5ustar hornikusersmaxLik/R/logLikAttr.R0000644000175100001440000001447514077525067014145 0ustar hornikusers### this function returns the log-likelihood value with gradient and Hessian as ### attributes. If the log-likelihood function provided by the user does not add ### these attributes, this functions uses the functions provided by the user ### as arguments "grad" and "hess" or (if they are not provided) uses the ### finite-difference method to obtain the gradient and Hessian logLikAttr <- function(theta, fnOrig, gradOrig=NULL, hessOrig=NULL, fixed, sumObs = FALSE, returnHessian = TRUE, ...) { ## fixed: logical, which parameters to keep fixed ## # large initial indentation to be able to diff to previous version # that was defined in maxNR() / maxNR.R. ## number of parameters nParam <- length( theta ) ## value of log-likelihood function f <- fnOrig(theta, ...) ## if there are NA-s in the function value, do not ## compute gradient and Hessian if(any(is.na(f))) { attr(f, "gradient") <- NA attr(f, "hessian") <- NA return(f) } ## gradient of log-likelihood function gr <- attr( f, "gradient" ) if( is.null( gr ) ) { if( !is.null( gradOrig ) ) { gr <- gradOrig(theta, ...) } else { gr <- numericGradient(f = fnOrig, t0 = theta, fixed=fixed, ...) } } ## if there are NA-s in active gradient, do not compute Hessian if(is.matrix(gr)) { if(ncol(gr) != length(theta)) { stop(paste0("if gradient is a matrix, it must have length(parameter) colums (currently ", length(theta), "), not ", ncol(gr))) } activeGr <- gr[,!fixed] } else { activeGr <- gr[!fixed] } if(any(is.na(activeGr))) { attr(f, "gradient") <- gr attr(f, "hessian") <- NA return(f) } # if gradients are observation-specific, they must be stored in a matrix if(observationGradient(gr, length(theta))) { gr <- as.matrix(gr) } ## Set gradients of fixed parameters to NA so that they are always NA ## (no matter if they are analytical or finite-difference gradients) if( is.null( dim( gr ) ) ) { gr[ fixed ] <- NA } else { gr[ , fixed ] <- NA } ## Hessian of log-likelihood function if( isTRUE( returnHessian ) ) { h <- attr( f, "hessian" ) if( is.null( h ) ) { if(!is.null(hessOrig)) { h <- as.matrix(hessOrig(theta, ...)) } else { llFunc <- function( theta, ... ) { return( sum( fnOrig( theta, ... ) ) ) } if( !is.null( attr( f, "gradient" ) ) ) { gradFunc <- function( theta, ... ) { return( sumGradients( attr( fnOrig( theta, ... ), "gradient" ), nParam ) ) } } else if( !is.null( gradOrig ) ) { gradFunc <- function( theta, ... ) { return( sumGradients( gradOrig( theta, ... 
), nParam ) ) } } else { gradFunc <- NULL } h <- numericHessian(f = llFunc, grad = gradFunc, t0 = theta, fixed=fixed, ...) } } ## Check the correct size of Hessian. if((dim(h)[1] != nParam) | (dim(h)[2] != nParam)) { stop("Wrong hessian dimension. Needed ", nParam, "x", nParam, " but supplied ", dim(h)[1], "x", dim(h)[2]) } else { ## Set elements of the Hessian corresponding to the ## fixed parameters ## to NA so that they are always zero ## (no matter if they are ## calculated analytical or by the finite-difference ## method) h[ fixed, ] <- NA h[ , fixed ] <- NA } } else if( tolower( returnHessian ) == "bhhh" ) { ## We have to return BHHH Hessian. Check if it contains NA in free paramateres, otherwise ## return outer product as Hessian. h <- NULL # to keep track of what we have done if(is.null(dim(gr)) & any(is.na(gr[!fixed]))) { # NA gradient: do not check but send the wrong values to the optimizer. # The optimizer should take corresponding action, such as looking for another value h <- NA } else if(is.matrix(gr)) { if(any(is.na(gr[,!fixed]))) { # NA gradient: do not check but send the wrong values to the optimizer. # The optimizer should take corresponding action, such as looking for another value h <- NA } } if(is.null(h)) { # gr seems not to contain NA-s at free parameters checkBhhhGrad( g = gr, theta = theta, analytic = ( !is.null( attr( f, "gradient" ) ) || !is.null( gradOrig ) ), fixed=fixed) h <- - crossprod( gr ) } attr( h, "type" ) = "BHHH" } else { h <- NULL } ## sum log-likelihood values over observations (if requested) if( sumObs ) { f <- sumKeepAttr( f ) } ## sum gradients over observations (if requested) if( sumObs ) { ## We need just summed gradient gr <- sumGradients( gr, nParam ) } if( !is.null( gradOrig ) && !is.null( attr( f, "gradient" ) ) ) { attr( f, "gradBoth" ) <- TRUE } if( !is.null( hessOrig ) && !is.null( attr( f, "hessian" ) ) ) { attr( f, "hessBoth" ) <- TRUE } attr( f, "gradient" ) <- gr attr( f, "hessian" ) <- h return( f ) } maxLik/R/logLikFunc.R0000644000175100001440000000251314077525067014114 0ustar hornikusersif( getRversion() >= "2.15.1" ) { globalVariables( c( "lastFuncGrad", "lastFuncParam" ) ) } ## objective function: ## sum over possible individual likelihoods logLikFunc <- function(theta, fnOrig, # the original user-supplied function we wrap here gradOrig, hessOrig, # Arguments "gradOrig" and "hessOrig" are just for compatibility with # logLikGrad() and logLikHess() start = NULL, fixed = NULL, sumObs = TRUE, ...) { if(missing(fnOrig)) { stop("Cannot compute the objective function value: no objective function supplied") } theta <- addFixedPar( theta = theta, start = start, fixed = fixed, ...) result <- fnOrig( theta, ... ) ## save gradients and the corresponding parameter values assign( "lastFuncGrad", attr( result, "gradient" ), inherits = TRUE ) assign( "lastFuncParam", theta, inherits = TRUE ) if( sumObs ) { result <- sumKeepAttr( result ) g <- attributes( result )$gradient if( !is.null( g ) ) { g <- sumGradients( g, length( theta ) ) names( g ) <- names( theta ) if( !is.null( fixed ) ) { g <- g[ !fixed ] } attributes( result )$gradient <- g } } return( result ) } maxLik/R/logLikHess.R0000644000175100001440000000360214077525067014123 0ustar hornikusers## Calculate the Hessian of the function, either by analytic or numeric method logLikHess <- function( theta, fnOrig, gradOrig=NULL, hessOrig=NULL, start = NULL, fixed = NULL, gradAttr = NULL, hessAttr = NULL, ... 
) { # argument "gradAttr" should be # - FALSE if the gradient is not provided as attribute of the log-lik value # - TRUE if the gradient is provided as attribute of the log-lik value # - NULL if this is not known # argument "hessAttr" should be # - FALSE if the Hessian is not provided as attribute of the log-lik value # - TRUE if the Hessian is provided as attribute of the log-lik value # - NULL if this is not known theta <- addFixedPar( theta = theta, start = start, fixed = fixed, ...) if(!is.null(hessOrig)) { hessian <- as.matrix(hessOrig( theta, ... )) } else { if( is.null( hessAttr ) || hessAttr || is.null( gradAttr ) ) { llVal <- fnOrig( theta, ... ) gradient <- attr( llVal, "gradient" ) hessian <- attr( llVal, "hessian" ) gradAttr <- !is.null( gradient ) hessAttr <- !is.null( hessian ) } if( !hessAttr ) { if( !is.null( gradOrig ) ) { grad2 <- logLikGrad } else if( gradAttr ) { grad2 <- function( theta, fnOrig = NULL, gradOrig = NULL, ... ) { gradient <- attr( fnOrig( theta, ... ), "gradient" ) gradient <- sumGradients( gradient, length( theta ) ) return( gradient ) } } else { grad2 <- NULL } hessian <- numericHessian( f = logLikFunc, grad = grad2, t0 = theta, fnOrig = fnOrig, gradOrig = gradOrig, ... ) } } rownames( hessian ) <- colnames( hessian ) <- names( theta ) if( !is.null( fixed ) ) { hessian <- hessian[ !fixed, !fixed, drop = FALSE ] } return( hessian ) } maxLik/R/returnCode.R0000644000175100001440000000054714077525067014176 0ustar hornikusers### Returns return code of maxim objects ### This is tells either error, or other cause the iterations ended, ### such as the result converged returnCode <- function(x, ...) UseMethod("returnCode") returnCode.default <- function(x, ...) x$returnCode returnCode.maxim <- function(x, ...) x$code returnCode.maxLik <- function(x, ...) x$code maxLik/R/condiNumber.R0000644000175100001440000000273114077525067014326 0ustar hornikusers### condiNumber: print matrix' condition number adding columns one by one. ### In this way user may investigate the which columns cause problems with singularity condiNumber <- function(x, ...) UseMethod("condiNumber") condiNumber.default <- function(x, exact=FALSE, norm=FALSE, printLevel=print.level, print.level=1, digits = getOption( "digits" ), ... ) { ## x: a matrix, condition number of which are to be printed ## exact: whether the condition number have to be exact or approximated (see 'kappa') ## norm: whether to normalise the matrix' columns. ## printLevel: whether to print the condition numbers while calculating. Useful for interactive testing. savedDigits <- getOption("digits") options( digits = digits ) if(dim(x)[2] > dim(x)[1]) { warning(paste(dim(x)[1], "rows and", dim(x)[2], "columns, use transposed matrix")) x <- t(x) } cn <- numeric(ncol(x)) if(norm) { # Now normalise column vectors x <- apply(x, 2, FUN=function(v) v/sqrt(sum(v*v))) } for(i in seq(length=ncol(x))) { m <- x[,1:i] cn[i] <- kappa(m, exact=exact) if(printLevel > 0) cat(colnames(x)[i], "\t", cn[i], "\n") } names(cn) <- colnames(x) options( digits = savedDigits ) invisible(cn) } condiNumber.maxLik <- function(x, ...) condiNumber.default( x = hessian(x)[activePar(x), activePar(x),drop=FALSE], ... ) maxLik/R/maxAdam.R0000644000175100001440000001063614077525067013434 0ustar hornikusers maxAdam <- function(fn=NULL, grad=NULL, hess=NULL, start, nObs, constraints=NULL, finalHessian=FALSE, fixed=NULL, control=NULL, ...) { ## Adam stochastic gradient ascent ## Parameters: ## fn - the function to be minimized. 
Returns either scalar or ## vector value with possible attributes ## constPar and newVal ## grad - gradient function (numeric used if missing). Must return either ## * vector, length=nParam ## * matrix, dim=c(nObs, 1). Treated as vector ## * matrix, dim=c(M, nParam), where M is arbitrary. In this case the ## rows are simply summed (useful for maxBHHH). ## hess - hessian function (used only for finalHessian, otherwise ignored) ## start - initial parameter vector (eventually w/names) ## ... - extra arguments for fn() ## finalHessian include final Hessian? As computing final hessian does not carry any extra penalty for NR method, this option is ## mostly for compatibility reasons with other maxXXX functions. ## TRUE/something else include ## FALSE do not include ## fixed index vector, which parameters to keep fixed ## ## RESULTS: ## an object of class "maxim": ## ------------------------------ ## Add parameters from ... to control if(!inherits(control, "MaxControl")) { mControl <- addControlList(maxControl(gradtol=0, SG_learningRate=0.001), control) } else { mControl <- control } mControl <- addControlList(mControl, list(...), check=FALSE) ## argNames <- c(c("fn", "grad", "hess", "start", "fixed", "control"), openParam(mControl)) # Here we allow to submit all parameters outside of the # 'control' list. May eventually include only a # subset here ## ensure that 'fn', 'grad', and 'hess' do not take any arguments that maxSGA eats up if(!is.null(fn)) { checkFuncArgs( fn, argNames, "fn", "maxAdam" ) } if( !is.null( grad ) ) { checkFuncArgs( grad, argNames, "grad", "maxAdam" ) } if( !is.null( hess ) ) { checkFuncArgs( hess, argNames, "hess", "maxAdam" ) } ## ensure that at least 'fn' or 'grad' are supplied if(is.null(fn) & is.null(grad)) { stop("maxAdam requires at least 'fn' or 'grad' to be supplied") } if(length(start) < 1) { stop("'start' must be of positive length!") } ## establish the active parameters. Internally, we just use 'activePar' fixed <- prepareFixed( start = start, activePar = NULL, fixed = fixed ) ## chop off the control args from ... and forward the new ... dddot <- list(...) dddot <- dddot[!(names(dddot) %in% openParam(mControl))] cl <- list(start=start, finalHessian=finalHessian, fixed=fixed, control=mControl, optimizer="Adam") if(length(dddot) > 0) { cl <- c(cl, dddot) } ## if(is.null(constraints)) { ## call maxSGACompute with the modified ... list cl <- c(quote(maxSGACompute), fn=logLikFunc, grad=logLikGrad, hess=logLikHess, fnOrig = fn, gradOrig = grad, hessOrig = hess, # these are forwarded to the logLikAttr nObs=nObs, cl) result <- eval(as.call(cl)) } else { if(identical(names(constraints), c("ineqA", "ineqB"))) { stop("Inequality constraints not implemented for maxSGA") } else if(identical(names(constraints), c("eqA", "eqB"))) { # equality constraints: A %*% beta + B = 0 cl <- c(quote(sumt), fn=fn, grad=grad, hess=hess, maxRoutine=maxSGA, constraints=list(constraints), cl) result <- eval(as.call(cl)) } else { stop("maxNR only supports the following constraints:\n", "constraints=list(ineqA, ineqB)\n", "\tfor A %*% beta + B >= 0 linear inequality constraints\n", "current constraints:", paste(names(constraints), collapse=" ")) } } ## Save the objective function result$objectiveFn <- fn ## return( result ) } maxLik/R/vcov.maxLik.R0000755000175100001440000000221114077525067014256 0ustar hornikusers## maxLik vcov.maxLik <- function(object, eigentol=1e-12, ...) 
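## (eigentol: the Hessian is treated as numerically singular when its smallest
##  absolute eigenvalue does not exceed eigentol times the largest one; in that
##  case the variances of the free parameters are returned as Inf)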
{ ## if exists $varcovar, take it if(!is.null(object$varcovar)) return(object$varcovar) ## otherwise invert hessian activePar <- activePar(object) if(!is.null(hess <- hessian(object))) { hess <- hessian(object)[activePar, activePar,drop=FALSE] hessev <- abs(eigen(hess, symmetric=TRUE, only.values=TRUE)$values) varcovar <- matrix(0, nParam.maxim(object), nParam.maxim(object)) # this makes the fixed parameters to 0 rownames( varcovar ) <- colnames(varcovar ) <- names(coef.maxLik(object)) if(min(hessev) > (eigentol*max(hessev))) { ## If hessian is not singular, fill in the free parameter values varcovar[activePar,activePar] <- solve(-hessian(object)[activePar,activePar]) # guarantee that the returned variance covariance matrix is symmetric varcovar <- ( varcovar + t( varcovar ) ) / 2 } else { ## If singular, the free parameter values will be Inf varcovar[activePar,activePar] <- Inf } return(varcovar) } else return(NULL) } maxLik/R/confint.maxLik.R0000644000175100001440000000126414077525067014745 0ustar hornikusers## confint method by Lucca Scrucca confint.maxLik <- function(object, parm, level = 0.95, ...) { cf <- coef(object) if(missing(parm)) parm <- seq_along(cf) pnames <- names(cf) if(is.null(pnames)) pnames <- parm else if(is.numeric(parm)) parm <- pnames[parm] a <- (1 - level)/2 a <- c(a, 1 - a) pct <- format.perc(a, 3) q <- qnorm(a) ci <- array(NA, dim = c(length(parm), 2L), dimnames = list(parm, pct)) se <- sqrt(diag(vcov(object)))[parm] ci[] <- cf[parm] + se %o% q return(ci) } format.perc <- function(probs, digits) paste(format(100 * probs, trim = TRUE, scientific = FALSE, digits = digits), "%") maxLik/R/print.maxLik.R0000644000175100001440000000064014077525067014436 0ustar hornikusersprint.maxLik <- function( x, ... ) { cat("Maximum Likelihood estimation\n") cat(maximType(x), ", ", nIter(x), " iterations\n", sep="") cat("Return code ", returnCode(x), ": ", returnMessage(x), "\n", sep="") if(!is.null(x$estimate)) { cat("Log-Likelihood:", x$maximum ) cat( " (", sum( activePar( x ) ), " free parameter(s))\n", sep = "" ) cat("Estimate(s):", x$estimate, "\n" ) } } maxLik/R/sumt.R0000644000175100001440000001751214077525067013054 0ustar hornikusers### SUMT (Sequential Unconstrained Maximization Technique) ### borrowed from package 'clue' ### ### Adapted for linear constraints sumt <- function(fn, grad=NULL, hess=NULL, start, maxRoutine, constraints, SUMTTol = sqrt(.Machine$double.eps), # difference between estimates for successive outer iterations SUMTPenaltyTol = sqrt(.Machine$double.eps), # maximum allowed penalty SUMTQ = 10, SUMTRho0 = NULL, printLevel=print.level, print.level=0, SUMTMaxIter=100, ...) { ## constraints list w/components eqA and eqB. Maximization will ## be performed wrt to the constraint ## A %*% theta + B = 0 ## The user must ensure the matrices are in correct ## form ## maxSUMTiter how many SUMT iterations to perform max ## penalty <- function(theta) { p <- A %*% theta + B sum(p*p) } ## Penalty gradient and Hessian are used only if corresponding function ## for the likelihood function is provided gPenalty <- function(theta) { 2*(t(theta) %*% t(A) %*% A - t(B) %*% A) } hessPenalty <- function(theta) { 2*t(A) %*% A } ## strip possible arguments of maxRoutine and call the function thereafter callWithoutMaxArgs <- function(theta, fName, ...) { return( callWithoutArgs( theta, fName = fName, args = names(formals(maxRoutine)), ... 
) ) } SUMTMessage <- function(code) { message <- switch(code, "1" = "penalty close to zero", "2" = "successive function values within tolerance limit", "4" = "Outer iteration limit exceeded (increase SUMTMaxIter ?).", paste("Code", code)) return(message) } ## the penalized objective function Phi <- function(theta, ...) { llVal <- callWithoutMaxArgs( theta, "logLikFunc", fnOrig = fn, gradOrig = grad, hessOrig = hess, sumObs = FALSE, ... ) llVal <- llVal - rho * penalty( theta ) / length( llVal ) g <- attributes( llVal )$gradient if( !is.null( g ) ) { if( is.matrix( g ) ) { g <- g - matrix( rep( rho * gPenalty( theta ) / nrow( g ), each = nrow( g ) ), nrow = nrow( g ), ncol = ncol( g ) ) } else { g <- g - rho * gPenalty( theta ) } attributes( llVal )$gradient <- g } h <- attributes( llVal )$hessian if( !is.null( h ) ) { attributes( llVal )$hessian <- h - rho * hessPenalty( theta ) } return( llVal ) } ## gradient of the penalized objective function if(!is.null(grad)) { gradPhi<- function(theta, ...) { g <- grad(theta, ...) if(is.matrix(g)) { g <- g - matrix( rep( rho * gPenalty( theta ) / nrow( g ), each = nrow( g ) ), nrow = nrow( g ), ncol = ncol( g ) ) } else { g <- g - rho * gPenalty( theta ) } return( g ) } } else { gradPhi <- NULL } ## Hessian of the penalized objective function if(!is.null(hess)) { hessPhi <- function(theta, ...) { return( hess(theta, ...) - rho*hessPenalty(theta) ) } } else { hessPhi <- NULL } ## -------- SUMT Main code --------- ## Note also that currently we do not check whether optimization was ## "successful" ... A <- constraints$eqA B <- as.matrix(constraints$eqB) ## Check if the matrices conform if(ncol(A) != length(start)) { stop("Equality constraint matrix A must have the same number\n", "of columns as the parameter length ", "(currently ", ncol(A), " and ", length(start), ")") } if(nrow(A) != nrow(B)) { stop("Equality constraint matrix A must have the same number\n", "of rows as the matrix B ", "(currently ", nrow(A), " and ", nrow(B), ")") } ## Find a suitable inital value for rho if not specified if(is.null(SUMTRho0)) { rho <- 0 result <- maxRoutine(fn=Phi, grad=gradPhi, hess=hessPhi, start=start, printLevel=max(printLevel - 1, 0), ...) theta <- coef(result) # Note: this may be a bad idea, # if unconstrained function is unbounded # from above. In that case rather specify SUMTRho0. if(printLevel > 0) { cat("SUMT initial: rho = ", rho, ", function = ", callWithoutMaxArgs( theta, "logLikFunc", fnOrig = fn, gradOrig = grad, hessOrig = hess, ... ), ", penalty = ", penalty(theta), "\n") cat("Estimate:") print(theta) } ## Better upper/lower bounds for rho? rho <- max( callWithoutMaxArgs( theta, "logLikFunc", fnOrig = fn, gradOrig = grad, hessOrig = hess, ... ), 1e-3) / max(penalty(start), 1e-3) } ## if rho specified, simply pick that and use previous initial values else { rho <- SUMTRho0 theta <- start } ## iter <- 1L repeat { thetaOld <- theta result <- maxRoutine(fn=Phi, grad=gradPhi, hess=hessPhi, start=thetaOld, printLevel=max(printLevel - 1, 0), ...) theta <- coef(result) if(printLevel > 0) { cat("SUMT iteration ", iter, ": rho = ", rho, ", function = ", callWithoutMaxArgs( theta, "logLikFunc", fnOrig = fn, gradOrig = grad, hessOrig = hess, ... 
), ", penalty = ", penalty(theta), "\n", sep="") cat("Estimate:") print(theta) } if(max(abs(thetaOld - theta)) < SUMTTol) { SUMTCode <- 2 break } if(penalty(theta) < SUMTPenaltyTol) { SUMTCode <- 1 break } if(iter >= SUMTMaxIter) { SUMTCode <- 4 break } iter <- iter + 1L rho <- SUMTQ * rho } ## Now we replace the resulting gradient and Hessian with those, ## calculated on the original function llVal <- callWithoutMaxArgs( theta, "logLikFunc", fnOrig = fn, gradOrig = grad, hessOrig = hess, sumObs = FALSE, ... ) gradient <- attr( llVal, "gradient" ) if( is.null( gradient ) ) { gradient <- callWithoutMaxArgs( theta, "logLikGrad", fnOrig = fn, gradOrig = grad, hessOrig = hess, sumObs = FALSE, ... ) } if( !is.null( dim( gradient ) ) ) { if( nrow( gradient ) > 1 ) { gradientObs <- gradient } gradient <- colSums( gradient ) } else if( length( start ) == 1 && length( gradient ) > 1 ) { gradientObs <- matrix( gradient, ncol = 1 ) gradient <- sum( gradient ) } result$gradient <- gradient names( result$gradient ) <- names( result$estimate ) result$hessian <- callWithoutMaxArgs( theta, "logLikHess", fnOrig = fn, gradOrig = grad, hessOrig = hess, ... ) result$constraints <- list(type="SUMT", barrier.value=penalty(theta), code=SUMTCode, message=SUMTMessage(SUMTCode), outer.iterations=iter ) if( exists( "gradientObs" ) ) { result$gradientObs <- gradientObs colnames( result$gradientObs ) <- names( result$estimate ) } if( result$constraints$barrier.value > 0.001 ) { warning( "problem in imposing equality constraints: the constraints", " are not satisfied (barrier value = ", result$constraints$barrier.value, "). Try setting 'SUMTTol' to 0" ) } return(result) } maxLik/R/maxBHHH.R0000644000175100001440000000155214077525067013300 0ustar hornikusersmaxBHHH <- function(fn, grad=NULL, hess=NULL, start, finalHessian="BHHH", ...) { ## hess: Hessian, not used, for compatibility with the other methods ## check if arguments of user-provided functions have reserved names argNames <- c( "fn", "grad", "hess", "start", "print.level", "iterlim" ) checkFuncArgs( fn, argNames, "fn", "maxBHHH" ) if( !is.null( grad ) ) { checkFuncArgs( grad, argNames, "grad", "maxBHHH" ) } if( !is.null( hess ) ) { checkFuncArgs( hess, argNames, "hess", "maxBHHH" ) } ## using the Newton-Raphson algorithm with BHHH method for Hessian a <- maxNR( fn=fn, grad = grad, hess = hess, start=start, finalHessian = finalHessian, bhhhHessian = TRUE, ...) a$type = "BHHH maximisation" invisible(a) } maxLik/R/tidyMethods.R0000644000175100001440000000142014077525067014350 0ustar hornikusers require_tibble_package <- function () { if (! requireNamespace("tibble", quietly = TRUE)) { stop("The `tibble` package must be installed to use tidy() or glance() methods") } } tidy.maxLik <- function (x, ...) { require_tibble_package() s <- summary(x) ret <- tibble::as_tibble(s$estimate, rownames = "term") colnames(ret) <- c("term", "estimate", "std.error", "statistic", "p.value") ret } glance.maxLik <- function (x, ...) { require_tibble_package() ll <- logLik(x) nobs <- tryCatch(nObs(x), error = function(e) NA) # nobs = NA in case of error ret <- tibble::tibble( df = attr(ll, "df"), logLik = as.numeric(ll), AIC = AIC(x), nobs = nobs ) ret } maxLik/R/addFixedPar.R0000644000175100001440000000025414077525067014232 0ustar hornikusersaddFixedPar <- function( theta, start, fixed, ...) 
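## (theta: values of the free parameters only; start: full-length parameter
##  vector that supplies the values of the fixed components; fixed: logical
##  vector marking which components are held fixed)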
{ if( is.null( fixed ) ) { start <- theta } else { start[ !fixed ] <- theta } return( start ) } maxLik/R/openParam.R0000644000175100001440000000130414077525067013776 0ustar hornikusersopenParam <- function(object) { ## Return character list of 'open parameters', parameters that can ## be supplied to max* outside of 'control' list ## if(!inherits(object, "MaxControl")) { stop("'MaxControl' object required. Currently ", class(object)) } c("tol", "reltol", "gradtol", "steptol", # "lambdatol", ## Qadratic Approximation Control "qac", "qrtol", "lambda0", "lambdaStep", "maxLambda", ## optim Nelder-Mead "alpha", "beta", "gamma", ## SANN (open versions) "cand", "temp", "tmax", "random.seed", ## SGA ## - none ## "iterlim", "printLevel", "print.level") } maxLik/R/maxBFGS.R0000644000175100001440000000244214077525067013307 0ustar hornikusersmaxBFGS <- function(fn, grad=NULL, hess=NULL, start, fixed = NULL, control=NULL, constraints=NULL, finalHessian=TRUE, parscale=rep(1, length=length(start)), ## sumt parameters ...) { ## Wrapper of optim-based 'BFGS' optimization ## ## contraints constraints to be passed to 'constrOptim' ## finalHessian: how (and if) to calculate the final Hessian: ## FALSE not calculate ## TRUE use analytic/numeric Hessian ## bhhh/BHHH use information equality approach ## ## ... further arguments to fn() and grad() if(!inherits(control, "MaxControl")) { mControl <- addControlList(maxControl(iterlim=200), control) # default values } else { mControl <- control } mControl <- addControlList(mControl, list(...), check=FALSE) result <- maxOptim( fn = fn, grad = grad, hess = hess, start = start, method = "BFGS", fixed = fixed, constraints = constraints, finalHessian=finalHessian, parscale = parscale, control=mControl, ... ) return(result) } maxLik/R/objectiveFn.R0000644000175100001440000000025714077525067014320 0ustar hornikusers## Return the objective function, used for optimization objectiveFn <- function(x, ...) UseMethod("objectiveFn") objectiveFn.maxim <- function(x, ...) x$objectiveFn maxLik/R/maxNM.R0000644000175100001440000000253514077525067013103 0ustar hornikusersmaxNM <- function(fn, grad=NULL, hess=NULL, start, fixed = NULL, control=NULL, constraints=NULL, finalHessian=TRUE, parscale=rep(1, length=length(start)), ...) { ## Wrapper of optim-based 'Nelder-Mead' optimization ## ## contraints constraints to be passed to 'constrOptim' ## hessian: how (and if) to calculate the final Hessian: ## FALSE not calculate ## TRUE use analytic/numeric Hessian ## bhhh/BHHH use information equality approach ## ... : further arguments to fn() ## ## Note: grad and hess are for compatibility only, SANN uses only fn values if(!inherits(control, "MaxControl")) { mControl <- addControlList(maxControl(iterlim=500L), control) # default values } else { mControl <- control } mControl <- addControlList(mControl, list(...), check=FALSE) ## result <- maxOptim( fn = fn, grad = grad, hess = hess, start = start, method = "Nelder-Mead", fixed = fixed, constraints = constraints, finalHessian=finalHessian, parscale = parscale, control=mControl, ... ) return(result) } maxLik/R/maxSGA.R0000644000175100001440000001174514077525067013206 0ustar hornikusers maxSGA <- function(fn=NULL, grad=NULL, hess=NULL, start, nObs, constraints=NULL, finalHessian=FALSE, fixed=NULL, control=NULL, ...) { ## Newton-Raphson maximisation ## Parameters: ## fn - the function to be maximized. Returns either scalar or ## vector value with possible attributes ## constPar and newVal ## grad - gradient function (numeric used if missing). 
Must return either ## * vector, length=nParam ## * matrix, dim=c(nObs, 1). Treated as vector ## * matrix, dim=c(M, nParam), where M is arbitrary. In this case the ## rows are simply summed (useful for maxBHHH). ## hess - hessian function (used only for finalHessian, otherwise ignored) ## start - initial parameter vector (eventually w/names) ## ... - extra arguments for fn() ## finalHessian include final Hessian? As computing final hessian does not carry any extra penalty for NR method, this option is ## mostly for compatibility reasons with other maxXXX functions. ## TRUE/something else include ## FALSE do not include ## fixed index vector, which parameters to keep fixed ## ## RESULTS: ## a list of class "maxim": ## maximum function value at maximum ## estimate the parameter value at maximum ## gradient gradient ## hessian Hessian ## code integer code of success: ## 1 - gradient close to zero ## 2 - successive values within tolerance limit ## 3 - could not find a higher point (step error) ## 4 - iteration limit exceeded ## 100 - initial value out of range ## message character message describing the code ## iterations number of iterations ## type "Newton-Raphson maximisation" ## ## ------------------------------ ## Add parameters from ... to control if(!inherits(control, "MaxControl")) { mControl <- addControlList(maxControl(gradtol=0), control) } else { mControl <- control } mControl <- addControlList(mControl, list(...), check=FALSE) ## argNames <- c(c("fn", "grad", "hess", "start", "fixed", "control"), openParam(mControl)) # Here we allow to submit all parameters outside of the # 'control' list. May eventually include only a # subset here ## ensure that 'fn', 'grad', and 'hess' do not take any arguments that maxSGA eats up if(!is.null(fn)) { checkFuncArgs( fn, argNames, "fn", "maxSGA" ) } if( !is.null( grad ) ) { checkFuncArgs( grad, argNames, "grad", "maxSGA" ) } if( !is.null( hess ) ) { checkFuncArgs( hess, argNames, "hess", "maxSGA" ) } ## ensure that at least 'fn' or 'grad' are supplied if(is.null(fn) & is.null(grad)) { stop("maxSGA/maxAdam requires at least 'fn' or 'grad' to be supplied") } if(length(start) < 1) { stop("'start' must be of positive length!") } ## establish the active parameters. Internally, we just use 'activePar' fixed <- prepareFixed( start = start, activePar = NULL, fixed = fixed ) ## chop off the control args from ... and forward the new ... dddot <- list(...) dddot <- dddot[!(names(dddot) %in% openParam(mControl))] cl <- list(start=start, finalHessian=finalHessian, fixed=fixed, control=mControl, optimizer="SGA") if(length(dddot) > 0) { cl <- c(cl, dddot) } ## if(is.null(constraints)) { ## call maxSGACompute with the modified ... 
list cl <- c(quote(maxSGACompute), fn=logLikFunc, grad=logLikGrad, hess=logLikHess, fnOrig = fn, gradOrig = grad, hessOrig = hess, # these are forwarded to the logLikAttr nObs=nObs, cl) result <- eval(as.call(cl)) } else { if(identical(names(constraints), c("ineqA", "ineqB"))) { stop("Inequality constraints not implemented for maxSGA") } else if(identical(names(constraints), c("eqA", "eqB"))) { # equality constraints: A %*% beta + B = 0 cl <- c(quote(sumt), fn=fn, grad=grad, hess=hess, maxRoutine=maxSGA, constraints=list(constraints), cl) result <- eval(as.call(cl)) } else { stop("maxNR only supports the following constraints:\n", "constraints=list(ineqA, ineqB)\n", "\tfor A %*% beta + B >= 0 linear inequality constraints\n", "current constraints:", paste(names(constraints), collapse=" ")) } } ## Save the objective function result$objectiveFn <- fn ## return( result ) } maxLik/R/sumGradients.R0000644000175100001440000000044114077525067014522 0ustar hornikusers### Sum the observation-wise gradient sumGradients <- function( gr, nParam ) { if( !is.null(dim(gr))) { gr <- colSums(gr) } else { ## ... or vector if only one parameter if( nParam == 1 && length( gr ) > 1 ) { gr <- sum(gr) } } return( gr ) } maxLik/R/coef.maxLik.R0000644000175100001440000000036514077525067014222 0ustar hornikuserscoef.maxim <- function( object, ... ) { return( object$estimate ) } coef.maxLik <- function( object, ... ) { return( object$estimate ) } coef.summary.maxLik <- function( object, ... ) { result <- object$estimate return( result ) } maxLik/R/fnSubset.R0000644000175100001440000000272014077525067013650 0ustar hornikusersfnSubset <- function(x, fnFull, xFixed, xFull=c(x, xFixed), ...){ ## ## 1. Confirm length(x)+length(xFixed) = length(xFull) ## nx <- length(x) nFixed <- length(xFixed) nFull <- length(xFull) if((nx+nFixed) != nFull) stop("length(x)+length(xFixed) != length(xFull): ", nx, " + ", nFixed, " != ", nFull) ## ## 2. names(xFull)? ## # 2.1. is.null(names(xFull)) if(is.null(names(xFull))) return(fnFull(c(x, xFixed), ...)) # 2.2. xFull[names(xFixed)] <- xFixed, ... { if(is.null(names(xFixed))){ if(is.null(names(x))) xFull <- c(x, xFixed) else { x. <- (names(xFull) %in% names(x)) if(sum(x.) != nx){ print(x) print(xFull) stop("x has names not in xFull.") } xFull[names(x)] <- x xFull[!x.] <- xFixed } } else { Fixed <- (names(xFull) %in% names(xFixed)) if(sum(Fixed) != nFixed){ print(xFixed) print(xFull) stop("xFixed has names not in xFull.") } xFull[names(xFixed)] <- xFixed { if(is.null(names(x))) xFull[!Fixed] <- x else { x. <- (names(xFull) %in% names(x)) if(sum(x.) != nx){ print(x) print(xFull) stop("x has names not in xFull.") } xFull[names(x)] <- x } } } } ## ## 3. fnFull(...) ## fnFull(xFull, ...) } maxLik/R/maxOptim.R0000644000175100001440000002777514077525067013676 0ustar hornikusersmaxOptim <- function(fn, grad, hess, start, method, fixed, constraints, finalHessian=TRUE, parscale, control=maxControl(), ...) { ## Wrapper of optim-based optimization methods ## ## finalHessian: how (and if) to calculate the final Hessian: ## FALSE not calculate ## TRUE use analytic/numeric Hessian ## bhhh/BHHH use information equality approach ## if( method == "Nelder-Mead" ) { maxMethod <- "maxNM" } else { maxMethod <- paste( "max", method, sep = "" ) } ## ## Add parameters from ... to control if(!inherits(control, "MaxControl")) { stop("'control' must be a 'MaxControl' object, created by 'maxControl()'") } control <- addControlList(control, list(...), check=FALSE) ## Any forbidden arguments in fn? 
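    ## The user-supplied fn/grad/hess must not have formal arguments whose names
    ## (partially) match the reserved names collected in 'argNames' below: such a
    ## clash would make forwarding of extra arguments through '...' ambiguous,
    ## and checkFuncArgs() stops with an informative error.  For example
    ## (hypothetical user code), a log-likelihood defined as
    ##     function(beta, temp) ...
    ## would be rejected because its second argument 'temp' collides with the
    ## reserved SANN control name.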
argNames <- c( "fn", "grad", "hess", "start", "print.level", "iterlim", "constraints", "tol", "reltol", "parscale", "alpha", "beta", "gamma", "cand", "temp", "tmax" ) checkFuncArgs( fn, argNames, "fn", maxMethod ) if( !is.null( grad ) ) { checkFuncArgs( grad, argNames, "grad", maxMethod ) } if( !is.null( hess ) ) { checkFuncArgs( hess, argNames, "hess", maxMethod ) } ## check argument 'fixed' fixed <- prepareFixed( start = start, activePar = NULL, fixed = fixed ) message <- function(c) { switch(as.character(c), "0" = "successful convergence", "1" = "iteration limit exceeded", "10" = "degeneracy in Nelder-Mead simplex", "51" = "warning from the 'L-BFGS-B' method; see the corresponding component 'message' for details", "52" = "error from the 'L-BFGS-B' method; see the corresponding component 'message' for details" ) } ## initialize variables for saving gradients provided as attributes ## and the corresponding parameter values lastFuncGrad <- NULL lastFuncParam <- NULL ## chop off the control args from '...' and forward the new '...' dddot <- list(...) dddot <- dddot[!(names(dddot) %in% openParam(control))] # unfortunately now you have to do # do.call(function, args, dddot) instead of just calling # func(args, ...) ## strip possible SUMT parameters and call the function thereafter environment( callWithoutSumt ) <- environment() maximType <- paste( method, "maximization" ) parscale <- rep(parscale, length.out=length(start)) oControl <- list(trace=max(slot(control, "printLevel"), 0), REPORT=1, fnscale=-1, reltol=slot(control, "tol"), maxit=slot(control, "iterlim"), parscale=parscale[ !fixed ], alpha=slot(control, "nm_alpha"), beta=slot(control, "nm_beta"), gamma=slot(control, "nm_gamma"), temp=slot(control, "sann_temp"), tmax=slot(control, "sann_tmax") ) oControl$reltol <- slot(control, "reltol") argList <- list(theta=start, fName="logLikFunc", fnOrig = fn, gradOrig = grad, hessOrig = hess) if(length(dddot) > 0) { argList <- c(argList, dddot) } f1 <- do.call(callWithoutSumt, argList) if(is.na( f1)) { result <- list(code=100, message=maximMessage("100"), iterations=0, type=maximType) class(result) <- "maxim" return(result) } if(slot(control, "printLevel") > 2) { cat("Initial function value:", f1, "\n") } hasGradAttr <- !is.null( attr( f1, "gradient" ) ) if( hasGradAttr && !is.null( grad ) ) { grad <- NULL warning( "the gradient is provided both as attribute 'gradient' and", " as argument 'grad': ignoring argument 'grad'" ) } hasHessAttr <- !is.null( attr( f1, "hessian" ) ) if( hasHessAttr && !is.null( hess ) ) { hess <- NULL warning( "the Hessian is provided both as attribute 'hessian' and", " as argument 'hess': ignoring argument 'hess'" ) } if( method == "BFGS" ) { argList <- list(theta=start, fName="logLikGrad", fnOrig = fn, gradOrig = grad, hessOrig = hess) if(length(dddot) > 0) { argList <- c(argList, dddot) } G1 <- do.call(callWithoutSumt, argList) if(slot(control, "printLevel") > 2) { cat("Initial gradient value:\n") print(G1) } if(any(is.na(G1))) { stop("NA in the initial gradient") } if(any(is.infinite(G1))) { stop("Infinite initial gradient") } if(length(G1) != length(start)) { stop( "length of gradient (", length(G1), ") not equal to the no. 
of parameters (", length(start), ")" ) } } ## function to return the gradients (BFGS, CG) or the new candidate point (SANN) if( method == "BFGS" ) { gradOptim <- logLikGrad } else if( method == "SANN" ) { if( is.null(slot(control, "sann_cand") ) ) { gradOptim <- NULL } else { gradOptim <- function( theta, fnOrig, gradOrig, hessOrig, start, fixed, ... ) { return(control@sann_cand( theta, ... ) ) } } } else if( method == "CG" ) { gradOptim <- logLikGrad } else if( method == "Nelder-Mead" ) { gradOptim <- NULL } else { stop( "internal error: unknown method '", method, "'" ) } ## A note about return value: ## We can the return from 'optim' in a object of class 'maxim'. ## However, as 'sumt' already returns such an object, we return the ## result of 'sumt' directly, without the canning if(is.null(constraints)) { cl <- list(quote(optim), par = start[ !fixed ], fn = logLikFunc, control = oControl, method = method, gr = gradOptim, fnOrig = fn, gradOrig = grad, hessOrig = hess, start = start, fixed = fixed) if(length(dddot) > 0) { cl <- c(cl, dddot) } result <- eval(as.call(cl)) resultConstraints <- NULL } else { ## linear equality and inequality constraints # inequality constraints: A %*% beta + B >= 0 if(identical(names(constraints), c("ineqA", "ineqB"))) { nra <- nrow(constraints$ineqA) nrb <- nrow(as.matrix(constraints$ineqB)) ncb <- ncol(as.matrix(constraints$ineqB)) if(ncb != 1) { stop("Inequality constraint B must be a vector ", "(or Nx1 matrix). Currently ", ncb, " columns") } if(length(dim(constraints$ineqA)) != 2) { stop("Inequality constraint A must be a matrix\n", "Current dimension", dim(constraints$ineqA)) } if(ncol(constraints$ineqA) != length(start)) { stop("Inequality constraint A must have the same ", "number of columns as length of the parameter.\n", "Currently ", ncol(constraints$ineqA), " and ", length(start), ".") } if(ncol(constraints$ineqA) != length(start)) { stop("Inequality constraint A cannot be matrix multiplied", " with the start value.\n", "A is a ", nrow(constraints$ineqA), "x", ncol(constraints$ineqA), " matrix,", " start value has lenght ", length(start)) } if(nra != nrb) { stop("Inequality constraints A and B suggest different number ", "of constraints: ", nra, " and ", nrb) } cl <- list(quote(constrOptim2), theta = start, f = logLikFunc, grad = gradOptim, ineqA=constraints$ineqA, ineqB=constraints$ineqB, control=oControl, method = method, fnOrig = fn, gradOrig = grad, hessOrig = hess, fixed = fixed, start=start) # 'start' argument is needed for adding fixed parameters later in the call chain if(length(dddot) > 0) { cl <- c(cl, dddot) } result <- eval(as.call(cl)) resultConstraints <- list(type="constrOptim", barrier.value=result$barrier.value, outer.iterations=result$outer.iterations ) } else if(identical(names(constraints), c("eqA", "eqB"))) { # equality constraints: A %*% beta + B = 0 argList <- list(fn=fn, grad=grad, hess=hess, start=start, fixed = fixed, maxRoutine = get( maxMethod ), constraints=constraints, parscale = parscale, control=control) # recursive evaluation-> pass original (possibly # supplemented) control if(length(dddot) > 0) { argList <- c(argList, dddot) } result <- do.call( sumt, argList[ !sapply( argList, is.null ) ] ) return(result) # this is already maxim object } else { stop( maxMethod, " only supports the following constraints:\n", "constraints=list(ineqA, ineqB)\n", "\tfor A %*% beta + B >= 0 linear inequality constraints\n", "current constraints:", paste(names(constraints), collapse=" ")) } } # estimates (including fixed parameters) 
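    ## Reassemble the full parameter vector: optim() only worked on the free
    ## parameters, so start from 'start' (which still carries the values of the
    ## fixed parameters) and overwrite the free components with the optimizer's
    ## solution.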
estimate <- start estimate[ !fixed ] <- result$par ## Calculate the final gradient argList <- list(estimate, "logLikGrad", fnOrig = fn, gradOrig = grad, hessOrig = hess, sumObs = FALSE) if(length(dddot) > 0) { argList <- c(argList, dddot) } gradient <- do.call(callWithoutSumt, argList) if(observationGradient(gradient, length(start))) { gradientObs <- gradient gradient <- colSums(as.matrix(gradient )) } else { gradientObs <- NULL } ## calculate (final) Hessian if(tolower(finalHessian) == "bhhh") { if(!is.null(gradientObs)) { hessian <- - crossprod( gradientObs ) attr(hessian, "type") <- "BHHH" } else { hessian <- NULL warning("For computing the final Hessian by 'BHHH' method, the log-likelihood or gradient must be supplied by observations") } } else if(finalHessian != FALSE) { argList <- list( estimate, fnOrig = fn, gradOrig = grad, hessOrig = hess) if(length(dddot) > 0) { argList <- c(argList, dddot) } hessian <- as.matrix( do.call(logLikHess, argList) ) } else { hessian <- NULL } if( !is.null( hessian ) ) { rownames( hessian ) <- colnames( hessian ) <- names( estimate ) } result <- list( maximum=result$value, estimate=estimate, gradient=drop(gradient), # ensure the final (non-observation) gradient is just a vector hessian=hessian, code=result$convergence, message=paste(message(result$convergence), result$message), last.step=NULL, fixed = fixed, iterations=result$counts[1], type=maximType, constraints=resultConstraints ) if( exists( "gradientObs" ) ) { result$gradientObs <- gradientObs } result <- c(result, control=control, objectiveFn=fn) # attach the control parameters class(result) <- "maxim" return(result) } maxLik/R/summary.maxim.R0000644000175100001440000000612714077525067014673 0ustar hornikusersprint.summary.maxim <- function( x, max.rows=getOption("max.rows", 20), max.cols=getOption("max.cols", 7), ... ) { summary <- x cat("--------------------------------------------\n") cat(summary$type, "\n") cat("Number of iterations:", summary$iterations, "\n") cat("Return code:", summary$code, "\n") cat(summary$message, "\n") if(!is.null(summary$unsucc.step)) { cat("Last (unsuccessful) step: function value", summary$unsucc.step$value, "\n") print(summary$unsucc.step$parameters) } if(!is.null(summary$estimate)) { cat("Function value:", summary$maximum, "\n") cat("Estimates:\n") printRowColLimits(summary$estimate, max.rows, max.cols, ...) if(!is.null(summary$hessian)) { cat("Hessian:\n") printRowColLimits(summary$hessian, max.rows, max.cols, ...) } } if(!is.null(summary$constraints)) { cat("\nConstrained optimization based on", summary$constraints$type, "\n") if(!is.null(summary$constraints$code)) cat("Return code:", summary$constraints$code, "\n") # note: this is missing for 'constrOptim' if(!is.null(summary$constraints$message)) cat(summary$constraints$message, "\n") # note: this is missing for 'constrOptim' cat(summary$constraints$outer.iterations, " outer iterations, barrier value", summary$constraints$barrier.value, "\n") } cat("--------------------------------------------\n") } summary.maxim <- function(object, hessian=FALSE, unsucc.step=FALSE, ... 
) { ## The object of class "maxim" should include following components: ## maximum : function value at optimum ## estimate : matrix, estimated parameter values and gradient at optimum ## hessian : hessian ## code : code of convergence ## message : message, description of the code ## last.step : information about last step, if unsuccessful ## iterations : number of iterations ## type : type of optimisation ## nParam <- length(object$estimate) if(object$code == 3 & unsucc.step) { a <- cbind(object$last.step$theta0, object$last.step$theta1) dimnames(a) <- list(parameter=object$names, c("current par", "new par")) unsucc.step <- list(value=object$last.step$f0, parameters=a) } else { unsucc.step <- NULL } estimate <- cbind("estimate"=object$estimate, "gradient"=object$gradient) if(hessian) { H <- object$hessian } else { H <- NULL } summary <- list(maximum=object$maximum, type=object$type, iterations=object$iterations, code=object$code, message=object$message, unsucc.step=unsucc.step, estimate=estimate, hessian=H, constraints=object$constraints) class(summary) <- c("summary.maxim", class(summary)) summary } maxLik/R/zzz.R0000644000175100001440000000157414077525067012722 0ustar hornikusers.onAttach <- function( libname, pkgname ) { packageStartupMessage( paste0( "\nPlease cite the 'maxLik' package as:\n", "Henningsen, Arne and Toomet, Ott (2011). ", "maxLik: A package for maximum likelihood estimation in R. ", "Computational Statistics 26(3), 443-458. ", "DOI 10.1007/s00180-010-0217-1.\n\n", "If you have questions, suggestions, or comments ", "regarding the 'maxLik' package, ", "please use a forum or 'tracker' at maxLik's R-Forge site:\n", "https://r-forge.r-project.org/projects/maxlik/"), domain = NULL, appendLF = TRUE ) } .onLoad <- function(libname, pkgname) { ## max rows and columns to output when printing matrices/vectors options(max.rows = 20L, max.cols = 7L) } .onUnload <- function(libpath) { .Options$max.rows <- NULL .Options$max.cols <- NULL } maxLik/R/estfun.maxLik.R0000644000175100001440000000104514077525067014606 0ustar hornikusersestfun.maxLik <- function( x, ... ) { if( is.null( x$gradientObs ) ) { stop( "cannot return the gradients of the log-likelihood function", " evaluated at each observation: please re-run 'maxLik' and", " provide a gradient function using argument 'grad' or", " (if no gradient function is specified) a log-likelihood function", " using argument 'logLik'", " that return the gradients or log-likelihood values, respectively,", " at each observation" ) } return( x$gradientObs ) } maxLik/R/maxLik.R0000644000175100001440000000616414077525067013312 0ustar hornikusersmaxLik <- function(logLik, grad=NULL, hess=NULL, start, method, constraints=NULL, ...) { ## Maximum Likelihood estimation. ## ## Newton-Raphson maximisation ## Parameters: ## logLik log-likelihood function. First argument must be the vector of parameters. ## grad gradient of log-likelihood. If NULL, numeric gradient is used. Must return either ## * vector, length=nParam ## * matrix, dim=c(nObs, 1). Treated as vector ## * matrix, dim=c(nObs, nParam). In this case the rows are simply ## summed (useful for maxBHHH). ## hess Hessian function (numeric used if NULL) ## start initial vector of parameters (eventually w/names) ## method maximisation method (Newton-Raphson) ## constraints constrained optimization: a list (see below) ## ... additional arguments for the maximisation routine ## ## RESULTS: ## list of class c("maxLik", "maxim"). This is in fact equal to class "maxim", just the ## methods are different. 
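    ##             A minimal illustrative call (hypothetical data: a sample from
    ##             N(mu, 1); the log-likelihood returns observation-wise values):
    ##                 x <- rnorm(100, mean = 1)
    ##                 fit <- maxLik(function(mu) dnorm(x, mean = mu, log = TRUE),
    ##                               start = c(mu = 0))
    ##                 summary(fit)
    ##             Components of the returned object include: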
## maximum function value at maximum ## estimate the parameter value at maximum ## gradient gradient ## hessian Hessian ## code integer code of success, depends on the optimization ## method ## message character message describing the code ## type character, type of optimization ## ## there may be more components, depending on the choice of ## the algorith. ## argNames <- c( "logLik", "grad", "hess", "start", "method", "constraints" ) checkFuncArgs( logLik, argNames, "logLik", "maxLik" ) if( !is.null( grad ) ) { checkFuncArgs( grad, argNames, "grad", "maxLik" ) } if( !is.null( hess ) ) { checkFuncArgs( hess, argNames, "hess", "maxLik" ) } ## Constrained optimization. We can two possibilities: ## * linear equality constraints ## * linear inequality constraints ## if(missing(method)) { if(is.null(constraints)) { method <- "nr" } else if(identical(names(constraints), c("ineqA", "ineqB"))) { if(is.null(grad)) method <- "Nelder-Mead" else method <- "BFGS" } else method <- "nr" } maxRoutine <- switch(tolower(method), "newton-raphson" =, "nr" = maxNR, "bfgs" = maxBFGS, "bfgsr" =, "bfgs-r" = maxBFGSR, "bhhh" = maxBHHH, "conjugate-gradient" =, "cg" = maxCG, "nelder-mead" =, "nm" = maxNM, "sann" = maxSANN, stop( "Maxlik: unknown maximisation method ", method ) ) result <- maxRoutine(fn=logLik, grad=grad, hess=hess, start=start, constraints=constraints, ...) class(result) <- c("maxLik", class(result)) result } maxLik/R/25-addControlList.R0000644000175100001440000000661214077525067015234 0ustar hornikusers ## Function overwrite parameters of an existing MaxControl object using ## parameters supplied in a single list. ## We do not make it to a method: the signature would be indistinguishable ## from add(maxControl, ...) where ... is a single list addControlList <- function(x, y, check=TRUE) { ## add list y to the control x ## ## x: a maxcontrol object ## y: a named list of additional maxControl parameters ## ## check only accept known control options. 
## useful if attaching known control list ## if false, no checks performed and can add arbitrary list ## setSlot <- function(openName, slotName=openName[1], convert=function(x) x ) { ## Store potentially differently named value in slot ## ## openName vector of accepted name forms ## slotName corresponding actual slot name ## convert how to convert the value ## if(!any(openName %in% names(y))) { return(NULL) } i <- tail(which(names(y) %in% openName), 1) # pick the last occurrence: allow user to overwrite defaults slot(x, slotName) <- convert(y[[i]]) assign("x", x, envir=parent.frame()) # save modified x into parent frame } if(!inherits(x, "MaxControl")) { stop("'x' must be of class 'MaxControl'") } if(is.null(y)) { return(x) } if(!inherits(y, "list")) { stop("Control arguments to 'maxControl' must be supplied in the form of a list") } if(check) { knownNames <- union(openParam(x), slotNames(x)) if(any(uNames <- !(names(y) %in% knownNames))) { cat("Unknown control options:\n") print(names(y)[uNames]) stop("Unknown options not accepted") } } ## setSlot("tol") setSlot("reltol") setSlot("gradtol") setSlot("lambdatol") setSlot("qrtol") ## QAC setSlot(c("qac", "QAC"), "qac") setSlot(c("marquardt_lambda0", "Marquardt_lambda0")) setSlot(c("marquardt_lambdaStep", "Marquardt_lambdaStep")) setSlot(c("marquardt_maxLambda", "Marquardt_maxLambda")) ## NM setSlot(c("nm_alpha", "NM_alpha", "alpha")) setSlot(c("nm_beta", "NM_beta", "beta")) setSlot(c("nm_gamma", "NM_gamma", "gamma")) ## SANN setSlot(c("sann_cand", "SANN_cand", "cand")) setSlot(c("sann_temp", "SANN_temp", "temp")) setSlot(c("sann_tmax", "SANN_tmax", "tmax"), convert=as.integer) setSlot(c("sann_randomSeed", "SANN_randomSeed", "random.seed"), convert=as.integer) ## SGA setSlot("SGA_momentum") ## Adam setSlot("Adam_momentum1", convert=as.numeric) setSlot("Adam_momentum2", convert=as.numeric) ## SG general setSlot("SG_learningRate") setSlot("SG_batchSize", convert=as.integer) setSlot("SG_clip", convert=as.numeric) setSlot("SG_patience", convert=as.integer) setSlot("SG_patienceStep", convert=as.integer) ## setSlot("iterlim", convert=as.integer) setSlot("max.rows", convert=as.integer) setSlot("max.cols", convert=as.integer) setSlot(c("printLevel", "print.level"), convert=as.integer) setSlot("storeValues", convert=as.logical) setSlot("storeParameters", convert=as.logical) ## validObject(x) return(x) } ### Method for 'MaxControl' objects: add the second argument, list setMethod("maxControl", signature("MaxControl"), addControlList) maxLik/R/numericHessian.R0000644000175100001440000000543714077525067015044 0ustar hornikusersnumericHessian <- function(f, grad=NULL, t0, eps=1e-6, fixed, ...) { a <- f(t0, ...) if(is.null(grad)) { numericNHessian( f = f, t0 = t0, eps = eps, fixed=fixed, ...) # gradient not provided -> everything numerically } else { numericGradient( f = grad, t0 = t0, eps = eps, fixed=fixed, ...) # gradient is provided -> Hessian is grad grad } } numericNHessian <- function( f, t0, eps=1e-6, fixed, ...) 
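## (f: objective function returning a numeric scalar; t0: point at which the
##  Hessian is approximated; eps: finite-difference step; fixed: logical,
##  rows/columns corresponding to fixed parameters are skipped and left as NA)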
{ ## Numeric Hessian without gradient ## Assume f() returns a scalar ## ## fixed calculate the Hessian only for the non-fixed parameters warnMessage <- function(theta, value) { ## issue a warning if the function value at theta is not a scalar max.print <- 10 if(length(value) != 1) { warnMsg <- "Function value at\n" warnMsg <- c(warnMsg, paste(format(theta[seq(length=min(max.print,length(theta)))]), collapse=" "), "\n") if(max.print < length(theta)) warnMsg <- c(warnMsg, "...\n") warnMsg <- c(warnMsg, " =\n") warnMsg <- c(warnMsg, paste(format(value[seq(length=min(max.print,length(value)))]), collapse=" "), "\n") if(max.print < length(value)) warnMsg <- c(warnMsg, "...\n") warnMsg <- c(warnMsg, "but numeric Hessian only works on numeric scalars\n", "Component set to NA") return(warnMsg) } if(!is.numeric(value)) stop("The function value must be numeric") return(NULL) } f00 <- f( t0, ...) if(!is.null(msg <- warnMessage(t0, f00))) { warning(msg) f00 <- NA } eps2 <- eps*eps N <- length( t0) H <- matrix(NA, N, N) if(missing(fixed)) fixed <- rep(FALSE, length(t0)) for( i in 1:N) { if(fixed[i]) next for( j in 1:N) { if(fixed[j]) next t01 <- t0 t10 <- t0 t11 <- t0 # initial point t01[i] <- t01[i] + eps t10[j] <- t10[j] + eps t11[i] <- t11[i] + eps t11[j] <- t11[j] + eps f01 <- f( t01, ...) if(!is.null(msg <- warnMessage(t01, f01))) { warning(msg) f01 <- NA } f10 <- f( t10, ...) if(!is.null(msg <- warnMessage(t10, f10))) { warning(msg) f10 <- NA } f11 <- f( t11, ...) if(!is.null(msg <- warnMessage(t11, f11))) { warning(msg) f11 <- NA } H[i,j] <- ( f11 - f01 - f10 + f00)/eps2 } } return( H ) } maxLik/R/AIC.R0000644000175100001440000000021214077525067012445 0ustar hornikusers## Akaike (and other) information criteria AIC.maxLik <- function(object, ..., k = 2) -2*logLik(object) + k*nParam(object, free=TRUE) maxLik/R/maxValue.R0000644000175100001440000000014714077525067013642 0ustar hornikusersmaxValue <- function(x, ...) UseMethod("maxValue") maxValue.maxim <- function(x, ...) x$maximum maxLik/R/maximMessage.R0000644000175100001440000000230214077525067014473 0ustar hornikusersmaximMessage <- function(code) { message <- switch(code, "1" = "gradient close to zero (gradtol)", "2" = "successive function values within tolerance limit (tol)", "3" = paste("Last step could not find a value above the", "current.\nBoundary of parameter space?", " \nConsider switching to a more robust optimisation method temporarily."), "4" = "Iteration limit exceeded (iterlim)", "5" = "Infinite value", "6" = "Infinite gradient", "7" = "Infinite Hessian", "8" = "successive function values within relative tolerance limit (reltol)", "9" = paste("Gradient did not change,", "cannot improve BFGS approximation for the Hessian.\n", "Use different optimizer and/or analytic gradient."), "10" = "Lost patience (SG_patience)", "100" = "Initial value out of range.", paste("Code", code)) return(message) } maxLik/R/05-classes.R0000644000175100001440000000007114077525067013733 0ustar hornikusers## first to be loaded: setOldClass(c("maxLik", "maxim")) maxLik/R/checkBhhhGrad.R0000644000175100001440000000645414077525067014534 0ustar hornikuserscheckBhhhGrad <- function( g, theta, analytic, fixed=NULL) { ## This function controls if the user-supplied analytic or ## numeric gradient of the right dimension. ## If not, signals an error. ## ## analytic: logical, do we have a user-supplied analytic ## gradient? if(is.null(fixed)) { activePar <- rep(T, length=length(theta)) } else { activePar <- !fixed } if( analytic ) { ## Gradient supplied by the user. 
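        ## For the BHHH approximation the gradient must be an nObs x nParam
        ## matrix of observation-wise score contributions: a single summed
        ## gradient vector is not enough to form the outer-product Hessian.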
## Check whether the gradient has enough rows (about enough ## observations in data) if( !is.matrix( g ) ) { stop("gradient is not a matrix but of class '", class( g ), "';\n", "the BHHH method requires that the gradient function\n", "(argument 'grad') returns a numeric matrix,\n", "where each row must correspond to the gradient(s)\n", "of the log-likelihood function at an individual\n", "(independent) observation and each column must\n", "correspond to a parameter" ) } else if( nrow( g ) < length( theta[activePar] ) ) { stop( "the matrix returned by the gradient function", " (argument 'grad') must have at least as many", " rows as the number of parameters (", length( theta ), "),", " where each row must correspond to the gradients", " of the log-likelihood function of an individual", " (independent) observation:\n", " currently, there are (is) ", length( theta ), " parameter(s)", " but the gradient matrix has only ", nrow( g ), " row(s)" ) } else if( ncol( g ) != length( theta ) ) { stop( "the matrix returned by the gradient function", " (argument 'grad') must have exactly as many columns", " as the number of parameters:\n", " currently, there are (is) ", length( theta ), " parameter(s)", " but the gradient matrix has ", ncol( g ), " columns" ) } } else { ## numeric gradient ## Check whether the gradient has enough rows. This is the case ## if and only if loglik has enough rows, hence the error message ## about loglik. if( !is.matrix( g ) || nrow( g ) == 1 ) { stop( "if the gradients (argument 'grad') are not provided by the user,", " the BHHH method requires that the log-likelihood function", " (argument 'fn') returns a numeric vector,", " where each element must be the log-likelihood value corresponding", " to an individual (independent) observation" ) } if( nrow( g ) < length( theta ) ) { stop( "the vector returned by the log-likelihood function", " (argument 'fn') must have at least as many elements", " as the number of parameters,", " where each element must be the log-likelihood value corresponding", " to an individual (independent) observation:\n", " currently, there are (is) ", length( theta ), " parameter(s)", " but the log likelihood function return only ", nrow( g ), " element(s)" ) } } return( NULL ) } maxLik/R/nObs.R0000644000175100001440000000131614077525067012760 0ustar hornikusers## Return #of observations for models nObs.maxLik <- function(x, ...) { if( is.null( x$gradientObs ) ) { stop( "cannot return the number of observations:", " please re-run 'maxLik' and", " provide a gradient function using argument 'grad' or", " (if no gradient function is specified) a log-likelihood function", " using argument 'logLik'", " that return the gradients or log-likelihood values, respectively,", " at each observation" ) } else if( is.matrix( x$gradientObs ) ) { return( nrow( x$gradientObs ) ) } else { stop( "internal error: component 'gradientObs' is not a matrix.", " Please contact the developers." ) } } maxLik/R/checkFuncArgs.R0000644000175100001440000000231214077525067014562 0ustar hornikusers### check of any of the args to the function that calls 'func' ### match arguments of 'func' ### checkFuncArgs <- function( func, checkArgs, argName, funcName ) { ## is the 'func' a function? 
if( !is.function( func ) ) { stop( "argument '", argName, "' of function '", funcName, "' is not a function" ) } funcArgs <- names( formals( func ) ) if( length( funcArgs ) > 1 ) { a <- charmatch( funcArgs[ -1 ], checkArgs ) if( sum( !is.na( a ) ) == 1 ) { stop( "argument '", funcArgs[ -1 ][ !is.na( a ) ], "' of the function specified in argument '", argName, "' of function '", funcName, "' (partially) matches the argument names of function '", funcName, "'. Please change the name of this argument" ) } else if( sum( !is.na( a ) ) > 1 ) { stop( "arguments '", paste( funcArgs[ -1 ][ !is.na( a ) ], collapse = "', '" ), "' of the function specified in argument '", argName, "' of function '", funcName, "' (partially) match the argument names of function '", funcName, "'. Please change the names of these arguments" ) } } return( NULL ) } maxLik/R/logLikGrad.R0000644000175100001440000000415614077525067014103 0ustar hornikusers## gradient function: ## sum over possible individual gradients logLikGrad <- function(theta, fnOrig, gradOrig=NULL, hessOrig=NULL, start = NULL, fixed = NULL, sumObs = TRUE, gradAttr = NULL, ...) { # Argument "hessOrig" is just for compatibility with logLikHess() # argument "gradAttr" should be # - FALSE if the gradient is not provided as attribute of the log-lik value # - TRUE if the gradient is provided as attribute of the log-lik value # - NULL if this is not known theta <- addFixedPar( theta = theta, start = start, fixed = fixed, ...) if(!is.null(gradOrig)) { g <- gradOrig(theta, ...) } else if( isTRUE( gradAttr ) || is.null( gradAttr ) ) { if( exists( "lastFuncGrad" ) && exists( "lastFuncParam" ) ) { if( identical( theta, lastFuncParam ) ) { g <- lastFuncGrad } else { g <- "different parameters" } } else { g <- "'lastFuncGrad' or 'lastFuncParam' does not exist" } if( is.character( g ) ) { # do not call fnOrig() if 'lastFuncGrad' is NULL g <- attr( fnOrig( theta, ... ), "gradient" ) } } else { g <- NULL } if( is.null( g ) ) { g <- numericGradient(logLikFunc, theta, fnOrig = fnOrig, sumObs = sumObs, ...) } if( sumObs ) { ## We were requested a single (summed) gradient. Return a vector g <- sumGradients( g, length( theta ) ) names( g ) <- names( theta ) if( !is.null( fixed ) ) { g <- g[ !fixed ] } } else { ## we were requested individual gradients (if possible). Ensure g is a matrix if(observationGradient(g, length(theta))) { ## it was indeed by observations g <- as.matrix(g) colnames( g ) <- names( theta ) if( !is.null( fixed ) ) { g <- g[ , !fixed ] } } else { ## it wasn't g <- drop(g) names(g) <- names(theta) if( !is.null( fixed ) ) { g <- g[ !fixed ] } } } return( g ) } maxLik/R/maxCG.R0000644000175100001440000000266614077525067013067 0ustar hornikusersmaxCG <- function(fn, grad=NULL, hess=NULL, start, fixed = NULL, control=NULL, constraints=NULL, finalHessian=TRUE, parscale=rep(1, length=length(start)), ...) { ## Wrapper of optim-based 'Conjugate Gradient' optimization ## ## contraints constraints to be passed to 'constrOptim' ## hessian: how (and if) to calculate the final Hessian: ## FALSE not calculate ## TRUE use analytic/numeric Hessian ## bhhh/BHHH use information equality approach ## ... 
: further arguments to fn() ## ## Note: grad and hess are for compatibility only, SANN uses only fn values ## if(!inherits(control, "MaxControl")) { mControl <- addControlList(maxControl(iterlim=500), control) # default values } else { mControl <- control } # default, user values mControl <- addControlList(mControl, list(...), check=FALSE) # open values result <- maxOptim( fn = fn, grad = grad, hess = hess, start = start, method = "CG", fixed = fixed, constraints = constraints, finalHessian=finalHessian, parscale = parscale, control=mControl, ... ) return(result) } maxLik/R/stdEr.maxLik.R0000644000175100001440000000077414077525067014373 0ustar hornikusers stdEr.maxLik <- function(x, eigentol=1e-12, ...) { ## if(!inherits(x, "maxLik")) ## stop("'stdEr.maxLik' called on a non-'maxLik' object") ## Here we should actually coerce the object to a 'maxLik' object, dropping all the subclasses... ## Instead, we force the program to use maxLik-related methods if(!is.null(vc <- vcov(x, eigentol=eigentol))) { s <- sqrt(diag(vc)) names(s) <- names(coef(x)) return(s) } # if vcov is not working, return NULL return(NULL) } maxLik/R/gradient.R0000644000175100001440000000021314077525067013647 0ustar hornikusers## Return gradient of an object gradient <- function(x, ...) UseMethod("gradient") gradient.maxim <- function(x, ...) x$gradient maxLik/R/constrOptim2.R0000644000175100001440000001227614077525067014471 0ustar hornikusers# This file is a modified copy of src/library/stats/R/constrOptim.R # Part of the R package, http://www.R-project.org ### This foutine is not intended for end-user use. ### API is subject to change. constrOptim2<-function(theta, f,grad=NULL, ineqA,ineqB, mu=0.0001,control=list(), method=if(is.null(grad)) "Nelder-Mead" else "BFGS", outer.iterations=100,outer.eps=0.00001, ...){ ## Optimize with inequality constraint using SUMT/logarithmic ## barrier ## ## start initial value of parameters, included the fixed ones ## ## This function has to operate with free parameter components ## only as 'optim' cannot handle ## fixed parameters. However, for computing constraints in ## 'R' and 'dR' we have to use the complete parameter vector. ## R <- function(thetaFree, thetaFree.old, ...) { ## Wrapper for the function. As this will be feed to the ## 'optim', we have to call it with free parameters only ## (thetaFree) and internally expand it to the full (theta) ## ## Were we called with 'fixed' argument in ... ? dotdotdot <- list(...) # can this be made better? fixed <- dotdotdot[["fixed"]] theta <- addFixedPar( theta = thetaFree, start = theta0, fixed = fixed) theta.old <- addFixedPar( theta = thetaFree.old, start = theta0, fixed = fixed) ineqA.theta<-ineqA%*%theta gi<- ineqA.theta + ineqB if(any(gi < 0)) ## at least one of the constraints not fulfilled return(NaN) gi.old <- ineqA%*%theta.old + ineqB bar <- sum(gi.old*log(gi) - ineqA.theta) # logarithmic barrier value: sum over # components if(!is.finite(bar)) bar<- -Inf result <- f(thetaFree, ...)-mu*bar # do not send 'fixed' and 'start' to the # function here -- we have already # expanded theta to the full parameter result } dR<-function(thetaFree, thetaFree.old, ...){ ## Wrapper for the function. As this will be feed to the 'optim', ## we have to call it with free parameters only (thetaFree) and ## internally expand it to the full (theta) ## ## Were we called with 'fixed' argument in ... ? dotdotdot <- list(...) # can this be made better? 
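## Illustrative sketch (not run), assuming a numeric data vector 'x' in the
## workspace: maxCG() above wraps optim(method = "CG") via maxOptim(); the same
## optimizer is available as maxLik(..., method = "CG"), after which the
## accessors gradient() and stdEr() extract the gradient and the standard
## errors from the fitted object.
##    llf <- function(theta) sum(dnorm(x, mean = theta, log = TRUE))
##    glf <- function(theta) sum(x - theta)
##    fit <- maxLik(llf, grad = glf, start = c(mu = 0), method = "CG")
##    gradient(fit)   # should be close to zero at the maximum
##    stdEr(fit)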
fixed <- dotdotdot[["fixed"]] theta <- addFixedPar( theta = thetaFree, start = theta0, fixed = fixed) theta.old <- addFixedPar( theta = thetaFree.old, start = theta0, fixed = fixed) ineqA.theta<-ineqA%*%theta gi<-drop(ineqA.theta + ineqB) gi.old<-drop(ineqA%*%theta.old + ineqB) dbar<-colSums( ineqA*gi.old/gi-ineqA) if(!is.null(fixed)) gr <- grad(thetaFree,...)- (mu*dbar)[!fixed] # grad only gives gradient for the free parameters in order to maintain # compatibility with 'optim'. Hence we compute barrier gradient # for the free parameters only as well. else gr <- grad(thetaFree,...)- (mu*dbar) return(gr) } if (!is.null(control$fnscale) && control$fnscale<0) mu <- -mu ##maximizing if(any(ineqA%*%theta + ineqB < 0)) stop("initial value not the feasible region") theta0 <- theta # inital value, for keeping the fixed params ## Were we called with 'fixed' argument in ... ? fixed <- list(...)[["fixed"]] if(!is.null(fixed)) thetaFree <- theta[!fixed] else thetaFree <- theta ## obj<-f(thetaFree, ...) r<-R(thetaFree,thetaFree,...) for(i in 1L:outer.iterations){ obj.old<-obj r.old<-r thetaFree.old<-thetaFree fun<-function(thetaFree,...){ R(thetaFree,thetaFree.old,...)} if( method == "SANN" ) { if( is.null( grad ) ) { gradient <- NULL } else { gradient <- grad } } else { gradient <- function(thetaFree, ...) { dR(thetaFree, thetaFree.old, ...) } } ## As 'optim' does not directly support fixed parameters, a<-optim(par=thetaFree.old,fn=fun,gr=gradient,control=control,method=method,...) r<-a$value if (is.finite(r) && is.finite(r.old) && abs(r-r.old)/(outer.eps+abs(r-r.old))obj.old) break } if (i==outer.iterations){ a$convergence<-7 a$message<-"Barrier algorithm ran out of iterations and did not converge" } if (mu>0 && obj>obj.old){ a$convergence<-11 a$message<-paste("Objective function increased at outer iteration",i) } if (mu<0 && obj t)" = p ) } else { results <- NULL } summary <- list(maximType=object$type, iterations=object$iterations, returnCode=object$code, returnMessage=object$message, loglik=object$maximum, estimate=results, fixed=!activePar, NActivePar=sum(activePar), constraints=object$constraints) class(summary) <- "summary.maxLik" summary } maxLik/R/storedParameters.R0000644000175100001440000000036414077525067015405 0ustar hornikusers## Return the stored parameters in a 'maxim' object storedParameters <- function(x, ...) ## stored parameter values at each epoch/iteration UseMethod("storedParameters") storedParameters.maxim <- function(x, ...) x$parameterStore maxLik/R/maxNR.R0000644000175100001440000001233214077525067013104 0ustar hornikusersmaxNR <- function(fn, grad=NULL, hess=NULL, start, constraints=NULL, finalHessian=TRUE, bhhhHessian=FALSE, fixed=NULL, activePar=NULL, control=NULL, ...) { ## Newton-Raphson maximisation ## Parameters: ## fn - the function to be minimized. Returns either scalar or ## vector value with possible attributes ## constPar and newVal ## grad - gradient function (numeric used if missing). Must return either ## * vector, length=nParam ## * matrix, dim=c(nObs, 1). Treated as vector ## * matrix, dim=c(M, nParam), where M is arbitrary. In this case the ## rows are simply summed (useful for maxBHHH). ## hess - hessian function (numeric used if missing) ## start - initial parameter vector (eventually w/names) ## ... - extra arguments for fn() ## finalHessian include final Hessian? As computing final hessian does not carry any extra penalty for NR method, this option is ## mostly for compatibility reasons with other maxXXX functions. 
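## Illustrative sketch (not run): the logarithmic-barrier loop above implements
## the inequality constraints ineqA %*% theta + ineqB >= 0 for the optim-based
## maximizers, while maxNR() (below) only supports the equality constraints
## eqA %*% theta + eqB = 0 through the SUMT wrapper.  Assuming a numeric data
## vector 'x' in the workspace and a model with mean theta[1] and log-sd
## theta[2]:
##    llf <- function(theta) sum(dnorm(x, mean = theta[1], sd = exp(theta[2]),
##                                     log = TRUE))
##    ## equality constraint theta[1] + theta[2] = 0:
##    fitEq <- maxNR(llf, start = c(0, 0.1),
##                   constraints = list(eqA = matrix(c(1, 1), 1, 2), eqB = 0))
##    ## inequality constraint theta[1] >= 0.1 (optim-based BFGS):
##    fitIneq <- maxBFGS(llf, start = c(0.5, 0.1),
##                       constraints = list(ineqA = matrix(c(1, 0), 1, 2),
##                                          ineqB = -0.1))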
## TRUE/something else include ## FALSE do not include ## activePar - an index vector -- which parameters are taken as ## variable (free). Other paramters are treated as ## fixed constants ## fixed index vector, which parameters to keep fixed ## ## RESULTS: ## a list of class "maxim": ## maximum function value at maximum ## estimate the parameter value at maximum ## gradient gradient ## hessian Hessian ## code integer code of success: ## 1 - gradient close to zero ## 2 - successive values within tolerance limit ## 3 - could not find a higher point (step error) ## 4 - iteration limit exceeded ## 100 - initial value out of range ## message character message describing the code ## last.step only present if code == 3 (step error). A list with following components: ## theta0 - parameter value which led to the error ## f0 - function value at these parameter values ## climb - the difference between theta0 and the new approximated parameter value (theta1) ## activePar - logical vector, which parameters are active (not constant) ## activePar logical vector, which parameters were treated as free (resp fixed) ## iterations number of iterations ## type "Newton-Raphson maximisation" ## ## ------------------------------ ## Add parameters from ... to control if(!inherits(control, "MaxControl")) { mControl <- addControlList(maxControl(), control) } else { mControl <- control } mControl <- addControlList(mControl, list(...), check=FALSE) ## argNames <- c(c("fn", "grad", "hess", "start", "activePar", "fixed", "control"), openParam(mControl)) # Here we allow to submit all parameters outside of the # 'control' list. May eventually include only a # subset here ## checkFuncArgs( fn, argNames, "fn", "maxNR" ) if( !is.null( grad ) ) { checkFuncArgs( grad, argNames, "grad", "maxNR" ) } if( !is.null( hess ) ) { checkFuncArgs( hess, argNames, "hess", "maxNR" ) } ## establish the active parameters. Internally, we just use 'activePar' fixed <- prepareFixed( start = start, activePar = activePar, fixed = fixed ) ## chop off the control args from ... and forward the new ... dddot <- list(...) dddot <- dddot[!(names(dddot) %in% openParam(mControl))] cl <- list(start=start, finalHessian=finalHessian, bhhhHessian=bhhhHessian, fixed=fixed, control=mControl) if(length(dddot) > 0) { cl <- c(cl, dddot) } ## if(is.null(constraints)) { ## call maxNRCompute with the modified ... list cl <- c(quote(maxNRCompute), fn=logLikAttr, fnOrig = fn, gradOrig = grad, hessOrig = hess, cl) result <- eval(as.call(cl)) } else { if(identical(names(constraints), c("ineqA", "ineqB"))) { stop("Inequality constraints not implemented for maxNR") } else if(identical(names(constraints), c("eqA", "eqB"))) { # equality constraints: A %*% beta + B = 0 cl <- c(quote(sumt), fn=fn, grad=grad, hess=hess, maxRoutine=maxNR, constraints=list(constraints), cl) result <- eval(as.call(cl)) } else { stop("maxNR only supports the following constraints:\n", "constraints=list(ineqA, ineqB)\n", "\tfor A %*% beta + B >= 0 linear inequality constraints\n", "current constraints:", paste(names(constraints), collapse=" ")) } } ## Save the objective function result$objectiveFn <- fn ## return( result ) } maxLik/R/observationGradient.R0000644000175100001440000000053714077525067016074 0ustar hornikusers ### The function tests whether a given gradient is given ### observation-wise. 
It tests essentially the # of rows ### in the gradient observationGradient <- function(g, nParam) { if(is.null(dim(g))) { if(nParam == 1 & length(g) > 1) return(TRUE) return(FALSE) } if(nrow(g) == 1) return(FALSE) return(TRUE) } maxLik/R/callWithoutSumt.R0000644000175100001440000000032414077525067015225 0ustar hornikusers## strip possible SUMT parameters and call the function thereafter callWithoutSumt <- function(theta, fName, ...) { return( callWithoutArgs( theta, fName = fName, args = names(formals(sumt)), ... ) ) } maxLik/R/storedValues.R0000644000175100001440000000033314077525067014535 0ustar hornikusers## Return the stored values in 'maxim' object storedValues <- function(x, ...) ## stored optimization values at each iteration UseMethod("storedValues") storedValues.maxim <- function(x, ...) x$valueStore maxLik/R/maxBFGSRCompute.R0000644000175100001440000003452014077525067014770 0ustar hornikusersmaxBFGSRCompute <- function(fn, start, finalHessian=TRUE, fixed=NULL, control=maxControl(), ...) { ## This function is originally developed by Yves Croissant (and placed in 'mlogit' package). ## Fitted for 'maxLik' by Ott Toomet, and revised by Arne Henningsen ## ## BFGS maximisation, implemented by Yves Croissant ## Parameters: ## fn - the function to be minimized. Returns either scalar or ## vector value with possible attributes ## constPar and newVal ## fn must return the value with attribute 'gradient' ## (and also attribute 'hessian' if it should be returned) ## fn must have an argument sumObs ## start - initial parameter vector (eventually w/names) ## finalHessian include final Hessian? As computing final hessian does not carry any extra penalty for NR method, this option is ## mostly for compatibility reasons with other maxXXX functions. ## TRUE/something else include ## FALSE do not include ## fixed - a logical vector -- which parameters are taken as fixed. ## control MaxControl object: ## steptol - minimum step size ## lambdatol - max lowest eigenvalue when forcing pos. definite H ## qrtol - tolerance for qr decomposition ## qac How to handle the case where new function value is ## smaller than the original one: ## "stephalving" smaller step in the same direction ## "marquardt" Marquardt (1963) approach ## The stopping criteria ## tol - maximum allowed absolute difference between sequential values ## reltol - maximum allowed reltive difference (stops if < reltol*(abs(fn) + reltol) ## gradtol - maximum allowed norm of gradient vector ## ## iterlim - maximum # of iterations ## ## Other paramters are treated as variable (free). ## ## RESULTS: ## a list of class "maxim": ## maximum function value at maximum ## estimate the parameter value at maximum ## gradient gradient ## hessian Hessian ## code integer code of success: ## 1 - gradient close to zero ## 2 - successive values within tolerance limit ## 3 - could not find a higher point (step error) ## 4 - iteration limit exceeded ## 100 - initial value out of range ## message character message describing the code ## last.step only present if code == 3 (step error). 
A list with following components: ## theta0 - parameter value which led to the error ## f0 - function value at these parameter values ## climb - the difference between theta0 and the new approximated parameter value (theta1) ## fixed - logical vector, which parameters are constant (fixed, inactive, non-free) ## fixed logical vector, which parameters were treated as constant (fixed, inactive, non-free) ## iterations number of iterations ## type "BFGSR maximisation" ## ## max.eigen <- function( M) { ## return maximal eigenvalue of (symmetric) matrix val <- eigen(M, symmetric=TRUE, only.values=TRUE)$values val[1] ## L - eigenvalues in decreasing order, [1] - biggest in abs value } ## maxim.type <- "BFGSR maximization" param <- start nimed <- names(start) nParam <- length(param) ## chi2 <- 1E+10 iter <- 0L # eval a first time the function, the gradient and the hessian x <- sumKeepAttr( fn( param, fixed = fixed, sumObs = FALSE, returnHessian = FALSE, ... ) ) # sum of log-likelihood value but not sum of gradients if (slot(control, "printLevel") > 0) cat( "Initial value of the function :", x, "\n" ) if(is.na(x)) { result <- list(code=100, message=maximMessage("100"), iterations=0, type=maxim.type) class(result) <- "maxim" return(result) } if(is.infinite(x) & (x > 0)) { # we stop at +Inf but not at -Inf result <- list(code=5, message=maximMessage("5"), iterations=0, type=maxim.type) class(result) <- "maxim" return(result) } if( isTRUE( attr( x, "gradBoth" ) ) ) { warning( "the gradient is provided both as attribute 'gradient' and", " as argument 'grad': ignoring argument 'grad'" ) } if( isTRUE( attr( x, "hessBoth" ) ) ) { warning( "the Hessian is provided both as attribute 'hessian' and", " as argument 'hess': ignoring argument 'hess'" ) } ## ## gradient by individual observations, used for BHHH approximation of initial Hessian. ## If not supplied by observations, we use the summed gradient. gri <- attr( x, "gradient" ) gr <- sumGradients( gri, nParam = length( param ) ) if(slot(control, "printLevel") > 2) { cat("Initial gradient value:\n") print(gr) } if(any(is.na(gr[!fixed]))) { stop("NA in the initial gradient") } if(any(is.infinite(gr[!fixed]))) { stop("Infinite initial gradient") } if(length(gr) != nParam) { stop( "length of gradient (", length(gr), ") not equal to the no. of parameters (", nParam, ")" ) } ## initial approximation for inverse Hessian. We only work with the non-fixed part if(observationGradient(gri, length(param))) { invHess <- -solve(crossprod(gri[,!fixed])) # initial approximation of inverse Hessian (as in BHHH), if possible if(slot(control, "printLevel") > 3) { cat("Initial inverse Hessian by gradient crossproduct\n") if(slot(control, "printLevel") > 4) { print(invHess) } } } else { invHess <- -1e-5*diag(1, nrow=length(gr[!fixed])) # ... if not possible (Is this OK?). Note we make this negative definite. 
if(slot(control, "printLevel") > 3) { cat("Initial inverse Hessian is diagonal\n") if(slot(control, "printLevel") > 4) { print(invHess) } } } if( slot(control, "printLevel") > 1) { cat("-------- Initial parameters: -------\n") cat( "fcn value:", as.vector(x), "\n") a <- cbind(start, gr, as.integer(!fixed)) dimnames(a) <- list(nimed, c("parameter", "initial gradient", "free")) print(a) cat("------------------------------------\n") } samm <- NULL # this will be returned in case of step getting too small I <- diag(nParam - sum(fixed)) direction <- rep(0, nParam) ## ----------- Main loop --------------- repeat { iter <- iter + 1L if( iter > slot(control, "iterlim")) { code <- 4; break } if(any(is.na(invHess))) { cat("Error in the approximated (free) inverse Hessian:\n") print(invHess) stop("NA in Hessian") } if(slot(control, "printLevel") > 0) { cat("Iteration ", iter, "\n") if(slot(control, "printLevel") > 3) { cat("Eigenvalues of approximated inverse Hessian:\n") print(eigen(invHess, only.values=TRUE)$values) if(slot(control, "printLevel") > 4) { cat("inverse Hessian:\n") print(invHess) } } } ## Next, ensure that the approximated inverse Hessian is negative definite for computing ## the new climbing direction. However, retain the original, potentially not negative definite ## for computing the following approximation. ## This procedure seems to work, but unfortunately I have little idea what I am doing :-( approxHess <- invHess # approxHess is used for computing climbing direction, invHess for next approximation while((me <- max.eigen( approxHess)) >= -slot(control, "lambdatol") | (qRank <- qr(approxHess, tol=slot(control, "qrtol"))$rank) < sum(!fixed)) { # maximum eigenvalue -> negative definite # qr()$rank -> singularity lambda <- abs(me) + slot(control, "lambdatol") + min(abs(diag(approxHess)))/1e7 # The third term corrects numeric singularity. If diag(H) only contains # large values, (H - (a small number)*I) == H because of finite precision approxHess <- approxHess - lambda*I if(slot(control, "printLevel") > 4) { cat("Not negative definite. Subtracting", lambda, "* I\n") cat("Eigenvalues of new approximation:\n") print(eigen(approxHess, only.values=TRUE)$values) if(slot(control, "printLevel") > 5) { cat("new Hessian approximation:\n") print(approxHess) } } # how to make it better? } ## next, take a step of suitable length to the suggested direction step <- 1 direction[!fixed] <- as.vector(approxHess %*% gr[!fixed]) oldx <- x oldgr <- gr oldparam <- param param[!fixed] <- oldparam[!fixed] - step * direction[!fixed] x <- sumKeepAttr( fn( param, fixed = fixed, sumObs = FALSE, returnHessian = FALSE, ... ) ) # sum of log-likelihood value but not sum of gradients ## did we end up with a larger value? while((is.na(x) | x < oldx) & step > slot(control, "steptol")) { step <- step/2 if(slot(control, "printLevel") > 2) { cat("Function decreased. Function values: old ", oldx, ", new ", x, ", difference ", x - oldx, "\n") if(slot(control, "printLevel") > 3) { resdet <- cbind(param = param, gradient = gr, direction=direction, active=!fixed) cat("Attempted parameters:\n") print(resdet) } cat(" -> step ", step, "\n", sep="") } param[!fixed] <- oldparam[!fixed] - step * direction[!fixed] x <- sumKeepAttr( fn( param, fixed = fixed, sumObs = FALSE, returnHessian = FALSE, ... ) ) # sum of log-likelihood value but not sum of gradients } if(step < slot(control, "steptol")) { # we did not find a better place to go... 
samm <- list(theta0=oldparam, f0=oldx, climb=direction) } gri <- attr( x, "gradient" ) # observation-wise gradient. We only need it in order to compute the BHHH Hessian, if asked so. gr <- sumGradients( gri, nParam = length( param ) ) incr <- step * direction y <- gr - oldgr if(all(y == 0)) { # gradient did not change -> cannot proceed code <- 9; break } ## Compute new approximation for the inverse hessian update <- outer( incr[!fixed], incr[!fixed]) * (sum(y[!fixed] * incr[!fixed]) + as.vector( t(y[!fixed]) %*% invHess %*% y[!fixed])) / sum(incr[!fixed] * y[!fixed])^2 + (invHess %*% outer(y[!fixed], incr[!fixed]) + outer(incr[!fixed], y[!fixed]) %*% invHess)/ sum(incr[!fixed] * y[!fixed]) invHess <- invHess - update ## chi2 <- - crossprod(direction[!fixed], oldgr[!fixed]) if (slot(control, "printLevel") > 0){ cat("step = ",step, ", lnL = ", x,", chi2 = ", chi2, ", function increment = ", x - oldx, "\n",sep="") if (slot(control, "printLevel") > 1){ resdet <- cbind(param = param, gradient = gr, direction=direction, active=!fixed) print(resdet) cat("--------------------------------------------\n") } } if( step < slot(control, "steptol")) { code <- 3; break } if( sqrt( crossprod( gr[!fixed] ) ) < slot(control, "gradtol") ) { code <- 1; break } if(x - oldx < slot(control, "tol")) { code <- 2; break } if(x - oldx < slot(control, "reltol")*(x + slot(control, "reltol"))) { code <- 8; break } if(is.infinite(x) & x > 0) { code <- 5; break } } if( slot(control, "printLevel") > 0) { cat( "--------------\n") cat( maximMessage( code), "\n") cat( iter, " iterations\n") cat( "estimate:", param, "\n") cat( "Function value:", x, "\n") } if( is.matrix( gr ) ) { if( dim( gr )[ 1 ] == 1 ) { gr <- gr[ 1, ] } } names(gr) <- names(param) # calculate (final) Hessian if(tolower(finalHessian) == "bhhh") { if(observationGradient(gri, length(param))) { hessian <- - crossprod( gri ) attr(hessian, "type") <- "BHHH" } else { hessian <- NULL warning("For computing the final Hessian by 'BHHH' method, the log-likelihood or gradient must be supplied by observations") } } else if(finalHessian) { hessian <- attr( fn( param, fixed = fixed, returnHessian = TRUE, ... ) , "hessian" ) } else { hessian <- NULL } if( !is.null( hessian ) ) { rownames( hessian ) <- colnames( hessian ) <- nimed } ## remove attributes from final value of objective (likelihood) function attributes( x )$gradient <- NULL attributes( x )$hessian <- NULL attributes( x )$gradBoth <- NULL attributes( x )$hessBoth <- NULL ## result <-list( maximum = unname( drop( x ) ), estimate=param, gradient=gr, hessian=hessian, code=code, message=maximMessage( code), last.step=samm, # only when could not find a # lower point fixed=fixed, iterations=iter, type=maxim.type) if(observationGradient(gri, length(param))) { colnames( gri ) <- names( param ) result$gradientObs <- gri } result <- c(result, control=control) # attach the control parameters class(result) <- c("maxim", class(result)) invisible(result) } maxLik/R/headDots.R0000644000175100001440000000034414077525067013612 0ustar hornikusers### paste head of vector, and if some of it is left out, add '...' to it. headDots <- function(x, max.cols) { s <- paste(head(x, max.cols), collapse=", ") if(length(x) > max.cols) { s <- paste(s, "...") } s } maxLik/R/10-MaxControl_class.R0000644000175100001440000002247614077525067015562 0ustar hornikusers ### should move checkMaxControl to a separate file but how to do it? 
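## Illustrative sketch (not run): the 'MaxControl' class defined below collects
## the optimizer settings and is validated by checkMaxControl().  Users normally
## build the object with maxControl() (see 20-maxControl.R) or simply pass a
## named list as the 'control' argument of the max***() functions.
##    ctrl <- maxControl(tol = 1e-8, iterlim = 200, printLevel = 2)
##    fit  <- maxNR(function(theta) -sum((theta - 1:2)^2), start = c(0, 0),
##                  control = ctrl)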
setClassUnion("functionOrNULL", c("function", "NULL")) setClassUnion("integerOrNULL", c("integer", "NULL")) setClassUnion("numericOrNULL", c("numeric", "NULL")) checkMaxControl <- function(object) { ## check validity of MaxControl objects if(!inherits(object, "MaxControl")) { stop("'MaxControl' object required. Currently '", paste(class(object), sep=", "), "'") } ## errors <- character(0) ## Check length of componenents for(s in slotNames(object)) { if(s == "sann_cand") { if(length(slot(object, s)) > 1) { errors <- c(errors, paste("'", s, "' must be either 'NULL' or ", "a function of length 1, not of length ", length(slot(object, s)), sep="")) } } else if(s %in% c("SG_batchSize", "SG_clip", "SG_patience")) { # integerOrNULL if(length(slot(object, s)) > 1) { errors <- c(errors, paste("'", s, "' must be either 'NULL' or ", "of length 1, not of length ", length(slot(object, s)), sep="")) } } else if(length(slot(object, s)) != 1) { # length 1 errors <- c(errors, paste("'", s, "' must be of length 1, not ", length(slot(object, s)), sep="")) } } ## check missings for(s in slotNames(object)) { if(is.vector(slot(object, s)) && any(is.na(slot(object, s)))) { # is.na only works for vectors errors <- c(errors, paste0("NA in '", s, "'") ) return(errors) # return errors here as otherwise NA-s will interfere the # block of if-s below } } ## if(slot(object, "steptol") < 0) { errors <- c(errors, paste("'steptol' must be non-negative, not", slot(object, "steptol"))) } if(slot(object, "lambdatol") < 0) { errors <- c(errors, paste("'lambdatol' must be non-negative, not", slot(object, "lambdatol"))) } ## qac valid values--only check if length 1 if(length(slot(object, "qac")) == 1 && !pmatch(slot(object, "qac"), c("stephalving", "marquardt"))) { errors <- c(errors, paste("'qac' must be 'stephalving' or 'marquadt', not", slot(object, "qac"))) } if(slot(object, "qrtol") < 0) { errors <- c(errors, paste("'qrtol' must be non-negative, not", slot(object, "qrtol"))) } if(slot(object, "marquardt_lambda0") < 0) { errors <- c(errors, paste("'lambda0' must be non-negative, not", slot(object, "lambda0"))) } if(slot(object, "marquardt_lambdaStep") <= 1) { errors <- c(errors, paste("'lambdaStep' must be > 1, not", slot(object, "lambdaStep"))) } if(slot(object, "marquardt_maxLambda") < 0) { errors <- c(errors, paste("'maxLambda' must be non-negative, not", slot(object, "maxLambda"))) } ## NM if(slot(object, "nm_alpha") < 0) { errors <- c(errors, paste("Nelder-Mead reflection factor 'alpha' ", "must be non-negative, not", slot(object, "nm_alpha"))) } if(slot(object, "nm_beta") < 0) { errors <- c(errors, paste("Nelder-Mead contraction factor 'beta' ", "must be non-negative, not", slot(object, "nm_beta"))) } if(slot(object, "nm_gamma") < 0) { errors <- c(errors, paste("Nelder-Mead expansion factor 'gamma' ", "must be non-negative, not", slot(object, "nm_gamma"))) } ## SANN if(!inherits(slot(object, "sann_cand"), c("function", "NULL"))) { # errors <- c(errors, paste("'SANN_cand' must be either NULL or a function, not", slot(object, "SANN_cand"))) } if(slot(object, "sann_tmax") < 1) { errors <- c(errors, paste("SANN number of calculations at each temperature ", "'tmax' ", "must be positive, not", slot(object, "sann_tmax"))) } ## SGA if(slot(object, "SGA_momentum") < 0 || slot(object, "SGA_momentum") > 1) { errors <- c(errors, paste("SGA momentum parameter must be in [0,1], not", slot(object, "SGA_momentum"))) } ## Adam if(slot(object, "Adam_momentum1") < 0 || slot(object, "Adam_momentum1") > 1) { errors <- c(errors, paste("Adam 
momentum1 parameter must be in [0,1], not", slot(object, "Adam_momentum1"))) } if(slot(object, "Adam_momentum2") < 0 || slot(object, "Adam_momentum2") > 1) { errors <- c(errors, paste("Adam momentum2 parameter must be in [0,1], not", slot(object, "Adam_momentum2"))) } ## SG general if(slot(object, "SG_learningRate") <= 0) { errors <- c(errors, paste("learning rate for SGA must be positive, not", slot(object, "SG_learningRate"))) } if(length(slot(object, "SG_batchSize")) > 0 && slot(object, "SG_batchSize") <= 0L) { errors <- c(errors, paste("SGA batch size must be positive, not", slot(object, "SG_batchSize"))) } if(length(slot(object, "SG_clip")) > 0 && slot(object, "SG_clip") <= 0L) { errors <- c(errors, paste("SGA gradient clip norm threshold must be positive, not", slot(object, "SG_clip"))) } if(length(slot(object, "SG_patience")) > 0 && slot(object, "SG_patience") <= 0L) { errors <- c(errors, paste("SG patience must be positive (or NULL), not", slot(object, "SG_patience"))) } if(slot(object, "SG_patienceStep") <= 0L) { errors <- c(errors, paste("SG patience step must be positive, not", slot(object, "SG_patienceStep"))) } ## general if(slot(object, "iterlim") < 0) { errors <- c(errors, paste("'iterlim' must be non-negative, not", slot(object, "iterlim"))) } if(slot(object, "max.rows") < 0) { errors <- c(errors, paste("'max.rows' must be non-negative, not", slot(object, "max.rows"))) } if(slot(object, "max.cols") < 0) { errors <- c(errors, paste("'max.cols' must be non-negative, not", slot(object, "max.cols"))) } if(length(errors) > 0) return(errors) return(TRUE) } ### MaxControls contains all control parameters for max* family setClass("MaxControl", slots=representation( tol="numeric", reltol="numeric", gradtol="numeric", steptol="numeric", # lambdatol="numeric", qrtol="numeric", ## Qadratic Approximation Control qac="character", marquardt_lambda0="numeric", marquardt_lambdaStep="numeric", marquardt_maxLambda="numeric", ## Optim Nelder-Mead: nm_alpha="numeric", nm_beta="numeric", nm_gamma="numeric", ## SANN sann_cand="functionOrNULL", sann_temp="numeric", sann_tmax="integer", sann_randomSeed="integer", ## SGA SGA_momentum = "numeric", ## Adam Adam_momentum1 = "numeric", Adam_momentum2 = "numeric", ## SG general SG_patience = "integerOrNULL", # NULL: don't care about patience SG_patienceStep = "integer", # check patience at every epoch SG_learningRate="numeric", SG_batchSize = "integerOrNULL", # NULL: full batch SG_clip="numericOrNULL", # NULL: do not clip ## iterlim="integer", max.rows="integer", max.cols="integer", printLevel="integer", storeValues="logical", storeParameters="logical" ), ## prototype=prototype( tol=1e-8, reltol=sqrt(.Machine$double.eps), gradtol=1e-6, steptol=1e-10, # lambdatol=1e-6, # qac="stephalving", qrtol=1e-10, marquardt_lambda0=1e-2, marquardt_lambdaStep=2, marquardt_maxLambda=1e12, ## Optim Nelder-Mead nm_alpha=1, nm_beta=0.5, nm_gamma=2, ## SANN sann_cand=NULL, sann_temp=10, sann_tmax=10L, sann_randomSeed=123L, ## SGA SGA_momentum = 0, ## Adam Adam_momentum1 = 0.9, Adam_momentum2 = 0.999, ## SG_learningRate=0.1, SG_batchSize=NULL, SG_clip=NULL, SG_patience = NULL, SG_patienceStep = 1L, ## iterlim=150L, max.rows=as.integer(getOption("max.rows", 20L)), max.cols=as.integer(getOption("max.cols", 7L)), printLevel=0L, storeValues=FALSE, storeParameters=FALSE), ## validity=checkMaxControl ) maxLik/R/numericGradient.R0000644000175100001440000000452514077525067015204 0ustar hornikusersnumericGradient <- function(f, t0, eps=1e-6, fixed, ...) 
{ ## numeric gradient of a vector-valued function ## f function, return Nval x 1 vector of values ## t0 NPar x 1 vector of parameters ## fixed calculate the gradient based on these parameters only ## return: ## NvalxNPar matrix, gradient ## gradient along parameters which are not active are NA warnMessage <- function(theta, value, i) { ## issue a warning if the function value at theta is not a scalar max.print <- 10 if(length(value) != nVal) { warnMsg <- "Function value at\n" warnMsg <- c(warnMsg, paste(format(theta[seq(length=min(max.print,length(theta)))]), collapse=" "), "\n") if(max.print < length(theta)) warnMsg <- c(warnMsg, "...\n") warnMsg <- c(warnMsg, " =\n") warnMsg <- c(warnMsg, paste(format(value[seq(length=min(max.print,length(value)))]), collapse=" "), "\n") if(max.print < length(value)) warnMsg <- c(warnMsg, "...\n") warnMsg <- c(warnMsg, "(length ", length(value), ") does not conform with ", "the length at original value ", nVal, "\n") warnMsg <- c(warnMsg, "Component ", i, " set to NA") return(warnMsg) } if(!all(is.na(value)) & !is.numeric(value)) stop("The function value must be numeric for 'numericGradient'") return(NULL) } NPar <- length(t0) nVal <- length(f0 <- f(t0, ...)) grad <- matrix(NA, nVal, NPar) row.names(grad) <- names(f0) colnames(grad) <- names(t0) if(missing(fixed)) fixed <- rep(FALSE, NPar) for(i in 1:NPar) { if(fixed[i]) next t2 <- t1 <- t0 t1[i] <- t0[i] - eps/2 t2[i] <- t0[i] + eps/2 ft1 <- f(t1, ...) ft2 <- f(t2, ...) ## give meaningful error message if the functions give vectors ## of different length at t1, t2 if(!is.null(msg <- warnMessage(t1, ft1, i))) { warning(msg) ft1 <- NA } if(!is.null(msg <- warnMessage(t2, ft2, i))) { warning(msg) ft2 <- NA } grad[,i] <- (ft2 - ft1)/eps } return(grad) } maxLik/R/maximType.R0000644000175100001440000000022114077525067014026 0ustar hornikusersmaximType <- function(x) UseMethod("maximType") maximType.default <- function(x) x$maximType maximType.maxim <- function(x) x$type maxLik/R/activePar.R0000644000175100001440000000061514077525067013776 0ustar hornikusers## activePar: returns parameters which are free under maximisation (not fixed as constants) activePar <- function(x, ...) UseMethod("activePar") activePar.default <- function(x, ...) { if( !is.null( x$fixed ) ) { result <- !x$fixed } else { result <- x$activePar } if( is.null( result ) ) { result <- rep( TRUE, length( coef( x ) ) ) } return( result ) } maxLik/R/30-addControlDddot.R0000644000175100001440000000036314077525067015350 0ustar hornikusers ## Method to overwrite parameters of an existing MaxControl object addControlDddot <- function(x, ...) { ## add ... to the control dddot <- list(...) addControlList(x, dddot) } setMethod("maxControl", "MaxControl", addControlDddot) maxLik/R/printRowColLimits.R0000644000175100001440000000164714077525067015532 0ustar hornikusers### print vector/matrix while limiting the number of rows/columns printed printRowColLimits <- function(x, max.rows=getOption("max.rows", 20), max.cols=getOption("max.cols", 7), ... # other arguments to 'print.matrix' ) { x1 <- x msg <- NULL if(is.null(dim(x))) { x1 <- matrix(x, nrow=1) colnames(x1) <- names(x) x <- x1 } ## we have a matrix (higher-D arrays not supported) if(ncol(x) > max.cols) { x1 <- x[, seq(length=max.cols), drop=FALSE] msg <- paste(msg, "reached getOption(\"max.cols\") -- omitted", ncol(x) - max.cols, "columns\n") } print(head(x1, max.rows), ...) 
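## Illustrative sketch (not run): numericGradient() accepts vector-valued
## functions and returns an (output length) x (parameter length) matrix of
## central finite-difference derivatives.
##    f <- function(theta) c(sin(theta[1]), cos(theta[2]))
##    numericGradient(f, t0 = c(1, 2))    # 2 x 2 Jacobian, default eps = 1e-6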
if(nrow(x) > max.rows) { msg <- paste(msg, "reached getOption(\"max.rows\") -- omitted", nrow(x) - max.rows, "rows\n") } cat(msg) } maxLik/R/logLik.maxLik.R0000644000175100001440000000047314077525067014527 0ustar hornikusers### Methods for accessing loglik value maximum likelihood estimates logLik.summary.maxLik <- function( object, ...) { ll <- object$loglik attr(ll, "df") <- sum(activePar(object)) ll } logLik.maxLik <- function( object, ...) { ll <- maxValue(object) attr(ll, "df") <- sum(activePar(object)) ll } maxLik/R/showMaxControl.R0000644000175100001440000000111114077525067015037 0ustar hornikusers showMaxControl <- function(object) { cat("A 'MaxControl' object with slots:\n") for(s in slotNames(object)) { if(s == "sann_cand") { ## This is a function or NULL, handle with care: if(is.null(slot(object, s))) { cat("sann_cand = \n") } else { cat("sann_cand =\n") print(str(slot(object, s))) } } else { ## Just print cat(s, "=", slot(object, s), "\n") } } } setMethod("show", "MaxControl", showMaxControl) maxLik/R/bread.maxLik.R0000644000175100001440000000011314077525067014352 0ustar hornikusersbread.maxLik <- function( x, ... ) { return( vcov( x ) * nObs( x ) ) } maxLik/R/compareDerivatives.R0000644000175100001440000000651614077525067015722 0ustar hornikuserscompareDerivatives <- function(f, grad, hess=NULL, t0, eps=1e-6, printLevel=1, print=printLevel > 0, max.rows=getOption("max.rows", 20), max.cols=getOption("max.cols", 7), ...) { ### t0 - initial parameter vector ## ## 1. Initial function and grad eval ## if(print) cat("-------- compare derivatives -------- \n") f0 <- f(t0, ...) attributes(f0) <- NULL # keep only array data when printing if(is.function(grad)) analytic <- grad(t0, ...) else if(is.numeric(grad)) analytic = grad else stop("Argument 'grad' must be either gradient function or ", "pre-computed numeric gradient matrix") out <- list(t0=t0, f.t0=f0, compareGrad = list(analytic=analytic)) # if(is.null(dim(analytic))) { if(print) cat("Note: analytic gradient is vector. ", "Transforming into a matrix form\n") if(length(f0) > 1) analytic <- matrix(analytic, length(analytic), 1) # Note: we assume t0 is a simple vector -> hence gradient # will be a column vector else analytic <- matrix(analytic, 1, length(analytic)) # f returns a scalar -> we have row vector along t0 } if(print) { cat("Function value:\n") print(f0) } if(print) cat("Dim of analytic gradient:", dim(analytic), "\n") numeric <- numericGradient(f, t0, eps, ...) out$compareGrad$numeric = numeric if(print) cat(" numeric :", dim(numeric), "\n") rDiff <- ((analytic - numeric) / (0.5*(abs(analytic) + abs(numeric))) ) rDiff[(analytic==0) & (numeric==0)] <- 0 rDiff. <- max(abs(rDiff), na.rm=TRUE) out$compareGrad$rel.diff <- rDiff out$maxRelDiffGrad <- rDiff. # if(print){ if(ncol(analytic) < 2) { a <- cbind(t0, analytic, numeric, rDiff) dimnames(a) <- list(param=names(f0), c("theta 0", "analytic", "numeric", "rel.diff")) printRowColLimits(a, max.rows, max.cols) } else { cat("t0\n") printRowColLimits(t0, max.rows, max.cols) cat("analytic gradient\n") printRowColLimits(analytic, max.rows, max.cols) cat("numeric gradient\n") printRowColLimits(numeric, max.rows, max.cols) cat(paste("(anal-num)/(0.5*(abs(anal)+abs(num)))\n")) printRowColLimits(rDiff, max.rows, max.cols) a=list(t0=t0, analytic=analytic, numeric=numeric, rel.diff=rDiff) } cat("Max relative difference:", rDiff., "\n") } # out <- list(t0=t0, f.t0=f0, compareGrad=a, maxRelDiffGrad=rDiff.) ## ## Hessian? 
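    ## Illustrative sketch (not run): compareDerivatives() is useful for
    ## checking an analytic gradient against its numeric counterpart before
    ## starting the actual optimization.
    ##    f <- function(theta) sum(dnorm(theta, log = TRUE))
    ##    g <- function(theta) -theta            # analytic gradient of f
    ##    compareDerivatives(f, g, t0 = c(0.5, -0.3))  # max relative difference ~ 0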
## if(!is.null(hess)) { if(print) cat("Comparing hessians: relative dfference\n") anHess <- hess(t0, ...) numHess <- numericGradient(grad, t0, eps, ...) rDifHess <- (anHess-numHess) / (0.5*(abs(anHess)+abs(numHess))) rDifHess[(anHess==0) & (numHess==0)] <- 0 rDifHess. <- max(abs(rDifHess), na.rm=TRUE) if(print) printRowColLimits(rDifHess., max.rows, max.cols) out$compareHessian <- list(analytic = anHess, numeric = numHess, rel.diff = rDifHess) out$maxRelDiffHess = rDifHess. } if(print) cat("-------- END of compare derivatives -------- \n") invisible(out) } maxLik/R/hessian.R0000644000175100001440000000021014077525067013501 0ustar hornikusers## Return Hessian of an object hessian <- function(x, ...) UseMethod("hessian") hessian.default <- function(x, ...) x$hessian maxLik/R/20-maxControl.R0000644000175100001440000000114014077525067014417 0ustar hornikusers ### Default constructor of MaxControl object: ### take a list of parameters and overwrite the default values maxControl.default <- function(...) { result <- new("MaxControl") result <- addControlDddot(result, ...) return(result) } ### Standard method for any arguments setGeneric("maxControl", function(x, ...) standardGeneric("maxControl") ) ### Method for 'maxim' objects: fetch the stored MaxControl setMethod("maxControl", "maxim", function(x, ...) x$control) ### Method for missing arguments: just default values setMethod("maxControl", "missing", maxControl.default) maxLik/R/maxNRCompute.R0000644000175100001440000004272414077525067014451 0ustar hornikusersmaxNRCompute <- function(fn, start, # maximum lambda for Marquardt (1963) finalHessian=TRUE, bhhhHessian = FALSE, fixed=NULL, control=maxControl(), ...) { ## Newton-Raphson maximisation ## Parameters: ## fn - the function to be maximized. Returns either scalar or ## vector value with possible attributes ## constPar and newVal ## fn must return the value with attributes 'gradient' ## and 'hessian' ## fn must have an argument sumObs ## start - initial parameter vector (eventually w/names) ## control MaxControl object: ## steptol - minimum step size ## lambda0 initial Hessian corrector (see Marquardt, 1963, p 438) ## lambdaStep how much Hessian corrector lambda is changed between ## two lambda trials ## (nu in Marquardt (1963, p 438) ## maxLambda largest possible lambda (if exceeded will give step error) ## lambdatol - max lowest eigenvalue when forcing pos. definite H ## qrtol - tolerance for qr decomposition ## qac How to handle the case where new function value is ## smaller than the original one: ## "stephalving" smaller step in the same direction ## "marquardt" Marquardt (1963) approach ## ## finalHessian include final Hessian? As computing final hessian does not carry any extra penalty for NR method, this option is ## mostly for compatibility reasons with other maxXXX functions. ## TRUE/something else include ## FALSE do not include ## fixed - a logical vector -- which parameters are taken as fixed. ## Other paramters are treated as variable (free). ## ... additional argument to 'fn'. This may include ## 'fnOrig', 'gradOrig', 'hessOrig' if called fromm ## 'maxNR'. ## ## RESULTS: ## a list of class "maxim": ## maximum function value at maximum ## estimate the parameter value at maximum ## gradient gradient ## hessian Hessian ## code integer code of success, see maximMessage ## message character message describing the code ## last.step only present if code == 3 (step error). 
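   ## Illustrative sketch (not run): the Hessian correction used by
   ## maxNRCompute() can be switched from step halving to the Marquardt (1963)
   ## approach through the 'qac' control option (see the MaxControl slots
   ## 'marquardt_lambda0', 'marquardt_lambdaStep', 'marquardt_maxLambda').
   ##    fit <- maxNR(function(theta) -sum((theta - 1:2)^2), start = c(0, 0),
   ##                 control = list(qac = "marquardt", marquardt_lambda0 = 0.01))
   ##    returnMessage(fit)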
A list with following components: ## theta0 - parameter value which led to the error ## f0 - function value at these parameter values ## climb - the difference between theta0 and the new approximated parameter value (theta1) ## fixed - logical vector, which parameters are constant (fixed, inactive, non-free) ## fixed logical vector, which parameters were treated as constant (fixed, inactive, non-free) ## iterations number of iterations ## type "Newton-Raphson maximisation" ## ## References: ## Marquardt (1963), "An algorithm for least-squares estimation of nonlinear ## parameters", J. Soc. Indust. Appl. Math 11(2), 431-441 ## max.eigen <- function( M) { ## return maximal eigenvalue of (symmetric) matrix val <- eigen(M, symmetric=TRUE, only.values=TRUE)$values val[1] ## L - eigenvalues in decreasing order, [1] - biggest in abs value } ## ------------------------------------------------- if(slot(control, "qac") == "marquardt") marquardt <- TRUE else marquardt <- FALSE ## maximType <- "Newton-Raphson maximisation" if(marquardt) { maximType <- paste(maximType, "with Marquardt (1963) Hessian correction") } nimed <- names(start) nParam <- length(start) samm <- NULL # data for the last step that could not find a better # value I <- diag(rep(1, nParam)) # I is unit matrix start1 <- start iter <- 0L returnHessian <- ifelse( bhhhHessian, "BHHH", TRUE ) f1 <- fn(start1, fixed = fixed, sumObs = TRUE, returnHessian = returnHessian, ...) if(slot(control, "printLevel") > 2) { cat("Initial function value:", f1, "\n") } if(any(is.na( f1))) { result <- list(code=100, message=maximMessage("100"), iterations=0, type=maximType) class(result) <- "maxim" return(result) } if(any(is.infinite( f1)) && sum(f1) > 0) { # we stop at +Inf but not at -Inf result <- list(code=5, message=maximMessage("5"), iterations=0, type=maximType) class(result) <- "maxim" return(result) } if( isTRUE( attr( f1, "gradBoth" ) ) ) { warning( "the gradient is provided both as attribute 'gradient' and", " as argument 'grad': ignoring argument 'grad'" ) } if( isTRUE( attr( f1, "hessBoth" ) ) ) { warning( "the Hessian is provided both as attribute 'hessian' and", " as argument 'hess': ignoring argument 'hess'" ) } G1 <- attr( f1, "gradient" ) if(slot(control, "printLevel") > 2) { cat("Initial gradient value:\n") print(G1) } if(any(is.na(G1[!fixed]))) { stop("NA in the initial gradient") } if(any(is.infinite(G1[!fixed]))) { stop("Infinite initial gradient") } if(length(G1) != nParam) { stop( "length of gradient (", length(G1), ") not equal to the no. 
of parameters (", nParam, ")" ) } H1 <- attr( f1, "hessian" ) if(slot(control, "printLevel") > 3) { cat("Initial Hessian value:\n") print(H1) } if(length(H1) == 1) { # Allow the user program to return a # single NA in case of out of support or # other problems if(is.na(H1)) stop("NA in the initial Hessian") } if(any(is.na(H1[!fixed, !fixed]))) { stop("NA in the initial Hessian") } if(any(is.infinite(H1))) { stop("Infinite initial Hessian") } if( slot(control, "printLevel") > 1) { cat( "----- Initial parameters: -----\n") cat( "fcn value:", as.vector(f1), "\n") a <- cbind(start, G1, as.integer(!fixed)) dimnames(a) <- list(nimed, c("parameter", "initial gradient", "free")) print(a) cat( "Condition number of the (active) hessian:", kappa( H1[!fixed, !fixed]), "\n") if( slot(control, "printLevel") > 3) { print( H1) } } lambda1 <- slot(control, "marquardt_lambda0") step <- 1 ## ---------------- Main interation loop ------------------------ repeat { if( iter >= slot(control, "iterlim")) { code <- 4; break } iter <- iter + 1L if(!marquardt) { lambda1 <- 0 # assume the function is concave at start0 } start0 <- start1 f0 <- f1 G0 <- G1 if(any(is.na(G0[!fixed]))) { stop("NA in gradient (at the iteration start)") } H0 <- H1 if(any(is.na(H0[!fixed, !fixed]))) { stop("NA in Hessian (at the iteration start)") } if(marquardt) { lambda1 <- lambda1/slot(control, "marquardt_lambdaStep") # initially we try smaller lambda # lambda1: current lambda for calculations H <- H0 - lambda1*I } else { step <- 1 H <- H0 } ## check whether hessian is positive definite aCount <- 0 # avoid inifinite number of attempts because of # numerical problems while((me <- max.eigen( H[!fixed,!fixed,drop=FALSE])) >= -slot(control, "lambdatol") | (qRank <- qr(H[!fixed,!fixed], tol=slot(control, "qrtol"))$rank) < sum(!fixed)) { # maximum eigenvalue -> negative definite # qr()$rank -> singularity if(marquardt) { lambda1 <- lambda1*slot(control, "marquardt_lambdaStep") } else { lambda1 <- abs(me) + slot(control, "lambdatol") + min(abs(diag(H)[!fixed]))/1e7 # The third term corrects numeric singularity. If diag(H) only contains large values, # (H - (a small number)*I) == H because of finite precision } H <- (H - lambda1*I) # could we multiply it with something like (for stephalving) # *abs(me)*lambdatol # -lambda*I makes the Hessian (barely) # negative definite. # *me*lambdatol keeps the scale roughly # the same as it was before -lambda*I aCount <- aCount + 1 if(aCount > 100) { # should be enough even in the worst case break } } amount <- vector("numeric", nParam) inv <- try(qr.solve(H[!fixed,!fixed,drop=FALSE], G0[!fixed], tol=slot(control, "qrtol"))) if(inherits(inv, "try-error")) { # could not get the Hessian to negative definite samm <- list(theta0=start0, f0=f0, climb=amount) code <- 3 break } amount[!fixed] <- inv start1 <- start0 - step*amount # note: step is always 1 for Marquardt method f1 <- fn(start1, fixed = fixed, sumObs = TRUE, returnHessian = returnHessian, ...) # The call calculates new function, # gradient, and Hessian values ## Are we requested to fix some of the parameters? constPar <- attr(f1, "constPar") if(!is.null(constPar)) { if(any(is.na(constPar))) { stop("NA in the list of constants") } fixed <- rep(FALSE, nParam) fixed[constPar] <- TRUE } ## Are we asked to write in a new value for some of the parameters? if(is.null(newVal <- attr(f1, "newVal"))) { ## no ... 
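      ## Illustrative sketch (not run): as handled here, the objective function
      ## may steer the iterations itself by attaching the attributes 'constPar'
      ## (indices of parameters to be kept fixed from now on) and 'newVal'
      ## (list(index = ..., val = ...) of values to be written into the
      ## parameter vector) to the value it returns.
      ##    llf <- function(theta) {
      ##       ll <- -sum(theta^2)
      ##       if(abs(theta[2]) > 10) {          # e.g. a runaway component
      ##          attr(ll, "constPar") <- 2
      ##          attr(ll, "newVal") <- list(index = 2, val = 0)
      ##       }
      ##       ll
      ##    }
      ##    fit <- maxNR(llf, start = c(1, 1))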
if(marquardt) { stepOK <- lambda1 <= slot(control, "marquardt_maxLambda") } else { stepOK <- step >= slot(control, "steptol") } while( any(is.na(f1)) || ( ( sum(f1) < sum(f0) ) & stepOK)) { # We end up in a NA or a higher value. # try smaller step if(marquardt) { lambda1 <- lambda1*slot(control, "marquardt_lambdaStep") H <- (H0 - lambda1*I) amount[!fixed] <- qr.solve(H[!fixed,!fixed,drop=FALSE], G0[!fixed], tol=slot(control, "qrtol")) } else { step <- step/2 } start1 <- start0 - step*amount if(slot(control, "printLevel") > 2) { if(slot(control, "printLevel") > 3) { cat("Try new parameters:\n") print(start1) } cat("function value difference", f1 - f0) if(marquardt) { cat(" -> lambda", lambda1, "\n") } else { cat(" -> step", step, "\n") } } f1 <- fn(start1, fixed = fixed, sumObs = TRUE, returnHessian = returnHessian, ...) # WTF does the 'returnHessian' do here ? ## Find out the constant parameters -- these may be other than ## with full step constPar <- attr(f1, "constPar") if(!is.null(constPar)) { if(any(is.na(constPar))) { stop("NA in the list of constants") } fixed[constPar] <- TRUE ## Any new values requested? if(!is.null(newVal <- attr(f1, "newVal"))) { ## Yes. Write them to parameters and go for ## next iteration start1[newVal$index] <- newVal$val break; } } } if(marquardt) { stepOK <- lambda1 <= slot(control, "marquardt_maxLambda") } else { stepOK <- step >= slot(control, "steptol") } if(!stepOK) { # we did not find a better place to go... start1 <- start0 f1 <- f0 samm <- list(theta0=start0, f0=f0, climb=amount) } } else { ## Yes, indeed. New values given to some of the params. ## Note, this may result in a lower function value, ## hence we do not check f1 > f0 start1[newVal$index] <- newVal$val if( slot(control, "printLevel") > 0 ) { cat( "Keeping parameter(s) ", paste( newVal$index, collapse = ", " ), " at the fixed values ", paste( newVal$val, collapse = ", " ), ", as the log-likelihood function", " returned attributes 'constPar' and 'newVal'\n", sep = "" ) } } G1 <- attr( f1, "gradient" ) if(any(is.na(G1[!fixed]))) { cat("Iteration", iter, "\n") cat("Parameter:\n") print(start1) cat("Gradient (first 30 components):\n") print(head(G1, n=30)) stop("NA in gradient") } if(any(is.infinite(G1))) { code <- 6; break; } H1 <- attr( f1, "hessian" ) if( slot(control, "printLevel") > 1) { cat( "-----Iteration", iter, "-----\n") } if(any(is.infinite(H1))) { code <- 7; break } if(slot(control, "printLevel") > 2) { cat( "lambda ", lambda1, " step", step, " fcn value:", formatC(as.vector(f1), digits=8, format="f"), "\n") a <- cbind(amount, start1, G1, as.integer(!fixed)) dimnames(a) <- list(names(start0), c("amount", "new param", "new gradient", "active")) print(a) if( slot(control, "printLevel") > 3) { cat("Hessian\n") print( H1) } if(!any(is.na(H1[!fixed, !fixed]))) { cat( "Condition number of the hessian:", kappa(H1[!fixed,!fixed,drop=FALSE]), "\n") } } if( step < slot(control, "steptol")) { # wrong guess in step halving code <- 3; break } if(lambda1 > slot(control, "marquardt_maxLambda")) { # wrong guess in Marquardt method code <- 3; break } if( sqrt( crossprod( G1[!fixed] ) ) < slot(control, "gradtol") ) { code <- 1; break } if(is.null(newVal) && ((sum(f1) - sum(f0)) < slot(control, "tol"))) { code <- 2; break # } if(is.null(newVal) && (sum(f1) - sum(f0) < slot(control, "reltol")*abs(sum(f1) + slot(control, "reltol"))) # We need abs(f1) to ensure RHS is positive # (as long as reltol is positive) ) { code <- 8; break } if(any(is.infinite(f1)) && sum(f1) > 0) { code <- 5; break } } if( 
slot(control, "printLevel") > 0) { cat( "--------------\n") cat( maximMessage( code), "\n") cat( iter, " iterations\n") cat( "estimate:", start1, "\n") cat( "Function value:", f1, "\n") } names(start1) <- nimed F1 <- fn( start1, fixed = fixed, sumObs = FALSE, returnHessian = ( finalHessian == TRUE ), ... ) G1 <- attr( F1, "gradient" ) if(observationGradient(G1, length(start1))) { gradientObs <- G1 colnames( gradientObs ) <- nimed G1 <- colSums(as.matrix(G1 )) } else { gradientObs <- NULL } names( G1 ) <- nimed ## calculate (final) Hessian if(tolower(finalHessian) == "bhhh") { if(!is.null(gradientObs)) { hessian <- -crossprod( gradientObs ) attr(hessian, "type") <- "BHHH" } else { hessian <- NULL warning("For computing the final Hessian by 'BHHH' method, the log-likelihood or gradient must be supplied by observations") } } else if( finalHessian != FALSE ) { hessian <- attr( F1, "hessian" ) } else { hessian <- NULL } if( !is.null( hessian ) ) { rownames( hessian ) <- colnames( hessian ) <- nimed } ## remove attributes from final value of objective (likelihood) function attributes( f1 )$gradient <- NULL attributes( f1 )$hessian <- NULL attributes( f1 )$gradBoth <- NULL attributes( f1 )$hessBoth <- NULL ## result <-list( maximum = unname( drop( f1 ) ), estimate=start1, gradient=drop(G1), hessian=hessian, code=code, message=maximMessage( code), last.step=samm, # only when could not find a # lower point fixed=fixed, iterations=iter, type=maximType) if( exists( "gradientObs" ) ) { result$gradientObs <- gradientObs } result <- c(result, control=control) # attach the control parameters ## class(result) <- c("maxim", class(result)) invisible(result) } maxLik/R/maxSGACompute.R0000644000175100001440000003047114077525067014540 0ustar hornikusersmaxSGACompute <- function(fn, grad, hess, start, nObs, finalHessian=FALSE, bhhhHessian = FALSE, fixed=NULL, control=maxControl(), optimizer="SGA", # type of optimizer: SGA, Adam ...) { ## Stochastic Gradient Ascent: implements ## * SGA with momentum ## * Adam ## Parameters: ## fn - the function to be maximized. Returns either scalar or ## vector value with possible attributes ## constPar and newVal ## start - initial parameter vector (eventually w/names) ## control MaxControl object: ## The stopping criteria ## tol - maximum allowed absolute difference between sequential values ## reltol - maximum allowed reltive difference (stops if < reltol*(abs(fn) + reltol) ## gradtol - maximum allowed norm of gradient vector ## ## iterlim - maximum # of iterations ## ## finalHessian include final Hessian? As computing final hessian does not carry any extra penalty for NR method, this option is ## mostly for compatibility reasons with other maxXXX functions. ## TRUE/something else include ## FALSE do not include ## fixed - a logical vector -- which parameters are taken as fixed. ## Other paramters are treated as variable (free). ## ... additional argument to 'fn'. This may include ## 'fnOrig', 'gradOrig', 'hessOrig' if called fromm ## 'maxNR'. 
## ## RESULTS: ## an object of class 'maxim' ## ## ------------------------------------------------- maximType <- "Stochastic Gradient Ascent" iterlim <- slot(control, "iterlim") nParam <- length(start) start1 <- start storeParameters <- slot(control, "storeParameters") storeValues <- slot(control, "storeValues") learningRate <- slot(control, "SG_learningRate") clip <- slot(control, "SG_clip") max.rows <- slot(control, "max.rows") max.cols <- slot(control, "max.cols") patience <- slot(control, "SG_patience") patienceStep <- slot(control, "SG_patienceStep") printLevel <- slot(control, "printLevel") batchSize <- slot(control, "SG_batchSize") if(optimizer == "Adam") { maximType <- "Stochastic Gradient Ascent/Adam" Adam.momentum1 <- slot(control, "Adam_momentum1") Adam.momentum2 <- slot(control, "Adam_momentum2") Adam.delta <- 1e-8 # maybe make it a parameter in the future Adam.s <- 0 Adam.r <- 0 Adam.time <- 0 } else if(optimizer == "SGA") { momentum <- slot(control, "SGA_momentum") v <- 0 # velocity that retains the momentum } else { stop(paste("unknown optimizer", optimizer)) } ## ---------- How many batches if(is.null(batchSize)) { nBatches <- 1 index <- seq(from=1, to=nObs, by=nBatches) } else { nBatches <- max(1L, nObs %/% batchSize) # ensure that we get at least one batch if batchSize set too large shuffledIndex <- sample(nObs, nObs) index <- shuffledIndex[seq(from=1, to=nObs, by=nBatches)] } ## f1 <- NULL # mark that we haven't computed the fcn value if(printLevel > 0) { f1 <- fn(start, fixed = fixed, sumObs = TRUE, index=index, ...) cat("Initial function value:", f1, "\n") if( isTRUE( attr( f1, "gradBoth" ) ) ) { warning( "the gradient is provided both as attribute 'gradient' and", " as argument 'grad': ignoring argument 'grad'" ) } if( isTRUE( attr( f1, "hessBoth" ) ) ) { warning( "the Hessian is provided both as attribute 'hessian' and", " as argument 'hess': ignoring argument 'hess'" ) } } if(!is.null(patience)) { if(is.null(f1)) { f1 <- fn(start, fixed = fixed, sumObs = TRUE, index=index, ...) } fBest <- f1 # remember the previous best value paramBest <- start patienceCount <- 0 # how many times have we hit a worse outcome } G1 <- grad(start, fixed = fixed, sumObs = TRUE, index=index, ...) # have to compute fn as we cannot get gradient otherwise if(any(is.na(G1[!fixed]))) { stop("NA in the initial gradient") } if(any(is.infinite(G1[!fixed]))) { stop("Infinite initial gradient") } if(length(G1) != nParam) { stop( "length of gradient (", length(G1), ") not equal to the no. of parameters (", nParam, ")" ) } if(length(clip) > 0) { if((norm2 <- sum(G1*G1)) > clip) G1 <- G1/sqrt(norm2)*sqrt(clip) } if(storeValues) { valueStore <- rep(NA_real_, iterlim + 1) if(is.null(f1)) { f1 <- fn(start, fixed = fixed, sumObs = TRUE, index=index, ...) 
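    ## Illustrative sketch (not run), assuming a numeric data vector 'x' in the
    ## workspace: this routine is driven by the maxSGA()/maxAdam() wrappers (not
    ## shown in this excerpt); the log-likelihood and gradient receive the rows
    ## of the current minibatch through the 'index' argument.
    ##    llf <- function(theta, index) dnorm(x[index], mean = theta, log = TRUE)
    ##    glf <- function(theta, index) x[index] - theta
    ##    fit <- maxSGA(llf, glf, start = c(mu = 0), nObs = length(x),
    ##                  control = list(SG_batchSize = 32, SG_learningRate = 0.05,
    ##                                 iterlim = 100))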
} valueStore[1] <- f1 } if(storeParameters) { parameterStore <- matrix(NA_real_, iterlim + 1, nParam) dimnames(parameterStore) <- list(epoch=c("start", 1:iterlim), parameter=names(start)) parameterStore[1,] <- start } if(printLevel > 1) { cat( "----- Initial parameters: -----\n") cat( "fcn value:", as.vector(f1), "\n") a <- cbind(start1, G1, as.integer(!fixed)) dimnames(a) <- list(names(start1), c("parameter", "initial gradient", "free")) printRowColLimits(a, max.rows, max.cols) } ## ---------------- Main interation loop ------------------------ iter <- 0L ## we do not need to compute the function itself here, except for ## printing repeat { # repeat over epochs ## break here if iterlim == 0 if( iter >= iterlim) { code <- 4; break } ## break here to avoid potentially costly gradient computation if( iter >= slot(control, "iterlim")) { code <- 4; break } iter <- iter + 1L if(printLevel > 1) { cat( "----- epoch", iter, "-----\n") } for(iBatch in 1:nBatches) { # repeat over minibatches if(!is.null(batchSize)) { index <- shuffledIndex[seq(from=iBatch, to=nObs, by=nBatches)] } start0 <- start1 G0 <- G1 if(any(is.na(G0[!fixed]))) { stop("NA in gradient") } if(optimizer == "SGA") { v <- momentum*v + learningRate*G0 start1 <- start0 + v } else if(optimizer == "Adam") { Adam.time <- Adam.time + 1 Adam.s <- Adam.momentum1*Adam.s + (1 - Adam.momentum1)*G0 Adam.r <- Adam.momentum2*Adam.r + (1 - Adam.momentum2)*G0*G0 Adam.shat <- Adam.s/(1 - Adam.momentum1^Adam.time) Adam.rhat <- Adam.r/(1 - Adam.momentum2^Adam.time) v <- learningRate*Adam.shat/(sqrt(Adam.rhat) + Adam.delta) start1 <- start0 + v } f1 <- NULL # we are at a new location, mark that we haven't computed the f1 values ## still iterations to go, hence compute gradient G1 <- grad(start1, fixed = fixed, sumObs = TRUE, index=index, ...) if(any(is.na(G1[!fixed])) || any(is.infinite(G1[!fixed]))) { cat("Iteration", iter, "\n") cat("Parameter:\n") print(headDots(start1, max.cols), quote=FALSE) cat("Gradient:\n") printRowColLimits(G1, max.rows, max.cols) stop("NA/Inf in gradient") } if(length(clip) > 0) { if((norm2 <- sum(G1*G1)) > clip) # compute norm w/o cross-product as grad may not be a vector G1 <- G1/sqrt(norm2*clip) } ## print every batch if someone wants... if(printLevel > 4) { f1 <- fn(start1, fixed = fixed, sumObs = TRUE, index=index, ...) cat(" - batch", iBatch, "index", index, "learning rate", learningRate, " fcn value:", formatC(as.vector(f1), digits=8, format="f"), "\n") a <- cbind(learningRate*G0, start1, G1, as.integer(!fixed)) dimnames(a) <- list(names(start0), c("delta-v", "param", "gradient", "active")) printRowColLimits(a, max.rows, max.cols) } if(any(is.infinite(G1))) { code <- 6; break; } } # end of repeat over batches if(storeValues) { ## store last value of the epoch if(is.null(f1)) { f1 <- fn(start1, fixed = fixed, sumObs = TRUE, index=index, ...) } valueStore[iter + 1L] <- c(f1) # c removes dimensions and attributes } if(storeParameters) { ## store last value of the epoch parameterStore[iter + 1L,] <- c(start1) # c removes dimensions and attributes } if(slot(control, "printLevel") > 2) { if(is.null(f1)) { f1 <- fn(start1, fixed = fixed, sumObs = TRUE, index=index, ...) 
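    ## Illustrative sketch (not run): 'storeValues'/'storeParameters' record the
    ## objective value and the parameter vector once per epoch, and
    ## 'SG_patience' stops the iterations early when the value has not improved;
    ## the traces can be retrieved with storedValues() and storedParameters().
    ##    fit <- maxAdam(llf, glf, start = c(mu = 0), nObs = length(x),
    ##                   control = list(SG_batchSize = 32, SG_patience = 5L,
    ##                                  storeValues = TRUE, storeParameters = TRUE))
    ##    plot(storedValues(fit), type = "l")  # 'llf', 'glf', 'x' as in the sketch above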
} cat(" learning rate", learningRate, " fcn value:", formatC(as.vector(f1), digits=8, format="f"), "\n") a <- cbind(learningRate*G0, start1, G1, as.integer(!fixed)) dimnames(a) <- list(names(start0), c("amount", "param", "gradient", "active")) printRowColLimits(a, max.rows, max.cols) } ## stopping criteria if( sqrt( crossprod( G1[!fixed] ) ) < slot(control, "gradtol") ) { code <-1; break } if(!is.null(patience) && (iter %% patienceStep == 0)) { if(is.null(f1)) { f1 <- fn(start1, fixed = fixed, sumObs = TRUE, index=index, ...) } if(f1 < fBest) { patienceCount <- patienceCount + 1 } else { patienceCount <- 0 fBest <- f1 paramBest <- start1 } if(patienceCount > patience) { code <- 10 f1 <- fBest start1 <- paramBest break } } } # main iteration loop over epochs if(printLevel > 0) { cat( "--------------\n") cat( maximMessage( code), "\n") cat( iter, " iterations\n") cat( "estimate:", headDots(start1, max.cols), "\n") if(is.null(f1)) { f1 <- fn(start1, fixed = fixed, sumObs = TRUE, index=index, ...) } cat( "Function value:", f1, "\n") } if(finalHessian & !bhhhHessian) { G1 <- grad( start1, fixed = fixed, sumObs = FALSE, index=index, ... ) } if(observationGradient(G1, length(start1))) { gradientObs <- G1 colnames( gradientObs ) <- names(start1) G1 <- colSums(as.matrix(G1 )) } else { gradientObs <- NULL } names( G1 ) <- names(start1) ## calculate (final) Hessian if(tolower(finalHessian) == "bhhh") { if(!is.null(gradientObs)) { hessian <- - crossprod( gradientObs ) attr(hessian, "type") <- "BHHH" } else { hessian <- NULL warning("For computing the final Hessian by 'BHHH' method, the log-likelihood or gradient must be supplied by observations") } } else if( finalHessian != FALSE ) { hessian <- hess( start1, fixed = fixed, index=index, ... ) } else { hessian <- NULL } if( !is.null( hessian ) ) { rownames( hessian ) <- colnames( hessian ) <- names(start1) } ## remove attributes from final value of objective (likelihood) function attributes( f1 )$gradient <- NULL attributes( f1 )$hessian <- NULL attributes( f1 )$gradBoth <- NULL attributes( f1 )$hessBoth <- NULL ## result <- list( maximum = unname( drop( f1 ) ), estimate=start1, gradient=drop(G1), hessian=hessian, code=code, message=maximMessage( code), fixed=fixed, iterations=iter, type=maximType, valueStore = if(storeValues) valueStore else NULL, parameterStore = if(storeParameters) parameterStore else NULL ) if( exists( "gradientObs" ) ) { result$gradientObs <- gradientObs } result <- c(result, control=control) # attach the control parameters ## class(result) <- c("maxim", class(result)) invisible(result) } maxLik/R/returnMessage.R0000644000175100001440000000036014077525067014701 0ustar hornikusers returnMessage <- function(x, ...) UseMethod("returnMessage") returnMessage.default <- function(x, ...) x$returnMessage returnMessage.maxim <- function(x, ...) x$message returnMessage.maxLik <- function(x, ...) x$message maxLik/R/maxBFGSR.R0000644000175100001440000001245614077525067013437 0ustar hornikusers maxBFGSR <- function(fn, grad=NULL, hess=NULL, start, constraints=NULL, finalHessian=TRUE, fixed=NULL, activePar=NULL, control=NULL, ...) { ## Newton-Raphson maximization ## Parameters: ## fn - the function to be minimized. Returns either scalar or ## vector value with possible attributes ## constPar and newVal ## grad - gradient function (numeric used if missing). Must return either ## * vector, length=nParam ## * matrix, dim=c(nObs, 1). Treated as vector ## * matrix, dim=c(M, nParam), where M is arbitrary. 
In this case the ## rows are simply summed (useful for maxBHHH). ## hess - hessian function (numeric used if missing) ## start - initial parameter vector (eventually w/names) ## ... - extra arguments for fn() ## The maxControl structure: ## The stopping criteria ## tol - maximum allowed absolute difference between sequential values ## reltol - maximum allowed reltive difference (stops if < reltol*(abs(fn) + reltol) ## gradtol - maximum allowed norm of gradient vector ## steptol - minimum step size ## iterlim - maximum # of iterations ## finalHessian include final Hessian? As computing final hessian does not carry any extra penalty for NR method, this option is ## mostly for compatibility reasons with other maxXXX functions. ## TRUE/something else include ## FALSE do not include ## activePar - an index vector -- which parameters are taken as ## variable (free). Other paramters are treated as ## fixed constants ## fixed index vector, which parameters to keep fixed ## ## RESULTS: ## a list of class "maxim": ## maximum function value at maximum ## estimate the parameter value at maximum ## gradient gradient ## hessian Hessian ## code integer code of success: ## 1 - gradient close to zero ## 2 - successive values within tolerance limit ## 3 - could not find a higher point (step error) ## 4 - iteration limit exceeded ## 100 - initial value out of range ## message character message describing the code ## last.step only present if code == 3 (step error). A list with following components: ## theta0 - parameter value which led to the error ## f0 - function value at these parameter values ## climb - the difference between theta0 and the new approximated parameter value (theta1) ## activePar - logical vector, which parameters are active (not constant) ## activePar logical vector, which parameters were treated as free (resp fixed) ## iterations number of iterations ## type "Newton-Raphson maximization" ## ## ------------------------------ ## Add parameters from ... to control if(!inherits(control, "MaxControl")) { mControl <- addControlList(maxControl(), control) } else { mControl <- control } mControl <- addControlList(mControl, list(...), check=FALSE) ## argNames <- c(c( "fn", "grad", "hess", "start", "activePar", "fixed", "control"), openParam(mControl)) checkFuncArgs( fn, argNames, "fn", "maxBFGSR" ) if( !is.null( grad ) ) { checkFuncArgs( grad, argNames, "grad", "maxBFGSR" ) } if( !is.null( hess ) ) { checkFuncArgs( hess, argNames, "hess", "maxBFGSR" ) } ## establish the active parameters. Internally, we just use 'activePar' fixed <- prepareFixed( start = start, activePar = activePar, fixed = fixed ) ## chop off the control args from ... and forward the new ... dddot <- list(...) 
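   ## keep only the arguments that are not control options; these remain in '...'
   ## and are forwarded unchanged to fn, grad and hess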
dddot <- dddot[!(names(dddot) %in% openParam(mControl))] cl <- list(start=start, finalHessian=finalHessian, fixed=fixed, control=mControl) if(length(dddot) > 0) { cl <- c(cl, dddot) } if(is.null(constraints)) { cl <- c(quote(maxBFGSRCompute), fn=logLikAttr, fnOrig = fn, gradOrig = grad, hessOrig = hess, cl) result <- eval(as.call(cl)) } else { if(identical(names(constraints), c("ineqA", "ineqB"))) { stop("Inequality constraints not implemented for maxBFGSR") } else if(identical(names(constraints), c("eqA", "eqB"))) { # equality constraints: A %*% beta + B = 0 cl <- c(quote(sumt), fn=fn, grad=grad, hess=hess, maxRoutine=maxBFGSR, constraints=list(constraints), cl) result <- eval(as.call(cl)) } else { stop("maxBFGSR only supports the following constraints:\n", "constraints=list(ineqA, ineqB)\n", "\tfor A %*% beta + B >= 0 linear inequality constraints\n", "current constraints:", paste(names(constraints), collapse=" ")) } } result$objectiveFn <- fn return( result ) } maxLik/R/nParam.R0000644000175100001440000000040214077525067013270 0ustar hornikusers## Return the #of parameters of model nParam.maxim <- function(x, free=FALSE, ...) { if(!inherits(x, "maxim")) { stop("'nParam.maxim' called on non-'maxim' object") } if(free) sum( activePar( x ) ) else length( x$estimate ) } maxLik/R/prepareFixed.R0000644000175100001440000000572614077525067014506 0ustar hornikusersprepareFixed <- function( start, activePar, fixed ) { nParam <- length( start ) ## establish the active parameters. if(!is.null(fixed)) { if(!is.null(activePar)) { if(!all(activePar)) { warning("Both 'activePar' and 'fixed' specified. 'activePar' ignored") } } if( is.logical( fixed ) ) { if( length ( fixed ) != length( start ) || !is.null( dim( fixed ) ) ) { stop( "if fixed parameters are specified using logical values,", " argument 'fixed' must be a logical vector", " with one element for each parameter", " (number of elements in argument 'start')" ) } activePar <- !fixed } else if( is.numeric( fixed ) ) { if( length ( fixed ) >= length( start ) || !is.null( dim( fixed ) ) ) { stop( "if fixed parameters are specified using their positions,", " argument 'fixed' must be a numerical vector", " with less elements than the number of parameters", " (number of elements in argument 'start'" ) } else if( min( fixed ) < 1 || max(fixed ) > length( start ) ) { stop( "if fixed parameters are specified using their positions,", " argument 'fixed' must have values between 1 and", " the total number of parameter", " (number of elements in argument 'start'" ) } activePar <- ! c( 1:length( start ) ) %in% fixed } else if( is.character( fixed ) ) { if( length ( fixed ) >= length( start ) || !is.null( dim( fixed ) ) ) { stop( "if fixed parameters are specified using their names,", " argument 'fixed' must be a vector of character strings", " with less elements than the number of parameters", " (number of elements in argument 'start'" ) } else if( is.null( names( start ) ) ) { stop( "if fixed parameters are specified using their names,", " parameter names have to be specified in argument 'start'" ) } else if( any( ! names( fixed ) %in% names( start ) ) ) { stop( "if fixed parameters are specified using their names,", " all parameter names specified in argument 'fixed'", " must be specified in argument 'start'" ) } activePar <- ! 
names( start ) %in% fixed } else { stop( "argument 'fixed' must be either a logical vector,", " a numeric vector, or a vector of character strings" ) } } else { if( is.null( activePar ) ) { activePar <- rep( TRUE, length( start ) ) } else if(is.numeric(activePar)) { a <- rep(FALSE, nParam) a[activePar] <- TRUE activePar <- a } } names( activePar ) <- names( start ) if( all( !activePar ) ){ stop( "At least one parameter must not be fixed", " using argument 'fixed'" ) } return( !activePar ) }maxLik/R/nIter.R0000644000175100001440000000030214077525067013132 0ustar hornikusers## Return #of iterations for maxim objects nIter <- function(x, ...) ## Number of iterations for iterative models UseMethod("nIter") nIter.default <- function(x, ...) x$iterations maxLik/NEWS0000644000175100001440000002362614600010344012215 0ustar hornikusersTHIS IS THE CHANGELOG OF THE "maxLik" PACKAGE Please note that only the most significant user visible changes are reported here. A full ChangeLog is available in the log messages of the SVN repository on R-Forge. CHANGES IN VERSION 1.5-0 (2020-07-26) * maxLik methods for 'tidy' and 'glance' generics (by David Hugh-Jones) * maxLik method for 'confint' (by Luca Scrucca) * most tests moved to 'tinytest' package * fixed an issue with negative reltol values CHANGES IN VERSION 1.4-8 (2020-03-22) * added two vignettes: "Getting started with maximum likelihood and maxLik" and "maximum likelihood estimation with maxLik" CHANGES IN VERSION 1.4-6 (2020-11-24) * changed the name of internal function head... to headDots to avoid issues with perforce VCS * maxNR and friends now correctly return code 8 if reltol stopping condition invoked * documentation fixes and clean-ups CHANGES IN VERSION 1.4-4 (2020-07-08) * fixed another issue with CRAN tests on ATLAS CHANGES IN VERSION 1.4-2 (2020-07-08) * fixed CRAN test issues CHANGES IN VERSION 1.4-0 (2020-07-07) * includes stochastic gradient ascent and Adam optimizer CHANGES IN VERSION 1.3-10 (2020-05-13) * fixed an issue where maxControl() silently ignored a number of parameters * print.summary.maxim accepts parameters max.rows and max.cols, and only prints this many columns/rows of output matrices CHANGES IN VERSION 1.3-8 (2019-05-18) * better handling of matrix class CHANGES IN VERSION 1.3-8 (2020-01-01) * better handling of matrix class CHANGES IN VERSION 1.3-6 (2019-05-18) * 'maxim' objects now support 'maxValue' and 'gradient' methods. * tests cleaned and give fewer notes on check CHANGES IN VERSION 1.3-4 (2015-11-08) * If Hessian is not negative definite in maxNRCompute, the program now attempts to correct this repeatedly, but not infinite number of times. If Marquardt selected, it uses Marquardt lambda and it's update method. * Fixed an issue where summary.maxLik did not use 'eigentol' option for displaying standard errors CHANGES IN VERSION 1.3-2 (2015-10-28) * Corrected a bug that did not permit maxLik to pass additional arguments to the likelihood function CHANGES IN VERSION 1.3-0 (2015-10-24) * maxNR & friends now support argument 'qac' (quadratic approximation correction) option that allows to choose the behavior if the next guess performs worse than the previous one. This includes the original step halving while keeping direction, and now also Marquardt's (1963) shift toward the steepest gradient. * all max** functions now take control options in the form as 'control=list(...)', analogously as 'optim'. The former method of directly supplying options is preserved for compatibility reasons. 
* sumt, and stdEr method for 'maxLik' are now in namespace * the preferred way to specify the amount of debugging information is now 'printLevel', not 'print.level'. CHANGES IN VERSION 1.2-4 (2014-12-31) * Equality constraints (SUMT) checks conformity of the matrices * coef.maxim() is now exported * added argument "digits" to print.summary.maxLik() * added argument "digits" to condiNumber.default() * further arguments to condiNumber.maxLik() are now passed to condiNumber.default() rather than to hessian() CHANGES IN VERSION 1.2-0 (2013-10-22) * Inequality constraints now support multiple constraints (B may be a vector). * Fixed a bug in documentation, inequality constraint requires A %*% theta + B > 0, not >= 0 as stated earlier. * function sumKeepAttr() is imported from the miscTools package now (before maxLik() could not be used by another package when this package imported (and not depended on) the maxLik package) (bug reported and solution provided by Martin Becker) CHANGES IN VERSION 1.1-8 (2013-09-17) * fixed bug that could occur in the Newton-Raphson algorithm if the log-likelihood function returns a vector with observation-specific values or if there are NAs in the function values, gradients, or Hessian CHANGES IN VERSION 1.1-4 (2013-09-16) * the package code is byte-compiled * if the log-likelihood function contains NA, the gradient is not calculated; if components of the gradient contain NA, the Hessian is not calculated * slightly improved documentation * improved warning messages and error messages when doing constrained optimisation * added citation information * added start-up message CHANGES IN VERSION 1.1-2 (2012-03-04) * BHHH only considers free parameters when analysing the size of gradient * numericGradient and numericHessian check for the length of vector function CHANGES IN VERSION 1.1-0 (2012-01-...) * Conjugate-gradient (CG) optimization method included. * it is guaranteed now that the variance covariance matrix returned by the vcov() method is always symmetric. * summary.maxLik is guaranteed to use maxLik specific methods, even if corresponding methods for derived classes have higher priority. CHANGES IN VERSION 1.0-2 (2011-10-16) This is mainly bugfix release. * maxBFGSR works with fixed parameters. * maxBFGS and other optim-based routines work with both fixed parameters and inequality constraints. * constrOptim2 removed from API. Names of it's formal arguments are changed. CHANGES IN VERSION 1.0-0 (2010-10-15) * moved the generic function stdEr() including a default method and a method for objects of class "lm" to the "miscTools" package (hence, this package now depends on the version 0.6-8 of the "miscTools" package that includes stdEr() * if argument print.level is 0 (the default) and some parameters are automatically fixed during the estimation, because the returned log-likelihood value has attributes "constPar" and "newVal", the adjusted "starting values" are no longer printed. 
CHANGES IN VERSION 0.8-0 * fixed bug that occured in maxBFGS(), mxNM(), and maxSANN if the model had only one parameter and the function specified by argument "grad" returned a vector with the analytical gradients at each observation * maxNR() now performs correctly with argument "iterlim" set to 0 * maxNR, maxBHHH(), maxBFGS(), maxNM(), and maxSANN() now use attributes "gradient" and "hessian" of the object returned by the log-likelihood function; if supplied, these are used instead of arguments "grad" and "hess" * added function maxBFGSR() that implements the BFGS algorithm (in R); this function was originally developed by Yves Croissant and placed in the "mlogit" package * maxNR() now has an argument "bhhhHessian" (defaults to FALSE): if this argument is TRUE, the Hessian is approximated by the BHHH method (using information equality), i.e. the BHHH optimization algorithm is used * maxLik() now has an argument 'finalHessian'; if it is TRUE, the final Hessian is returned; if it is the character string "BHHH", the BHHH approximation of the Hessian matrix (using information equality) with attribute "type" set to "BHHH" is returned * maxNR(), maxBHHH(), maxBFGS(), maxNM(), and maxSANN() now additionally return a component "gradientObs" that is the matrix of gradients evaluated at each observation if argument "grad" returns a matrix or argument "grad" is not specified and argument "fn" returns a vector * the definitions of the generic functions nObs() and nParam() have been moved to the "miscTools" package * added methods bread() and estfun() for objects of class "maxLik" (see documentation of the generic functions bread() and estfun() defined in package "sandwich") * replaced argument "activePar" of numericGradient(), numericHessian(), and numericNHessian() by argument "fixed" to be consistent with maxLik(), maxNR(), and the other maxXXX() functions * maxNR(), maxBHHH(), maxBFGSYC(), maxBFGS(), maxNM(), maxSANN(), and summary.maxLik() now return component "fixed" instead of component "activePar" CHANGES IN VERSION 0.7-2 * corrected negative definiteness correction of Hessian in maxNR() which led to infinite loops * changed stopping condition in sumt(): instead of checking whether estimates are stimilar, we check for penalty being low now CHANGES IN VERSION 0.7-0 * Holding parameters fixed in maxNR() (and hence, also in maxBHHH()) should now be done by the new (optional) argument "fixed", because it is convenient to use than the "old" argument "activePar" in many situations. However, the "old" argument "activePar" is kept for backward-compatibility. * added (optional) argument "fixed" to functions maxBFGS(), maxNM(), and maxSANN(), which can be used for holding parameters fixed at their starting values * added function constrOptim2(), which is a modified copy of constrOptim() from the "stats" package, but which includes a bug fix * added optional argument "cand" to function maxSANN(), which can be used to specify a function for generating a new candidate point (passed to argument "gr" of optim()) * added argument "random.seed" to maxSANN() to ensure replicability * several mainly smaller improvements in ML estimations with linear equality and inequality constraints (via sumt() and constrOptim2(), respectively) * several internal changes that make the code easier to maintain CHANGES IN VERSION 0.6-0 * maxLik() can perform maximum likelihood estimations under linear equality and inequality constraints on the parameters now (see documentation of the new argument "constraints"). 
Please note that estimations under constraints are experimental and have not been thoroughly tested yet. * a new method "stdEr" to extract standard errors of the estimates has been introduced * added a "coef" method for objects of class "summary.maxLik" that extracts the matrix of the estimates, standard errors, t-values, and P-values * some minor bugs have been fixed * we did some general polishing of the returned object and under the hood CHANGES IN VERSION 0.5-12 AND BEFORE * please take a look at the log messages of the SVN repository on R-Forge maxLik/vignettes/0000755000175100001440000000000015124514352013527 5ustar hornikusersmaxLik/vignettes/probability-density.pdf0000644000175100001440000002637714077553422020245 0ustar hornikusers%PDF-1.4 %Çì¢ %%Invocation: gs -q -dNOPAUSE -dBATCH -P -dSAFER -dDELAYSAFER -sDEVICE=pdfwrite -dEPSCrop -dSubsetFonts=true -dEmbedAllFonts=true -dMaxSubsetPct=100 -dPDFSETTINGS=/prepress -dCompatibilityLevel=1.4 -dAutoRotatePages=/None -g612x792 %%+ -dDEVICEWIDTHPOINTS=315.873 -dDEVICEHEIGHTPOINTS=113.838 -sOutputFile=? ? ? -f ? 5 0 obj <> stream xœÕ˜»Žd·†wàg8átÐïN ”Y;™¤h+YØ ßßOž OOËZ“‹EwUW‘uý«8?/Ö¸Åêßúùòzù꛲üðÏ‹5ÍÚÐÜòÃ%Øb\ò‹kÙTë–×’5¥,Ÿ/!$“–½q¹òLK(ÁäP–Ð`¤k2þË%´f|Îpª±e‰6›ÑkÎØä–肉2ç ò+c3àó“>_>]œi­¥¸ürqË×üÿéb—¿ý>Œÿñòáòí¢iQ'­Ÿß“ˆVk­­ñ-Ì®¬œPœI^¾èœ¡[¥·Þ“ÇÂ6… 98Ø&½ÕÌCheÌ2ûIÕw_w¡!!Ÿ’É9NBÞUb˜OR–ÂÂtŽË³ä¦KÔm«HîœÇFqî´¾OïxÖ‡)ä1WÝìK²‰Î8ûÝâç»w½ý¦³[s¯5üx¿³T –bá—¶¢¸+üA Øß*¡{©×KöÄö™õޏ%lU~ I× ÆQòv™Ãu O»ÖçKªÁ¤t✕FÌÞë¤3êÑôÖd7uÛÆitD² »+¡š‚d¢Ü+)hšßÒá)i©@Fàö4€ 1¢I …j·ÔÚŒä«ç² éŸ ù•sîÞ®;èûÝX= 4:¦Ûç=æm¶Ìþœ­{\°»×¥aH™âàȯ©£`7;'©ÍòYj×´;I N—ʹQt³T¦Ør8IÙ^YŒŒóy»®ÚUX»qðÇÔà{ÏZ£ôßיּ}]­NÞO§6N ùìqÑ„zŠËЛnÚOÚ­¹ÓZýx·³ÁÞ}ÍÈŸòçߪ¢;)†Šê‚7ëMqk5ƒ¯KM Vè÷ Î |»ÔdMa©»žµV ò3笵ÆíÝÎúpÉò#4ü Z!î¾S}ìÝÓL¢R8©*D·Îà²q--žuJàz4r´&›ä\_Q2Á+X›hÝÆgqŒ„–—ĉlBh”˜{̱qli…¢äÿ’0Ò‚`ÕãKX2 !D{¹TVª‚ÞVGï ñ§P£Í3›"Ö‘”–äGKÕTêN·óÑÀ]b_'Âkë©v¿”¤ÑÕñÖæ„CKa˜ylq4„!¥a=íá9Â2»á³+ÂÛ¥Ûw^çI+ÞW|k}‚cX~kk*…‚í%vË\p£3jéuáX «Õ43#—.mDVC" @Ú"±Eå¨ùî|ÌM kƒ8(‹fRöFf2ÞA&4 ‰¦Êð=à-äºm ‹!®ÓÅzÄÁ7FôIXú$ìƒðÍàÓ¤#dšt|]:àš«¨Y4úª•wS }AYtÁ¤H_`nÅ«®±µÞž oé‹F®иx›Š®”-‘jð–båóȶw:‹z   ¡Çq¡`a;'Û”V÷æ('OÿI gµ$­êé ÈÈ'/>°ÒRÚGÁ"né°EñÕXô®àÄDò÷‘œà£)|¤r€Àôê H° °U%ªD‘ô‰®PcîÇ.îUQ%KÒ|¢+0zxÀÀg²FªŽæö¹ãí_;Ðà!¦¹­ëõBÓáÀ ¾äñ²CÚõ•®B¥8®¨´øq ”¯XC²‚NÊ`X#¤üœÊ7¢ZzWD£Þ6ú¡·P¢^îàç½#åw }9‹à4¶Ú´ÜéÓ¨8$Z÷éUœPYÐý»Š€·M‰IEpÔªmH´Öi ]'ý2îP7 Ñ´‰h-Ç.¡ßQZ˜®3|l§G1$x«fŽz½\ëÚ‰pšz—i5°_tw´ô¶8èŒU£sxxpÆq‚j³Nwþ°Õ­p4oÒ“|5’¦¦`üî¸Ïis]qT”[ ØY—ʪ^ƒK0'‰lѳºôWô‘q4Y¶„:´ŽØ­~2üVŠ oü z¥ôʘ÷Œ”†µ“Pêméä‡þ’„óMtÁ|ŠƒºÆ‹õ7«B„9ÓYx¸Šó¿ ‰¼Ò!ŒÓý §˜4Õú¬¥±­Ï hÚ³Š&m¢µ`Œ+ôzâ”~´hí¤¢kZö˜ ²+ð³"ÍðÌç+T¡“}ÊOF2í];¹¡ùß&?A¾¾’nqØé=R;g¥í~ 4Å©Ï- Õn?°%9?¥ÐÝ·ä¾¼Iw/¦k3Âìýõkì¯ÚOòIhçBwõuÜ!ÒÇJ™ÑleL".Oˆ·q¡ŸÙgú_#×—×å/Ï—¯¾a¨Òù™Ð=ãÙ`ȳ©îÈ‹¶„ç×Ë·Oÿ¾Þ8Ÿqç¾þMê­H]•²cÔåF¤³Õûü%w­X`m\uü|·,gñ?_o›dœ%ñØiÐÛ!ö‡Cìt Kâ&â®ÚVY¨†ÜÉM,wæMôOWz$YW ¸Í7çql,O­ó¥=¥'ý›[´g]!€]ç—+qr,ÁOÿØnzúx½i£Á¾§]oHó:rO?®ÜXXÍë¸'£[òôÇëóOMÐÄvrã ïé0îûtõš›î‘;¿¹òÝ“.&7>=¸-˜=>[âCü­Ä§VòœøØRxdETžNâß]1FNŠ^E Qo£{²=´¼MÅþ¬)¬é R¸Ÿ¹¦PßÒ¹'i/T½öX¾ÌAI9ÿÝàG7¤‡f>ì†/ „ªZ÷ÿÄ"ÌǪty ¤qÚãÒ­i+]Ó^º©l¥ ÷‹K·wÈ£Ò­î¡íoj·ßüEµËåWÒT´öÜÕnOS|@v¯žÙ_+_÷6kÜñŽåvÜ©ýÛÇ vïÉÚ?ˆý­è ÍNô8.§+o^ó^úè®É¹)ÅÈdG„nÅò8Muã'3n¤¿•À8J±3õjôülæ_Ÿ/çßgïbƒendstream endobj 6 0 obj 2275 endobj 4 0 obj <> /Contents 5 0 R >> endobj 3 0 obj << /Type /Pages /Kids [ 4 0 R ] /Count 1 >> endobj 1 0 obj <> endobj 7 0 obj <>endobj 16 0 obj <> endobj 17 0 obj <> endobj 14 0 obj <> endobj 22 0 obj <> endobj 12 0 obj <> endobj 10 0 obj <> endobj 8 0 obj <> endobj 23 0 obj <> endobj 15 0 obj <> endobj 18 0 obj <>stream xœcd`ab`dddsö Ž´±T~H3þaú!ËÜÝý3æûÖnæn–•ß;„¾G ~çÿ"ÀÀÌȘWÜäœ_PY”™žQ¢ ‘¬©`hii®£`d``©à˜›Z”™œ˜§à›X’‘š›Xää(ç'g¦–T*hØd””Xéë———ë%æëå¥Ûiê(”g–d(¥§•¥¦(¸åç•(ø%æ¦*€¦&ós 
JKR‹|óSR‹òx##‹æ¾Ÿ›¿Wnc|ø“ù§ù÷Jѹóº.(ï®’ÿ³‡­ª¼»¬t^÷\y¾âÅ?í²ý–™Æ¾k·‹XZ=ç¶9<<@ÌËÀt\Ò endstream endobj 13 0 obj <> endobj 19 0 obj <>stream xœuT{PSw¾1{‹BzË£îMu|ÚõAW냕"ŠR­£‚"ˆá!  •’!¼ŸAÂ+>ª‚Š®Z­®\­­]­¸®µhõ®³íÎôŸ3sçÞ™ßùî÷ßw>e7‚öþÁ«ü†_&Ú>ØÆŒ°ýI$(}(Û…àh· s .팫GSB@‘¤õW&¦«âbbÕÒIQ“¥3üüæL“Îôõõ“.RÈTqQ‘ ÒàHu¬L©æ?äÒÕʨ8™:]:i^¬Zø©OZZšw¤"Ù[©Šùlò4iZœ:VºJ–,S¥Ê¶H” jéŠH…L:ÌÍ{øá¯T$¦¨e*i°r‹L•@Q”‹:mKlœßüI“§ûΘùɬÙs)j µŽZB-¥‚(OŠ£> ('Ê…r¥ÄÔ‡ÔG”åA¹òš);*Pà 0p¡2 ¡EøÖNlWhwÛ>Ðþ(›v§lz'›—¶Å&¶âÜzÁË~ôlâmÛxÖJ&àr%mœ;~ŠžQÝ'­4±ûAÝwùÀ¥kÜåz‘<&&þYËaí4ô•Öló´ ꇾ¹à}¶áüa“yc¥£ôØ 2ØQǽät·±*Ò Ë«ßéC Ü'b³®  À£ÉšßÄYéúÍrH€ø÷{¡ÞX¾½T[z0è´`ô#)îÓ°2·òÁäÑ|ê†û jcÿ¯ˆ{ßו†Š8dÉK÷ÂÌ‚¬j¨S¹©l÷¹?'ûLY&m5xTCA™©Œq²]ÖZ¶[mY0ºËuÿ=´\sï³y # é ß–*ߨ‰‚ˆjLêJê6œƒ½p¬¨¯©³aïaktAgj³¬"Œ a γº¢ð¾ùÀgéd—gqä#ÝæZ‰xÃ2§–Û[éϳ‹{%¶4™¤µ—ÓWJw†Jxð;Úõct¿— ±8ò¦†Þ¨…¥œœ>–_·«xÆ>/×J/ÕÂQ Ñ/ož¼x®2t%G2þïÙu £•Á/i¸ ­ÊêYåz„ÐXùjÆ(ržˆ}:"åú=R‹øa àrÛ –Œ¦r©47TBÌÿƒ?’oÞõ5ÔðSèÕ¿ƒÊ+<$ÿZ`ë`Û’rµ*!aÊÚÖØØÆ9ÙÊRZ>úÉbkqíþ9h?»‰_Ùœñ ›B=F¬R¯3íKâL¢áèdšÒë•JMºbi_ÄCdùrG%'~Äñ»„žàM܈øÑT´çįn@g_m7CØ¿² ñ4Ð Ãâ›;ÚNªl‡+Ð`ʪŠ6­€fñV¯éœÓP'Š»qV·àÌÐBáÐ:ü‰­ªsC l“¼y%Ú–)©õP)±½&lu4˜5°]òæ­hG¤¦4@5u^$ßüzU½à׋B|L²Y(Ôä¶`  ? O†àXô4™ò  )Ö™vfÎð'‚5\ØÔäñ@F‚)&çˆóÀâ `Š MÅ@ÆLúšXø1G¬Ea:{ßÈ&Ë£×åë‹wJήÅd  ^I$ŒL ôyy côù9e¯£à×ó¤ñ9 #ã„ÿÂà ‚rlb³Hs0=|=–!W‹ óVÝÚxƒ…Ëú{±77=^Ðø¬„É›¦Æè>…”?¥{Þ±Ù·3ŽÂ·ÐßÒõ¨írÑM¸Ã9¹Ê*À¿JƒöYçàœá‡þ}Åéú—j÷C\L«ó.ÿæÃr†9Y!©D´(Kü–ñ¾;h`Ú-!ž³c¾Š ^æÃç#§ëà<œjbÞTÑÑúLo.^.j/x‡ùzíï¬O{ë+J†îàà‹A«àÄ Œ$´mÆ%¬RdØ‘²##7;€O †ˆ~yŒsqÜù>ô@zÚâLèù¾¾ë»u¥ m5ím© rŒy\ãµK{O3x`ZÀ„Ïæ†­•e$T«¨=lbÑ;Oó{‚ãšÐáÙÞ×(Àõ‹ÝÄoñ,þÆøúü¹5û/¯ù! ¶6hZ[ÍæÖÉÕq\S×ùú^`îÖ,Ý È SÉ$òðXC‚Ae0@0âÿdsŒZÏ%°›;&ºß±dùðó˜uš)…ýá’v]‹ö3ûTæ•b[äÌdÐeàá '~ ÷ú§š™wÜPðƒ,èvWpâ@­³ _àS6´U%¦Ò"®­«§ñ$0¯€xÇÈ!X.IŒŒÔl<˜Ü¿ Š ÅÀÔ·V¶[SjS•™Q¾ÿðA!ŽzþGã¨Y¿AXhFL¬×ÓmÙVt¾þ ²4פãeè²%4s"ùø /ÙX—Uhàs’É„¬¯$ïØÙ$–»:‡ÓA|¼AO~a°üLóÉúýgû¡Žæí‰a¢34‰Iµi--uµm‹êóåÃu,“d‰ø5iD§ÀÇ(|6ÀǃØç áÓàÍJˆˆU®dp4¹Í®‚­Ç2[5GŒw¡†¹lEêIM/ïåêd|N„ñª‰–¯Ú´3ÞÏlÈ™gt*©[z’N¹‰oáB[(«¨S7ÃAæÛ«‡¾»uaÍ’%+7F©9K{¼ñÆqøy<ýôŒq~ }dU1 ÜnMQÞÙhFüà“È~žc ä-Â<ý©/êQÕpÚ’„ÚÌbfB‹®ó÷l;+<æý%lÞŸ_¸{ýø7WÛ9§”›ÿnL(/j‘Í´Õ¡$ç`7§ÞñK‘£cã(Šú/§CÍ endstream endobj 11 0 obj <> endobj 20 0 obj <>stream xœcd`ab`ddduö 21T~H3þaú!ËÜÝýcÝO]Önæn–µ?”„¾G ~åÿ(ÀÀÌȘ[Øàœ_PY”™žQ¢ ‘¬©`hii®£`d``©à˜›Z”™œ˜§à›X’‘š›Xää(ç'g¦–T*hØd””Xéë———ë%æëå¥Ûiê(”g–d(¥§•¥¦(¸åç•(ø%æ¦*€\¦"œós JKR‹|óSR‹ò˜ „„Abñ?ô£ƒïÇ÷îM?æoÚ>ŸñûÒ‡Ìßÿä]ÙÞ+çé­ÚÞ͑¾´ûl÷ñåzÙ³ºëTäRØõßì> „7»µïåQÞ”À®Ò=c<Ȩ§ßŸgx²‰ñòw†ïS?0ÿX÷Ý_4­³¶Ñ³¦¡¥Õª»¶›ã·?Ûw‘Ïk—=9½j¥äÊ•[öu_âø.ø›õÊo©ß¢†fºÛZ§-Y1wýºÕñr‹öØx¡›ãËa[+—'Woùß¿s»ººs$¸³ñ•-øá<ë{þÔÉ Ø~'NcßÄu[Ž‹Å|>çêI<<÷æðð20Fh®Ò endstream endobj 9 0 obj <> endobj 21 0 obj <>stream xœ[HSaÇ¿Óæ:ÙXZÕÙ¡ ) -‰D‰"YÁ’Ê.sK=éÑ-·sÖÙÙ­Ômi}˜nÙr³mn§žÈX`t#Zd=„ôäCAy¡"ˆzˆïÈyÉ|ùóû¿ý~P®†© F㱊ÿ´SÚŒI[VH[P¦¤©EKT+ Z9¾èX.­CÔd^ ”Ƹ:» ¬ÓÏÙZ¬>stream 2021-07-26T08:38:58-07:00 2021-07-26T08:38:58-07:00 dvips(k) 5.999 Copyright 2019 Radical Eye Software probability-density_.dvi endstream endobj 2 0 obj <>endobj xref 0 25 0000000000 65535 f 0000002913 00000 n 0000010646 00000 n 0000002854 00000 n 0000002697 00000 n 0000000332 00000 n 0000002677 00000 n 0000002978 00000 n 0000003842 00000 n 0000008108 00000 n 0000003685 00000 n 0000007368 00000 n 0000003341 00000 n 0000004870 00000 n 0000003112 00000 n 0000004301 00000 n 0000003019 00000 n 0000003049 00000 n 0000004510 00000 n 0000005179 00000 n 0000007581 00000 n 0000008332 00000 n 0000003255 00000 n 0000004204 00000 n 0000009177 00000 n trailer << /Size 25 /Root 1 0 R /Info 2 0 R /ID [<0EBD16C126AFE1F871F9B1F51F37959F><0EBD16C126AFE1F871F9B1F51F37959F>] >> startxref 10865 %%EOF maxLik/vignettes/probability-density.asy0000644000175100001440000000403614077525067020260 0ustar hornikusersunitsize(25mm,65mm); defaultpen(fontsize(9)); real xLeft = -2.2; real 
xRight = 2.2; real yTop = 0.5; // normal density real dnorm(real x) { return 1/sqrt(2*pi)*exp(-1/2*x^2); } // compute normal curve, plot later path normalCurve; for(real x = xLeft + 0.1; x < xRight - 0.1; x += 0.15) { normalCurve = normalCurve..(x, dnorm(x)); } // Example points real xs[] = {-1.695, 0.3}; real delta = 0.15; int i = 1; for(real x : xs) { real fx = dnorm(x); real xl = x - delta/2; real xr = x + delta/2; real tl = times(normalCurve, xl)[0]; real tr = times(normalCurve, xr)[0]; path striptop = subpath(normalCurve, tl, tr); path area = (xl, 0)--striptop--(xr, 0)--cycle; filldraw(area, lightgray, linewidth(0.2)); draw((x, 0)--(x, dnorm(x)), dashed); label("$x_" + string(i) + " = " + format("%f", x) +"$", (x, 0), S + 0.2E); // width marks and width real barheight = dnorm(x) + 0.06; Label widthLabel = Label("width $\delta$", MidPoint, 2N); draw(widthLabel, (xl, barheight)--(xr, barheight), linewidth(0.4), Bars); arrow((xl, barheight), W, length=50delta, margin=DotMargin, linewidth(0.4)); arrow((xr, barheight), E, length=50delta, margin=DotMargin); // mark the function value real xmarker = x + 1.5delta; draw((x, fx)--(xmarker,fx), dotted); Label valueLabel = Label("$f(x_" + string(i) + ") = " + format("%5.3f", fx) + "$", position=EndPoint, E); path valuePath = (xmarker, fx)--(xmarker+delta, fx); draw(valueLabel, valuePath, linewidth(0.4)); pair barx = relpoint(valuePath, 0.5); draw((barx.x, 0)--barx, Arrow(4)); // ++i; } // add normal curve later as filling area cuts into the curve otherwise draw(normalCurve, linewidth(0.7)); // Add Axes after are to avoid cutting into it path xaxis = (xLeft,0)--(xRight,0); path yaxis = (0,0)--(0,yTop); draw(xaxis, Arrow(TeXHead, 1)); draw(yaxis, Arrow(TeXHead, 1)); label("$x$", point(xaxis, 1), 2S); // Axis labels real tickLength = 0.05*yTop; for(int x = (int)xLeft; x <= (int)xRight; ++x) { draw((x,0)--(x,-tickLength)); label(string(x), (x,-tickLength), 3S); } maxLik/vignettes/using-maxlik.Rnw0000644000175100001440000010626014077525067016647 0ustar hornikusers\documentclass[a4paper]{article} \usepackage{amsmath} \usepackage{bbm} \usepackage[inline]{enumitem} \usepackage[T1]{fontenc} \usepackage[bookmarks=TRUE, colorlinks, pdfpagemode=none, pdfstartview=FitH, citecolor=black, filecolor=black, linkcolor=blue, urlcolor=black, ]{hyperref} \usepackage{graphicx} \usepackage{icomma} \usepackage[utf8]{inputenc} \usepackage{mathtools} % for extended pderiv arguments \usepackage{natbib} \usepackage{xargs} % for extended pderiv arguments \usepackage{xspace} % \SweaveUTF8 \newcommand{\COii}{\ensuremath{\mathit{CO}_{2}}\xspace} \DeclareMathOperator*{\E}{\mathbbm{E}}% expectation \newcommand*{\mat}[1]{\mathsf{#1}} \newcommand{\likelihood}{\mathcal{L}}% likelihood \newcommand{\loglik}{\ell}% log likelihood \newcommand{\maxlik}{\texttt{maxLik}\xspace} \newcommand{\me}{\mathrm{e}} % Konstant e=2,71828 \newcommandx{\pderiv}[3][1={}, 2={}]{\frac{\partial^{#2}{#1}}{\mathmbox{\partial{#3}}^{#2}}} % #1: function to differentiate (optional, empty = write after the formula) % #2: the order of differentiation (optional, empty=1) % #3: the variable to differentiate wrt (mandatory) \newcommand{\R}{\texttt{R}\xspace} \newcommand*{\transpose}{^{\mkern-1.5mu\mathsf{T}}} \renewcommand*{\vec}[1]{\boldsymbol{#1}} % \VignetteIndexEntry{Maximum likelihood estimation with maxLik} \title{Maximum Likelihood Estimation with \emph{maxLik}} \author{Ott Toomet} \begin{document} \maketitle <>= library(maxLik) set.seed(6) @ \section{Introduction} \label{sec:introduction} This vignette is 
intended for users who are familiar with concepts of likelihood and with the related methods, such as information equality and BHHH approximation, and with \R language. The vignette focuses on \maxlik usage and does not explain the underlying mathematical concepts. Potential target group includes researchers, graduate students, and industry practitioners who want to apply their own custom maximum likelihood estimators. If you need a refresher, consult the accompanied vignette ``Getting started with maximum likelihood and \maxlik''. The next section introduces the basic usage, including the \maxlik function, the main entry point for the package; gradients; different optimizers; and how to control the optimization behavior. These are topics that are hard to avoid when working with applied ML estimation. Section~\ref{sec:advanced-usage} contains a selection of more niche topics, including arguments to the log-likelihood function, other types of optimization, testing condition numbers, and constrained optimization. \section{Basic usage} \label{sec:basic-usage} \subsection{The maxLik function} \label{sec:maxlik-function} The main entry point to \maxlik functionality is the function of the same name, \verb|maxLik|. It is a wrapper around the underlying optimization algorithms that ensures that the returned object is of the right class so one can use the convenience methods, such as \verb|summary| or \verb|logLik|. It is important to keep in mind that \maxlik \emph{maximizes}, not minimizes functions. The basic usage of the function is very simple: just pass the log-likelihood function (argument \verb|logLik|) and the start value (argument \verb|start|). Let us demonstrate the basic usage by estimating the normal distribution parameters. We create 100 standard normals, and estimate the best fit mean and standard deviation. Instead of explicitly coding the formula for log-likelihood, we rely on the \R function \verb|dnorm| instead (see Section~\ref{sec:different-optimizers} for a version that does not use \verb|dnorm|): <<>>= x <- rnorm(100) # data. true mu = 0, sigma = 1 loglik <- function(theta) { mu <- theta[1] sigma <- theta[2] sum(dnorm(x, mean=mu, sd=sigma, log=TRUE)) } m <- maxLik(loglik, start=c(mu=1, sigma=2)) # give start value somewhat off summary(m) @ The algorithm converged in 7 iterations and one can check that the results are equal to the sample mean and variance.\footnote{Note that \R function \texttt{var} returns the unbiased estimator by using denominator $n-1$, the ML estimator is biased with denominator $n$. } This example demonstrates a number of key features of \verb|maxLik|: \begin{itemize} \item The first argument of the likelihood must be the parameter vector. In this example we define it as $\vec{\theta} = (\mu, \sigma)$, and the first lines of \verb|loglik| are used to extract these values from the vector. \item The \verb|loglik| function returns a single number, sum of individual log-likelihood contributions of individual $x$ components. (It may also return the components individually, see BHHH method in Section~\ref{sec:different-optimizers} below.) \item Vector of start values must be of correct length. If its components are named, those names are also displayed in \verb|summary| (and for \verb|coef| and \verb|stdEr|, see below). \item \verb|summary| method displays a handy summary of the results, including the convergence message, the estimated values, and statistical significance. 
\item \verb|maxLik| (and other auxiliary optimizers in the package) is a \emph{maximizer}, not minimizer. \end{itemize} As we did not specify the optimizer, \verb|maxLik| picked Newton-Raphson by default, and computed the necessary gradient and Hessian matrix numerically. \bigskip Besides summary, \verb|maxLik| also contains a number of utility functions to simplify handling of estimated models: \begin{itemize} \item \verb|coef| extracts the model coefficients: <<>>= coef(m) @ \item \verb|stdEr| returns the standard errors (by inverting Hessian): <<>>= stdEr(m) @ \item Other functions include \verb|logLik| to return the log-likelihood value, \verb|returnCode| and \verb|returnMessage| to return the convergence code and message respectively, and \verb|AIC| to return Akaike's information criterion. See the respective documentation for more information. \item One can also query the number of observations with \verb|nObs|, but this requires likelihood values to be supplied by observation (see the BHHH method in Section~\ref{sec:different-optimizers} below). \end{itemize} \subsection{Supplying analytic gradient} \label{sec:supplying-gradients} The simple example above worked fast and well. In particular, the numeric gradient \verb|maxLik| computed internally did not pose any problems. But users are strongly advised to supply analytic gradient, or even better, both the gradient and the Hessian matrix. More complex problems may be intractably slow, converge to a sub-optimal solution, or not converge at all if numeric gradients are noisy. Needless to say, unreliable Hessian also leads to unreliable inference. Here we show how to supply gradient to the \verb|maxLik| function. We demonstrate this with a linear regression example. Non-linear optimizers perform best in regions where level sets (contours) are roughly circular. In the following example we use data in a very different scale and create the log-likelihood function with extremely elongated elliptical contours. Now Newton-Raphson algorithm fails to converge when relying on numeric derivatives, but works well with analytic gradient. % using matrix notation We combine three vectors, $\vec{x}_{1}$, $\vec{x}_{2}$ and $\vec{x}_{3}$, created at a very different scale, into the design matrix $\mat{X} = \begin{pmatrix} \vec{x}_{1} & \vec{x}_{2} & \vec{x}_{3} \end{pmatrix}$ and compute $\vec{y}$ as \begin{equation} \label{eq:linear-regression-matrix} \vec{y} = \mat{X} \begin{pmatrix} 1 \\ 1 \\ 1 \end{pmatrix} + \vec{\epsilon}. 
\end{equation} We create $\vec{x}_{1}$, $\vec{x}_{2}$ and $\vec{x}_{3}$ as random normals with standard deviation of 1, 1000 and $10^{7}$ respectively, and let $\vec{\epsilon}$ be standard normal disturbance term: <<>>= ## create 3 variables with very different scale X <- cbind(rnorm(100), rnorm(100, sd=1e3), rnorm(100, sd=1e7)) ## note: correct coefficients are 1, 1, 1 y <- X %*% c(1,1,1) + rnorm(100) @ Next, we maximize negative of sum of squared errors \emph{SSE} (remember, \verb|maxLik| is a maximizer not minimizer) \begin{equation} \label{eq:ols-sse-matrix} \mathit{SSE}(\vec{\beta}) = (\vec{y} - \mat{X} \cdot \vec{\beta})^{\transpose} (\vec{y} - \mat{X} \cdot \vec{\beta}) \end{equation} as this is equivalent to likelihood maximization: <<>>= negSSE <- function(beta) { e <- y - X %*% beta -crossprod(e) # note '-': we are maximizing } m <- maxLik(negSSE, start=c(0,0,0)) # give start values a bit off summary(m, eigentol=1e-15) @ As one can see, the algorithm gets stuck and fails to converge, the last parameter value is also way off from the correct value $(1, 1, 1)$. We have amended summary with an extra argument, \verb|eigentol=1e-15|. Otherwise \maxlik refuses to compute standard errors for near-singular Hessian, see the documentation of \verb|summary.maxLik|. It makes no difference right here but we want to keep it consistent with the two following examples. Now let's improve the model performance with analytic gradient. The gradient of \emph{SSE} can be written as \begin{equation} \label{eq:ols-sse-gradient-matrix} \pderiv{\vec{\beta}}\mathit{SSE}(\vec{\beta}) = -2(\vec{y} - \mat{X}\vec{\beta})^{\transpose} \mat{X}. \end{equation} \maxlik uses numerator layout, i.e. the derivative of the scalar log-likelihood with respect to the column vector of parameters is a row vector. We can code the negative of it as <<>>= grad <- function(beta) { 2*t(y - X %*% beta) %*% X } @ We can add gradient to \verb|maxLik| as an additional argument \verb|grad|: <<>>= m <- maxLik(negSSE, grad=grad, start=c(0,0,0)) summary(m, eigentol=1e-15) @ Now the algorithm converges rapidly, and the estimate is close to the true value. Let us also add analytic Hessian, in this case it is \begin{equation} \label{eq:ols-sse-hessian-matrix} \frac{\partial^{2}}{\partial\vec{\beta}\,\partial\vec{\beta}^{\transpose}} \mathit{SSE}(\vec{\beta}) = 2\mat{X}^{\transpose}\mat{X} \end{equation} and we implement the negative of it as <<>>= hess <- function(beta) { -2*crossprod(X) } @ Analytic Hessian matrix can be included with the argument \verb|hess|, and now the results are <>= m <- maxLik(negSSE, grad=grad, hess=hess, start=c(0,0,0)) summary(m, eigentol=1e-15) @ Analytic Hessian did not change the convergence behavior here. Note that as the loss function is quadratic, Newton-Raphson should provide the correct solution in a single iteration only. However, this example has numerical issues when inverting near-singular Hessian. One can easily check that when creating covariates in a less extreme scale, then the convergence is indeed immediate. While using separate arguments \texttt{grad} and \texttt{hess} is perhaps the most straightforward way to supply gradients, \maxlik also supports gradient and Hessian supplied as log-likelihood attributes. This is motivated by the fact that computing gradient often involves a number of similar computations as computing log-likelihood, and one may want to re-use some of the results. 
We demonstrate this on the same example, by writing a version of log-likelihood function that also computes the gradient and Hessian: <>= negSSEA <- function(beta) { ## negative SSE with attributes e <- y - X %*% beta # we will re-use 'e' sse <- -crossprod(e) # note '-': we are maximizing attr(sse, "gradient") <- 2*t(e) %*% X attr(sse, "Hessian") <- -2*crossprod(X) sse } m <- maxLik(negSSEA, start=c(0,0,0)) summary(m, eigentol=1e-15) @ The log-likelihood with ``gradient'' and ``Hessian'' attributes, \verb|negSSEA|, computes log-likelihood as above, but also computes its gradient, and adds it as attribute ``gradient'' to the log-likelihood. This gives a potential efficiency gain as the residuals $\vec{e}$ are re-used. \maxlik checks the presence of the attribute, and if it is there, it uses the provided gradient. In real applications the efficiency gain will depend on the amount of computations re-used, and the number of likelihood calls versus gradient calls. While analytic gradients are always helpful and often necessary, they may be hard to derive and code. In order to help to derive and debug the analytic gradient, another provided function, \verb|compareDerivatives|, takes the log-likelihood function, analytic gradent, and compares the numeric and analytic gradient. As an example, we compare the log-likelihood and gradient functions we just coded: <<>>= compareDerivatives(negSSE, grad, t0=c(0,0,0)) # 't0' is the parameter value @ The function prints the analytic gradient, numeric gradient, their relative difference, and the largest relative difference value (in absolute value). The latter is handy in case of large gradient vectors where it may be hard to spot a lonely component that is off. In case of reasonably smooth functions, expect the relative difference to be smaller than $10^{-7}$. But in this example the numerical gradients are clearly problematic. \verb|compareDerivatives| supports vector functions, so one can test analytic Hessian in the same way by calling \verb|compareDerivatives| with \verb|gradlik| as the first argument and the analytic hessian as the second argument. \subsection{Different optimizers} \label{sec:different-optimizers} By default, \maxlik uses Newton-Raphson optimizer but one can easily swap the optimizer by \verb|method| argument. The supported optimizers include ``NR'' for the default Newton-Raphson, ``BFGS'' for gradient-only Broyden-Fletcher-Goldfarb-Shannon, ``BHHH'' for the information-equality based Berndt-Hall-Hall-Hausman, and ``NM'' for gradient-less Nelder-Mead. Different optimizers may be based on a very different approach, and certain concepts, such as \emph{iteration}, may mean quite different things. For instance, although Newton-Raphson is a simple, fast and intuitive method that approximates the function with a parabola, it needs to know the Hessian matrix (the second derivatives). This is usually even harder to program than gradient, and even slower and more error-prone when computed numerically. Let us replace NR with gradient-only BFGS method. It is a quasi-Newton method that computes its own internal approximation of the Hessian while relying only on gradients. We re-use the data and log-likelihood function from the first example where we estimated normal distribution parameters: <>= m <- maxLik(loglik, start=c(mu=1, sigma=2), method="BFGS") summary(m) @ One can see that the results were identical, but while NR converged in 7 iterations, it took 20 iterations for BFGS. 
In this example the BFGS approximation errors were larger than numeric errors when computing Hessian, but this may not be true for more complex objective functions. In a similar fashion, one can simply drop in most other provided optimizers. One method that is very popular for ML estimation is BHHH. We discuss it here at length because that method requires both log-likelihood and gradient function to return a somewhat different value. The essence of BHHH is information equality, the fact that in case of log-likelihood function $\loglik(\theta)$, the expected value of Hessian at the true parameter value $\vec{\theta}_{0}$ can be expressed through the expected value of the outer product of the gradient: \begin{equation} \label{eq:information-equality} \E \left[ \frac{\partial^2 l(\vec{\theta})} {\partial\vec{\theta}\, \partial\vec{\theta}^{\transpose}} \right]_{\vec{\theta} = \vec{\theta}_0} = - \E \left[ \left. \frac{\partial l(\vec{\theta})} {\partial\vec{\theta}^{\transpose}} \right|_{\vec{\theta} = \vec{\theta}_0} \cdot \left. \frac{\partial l(\vec{\theta})} {\partial\vec{\theta}} \right|_{\vec{\theta} = \vec{\theta}_0} \right]. \end{equation} Hence we can approximate Hessian by the average outer product of the gradient. Obviously, this is only an approximation, and it is less correct when we are far from the true value $\vec{\theta}_{0}$. Note also that when approximating expected value with average we rely on the assumption that the observations are independent. This may not be true for certain type of data, such as time series. However, in order to compute the average outer product, we need to compute gradient \emph{by observation}. Hence it is not enough to just return a single gradient vector, we have to compute a matrix where rows correspond to individual data points and columns to the gradient components. We demonstrate BHHH method by replicating the normal distribution example from above. Remember, the normal probability density is \begin{equation} \label{eq:normal-pdf} f(x; \mu, \sigma) = \frac{1}{\sqrt{2\pi}} \frac{1}{\sigma} \, \me^{ -\displaystyle\frac{1}{2} \frac{(x - \mu)^{2}}{\sigma^{2}} }. \end{equation} and hence the log-likelihood contribution of $x$ is \begin{equation} \label{eq:normal-loglik} \loglik(\mu, \sigma; x) = - \log{\sqrt{2\pi}} - \log \sigma - \frac{1}{2} \frac{(x - \mu)^{2}}{\sigma^{2}} \end{equation} and its gradient \begin{equation} \label{eq:normal-loglik-gradient} \begin{split} \pderiv{\mu} \loglik(\mu, \sigma; x) &= \frac{1}{\sigma^{2}}(x - \mu) \\ \pderiv{\sigma} \loglik(\mu, \sigma; x) &= -\frac{1}{\sigma} + \frac{1}{\sigma^{2}}(x - \mu)^{2}. \end{split} \end{equation} We can code these two functions as <<>>= loglik <- function(theta) { mu <- theta[1] sigma <- theta[2] N <- length(x) -N*log(sqrt(2*pi)) - N*log(sigma) - sum(0.5*(x - mu)^2/sigma^2) # sum over observations } gradlikB <- function(theta) { ## BHHH-compatible gradient mu <- theta[1] sigma <- theta[2] N <- length(x) # number of observations gradient <- matrix(0, N, 2) # gradient is matrix: # N datapoints (rows), 2 components gradient[, 1] <- (x - mu)/sigma^2 # first column: derivative wrt mu gradient[, 2] <- -1/sigma + (x - mu)^2/sigma^3 # second column: derivative wrt sigma gradient } @ Note that in this case we do not sum over the individual values in the gradient function (but we still do in log-likelihood). Instead, we fill the rows of the $N\times2$ gradient matrix with the values observation-wise. 
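As a quick check (added here purely for illustration; it assumes the data vector
\verb|x| from the first example is still in the workspace), one can evaluate the
gradient function at the start value and confirm that it returns one row per
observation and one column per parameter:
<<>>=
g <- gradlikB(c(mu=1, sigma=2))
dim(g)      # number of observations, number of parameters
head(g, 3)  # first three observation-wise gradient rows
@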
The results are similar to what we got above and the convergence speed is in-between that of Newton-Raphson and BFGS: \label{code:bhhh-example} <<>>= m <- maxLik(loglik, gradlikB, start=c(mu=1, sigma=2), method="BHHH") summary(m) @ In case we do not have time and energy to code the analytic gradient, we can let \maxlik compute the numeric one for BHHH too. In this case we have to supply the log-likelihood by observation. This essentially means we remove summing from the original likelihood function: <<>>= loglikB <- function(theta) { mu <- theta[1] sigma <- theta[2] -log(sqrt(2*pi)) - log(sigma) - 0.5*(x - mu)^2/sigma^2 # no summing here # also no 'N*' terms as we work by # individual observations } m <- maxLik(loglikB, start=c(mu=1, sigma=2), method="BHHH") summary(m) @ Besides of relying on information equality, BHHH is essentially the same algorithm as NR. As the Hessian is just approximated, its is converging at a slower pace than NR with analytic Hessian. But when relying on numeric derivatives only, BHHH may be more reliable. For convenience, the other methods also support observation-wise gradients and log-likelihood values, those numbers are just summed internally. So one can just code the problem in an BHHH-compatible manner and use it for all supported optimizers. \maxlik package also includes stochastic gradient ascent optimizer. As that method is rarely used for ML estimation, it cannot be supplied through the ``method'' argument. Consult the separate vignette ``Stochastic gradient ascent in \maxlik''. \subsection{Control options} \label{sec:control-options} \maxlik supports a number of control options, most of which can be supplied through \verb|control=list(...)| method. Some of the most important options include \verb|printLevel| to control debugging information, \verb|iterLim| to control the maximum number of iterations, and various \verb|tol|-parameters to control the convergence tolerances. For instance, we can limit the iterations to two, while also printing out the parameter estimates at each step. We use the previous example with BHHH optimizer: <<>>= m <- maxLik(loglikB, start=c(mu=1, sigma=2), method="BHHH", control=list(printLevel=3, iterlim=2)) summary(m) @ The first option, \verb|printLevel=3|, make \verb|maxLik| to print out parameters, gradient a few other bits of information at every step. Larger levels output more information, printlevel 1 only prints the first and last parameter values. The output from \maxlik-implemented optimizers is fairly consistent, but methods that call optimizers in other packages, such as BFGS, may output debugging information in a quite different way. The second option, \verb|iterLim=2| stops the algorithm after two iterations. It returns with code 4: iteration limit exceeded. Other sets of handy options are the convergence tolerances. There are three convergence tolerances: \begin{description} \item[tol] This measures the absolute convergence tolerance. Stop if successive function evaluations differ by less than \emph{tol} (default $10^{-8}$). \item[reltol] This is somewhat similar to \emph{tol}, but relative to the function value. Stop if successive function evaluations differ by less than $\mathit{reltol}\cdot (\loglik(\vec{\theta}) + \mathit{reltol})$ (default \verb|sqrt(.Machine[["double.eps"]])|, may be approximately \Sexpr{formatC(sqrt(.Machine[["double.eps"]]), digits=1)} on a modern computer). \item[gradtol] stop if the (Euclidean) norm of the gradient is smaller than this value (default $10^{-6}$). 
\end{description} Default tolerance values are typically good enough, but in certain cases one may want to adjust these. For instance, in case of function values are very large, one may rely only on tolerance, and ignore relative tolerance and gradient tolerance criteria. A simple way to achieve this is to set both \emph{reltol} and \emph{gradtol} to zero. In that case these two conditions are never satisfied and the algorithm stops only when the absolute convergence criterion is fulfilled. For instance, in the previous case we get: <<>>= m <- maxLik(loglikB, start=c(mu=1, sigma=2), method="BHHH", control=list(reltol=0, gradtol=0)) summary(m) @ When comparing the result with that on Page~\pageref{code:bhhh-example} we can see that the optimizer now needs more iterations and it stops with a return code that is related to tolerance, not relative tolerance. Note that BFGS and other optimizers that are based on the \verb|stats::optim| does not report the convergence results in a similar way as BHHH and NR, the algorithms provided by the \maxlik package. Instead of tolerance limits or gradient close to zero message, we hear about ``successful convergence''. Stochastic gradient ascent relies on completely different convergence criteria. See the dedicated vignette ``Stochastic Gradient Ascent in \maxlik''. \section{Advanced usage} \label{sec:advanced-usage} This section describes more advanced and less frequently used aspects of \maxlik. \subsection{Additional arguments to the log-likelihood function} \label{sec:additional-arguments-loglik} \maxlik expects the first argument of log-likelihood function to be the parameter vector. But the function may have more arguments. Those can be passed as additional named arguments to \verb|maxLik| function. For instance, let's change the log-likelihood function in a way that it expects data $\vec{x}$ to be passed as an argument \verb|x|. Now we have to call \maxlik with an additional argument \verb|x=...|: <<>>= loglik <- function(theta, x) { mu <- theta[1] sigma <- theta[2] sum(dnorm(x, mean=mu, sd=sigma, log=TRUE)) } m <- maxLik(loglik, start=c(mu=1, sigma=2), x=x) # named argument 'x' will be passed # to loglik summary(m) @ This approach only works if the argument names do not overlap with \verb|maxLik|'s arguments' names. If that happens, it prints an informative error message. \subsection{Maximizing other functions} \label{sec:maximizing-other-functions} \verb|maxLik| function is basically a wrapper around a number of maximization algorithms, and a set of likelihood-related methods, such as standard errors. However, from time-to-time we need to optimize other functions where inverting the Hessian to compute standard errors is not applicable. In such cases one can call the included optimizers directly, using the form \verb|maxXXX| where \verb|XXX| stands for the name of the method, e.g. \verb|maxNR| for Newton-Rapshon (\verb|method="NR"|) and \verb|maxBFGS| for BFGS. There is also \verb|maxBHHH| although the information equality--based BHHH is not correct if we do not work with log-likelihood functions. The arguments for \verb|maxXXX|-functions are largely similar to those for \maxlik, the first argument is the function, and one also has to supply start values. Let us demonstrate this functionality by optimizing 2-dimensional bell curve, \begin{equation} \label{eq:2d-bell-curve} f(x, y) = \me^{-x^{2} - y^{2}}. 
\end{equation} We code this function and just call \verb|maxBFGS| on it: <<>>= f <- function(theta) { x <- theta[1] y <- theta[2] exp(-x^2 - y^2) # optimum at (0, 0) } m <- maxBFGS(f, start=c(1,1)) # give start value a bit off summary(m) @ Note that the summary output is slightly different: it reports the parameter and gradient value, appropriate for a task that is not likelihood optimization. Behind the scenes, this is because the \verb|maxXXX|-functions return an object of \emph{maxim}-class, not \emph{maxLik}-class. \subsection{Testing condition numbers} \label{sec:testing-condition-numbers} Analytic gradient we demonstrated in Section~\ref{sec:supplying-gradients} helps to avoid numerical problems. But not all problems can or should be solved by analytic gradients. For instance, multicollinearity should be addressed on data or model level. \maxlik provides a helper function, \verb|condiNumbers|, to detect such problems. We demonstrate this by creating a highly multicollinear dataset and estimating a linear regression model. We re-use the regression code from Section~\ref{sec:supplying-gradients} but this time we create multicollinear data in similar scale. <<>>= ## create 3 variables, two independent, third collinear x1 <- rnorm(100) x2 <- rnorm(100) x3 <- x1 + x2 + rnorm(100, sd=1e-6) # highly correlated w/x1, x2 X <- cbind(x1, x2, x3) y <- X %*% c(1, 1, 1) + rnorm(100) m <- maxLik(negSSEA, start=c(x1=0, x2=0, x3=0)) # negSSEA: negative sum of squared errors # with gradient, hessian attribute summary(m) @ As one can see, the model converges but the standard errors are missing (because Hessian is not negative definite). In such case we may learn more about the problem by testing the condition numbers $\kappa$ of either the design matrix $\mat{X}$ or of the Hessian matrix. It is instructive to test not just the whole matrix, but to do it column-by-column, and see where the number suddenly jumps. This hints which variable does not play nicely with the rest of data. \verb|condiNumber| provides such functionality. First, we test the condition number of the design matrix: <<>>= condiNumber(X) @ We can see that when only including $\vec{x}_{1}$ and $\vec{x}_{2}$ into the design, the condition number is 1.35, far from any singularity-related problems. However, adding $\vec{x}_{3}$ to the matrix causes $\kappa$ to jump to over 5 millions. This suggests that $\vec{x}_{3}$ is highly collinear with $\vec{x}_{1}$ and $\vec{x}_{2}$. In this example the problem is obvious as this is how we created $\vec{x}_{3}$, in real applications one often needs further analysis. For instance, the problem may be in categorical values that contain too few observations or complex fixed effects that turn out to be perfectly multicollinear. A good suggestion is to estimate a linear regression model where one explains the offending variable using all the previous variables. In this example we might estimate \verb|lm(x3 ~ x1 + x2)| and see which variables help to explain $\vec{x}_{3}$ perfectly. Sometimes the design matrix is fine but the problem arises because data and model do not match. In that case it may be more informative to test condition number of Hessian matrix instead. The example below creates a linearly separated set of observations and estimates this with logistic regression. 
As a refresher, the log-likelihood of logistic regression is \begin{equation} \label{eq:logistic-loglik} \loglik(\beta) = \sum_{i: y_{i} = 1} \log\Lambda(\vec{x}_{i}^{\transpose} \vec{\beta}) + \sum_{i: y_{i} = 0} \log\Lambda(-\vec{x}_{i}^{\transpose} \vec{\beta}) \end{equation} where $\Lambda(x) = 1/(1 + \exp(-x))$ is the logistic cumulative distribution function. We implement it using \R function \verb|plogis| <<>>= x1 <- rnorm(100) x2 <- rnorm(100) x3 <- rnorm(100) X <- cbind(x1, x2, x3) y <- X %*% c(1, 1, 1) > 0 # y values 1/0 linearly separated loglik <- function(beta) { link <- X %*% beta sum(ifelse(y > 0, plogis(link, log=TRUE), plogis(-link, log=TRUE))) } m <- maxLik(loglik, start=c(x1=0, x2=0, x3=0)) summary(m) @ Not surprisingly, all coefficients tend to infinity and inference is problematic. In this case the design matrix does not show any issues: <<>>= condiNumber(X) @ But the Hessian reveals that including $\vec{x}_{3}$ in the model is still problematic: <<>>= condiNumber(hessian(m)) @ Now the problem is not multicollinearity but the fact that $\vec{x}_{3}$ makes the data linearly separable. In such cases we may want to adjust our model or estimation strategy. \subsection{Fixed parameters and constrained optimization} \label{sec:fixed-parameters} \maxlik supports three types of constrains. The simplest case just keeps certain parameters' values fixed. The other two, general linear equality and inequality constraints are somewhat more complex. Occasionally we want to treat one of the model parameters as constant. This can be achieved in a very simple manner, just through the argument \verb|fixed|. It must be an index vector, either numeric, such as \verb|c(2,4)|, logical as \verb|c(FALSE, TRUE, FALSE, TRUE)|, or character as \verb|c("beta2", "beta4")| given \verb|start| is a named vector. We revisit the first example of this vignette and estimate the normal distribution parameters again. However, this time we fix $\sigma = 1$: <<>>= x <- rnorm(100) loglik <- function(theta) { mu <- theta[1] sigma <- theta[2] sum(dnorm(x, mean=mu, sd=sigma, log=TRUE)) } m <- maxLik(loglik, start=c(mu=1, sigma=1), fixed="sigma") # fix the component named 'sigma' summary(m) @ The result has $\sigma$ exactly equal to $1$, it's standard error $0$, and $t$ value undefined. The fixed components are ignored when computing gradients and Hessian in the optimizer, essentially reducing the problem from 2-dimensional to 1-dimensional. Hence the inference for $\mu$ is still correct. Next, we demonstrate equality constraints. We take the two-dimensional function we used in Section~\ref{sec:maximizing-other-functions} and add constraints $x + y = 1$. The constraint must be described in matrix form $\mat{A}\,\vec{\theta} + \vec{B} = 0$ where $\vec{\theta}$ is the parameter vector and matrix $\mat{A}$ and vector $\vec{B}$ describe the constraints. In this case we can write \begin{equation} \label{eq:equality-constraints} \begin{pmatrix} 1 & 1 \end{pmatrix} \cdot \begin{pmatrix} x \\ y \end{pmatrix} + \begin{pmatrix} -1 \end{pmatrix} = 0, \end{equation} i.e. $\mat{A} = (1 \; 1)$ and $\vec{B} = -1$. These values must be supplied to the optimizer argument \verb|constraints|. This is a list with components names \verb|eqA| and \verb|eqB| for $\mat{A}$ and $\vec{B}$ accordingly. We do not demonstrate this with a likelihood example as no corrections to the Hessian matrix is done and hence the standard errors are incorrect. 
But if you are not interested in likelihood-based inference, it works well:
<<>>=
f <- function(theta) {
   x <- theta[1]
   y <- theta[2]
   exp(-x^2 - y^2)  # optimum at (0, 0)
}
A <- matrix(c(1, 1), ncol=2)
B <- -1
m <- maxNR(f, start=c(1,1), constraints=list(eqA=A, eqB=B))
summary(m)
@
The problem is solved using the sequential unconstrained maximization technique (SUMT). The idea is to add a small penalty for the constraint violation, and to slowly increase the penalty until violations are prohibitively expensive. As the example indicates, the solution is extremely close to the constraint line.
The usage of inequality constraints is fairly similar. We have to code the inequalities as $\mat{A}\,\vec{\theta} + \vec{B} > 0$ where the matrix $\mat{A}$ and the vector $\vec{B}$ are defined as above. Let us optimize the function over the region $x + y > 1$. In matrix form this will be
\begin{equation}
  \label{eq:inequality-constraints-1}
  \begin{pmatrix} 1 & 1 \end{pmatrix} \cdot \begin{pmatrix} x \\ y \end{pmatrix} + \begin{pmatrix} -1 \end{pmatrix} > 0.
\end{equation}
Supplying the constraints is otherwise similar to the equality constraints, just the constraints-list components must be called \verb|ineqA| and \verb|ineqB|. As \verb|maxNR| does not support inequality constraints, we use \verb|maxBFGS| instead. The corresponding code is
<<>>=
A <- matrix(c(1, 1), ncol=2)
B <- -1
m <- maxBFGS(f, start=c(1,1), constraints=list(ineqA=A, ineqB=B))
summary(m)
@
Not surprisingly, the result is exactly the same as in the case of equality constraints: the optimum is found at the boundary line, the same line we specified when demonstrating the equality constraints.
One can supply more than one set of constraints; in that case all of them must be satisfied at the same time. For instance, let's add another condition, $x - y > 1$. This should be coded as another row of $\mat{A}$ and another component of $\vec{B}$; in matrix form the constraints are now
\begin{equation}
  \label{eq:inequality-constraints-2}
  \begin{pmatrix} 1 & 1\\ 1 & -1 \end{pmatrix} \cdot \begin{pmatrix} x \\ y \end{pmatrix} + \begin{pmatrix} -1 \\ -1 \end{pmatrix} > \begin{pmatrix} 0 \\ 0 \end{pmatrix}
\end{equation}
where ``>'' must be understood as an element-wise operation. We also have to ensure that the initial value satisfies the constraints, so we choose $\vec{\theta}_{0} = (2, 0)$. The corresponding code is
<<>>=
A <- matrix(c(1, 1, 1, -1), ncol=2)
B <- c(-1, -1)
m <- maxBFGS(f, start=c(2, 0), constraints=list(ineqA=A, ineqB=B))
summary(m)
@
The solution is $(1, 0)$, the closest point to the origin where both constraints are satisfied. \bigskip This example concludes the \maxlik usage introduction. For more information, consult the fairly extensive documentation, and the other vignettes.
% \bibliographystyle{apecon} % \bibliography{maxlik} \end{document} maxLik/vignettes/intro-to-maximum-likelihood.Rnw0000644000175100001440000010672614077525067021615 0ustar hornikusers\documentclass[a4paper]{article} \usepackage{graphics} \usepackage{amsmath} \usepackage{amssymb} \usepackage[font={small,sl}]{caption} \usepackage[inline]{enumitem} \usepackage{indentfirst} \usepackage[utf8]{inputenc} \usepackage{natbib} \usepackage{siunitx} \usepackage{xspace} % \SweaveUTF8 \newcommand{\COii}{\ensuremath{\mathit{CO}_{2}}\xspace} \newcommand*{\mat}[1]{\mathsf{#1}} \newcommand{\likelihood}{\mathcal{L}}% likelihood \newcommand{\loglik}{\ell}% log likelihood \newcommand{\maxlik}{\texttt{maxLik}\xspace} \newcommand{\me}{\mathrm{e}} % Konstant e=2,71828 \newcommand{\R}{\texttt{R}\xspace} \newcommand*{\transpose}{^{\mkern-1.5mu\mathsf{T}}} \renewcommand*{\vec}[1]{\boldsymbol{#1}} % \VignetteIndexEntry{Introduction: what is maximum likelihood} \begin{document} <>= options(keep.source = TRUE, width = 60, try.outFile=stdout() # make try to produce error messages ) set.seed(34) @ \title{Getting started with maximum likelihood and \texttt{maxLik}} \author{Ott Toomet} \maketitle \section{Introduction} This vignette is intended for readers who are unfamiliar with the concept of likelihood, and for those who want a quick intuitive brush-up. The potential target group includes advanced undergraduate students in technical fields, such as statistics or economics, graduate students in social sciences and engineering who are devising their own estimators, and researchers and practitioners who have little previous experience with ML. However, one should have basic knowledge of \R language. If you are familiar enough with the concept of likelihood and maximum likelihood, consult instead the other vignette ``Maximum Likelihood Estimation with \maxlik''. Maximum Likelihood (ML) in its core is maximizing the \emph{likelihood} over the parameters of interest. We start with an example of a random experiment that produces discrete values to explain what is likelihood and how it is related to probability. The following sections cover continuous values, multiple parameters in vector form, and we conclude with a linear regression example. The final section discusses the basics of non-linear optimization. The examples are supplemented with very simple code and assume little background besides basic statistics and basic \R knowledge. \section{Discrete Random Values} \label{sec:discrete-random-variables} We start with a discrete case. ``Discrete'' refers to random experiments or phenomena with only limited number of possible outcomes, and hence we can compute and tabulate every single outcome separately. Imagine you are flipping a fair coin. What are the possible outcomes and what are the related probabilities? Obviously, in case of a coin there are only two outcomes, heads $H$ and tails $T$. If the coin is fair, both of these will have probability of exactly 0.5. Such random experiment is called \emph{Bernoulli process}. More specifically, this is \emph{Bernoulli(0.5)} process as for the fair coin the probability of ``success'' is 0.5 (below we consider success to be heads, but you can choose tails as well). If the coin is not fair, we denote the corresponding process Bernoulli($p$), where $p$ is the probability of heads. Now let us toss the coin two times. What is the probability that we end up with one heads and one tails? 
As the coin flips are independent,\footnote{Events are independent when outcome of one event does not carry information about the outcome of the other event. Here the result of the second toss is not related to the outcome of the first toss.} we can just multiply the probabilities: $0.5$ for a single heads and $0.5$ for a single tails equals $0.25$ when multiplied. However, this is not the whole story--there are two ways to get one heads and one tails, either $H$ first and $T$ thereafter or $T$ first and $H$ thereafter. Both of these events are equally likely, so the final answer will be 0.5. But now imagine we do not know if the coin is fair. Maybe we are not tossing a coin but an object of a complex shape. We can still label one side as ``heads'' and the other as ``tails''. But how can we tell what is the probability of heads? Let's start by denoting this probability with $p$. Hence the probability of tails will be $1-p$, and the probability to receive one heads, one tails when we toss the object two times will be $2 p (1-p)$: $p$ for one heads, $1-p$ for one tails, and ``2'' takes into account the fact that we can get this outcome in two different orders. This probability is essentially likelihood. We denote likelihood with $\likelihood(p)$, stressing that it depends on the unknown probability $p$. So in this example we have \begin{equation} \label{eq:2-coin-likelihood} \likelihood(p) = 2 \, p \, (1-p). \end{equation} $p$ is the \emph{model parameter}, the unknown number we want to compute with the help of likelihood. Let's repeat here what did we do above: \begin{enumerate} \item We observe data. In this example data contains the counts: one heads, one tails. \item We model the coin toss experiment, the data generating process, as Bernoulli($p$) random variable. $p$, the probability of heads, is the model parameter we want to calculate. Bernoulli process has only a single parameter, but more complex processes may contain many more. \item Thereafter we compute the probability to observe the data based on the model. Here it is equation~\eqref{eq:2-coin-likelihood}. This is why we need a probability model. As the model contains unknown parameters, the probability will also contain parameters. \item And finally we just call this probability \emph{likelihood} $\likelihood(p)$. We write it as a function of the parameter to stress that the parameter is what we are interested in. Likelihood also depends on data (the probability will look different for e.g. two heads instead of a head and a tail) but we typically do not reflect this in notation. \end{enumerate} The next task is to use this likelihood function to \emph{estimate} the parameter, to use data to find the best possible parameter value. \emph{Maximum likelihood} (ML) method finds such parameter value that maximizes the likelihood function. It can be shown that such parameter value has a number of desirable properties, in particular it will become increasingly similar to the ``true value'' on an increasingly large dataset (given that our probability model is correct).\footnote{This property is formally referred to as \emph{consistency}. ML is a consistent estimator.} These desirable properties, and relative simplicity of the method, have made ML one of the most widely used statistical estimators. Let us generalize the example we did above for an arbitrary number of coin flips. Assume the coin is of unknown ``fairness'' where we just denote the probability to receive heads with $p$. 
Further, assume that out of $N$ trials, $N_{H}$ trials were heads and $N_{T}$ trials were tails. The probability of this occurring is
\begin{equation}
  \label{eq:general-cointoss-probability}
  \binom{N}{N_{H}} \, p^{N_{H}} \, (1 - p)^{N_{T}}
\end{equation}
$p^{N_{H}}$ is the probability to get $N_{H}$ heads, $(1 - p)^{N_{T}}$ is the probability to get $N_{T}$ tails, and the binomial coefficient $\displaystyle\binom{N}{N_{H}} = \displaystyle\frac{N!}{N_{H}! (N - N_{H})!}$ takes into account that there are many ways in which heads and tails can turn up while still resulting in $N_{H}$ heads and $N_{T}$ tails. In the previous example $N=2$, $N_{H} = 1$ and there were just two possible combinations as $\displaystyle\binom{2}{1} = 2$. The probability depends on both the parameter $p$ and data--the corresponding counts $N_{H}$ and $N_{T}$. Equation~\eqref{eq:general-cointoss-probability} is essentially the likelihood--the probability to observe the data. We are interested in how it depends on $p$ and stress this by writing $p$ in the first position, followed by a semicolon and the data, as we care less about the dependency on data:
\begin{equation}
  \label{eq:general-cointoss-likelihood}
  \likelihood(p; N_{H}, N_{T}) = \binom{N}{N_{H}} \, p^{N_{H}} \, (1 - p)^{N_{T}}
\end{equation}
Technically, it is easier to work with log-likelihood instead of likelihood (as log is a monotonic function, the maximum of the likelihood and the maximum of the log-likelihood occur at the same parameter value). We denote the log-likelihood by $\loglik$ and write
\begin{equation}
  \label{eq:general-cointoss-loglik}
  \loglik(p; N_{H}, N_{T}) = \log\likelihood(p; N_{H}, N_{T}) = \log \binom{N}{N_{H}} + N_{H} \log p + N_{T} \log (1 - p).
\end{equation}
The ML estimator of $p$ is the value that maximizes this expression. Fortunately, in this case the binomial coefficient $\displaystyle\binom{N}{N_{H}}$ depends only on the data but not on $p$. Intuitively, $p$ determines the probability of various combinations of heads and tails, but \emph{what kind of combinations are possible} does not depend on $p$. Hence we can ignore the first term on the right hand side of~\eqref{eq:general-cointoss-loglik} when maximizing the log-likelihood. Such an approach is very common in practice: terms that are invariant with respect to the parameters are often ignored. Hence we can re-define the log-likelihood as
\begin{equation}
  \label{eq:general-cointoss-partial-loglik}
  \loglik(p; N_{H}, N_{T}) = N_{H} \log p + N_{T} \log (1 - p).
\end{equation}
It is easy to check that the solution, the value of $p$ that maximizes the log-likelihood~\eqref{eq:general-cointoss-partial-loglik}, is\footnote{Just differentiate $\loglik(p)$ with respect to $p$, set the result to zero, and isolate $p$.}
\begin{equation}
  \label{eq:general-cointoss-solution}
  p^{*} = \frac{N_{H}}{N_{H} + N_{T}} = \frac{N_{H}}{N}.
\end{equation}
This should be a surprise to no-one: the intuitive ``fairness'' of the coin is just the average percentage of heads we get. Now it is time to try this out on the computer with \texttt{maxLik}. Let's assume we toss a coin and receive $N_{H} = 3$ heads and $N_{T} = 7$ tails:
<<>>=
NH <- 3
NT <- 7
@
Next, we have to define the log-likelihood function. It has to be a function of the parameter, and the parameter must be its first argument. We can access data in different ways, for instance through the \R workspace environment. So we can write the log-likelihood as
<<>>=
loglik <- function(p) {
   NH*log(p) + NT*log(1-p)
}
@
And finally, we can use the \texttt{maxLik} function to maximize the likelihood.
In its simplest form, \texttt{maxLik} requires two arguments: the log-likelihood function, and the start value for the iterative algorithm (see Section~\ref{sec:non-linear-optimization}, and the documentation and vignette \textsl{Maximum Likelihood Estimation with \maxlik} for more detailed explanations). The start value must be a valid parameter value (the loglik function must not give errors when called with the start value). We can choose $p_{0} = 0.5$ as the initial value, and let the algorithm find the best possible $p$ from there:
<<>>=
library(maxLik)
m <- maxLik(loglik, start=0.5)
summary(m)
@
As expected, the best bet for $p$ is 0.3. Our intuitive approach--the percentage of heads in the experiment--also turns out to be the ML estimate. Next, we look at an example with continuous outcomes.
\section{Continuous case: probability density and likelihood}
\label{sec:continuous-outcomes}
In the example above we looked at a discrete random process, a case where there were only a small number of distinct possibilities (heads and tails). Discrete cases are easy to understand because we can actually compute the respective probabilities, such as the probability to receive one heads and one tails in our experiment. Now we consider continuous random variables where the outcome can be any number in a certain interval. Unfortunately, in the continuous case we cannot compute the probability of any particular outcome. Or more precisely--we can do it, but the answer is always 0. This may sound a little counter-intuitive but perhaps the following example helps. If you ask the computer to generate a single random number between 0 and 1, you may receive \Sexpr{x <- runif(1); x}. What is the probability of getting the same number again? You can try: you will get close, but you won't get exactly the same number.\footnote{As computers operate with finite precision, the actual chances to repeat any particular random number are positive, although small. The exact answer depends on the numeric precision and the quality of the random number generator.} But although the probability of receiving this number is zero, we somehow still produced it in the first place. Clearly, zero probability does not mean the number was impossible. However, if we want to receive a negative number from the same random number generator, it will be impossible (because we chose a generator that only produces numbers between 0 and 1). So probability-0 events may be possible and they may also be impossible. And to make matters worse, they may also be more likely or less likely. For instance, in the case of standard normal random numbers (these numbers are distributed according to the ``bell curve'') the values near $0$ are much more likely than values around $-2$, despite the probability of receiving any particular number still being 0 (see Figure~\ref{fig:standard-normal-intervals}). The solution is to look not at the individual numbers but at a narrow interval near these numbers. Consider the number of interest $x_{1}$, and compute the probability that the random outcome $X$ falls into the narrow interval of width $\delta$, $[x_{1} - \delta/2,\, x_{1} + \delta/2]$, around this number (Figure~\ref{fig:standard-normal-intervals}). Obviously, the smaller the width $\delta$, the less likely it is that $X$ falls into this narrow interval.
But it turns out that when we divide the probability by the width, we get a stable value at the limit which we denote by $f(x_{1})$: \begin{equation} \label{eq:probability-density} f(x_{1}) = \lim_{\delta\to0} \frac{\Pr(X \in [x_{1} - \delta/2,\, x_{1} + \delta/2])}{\delta}. \end{equation} In the example on the Figure the values around $x_{1}$ are less likely than around $x_{2}$ and hence $f(x_{1}) < f(x_{2})$. The result, $f(x)$, is called \emph{probability density function}, often abbreviated as \emph{pdf}. In case of continuous random variables, we have to work with pdf-s instead of probabilities. \begin{figure}[ht] \centering \includegraphics{probability-density.pdf} \caption{Standard normal probability density (thick black curve). While $\Pr(X = x_{1}) = 0$, i.e. the probability to receive a random number exactly equal to $x_{1}$ is 0, the probability to receive a random number in the narrow interval of width $\delta$ around $x_{1}$ is positive. In this example, the probability to get a random number in the interval around $x_{2}$ is four times larger than for the interval around $x_{1}$. } \label{fig:standard-normal-intervals} \end{figure} Consider the following somewhat trivial example: we have sampled two independent datapoints $x_{1}$ and $x_{2}$ from normal distribution with variance 1 and mean (expected value) equal to $\mu$. Say, $x_{1} = \Sexpr{x1 <- rnorm(1); round(x1, 3)}$ and $x_{2} = \Sexpr{x1 <- rnorm(1); round(x1, 3)}$. Assume we do not know $\mu$ and use ML to estimate it. We can proceed in a similar steps as what we did for the discrete case: \begin{enumerate*}[label=\roman*)] \item observe data, in this case $x_{1}$ and $x_{2}$; \item set up the probability model; \item use the model to compute probability to observe the data; \item write the probability as $\loglik(\mu)$, log-likelihood function of the parameter $\mu$; \item and finally, find $\mu^{*}$, the $\mu$ value that maximizes the corresponding log-likelihood. \end{enumerate*} This will be our best estimate for the true mean. As we already have our data points $x_{1}$ and $x_{2}$, our next step is the probability model. The probability density function (pdf) for normal distribution with mean $\mu$ and variance 1 is \begin{equation} \label{eq:standard-normal-pdf} f(x; \mu) = \frac{1}{\sqrt{2\pi}} \, \me^{ \displaystyle -\frac{1}{2} (x - \mu)^{2} } \end{equation} (This is the thick curve in Figure~\ref{fig:standard-normal-intervals}). We write it as $f(x; \mu)$ as pdf is usually written as a function of data. But as our primary interest is $\mu$, we also add this as an argument. Now we use this pdf and~\eqref{eq:probability-density} to find the probability that we observe a datapoint in the narrow interval around $x$. Here it is just $f(x; \mu)\cdot \delta$. As $x_{1}$ and $x_{2}$ are independent, we can simply multiply the corresponding probabilities to find the combined probability that both random numbers are near their corresponding values: \begin{multline} \label{eq:two-normal-probability-likelihood} \Pr{\Big(X_{1} \in [x_{1} - \delta/2, x_{1} + \delta/2] \quad\text{and}\quad X_{2} \in [x_{2} - \delta/2, x_{2} + \delta/2]\Big)} =\\[2ex]= \underbrace{ \frac{1}{\sqrt{2\pi}} \, \me^{ \displaystyle -\frac{1}{2} (x_{1} - \mu)^{2} } \cdot\delta\ }_{ \text{First random value near $x_{1}$} } \times \underbrace{ \frac{1}{\sqrt{2\pi}} \, \me^{ \displaystyle -\frac{1}{2} (x_{2} - \mu)^{2} } \cdot\delta }_{ \text{Second random value near $x_{2}$} } \equiv\\[2ex]\equiv \tilde\likelihood(\mu; x_{1}, x_{2}). 
\end{multline} The interval width $\delta$ must be small for the equation to hold precisely. We denote this probability with $\tilde\likelihood$ to stress that it is essentially the likelihood, just not written in the way it is usually done. As in the coin-toss example above, we write it as a function of the parameter $\mu$, and put data $x_{1}$ and $x_{2}$ after semicolon. Now we can estimate $\mu$ by finding such a value $\mu^{*}$ that maximizes the expression~\eqref{eq:two-normal-probability-likelihood}. But note that $\delta$ plays no role in maximizing the likelihood. It is just a multiplicative factor, and it cannot be negative because it is a width. So for our maximization problem we can just ignore it. This is what is normally done when working with continuous random variables. Hence we write the likelihood as \begin{equation} \label{eq:two-normal-likelihood} \likelihood(\mu; x_{1}, x_{2}) = \frac{1}{\sqrt{2\pi}} \, \me^{ \displaystyle -\frac{1}{2} (x_{1} - \mu)^{2} } \times \frac{1}{\sqrt{2\pi}} \, \me^{ \displaystyle -\frac{1}{2} (x_{2} - \mu)^{2} }. \end{equation} We denote this by $\likelihood$ instead of $\tilde\likelihood$ to stress that this is how likelihood function for continuous random variables is usually written. Exactly as in the discrete case, it is better to use log-likelihood instead of likelihood to actually compute the maximum. From~\eqref{eq:two-normal-likelihood} we get log-likelihood as \begin{multline} \label{eq:two-standard-normal-loglik} \loglik(\mu; x_{1}, x_{2}) = -\log{\sqrt{2\pi}} -\frac{1}{2} (x_{1} - \mu)^{2} + (- \log{\sqrt{2\pi}}) -\frac{1}{2} (x_{2} - \mu)^{2} =\\[2ex]= - 2\log{\sqrt{2\pi}} - \frac{1}{2} \sum_{i=1}^{2} (x_{i} - \mu)^{2}. \end{multline} The first term, $- 2\log{\sqrt{2\pi}}$, is just an additive constant and plays no role in the actual maximization but it is typically still included when defining the likelihood function.\footnote{Additive or multiplicative constants do not play any role for optimization, but they are important when comparing different log-likelihood values. This is often needed for likelihood-based statistical tests. } One can easily check by differentiating the log-likelihood function that the maximum is achieved at $\mu^{*} = \frac{1}{2}(x_{1} + x_{2})$. It is not surprising, our intuitive understanding of mean value carries immediately over to the normal distribution context. Now it is time to demonstrate these results with \texttt{maxLik} package. First, create our ``data'', just two normally distributed random numbers: <<>>= x1 <- rnorm(1) # centered around 0 x2 <- rnorm(1) x1 x2 @ and define the log-likelihood function. We include all the terms as in the final version of~\eqref{eq:two-standard-normal-loglik}: <<>>= loglik <- function(mu) { -2*log(sqrt(2*pi)) - 0.5*((x1 - mu)^2 + (x2 - mu)^2) } @ We also need the parameter start value--we can pick $0$. And we use \texttt{maxLik} to find the best $\mu$: <<>>= m <- maxLik(loglik, start=0) summary(m) @ The answer is the same as sample mean: <<>>= (x1 + x2)/2 @ \section{Vector arguments} \label{sec:vector-arguments} The previous example is instructive but it does have very few practical applications. The problem is that we wrote the probability model as normal density with unknown mean $\mu$ but standard deviation $\sigma$ equal to one. However, in practice we hardly ever know that we are dealing with unit standard deviation. More likely both mean and standard deviation are unknown. So we have to incorporate the unknown $\sigma$ into the model. 
The more general normal pdf with standard deviation $\sigma$ is \begin{equation} \label{eq:normal-pdf} f(x; \mu, \sigma) = \frac{1}{\sqrt{2\pi}} \frac{1}{\sigma} \, \me^{ -\displaystyle\frac{1}{2} \frac{(x - \mu)^{2}}{\sigma^{2}} }. \end{equation} Similar reasoning as what we did above will give the log-likelihood \begin{equation} \label{eq:two-normal-loglik} \loglik(\mu, \sigma; x_{1}, x_{2}) = - 2\log{\sqrt{2\pi}} - 2\log \sigma - \frac{1}{2} \sum_{i=1}^{2} \frac{(x_{i} - \mu)^{2}}{\sigma^{2}}. \end{equation} We write the log-likelihood as function of both parameters, $\mu$ and $\sigma$; the semicolon that separates data $x_{1}$ and $x_{2}$ shows that though the log-likelihood depends on data too, we are not much interested in that dependency for now. This formula immediately extends to the case of $N$ datapoints as \begin{equation} \label{eq:normal-loglik} \loglik(\mu, \sigma) = - N\log{\sqrt{2\pi}} - N\log \sigma - \frac{1}{2} \sum_{i=1}^{N} \frac{(x_{i} - \mu)^{2}}{\sigma^{2}} \end{equation} where we have dropped the dependency on data in the notation. In this case we can actually do the optimization analytically, and derive the well-known intuitive results: the best estimator for mean $\mu$ is the sample average, and the best estimator for $\sigma^{2}$ is the sample variance. However, in general the expression cannot be solved analytically. We have to use numeric optimization to search for the best $\mu$ and $\sigma$ combination. The common multi-dimensional optimizers rely on linear algebra and expect all the parameters submitted as a single vector. So we can write the log-likelihood as \begin{equation} \label{eq:normal-loglik-vector} \loglik(\vec{\theta}) \quad\text{where}\quad \vec{\theta} = (\mu, \sigma). \end{equation} Here we denote both parameters $\mu$ and $\sigma$ as components of a single parameter vector $\vec{\theta}$. (Traditionally vectors are denoted by bold symbols.) We have also dropped dependency on data in notation, but remember that in practical applications log-likelihood always depends on data. This notation can be converted to computer code almost verbatim, just remember to extract the parameters $\mu$ and $\sigma$ from $\vec{\theta}$ in the log-likelihood function. Let us illustrate this using the \emph{CO2} dataset (in package \emph{datasets}). It describes \COii uptake (\si{\micro\mol\per\meter\squared\sec}, variable \emph{uptake}) by different grasses in various conditions. Let us start by plotting the histogram of uptake: <>= data(CO2) hist(CO2$uptake) @ Let us model the uptake as a normal random variable with expected value $\mu$ and standard deviation $\sigma$. We code~\eqref{eq:normal-loglik} while keeping both parameters in a single vector as in~\eqref{eq:normal-loglik-vector}: <<>>= loglik <- function(theta) { mu <- theta[1] sigma <- theta[2] N <- nrow(CO2) -N*log(sqrt(2*pi)) - N*log(sigma) - 0.5*sum((CO2$uptake - mu)^2/sigma^2) } @ The function is similar to the function \texttt{loglik} we used in Section~\ref{sec:continuous-outcomes}. There are just two main differences: \begin{itemize} \item both arguments, $\mu$ and $\sigma$ are passed as components of $\vec{\theta}$, and hence the function starts by unpacking the values. \item instead of using variables \texttt{x1} and \texttt{x2}, we now extract data directly from the data frame. \end{itemize} Besides these two differences, the formula now also includes $\sigma$ and sums over all observations, not just over two observations. 
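Before maximizing, it can be useful to confirm that the function indeed returns a single finite value for a reasonable trial parameter vector. As a quick sanity check--a minimal sketch, using the sample mean and standard deviation of \emph{uptake} as trial values--we can simply call the function directly:
<<>>=
## sanity check: the log-likelihood at a reasonable trial value
## should be a single finite number
loglik(c(mu=mean(CO2$uptake), sigma=sd(CO2$uptake)))
@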
As our parameter vector now contains two components, the start vector must also be of length two. Based on the figure we guess that a good starting value might be $\mu=30$ and $\sigma=10$: <<>>= m <- maxLik(loglik, start=c(mu=30, sigma=10)) summary(m) @ Indeed, our guess was close. \section{Final Example: Linear Regression} \label{sec:linear-regression} Now we have the main tools in place to extend the example above to a real statistical model. Let us build the previous example into linear regression. We describe \COii uptake (variable \emph{uptake}) by \COii concentration in air (variable \emph{conc}). We can write the corresponding regression model as \begin{equation} \label{eq:co2-regression} \mathit{uptake}_{i} = \beta_{0} + \beta_{1} \cdot \mathit{conc}_{i} + \epsilon_{i}. \end{equation} In order to turn this regression model into a ML problem, we need a probability model. Assume that the disturbance term $\epsilon$ is normally distributed with mean 0 and (unknown) variance $\sigma^{2}$ (this is a standard assumption in linear regression). Now we can follow~\eqref{eq:two-normal-loglik} and write log of pdf for a single observation as \begin{equation} \label{eq:co2-epsilon-loglik} \loglik(\sigma; \epsilon_{i}) = - \log{\sqrt{2\pi}} - \log \sigma - \frac{1}{2} \frac{\epsilon_{i}^{2}}{\sigma^{2}}. \end{equation} Here we have replaced $x_{i}$ by the random outcome $\epsilon_{i}$. As the expected value $\mu=0$ by assumption, we do not include $\mu$ in~\eqref{eq:co2-epsilon-loglik} and hence we drop it also from the argument list of $\loglik$. We do not know $\epsilon_{i}$ but we can express it using linear regression model~\eqref{eq:co2-regression}: \begin{equation} \label{eq:co2-epsilon} \epsilon_{i} = \mathit{uptake}_{i} - \beta_{0} - \beta_{1} \cdot \mathit{conc}_{i}. \end{equation} This expression depends on two additional unknown parameters, $\beta_{0}$ and $\beta_{1}$. These are the linear regression coefficients we want to find. Now we plug this into~\eqref{eq:co2-epsilon-loglik}: \begin{multline} \label{eq:co2-single-loglik} \loglik(\beta_{0}, \beta_{1}, \sigma; \mathit{uptake}_{i}, \mathit{conc}_{i}) =\\= - \log{\sqrt{2\pi}} - \log \sigma - \frac{1}{2} \frac{( \mathit{uptake}_{i} - \beta_{0} - \beta_{1} \cdot \mathit{conc}_{i} )^{2}}{\sigma^{2}}. \end{multline} We have designed log-likelihood formula for a single linear regression observation. It depends on three parameters, $\beta_{0}$, $\beta_{1}$ and $\sigma$. For $N$ observations we have \begin{multline} \label{eq:co2-loglik} \loglik(\beta_{0}, \beta_{1}, \sigma; \vec{\mathit{uptake}}, \vec{\mathit{conc}}) =\\= - N\log{\sqrt{2\pi}} - N\log \sigma - \frac{1}{2} \sum_{i=1}^{N} \frac{( \mathit{uptake}_{i} - \beta_{0} - \beta_{1} \cdot \mathit{conc}_{i})^{2}}{\sigma^{2}} \end{multline} where vectors $\vec{\mathit{uptake}}$ and $\vec{\mathit{conc}}$ contain the data values for all the observations. This is a fully specified log-likelihood function that we can use for optimization. Let us repeat what we have done: \begin{itemize} \item We wrote log-likelihood as a function of parameters $\beta_{0}$, $\beta_{1}$ and $\sigma$. Note that in case of linear regression we typically do not call $\sigma$ a parameter. But it is still a parameter, although one we usually do not care much about (sometimes called ``nuisance parameter''). \item The likelihood function also depends on data, here the vectors $\vec{\mathit{uptake}}$ and $\vec{\mathit{conc}}$. 
\item The function definition itself is just sum of log-likelihood contributions of individual normal disturbance terms, but as we do not observe the disturbance terms, we express those through the regression equation in~\eqref{eq:co2-single-loglik}. \end{itemize} Finally, we combine the three parameters into a single vector $\vec{\theta}$, suppress dependency on data in the notation, and write \begin{equation} \label{eq:co2-loglik-simplified} \loglik(\vec{\theta}) = - N\log{\sqrt{2\pi}} - N\log \sigma - \frac{1}{2} \sum_{i=1}^{N} \frac{( \mathit{uptake}_{i} - \beta_{0} - \beta_{1} \cdot \mathit{conc}_{i})^{2}}{\sigma^{2}}. \end{equation} This is the definition we can easily code and estimate. We guess start values $\beta_{0} = 30$ (close to the mean), $\beta_{1} = 0$ (uptake does not depend on concentration) and $\sigma=10$ (close to sample standard deviation). We can convert~\eqref{eq:co2-loglik-simplified} into code almost verbatim, below we choose to compute the expected uptake $\mu$ as an auxiliary variable: <<>>= loglik <- function(theta) { beta0 <- theta[1] beta1 <- theta[2] sigma <- theta[3] N <- nrow(CO2) ## compute new mu based on beta1, beta2 mu <- beta0 + beta1*CO2$conc ## use this mu in a similar fashion as previously -N*log(sqrt(2*pi)) - N*log(sigma) - 0.5*sum((CO2$uptake - mu)^2/sigma^2) } m <- maxLik(loglik, start=c(beta0=30, beta1=0, sigma=10)) summary(m) @ These are the linear regression estimates: $\beta_{0} = \Sexpr{round(coef(m)["beta0"], 3)}$ and $\beta_{1} = \Sexpr{round(coef(m)["beta1"], 3)}$. Note that \maxlik output also provides standard errors, $z$-values and $p$-values, hence we see that the results are highly statistically significant. One can check that a linear regression model will give similar results: <<>>= summary(lm(uptake ~ conc, data=CO2)) @ Indeed, the results are close although not identical. \section{Non-linear optimization} \label{sec:non-linear-optimization} Finally, we discuss the magic inside \texttt{maxLik} that finds the optimal parameter values. Although not necessary in everyday work, this knowledge helps to understand the issues and potential solutions when doing non-linear optimization. So how does the optimization work? Consider the example in Section~\ref{sec:vector-arguments} where we computed the normal distribution parameters for \COii intake. There are two parameters, $\mu$ and $\sigma$, and \maxlik returns the combination that gives the largest possible log-likelihood value. We can visualize the task by plotting the log-likelihood value for different combinations of $\mu$, $\sigma$ (Figure~\ref{fig:mu-sigma-plot}). 
\begin{figure}[ht] \centering <>= loglik <- function(theta) { mu <- theta[1] sigma <- theta[2] N <- nrow(CO2) -N*log(sqrt(2*pi)) - N*log(sigma) - 0.5*sum((CO2$uptake - mu)^2/sigma^2) } m <- maxLik(loglik, start=c(mu=30, sigma=10)) params <- coef(m) np <- 33 # number of points mu <- seq(6, 36, length.out=np) sigma <- seq(5, 50, length.out=np) X <- as.matrix(expand.grid(mu=mu, sigma=sigma)) ll <- matrix(apply(X, 1, loglik), nrow=np) levels <- quantile(ll, c(0.05, 0.4, 0.6, 0.8, 0.9, 0.97)) # where to draw the contours colors <- colorRampPalette(c("Blue", "White"))(30) par(mar=c(0,0,0,0), mgp=2:0) ## Perspective plot if(require(plot3D)) { persp3D(mu, sigma, ll, xlab=expression(mu), ylab=expression(sigma), zlab=expression(log-likelihood), theta=40, phi=30, colkey=FALSE, col=colors, alpha=0.5, facets=TRUE, shade=1, lighting="ambient", lphi=60, ltheta=0, image=TRUE, bty="b2", contour=list(col="gray", side=c("z"), levels=levels) ) ## add the dot for maximum scatter3D(rep(coef(m)[1], 2), rep(coef(m)[2], 2), c(maxValue(m), min(ll)), col="red", pch=16, facets=FALSE, bty="n", add=TRUE) ## line from max on persp to max at bottom surface segments3D(coef(m)[1], coef(m)[2], maxValue(m), coef(m)[1], coef(m)[2], min(ll), col="red", lty=2, bty="n", add=TRUE) ## contours for the bottom image contour3D(mu, sigma, z=min(ll) + 0.1, colvar=ll, col="black", levels=levels, add=TRUE) } else { plot(1:2, type="n") text(1.5, 1.5, "This figure requires 'plot3D' package", cex=1.5) } @ \caption{Log-likelihood surface as a function of $\mu$ and $\sigma$. The optimum, denoted as the red dot, is at $\mu=\Sexpr{round(coef(m)[1], 3)}$ and $\sigma=\Sexpr{round(coef(m)[2], 3)}$. The corresponding countour plot is shown at the bottom of the figure box. } \label{fig:mu-sigma-plot} \end{figure} So how does the algorithm find the optimal parameter value $\vec{\theta}^*$, the red dot on the figure? All the common methods are iterative, i.e. they start with a given start value (that's why we need the start value), and repeatedly find a new and better parameter that gives a larger log-likelihood value. While humans can look at the figure and immediately see where is its maximum, computers cannot perceive the image in this way. And more importantly--even humans cannot visualize the function in more than three dimensions. This visualization is so helpful for us because we can intuitively understand the 3-dimensional structure of the surface. It is 3-D because we have two parameters, $\mu$ and $\sigma$, and a single log-likelihood value. Add one more parameter as we did in Section~\ref{sec:linear-regression}, and visualization options are very limited. In case of 5 parameters, it is essentially impossible to solve the problem by just visualizations. Non-linear optimization is like climbing uphill in whiteout conditions where you cannot distinguish any details around you--sky is just a white fog and the ground is covered with similar white snow. But you can still feel which way the ground goes up and so you can still go uphill. This is what the popular algorithms do. They rely on the slope of the function, the gradient, and follow the direction suggested by gradient. Most optimizers included in the \texttt{maxLik} package need gradients, including the default Newton-Raphson method. But how do we know the gradient if the log-likelihood function only returns a single value? 
There are two ways: \begin{enumerate*}[label=\roman*)] \item provide a separate function that computes gradient; \item compute the log-likelihood value in multiple points nearby and deduce the gradient from that information. \end{enumerate*} The first option is superior, in high dimensions it is much faster and much less error prone. But computing and coding gradient can easily be days of work. The second approach, numeric gradient, forces the computer to do more work and hence it is slower. Unfortunately importantly, it may also unreliable for more complex cases. In practice you may notice how the algorithm refuses to converge for thousands of iterations. But numeric gradient works very well in simple cases we demonstrated here. This also hints why it is useful to choose good start values. The closer we start to our final destination, the less work the computer has to do. And while we may not care too much about a few seconds of computer's work, we also help the algorithm to find the correct maximum. The less the algorithm has to work, the less likely it is that it gets stuck in a wrong place or just keeps wandering around in a clueless manner. If this is the case, you may see how the algorithm gets slow, does not converge (returns the ``maximum number of iterations exceeded'' message), how the results look weird, or standard errors are extremely large. % \bibliographystyle{apecon} % \bibliography{maxlik} \end{document} maxLik/vignettes/stochastic-gradient-maxLik.Rnw0000644000175100001440000007613714077525067021432 0ustar hornikusers\documentclass{article} \usepackage{graphics} \usepackage{amsmath} \usepackage{amssymb} \usepackage{indentfirst} \usepackage[utf8]{inputenc} \usepackage{natbib} \usepackage{xspace} \newcommand{\elemProd}{\ensuremath{\odot}} % elementwise product of matrices \newcommand*{\mat}[1]{\mathsf{#1}} \newcommand{\maxlik}{\texttt{maxLik}\xspace} \newcommand*{\transpose}{^{\mkern-1.5mu\mathsf{T}}} %\newcommand{\transpose}{\intercal} \renewcommand*{\vec}[1]{\boldsymbol{#1}} % \VignetteIndexEntry{SGA introduction: the basic usage of maxSGA} \begin{document} <>= options(keep.source = TRUE, width = 60, try.outFile=stdout() # make try to produce error messages ) foo <- packageDescription("maxLik") @ \title{Stochastic Gradient Ascent in maxLik} \author{Ott Toomet} \maketitle \section{\texttt{maxLik} and Stochastic Gradient Ascent} \texttt{maxLik} is a package, primarily intended for Maximum Likelihood and related estimations. It includes several optimizers and associated tools for a typical Maximum Likelihood workflow. However, as predictive modeling and complex (deep) models have gained popularity in the recend decade, \texttt{maxLik} also includes a few popular algorithms for stochastic gradient ascent, the mirror image for the more widely known stochastic gradient descent. This vignette gives a brief overview of these methods, and their usage in \texttt{maxLik}. \section{Stochastic Gradient Ascent} \label{sec:stochastic-gradient-ascent} In machine learning literature, it is more common to describe the optimization problems as minimization and hence to talk about gradient descent. As \texttt{maxLik} is primarily focused on maximizing likelihood, it implements the maximization version of the method, stochastic gradient ascent (SGA). The basic method is simple and intuitive, it is essentially just a careful climb in the gradient's direction. 
Given an objective function $f(\vec{\theta})$ and the initial parameter vector $\vec{\theta}_{0}$, the algorithm will compute the gradient $\vec{g}(\vec{\theta}_{0}) = \nabla_{\vec{\theta}} f(\vec{\theta})\big|_{\vec{\theta} = \vec{\theta}_{0}}$, and update the parameter vector as $\vec{\theta}_{1} = \vec{\theta}_{0} + \rho \vec{g}(\vec{\theta}_{0})$. Here $\rho$, the \emph{learning rate}, is a small positive constant to ensure we do not overshoot the optimum. Depending on the task it is typically of order $0.1 \dots 0.001$.
In common tasks, the objective function $f(\vec{\theta})$ depends on data, ``predictors'' $\mat{X}$ and ``outcome'' $\vec{y}$, in an additive form $f(\vec{\theta}; \mat{X}, \vec{y}) = \sum_{i} f(\vec{\theta}; \vec{x}_{i}, y_{i})$ where $i$ denotes ``observations'', typically arranged as the rows of the design matrix $\mat{X}$. Observations are often considered to be independent of each other.
The overview above does not specify how to compute the gradient $\vec{g}(\vec{\theta}_{0})$ in the sense of which observations $i$ to include. A natural approach is to include the complete data and compute
\begin{equation}
  \label{eq:full-batch-gradient}
  \vec{g}_{N}(\vec{\theta}_{0}) = \frac{1}{N}\sum_{i=1}^{N} \nabla_{\vec{\theta}} f(\vec{\theta}; \vec{x}_{i})\big|_{\vec{\theta} = \vec{\theta}_{0}}.
\end{equation}
In the SGA context, this approach is called ``full batch'' and it has a number of advantages. In particular, it is deterministic (given data $\mat{X}$ and $\vec{y}$), and the computation of the sum can be done in parallel. However, there are also a number of reasons why the full-batch approach may not be desirable \citep[see][]{bottou2018SIAM}:
\begin{itemize}
\item Data over different observations is often more or less redundant. If we use all the observations to compute the update then we spend a substantial effort on redundant calculations.
\item The full-batch gradient is deterministic and hence there is no stochastic noise. While the lack of noise is advantageous in the later steps of optimization, the noise helps the optimizer to avoid local optima and to overcome flat areas in the objective function early in the process.
\item SGA achieves much more rapid initial convergence compared to the full-batch method (although full-batch methods may achieve a better final result).
\item The cost of computing the full-batch gradient grows with the sample size, but that of the minibatch gradient does not.
\item It is empirically known that large-batch optimization tends to find sharp optima \citep[see][]{keskar+2016ArXiv} that do not generalize well to validation data. A small-batch approach leads to better validation performance.
\end{itemize}
In contrast, SGA is an approach where the gradient is computed on just a single observation as
\begin{equation}
  \label{eq:stochastic-gradient}
  \vec{g}_{1}(\vec{\theta}_{0}) = \nabla_{\vec{\theta}} f(\vec{\theta}; \vec{x}_{i}, y_{i})\big|_{\vec{\theta} = \vec{\theta}_{0}}
\end{equation}
where $i$ is chosen randomly. In applications, all the observations are usually walked through in a random order, to ensure that each observation is included once, and only once, in an \emph{epoch}. An epoch is a full walk-through of the data, and is in many ways similar to an iteration in the full-batch approach.
As SGA only accesses a single observation at a time, it suffers from other kinds of performance issues.
In particular, one cannot parallelize the gradient function \eqref{eq:stochastic-gradient}, operating on individual data vectors may be inefficient compared to larger matrices, and while we gain in terms of gradient computation speed, we lose by running the optimizer for many more loops. \emph{Minibatch} approach offers a balance between the full-batch and SGA. In case of minibatch, we compute gradient not on individual observations but on \emph{batches} \begin{equation} \label{eq:minibatch-gradient} \vec{g}_{m}(\vec{\theta}_{0}) = \frac{1}{|\mathcal{B}|}\sum_{i\in\mathcal{B}} \nabla_{\vec{\theta}} f(\vec{\theta}; \vec{x}_{i}, y_{i})\big|_{\vec{\theta} = \vec{\theta}_{0}} \end{equation} where $\mathcal{B}$ is the batch, a set of observations that are included in the gradient computation. Normally the full data is partitioned into a series of minibatches and walked through sequentially in one epoch. \section{SGA in \texttt{maxLik} package} \label{sec:sga-in-maxlik} \maxlik implements two different optimizers: \texttt{maxSGA} for simple SGA (including momentum), and \texttt{maxAdam} for the Adaptive Moments method \citep[see][p. 301]{goodfellow+2016DL}. The usage of both methods mostly follows that of the package's main workhorse, \texttt{maxNR} \citep[see][]{henningsen+toomet2011}, but their API has some important differences due to the different nature of SGA. The basic usage of the maxSGA is as follows: <>= maxSGA(fn, grad, start, nObs, control) @ where \texttt{fn} is the objective function, \texttt{grad} is the gradient function, \texttt{nObs} is number of observations, and \texttt{control} is a list of control parameters. From the user's perspective, \texttt{grad} is typically the most important (and the most complex) argument. Next, we describe the API and explain the differences between the \texttt{maxSGA} API and \texttt{maxNR} API, and thereafter give a few toy examples that demonstrate how to use \texttt{maxSGA} in practice. \subsection{The objective function} Unlike in \texttt{maxNR} and the related optimizers, SGA does not directly need the objective function \texttt{fn}. The function can still be provided (and perhaps will in most cases), but one can run the optimizer without it. If provided, the function can be used for printing the value at each epoch (by setting a suitable \texttt{printLevel} control option), and for stopping through \emph{patience} stopping condition. If \texttt{fn} is not provided, do not forget to add the argument name for the gradient, \texttt{grad=}, as otherwise the gradient will be treated as the objective function with unexpected results! If provided, the function should accept two (or more) arguments: the first must be the numeric parameter vector, and another one, named \texttt{index}, is the list of indices in the current minibatch. As the function is not needed by the optimizer itself, it is up to the user to decide what it does. An obvious option is to compute the objective function value on the same minibatch as used for the gradient computation. But one can also opt for something else, for instance to compute the value on the validation data instead (and ignore the provided \emph{index}). The latter may be a useful option if one wants to employ the patience-based stopping criteria. \subsection{Gradient function} \label{sec:gradient-function} Gradient is the work-horse of the SGA methods. 
Although \maxlik can also compute the numeric gradient using the finite difference method (this will be done automatically if the objective function is provided but the gradient isn't), this is not advisable, and may be very slow in high-dimensional problems. \texttt{maxLik} uses the numerator layout, i.e. the gradient should be a $1\times K$ matrix where columns correspond to the components of the parameter vector $\vec{\theta}$. For compatibility with other optimizers in \texttt{maxLik} it also accepts an observation-wise matrix where rows correspond to the individual observations and columns to the parameter vector components.
The requirements for the gradient function arguments are the same as for \texttt{fn}: the first formal argument must be the parameter vector, and it must also have an argument \texttt{index}, a numeric index for the observations to be included in the minibatch.
\subsection{Stopping Conditions}
\label{sec:stopping-conditions}
\texttt{maxSGA} uses three stopping criteria:
\begin{itemize}
\item Number of epochs (control option \texttt{iterlim}): the number of times all data is iterated through using the minibatches.
\item Gradient norm. However, in the case of the stochastic approach one cannot expect the gradient at the optimum to be close to zero, and hence the corresponding criterion (control option \texttt{gradtol}) is set to zero by default. If interested, one may make it positive.
\item Patience. Normally, each new iteration has a better (higher) value of the objective function. However, in certain situations this may not be the case. In such cases the algorithm does not stop immediately, but continues up to \emph{patience} more epochs. It also returns the best parameters, not necessarily the last parameters. Patience can be controlled with the options \texttt{SG\_patience} and \texttt{SG\_patienceStep}. The former controls the patience itself--how many times the algorithm is allowed to produce an inferior result (the default value \texttt{NULL} means the patience criterion is not used). The latter controls how often the patience criterion is checked. If computing the objective function is costly, it may be useful to increase the patience step and decrease the patience.
\end{itemize}
\subsection{Optimizers}
\label{sec:optimizers}
\texttt{maxLik} currently implements two optimizers: \emph{SGA}, the stock gradient ascent (including momentum), and \emph{Adam}. Here we give some insight into momentum and into the Adam method; the basic gradient-only optimization technique was explained in Section~\ref{sec:stochastic-gradient-ascent}.
It is easy and intuitive to extend the SGA method with momentum. As implemented in \texttt{maxSGA}, the momentum $\mu$ ($0 < \mu < 1$) is incorporated into the gradient update as
\begin{equation}
  \label{eq:gradient-update-momentum}
  \vec{\theta}_{t+1} = \vec{\theta}_{t} + \vec{v}_{t} \quad\text{where}\quad \vec{v}_{t} = \mu \vec{v}_{t-1} + \rho \vec{g}(\vec{\theta}_{t}).
\end{equation}
See \citet[p. 288]{goodfellow+2016DL}. The algorithm takes the initial ``velocity'' $\vec{v}_{0} = \vec{0}$. It is easy to see that $\mu=0$ is equivalent to the no-momentum case, and if $\vec{g}(\vec{\theta})$ is constant, $\vec{v}_{t} \to \rho \vec{g}(\vec{\theta})/(1 - \mu)$. So the movement speeds up in a region with a stable gradient. As a downside, it is also easier to overshoot a maximum. But this behavior makes momentum-equipped SGA less prone to getting stuck in a local optimum. Momentum can be set with the control option \texttt{SG\_momentum}; the default value is 0.
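As a minimal sketch of how momentum is switched on (the data \texttt{x} and the gradient function \texttt{gradfn} below are ad hoc constructs for illustration only), consider maximizing $f(\theta) = -\frac{1}{N}\sum_{i}(x_{i} - \theta)^{2}$ for simulated data; the maximizer is the sample mean of $x$:
<<>>=
## ad hoc example: maximize -mean((x - theta)^2) with momentum-equipped SGA;
## the maximum is at the sample mean of x
library(maxLik)
x <- rnorm(1000, mean=2)
gradfn <- function(theta, index) {
   ## gradient of -mean((x - theta)^2) on the minibatch 'index',
   ## returned as a 1 x 1 matrix (numerator layout)
   matrix(2*mean(x[index] - theta), nrow=1)
}
m <- maxSGA(grad=gradfn, start=c(theta=0), nObs=length(x),
            control=list(iterlim=200,
                         SG_learningRate=0.1,
                         SG_momentum=0.9))
coef(m)  # should be close to mean(x)
@
Setting \texttt{SG\_momentum=0} in the same call reduces it to plain SGA.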
The Adaptive Moments method, usually referred to as \emph{Adam} \citep[p. 301]{goodfellow+2016DL}, adapts the learning rate by the variance of the gradient--if the gradient components are unstable, it slows down, and if they are stable, it speeds up. The adaptation is proportional to the weighted average of the gradient divided by the square root of the weighted average of the gradient squared, all operations done component-wise. In this way a stable gradient component (where the moving average is similar to the gradient value) will have a higher speed than a fluctuating gradient (where the components frequently shift the sign and the average is much smaller). More specifically, the algorithm is as follows:
\begin{enumerate}
\item Initialize the first and second moment averages $\vec{s} = \vec{0}$ and $\vec{r} = \vec{0}$.
\item Compute the gradient $\vec{g}_{t} = \vec{g}(\vec{\theta}_{t})$.
\item Update the average first moment: $\vec{s}_{t+1} = \mu_{1} \vec{s}_{t} + (1 - \mu_{1}) \vec{g}_{t}$. $\mu_{1}$ is the decay parameter: the larger it is, the longer the memory of the method. It can be adjusted with the control parameter \texttt{Adam\_momentum1}; the default value is 0.9.
\item Update the average second moment: $\vec{r}_{t+1} = \mu_{2} \vec{r}_{t} + (1 - \mu_{2}) \vec{g}_{t} \elemProd \vec{g}_{t}$ where $\elemProd$ denotes element-wise multiplication. The control parameter for $\mu_{2}$ is \texttt{Adam\_momentum2}; the default value is 0.999.
\item As the algorithm starts with the averages $\vec{s}_{0} = \vec{r}_{0} = 0$, we also correct the resulting bias: $\hat{\vec{s}} = \vec{s}/(1 - \mu_{1}^{t})$ and $\hat{\vec{r}} = \vec{r}/(1 - \mu_{2}^{t})$.
\item Finally, update the estimate: $\vec{\theta}_{t+1} = \vec{\theta}_{t} + \rho \hat{\vec{s}}/(\delta + \sqrt{\hat{\vec{r}}})$ where division and square root are done element-wise and $\delta=10^{-8}$ takes care of numerical stabilization.
\end{enumerate}
The Adam optimizer can be used with \texttt{maxAdam}.
\subsection{Controlling Optimizers}
\label{sec:control-options}
Both \texttt{maxSGA} and \texttt{maxAdam} are designed to be similar to \texttt{maxNR}, and mostly expect similar arguments. In particular, both functions expect the objective function \texttt{fn}, gradient \texttt{grad} and Hessian function \texttt{hess}, and the initial parameter values \texttt{start}. As these optimizers only need the gradient, one can leave out both \texttt{fn} and \texttt{hess}. The Hessian is mainly included for compatibility reasons and only used to compute the final Hessian, if requested by the user. As SGA methods are typically used in contexts where the Hessian is not needed, by default the algorithms do not return the Hessian matrix and hence do not use the \texttt{hess} function even if provided. Check out the argument \texttt{finalHessian} if interested. An important SGA-specific control option is \texttt{SG\_batchSize}. This determines the batch size, or \texttt{NULL} for the full-batch approach. Finally, unlike the traditional optimizers, stochastic optimizers need to know the size of the data (argument \texttt{nObs}) in order to calculate the batches.
\section{Example usage: Linear regression}
\label{sec:example-usage-cases}
\subsection{Setting Up}
\label{sec:setting-up}
We demonstrate the usage of \texttt{maxSGA} and \texttt{maxAdam} to solve a linear regression (OLS) problem. Although OLS is not a task where one commonly relies on stochastic optimization, it is a simple and easy-to-understand model.
We use the Boston housing data, a popular dataset where one traditionally attempts to predict the median house price across 506 neighborhoods using a number of neighborhood descriptors, such as mean house size, age, and proximity to the Charles River. All variables in the dataset are numeric, and there are no missing values. The data is provided in the \emph{MASS} package. First, we create the design matrix $\mat{X}$ and extract the house price $y$:
<<>>=
i <- which(names(MASS::Boston) == "medv")
X <- as.matrix(MASS::Boston[,-i])
X <- cbind("const"=1, X)  # add constant
y <- MASS::Boston[,i]
@
Although the model and data are simple, it is not an easy task for stock gradient ascent. The problem lies in the different scaling of the variables; the means are
<<>>=
colMeans(X)
@
One can see that \emph{chas} has an average value \Sexpr{round(mean(X[,"chas"]), 3)} while that of \emph{tax} is \Sexpr{round(mean(X[,"tax"]), 3)}. This leads to extremely elongated contours of the loss function:
<>=
eigenvals <- eigen(crossprod(X))$values
@
One can see that the ratio of the largest to the smallest eigenvalue of $\mat{X}^{\transpose} \mat{X}$ is $\Sexpr{round(eigenvals[1]/eigenvals[14], -5)}$. Purely gradient-based methods, such as SGA, have trouble working in the resulting narrow valleys. For reference, let's also compute the analytic solution to this linear regression model (reminder: $\hat{\vec{\beta}} = (\mat{X}^{\transpose}\,\mat{X})^{-1}\,\mat{X}^{\transpose}\,\vec{y}$):
<<>>=
betaX <- solve(crossprod(X)) %*% crossprod(X, y)
betaX <- drop(betaX)  # matrix to vector
betaX
@
Next, we provide the gradient function. As a reminder, the OLS gradient in numerator layout can be expressed as
\begin{equation}
  \label{eq:ols-gradient}
  \vec{g}_{m}(\vec{\theta}) = -\frac{2}{|\mathcal{B}|} \sum_{i\in\mathcal{B}} \left(y_{i} - \vec{x}_{i}^{\transpose} \cdot \vec{\theta} \right) \vec{x}_{i}^{\transpose} = -\frac{2}{|\mathcal{B}|} \left(y_{\mathcal{B}} - \mat{X}_{\mathcal{B}} \cdot \vec{\theta} \right)^{\transpose} \mat{X}_{\mathcal{B}}
\end{equation}
where $y_{\mathcal{B}}$ and $\mat{X}_{\mathcal{B}}$ denote the elements of the outcome vector and the slice of the design matrix that correspond to the minibatch $\mathcal{B}$. We choose to divide the value by the batch size $|\mathcal{B}|$ in order to have gradient values of roughly similar size, independent of the batch size. We implement it as:
<<>>=
gradloss <- function(theta, index)  {
   e <- y[index] - X[index,,drop=FALSE] %*% theta
   g <- t(e) %*% X[index,,drop=FALSE]
   2*g/length(index)
}
@
The \texttt{gradloss} function has two arguments: \texttt{theta} is the parameter vector, and \texttt{index} tells which observations belong to the current minibatch. The actual argument will be an integer vector, and hence we can use \texttt{length(index)} to find the size of the minibatch. Finally, we return the negative of~\eqref{eq:ols-gradient} as \texttt{maxSGA} performs maximization, not minimization.
First, we demonstrate how the model works without the objective function. We have to supply the gradient function, initial parameter values (we use random normals below), and also \texttt{nObs}, the number of observations to select the batches from. The latter is needed as the optimizer itself does not have access to the data but still has to partition it into batches. Finally, we may also provide various control parameters, such as the number of iterations, stopping conditions, and batch size.
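Before handing \texttt{gradloss} to the optimizer, a quick check of the returned shape can save some debugging later; for a minibatch of, say, five observations the result should be a $1\times K$ matrix, $K$ being the number of parameters (the zero trial vector below is an arbitrary choice, used only for this check):
<<>>=
## shape check: the gradient on a 5-observation minibatch should be
## a 1 x K matrix in the numerator layout
dim(gradloss(rep(0, ncol(X)), index=1:5))
@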
We start by specifying only the iteration limit, the only stopping condition we use here:
<>=
library(maxLik)
set.seed(3)
start <- setNames(rnorm(ncol(X), sd=0.1), colnames(X))  # add names for better reference
res <- try(maxSGA(grad=gradloss,
                  start=start,
                  nObs=nrow(X),
                  control=list(iterlim=1000)
                  )
           )
@
This run was a failure. We encountered a run-away growth of the gradient because the default learning rate $\rho=0.1$ is too big for such a strongly curved objective function. But before we repeat the exercise with a smaller learning rate, let's incorporate gradient clipping. Gradient clipping, performed with the \texttt{SG\_clip} control option, caps the $L_{2}$-norm of the gradient while keeping its direction. We clip the squared norm at 10,000, i.e.\ the gradient norm cannot exceed 100:
<<>>=
res <- maxSGA(grad=gradloss,
              start=start,
              nObs=nrow(X),
              control=list(iterlim=1000,
                           SG_clip=1e4)  # limit ||g|| <= 100
              )
summary(res)
@
This time the gradient did not explode and we were able to get a result. But the estimates are rather far from the analytic solution shown above, e.g.\ the constant estimate \Sexpr{round(coef(res)[1], 3)} is very different from the corresponding analytic value \Sexpr{round(betaX[1], 3)}.

Let's analyze what is happening inside the optimizer. We can ask for both the parameter values and the objective function value to be stored for each epoch. But before we can store its value, in this case the mean squared error (MSE), we have to supply an objective function to \texttt{maxSGA}. We compute MSE on the same minibatch as the gradient, returning its negative as we are maximizing:
<<>>=
loss <- function(theta, index) {
   e <- y[index] - X[index,] %*% theta
   -crossprod(e)/length(index)
}
@
Now we can store the values with the control options \texttt{storeParameters} and \texttt{storeValues}. The corresponding numbers can be retrieved with the \texttt{storedParameters} and \texttt{storedValues} methods. For \texttt{iterlim=R}, the former returns an $(R+1) \times K$ matrix, one row for each epoch and one column for each parameter component, and the latter returns a numeric vector of length $R+1$, where $R$ is the number of epochs. The first value in both cases is the initial value, so we have $R+1$ values in total. Let's retrieve the values and plot both. We decrease the learning rate to $0.001$ using the \texttt{SG\_learningRate} control. Note that although we maximize negative loss, we plot positive loss.
\setkeys{Gin}{width=\textwidth, height=80mm}
<<fig=TRUE>>=
res <- maxSGA(loss, gradloss,
              start=start,
              nObs=nrow(X),
              control=list(iterlim=1000,
                           # will misbehave with larger numbers
                           SG_clip=1e4,
                           SG_learningRate=0.001,
                           storeParameters=TRUE,
                           storeValues=TRUE
                           )
              )
par <- storedParameters(res)
val <- storedValues(res)
par(mfrow=c(1,2))
plot(par[,1], par[,2], type="b", pch=".",
     xlab=names(start)[1], ylab=names(start)[2], main="Parameters")
## add some arrows to see which way the parameters move
iB <- c(40, nrow(par)/2, nrow(par))
iA <- iB - 10
arrows(par[iA,1], par[iA,2], par[iB,1], par[iB,2], length=0.1)
##
plot(seq(length=length(val))-1, -val, type="l",
     xlab="epoch", ylab="MSE", main="Loss", log="y")
@
We can see how the parameters (the first and the second components, ``const'' and ``crim'' in this figure) evolve through the iterations while the loss is rapidly falling. One can see an initial jump where the loss falls very fast, followed by subsequent slow movement. It is possible that the initial jump is limited by gradient clipping.
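One way to check the role of clipping is to evaluate the gradient norm along the stored parameter path. The following sketch (not evaluated here) assumes the objects \texttt{par} and \texttt{gradloss} from the chunks above; raw gradient norms above 100 correspond to epochs where \texttt{SG\_clip} was binding.
<<eval=FALSE>>=
## Gradient L2-norm at each stored parameter vector (sketch, not run).
gnorm <- apply(par, 1, function(theta)
   sqrt(sum(gradloss(theta, 1:nrow(X))^2)))
summary(gnorm)   # values above 100 were clipped down to 100 by SG_clip
@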
\subsection{Training and Validation Sets}
\label{sec:training-validation}

However, as we did not specify the batch size, \texttt{maxSGA} will automatically pick the full batch (equivalent to the control option \texttt{SG\_batchSize = NULL}). So there was nothing stochastic in what we did above. Let us pick a small batch size--a single observation at a time. As smaller batch sizes introduce more noise into the gradient, we also make the learning rate smaller and choose \texttt{SG\_learningRate = 1e-5}.

But now the existing loss function, calculated on just a single observation, carries little meaning. Instead, we split the data into training and validation sets and feed batches of the training data to the optimizer while calculating the loss on the complete validation set. This can be achieved with small modifications to the \texttt{gradloss} and \texttt{loss} functions. As the first step, we split the data:
<<>>=
i <- sample(nrow(X), 0.8*nrow(X))  # training indices, 80% of data
Xt <- X[i,]  # training data
yt <- y[i]
Xv <- X[-i,]  # validation data
yv <- y[-i]
@
Thereafter we modify \texttt{gradloss} to only use the batches of training data, while \texttt{loss} will use the complete validation data and just ignore \texttt{index}:
<<>>=
gradloss <- function(theta, index)  {
   e <- yt[index] - Xt[index,,drop=FALSE] %*% theta
   g <- -2*t(e) %*% Xt[index,,drop=FALSE]
   -g/length(index)
}

loss <- function(theta, index) {
   e <- yv - Xv %*% theta
   -crossprod(e)/length(yv)
}
@
Note that because the optimizer only uses the training data, the \texttt{nObs} argument must now equal the size of the training data.

Another thing to discuss is the computation speed. \texttt{maxLik} implements SGA in a fairly complex loop that does printing and storing, performs complex function calls, computes stopping conditions, and does many other checks. Hence a smaller batch size leads to many more such auxiliary computations per epoch, and the algorithm gets considerably slower. This is less of a problem for complex objective functions or larger batch sizes, but for linear regression the slow-down is very large. For demonstration purposes we lower the number of epochs from 1000 to 100. How do the convergence properties look now with the updated approach?
<<fig=TRUE>>=
res <- maxSGA(loss, gradloss,
              start=start,
              nObs=nrow(Xt),  # note: only training data now
              control=list(iterlim=100,
                           SG_batchSize=1,
                           SG_learningRate=1e-5,
                           SG_clip=1e4,
                           storeParameters=TRUE,
                           storeValues=TRUE
                           )
              )
par <- storedParameters(res)
val <- storedValues(res)
par(mfrow=c(1,2))
plot(par[,1], par[,2], type="b", pch=".",
     xlab=names(start)[1], ylab=names(start)[2], main="Parameters")
iB <- c(40, nrow(par)/2, nrow(par))
iA <- iB - 1
arrows(par[iA,1], par[iA,2], par[iB,1], par[iB,2], length=0.1)
plot(seq(length=length(val))-1, -val, type="l",
     xlab="epoch", ylab="MSE", main="Loss", log="y")
@
We can see the parameters evolving and the loss decreasing over the epochs. The convergence appears smooth and is not disrupted by gradient clipping. Next, we try to improve the convergence by introducing momentum.
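As a reminder, momentum replaces the plain gradient step by a velocity term that accumulates past gradients. The sketch below shows the textbook form of one such step for intuition only; the helper \texttt{momentumStep} is hypothetical, and the actual \texttt{maxSGA} internals may differ in details.
<<eval=FALSE>>=
## Textbook gradient-ascent step with momentum (illustrative sketch).
momentumStep <- function(theta, v, gradient, rho, mu) {
   v <- mu*v + rho*gradient(theta)   # velocity: decayed sum of past gradients
   list(theta = theta + v, v = v)
}
@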
We add momentum $\mu = 0.99$ to the gradient and decrease the learning rate to $1\cdot10^{-6}$:
<<fig=TRUE>>=
res <- maxSGA(loss, gradloss,
              start=start,
              nObs=nrow(Xt),
              control=list(iterlim=100,
                           SG_batchSize=1,
                           SG_learningRate=1e-6,
                           SG_clip=1e4,
                           SGA_momentum = 0.99,
                           storeParameters=TRUE,
                           storeValues=TRUE
                           )
              )
par <- storedParameters(res)
val <- storedValues(res)
par(mfrow=c(1,2))
plot(par[,1], par[,2], type="b", pch=".",
     xlab=names(start)[1], ylab=names(start)[2], main="Parameters")
iB <- c(40, nrow(par)/2, nrow(par))
iA <- iB - 1
arrows(par[iA,1], par[iA,2], par[iB,1], par[iB,2], length=0.1)
plot(seq(length=length(val))-1, -val, type="l",
     xlab="epoch", ylab="MSE", main="Loss", log="y")
@
We achieved a lower loss, but we are still far from the correct solution. As the next step, we use the Adam optimizer. Adam has two momentum parameters, but we leave those at their default values. \texttt{SGA\_momentum} is not used by Adam, so we remove that argument.
<<fig=TRUE>>=
res <- maxAdam(loss, gradloss,
               start=start,
               nObs=nrow(Xt),
               control=list(iterlim=100,
                            SG_batchSize=1,
                            SG_learningRate=1e-6,
                            SG_clip=1e4,
                            storeParameters=TRUE,
                            storeValues=TRUE
                            )
               )
par <- storedParameters(res)
val <- storedValues(res)
par(mfrow=c(1,2))
plot(par[,1], par[,2], type="b", pch=".",
     xlab=names(start)[1], ylab=names(start)[2], main="Parameters")
iB <- c(40, nrow(par)/2, nrow(par))
iA <- iB - 1
arrows(par[iA,1], par[iA,2], par[iB,1], par[iB,2], length=0.1)
plot(seq(length=length(val))-1, -val, type="l",
     xlab="epoch", ylab="MSE", main="Loss", log="y")
@
As visible from the figure, Adam was marching toward the solution without any stability issues.

\subsection{Sequence of Batch Sizes}
\label{sec:sequence-batch-sizes}

The OLS loss function is globally convex, and hence there is no danger of getting stuck in a local maximum. However, when the objective function is more complex, the noise generated by the stochastic sampling helps the algorithm to leave local maxima. A suggested strategy is to increase the batch size over time, to achieve good exploratory properties early in the process and stable convergence later \citep[see][for more information]{smith+2018arXiv}. This approach is in some ways similar to Simulated Annealing.

Here we introduce such an approach by using batch sizes $B=1$, $B=10$ and $B=100$ in succession. We also introduce a patience-based stopping condition: if the objective function value is worse than the best value so far more than \emph{patience} times, the algorithm stops. Here we use a patience value of 5. We also store the loss values from all the batch sizes in a single vector \texttt{val}. If the algorithm stops early, some of the stored values are left uninitialized (\texttt{NA}-s); hence we use \texttt{na.omit} to include only the actual values in the final \texttt{val} vector. We allow the algorithm to run for 200 epochs, but as we have now introduced early stopping through patience, the actual number of epochs may be less than that.
\setkeys{Gin}{width=\textwidth, height=110mm}
<<fig=TRUE>>=
val <- NULL
# loop over batch sizes
for(B in c(1,10,100)) {
   res <- maxAdam(loss, gradloss,
                  start=start,
                  nObs=nrow(Xt),
                  control=list(iterlim=200,
                               SG_batchSize=B,
                               SG_learningRate=1e-6,
                               SG_clip=1e4,
                               SG_patience=5,
                               # worse value allowed only 5 times
                               storeValues=TRUE
                               )
                  )
   cat("Batch size", B, ",", nIter(res), "epochs, function value", maxValue(res), "\n")
   val <- c(val, na.omit(storedValues(res)))
   start <- coef(res)
}
plot(seq(length=length(val))-1, -val, type="l",
     xlab="epoch", ylab="MSE", main="Loss", log="y")
summary(res)
@
The first two batch sizes run through all 200 epochs, but the last run stopped early after only 7 epochs. The figure shows that Adam works well for approximately 170 epochs; thereafter the steady pace becomes uneven. It may be advantageous to slow down the movement further.

As explained above, this dataset is not an easy task for methods that are solely gradient-based, and so we did not achieve a result that is close to the analytic solution. But our task here is to demonstrate the usage of the package, not to solve a linear regression exercise. We believe every \emph{R}-savvy user can adapt the method to their needs.

\bibliographystyle{apecon}
\bibliography{maxlik}

\end{document}
maxLik/vignettes/maxlik.bib0000644000175100001440000001237714077525067015517 0ustar hornikusers% Encoding: UTF-8

@Article{bottou2018SIAM,
  author    = {Bottou, L. and Curtis, F. and Nocedal, J.},
  title     = {Optimization Methods for Large-Scale Machine Learning},
  journal   = {SIAM Review},
  year      = {2018},
  volume    = {60},
  number    = {2},
  pages     = {223-311},
  doi       = {10.1137/16M1080173},
  eprint    = {https://doi.org/10.1137/16M1080173},
  owner     = {otoomet},
  review    = {A long review of different optimization methods from ML perspective. Revolves around SGD and a lot of space is devoted to show how other popular methods are related to SGD. A lot about convergence speed. Very little about non-smooth objective functions, just l1 norm optimization.},
  timestamp = {2019.08.06},
  url       = { https://doi.org/10.1137/16M1080173 },
}

@Article{keskar+2016ArXiv,
  author    = {Nitish Shirish Keskar and Dheevatsa Mudigere and Jorge Nocedal and Mikhail Smelyanskiy and Ping Tak Peter Tang},
  title     = {On Large-Batch Training for Deep Learning: Generalization Gap and Sharp Minima},
  journal   = {ArXiv},
  year      = {2016},
  volume    = {abs/1609.04836},
  abstract  = {The stochastic gradient descent (SGD) method and its variants are algorithms of choice for many Deep Learning tasks. These methods operate in a small-batch regime wherein a fraction of the training data, say $32$-$512$ data points, is sampled to compute an approximation to the gradient. It has been observed in practice that when using a larger batch there is a degradation in the quality of the model, as measured by its ability to generalize. We investigate the cause for this generalization drop in the large-batch regime and present numerical evidence that supports the view that large-batch methods tend to converge to sharp minimizers of the training and testing functions - and as is well known, sharp minima lead to poorer generalization. In contrast, small-batch methods consistently converge to flat minimizers, and our experiments support a commonly held view that this is due to the inherent noise in the gradient estimation. We discuss several strategies to attempt to help large-batch methods eliminate this generalization gap},
  owner     = {siim},
  review    = {Analyze the sharpness of obtained minima in loss function when using small/large batches for SGD.
Incorporate several mid-size neural networks for image processing, including fully connected and convolutional. Show both using graphs and computing sharpness that small batches lead to flat minima while large ones to sharp minima. Speculate it is because the small batches are more noisy, will jump out of the sharp basins but get stuck in flat ones. Give some evidence that small batch followed by large batch may improve the results.},
  timestamp = {2020.04.08},
}

@Book{goodfellow+2016DL,
  title     = {Deep Learning},
  publisher = {MIT Press},
  year      = {2016},
  author    = {Ian J. Goodfellow and Yoshua Bengio and Aaron Courville},
  editor    = {Thomas Dietterich},
  isbn      = {9780262035613},
  owner     = {siim},
  timestamp = {2020.06.02},
}

@Article{henningsen+toomet2011,
  author      = {Henningsen, Arne and Toomet, Ott},
  title       = {maxLik: A package for maximum likelihood estimation in R},
  journal     = {Computational Statistics},
  year        = {2011},
  volume      = {26},
  pages       = {443-458},
  issn        = {0943-4062},
  note        = {10.1007/s00180-010-0217-1},
  affiliation = {Institute of Food and Resource Economics, University of Copenhagen, Rolighedsvej 25, 1958 Frederiksberg C, Denmark},
  issue       = {3},
  keyword     = {Computer Science},
  owner       = {siim},
  publisher   = {Physica Verlag, An Imprint of Springer-Verlag GmbH},
  timestamp   = {2020.06.02},
  url         = {http://dx.doi.org/10.1007/s00180-010-0217-1},
}

@Article{smith+2018arXiv,
  author    = {Samuel L. Smith and Pieter-Jan Kindermans and Quoc V. Le},
  title     = {Don't Decay the Learning Rate, Increase the Batch Size},
  journal   = {ArXiv},
  year      = {2018},
  volume    = {abs/1711.00489},
  abstract  = {It is common practice to decay the learning rate. Here we show one can usually obtain the same learning curve on both training and test sets by instead increasing the batch size during training. This procedure is successful for stochastic gradient descent (SGD), SGD with momentum, Nesterov momentum, and Adam. It reaches equivalent test accuracies after the same number of training epochs, but with fewer parameter updates, leading to greater parallelism and shorter training times. We can further reduce the number of parameter updates by increasing the learning rate ϵ and scaling the batch size B∝ϵ. Finally, one can increase the momentum coefficient m and scale B∝1/(1−m), although this tends to slightly reduce the test accuracy. Crucially, our techniques allow us to repurpose existing training schedules for large batch training with no hyper-parameter tuning. We train ResNet-50 on ImageNet to 76.1% validation accuracy in under 30 minutes.},
  owner     = {siim},
  review    = {Employ Smith and Le (2017) scaling result that noise scale ~ B/n(1-m). Instead of decreasing the learning rate, they propose to increase the batch size.
Show that this works well, mostly in SGD with momentum framework.},
  timestamp = {2020.05.04},
}

@Comment{jabref-meta: databaseType:bibtex;}
maxLik/NAMESPACE0000644000175100001440000000453714077553264012752 0ustar hornikusersimportFrom("generics", "glance", "tidy")
importFrom("methods", "new", "show", "slot", "slot<-", "slotNames", "validObject")
importFrom("miscTools", "nObs", "nParam", "sumKeepAttr")
importFrom( "miscTools", "stdEr" )
importFrom("sandwich", "bread", "estfun", "sandwich")
importFrom("stats", "coef", "logLik", "optim", "pnorm", "printCoefmat", "vcov", "AIC", "qnorm")
importFrom("utils", "head", "str", "tail")
export( "activePar" )
export( "compareDerivatives" )
export( "condiNumber" )
export( "fnSubset" )
export( "glance" )
export( "gradient" )
export( "hessian" )
export( "maxBFGS" )
export( "maxBFGSR" )
export( "maxBHHH" )
export( "maxCG", "maxSGA", "maxAdam")
export( "maximType" )
export( "maxValue" )
export( "maxLik" )
export( "maxNM" )
export( "maxNR" )
export( "maxSANN" )
export( "nIter" )
export( "numericGradient" )
export( "numericHessian" )
export( "numericNHessian" )
export( "objectiveFn" )
export( "returnCode" )
export( "returnMessage" )
export("storedParameters")
export("storedValues")
export("sumt")
export( "tidy" )
exportClasses("MaxControl")
exportMethods("maxControl")
exportMethods("show")
S3method( "activePar", "default" )
S3method( "AIC", "maxLik" )
S3method( "bread", "maxLik" )
S3method( "coef", "maxim" )
S3method( "coef", "maxLik" )
S3method( "coef", "summary.maxLik" )
S3method( "condiNumber", "default" )
S3method("condiNumber", "maxLik" )
S3method("confint", "maxLik" )
S3method("estfun", "maxLik" )
S3method("glance", "maxLik")
S3method( "gradient", "maxim" )
S3method( "hessian", "default" )
S3method( "logLik", "maxLik" )
S3method( "logLik", "summary.maxLik" )
S3method( "maximType", "default" )
S3method( "maximType", "maxim" )
S3method( "maxValue", "maxim" )
S3method( "nIter", "default" )
S3method( "nObs", "maxLik" )
S3method( "nParam", "maxim" )
S3method( "print", "maxLik" )
S3method( "print", "summary.maxim" )
S3method( "print", "summary.maxLik" )
S3method( "objectiveFn", "maxim" )
S3method( "returnCode", "default" )
S3method( "returnCode", "maxim" )
S3method( "returnCode", "maxLik" )
S3method( "returnMessage", "default" )
S3method( "returnMessage", "maxim" )
S3method( "returnMessage", "maxLik" )
S3method( "stdEr", "maxLik" )
S3method( "storedParameters", "maxim" )
S3method( "storedValues", "maxim" )
S3method( "summary", "maxim" )
S3method( "summary", "maxLik" )
S3method("tidy", "maxLik")
S3method("vcov", "maxLik")
maxLik/inst/0000755000175100001440000000000015124514162012473 5ustar hornikusersmaxLik/inst/CITATION0000644000175100001440000000060615124514162013632 0ustar hornikusersbibentry("Article",
  title = "maxLik: A package for maximum likelihood estimation in {R}",
  author = c( as.person( "Arne Henningsen" ),
              as.person( "Ott Toomet" ) ),
  journal = "Computational Statistics",
  year = "2011",
  volume = "26",
  number = "3",
  pages = "443-458",
  doi = "10.1007/s00180-010-0217-1"
)
maxLik/inst/doc/0000755000175100001440000000000014600010432013235 5ustar hornikusersmaxLik/inst/doc/using-maxlik.pdf0000644000175100001440000042015015124514354016350 0ustar hornikusers
ÓÉìg†xè6ßʼnÎIôé$}ƒ>zÚš'/-;ª¨Ñª@M–tíñDdèô–zY–t§¦ŠRMìð¹h+ªŒ~pyÝ.Ÿ‹î½;ÑÚ dSŸ)K^¶>‹èòL©ø X@¥¡V&ƒ²ýˆ·¢HUR¸ÿ”vd¢ÿ²ÇMóúsÏyç,¶úM`3Ù­Õvº@´N]dny¢7î‰F¸„bq|øÖpœ}>N ÞÅÀy>[Æ>,¬­Ô‘‘J_…±ÜRa¢ì5f¹U_]R­´(àõ)ÖŠConr·ad`4<Þ{‹n|·6Qòxá@ö’<¼Ø4Áït52Tû÷C]o UÆ’jó¤~+ì#_¼PtîXo¸£ƒ:º«jOÎΙ¤x?TÂwÙ±FA}Ðïó‡ ‰ìVÖ5å{²GòÆïÜùÍî ’§F {Šè&¦lìu¡(Ò}ÃM2Ag“üGéÚ @Rwœš·ÈRZƒ¼ôYG Ž€Í^á°á¤é¬\Â6™#§Rñ£­‰ûÏMÌ0ÑøÑé^cNYáJ[ªÅàÐU3V/€>wôB¿­g&.‚¨ßä4ÐY.s¢PçdÜ”¶¦ÔZÀΑkúfJ]™Û›‰Él%ÇwÓ-VÙߎĉ'Ñg“|–Ï óæ…YF/Ä 6VÅLõ5…bP mФ?h5J¡‚ÔDªšÚ:ャû›ËÞÑ?[DYJmF5^H¾%Ð8årè ÁšiZÿ\Yú.Hµ‚ t‘Q·¯Èf}PWR|Dr`DýîGCç#TâB3šï¬#“цø”p˜åz%0õºp†LÍf{ ev #9ÞwšÌ”M‹NÛl{5”§n‘žèiö‡©È`p \äŒâŒk¥*{[·%È"HÔ·8¦‰NžŠäÆØÞ(ñ‹øS¯ 0ÀxBÐ̘éP}ÊÑ:;Ø€4˜Lc©¯¦Ù8vNsWeuL©Ÿ)àæÔ¢’QÞCkîhCœ:…ªc¹Z•B-é †j}~Êír:@:ŽÊµÒÍ´Á\|’+(¸ñš3î7ïòÑö…ê–âbµº¸¸EÝÙÙÒÒI%³6ìŽý¼-J|Ìþ‰ÏnœÚ'äê^ N2`òª«m…ü5³æ(XÁq?¹».ìáþ€å1ôNœ]%P9úIÈÄüƒÀüŸÙà§§ÿf«âÒ±§VªÂ±`Äí¢j}¡Óƒ(-¡ùÅú-‡TÚÚ¢ñà oÝxîÿ`mC*ÓÌmè¹ßÅ5%™™Ú‡7WeÄ6cª£foúJ ³Õ=g:ë:ê:8+Ç;cè¥8º¯#&NÜ›µà+Þþ½Y¯Ýã±ÂëÛÇWf)ÊK¤ÔöœíÏé^ÇË~Yâ°2/xcÙ}u¦4mÉ»×àzßñ‘÷Gás¸šw:¿u¬çì8\%ñz¼Rx´Æ`Ws}®u—¿ß€ø TC¢‡P–Þ”dì’+ýZWg­Ûœ´CqÚhpvó¹s¿+K½F‘›»c}þV _—Œ]ádÉië"òÄo^EËÐk[þ¦-Ùy©uYêŠü*w xŠ {Ú‚'þ§ È7qÒùÔC÷Ætû$EØ9B4[P€Ÿ5‚õPJ¯Ì0`i2‹D=à²ÔÓÅWátÀ‡ug§I$„ú¡·¥t4¾%88¸Oš[²ŸKŦ ×UëõP'¿ãL‹<+(ÄówàùâtsÅ}rtæP—·c HÕ‚+tŠs­ö#ìÚøÄ­­ÄÐÓ¨ú:ŸÍA“BãøáWÃ&x»|õêåË«Ó@NfŽ`‰é't-BËÒ~Ä ðâýøE¥·´«'ÖÕÜ'‡rjäó+žN ¯]Þ……¹–Ý¥+ilÄ7…xÜñ··"Ñ»ç顳'ã@"Â…“´ñœöÛ€E¿Žƒã¨ã"Ÿmà¨ùR=uügÌÇ+ðƒËνù-š‹RÑ|”†÷å#4Và_…{rú¯vÿÑwŽþëùzn&}íÝ ß§]!ÛMçnÙQ’ T·Áµº'†'PïŸÝËÚ„zD/Æ4NÃ}Xÿ|'拞DKÐÒPø<~B¾½1t¥ýäÀÇÇ<éÙ¸uy·úYÚT(4ì/}Y’!]'ÙsÈ‚ÒÎï†=cmÿä€ü¥*>õRœøî’·ðÚm6XÉÙøÝÜPôÖE›¼^ÿªÎµ×`úšc õþÇÀOºm>“Íá0›¨ïäUpG†å§ÿîqºÚåW4úå—è)´ÐR2 ŸlY¾¯Ð©½˜ã9€$/}*tÞCª³hNû™ØPÈ^È/ǯâU8co6LÍMz%:o÷o…=Rendstream endobj 179 0 obj << /Filter /FlateDecode /Length 160 >> stream xœ]O1ƒ0 Üó ÿ vD,tah…Ú~ 8Ê€…0ô÷%:t¸“컓ϲëo=»rˆ_”À:6‘¿F$ir,ªŒÃtL…qÖAÈî®Ãû6Ù}~è™ä³¾î«j¡7´5O$¥ÚÆÚV›?éŒöp^ª¶@©³ÿTr4—8o®1§Ò´4ÉÓï™àCNÁñ)¦S"endstream endobj 180 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 220 >> stream xœcd`ab`ddä v ò5030qä~H3ýaîîþÑûSµ›‡¹›‡eÒ÷l¡‚où_ 0012Š©:çç”–¤)øæ§¤å)åç&æ¡ 200020t°çþg\0ï?SÒ7†þMee?uË…¾/}(.¼á;ïw¢Éìº÷NÚ³€ãÏDöîô¼î’ΜÎìîœná‚ä'ìÂv÷±~f.ØÛÁº™½¼»¸«¬›#>mÂzùÇÙgµLª­¬«nª‘‹Èök èæà“ãbžfÏÃÉÀÊ»N¯endstream endobj 181 0 obj << /Filter /FlateDecode /Length 298 >> stream xœ]’AnÂ@ E÷9En†1HÈذhUµ½@˜8( &Q‹Þ¾ß袋oéÅéÉNu8OyXÊêcÓ—-e?än¶ÛxŸ“•g» ¹X5e7¤åI¬éÚNEuxk§ïŸÉJ Xÿà÷öjÕg¶ü´z> stream xœ•V t“åþÓ’|•›ýÀý? 
°©ˆÝ¼âvD¤ ·Ö ¶P(-PhnmÚ&i›&i®ò¥¹õ–”’ÞÒKz¡­mªePDE´Xe:²¹Í:æ÷w‡}i˜;s;ggçÿOÎIòž÷{Þç}Þ÷{xĬ8‚Çã%l[¿55ùñääè—¥‘Eq‘»â!—ý·¿ÏàÃÙñpö,ï]s¾KD«ï@óç}5Ÿˆãñî\¶V.-()Þ¯Xœ*ß·_![¼U.Ý+ûÑA$É䊢âÕÞœ-¥¹©ûÒöÈ;xHºb%A>þ«YæYçg 'ûV$n;vûæÛOÎv;6'é&/ç'ÄÜ›qiK W§NI%"òíuSH:•D Fžæ!ë‹ ×r í;hª8PCçœüªø<˧Âa%91h ˜»úù™†ÏÒ\98XÀÊh• >Ç…U@VàìcÐ!Ãöžò.ÅÈvß 0 ®Î/Ùbÿ\t€åcDMº"‹C¼ŽOÑÉkñÈÙ*ä€nÉã[Jr ÑüÇ@$Jø=JBÂ%78Í-ãÜBXm¯5ðU÷¥áöÖ~_?$¿yôîçmiûw3ÒÜò\¸>÷ªþ›úl~rîM^Ówÿª=îÍ “ITO´ð೓}á_…àBú_sò‘@@\Æ•t„êì:HŠån\ϯÁå¯?¡++ÍZ¨%åÍeÍ­G޶ï Øšž¹]OSýö;¬ü¹‘Ç ¡ÈòÖÄÞSHß%™H¢þø§a·l b’|Þf1^`9·s8£é¹&î'f8EN¼ç{éÜËÒ/mtèý>W]M]÷Ø=ž…¿Õ¯¡ÔPõ´M qWÔ–º4NMU9$SîQ.aLÀ™×¬)ßߎî ×/'×¥*·=•yê’–fkaŽÒØ+ 7@«Ý-d¥³²®¦¦Îã¦=×ãq{D)E›1;ŒRU‰ˆ7%éB?íL¢¾@F¦…)‡¸Û—Ór@ݸÆÕ  ¸JÜ(oÉy¹ü8$¸v‘é“Xᨀäýšô íÄË4  {Ãܽ(ĤWÙ™{̃S“1áMÍÏZa—Í-¯ÒÃJ¨'¹J²†‚ÐÎ7´cð8>ÜÑ[±ºêOˆÐ}µü°È䮆EàÐó©©üzÐÓiÛÅp«Á¯²Ÿ|äà‹5¯Xh[ĉÃÎEÜ\AÔ+#÷‡yÈZa¾œ‚NqÛ†Þ•}+úVºô® ¿t ‚Ðô4N‰‚§žžvû\~¯kcpMÛ3 ‘§Ì]„8ÌW…ÿ©ëÿfàëëþàÀ8$ÃܯóA›½MS%‡Š2©¡¨d[ÉVÕVÖh5Ú ¤]`iѶ›ñsÁæµTÃj²ýl¢Ã åÿB𢓙è‚~ëá<§Ée„&V©U”û4ø1© j‹g1+ƒÆ@ñùâ7 _g½l4‹ x‚]¸R2 ¶°ü•‚[C’£Ü6t(qäC$›z§+‰Ò¢…‘Ÿ  »!Tƒæ­Ñ¬'™bÀÍçRr¹;»^páôë­§Û‡`oï¡¡W^:6NŸ•ì…–±µ?HcáÃz‡Ö“_ßsòáŒåå¿*ñtv ´ÕU¸it€[!¼P¯¬ËÌX¿û…®VÚ â¡Em¹É˜ðDÐ!äUñPÜ$ê¹ßóýS?Làô ÓÑÿÚ±ìùY¸ÍE‚Ï‘%ÜN¸m‡rß>½ Pxš²Ù¦lùAÙ¡½ÊlHîybà ƒÆØìQlSÃgѬ.Ž\e‚f»‰ÆçÖï%Ý‘›Ý<´ì³øÑ艠WRÉžNË>fºäŠ-‡ô:®üÈè@>¾D`xH{77G.5I ’O ”eî&Òû]aút8Êsr€àö„"ow:U‰T ˆuEç7€Ød:œ×œ%LJL®Q‹ªX ­$ ìf«‰5kŠŠ³¤ë7@ ¬ðh=‡½ µ°lU5+JUÅãòáFÞ½¢Û_?ñ—‘¿Ž}» q0pò$©@Pã9,ÿiÈròñi Ã¯T©ujS“Â=d,3•ËZt-ú–U(UÄV[½X#nà Ìh$–a3Ë_ñó¦áyGÇ#éJì½–D]ŠÄG– UF}9T‘ò£êî¿=ðî#þ4­pw.YÎÝÉÍ»±Qo_ê»bª¼žœ¼ËUíT–êhÅžÝò,HÞ“õåu†ê¿Úˆ Ø4!Ü…ø =3û÷{¦;Ý×yõ£$jtl¦uÐ7ÓŒnËÜ1hf¦KÁÆçÌÙ4¥Æ»xô¢“ÿ€RG/¡Ñ0ؽµúÍG-¬a‡GU«ªV-˜â6\áR/sHJíÖº•Mö&èx|ï ÍïEßMÕ_‹;à˜A“ÃäþCæ|šÕi•‹b§Lâ S ‡Úè¦?$v÷{^èa"µ`0ì·ÃZ:Ì­ePo±”é E¹Ç×qmâV™Ë åÖrÕhé õñ ´pZˆ?+[EfŸ©V¬üÞk¨g2}-½ ´ÙÒ.äõ½ØInÇ¿Ÿró¾]†æŽ‡ú˜-=€—^9J'˜š¶ ý¾íûxª÷hmЀ£ðdQkŽÓâ´:Ì$'Y«^ؾ&óìemk°7Ø0†øE-1þé¨ZѲžÈÆž$ªqàÂùx·ÀV[+òÚªl˜ éHÙfÙMSLN8ÖIÌ€=ÏyߪvàÖ3Êê{þX&*ç&EíûÛ´ï')I¥×«õ/r ê]ލú"§ÁÅ Ï0M5bñ…ñ’Z, 7bùâs‡'Ì™é§À4ñký|ªQ]Å)³2ŒrÚÑ£!º • Ñ„ÀçñÖ¿¬™Èëgòúv4bq ñKî.Nð.Ýsåê‰Éãp:œÐIV›¼F5+ÖhÅÞ…;a&Ì Ž)ÙQ8B:Ú…µgj_?ÛÑXÓä>/À‘ŒÚµÔUé¢F!Å3 KÂ(Ý&Çv!›…!”€ç{Uþ‡ÜUÁA©MLWÜò<·¬Bñår$ §Ø5Ä wÐì07ï3pñȧWjjml ­1Ë`9I Qµu46·ŒesV®Ù°ŒŽml.»­¿8Õ‰´õªÄ޳E gßO¢¾Ds‚Â=†]9p¹­*¾üz õºƒ[päçô#‚\EWãŽz÷‘š*¬5yØVm§%É«Ÿÿ±£²³´™a]f7t“þ³nÚl²¡,k¨hlòùšÂò£Ò¼ÜRu ÍÚ¬ ÄWŒ¼&IýÅ'“W‹=ñàöâzq³‚qZÝVlìËU «³é î÷«ÛĤøn!õW§½Ê¹ð7ÍÇé)Áø‰¹îÒ `î.‰Ü_‘g=7ÃåmÆ‹8õ)Ä ëÀ™¦‹WÎwenH—=¿©°°.PJ—e›ÙVòßœ*žì&Qv cƒÑ©iÝmتNc$&‡ ‹u¯ÄžK›”÷c,æY5Q­ŽýÏ:ó¬šs3ñj —9{$°ËÞU*>‘Ù°¦Ãµ2EºýY„åñ³;bÈÖ! Å©ª˜<£1/ÙÝbÎf¸Ö[^rfx0 ëxxNϘâ STõ0°ÿ-FB©µ´*È‚ ¼»§¹½ƒ¦Âí%!1M ±¬]„ç¶ä¡yvYôM¢†ÿ«mWâ‡c(£ôþ@Áf0yâ·_ž9¼+æ4?P0£@¥`øÿŽ/RYÔæçØmk:‹F¶7DmþSÒ¢T,'Ã¥”æt˜ Ÿ>X’öO×å[­DgT‰Ÿ¡ù²Ž$êcôìa!ܤܲs§Åj6C¾+&Õak}SÛ`ã+pŒ<®=ŸF‹-¥%PFŠÛŠ1[— ßSWCÆ’•±–Pãh1ŠÿOn1nÇcÜÒ·Å×­ž@ÿ bendstream endobj 183 0 obj << /Filter /FlateDecode /Length 183 >> stream xœ]O1ƒ ìy?œ¨ Cc‹d2I>€p8"ƒXä÷SS¤ØYööØ«úá:x—hõˆ‹~A¢Öya]¶¨Ž09OxMÓéPÈzVTýM…÷'Í`w}W3TϺáøÄ÷^ ¬AiˆÊO@cRX+ xóg]öÀhɦ–Æ2Ñr‰`,3]v:t»âv­DdÙâòsMù§4> R½Å>áYX»´u~—‡%”Í _§Ÿ\mendstream endobj 184 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 725 >> stream xœ]‘]HSaÇß3Ý9g!}ì0„çÄ‚èÂè¦jÔ"aÎL )­õ¡–ëœíl+§é–{çÒYnkGϱmN3ÊÀqé$(»XF0ˆ> ºè¢«÷Ô j%ÝtóÀóüøÿàG€b Bg=TSS¹³²òÏR¦5ʦ"Ô¥Œ+Š•¡’âðwNu0X÷m=Єq«¹õ’Ýé°qeU­gmÜå²Ýæâ..ÿ%€bNh¨À Žƒ`Ð:: Š5'i¬ý;ƒ¹zA!—޾ÒOâº}ùRf ø‡á5æ—šYUGº\>Nˆu$!“|«Îï$™ Žv¯ÛkƒLt`08`Ä"9Qœ–®EyŸ?€n°;°–|7‡fX»äJ%e)õ¬*rÎbmª¯gW›EAÑæ‚.=“U´ a(WŸ—ãçÚ™ˆßJŒµÛaÉÈ~_Ðoä½^2YNö°29‚†ênséÝé]ã»#§äî4¢‡Étk6ö8ý~üÓý•Ðm‰!ºð)‡ زìñ,“õû~cio¿îäã] ¸JѳŒXoÉéòXÈïÃæRf›Ò0”ú%Dg^»BµXµ:ÔõÛ?žø˜™‹%g Ø’éF´DÞIõ§ï¹<ÐK2yÞ‰F!®¥ÐÓüÞˆ§ßƒ<ˆ>VÙc¶KŽ‰Ô˜œ ¡›ćÕme1yOÚŽŒf³ŸãÂìÔÀTh Ñÿ¤Ôøa7æô+y¼¿€U«î)¸É¬(O ?yŽrz:®;ÞD8DaøDƒ0¢bT–Ú£vÈá3ËäÜ¢ç »¦hØT¢à7Z\B×endstream endobj 185 0 obj << /Filter /FlateDecode /Length 161 >> stream xœ]O1ƒ0 Üó ÿ J ]Z¡¶Žƒ2àD! 
ý}I€î$ûîä³ìú[Ï.¢Ç%°ŽM¤Å¯ Fš‹ªã0Saœu²»ëðþ‚Í@vŸz&ù¬«KYU{½¡%h¤¨y"Ñ(Õ6Ö¶‚ØüIG`´‡óJmRgÿ©äh.qÞ\c$N¥ii’ 8¦ß3Á‡œ‚ â \ÜSÅendstream endobj 186 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 420 >> stream xœcd`ab`dddsöõõ´±T~H3þaú!ËÜÝøãÇ_Önæn–I?ô„¾§ ~Oæÿž ÀÀÌÈXUßçœ_PY”™žQ¢c`j $  --ÍuŒ ,sS‹2“ó|K2RsK€œ…àüäÌÔ’J°›Œ’’+}ýòòr½ÄÜb½ü¢t;): å™% A©Å©Ee©) nùy% ~‰¹© `gêIçü܂ҒÔ"ßü”Ô¢<F áÏÀÄÈÈrøGßO‰Æå?ä—3þ¾0á2ó÷˜ï¢}³¦nì›5ó`wÏ¢5köìÚ~®›cý‚²˜ÄßlQòuYÝ­ÝuA»ò·½Ýò]aÆÔæ¾–úΖÆ¹bËÄ”ÔnŽêæ©‹M^8u‰ü¬Û¿³w渾ÞÙ*<*5O®åPüêˆîôêœâ˜ÆÈîNަIí“gôô/$7}ÏÜc[»9–N,*­l«hÎ’ïl® h¯Mû­%Ñ^ÓZÔÝÎQ2¯yÆÜYÓ§O’ë›8yí„|r\,æóy8½„¢Üendstream endobj 187 0 obj << /Filter /FlateDecode /Length 158 >> stream xœ]O1à Üy…@ÂeI— ­ª¶ ÆD 1ˆ¡¿/¤ª:œ%ûîì³ÆËÈ.¼GOJ`›H«ß"L4;­ã0]­¸è äpÕáõY@vïoz!ùPª­£v7¡7´5Ï$º¦é;k{Alþ¨Ã0ÙeA^¨ªþdе„8on1§š´&)Ó÷™àCqA†ø&Sendstream endobj 188 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 252 >> stream xœcd`ab`dddsö Ž´±T~H3þaú!ËÜÝý³ôûRÖnæn–îïS„¾§ ~Oæÿž ÀÀÌÈXUßçœ_PY”™žQ¢c`j $  --ÍuŒ ,sS‹2“ó|K2RsK€œ…àüäÌÔ’J°›Œ’’+}ýòòr½ÄÜb½ü¢t;): å™% A©Å©Ee©) nùy% ~‰¹© `gêIçü܂ҒÔ"ßü”Ô¢<F ^ÆÀÄÈÈ¢ù£ƒïç¹ïå;Ÿü”fþéÿ½\töÂîE‹J»«åÿ\d«.í.)YØ=[ž™†Vgendstream endobj 189 0 obj << /Filter /FlateDecode /Length 161 >> stream xœ]O1ƒ0 Üó ÿ N• ]Z¡¶Žƒ2àD! ý}I€î$ûîä³ìú[Ï.¢Ç%°ŽM¤Å¯ Fš‹ªã0Saœu²»ëðþ‚Í@vŸz&ù¬ëkYU{½¡%h¤¨y"Ñ(Õ6Ö¶‚ØüIG`´‡óRµJmœý§’£¹Äyp‘8•¦¥I.à˜~Ïr 6ˆ/,ºS'endstream endobj 190 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 298 >> stream xœcd`ab`ddduö ²1T~H3þaú!ËÜÝý#ø§.k7s7ˤïÛ…¾' ~Oàÿ+ÀÀÌÈXYÛãœ_PY”™žQ¢c`j $  --ÍuŒ ,sS‹2“ó|K2RsK€œ…àüäÌÔ’J°›Œ’’+}ýòòr½ÄÜb½ü¢t;): å™% A©Å©Ee©) nùy% ~‰¹© Wêçü܂ҒÔ"ßü”Ô¢<FC!&FFÿ|?–w¯ù±zÍöùŒßëo1?÷SPtYgO\RrX±N7Gû²îƒÝG—qü™ÊžÖQ¯#—‘Ŷ¥ïa÷ |Ô½¥c/òš vŽéäùä¸XÌçóp20 gendstream endobj 191 0 obj << /Filter /FlateDecode /Length 207 >> stream xœ]Ð1Â0 Ð=§È š¦ y¥‰ƒ2F¡ ÜÛ- ?ÒkmÉv³öCN“nNuôœtL9T|ޝêQßðž²j­ÉO‹äõWT³;¸r}ÔT€qöÑ=°9[käS;7ù1à³8Õå;ª­1°æð÷k37ÜâRÙQ%Çz‰-Hˆ-Ó‚„h™Hˆs⊹ qÍìABì™$DÇ !™ó;ÌËwÕþU+æI.$àÅSÆßËX¸KSÔòõl«endstream endobj 192 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 1159 >> stream xœeR{LSw¾·Û‹AÄÇõ¶ÎMqRÅ÷45.™Öˆ_—Z*-ÞÚ^¨˜¶SQG=3%•¦@ÁÐRÂâƒI8Ñiˆ8DƒJb6c’¹øØ¹Ýo&+ü»¾œs’s¾ïËwh*NFÑ4=N£Ýž6Z|&ͤ¥Y2i6öh[tÅ8H` !®|V\d2:&¡i"îO¢š.rz4‚¥ÈjÊ6Šª}êej} RT)+W¦-R-Q«Wª6ä¬&½Î¬ÒêD£!_'Æš<ÕAo2ˆEc+kŒ¢hY•œl·Ûëòm‹köºÑ+‹Tv“hTm7Ø ÖBC–j“`U[uùÕ¨ÌÅ£ ò-¢ÁªÒ Y«™¢¨ø%K—§­XµVBQS¨©Ô4j:5ƒšIñÔ$j25>f“Š£,1‹eô{™SvU&1z&"•$J=ðG> 5Ó÷>`ñ#•âלI^Rpl›ëXñÉuà–l‘¿ ï´õw÷þÃ,N˜ÓO¦’¤´ÔäÝWOUBU;á]|ã¯jÚ€}Ó±vUªvSn¶’œ$Ç73$µ<Æeozˆ Î ¢âmWˆ~ñ…¿¬Ç߸[*’¤9˜®_z¾[§l+=ªÙ&[mž 9ðÅ»­8•¯‡Þ l ²Zþ}KËc¸Ë>Uw†0Ë3V›j74úkƒ!{u^)¾Ö{>ìõ{©:g桇èÎ?Ñ÷šÁ…øŒƒÁòþ–»W:;#÷¡Z‹ý«»ð˜YkUACƒÏº­ëX@>%óÉt’K.`âZ”aâ‡!œ†SæýAâW³s³Ènd… ¯9ŠÛá;rûñ³Eëø€Á£ƒtØ:K–͘{x/°£bª¥®ü4ꇩIú‹«¸¥Pϯ\G=Ùe»Î¸ËKªX“ÜHrÆ]‘—z+z.Vœýþ9xE³— [/ÙÝ0ýðžx»ªŸÞ¯þ:¡#Ç—~1ãTØ\[ìê¯ ôc¡B³T×|-jùs&q—Üß¼E}ú°ÙŠô@Wý·\‘®ù¼![îóàIJ4N.©9Ýž0·Ù ˜•—•‰üø¸4B> stream xœ]1à {^q?£DraÑ8‹XQ’`8, ¸ÈïØN‘bW:nÍÑ~¸ Î& èÕ ëtÄÕoQ!L8[GÚªtLÕÕ"¡ý]†÷' äš}å‚ôÉy[Ÿš½¤¼Æ5H…QºIǘèŒþ[ñ½0™#yiDcÙIwmEcÙKý –Ÿ Ó‰j‹]ªà¬ðX‡¿Û‚¥Yä €vVIendstream endobj 194 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 358 >> stream xœcd`ab`dddwö 641U~H3þaú!ËÜÝýSá§k7s7ˤïß„¾ç~Oçÿž,ÀÀÌÈXÝ8Á9¿ ²(3=£D!ÆÀÔ H*ZZšë(X*8æ¦e&'æ)ø&–d¤æ&–99 ÁùÉ™©%•`-6%%Vúúåååz‰¹ÅzùEév StÊ3K2‚R‹S‹ÊRSÜòóJüsS îÔƒPÎù¹¥%©E ¾ù)©Ey LŽ J – ²xµüèàû©×½à»÷áFó—ÿøÆüƒ÷§—¨1ÛX¯²ÝøžÀú};Ûõß ¬ÏÙ~}Ÿ!ú»‡íÌwÏÇV=˜´A²‡­·vRÑ"O°)?ç3Îü)Äüƒõ“è÷ÖïwXµÙ~üžù;ìûLVe¶ï“ßù½ðëk¶ï9ß}/ú}Œõ3Ÿ‹ù|Ný‰+endstream endobj 195 0 obj << /Filter /FlateDecode /Length 183 >> stream xœ]O9à ìy?ÀŠ\X4Nã"Q”äv- Âv‘߇ÃN‘b†™ÙeÃx­Ù({§^°Q4VXÝÐ fcIÝPmÔv°üªEz†›ôï €…ßåìÙ´mþªKH9 «— ‚´3¾ªD(Xý'ñ˜ðp¶Ñ™€L"ÒZd`‡—HyTxQu¢QáEíòðsLÚ“Ÿ©ÚC»å³ríÔÖXø]îO)A¾n³^cendstream endobj 196 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 1358 >> stream 
xœmS}Lg¿—–r*Ö©«âv–Š‚13[ü¨è6%›ÄŒé&fn,-= (”£XBùlûÐ¥À ‹cqqqºÅŒe¢n8³aˆšh²àÔDÍŒ&f~Œø\yÏlW`S–ýs÷þÞçã÷»ç÷a”Q !$&=ó­=©)‘ãJñe"rQâRpã¥\ôÞSÑ«€X¥ŸS™ˆ%óÅ]óÄô˜ùr!SÉ8™6¦ù’ùž9ÏŒ1÷˜0‰& H_ddP¨ÛeÉ-àûdɦ|A(Ù¸fMEEE²¡¨,Ùb5¿é²ZWQ äëÞçËx«7éÞ¶ ºw E¼nêS’§^é–¢’r·ê2-&ÞZ\b°òÅ…|ž`,0›'Á¤¸2Z ¹{yáŸà4ü¿ðÖÿ„#¸¬¼(¢ÞRl*(+)4ØŸ'šÁcã÷ |±©¬ Ø\È?‹%Ï*„Ó·SµÃ0ê _Ô$,ßÃØÔ7S¶0ÄC€´/ñ‘VÒFÚ‰Ÿté$]¤›™qHÉØ™?dGÖ‘ˆ#JeŽWìP|¡T*óD—:,pOFBܬu šn(Â;¹'—5Ðêöº}yÇ3.d C×b×VQØ  O_Å„ë×}>ð‚íl€êj7TUiémz.AÊIÒÒß%ú陃Âׯæî/ôÛã‡ÎøçIÅ7âgݧqiª¨£~–Ïœ‹W¢­*ÓòeÛhL‘­iU)ÍVz ÛÅ{þ¯Ú–D&ÞÐ)å…çljߩð%¸SpÿXkÿÕ*/æÝȨè\<ÍQ£JÇÕèqÁ>¨?š'õJ:k  ¥±A#xjYS Hˆ„NIºH­¨ L£Æ^{k“> stream xœ]P1Â0 ÜóŠü i)EHQX:€ðÔuª M£´ üÇ¥ wÒùlÙçâÔžÛàYÜÒ\¤ó¡O8O¯(;|e%{ËW1Ãh£(NŸïˆ’ЭújG,îÕîÈ¥r‚©Ç9ZÀdÀB+e´sF`èÿ¬Ã:йog½7 ¥ˆ…Þï C)b¡rv›ì6`$!Kg$i—>T†¡1ÉÚ0HÖ|ɶ3•ãmi$¼R°ð8cŽæþÞ§˜§$A|WPfáendstream endobj 198 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 1188 >> stream xœ5“[peÇw»mX‘ —qg ânä2rIÁÑ2¥0Ö©VÊEÀjÚ@Ó„4iÒ&m’æ¶Ù³»I6ÍöBÛ¤ ¤UJA§hQaÄ¢>88":¾À‹Ž>|É,¦2>œs¾óðóýÏï|8V\„á8>§²fÏ[¥ºÙã˹ðÜ’¢Ü‹Øóù½%0€yÅÒ’â™EȽ™æ#ýŒÀq—/Vi¶´[M6íºWu Wª--+Û²F»A§+Ón7¬Æ†úmM½­É`ª·’fm¹Áh°µÿwåõ&›ÍR¾~½ÃáXWoj]g¶6¾1[eÖa´5iwZ Ö6ƒ^»ÓÜbÓ¾So2hŸ>tÝÓPi6Yì6ƒU[cÖ¬-†Í1WYm†ºf «ÅV`uØ^l'¶ «ÁÈ‚L¬;ˆ—ãWŠ^)J?æXu.áMçÖ¦qTtõÞ#rq”¥×ÎlR(”²TY±üÛšh¢ÐR´’VâJŠÒËþðù"ƾd&nMŸòZæHíQûòï2-{Þ?|Hu¾²èfŒç«'ˆü›ÈCñ‚Ø<){€õrœ­›~O¡›ª•@*jU3ˆR\LòqfeJ~WÝ•\–çö·0‰%=IˆË¢0¥¿B^ hz=î¶-H;w8íå@–«n!.1.ñã A U;kÚa‰t±­ âºg«Èíç£C}U‰#AhÍ“FU‡Ú»$HÝ7°›Q?÷ƒþ] qyÁééSL®U•ž—Hu®Ì{ÍÜHã¹Õ¿HF—)^¼Š¨ß Hu6yÙ"»zœ#CÙþÉéjùøÎÆví½®>à°¹LΆ®Zˆ¾$Äúx>£‡î _Ë9-^ë »˜ åz$È…€ÕxîÔyé\B Õèa˜&‹*S8zö&úrË©‡ªA†{»ÁÅlUM UÑ~ ¦9ÝóY}mî0­W6–lVuü/ågU¥r‰ ‡]Щ90Þ0ý×hu,釀¸v?Ý~¨¡b78{J‚SR†QçŽzÓÈ’F[ ›¢šA¡‹ò#uîˆSÃ÷Çà;ÈÏŽM¾½TY¶B©*Úü=÷àϘ_ö‡9Îç§·¯Ýî²ÚûýäÅäUi”‰§©+赿Å$—ÎXŒ/)ep¼Ðç{6¯ËàèÐ]tä.‘Ÿ›ßFñQ>QRà¾Çy»éÃuú;;xh”ù ^°9ûÓûnX˜8›„¯u>?íN°ƒír§l‡FR·må†ŠŠ ÿœ‰Š2 /€@ÊÝôD"…ÁìW ‘ÇFŒ…ã/$´Ü;2–ì›Ø{Û= $Z„´-:¸|rŒ±f=⚌3ÖÜŒcp™üå§?>ÊìÚ~’ „í4×Y2 TùÁhUáwå©OñGN_çK©§‹KöE?ëã8–^å÷p^œoú§„€e»€£•…OX.r€[³ï±©‹©Ñ³}ôà'ý·A$g1…‚b逵£ÊYoÍIí—` þ£¦çoIÍ{Ãþº£7íendstream endobj 199 0 obj << /Filter /FlateDecode /Length 180 >> stream xœ];à D{NÁ øÈ’‹Æi\$²’\ÃbQÆEn>q¥¤3Ë,§ËälÂdŽ^= acްû#*À ¬Ö!Ʊ¶*}¨žj“‘ñ*Ãóg˜Æ7¹¹ó®«W¬…”×°© J·(ƒ1ÓO] ,æÇYÄ9ã"#Mté rÑDuÁ>ûf–uø9¦üSŸ±:b—êZµvik|7>”ÎBoÆY\Èendstream endobj 200 0 obj << /Filter /FlateDecode /Length 162 >> stream xœ]O1ƒ0 Üó ÿ Vê‚XèÂЪjûà8(NÂÐß—èÐáN²ïN>Ë®¿öìÈGôø¢Ö±‰4û%"Á@£cQÕ`¦}*Œ“Bv7ÞŸ@°Ènó]O$ŸõùTVÕBoh)jI4Jµµ­ 6Òìî¬/mR+gÿ¡äh.qÜ\b$N¥ii’ 8¦ß3Á‡œ‚â /S2endstream endobj 201 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 193 >> stream xœcd`ab`ddä v 1400qH3ýaîîþ¡þ“‰µ›‡¹›‡eÒ÷p¡ïœ‚ßÙø¿3 0012*9çç”–¤)øæ§¤å)„T¤–e‚D‚sóJRSÐT0000ª30d€´³{þgâ¹ÏÀ&¿,û¾«œñû¯ŸL¢ý¾í íIm‰íI]M ÝU“«—È­êëX=eù²ï<N^Ü;Ÿ£}QÕ¼9>9.æiö<œ ßÅE[endstream endobj 202 0 obj << /Filter /FlateDecode /Length 159 >> stream xœ]O1ƒ0 Üó ÿ ‰ ±Ð…¡UÕöÁqPœ(„¡¿/ PUÎÒùîä³ì‡ËÀ.¼GOJ`›H‹_#Œ49µã0¬Lœu²¿êðz‚Í@vç7=“|¨¦)«z¡7´5O$ÚªêZk;Alþ¤#0Úg†Rµ*þSÉÑ\â¼ ¸ÆHœJÓÒ$pLßg‚9Ä*eS!endstream endobj 203 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 253 >> stream xœcd`ab`dddsö Ž4±T~H3þaú!ËÜÝýsÉ÷å¬Ý<ÌÝ<,Ýß§ }OüžÌÿ=A€™‘±ª¾Ï9¿ ²(3=£D!ÆÀÔ H*ZZšë(X*8æ¦e&'æ)ø&–d¤æ&–99 ÁùÉ™©%•`-6%%Vúúåååz‰¹ÅzùEév StÊ3K2‚R‹S‹ÊRSÜòóJüsSÀÎÔ“Îù¹¥%©E ¾ù)©Ey Œ@¼Œ‰‘‘EëGß/ïe{¿3ü´`þYó½LtöâîÅKJº«äÿ¼`«.î..ZÔ=GžüVOendstream endobj 204 0 obj << /Filter /FlateDecode /Length 2377 >> stream xœíY[oãÆ.ú¨P h„ìC‡[‹Ë¹Ï8Ù¢X ͶØ.>¬6(-Ñ6Iô’”í¢èï73¼J´c»èKûA¼Ì9s.ß¹ ?Ï“˜Î÷ßü®¶³d~9û<£þé¼ùYmçoÎf¯>p9·±ULÍÏ.f‚Î)±IØ\'*¦ÌÎ϶³$Ý­£ç›} 2ö²‚­™ßùß̘Ñ:Vf®Œæ1¥óí $:6ª{²™}wo|ŒÍÞÄXÆÚ±„e¸.>›À’€åÉoÊ‘^&fÂþX«Y•Hù ÕÆ±á¬Æ;XaêPÅŒ}œ(”ñv®bÍ­3Ô£N=ïލ‰©€}øKÂâ\X`HvOžlq°Œ­c©i t×1×jPKÍÍ3Âï(FFAÒJ¤Ð÷ÅÞïŸ>°' ¨Ó¸dÃ8‰l×z•‚ÈҜNJJÖ Ÿ9 ‰ IMpdÙå.¤³ÇYÞNY^É8A:ì-?¤¡´PMÎY#ò”¨IÙˆX)€ ᘖDÃ$p0Êìˆ-P“”äM:£¼®šRd4¹,Óuží|õ1†Ô“q‘0¤v3_PX4ðü³“Ò œ©ÎžŒ 
c¡bVAH—;¸’Y÷äÉöËXz–0¡`Á ØZªØTÀ1j®a°®áî÷—Z4Y‹ÒcÚç#£ôA>b–B+;ð"O¨„«ÿ/Z4%žåÀ‹ ­§#øX{%|®°þlRÛA»•p×ÞùXTZ>ŠèóTÂY îƒPlžeú瑎Z-— ¹T$ê>ÃÄ‘r½ébDsz(ÿ¸ç1h°#:t ؼéyÌaÏÃѳÞ¸ç‘a˜¶)%~ØÆµad•îš«0I‡9Û&‚¬»‰Üºñ¼ÊÚ‰Óºoެ6ä¶»*ükr±ß­Üø]µÄœ¤UÓB >»3dëw³³—ÉŸ4™ôÌ1ý»á¿¹Wä«…»ÆÆTuÜ—N¢:užð˨!ÿö›`¬¸hËQÅë†v²Ý6NÍ–¥»öŒ>ÒOr;A^å—Û´ÝOp`Ÿ~B€÷½ŠCêM¶»¬¯–äÊ8T‹÷/a%©>—õ’°—×ù2 jòEϵ]™_úŒ5d›vkÑÊÖVûí’’/—.¨‚Vr¸`»_Fß{>V Â^yý¿gc!)p«È‹Û~»â&+Û;JŠó*+oÒ€–ô?½ùŸ€…kË‹7­ê”|9k9RìqA–8vЋ½Yß¼}ûv±*¶×ø|“õLšé žòÕ£ öL\=LhŒ0E[üãfAvûíypJˆ­â¢§½ÏAí–æ½;oÓºÌ“ÖåäI¹,H$oé±[Ç\óª§ \O§ 1¿UÖi^ù®®zx/IYÜV˨˸ÄÜR8o»ÌQ<È­ÈOZÖšÀ¹“®oÂk«Á.¼Ú˜z0¢.ò²ª{|¬ŠÍ~»;mydé2wN»éb>ëÝ{[ì t>àÕ¡2ìeôUWg>:ŒßcM‰¤Íüáì‘­ 7–·âß«lDÉMÖï6R3Hø~§ý8žkŠÕû¢vÅПB „iZ»;Žvþ ž:sˆMR_>ÝTÖ¾h¶lFÖE Ü7òi`"PaýŽZÏ@IK\0·lÃz` 9ÖÀ:ˆã?eŠäë}ºé·»‰$ì%€—t³Ï:QÙ@VbÔÿᙈë ê©iS¯;ŸWÞKr¾¯Ý­ô¬Gz‡5¤ªóͦYï­‹æ ÜŸ{NÂÊtŸÐhIÜ÷ß(8ú&°Œâh!¼Mù뮪³t p3+ý“c!0ü¾“¼½æá@Æè{ª•:$Îáú©Þ áýèöþÁÞ•&nm×bþÚ¯ ×fj”òHKÎí¹ñ¯è½ÜA·ÆPÜúȱ=:Új½¨O´]ÉïšVOYr›×W}+7øL£& ÄySEú7 W¨­°ÜYA1ÊR^eq€‘[+h#«ÐAÖ3¿†V€¤ÌªýÆŸÑ)‡^IÒ²{)ñÛ|“–ýƒºh¯±aˆNw—èà|„$úKqJ¹Z™ž7HšŽ¶@d­l΃xõ@Ôá·¨¡3FNʨ»¼Ì‚9„DiöŸÃ¼È„×HzY¶î_ä¡8-Æ6SÍyPSûÇȨ49o>š)0©ƒ:ÈEƒÈÚ¯dT·i¬q´Ãw yŸÝÖpâ‡ôúªê¾«¹¾¾?BEÃô—Nío¾;}Rÿîe¶]{—Þ½Ë\6ƒAÛ_ؾ%´U–õë•71öè5=鋊¯¯™ë¦ÛAe÷Ý\V_ë×_¸Vð‹Aß;éy›–ÿÂÍ{„á0P˜SÓò^Lüu17õÒ/˜H´Oïòm×iÃC0OHˆ€íº·`†äºõØ ï”é{ò­c”Wn‰76SÚ’ÝIϪþ:¯³rÐTÐY½/wÃÚ½š˜ÓžGµ_­²ªÊoºŽZÊF[ÂoOºÑ#«…É(lRf×úd=i]l ånÑa´–u¿ãÒbeã´ë¾C©i Z¬ƒðh}„ˆ•}Ãúvö¢Ì]¯Ó2n²2؈|> stream xœ­XËrìDÝ÷Wt°™Ò`‰z?˜Å%†¸f8bãYèv«Û‚–dô° _Of•JRËb†öÂR©ª2ÏÉ“YYþuO3¶§ø;þ=T;º?ï~Ý1?ºÿªý‡‡Ý? ·g,sJñýÃi–°=ã2³”ï ÕãnÿPíþCx&“”1®È×MžQÉIß6—$FfÎZÒ<'†¥ãÚ’¾„iók—ü÷á;´hö.sškoÐfÂ0p¿#Uþv_þ’üç¨Å¡2)¨ÇŽàF7K_V+8$5 ÀGriΗò—7ã8)@mwx$ÕpÇ⸤+“©Þœ«üŽ?&7[ûOFE)>JìÜ6¬øYkqÍ e¤LBƒª¬ &oÚ+D¦ÝÂt,…-ù¬¥©Ÿõ2,þ;§Å^òv¶"4Aˆ¡Š§cdxÚâ›í~;óeÊ·:>ß֨ޝª*oƒ¾m»šÒrõ3¡Üú¸ÊÑIZßçoe5Ts÷Ídq)á6×ãþ‘/„ñ‡ö6æp…•Nñrm©oæøŒpqш›ýXôC[ÏlšcБ‹¼ÿ_z ¹q­Ìܵ^ÐÚ}sN#Ðæ ¬¢µRêBV¾wñ+·Çÿ0­3‹­ \$Fn¥Œá¥ô_»à÷ƒ¹Ûendstream endobj 206 0 obj << /Filter /FlateDecode /Length 2802 >> stream xœµYKÜÆÎyï9%‡sØY¤ÙÍ~*±,Ë" ò°+CÔ g–Òp8âC«uÿžªên’»KÛHœH€D²»«ë]_Õ|\e)_eø7ü¿©Ï²Õþì㧯«ðߦ^=½8ûêUnV.uZèÕÅîÌŸà+.dj3±2™N¹p«‹úì’eëD •fYÎÞ>zôè<¼òœeðqÍá§%ãw¶Ñ.™:ËqŸÞÞNK—½±·éÏ|zy;Ñ=ŸùúÍÅ_Ï8O’fuñâìâÑ%Kþ†ÛÅEÚ€„@j¦t¹•‘îÅuÕ­“\À+Yq:µM±Y ‚e‚]Ç%ÁšãáÞrK*vƒ[œ3–5í‡u‚Âr©CZ9²¥Yµ›÷×exqší~¨Ë£§IÖÇ3œ‹ºìÂ- ×¶‰§;6q׬Y F²ODF+ÇÊöPœâYÍnªþ:Ê?÷‡¢¶rvÓz4‘òòà¹L 4ÃqëßóSÇ1h†šB,ë½#2àJz$4YCÑ“£,³ðB‘g¤÷öMâ·ä¨ n’(©÷NŽÀ-„DMΤà³CoAF¥•Ž<§+ûð=r"1Ü;€ZQûK2‰39Hv¨®›à±`,|ÄýàÛ¤-E_n HouÙ_‡™0l;2tº!äÐÆu¼øì";šu=p]´ sÁ“)°¬#g{Þ“Âå&™’)ªÁ»mé#BZtE¸^8Mѳk›9S}U—A³™—d}T4Ù—§ÍSB¤HrV‰‚ôš¸ ìÔ£-ã^.1BÐþ~ƒ³‹Ÿ8» åD‚‚‰½“ûê¸wúÄ;)ÀE¸„çgÄ$ÞTj˳òņâ'¨ÛùÒï½r‡€á~qƒ‚í“­%#výø6%%k~ú¤d}1Ú4-ÆwX…$¨Õ?»û9ÝÞNŽÛL_né39B¤øÆÍ>‰¥4‡¤Êg@!d’<Η¦ÑÃÔB‰À\*°’½II˜÷‚QÏ *!û=ûk‘Z=暟OI€ˆC$¹S¬lùAg¨È°R"”"i´c?ø8íè›ø¬ÁašnvøgX實ËEÄ÷«œR­·”ÂÈ-ã•l»Þ;Wž؃$™Œs×GTÏ}Hå>«yW'ømÆ þ8®çoÊ×¾üFua@Žû®‹.^“j"¤ï(¢·áõØm?øOk¥DÄt°× jˆŸ„Ë „xB ò³ 'sOÛ²&C¢A»ÅúvÍYA@æ„·zji?ÜÅôak1biI"´q@EÖŒŽ|š"©÷½Ã>²Á™H¶€¯ŽQŽœº€–ÉåaüÍÂÐ~A_ù88 ˜oæ0Ü€/€e8 ¡lÈ¢˜`4(Ý…#sSØ‹¨Ñ{ ^`µ©ùý<µFê±» Ø.gp…?ƒ¤Z§Ü…Zá+îC@ ¬˜×nîÀ]¿ö¬LÀ;©eîi@¶‚„H<4øÊj³’)ç2ÓžÔïðŒ[ðp/WÙìŒh§îðnÀ»­ »…XÑŒ#§”•Î8³¢†Ç†„W¤°’/qڱƺUrçÐÆÖm º‚Bœ-9ôÄè×wÃ!ê×À¹##Æ1 «ã¦\’ðÙ˜C¢“¡â•_~Ú̾íÀW¡ÌÛçgJ@*½ʰÑ)NÄ8۷Ŷ*9-ºäßL†z/È ^&“†37éQ¤R é`=sKs‹Kñ¿ ÇÿOCõHöeƒZ’`* z?Û•!øÉ€ðÓ™àöøAãXiކ8: ½?ƒ–Ç AÛ¡±ítZdá”"P¹Å óó#¢Ì@ž“kAi­ê#еåiš?6mßE¾çŒæìT´Ðñ÷e»¤E_•hणOøñ¦1„”­çЩB)B§8GÉ2‚œô«Ã©­È¹ò0 í­¡ö¶पè>„]RÇÁ9nƒŠéÛ[MµÔÿ^@\™ÿbn9±Fu~ÐC?‘¤h‘•ޱý´¼®PÒxn¼M0Õ¦<–8êTœG KâÞ05£­p.b\ᦺ@%T«å^È@ß&CÛynC~òlsú=Á_.YŒaa•cï©iWvð9,8MÆU½<¿±æGÞæs×Í¡èüo ÂÿÒôK÷Iq¹ûõ–z\®ã<)!â)îS Há«;£h™K\ùöâìïð÷ß•²Åendstream endobj 207 0 obj << /Filter /FlateDecode /Length 2950 >> stream xœ­YKÜÆ¾ï9ÇŒô8;4ûI¶°¶¡±´@hs †­:|H$gÿ>UÕÝ$g–’¥8öAÓÍêêꪯžûq“&|“âÿáß}s‘nn.>^pÚÝ„öÍæ‡«‹ï^I»á<±Z‹ÍÕû „o¸PIžŠM–š„ »¹j.Þ2™ÈíŽs¡ÙÕÖÊÄæÚ27ŒU»M“TI›êœÝlw2ÓIÊ9ÛwþƒÈs•±’~++LΪ±‚o@¨ðbÖn%OàpÆŽ ¢f+E’¦F²w@ 
"«™ë‡í¿¯~A¹õÆÂž0$vžÈ<1_\\}û–}ßõÃv¼4WF°±ÚowBëÄZÃnú¢¬\»pÏ26ÆOŠÝᦵYÎnx†b¥kºv€‡ÃJ¤èSxº0™e}1ºÒSÚÜ2PD8ÅÙk·§G¢°Ï¯.~½ðzí?cˆ“;HJÏÕÆäY’)Žv`"Û«ß"ׯe(óÄd|Éð-;¸úÃ0¿xìæw[‘ÃL±Û­€g*£YW•AgðèvÒÙ±q}µ/ê¨Á>ôÝ»·ƒ´\TÆNp™äÆnvv B "Ô®’íN °9g?Á*Ò¤ø»íð·¶^œºŽ ÏÏáN2¶/Úx޳® ݱ.ãQ…"±…ESû] êjÿJ# s'äñ‘aZÌ(£×y˜ù#öf< úçqçÇm.q;C!Q+“.<€«v‹vï.ŸJéi pbÇîèêºj]ÑW#؆R„ôù‘EÈΩRV”eï†ÁE"‹~ÃëXYŒE`ê%ýIËšÎóÉsð…:~ÏÀd·¤–<eÕIôËl:ÉE"¥ä‘YSÜ¿¨þƒØ=ç.ÒyL¨Ìcì‹/¼]xrU:j†þŠà„ŸˆSBp,ãúH#ÙûcKžx¹&çY¢‹^>XeõòؼÃ(óXJ#’L‹ð˜· l#rïøä0(HŠ*Áõi͆ãÞÛÀq˜Åޏ@(ˆ³Öæì_Û\à7N‘‡Þ$}äÙ‘Ü;ž§‰²»ïîùÂpýÊÞÌ?ãȸd› ŠÔw`«k8)Šœ÷âr6å½\WÎÃ:ã70Š}óí73›=h`âÊçßp„£NçÏ?2Ïî!£Y¦™^IÙöšµîæõëçßO‚€!Æ¢ŸíQÏÒI(2æýç¤g ÏR"túI"ÈÌpñœáʧ³za"ü­›w†cÐI³{??`øx,úho×÷](ù?uÙ]5f%‡²kœ¤pa,ÇÂÐc3Ž}ÑÏ­ÙäjŠþáš5á½g%Tp*O§Vc·òA7ƒ‹Ö>ÁŠÏÿ£¸¯š¨ t`R©¡£-ú6AnºÞÍ«†Å+ë¨pÆ34ß9„Eèìâ†øáIJ¦¼¢©G¤Çš‚ÜǸæîPí§|ÜÀ‚·[œd0JèðÂ'$^ah Ò fo7zx{½xm1MC”;mµwuXcåMµêJvôAÀZ"ê )´ü!¢UꆕÕ9è_Y®Ï±ÖÆX~Sp¼tmÈæmî§lþV£,æ0oÇJ¯¨ã G-2ãèIFù,ðcÕX‹+î'ewHßjEð b ¦@5„È—ž¹Ò}RÁ.ã @iQ‹”qlŒØ!3•né{²uw¡É&¯äkÍVh·=A"5äl­M ]<ÑéLÓèO>S>?–éŒRš©ã@ÌEÞN‹ë“b tðýv^uä°H‰3ǪÝC@¾šœÙ/䨓q•ÑIf²ã{Þ<ÝÀ}PæxAÙÀÀ€WVxppä­!Bó|5U¤Bo(‚ïÓÛmb =þv£”>»]|Ýí8ÝŸb†g¯éJýǘ˜?qþo|¹·øªÎ ™SkýÕ…BîÉ“Aë2N8{ïlf1êÄ,S´ÓðÿÁ§„jFªÀÁÆÇ:ü•CèiV™¦:t•/üqìýx\sJ¼?uSH_”s·LúË8¥•ø÷Ì>2ôo“Éß±3ôÍÞÎàd^ŸY+›q¾V2ÑQ„p¨ ¶±P¤ØÏéÏNÑ…ÖðÞ}qÜjåfM øÂÊ .Osi—·cÖ +nË/Ýòo|4 öÁS…ñºŽ¤`•ª®qR¶É´¤"ÜgßH;o £ãI1L¬˜A@Ë-ÿ3h*ª@íXì} .äP¨lGÜÀœj4Žå´°IN-†o\OSõ_/þ brendstream endobj 208 0 obj << /Filter /FlateDecode /Length 2402 >> stream xœXݓ۶¿¡y膋%¾ÀÓëL2ãL’qÝÖQ§VxNÇX$eŠ:ßµÓÿ½» €¤zÒ©ïÁ"± ìÇo»à§EžñEŽñÿm}“/ö7Ÿn8½]Äÿ¶õâÛõÍßK³p™+D±Xßß ¾àBe6 “n±®o>°'¾\i¡læãËŸ×?Þpž9­ÌbýöfýŒ+Ń­Ééš ·Ìá^qæH lÑ[lÎmÚûÛs¿\I)@E³þÁツþ÷§SU6qÕjÖùÇ¥0°vÌ—‡SÍs zeŸ%«šíἫš}<]OOç&ÏLñXïnØÓrý HØ|QdÎH‹E¦ £íbÅ3å”(ø›y¦ó¼àvÆ'“ëòbq¡R5ÑBn§ž¬n—+p$Ïa;ÀERÕà“e§¾:F]{wðuÙWÛ×Ñ/%'¬¸Õç̆—¢ˆ6ü³£i‡mÛìªwçúÎwæpx«Œdå «7ËÍ2åË\ç«ö´r-B;áÜ(§ù 7Aœ¡sÇ.ïZ‚€…`}†à˜â&ctÆ­ P@+]®ØÝ²˜óÞCð‚LÎCœQ0Ú´}|A¯awv>@Ûáj|ÙUýRXöÏRšÝ`e79îËí°U09@Qð¬ê7X@‘a»ÿ€6n ¶Õ%L‚¤¤ÃŠ©(°¡Ã=îÜ·Q6w¬Üýr>õãs{î‚ >Ä:tÖ†2L·]ÚLAÜú «®mæ0|껲÷û祊* ÌY‚°[QA97™µø3hÉLAйÐì»êÉï"œ4;psÀÕàZWÖ¾÷Ý)®m€·¸üRX–´‘PØ´ ¬U†Lª¦’Ãð»=NÀ±ê_£k×-ÃfÒ‡6CžÞV—/¯Q´¥”ÒËóñ‘+0p¢Ø£ñ¹ÅŠë:ïé 2•Ò÷ŒÎµt\ 51×&b57Dæí}ø V`³®¤×Púîá°t$kN?JsýÏ;Uõñà´'$¡7>€¤§c‹ñÇdnØÖw=Æ•Ê"ù½à€î‡œ½=¥ ŒK­1^š•‡39¦ âú¯ž¦ÞûšnÑ«"š®4q[ ÕÚ¥ýÔ$zc´¯pÝHö¾ñ]‰C…8`±†QK.»¸”è?¸‚ª:—‡*nÄù ³›]2ˆ³I:Á쨄)Vî"Ÿ3Å‚é à¤DK ÏSì}9Þ§v œÑÖþóÐÔ‰ Ú°Ž!9}š˜ cE(~•˜¿n!ãóò€œ¥ø€³.GQ')¾È-¤ëÈ-ŠêRÑù²O‚µrôpŸtT Û$uÍ9Iè8)xz‡äuŠzV…à•1t`}^]zœzý€-NsNÚâEÏ»0{èð-!ƒ•ÀÑzkÙCç,j»(öVMøMÙJªˆð¡bºçxޱìF±ºl™€T­°CÝ% Úóþ!½ˆS:ºìöçÚ7<ûYÆçvOÄú+Â-.Ôœ†Tÿ@mA,pÀP… žhcÔ¨‡ùôV2šNé7–ÆIûH3ʶoÑoi ¾Š’Êf€¸ØU[µ”ôØ:)Ya¡cO³sšt™€±! 
Û ¯Lt4_\²6Ï`'‘‚LÚ}µ%ò04d}ᤕpEf„¾ùÝ7ozóŠæ: Êúý?Âc˜ÌÇå0#áò8t^Œé .¶˜Úè0Ô›¥ÃbeÛ*£Ø϶÷q Ò5o7„Oãp”ö¾¸£9˜ľ/’mHXêKõâ æå2“¹J;í«IÅÌvÓ•„96ö‚™ W—]?“!á2+dÚ¦ÕÁ#°îdaè²Ô_±[‹$…,]XbÇGªeUè€=D¹EõÏ¥EðqOÈ:Þ«&œ^ª>ì5^ÉètǾê¨& õ”{ËPéa?Z6© PÖ[ºÒ=bÛÖ¹‚UûÔ– õ>y&cË¡3 «8‹† \R; gH¯ǵHµš&˜ì»Á¬4N“lWA÷©`|Ç'*ðKÞ¥(AËÛC“‚¨稅~/ò¢i„”+`'¢5e)ɨ`°Ï' $j‡nƒ‡ÃÀ3@¼!¸kÛæï0LŽ’QØ\ þVä&Aåi †M ÈÙË×™²ÎFŽx wy„Ê•x„{ðõ=òiüù§U*õ‚u4’ÁÅ™â¾a<Ïcí } ™ “ ¤M°½ÞŸ›-ægƒ™ìKØ Ö£eÿöúœÌbÕçÑ€©1¤ýÿ<§sªöu9Öù¯ÕļÚœÚßžO°Ú—Ím}žÞiwöçìÕ…Ë·ë&!ùz\ÿÏänáRÞ'!«çC^!§ù|C;˜#ÁÜÑÕç[>}_íé®'uyË7ËWs×0±€šC÷©¡Þ¾ ç^\&;jÁ~?Ú²ñ(›úx|À¹ 椦O¯`h¨¡eк‹ÆfM¦оœ­ÆO‘/c~Ièr›iïŸÝf%áÒ㌺â˜I"uÙ=Ó×’/~+>V¬fþQØ àyn‘fòð—ò©ªÏuÊŸem¨Z€Y»quY¥&þsß6«÷åñᄼÂn.°÷ÀiEX‹Ñá ˜‘–NsŸzÞûþÜ5c)oÛ]H6³¯G«`ŠÙâ·¼GŸªN…>VÎ#Ü(8®PtLêŸa^ª&‡tþ=úQµo`e³…høQînõã‰Tɘ¸«¿m÷«ÔvŒg€ì.zFÑä€ö¨ ‚.´(6i!(ÇÞ„,øÓk@iغ¾)&‰Ñ…Ÿz¼ân¾ëpÆIôÔ¹ÀùqåoÝ&@S…:CB£‡«Ù£‘1¥A_á\Jß®À3¸:ô2|˜[Á&ñk…¦û mæØ*²*èÑF˜È|­àg<wÈñ4xŽx÷ÍÅï™äüÿutÝþŠ"³ }Pð0„qƒTðf}ówøû/I#š_endstream endobj 209 0 obj << /Filter /FlateDecode /Length 271 >> stream xœ]‘1nÃ0 EwŸB7°œº’\’%C‹¢íl‰ í_I±HÑH 4B]Gе¨Ðžh/Škœ^åä*7D÷¤@÷¢)ÐI=öuÒס§Ó¾Nû2)PM¤@ñ“ÿBе¨P$òšÊK*D^SyIå=)P/Š€^C¢Ê¼žƒ‘ÑÉž37á^+çU7¥›Ì™ÿ–Y–"§ h~­‹øendstream endobj 210 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 2198 >> stream xœV{PSW¾!$çj©¶²Q¬Î½ÖíÎÖGÝÎXu­ÖǶ ¢ ñ±/»¾=S5”!3z¸äjý×x=eG/^—/æ6z˜zf—Ým1h+õ&BÞYæ“B|ËrÅ¢CÒsZ2Z×g‹NÔȹ¸lÍ.J]Y<~CMØ}ŽâFÚj&¹UÀê0×7zCn¢ÞSïqzùD JRãÈ–±(M)x4,DDlJ²]ÍÍÿ9¡ùŠS¨£µP‡¯éOûÓDó½+Ä~IËh¡Ï­[µ=Swm˜@ÝhË-@i€ï&Uli¯[9ëI?ÍIžxÖQÚ ­Z’{ÀBæ`¸”•Öž€£ðÒɦfI šî±ZP¤ôž"“‡ÐcÀ%ÕŠ’'¨ èk®Í'¹­`}áòå”ÜÓ¡!jZé0ìÀùß¾|­»Ž=¥ Æ6H¨# ¡ô”èÒè’ÞÅÎ:‡¶ ™–q__ÇxÏxëMÜ!viݺzm×ÊÈÊžUNƒCw„nÁq×{—h>ÓÚ=èÀYޤ@'Ý©ePeP[ÔòÕŠ·¯©ë0w;¨[·ËoÙ¼V/lÀ]ÀÙÞÍ4CþÈ Êú¡O×Xì0»ŒÐ„Û]g©³ÖQkJ×PkäãÊϨ›†HõÏÀ\í?žÎˆ)ûrÖÙË(wx'Z<'ùá7è$pî\ô<ÄÇû7½G‚e(÷¬®U™+`nî-ŸQ£p^Ž4„3bæ@÷Aˆ{Äɨ÷¹Žã·U9 Ø²¿€H~X`¿À·ò»éˆ6\z>3’wÁì2E¦Eo᫃—4í¡`S{HTZ-4m%P‡I~à–i›-¸Y¼/·ÿƒG þ•ä?ø’Ÿ²Ø Vp$žyh†-tÀÔl8¢:¢ •…JCeÑý).‹×Í´Ùh²Ydy)¿Û.ݲâZăÓà¶5À†¹ðäÍK]C<üŽÃ]EEmTŠÛè1A¬Ñkhþçõl-PC£õ¨ô­úࡳ‡ÎÈÎZë ènÛåê9‘28rúã»ñ,¢@Àâ1˜—š[¾OÁ*#U]U‘‹ÛÐÈ Š?ÐàgžääŠc_˜št?·MU“JÁÀ5Ôô{!º=!£”¤9 ufO8Îåf®á'\ð)'BÓ.ŒF†X’áÅš[(TûŽ““à®O1’ø¢½‚’>¾”±[O롞àV€]³26î¿QGØ0`ĵ§ÝúÜ®£š„÷Q­EÅ .Gãý]ßp"nú:þ%Xx“·‡$3éÁCm7'ã„.¼Þæ´ÖXËôfB‘»O–å°È§hË8o™€c8”8îžøÇŸáؽÖîÐÓ5=þÄí&ãnwIy·‹ÓlѰm9Õ¤JÛû?ÉíTâJ¨pXÃ82‚{ÔÕ_¦eØ¡"h„âJŸx'^ìéʯç$ ư m1ä‘Ü1pHoŽ[êf>g••¼eÆ^{‘\¢°ê«¡¯*:ÃÍm!"ùx‹¦£‚H³Ùhû¼ÿ™4Ô¬üë¢pàÿBOe5 ¬ÂÙG~ `ó‘úÆÆ`S lò)ÕþêŽÃ²v™ÛРƒz\£1ëø+X݇/](8_|ÁVoäÇ çÑéhò žÉèÓY¨¶äð!ÒØ*P¡pFÉɤø,Ì| ûþ‘~xç29-ýA²ÀXÏÃûWš·§œVØsʶTÂÊg5‚Q:ªéV ìõçÁt¸-O—ÓâšyG×ñW”ô÷k²âWbºÐ÷vÒ4 û ûœ7endstream endobj 211 0 obj << /Filter /FlateDecode /Length 196 >> stream xœ]1Â0 E÷œ"7hZP¡R•– \ Mœ*C“(mnã¶ ÿKϱeÿ—îÚy7óâ‘‚~ÁÌ­ó&Á–¤÷08ÏÊŠ§çÈõ¨"+.7ߟÀ®|W#Ϫ:P©\‡t00E¥!)?k…­µ’7Oõ:ÐÛ­ó¨%ItÖÖ¥$ ŽØHb“±—$Ä>£‘$Dƒx:K’èyõ¾$_‘óìçs½¤~¦Ð*gq~ÿCÌSž`ˆd\endstream endobj 212 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 1263 >> stream xœ]“[leÇg[˜ù¸´Ë†­Å™"_ôAŒÂƒÁñRh)—Z( ­½ÓBéÅî­—½ÏìÌœ™Ý™½w·Ûînï–^í" "h‰&\TB¸ˆ!ø"úff›%ÁAñÅœœ‡ïËÉ÷?çûÿŽ[–‡i4šß«(ymÛÖíOÅÙ¢¼ì†|p.M-íY«óaõ2ÿ†åÛ ”UÏÿµöç°y P$ä‹ù‘¦SÖ¥I‘9 *f;Èw4N!JûS‰/Ï ª© û“Bg,sûÉûJƒÎÌõô¨þô‡úSC‰$©MÆÑ^R{‘ãx®è?T–*LÊØ—Öko(%J¿î ú›N¨@ÚF—±û…šÔÑÅ›ãÊK!™˜^•/!?fF†¦ãS0MÁâáñ£a»W @q÷ó³Ê†‡tžÊw£ž˜+:I}rr¨msmnsýV28˜ ÎÞU®ë¯ ©FˆøXÑ#¨œ—”;” â"òãƒS` X‚3ÓfÆœ{ññY=Ïp 0¨ÀšOÈ䜲ÑõEÅháš'ùئgðg÷\ÈÏÚ”?u<î¹Ú8yÂÁ¦Ž#ô‚cáØ –œžuóõ)b$N€HP$à ‰»gÀïceZ/;;8ÀȨñnnFÏ:Y8GØB®)âc¢/-%/)íúÄ­ï©Ë8m=Vg­w4PGr‡ÿ­.äkØU«'$qBNe”÷õÿœWV=$ãÓl° Ú‘‹°—õ•÷ÙZ{A3b‰úɶÓ7f»#}„ÌL—Cý÷]¤™HÒþ$5ümòrlQJI#F×k§*Ë÷WWu‘kÈ•ùá·W¯À°¿`#düendstream endobj 213 0 obj << /Filter /FlateDecode /Length 257 >> stream xœ]‘1nÃ0 EwB7°lG2Z’%C‹¢í‰ i ´ñçt¥æ»ë|j·¡°Dº•)Pò…ÔhŒSòŠrü×jí6qN¯£]ð"cPÕØc’e *°ó"`Çh½h÷^Üw;/2˜¼ˆ_-:Vº–»®Nœ;»Þ‹€=#–8Yäx‘‹^ŒŒpuâìØy€Í V[ X2È"TŽà}WNƒs}ǨýVÊ«„/ár¦s¦¿÷)Ká) ©'Öˆ€žendstream endobj 214 0 obj << /Filter /FlateDecode /Subtype 
/Type1C /Length 1952 >> stream xœ…•{PSWÇo¸÷h­é­ZôFmµu»ÖwkÑ+°v­Š|´+U*ƒšIHx!ïäþ‚Q,„—P_Uj«ÎŽVÔµà.…vÄ­»N­ÛZ«ÛÎIæÐqoÖíNgÿÙ9çŸs~gæ|¿¿óûüŽˆŠS"‘hT抌5óæÏ$‰#“%@ÖGG7ÆC‚â|“QÆxüâÓý~%‰ž‘ªViå…Sרså…ª©jåûªÿÙ¤(j¬:­P»-G¾v½rþ¢×;—¢ÖQ3¨Lj•J¥Q+¨¹Ô"*’RÏRÏQc¨§©q‚"Š¡¢9¢ ègq¹øžd“äNÜê¸oâ•ô,ú"ó S÷X´b-•øXtOG™#syýøð nš =K#ìTãìW€H¡ÿJ$x"N¸÷ÅI‡pÜb<†L⤧í6ÞžDÆ‘j–¯Øèó}=tI‡jꞆ¤n8ZÚ¢ìÚæÉ„Mɯ’gîÊ+‘ÃbÈþÜ<àôÃ>Þ‹¿ P®6³.ú|» Où2ÂÞ ­?}ƒ•Ru2‡:ìr o«l¤žÉUZ•.¨äT¸…‘î ùPÒ\\¹0x½û{x€ð$§YÄÂÀ^Þ~$=Ô =Ümºú LùäMßBH†J0ò•‚L‘¡›r…#sô¢^‘´ERXUÓÞîÈ’‘N&¯Ø–!Úf*¡‚¯´Sí=*ÓÚÅÃÍ5nŸ«¼àá½€xÆ`5  ieÁÖÁàQE(c®|k1g7ÙŒ¼‘Ç’‰OîÞeRPÆkôã]+jÄSB¤Ÿã™‘ö­Ý„Áéé—_ å|9”£yg6Þj¸ÝË*bÀà2€e›“Ó3*ûÎr¸Oc¤=a2 ¯a„¤Ë¾ LÁÈìöÅ áú!I›ð_<üÀ„Üûë›ÏÎ>¶óL£¿{„Çb´S½»Z“­•ñx ×ð¬}³-[•·{G®.¶¼YÓeÙoÞu'3PË×B-ú´ÿÌ%,n&‰‹-`æ-œàîø31wsE]Cüs±pº€´%ÅÚÂ@icS°îàð¼ÎUdÌ"Oan E8îú¥#}a™»Ú³j‡á=v· ‡±xã;òL@Óø©[íXŠãŽ÷”ä7Ëþ{×¼ (ü6^‘ü„ËY|Œö{=þGë¿%£I«DDЂžùÛ.k2¶XZUц\Unx×êµUØ öX¸Â-Y;7€òCš¶œìWàSä °ƒ'ïßú{ó i‚3q4‡Ò7E—6 Å|á›HÞÝO£X²¤ŒL'ÏÙ†Èk8޼‹wá˜Å+ñïcT½I¤8l°²Úx[Ù5’ÀÖ3€G· gF]¨:p°}oGÒè_{<ù`æÞ•„UUd^îG¹³ˆ –ãYü”3À”Ýäj‹ŠÛDøO7%ç£KØ¢‰o¤=»o„;;º/zÎ:MCβ‘£Ì¶<«¢R@ËÄ)Uøax--ßî9!‹„¨æ«¡…¼‡¸Nú3¸a -ªÉw+ªòQ#]‚5ñ‰%s.ÆàH Š0u çĦ¤3ÆH€ ·Ùßû#ʰ–Qi<«˜¾®û÷/×®[Ç‘ F¡r(¹ÒÇéÿ/Øíê–a=|GY›öDÖ-B.VËKÖ"ž¶ô¥‡WÂH¯,ƒ4ñIƒs݈ô†E8rK2 ²ÁÞ³çúÝéym錔×W§ÊR“uZF§™¤Ým5›í°gØ\6°£E·â¥ñE´9«js6¨AåT"Éô¹srálÿ• ÂÿÍŸ 5kÁ’™j¿îˆ•;TßÔÒTP{e°‚ʼnžˆÐŒL â©zуƒ¸ì¦dc«y¯]Ô´¶èmQY!/³2/.xˆîþˆ¬ÄÔTg¤·Bz©6ǹæv[®AoËÙóžêj/ø›á½OX(*/³©ÍJ» ¶€|_ö›ÛVes!'m‡·!rƒ1»ÍþÚê@M î<8¨šþÊþ‰‡å°Â²p™¢Ô\ê(…•ðÖù²«¿hoÊ`df‹èÄ0¶ K"d]4,p¾ðöï Šâ-°ä>M5µú;BåEzƒ†—Ù™Ô«;n^»píwª¿î&ÜDx¬ç“ioçdär¹¦­…fô‘ÑxÌ¥Ë'/×rN<†£Ë&daliªYs½dÀñC²ÃÌg,~øC™iç`õ‘±ßQd™ŸÒ‹pjH‚SX—„áÈ,Z—“c·[…9·Í㬂Ðܵ/Ðt샠u¯¾£,),T7·ÖÕúü~î?VƒÑç‚.ýøãƒxïà̲è+,É(á‰^FdJùNãöÓ8£Á p |OVá­$EÀØf±KÌŠ}‰B“‚^¸êí­:îý°ép¨³«ñ( <ÙDrfË–3°Ü¾´`SÉö<åû€¬4Ið×à…¦¯Q"7Z²oYÂ(ŠúùEóCendstream endobj 215 0 obj << /Filter /FlateDecode /Length 279 >> stream xœ]‘½nÃ0 „w?…ÞÀ²ˆ–dÉТhûŽL"Ž3ôí{¼$:œ€O>ñh²<œŽ§> stream xœ]•{TwÇ'2C¥¨ŒYXlgXÑ®U|ÔÒ­[mµ¼Z_Ey"T!‘B€€¼„ä^IxÈ£<! 
„‡‘Ú*b±vë±]Öm×j[w‹­Ûv·ÊÒÞñüÜãNÚnÿØ3çÌ93sÏÜ{¿ßϽ?áîFÏèˆøg‚ƒ7»¹•nÜB„8Dy /!òr7<ñãÛ—ƒx)Ë7€Ù*ËÊQäKsöÉR¥¹Ù!²ÌÔ€pU¾4;Ušú ‚ _Αåæå+v$KR¥ÇŽoyö‚XEDÄ~"šˆ!b‰ƒÄ:"Ž!ˆp"‚x•ØMÐÄ Â‡XÁ—ID11/¨Þê±QsÎ ‘ÚVl+³†ÁÞ°—¿W·Ö•H†4…ªC¡8z+ÞŽc*YšlTŠJõˆ‚8*¦€ÂSÞÜR€OÊ-µûŒÃ¶uß®[„ÄE_z~œ‹KºIúú€¹6“Å·Hz>\Z§ˆ¾GD·&¾þv¶~w4ƒ[~|m$rh*[ÎdŒ1…¢ð$eEÏë€ôÀBßÜã#®æ.ÚÁâô@ð„w`ùÓ ö¥Ç„Š%‹VI¸ äDóůNö,húaõ‡kšû¬ç‹±ù£kIÓ‘–ø¾«§ðóÿñÃEØGö#³®¯Œ¢§å=ûcüÑ¡ÂCI9Ç•i:)¢ð¸- ¼ÀG èÌ|wFÑ}8‘*H:*OW¤ ¢xÖ|FÔ88Ö&°ƒš‚‰Kc¯ÒU»Þ„^íu&y*ýÝ’Y^ÁÇÁûÏͦM° ïììåó,Å]âi×_>5Þ×7Ü2„®£s‡;ê”z%RQy$JÐFeIó²ÒJÒ¥¶çÛåÃhBsŠO.8ï’Æ~™vŸ!x~ãÝ÷îЗžúÙ…?þâÂ/w ï‚%V‹æëá)X”–ªKQ!EÏ(° ÷Xߎ³ÙYÉèÀ5¼Í‚ñ÷ˆŽ.tÐgì*ŽK¯úÒw†ÅC²³gõ姧زföÈ™¸ny³¬!§QbmFÓÔÅ÷[Ï^žÎ32•HÝÎD­'Yú^“®©Ñž4i+ ‹¨‚)EÊvU}Q}‘¾Q!¿Uar x©þ\ÊžQ_>öS/ÿ¾6…ÙÙœéÈù¼¶Y*UZ£)a±7‰ju5¨†ª¬¯jnmk3C“±±ÉàfÈárÅËžé„h¥Ïw‹ÒqX6æKÛ,¿(ÁÞL&Ir÷‰PV]V§ÜzäRÅ›ˆº1võë”’¨D_‚Š)IÅ®ØCÓS Ø ¤ßtâ@ˆ&b¿ÆÎ½¨áâ^ø#õýäȬ­SP¸šäÿšÙ‘cŸÓ¼…ÞEW,Ý—õÏùÁŽ)¹(¿ ÎÎrcŠÜ¯öô’½µÙ,–“Õ{S^ ATj2ú¸]ßÚX˜åcø^L߸ælÁ)€X.´¹˜·‡ ØMÿtb7nÜœ®šÂ2ˆÎnƒSÐËK¦ùbÚcÚrά›X;µÞXTWhAý¨¡£©óì]ç×ÿ˜õkÔ;åÄÏIÈv4¨k¬ jIT­©ÒjÔEU'ÔEQXìWžWš[™KiE˜†1KNÑÏÌÛ!Õ< AúÌ~ËÎèêêQeªl,WÕÈKÔŒ<%^qÅ¡xsî9Õ¸nMPw4ÙxÎ2Úcqt ó  oýNö'$p¢žÿâ†ÊlY£>—îÿî4dÞ7Þð¥¿)Äù¹e(I;²e‰U©i(™Âl/ºùwê⇭gñz&FTŽÊteˆz=½o’m õm-M-Î"GÅ$¢n/8¿?Ul•ÛYs³¹e@Ï“ªFjJÑYÒÓÓÑÙm—™³ÒÓ‹•9ŒV[«ÕòBU«ê)úûòöŒ4CÚʧ· ’v%9¬‹Q½®Õ£Á7,ƒÆÖú6ÔŠ¨»¢™ïPÄÎUª¨];®W°o‘LWn|Й¨Ƀűa¹rSW1s¢K×[ÛçÒü™üx>TúLÂ6ì~OÝó¥oOºöi@F½Qô•ތşGH Ä¹—Ûˆ…$}{´búÃŒ*¹ü¸º%c‚_‘Æä Fr†“™Ÿ4Õƒ‡œ‚X1à-’» ¶œ:×ǯªÛo‡¼Ä–‘Õv¾¸oOLÔèøHµZ[ƒÊ©n3i4©¨:E0åÕæ¡<žÂí¢é©é7g&æf)ðÃ+.nÂdl@²-oxÜl—™Ó~Î\>Æ…ÁáãŸ)ð[ Kax¯†_ûÒ§á7Ü_ÅVëÔIÞ‹c™‘¬šDûò·‡•äi™P.UMÖ•· ýØ|·è©èýÊôfÖIóM½iM\j«%Ñ“Ž3ß0*°k¤Ô¢yvv‡€¬Ox`~už¼÷ì{›šYúê‡Ö[ßúÿ/ ~ S×§dŽö[FyÿSÊÉ=ä×q!„ãîðâŸCo5LµSñ“n¹¯äþB¢½5Q'kŒÅ*e~E›Y½Ÿ?ǙDŽ-;¼< â¿l£È;endstream endobj 217 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 533 >> stream xœ%OÏ‹aþ¾Õ- ± (•Î%ö"®±…Õe±Cz±µƒè° 댌Sƒ 9:æª_£3ã¯4EA(ëѵkÿ@]ƒŽKçåûÜÙ–Æ]^Þ÷yŸÃû<ïy @ÏCá¨ß·Zo’k\_#7Lè$²< ¥ud5!«Y#ß®ââÌ]Â{—ÂbYÙlžKï¦x*æ»ãKÃOù-uËç P34—NÄ*çSt&Îd ³‰4ÍçOOî¥x>{wsSo<“ó²Üîý•ЇÒ|ŠzFçh° O=ghêìSïÙLö%OsTˆMÒÃÅ“+NceX€€ƒM°nP`ðü€R·-'Ý M!Þ ‚‰8ñg»Ã;¥AC–³\®ˆU¹ÑÝ8¦¿ÐÅFUb¯ÒëÈò`àÂ;F%ô„ÜÔj*r:ݾŒÚuÅÝ”õÛØýŽ·ñ¶CQGïõ­Ün#ÍÒ—º%i_]†;.|…¿ 6-7pÁ>ž é4 îã/ç y$4vÛð'=„M8‚[öEnÎ0¹ÃÌs‹Å|¾0D~DfGá)ü¹œþýµëˆ³ú¾†êÈÙD5$Jž ãuÙ uK­õFSÇÃNߥ*£~¿ƒ£ÇZQA]4B-U’çGÚµ‘léI½’T­%—ÍuѼ5µ^à?‰-êÐendstream endobj 218 0 obj << /Filter /FlateDecode /Length 162 >> stream xœ]O1ƒ0 Üó ÿ µC%ÄB†VUÛÇAp¢†þ¾$@‡w’}wòYvýµg—@>¢Ç%°ŽM¤Ù/ ‹ªã0íSaœt²»éðþ‚Õ@v›ïz"ù¬O—²ª¶zCsÐHQóH¢Qªm¬m±ù“öÀ`wçùÔ(µröJŽæÇMÀ%FâTš–&¹€cú=|È)X!¾2ÂS8endstream endobj 219 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 269 >> stream xœcd`ab`dddsö ¶±T~H3þaú!ËÜÝýÓù§3k7s7ˤïó…¾§ ~Oæÿž ÀÀÌÈXUßçœ_PY”™žQ¢c`j $  --ÍuŒ ,sS‹2“ó|K2RsK€œ…àüäÌÔ’J°›Œ’’+}ýòòr½ÄÜb½ü¢t;): å™% A©Å©Ee©) nùy% ~‰¹© `gêIçü܂ҒÔ"ßü”Ô¢<ÆS&FF“|?3»üxqƒñû™Ì? ü}É6½{z×ônŽï*ÿʺ~Wñ{€ü—þ}`ã“ãb1ŸÏÃÉÀÈx^¤endstream endobj 220 0 obj << /Filter /FlateDecode /Length 2602 >> stream xœ­YmÛÆîg¡ÿ _Á­Î'šûÆåºqÓ¦0ú×m|àÀ^H ²»± e°Ž QX[šgiþù…‚ tg5}]îå.‰XƒE 1žpGcö”^Ä’„,J-ÒéL áä—rÑÁ\‹oïâPŽCÒåÉ»dô™Æ—wQïE|¤½9üªË'QýÐW¸@ˆ’¤>÷)D6ÅÑPAZc®ðMÙ¦c1‰ nåŠÃ¾nÊî®òŸrFö¦óZñ­íN›ƒ™ZròÞ딤@8äÂyxW”‡ÞjÁÅ !  
R=eŠ|°z2©¡ÞöÁ"ò¹‹ÐA—­yhÝ®&&Bï€Q¨Ö…«áÁU­Ì"\Ùâ3*Ô0ÒÖ~$—äÁmMAçY0Ì`¥‘°.XOú‹ DÕ•Á7krÚÔì›ÎÏ£*Ø¡µ†¸0ÊQsfi’qØÑšàuJ!÷h×ù<ɥ̳ ÿ[Wxy6¡\$‚S&=7£Ž›ÔÙ,?A£"ÂI±7v"AÏ,…âUÊè.rôµTÙº*eä®°!È >؈LÆqH´Ê·fë ¡d Uê f=C‚¤a,I³œ@ç»Ã7jÊV–  —ùèR #]Sàb.¬ÓìO°“R°«»šâT ¹›CÖCKäcSu«xÂRÊn‚Zür©y([3µ ä…Ä÷¡¬,_uñiç"šE³;µÀ8AlÉ)Œ0~Ö ±B!Eìò›ºº?a[Ì<°Wd"[/AµëŽXB<µp²Ã^é¦Ã®MÑ,[ ì§CÑ%ŒüÙ´mY ‰Òlãn–öåÃ)ÙÖØè` 2¶>P뤋ܹÐÅ£PÐ$‡•üž}$OzýO)ø?C—@Oÿ  œ‡TÅ{›Ž\@’"¤iÜr\e´µ}[~a€ÇmŒÿÃM¹¿ëÓx ë¥t®Éí) çÈÓë¡î#âëc}Â!«7ã}DS$±ÖR ªp¢°¥’Ë„B_×nÛ¤‰Àzv9íœÛ¦çˆseÊö¥‚aÑn´…3;sWµ3X‚à`LØæ0*¹¢º?˜Ö3`„HAÏRó×^}%2µ}ÿ`º ‡¤¤¬àìz ¼ zжðëL·/Èi!ÔmÒ[s*%¹wŸ5Ç”® R¤9®°¸…¨çKMŠÃ§A?"]¹ñJÁû¦Ø–Á»PÖ¹Îl2¬œF$Åw&*„)çým9í§7Îõ0nõÎ#Ì@B…ÏÂï;à%eû6àHƒ›$·Ö?)nÑ&‚ @0u|VÀ1¨jU‘Nå`¼Jrå1ú÷¸E¨Wn7øj5û¦et¾og¸‰ùÃ,ÿiÆ8—`ÑœåҗͫƮÿr˜]?y4²"ÜÊD¢J­#Wî.hl Ì”£{‹\d¹ NùµíPZwÂÐÛ†Ý.O¨y–䜃vÊ5NaP¤Bž_Ùg€5Ñe©¤—SŒ`ÂFž}ÒÆ‘fH? ë÷šY‘—ç?χ&©„f‘%üêGY‚J”Èó`ÙoàŒ%ØŠâPlÂŒR‚rþ¤9ãS*{›¿{ä9òLÔFÿb7Žö§Ó¹¥U܈e 4ƒ)v);e…3 Þ@ã„å"ɬþåhÎã è§7ðâÜ5£j×*‘xvpˆ°&¶à0a¸M¨n{3Ǿ òŸ!,H{ZcyÛ"ȱïÐÙ›¢³¼Á%€æ”AÜ|×§Î’דËÄ q´d‚z¶`A0¼)½Î2àù]ù!6ì $íå‰pçŽ0˜Í¦ð9Ôû!ò’Cù¾nw£µ -mÀR< ÷WßB¥´v7¦Å.ƒ'} ¿ö„ÀeæºkÄdhWÒ§M}8UG/† ÜÆ:Ði`v‹AÛàf¤/[¯ÜQýŠ4õ •{au¾ ž¸)³ü{DÚAÜ7 ¿ðÆû]¶5SçòPxÏê"¾ÌÀF(„ß>kËaü{Ìø¦˜ßøbŽÇøŸ}±íܽ³ËnÝ_ãÓÏ^ã#ÓˆK¸{£Ñàwà þñUöYu¹mö§ýq,¼ã|Óv'Ýb»õ¼òjHF`YçT̼©ƒMŠéñŽ™æýI°h ÉÝ)ËÆ,G+Jtuqˆqp¾rÌhÒ è8™ WË ÒðD•oyy€”Ô ³çDŸ'ÛÙÿçw4é%þsÒAúß ú±G?<ýóÄϺÚåE¦ž¹@à…Ð=a™Õ·³ÿ ˆ^üendstream endobj 221 0 obj << /Filter /FlateDecode /Length 1705 >> stream xœíWëÓFÿõð76ÇÙÝ÷q¨­„Š*Š —H ï%n“ø°TõïÌÚ;ix ÔJå$b{ç½3¿™y•ÐŒ%ÿúßÅfB“åäÕ„…¯Iÿ³Ø$?Í&ß?&q™Ó\'³«IÇÁÆef)O Õã.™m&ÏHzâß”e”fO‚ç³_&ŒeNI“ÌNfgÏȯù›r³ÛLSjœ³äaù‡_—«jÊHUÄÏšø¦-7y[VÛ Yù›¶Ú¦OòëUŸ;RkÈfJñWY‚²Ë&pã¹""ü€‚”­¯ÃQ3Ø¥E´ë‰owõ6Rk²¨ T~g°ªÙ-¾iÊ×¾§dŠ\í¶‹`gOÅÈë|½ ÁqZ‚#ûMٮʑ’¶ZƒMÛ…Îh²Z|cŽ4óé©H>¬–i ^UtF’ÔÉÌIÅ>jFSÉUí{Ÿ®ó:ßxH r¿‹¶oîLoMË„QIô)–Ì H€H1xyÙY'Í×uUƒeð„´CÌ1>z#ÈoõœÜ‹¾2ÒöŽ¥‡]§ñ;>ÐB8­ó·)Ew¤CwLf¸ñ)å‘J•¤HۻܧL1?;;ÛÇæ?RAé± Æ¨P‡*$PÉT!>¨?¥Ô¨àè…ý° ¬=¸2ÔtY.·åU6T¦0\h´„×òØoÅ ’ƒîó•ÝlÇÞ^ Gº‘ÙÁg6¼¼äÞÙÙxï]f>ؤ©¤q–)³/˜GÕ”4Þ’›iÊ Hd„+/õ²ª¡H7ñ\B·Sná”ò:ÐÊAÊ/±°‘˴ίËbývjyæ¤vЇã³#ù¶ˆâõ  aËï+*¨¢’”D²XWM$t 2¹±µðRïöÜŒS ŒƒûÄ¢ËNö¡‡ÚB€c†ì|–Vàhªþ.%/ŠH&ÁKñŠ…cœ(x ÞYI`Ð4®#M™#ð+A˜ëDX0ºlé‹ÀàŒ€LÏÑÏN–&e4Žc$ºëÛG )ÓIÊi¦µâNü€ò,À‹îX,Mt†â‘EejÉ$"ÓLÓžƒ#‡‚ aYîÏ&O³dÙL¨lr]óç —Z€àDZ F¹d3áÆ¨LÙý—õäò½õÐò¾·‚ÈL Hí0î]o=v@SÐÖw0ý;¤5ªZœÍ”–VFâ/ìh*] ±Î¸sœC`%ø®:–²h§@by}(Ÿ*ž Ímé—ÈÂ![¥9x¾ž!ÁMoéÁ`â2†}¨·tNBAb±nÁ©Ù»ã5– yÂ@ÿ^ò«É‚lG.ºG9Îû\´| -)m<†M‚I%I×2¥Õ;‹²ˆ/ÐnZ|¶¡ .ºÑÄH²Ê·KßAÇÄîö7¦‘ŽW½g™ x€¶ ¸»ð*qŒÖ «/v¬| Wê¨ÁáæQÕŽ„µ«¼=•ü0¥’òg¡KŠ1V¶¿¦{Ã!l@ºpκ5±cÔäÕØ¤]^ Ø.†%³a_<†oIòFšUµ[Q‹!0›G~0cd¡0Öµ_ô^Á*#4—.³Vä" lj°W=žü B?Ëendstream endobj 222 0 obj << /Filter /FlateDecode /Length 3018 >> stream xœYëoÛFÿîOí?P-Ь\‹å¾¸»A[ ‡+Ðz¯Ô@ ÄBKk‹ItI*®ïpÿûÍÌîr)…M‰?„"‡óøÍs‡¿-Ê‚/Jü‹ÿ¯÷åâþâ· Nwñ¿õ~ñ§ë‹/_H½p…«Dµ¸¾»o𪰥X˜²*¸p‹ëýÅKÖ·»ãд‡åJV¼pŽ5ã¥fu¸,yÅúæp¿óéQÅšÁwuzQe)Y{Ø=-­(œRšË•–%Ü7ìûv)Lá¬äì‘®œ±Ì¿¡Keá²»B&ä(6l›>²äðè÷å ˜h®*ÁêýCÐ@°m=!:ŒL{ß5ëzÕ´Š5}ôýò×ë¿\p¸£•Y\ÿxq}ù’=n=ê.Ñ<‰V ‹Ü*« ۉJà¯ØÁ×Ý 8îê.Ü.KǾ÷=hÃÑVЦ©—@m50=Ð]^ª‡RHûûÁ'¹[ׇÈ~øºoÂðPK¶:Ŷ¯ÙëøÌÀ©²úÑáHÏuçkÒÍF[×-¾ýf© .YÝ5õàA)ƒ¿%¨Æù[‚-g;m’š³>ÞwüÔ#Cç÷>?ëzØ!!B,IGø•~€Ým¼¯@¯Jà÷þ°9‚r}|¥`ï7‘;Hkö{¿ACŠà^´MñÅŠü\ÉÅõÂûçmƒ1#0ŠKÅŽ=9TT%±ïýC Qœ8 s1 è‡:PfšP p’ d°û®Þ,¯ÿužuBº46Õ‡DsÂÈ•ª£-ÆÑ<#nª@ô’ !ýà¸4 ·€Øé¶õCŸ ‰h:Ëöm?$«!•‡®nî·!¸œeÃ*¸T:`„殄( ëˆ%æ J¾k»¿¼îÀJ* éœÖõ²Oñ±[¼F/BŒvéÄ1¢×DÄAÓ¡¿šE\s(]‡¦¦YN¼Ál cìQ·„­sÂAREZȵö.ª }±oöMh‘&Q? 
’‚ƒéÕ*>ä]›Ü[ Y™à_l‡LÙ¿©>ñ/ÔV¸#O Aa©„BçÅd·Ž`ß×ce}Š$UÈP²´Ž 7±&XC±>´Yw~uì}|•Cʶû‘ý4BïÒ+.VÒFÂûýq7`ä)jl‚ý¼´8ÀdX¿7~h TÇþ¬]êÏq +Fs†Õ¤A9’´¯©™œ™®B¨bµ¿1xŠo(Å»&†¢ µ¬Ž¼8iL}uè‰Á@/8òeòŸtïr_ÒS°»ãa‡'4ŠæH– O0„Âz Œ‘[75éªf¦b¥V\^› %¡™¦ö<–Q%§….…NÊ|CÉ}ð÷?ýôÝ·x#T‘¯Vὕ̻a·~¨o–‘ bë?s6~A?ÅÂõÙg‰c…" £Þøt‡3˜XYöØ Ûô Äb4XJߘmþ>[FQOùö*_þ’/?¿ü<šˆFŠ%%?û,S=úÌò±Ùí²zS£0¹ÔnÚãŽç* ÏÂ$pâ ]ád$ðà„Å Á‡•€¾(KsÖŸ&õ½ÏO!Z­»¶ïºvsÃ<¸t&e $ÐN 9´Ã¬µª€€Éâÿ´võ!ÖN ^²çó~ª;Ÿ= ƒ”ö§R ¨@úÀ\ñ‡pa£½AЮj’}Ópø4D~¨ZS,Ŭ—C@3ñI !þ¿¼'[f¥Çtι>u$IwÊIÁ.'Ný%:õLF£5ÞNþ7«†³ö[õa?ŸhûÑráš×7©Ž\åÑu7|½¾aåüÝ,§±–oQýq¿¯»§¶'.’†*ßÀQbhw_s¿â:ò8›*°ä6q\Íü[¦1qî!Ì8è¯DÇ}.`hè 0El2¾š}8C'€ÿæ‡ö°zQ?l{ìshOo‘µšÉŒÄx(ϵï…ŽÝ!‘à,² ñÎÄó¬J\¯1lÞø3¹G¥òû¦ÞDZ›P®uЭ'BwP„NrÉ«;P{Hî©Ø ÒLîGh¢ ¦v4c+PQ¥É$–]šÙ]ç£!x†Û{0½?Iö]€Ø÷Ï—Ï.à¬,>;Ô$ŠI“6E`ì»®…!kƒOØF$|²F²@~“‹ý [HLÕã%¿ú5ÓœÖù/Ê26°ÌF¿*ENj^¨©x¦úJ@`WóËË˹`|)Τ•å¹4. GJ}*M•z¿4!ß+o­Jw"B Aöý"0÷À{g¥æ§æþÐÜ9É0°ûçs”Ùo¯€é³\lq34?!{–-ÂpË¿^åGÜØ«âä6Ï?^e¾Ïò%áûðºs¾(8)g×4’»0Ô~àyÈɰìØGã‰Tlø†Hc(s™$Šü(ö¡ø¼Ä|Ífù¸Ÿ€À‹ = ˜o/DdUT¥Ö©¯_E™¹ã5œŒêÛHÂÚqï‘ töà’ö`5½–æybf¦ó|é¨o5ÃÈÿ­qžø)ZdÔD°n6}Ö¯!A–²,ë:.E>šBrâ`\%}Œa \¤«UѶŽJ…ôOA¸¥€ŽõŽ-$­±‚‚äšyÄÃjvßLNìa`àÄňD+Â¥³".¿èœÞQ]8ù±¡ÁpÐËœðùd f­Ÿ"7pÆ}]+Ø£-º.ƒmT|à0ÛlŽà5(A_¾ÐÓ8TÀXˆq¾$Iä¶›ÉM[TF*5®£pè3ÉKx6˜ÀǃM1—ZòcOÔ\z¨BODÍ-‘Çmmv¹™«ñÔ°;®bé± G^ºæ§¤c`Qxº¼k&oRt’Ý÷Nag&maËó6 2:b)ÂfŠÞ¤1#D}üƒJІZ±Åã®{bd³¡5–!wM¢2wMá8 Ó.óy‹4оžlì‡CänÁSôm!ü¨q§·w>¤š™*Á÷É:„ŸPho>¢¬l ¾@ªÃqqæLµñ!uJüRB°G!JÛãòþ%=¨÷í1n¶@}„ÍXSÈ…&l_Î6V¶$(ÃêgCK“’8×Y¤Íb°M?»üáò,òJN7PPq¡ÞΩu½Û¥/V¥íŒ1¸9¦dz[ðöܧ‚pã§‚P±ëC{êü­£YãÃ0ð¾µµ°w>‘AýÝ=Ž»¸“ÍøI’D¨­ß=äh¼;îÒ£ØX«aq÷I2,.bpr‡óÏwSRpjˆ†ŠmpØxÚ{ŠL`Ø¡%áôFrôÓò¥4/oiñ¾§Ø°6¤Ï ¸ÏëbýÖ 9}}p*iO'ü5 ÚxL3QR}ùá0~…i» Oü5• z‡ô »‡ÍÒåTºªÛã}bpö‘dâðàšèñ2{Âè2~O@:OÜ’Ãât~Lýø‰8=‰Á™ÈæC èrZWèã`¬&•+díKšÂjc©;Wؘ¿»¾ø'üýZx¦{endstream endobj 223 0 obj << /Filter /FlateDecode /Length 4060 >> stream xœÕZ[¹uv^y ’óÐØ<,{­®-ÞŠäÚ ›U‚µKäÁ  RwÍLïöe¶ª[—üú|ç¬K«f´Úu`Dz˜®"Ï!y.ß¹°~X”…\”ô?ý]ï¯ÊÅíÕW’ß.ÒŸõ~ñ»ë«/Ÿk»E¨Tµ¸¾¹Šr!•)|©®¬ ©Ââzõg±oNwÇåJù¢,¥›åJS”2ˆÓ]}¢']„ Äú¸¿?Ÿšnx³=åoÄqI¤3âí!³¨Äö_/NM{¨wyHŠúþ¾‘µµâÝrU¥ÕA*PíëÓö˜Ù”Ro–§»&oÁˆgM×mëÑÌ·wÛ]3l±mV#Æÿ}ýWRÁ·¸þöêú‹?‹Ýû8.M…n—+#-H8hŒžJ+h3<൸mëͶ9Ä­ã\]±\UAay+þké¨q¼&8ÚĹKÏeâ â ›úTÓS%êÃfx½;Þ®vÛïyD³Û²’’´èg…u´`’ l¸9ÖQl‘I7íq¿¤CÓI£Ü´&%‰n»S|¢ 6ïFB¨÷÷,Aƒø mÚž´#p°¢Ÿƒ³v'RY³I¯‚‡c»gmÓxÞl»S»}}NzM³î붆ý5m÷ïóËçFíKY•Uõ¯Ë•U$A-öÃÏ߬è·» ¯K:õ¢~÷íöû—$EñIž¡EwªÛÓÓõK±??•ã÷ÛÛ¥?¥¼Ø×OÕËå“eo-Væ-ü šöŠ$Ýfóô³ßýÛ7/>{¹ìÅ<ÚewÞïëö=KãpK7>ž/´²™÷jæïÉI?;Èf6ùûúÝvÞçÃyQDû#mi%­‘Bòæé0iÔº'FÛÿ¡),XU¹ Oª~o¡EfÖõÜž7§s{Èò¨€ ›† DùÕ@Ø×kxñÍy—f„ÖÇÛ¦½mëftÂOø-œ#급¼ÄJ*oôœLÔr%K[’ânÚ&íb°>&_'3†1~~¥e¡]¬zF`ež‘w/ŋӦˆÜš¶=¶ùZœ;xSïÎÍ0òŸíËh%QʧÞh“•¬&KTÑÒû3|¾ Ž@Î*K%Ó¨Q¡T^Ò)ÇÆxJx¶Ž6Š ó³f`îûš'35¬Ly²5Yaª.3_"+r#¾Ò"NÒTâ7ªYÉjÐñ_|1È¿Ò$2^ôx_loÛ›b°R²Ž4^ò{¦Ÿòú%;F¨Œ“iŸb% O¯†!Vi£Î^“×rxx5ðý|ø9Hð§{ëelž€À «tP#ÖØtr“¦Iì™[uFöè:mÓw˜u´ÐbG 7bK~Hê6ֈȂC$Û®ëÝ“ÌJ  õ°~¬iìχ%á­åKåÄæVY„,øï&ÓkÊnøÉ°1£çeT¡}IÍxKÛ’%kä”R@Û÷x[âÃÀÄ¿ñ~LüŽbû yª.=ES€Þ’‚©×J|ó¢H„Jüû!Í÷$ám—9>.%ƒKT3žòÍÔêÇä=3ÇèB{p’Ïý¶×ZÔ*½†*v5¤Ngs%+†rÈD.ªšMá¼oÚíz™²7õGt1õãì(2î/ÒŠ§76Y «,0~r$°ñþq-ŽD¯§¯c®TˆyFRvK8Š9IÊÇ8uló$Yñfw¤"楽8‚™¥ Æþ]ƒ)Ú¨6œ¤E&ªÏž(›3†B|2f‹Œb˜Ú!*BÐy_)uwÒ'y6²áCÚ(ùð€ºØÙû”KU^lÚã}tR]tÍ™÷þÓ³˜^aUí‚%ó)½™Ï&º›æìúx¢ØØVDs‚#/‚J„/º)£aX``¤˜Y- $á`Çÿ&ŸÇLS¼OÃàp?èðxŽ#[?EõÑ<0ýý·i²Ã!'aÖi v§gÏžñí$ËŽl,e˜ësCùjzܾ8Ø9Hi-ܲÑb×nIì°ƒ#[\לÉgèHòH¨2®¤…›ìóÊ?ŒUuÞ¶\G%.£ŽýÂ?¦ ÜSvXHÅ"ï… ‡$ãX‰ð¹vÚ÷v9ªª¸<`TåߨzLâø±d? 
ŸääÝqß¼e ÄA3Û_B¬)¸JðåÉo––tBž„lˆœLIŽû×™‘é¿ã¤^pÕÉYùyK­‚7øq¸¡Jc5‡™ÉŒl„¤¬Fís½ƒ10Â[ß/="‹˜’ [ËûŠPn%ÃÝM½>Å'ùú”‡¢ÛÚh‚ëºË4>Ö¬6bã'©’iü •˜4àXCÒ@R)5jz6ëWt0åT:ÍWHج´UžüRÐìH“3Œuá©džüw4WCfž3¤ (Éœ—œ'yÎç^Ðn4‰°™ÄûAHnà;7ÝÂûê¹a`–V³"¦Û1x%=¤°4´ ªÀBfE:™À­É9Ò$²¾PÈ\U^ ðÀ«%錬J$”¢ôÀíâd_.`ÁN{š ^Rù€#Nb"MI4Ø,ýŒD=Ø—Á,&$1gä}'\)I}Š`¦ÙË}K·É£¤Œöx¾½K‚ðQ*QêÀT¥/+¢)·¤.Ÿ´•¢‘Îί³jøµ‰ªáV€·'àùçSŽx”{RÄ‹G!Œ9¯G¹ïƒ,.»4±²øúúêOWåâ›ûfR†¹ÆãÃB—Xˆºfâëåõw¤C3š S™²Z@N(–¢xþ>j(£f¼ÍöµX¹±@E’ß2 ÎVÍÚ‰n†….*(?Q¨K3™,bò"šÑøj03ž­ é”rcÆ#žý¡O8“¡S]8¬ž/ŸƒÙmwEÉþâí•„jQ×Cøoäb¥TÀÞMÿfwõâA}ÚYu¢¸õ`©TåSôRî“3’Ü+ï>ãT­¶ ¶?ãÏbÊW°:NÖö`Œ/m$¹&¬ŽÉ`Ì”YU ÐÈ2D’à€·ðiKRMLYXãJö¸ÑÎddegìÒ•òVÅcˆ§Ñ;¦\«B²ù•ð“JŽ †ˆ@ Án@DpñFΗC¨SNÃà­é¬O/ÏêýØ"à8K›úÅ‚™À<¼$þñ”a@SçRç-|F\UðaÎX|¡J¶€Ušãto3(Y¼Gù¸§RÚçá'OU›TfäšZrfþÓ]SQ‡ Æ÷—sÍKúï ÒLØ,¼Ç—!aúßNÔ 19çtÎ9§ƒ¾¡éXn~ú¥ËèBK†nlÙUÒýHOdÇùKz"»öDZ2é26."êþxܪo‡ÇþŽíÛtÝÀ™¾¢‹Ê9Ó¡4)Ó›‰u\</«&™>‘й ?¾õ¨GW¾Çs·{Ïý¤J.Ku• n>iËí¢øÛq“"Þ Ó¼ÒQ§Æ6—“Y´%u¡çz»¼€K}¦²±-¥«É;ˆ>ÏÐb}lÛ&–=“¤þl~Kýœ&oUVÛ {º©Û¹æß CÔ!5²H ¾äF`,~㿬«g Ç#ªVÕÃÈsYèRÑú)…®ÉIÕ˜„;H,hñ‡ã©ß/•˜Ý1=…ܱŒ'SIlùécxêwó\XÜãunæxÑÖÝžîblÁ±gZEO\‡îOœÌ®£é£¦n­©Æä¾"["=iÍ)àwªƒ©êà•]wÞß§V˜œ|ØA×)}ÿ…Êñ×]ÓŽútùÒ„¯]T²*^þpØ4ãî SÛPq"åhÇÍëë¡ñH5ã™ûxzêþ俏“é: 5.:Ô1‡TS“&Uÿ±§LÔJ¬›öTç†:_‘ÁkÚ_J­ÊÑç ãž}žñdh«wçõ2ïäŽ_CÀi¸¤´}߂Т‹ÝtºlÚmóh'þY2DÌ]ØEk0dhíêyÆÛ:™2±µxl7@c£’;·&ÞѤ/vҠϰi"lÖ Òd"ã;»×iJjÃ(£øð€3mɆ™¨Ã•84ìFÆÏ^ŽydØÚ#ÍúËNþ@DŽ39Êû´ÍÐóšìÇvØhÌõ¨ƒ:Á”Jl3e¾ô)U§t{DBdpN? ÐôއẊ?¥8¤¦]9¥£Æ± ¾;wùb bËíôL9¹V:Üîšðƒ¸–¤U"iC–³ê¯Øc÷³ÇõéHÆ¢™îò‹x#)îêÞÜF”Ãx<€c´èÍ(º¸ç$“íö]š[éþk¡¸¤HðJfñ¶KT!F³._Y®Í@—¦OÇ6œj#ÍÒýúLBK…ÛñÀ™¿2Rñb€?¦Ê êøþi;ÜPÇ»qÅJLaX³'­»óþ½™¾4;æß£Þ"‰ô¶¾‡Ž"Â)è½}îÑ„—@Àx<ŒQr²ùOï¸\`·Ù4{@ò©å,MsÀ“énD»túá%‡0=Mà˜cEöx¿‹y]ç{ÞôšNi@ÿA—cŸ»ü Ë—­ûË;ý¼|üµY)+ÉW¢õëx ‰š¾/œÂ,»m Ï›}³_æ&ñ8`°3h3m¿OKØöº~½å[!Ò  1ú r?tyÄ™!L1$Ì$ý¨û`˜ O9ÿM¬,JæËL«­z¸€}ä–†ånr¾€­}N”&ÿúÑ*©–’¦‡Åì·Lt²r Ôâ—Þ Q§Ñƒé¨`Vd›p‹^œ²…U¥ã’96;’ Ø\rÞc|!•ã¦/Œ»péñ±zzÚßËm%ìÀ33Xaéc=}?©Ûâ¢ÞÉÂ"ÕäJÃOÖo-åÝÜÒžÚ’` ÁT±”*–¯¢ÇJ*7nþ)Íp“V!RKÚ }ç†Êu*/ PAI›å¥Ë õ°þ¸¼æû%òqb†ªÕ¹*Êëq½“ŠQ%@›(]}¾º$‘¨ZÇE=rx§ð粯À‘Ú®¿3™öx¢«‰y£Ñ ɪ I·¨eErPtŒôøÉÊ?êÁ‘ö!s5(/·«°œåÃü|¯.Idt·úAcz 7øUZû¡×ÎȯâŒI-æûYÕm™ôCWGVéÙë¦$w_Q£k$÷ʸûQ¢Ÿ5A¤Æ¹±ègLÐO@¥](ì¹ï<~5&a³&^+sêñÞŒ0úÞL•z3²’E¥ˆ—)¤“lÿÂ÷†ù§«ÿÖEøÉendstream endobj 224 0 obj << /Filter /FlateDecode /Length 329 >> stream xœ33Õ30S0P0b3cSs…C.=C aŒä\®B3° Paĺ@@`bdbRâäÉ¥ïé«PRTšÊ¥TÏ¥ïTÌ¥ïà¬`È¥ï 4;–KßMß-È °±Ñ(JMÉL.É/R04åÒwÎÏ)ÍÍ+ê´³ãòtQ¨˜“üç<Óÿ™2Ûþóð‹>yolx¥ä}IŸÓG.î½Þ¸pŸ Ómy–‹=<<Ÿø™þÈ+šüöcò9Þd×4ÁC kò.rr^à–Øp¡‡‡_D)àÖG»–²ýœËþ3Èö÷²6VÊ.p0æèîeŸáÆÖâpeƒxƒÒ—ì\R¯Òg0<®c÷Hxãºôf¹SÆ¿^[û.2Jp¸ð¯þ£ˆîy©×/ipdfðݸâ%—«§B šr˜endstream endobj 225 0 obj << /Filter /FlateDecode /Length 162 >> stream xœ]O1ƒ0 Üó ÿ „ªRÄB†VUÛÇAp¢†þ¾$@‡gé|wòYvýµg—@>¢Ç%°ŽM¤Ù/  Uƒq˜vV&N:ÙÝtxÁj »ñ»žH>ku.+µ…КƒFŠšGMUµµ­ 6ÒìîTu[P†KñJŽæÇMÀ%FâTš–&¹€cú=|È)X!¾:SYendstream endobj 226 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 355 >> stream xœcd`ab`dddwöõõt21U~H3þaú!ËÜÝüãýÏ Önæn–Iß?}ÏüžÃÿ=S€…‘±ºqÂTçü‚Ê¢ÌôŒ…Sƒd a¨`hii®£`d``©à˜›Z”™œ˜§à›X’‘š›Xää(ç'g¦–T‚µØd””Xéë———ë%æëå¥ÛLÑQ(Ï,ÉPJ-N-*KMQpËÏ+QðKÌMU€8TB9çç”–¤)øæ§¤å-)Id```b`ìb`bdd©ùÑÁ÷“£ùÔ÷™§¾Ï8Åxì;ï÷Cßy™¼ÿQ"úÝé7Ãnë˜ßLÕ¿åµåÜ53~«wsüÖ{ìþaÿûß¹ŸÊ]zµõ»f÷gŽ?ÂJ¢ó.Ì9µö袻èæX»¡È4´.¼Æ[¾6X´!¾Ô4Õ/+46=±›#ºxç™}³/Nß#Ï'ÇÅR™ÏÃÉÀ0V†>endstream endobj 227 0 obj << /Filter /FlateDecode /Length 168 >> stream xœ]1à EwNá@hÇ(Kºdhµ½1&b B†Þ¾@’¾¥ÿGϼnƒ³ ø=¾(±NGZý‘`¢Ù:ÖHÐÓáêÄEÆû» ïO È2»¨…øSÊk}jözMkPHQ¹™X+D×Ó1rúo%÷ÂdŽä%'‹„È3[ÙUe+ký –Ÿ Ó‰¸ÅH.Uð Vx¬£ßmÁ‡Ò‚,ös×V'endstream endobj 228 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 444 >> stream xœcd`ab`ddduö 21T~H3þaú!ËÜ]úãíO=Önæn–I?\„¾§ ~Oäÿ+ÀÀÌÈXYÛãœ_PY”™žQ¢c`j $  --ÍuŒ ,sS‹2“ó|K2RsK€œ…àüäÌÔ’J°›Œ’’+}ýòòr½ÄÜb½ü¢t;): å™% A©Å©Ee©) nùy% ~‰¹© Wêçü܂ҒÔ"ßü”Ô¢<&#aAãXJoüèàûiÓýúûÃï\×¶3>ûÎù}Ù7濇Š&²uT4úV76·šw×tsüŽ`;»çêá㛟ž¼´û+Çw‘ß¼—~+ÿ–Ö°Õ 
ßÞ4sÙê¹[ÖÎÏé“[¾ãèÊsÝoîjšzÛyÉÿîøÜØÔÕÝÕ"™û#•ha隟Lko½ÿ~í=ó·ß¯ˆ^îþ¾ì÷Ìß3»/svêþ½ðûÌï3»¿/¼"<)šÖm>£èCíÁ®KÝw»¯v_˜}dÉýs¶™Rç»ÎŒè¶èèvﶪñ/Ô²®Iîæà“ãb1ŸÏÃÉÀ‡h»bendstream endobj 229 0 obj << /Filter /FlateDecode /Length 2387 >> stream xœíXÝÛÆ×Sòéí®Ãý"—F .R\ Çhm½YÌ“ö$Æ"y&©³¯Aþ÷ÎÌîry:ÙŠ¢}© œ¸_3³3¿ùÚó4áóÿûßM=Kç»Ù‡§Ù¹ÿÙÔóç«Ù·¯d>/’"Ù|u3s'øœë,áy>ÏSøÅ|UÏÞ°û¡ªËÁ.–Z¨¤(8{=l1Ûum‡Ÿ:I¹dƒÿL%»+GWþÞ­Ù÷ž‚á°1…ß\¶^,Þ®þ6[JžÈ\Ï—œ'…Vù|µÖõq±iQ ¶Láošf~¦0 nÊSð×€`´¥à0Ö9  ‘y¢ªù|õb¶º|ÃújW—tŽñ$fy§øÈ)¤&ã:²@BRáXÚ`ß »ÄîÚ»¼¼Œ 3‰ Ùr¹\\ÌÆ‹9^W»¦ºI‚R Û´[Û?¥Ã¸žFU¾¢Q•À7EaÓ"SŒ?Øv5ŒÚŠ£wq õ–jg¾wɃiï"Ý‹øÉGñ–gþ‘P97gižèé)ô€ŸR o¡Œ³ûOÍb) ”¤`›²ue! ³8jàìãBä ¸|œ, Á¶­ß¦hÚ! 4Û—°ÝÑ™L‘pF²²ÙÆC¶±ÝîÇY’Jņ‘:š P‚¤Šmmœö#5 ÔÊœG…Ë‚‹ ˜mcÉv]¹­l³d†'‹¥âœXÓí_ÖyGÔR2ªiS¢žòy°CPïÔ³¥Ldax7œ`uùéEõ~±úõÔ $Ê…tÛ€p[ßÑãeŽ^Qø[Á-ÒŒš?Ö¶£ådzÖ6vèƒ;¯Øó««+¿béQT~‹ stŸƻÀ†}Õ‡Qްq5ã©çθ@Œ>µ0i.3‰ÊÓüDsdÉ o!Y¼½E+IØš*éï ‹ˆ²C»[:¦æØÔ‡$08.)fT{PhÁßNh¶õd¹f×£ÖF^k¯{ÛÝ-4À%D”CÕ6¨eHm+§¢3Û÷-à CUF©eÎj[6ã^ZrŽA—ÌYgë§NõCÑðX×U³ƒÍ€æM×Ö8R$¸Ó-À]µ«Õ~4"Ànöè×§Š@ùØlðŽO=V•œ†‚ o6FÈïcÜ+›ç1X~·tß(R ¹FI‡Š•†ýv.øÿ9æÌ/Žd6’Äo"ô†¿U49ãsGˆÆ‰·®Å®K¸ÎšõºaÍÄåmµ^D™ÙòÁ­×Ž[—Äd²!Môåš}ŠÙg²V׋_Ä·tü̶Ìr&>£®2 ý§x¾iã½F€õïmg§ºytº<ômÌ>@Ë™»˜†&‘Â…dæCÓ…‹J¡! ”ØËK·ã ž&J›/Ñ … ‘m°]Ýe‚œ}¼åG;ùn»÷Qþë{K„Äz@Ä’s÷¯šmuWmè(ΰÚû:¹x?U^8óû89A~?§H«Çú‰"ú:xÈ“ 4m(»áÙf HxÆÃ<†»ŠÐ  )`ã™X/žœ–,^²ÌInb‡}»}ö Fño|ÁvRÎLDF´”Ý=0ö[O‹MLôùÈç߯$NPüsù©ªuD)(Æ$È3Û¨;ëêY0èp—ž‚¹k$Tõ¸…Ô,²’Þ“HÁ|î»DMlz¢ÄWv8vMtP,ò\éežNýk³à^ÝÙˆ–ÕbÕõX÷Ù)p+È—&=€Dw“‚}h e³Mظï·"Ç5„ç üaPh»uÂ3¾J2£ä¨IJcYÄú»Þ–] ²ÓÑØQ@¹{1{PøŸúôõ¸ç(þË=‡óßsü¿ÑøƒFã¹í«-ú’Òœ¢f{ã¿AQVx+M–bÕŸ:5B0Ê…JKÖüToT T½>¶¸I¨‰>¸úr„`ÀåBÁ¿0L›s†•¿DÏÅ"'Pì=!`¬ÿ¨Ê  ­f•iAüíKCyØA%7ìkä[ÐL…ªHÙËW‰Ÿ–?¸ÉØ(c¨¸Šµ/F.lDÜáDÆ3ðý뱋Œ•··]»Õá§icDÞ¾¥K˜¯†>0{@pš}XÆv;gÚ<†:àmJ LŠõà›Qþ›ôn@9„¨õ£›8º¦{ûå+"Dq6ìÉ?ÓÛ!C¤wå´ªÌ%¢ ìz¼ rhÄÜý/îjþJ8%7¾ òX¸òF®P™Æv⎮­¥¡Œjö¶98ì)#ö¤Ô˜ù6FäÁ–ï)4Î÷¾ÐújtðÆÔ(V·õ§ Ê>Uy}°‰‹Igá÷×…!A£&« 0’³­cè ®aè‘7¥¤æûªL@'0êâ*×u •-ÞY \¾"u¬4õ|¾9Q/‚Sƒ2LcR(>Ù£îlù±êGþ[øþ¬×ÞëðqQ…û\g©Ò³}”o8ohH#ô\Èz=FÍZ‹â)ÉI¥ÓVÝ9ô×þå‚®×ûÍÐm–Ýä¤÷\th챦³Îå•VA ÜW! ,« G`W€žaüŒ€ÿº T=¬Òë…J3 øžŽ ë>}\Q©&¯r!(u+Kfµ;…­š°a`ò¨^â“ND6uÙ4p#pö¡SŠ{Hc•ÉÑÃÆÈçp˜ôÑ#¨ !PÙí¤q¾…Цú'(<9[Ÿñ“/<×Àn|ŸõM ¯ÑqÞ;¸BË]î§ZÓ{€‡? 
¤j6‡#å<©%½¶ø÷5L¨C€Öª.bBí»/{÷p¥e‚z—ŠÂDÅ~ãx®§ ãý±I]˜ý¡wëhM¹.rC$OŒ>}°™:6§ç¥²˜n”{+ìJ?iM´á¸SyÃ)MÇ~Žh~ˆIo€þ”KŒ¯4mÐÒ»¢£YœDð‹ž”*Ç¡aßµÇÝ>D(à|5“›rØök<YtÛí ÜO‚ u)Ýü/Ѐ¤ï‚°í¢¤ôKpzÐÓ†ä¹{|w†°ÂÚW¯‡ðž¥åY 8*Â_X*" `‘~ú(ÎzñÈ x’ñ<ûÃÇJ,Û3ÐÿõT{>ë@t6¼ ²¥19…_ÆS¤õãjöøÿ/ˆæ@endstream endobj 230 0 obj << /Filter /FlateDecode /Length 3184 >> stream xœ½Yݓ۶o_ﱯ̓Æ/†ÜKHxr™©§N“ÎeÚÆ÷æËŒyOÇXÏ$eçò×÷· €ŠR·N}3,‹ýÞŇEšÈEJáw½¿HÛ‹’gág½_¼º¹øã÷:_¸ÄYe7÷~‡\H•%Eªyj©Üâf!^w}½/ûª{¹¼ùñBÛĦÚ-VR&Îdùâfsñv€Y® 08'Å›~“ðHTmÛ´ôi’TjчÏT‹åîX+ooÅWC!˜â77…¸].¸ùëÅj~¶‘þìýq¹R©sÀ!V)gØ´\I“^¤…uCLCd~™„³£g¤V/n®/n^¼]½Ý—¼ÏÑ>™¤J.ñ¿:¡]8Æ"ËLNÀ&R%—*UˆÔeaöKU­¤w·âÅ‹ã©ÄÆë ±Z­–Ïùð7õöPß'‘…X7í õtdâ;`z>2‘ÀÇãZBÎÀž¼”GïÆ¥‚–ŒÜ»d6-ÇÁ»ïóñSä­Îüc¢rYœ]dÚ ½4½TàŽ,¢Hn -:wLÃmUÒ4}Ý.#Ž©nç6Ñô›Õå±­ýuõ±Ú±43“‰+}îl«’ŽkŸÂ¼ÉE×WÐø\Ñõrq]¶ÛxÚŽöXÚãDµë©ˆÇH¹Óbß´ÕhVtK¯9¢.ý}ˆÔ9gw0--T Î`AÒ§ãÏæ°{ ¹:ÙNTÇΕ¤§~zÊ{ ´ò°—v¥_ʽœ£=´sF|\¡}ð”¸c­c“%Sïéâ=#?´#¹¸o›ý9­4ybRõY¥”‰Îí`«zÿ¸«ö¤39‰Èˆ¾ÚÐYt›ŒMp_ÿ  çJÔÌ/Ðû²n‰} ¯3¸´CWC茷Eh ÙHC^_Ü ×Àly€EÀ¡n:¯ÎE‘y!OÔ¹(=CÅºÜ "Ô3Ò²Ô2koņ"ât1è8„B;˵1Lýý(mÕ]z-${éŽkåÄè8e< *ñêëÍ_Þ\ŽøbÈS•‹¨YÙ¨Õaó¦º;n·õaûÕ&wÀd`rü.ã§èΩPVë¾ K`Á¦þ=F1p´Ë7ÕŒO|ÍÜû¡Ü«ô§aÒßEæîR(XR.Å0ºÜE÷3Ÿ» òßD,ÿÊ+£Šú^YAt{]ï¯Ô}5iRètp¢]ß´ý­H¾+×õ¡zûöÙ¦9Þíª¤B>DE¯Bñì‡B_åŽÕ‚Ýrb½Ê¤lUódUÁó îßqæ’RæQÑdÊFU>>¶Ípfù 7ŠvO6Ū¬Vi¬Õ¡\vú$ù¤¹Š(cÌF’¦®lSµ ”qn±nöH «–ì¹h¾¢´-Íg–Šåó6ƒ²)9K+q”ä»yO"÷ZËÁ1>p.ͽ"ò&Ò×!·âõq½«7Uy ð8¤8 #0¤eÞ÷…I.åC©Tøô[ú"’]¼ô%d·GÙÃ>(ÈƃJJÑ`d¸óâ2ðß$…Ò)ï-ÿ'±•B…¾ û?­öß­á€[WT‡H$Z–ñý9Üg¥¨œv”Œr{*˵·cÌÙ”ú%Ïk8Ã(QÊÃh'ê–&òV•A¦]‰i²ÊSïó‡Bâij5U¬Oa“…À'—¾dbFlâÉðJ‡æ¸} óÄ}À(<öñxǵ Ÿžåm_ò„-X}×eWR)À5‡jÜÈæ>#óLù9©þ|_€";KNñ(’Ã?ú¾ÒæÇc×ã>X õ઎ã7¹„Ò¯—…&EE­ÇEZ_ºžR^J(Rß÷ã»tëI¥Z-ØÎWµÔy¬"¬72O¢™Äyå2n+ô]ÆÛxQT:ÏΟ ÄŽW‰§±1=¨ä¤@q'$§D(Žà}“‰GÖ OV°æ˜ó_Æ2Ê÷“Tì¶l'Ðqaw&ÁR¡£2 ŠÐÙˆŠtvâoRÉ®ÍÃA-Æ­ñVë–jÔº¤Z_æ¼ÿO Ðìu:nÑ8çñX@Oû~Ù>õ3¹¯ŽÖ¤õ´3WóèÎL.¯/¯šxTF•m\P!Pq“¦æIt) ÚÍGíŸ{ŸëO#îšèýˆìþN+G ‘n•tìãͬuÎélÖ ù¹j›$´2$%A§±òÛClÅf±Ÿ`êcËÑÅ/©çý þjÆþ-õjêîúÞ«ñ¶ADÖã»¶þ˜ Ø}:KdŽ¿ÆMaî…ªÐ: If@ÀMœ¨iÓ¬øôPqãÎņ«ÈcIígBIºogJj ‹*Ü„¾oéõ÷þ¸ûQu3t_OÝwkçî)ó*ÎîÉSa‡ÔIËö±ëØÅÊ̆›ãPRÛ 3Ê}¨3ŠfÒ¹ØV}lYdúlÇ‚Nùj|¦ÙŸ_®â “Åt|nãvð­Ø5Û]ýþÕe|ö±EÙöWë[±?^É8ï4,|9¼‡l÷啺]^žkU"ý¶…¢<Îwt7WÏ^}óÍ7Ï.ùN ´¾mvW»ºC.ëËß«ô2_„„©6ðÔX¼]†ŒöäÆÝq¿/Û'Ð=f¼³ä|öÊôß?^\÷»ò§zÜx`kµ«kˆf3r¾ò§¤|gÞ!‰CãSÝžpÖdU͘l.GdÊŽßcWòô©‘Ï-ÉñE’žýßz9âªãá¡v0ãk/½ä¯Ž0ãvjsÖ“C&q'jUhfê2`Ç-ÁLäxÝlW‘qÍÆS&PÖg‰-2=€)ÿÎJšsOÝA_Ånú”ýÜ¿n˜÷ùzÈžíοc#ì¦aÆñû+Ý/>d;qjö ÙŸ«ç,Õs8Š`*ì8Sçþù뛋àïŸ8ÛÃjendstream endobj 231 0 obj << /Filter /FlateDecode /Length 205 >> stream xœ]1Â0 E÷œ"7hZa¨¼ÀÂBÀ‚ë  ¤Q(·Çváé%±ãïf³Ûîršls¬#ži²1å¡Òc|V${¥[ʦíìpú:ÞC1ÍfÊåUÈòŠ3šS×y=jç"z”€TC¾‘éƒ>F0”‡¿«õ\pŸ—«Tα3.@Ÿ\‚Šq)èAÅèTŒ(AÅÈ_÷¾•sìŒÜÕkgvì;‚Ì(i¿á,>k¥<éJ4²$M™~[+c‘*Ë2oÀiþendstream endobj 232 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 1178 >> stream xœ]“{LSwÇmñÞŠÌõÒ° wo’‘ñ‘8³èÔ ‘©è0£@qTZ (–òZk ôyTÅ@Á;“ÊÔÌ=Ñd>6“¹dº§™sn§äš°2ÿÛŸ'çóÉ÷|¾2"'2™,T“pÝÚµk‡è@”<°R’6ðd>i „) ,äôÊ«JŒz—¾‚d‘Ëd|Ü6SAQ‰Å`Vï1é æBu¼)_¯Þ^n1ê úÿ- !Ôd¶èôc>!{‰†ì'ñd;I$;Ȥ ,±‘G²Ù#ùQù3ų·dU›Iø‚|Ë⸈¸ ób6c„ÏÒTûOdÕèŠvêr·Â8Ð¥õ~ÍÝÒo€â2|ã+Ü€«¤ð’Z–Kݪ¯Y˜ª¿2âíë»Ð6 "ˆ•}æ#ìƒ ZÀ¡ºSŽÅd°fƒô%CÅOhø‚ìþqâòa;FE¥ˆcEÉ͉d•ÖÍr7<}µ‚t“åæµØåc¸õ(0[0)_Uy²*(7—;xdttppô³}ƒéÉ{µ;­¼—HrÇ’àÿJbŸÀÑèCM™òOd´>\6ÉÝÁÍó+TuÒò7y#Ë}û@je ¯þãÓù¬ëVÐ[×fw:;¡SÐê˜l{RJªÍ?Éc/ưœÏ'Å † †·úGâ‘¢Ò‹¡h@FbPÉ=ÅøÀU“ÇÕôºß#³ïB\uB¶®Ä¨‡tú6®¨˜›½Óÿ¹›o°ÖVž¶vgã@'îçŽÖÆ{ S›²ícÍ,.SOÆñG˜jdžœÞ‚)a¬Ñí/¸mÖŠ»¥Õjöƒ™I™©ƒþïÿºÙÌ·¹Ú¡­>˜°¼rf0BÄÚ2åXðÁAF²òsÄùm*-Û Í® ½½ŽBAÊ` gu¤•^“ÂÞ“b%õ?ëñu øaº{b˜¦aÒöØœ'€Zʇ…@@X©ìE³ê2{ïìs /Ï?ÏŸr4:\v*cÁö‡épª) Öå®íÞ"Uøó s¸{7ŠÿigûUëѤÂ/˜®æÓ]OÓQ.mÚ“T‘¥Òtæ]° ><§¹”é?x§ü6Ü‚ûc_6µ6w@›³­ªé8”@]iuEQfJi*ÐtHw›/•ŠuSp‘Ž„Üì~òì'Ïš÷ù—ªKZWýv[D‹hœT^E& ¢þ‡Hî$˜¦*5U™ rÛ•S'›U“‘ Q)÷U_û7·ãÒÙ¢iíŸr~$fòŠÁKëYh;ÕÚxF,ûÔ>ô1ÊÆQ.)öîwK¯«îd NÒ➊þ>w—g4 7/¿ÒZÌ;jv»“:8NÕ4P»³ Mº¨cÌ;µ±‡ Œ=©#å´UR5±ÓãÓüßÌ > stream xœ]OË 
¼óü´>ªIÃ¥^zÐõè²BéÁ¿hëÁÃL2;;›YÖõ—ÞšHÙ=8xb¤ÚXprs¤¾%UM•¸ªÂ0JOXw•þõñHÓêEßäˆìQöeT-!p '/ƒ´o$-ç¢ÕZ´êÏÚ-A¯›Ç³(à<1i›“(à> stream xœU]HS†Ïq³Na3…A sn !Ñ…Ù„P”¬ r©éœGº¹mné~4çðÓù“ºMMÛÔÍéÀ¿©•^¤Þ„u¥)AF uçŒuÑ´«n^¾ïâýÞ÷ùP„ƒ (z,[,¾›y8ã’P.9†;˃H÷)|!âxÇï YE[ÏJO#<5šÙ´Ê –WU3d±è²H• 2#++3•¼(e‘·”Z.“*I±”©¦R&ºÔ‘¹´LN1†#Ëõj†Q]MO×ëõiR…&VWÝ<¼’JêåL5ùÒPjUAÞ¦• y_ª È£žiGšM+TZ†R“bº‚R+á5ä"A#÷ޏ>²„ÖÇ œ]N²úß³9ûn?ºrÀ~ÿÁc; ­òšüB°ƒtîË1=˜Ç8ÈÐJM¥dƒY}š\ÇÇ Ö €±Çß± ~— ìMí6s¡»ýàQ.`2onblÏÏòˆÑWãAè„)xÙTÛl¦Á€i‡½ÞàÈâFΰL"«¡´¸!TˆÚ"üœH¼Â䇫£ËÕMŒïn.­ôŠôfI³œp#eþà~ÈÏ~ôëA`›Ç½åN »ÜÑ„lô©Óhi°5Xp{kK­M¯»r¦USI´[Ú¡ôÒ ÀîÀ‡kÞÍÀáœîvÂ0¶R>›þI$Ùxêîèpà㟗æ£dÃÏ-¦Yk3EµJÁÅóµSõ‹6,ÀÌ`0`ʳH¥U¬¦žA1è‚k=°Ø´²Çª®Ñ—®TnýbãwÙ„)\À¥X[³Âo(ËßšÚæ±;áXáÙC«”õJ£Kç›ôù}x¤ìO·°Íd3ƒ+^ Ö.³øà¿·ZÚpæZiy`z›Ëãíéóý¾ßÀDߨÌ^—ÓÑïpB?öF6——RòÔÄ—å™YÀ†úZTLk£ZLð“üÌѸòó¼N©endstream endobj 235 0 obj << /Filter /FlateDecode /Length 2957 >> stream xœíY[ÜVGâmÅ7h…FBUÏ4×çjŸŠ­T¤ @!‚t•²Hxg¼»&{k{’ >;¿ÿ¹ø²ë$оð@ö!gÎå¿ûûMšðMJáÿýñ,ÝÜœ}ÆÝî&ü·?n~uqöåK©76±F˜ÍÅõ™Á7\¨$OÅ&KMÂ…Ý\Ï^±Ûêæ¶~·Ý)e“Ô°}[×US]ØIsö¶n·¹øÀê9X ('à‡3v¿½ønäéÆ$6“9Ý0‰6™Í6;ž(«¬ ‹¯ßîÒD§©á¹‡º 6O¬x3{Šæà¡/ð @áÖ~¿ÃKüâƒøU¢Œåj³x’lw™†d¿m‚dxΆ۪§_ 9W¬¼'À©æÊVïê2ÊÐàf.ZÃîºöª.qƒ³ ŠeíÕVdXáÑOZ. «ÚSïáY›³bËWe’õN5;®d"$‰š'V«ÌÓí ”‚ˆÐ̯3‡æ¶õhÀÅÛxƒcI›6ËYI›D‘dû®,†ò°j–©>¢«¥W–ÜÓ%?¦‚,ÅÕÅ“§‘ Áª&¬3Ë@[íè""gw[`K¹±–ÝÕÕ¾ª¶‰¬[ÁÚ&2g3Ö^e35eyè'L×§šëüFÊ,± g™ÔÒ“P9 Äkƒ-” å¿Þæ’ ˜Wy;ªš~(š} ¾`p@mD8ÓšÙmv,¶Â>{v¥bWÛ§Ü2÷˜;UM\3p_Þ´¤PÓžt†ðf«5¤Í%+êSÙ{pÄïp[ ¥B,h¶çPŒð¬fC ”$ ÖŽH®Ë·ŽǶ‘›‹çg_¼‚)÷e7Ãô Q,4ü:*ò—{Ú쌳OïçR†õù«9,ó“r?Œ`t Ù½C°N]~e{ŠGÜö—bó»šìfÜí®…TíuÆŽ[Ç5¸=ÕC5†ÉdbœœÎ3þ Æ})vÓz 9^œÐìohÖŸnnÊždC{Ö)Ûy*­SáèŽûtïb'ƒtÒ V‹qÛ?$׸éʾ€É Ù1R“³CYG{ #/'4ÞCá\h⮆5ø˜Cœ:›&|铲9TtÛæRnn¦³``½èªâªvÏ\b§¾o"uM?‚:'ø7Ë•H`‚Q+äøNÀÐ#j+ø ‘¢¶¿£¶Ã+\‹ÚžIáÐEBG ù r¥›vç„0©äs cÙ,ŒÉ<ɬÑ>8¾bõñ’Ý#ði¡ þ+, î·$²Lç”ãæ“Ù¹¸Ün×r¥Nò\ÛˆÒäNáͪ$Â9¼‚~«½³ßTdì6ìs;Af®•õ]¼©¼÷„“…ÈœA¬ånÉ÷#¹«éÙåeQYd+,›Ää65ËôœŽŽ‹˜kQ12‹s……a6<ƒ“ôÕ ~xèª{ZÃŒ” IÛ8Wû´vuÂ-næà‰ß#ƒÔûˆœ¤|5cd_œz÷V»Dw(†"¦z˜¹â‘;S¾r†;¹9P8?W™vÑèÐÆ;ðÎvˆ?81·-ûŽkt…‹+wÈqÈ|vpÞh‘Yyî,gÕ̸Tlí]¸¡òÀ[êÂùìnÛPP5×mZª7>Ú*;ݵÎý´ÀïÆ(„Du¨|EÅàà6c%s:Ž•Î<ìwÓÝö:®9û EÌÂR®ÒÕŽr7ÕÊåí²8@DVú4yáTLÇ.«E ñ¥jª¿¨ô"Äĺ¯²ú‰ªbdÏv—Œr‚ ß¾+:W“ uÅg_á|Ÿòù{“p8ö1ƒÀnŒe}Øâc¬´>MøàÇ“ä(þ€²PÝÞTxëë.)í}Ð$;óihÊ ”’‘·o(ÓIárå2Ô4]yÈHOã‘òn$¥tnl»ºzíÔ*¡Êººë}e¥tâp€ Î@éÞC#9/Ò¤CgÈ·CD³³Ø—Ð"G AL„Gy¿û+q†RÓÈPÊêlQÊr͵‰ú’99Àø |™ä™2<^þ™ƒŒøoW£÷2DK¼ G0:§w”›ÿ0Wó®‰'é;Êh!ýóï|Ì…„˜Ëõ£{Å~òPc »Èˆ‚\~ØBõÿ¼y.¤±¬#9HÍ"ƒ—Œ§¸.ÑéS"GQŠT eæ0‡þÈ æ(ýAF”•’Sš•PÏIöbÌ ÷}aEƒT9Y51&aÐÅz XTÜðµ”÷̨Äölž¯°à4*ô2C9²©’ª ro! 
¢zläÑ‘£™‘Õà0iTªÈÈ6DEïÅ ѾÏ€ a=SÇrBŠc¦4íÚoíg%¾ò hgÕCØÙŸŽc±{ª}åšÑÄIÊßBW]…ÆÀWšª Í®OÍ~«¡GÃ(uö Eùó6§Ê…{xÒÏi**,eã«dHõ 8­†X)ÙÐZ†!&EðË£]¼ôvñ`«•ÞüHj¸¸hsifÙ¡$ùkJή¡m±Iް¶ãØ!~=kr]ï«\MöË_S„ë´ä ih‚©lœ¿?ðüßmÖó‡ ¾›–AÛíüUÕÐõó§ñƒÜøÕ÷½\àà+8Þ­ãp¨¥«?ûâ³ â”ø´ÆN>|=½OçÒAß‚Ó_¬â¦…þ7~™NˆÃô)Vùõ» ×Ô®¬¨öBµüt®Šhu—쪊‘|¤ÀŒ (4¥–lHx=±þHZßBZvMÇ3 ý ÆP]—u_^aÌÉOgö¶‰\Mìë™pp~ñòOÏ.·O×Ô tYž‘›0võ ‚•Â6޵(ú›‹!’ôÏ51×Íè8šjqÿ¼z}Ô1CÜE7œïÉŠÏÓÙþ½8îs!ÜËótAÙŒÈòXtï.ÙqšgeËÞÄò=ÐK~,Á¬ç“=‹“„¯¶ŸSùo¹yמ£ìÈÕ·MUZÙuôq`äïAšNþÐ]zûòn6æv«ç ®­Lù$3 Õ'{Q¼€ËwÿXQÝ<ëЇSéñÔ}d2QJ”O„¦¡zFƒ„YZ:˜|Mó|”'&ãÿy?Îaäþ¥¾ A¡°©¤÷§î®«¨¬3Vm }›6÷ó$7½w—ÉžÃÄ ËŸï«ÒMå@ 7còßCÝ8²Z¡ D7ò_žÝÐ9ÀªšO”6®ÖÊ Ebw¢ma¹kôéë¥z³/Mö~=ûô —ÜÓיƯ+ O¥¡ûq§/²ä|5M‰ãŒ)Ž ¥¤žQ¸©+}»³)Ř$÷Ø8ÔêÃY«¥û"+%Ûúª»“Ñg¡þD.åU¶(¯bQõ¨~p³Ñ§ãU _ùný;7&¡æt§PA§îÛ1ㆊ¸ggÄß¿¸Q¢cendstream endobj 236 0 obj << /Filter /FlateDecode /Length 163 >> stream xœ]O1ƒ0 Üó ÿ JH• ]:´ªÚ~ 8Ê€…0ô÷%:t¸“컓ϲ¿^®ìÈGôø¢Ö±‰4û%"Á@£cQÕ`¦}*Œ“Bö7ÞŸ@°Ènó]O$Ÿõ©)«j ¡745$Z¥ºÖÚN›?i vw6ç®@©•³ÿPr4—8n.1§Ò´4ÉÓï™àCNÁ ñ6SFendstream endobj 237 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 384 >> stream xœcd`ab`dddsöõõ4±T~H3þaú!ËÜÝðÃÿ§>k7s7ËÄ\BßÓ¿'óO``fd¬ªïsÎ/¨,ÊLÏ(Qˆ105H† †––æ: F– ޹©E™É‰y ¾‰%©¹‰%@NŽBp~rfjI%X‹MFII•¾~yy¹^bn±^~QºÈ…òÌ’ … ÔâÔ¢²Ô·ü¼¿ÄÜT°3õÀ¤s~nAiIj‘‚o~JjQc&ƒ##Ëü|?V6¬ù!¾æ;ß'ÆïŽwÝcþ¡ð“Etqå¼Âü¬‚¬ü…5 Ö¬X¹RnןÑ™UÝíU]í­rU!qA‘Ý%-³.˜0mò<ùÉó'ÏŸ0³ʦ3×Ož?qF÷ŽÝ±›}”‚‹ÔÎîž0§g¬ r /íÞ¾­›cÞ´†‚öîºÖ ù–Ê8…ÚÌ–ªîΚ̶ÚÖÚî*Žð=éÇ¿þ.¶DŽOŽ‹ÅŸ‡“þq‘çendstream endobj 238 0 obj << /Filter /FlateDecode /Length 2944 >> stream xœYKÜÆv®{L€\âÃ9¸'ÙaúÍn'@ 8A"Àò9ìÈ5ÃÕ2š®Hޤuÿžªên¾–’,K‡²»ë]_U5߬x&VÿÇ¿»ã_½ºxs!èí*þÙWO¯/þð\™•ϼ•vu}{Nˆ•:s\®rn3!ýêúxqîïÊõFËœ5e{>tøÛfœçì®h×/®ÿÄŒ³.Ësã€àõŽÿf½á™ò^k6O9Ã7&m.ßãnn„¶’»îðÙiÍÊ7£µsq ¡2. ëê$G>"›Ì˜\ÒL¬¯ÿ3g®3ï„ì™_Eíñ«º¯Z¤o2ïYÛ§}ÑìCÏʦ©›%ž^f¹à>òäŸÅ˜,™XÍm:С—^êsæ™3Âô›ß® h`…bÅá\SzŸ³ói_nˆÙF¢È\¯6BdÞè<üòTî³õÆJž–!”W §e_Ž}Tîq•’lWï×dáLÎêSyZ§‡®ç½cE“ˆy0ô«SÝ"†½»ƒ#‘"rç®:½Š<{Õû ©æðÄõ”ê)yû¶lƒz¤”«ë\\ÿU 4÷àEVÅŸÞ|ãøÈÖ÷]u¬~,tŽÄwàñ¶Œc]U(2!z¹õûóŽÕÜ4f ‡ŠÜ7õËCyL«ŽÝ6uz‚e¹ÙWÇòºde5«ê…7Ê#St“*V%UÄfä…@€ŽeP,€Ëu^IvÚ¡Å•$æ$šR L`Á·eSÆõà‘Û!²Ça(rž9™ë×HÝ‚©ýBúLÉ> nXÕF~ÜA*U‡CäE.nšr×e!…5™ðù,ŸM0¡CŸ€(ˆ?ïB 䎡±5ršíÑÐ`H!Ákõ©íš¢£e ïòG Ru}4¡C5ˆi4ÈE!BB`ƒaó\‰þ½v¨q+Ê‘®xM¤Œ“×w#î]/ñ {Mî'oAî*ýMqšE“` £Inϧ]»À:ÄÐÄJ{B©sK‰¥ói§´bØ÷e8<¾¹¾øî"Àó™åBwmÝÊj›)£°\0•ID½Hõs ZY+Çobf“ä€cû¤’Wc7!Î@¤w‹ÉáOÞc.Å“öàÉ|ÂÁáB;6ÿ7çõÎ.eïÓâjœ—‹„¥•B¤½W ‡r Žë¥Êh²ÜôI„ií9ŠÁ8zr©F@Œ\ÇXv9GÚ&ÅÏ-T%Ðnž½ØÆÊ&-Ž5Jí®©p Ϩ<€½°×N´Q°cÑ5Õû¸¦ BÈ1ª1R âÅÙÞOÈ´ 6vÆLÌëKÙ/q³1Ô’Óò,åvÚŸ¬’\¥ÍOç Ê„2˜Ïå^¼&shpiø¢×\fuì}°žA­£`"Ø*«ù j34å¹rj‚¦Â(è9Ç8–]Ù ëo ¬òµë"¬o$`‘ù,0B’QUн?P.¸Òˆ,·Æ“+­÷V¹E¼õ¦WaÂkIÆ™¹ Ô¥ut×Ôþ2Ó*ïM; aI€M…Љ¡‚ØB|(¥Tjþ†Ñ®4º»*`PÑ–Ãàp€`ÜK«§¡ò¾k*(Eјz$ô&Ï´„hŸ˜LY}òÅÇÐDgÆ"òlÐzÞËpD¬7€k”½b‰“€ŸÐ|NÎübÆÆ¹q3¡ŽíêÆa§#ôèÈ8¿&L°„¤^Añâ:W£ _,àhžçÊÌpšgRf¹s̈b >åÐRD½éÀ¯I2éà¡Ûv䊱ôeÂ…~²Ol4÷ÞĬä=7Ó÷‹G¨5ƒ8í¤qæ‘Ýŧ¼;£Ì>|â¦5Ï Sz`þã\êÙ|'v”z˄خQô¢!xãŒ%•£õ*+³¨˜œF-7R}l¦€ÍÑçv\fm,p' …ÎÁb¢àkœgP¢ô~Að Ί v-:']~ZA0PÃÄ{õ(²Ü´úÀÛ+ó3"äcÃ'NxÊÐ|Má˜Çb2›iÊÂÁ1µ€e¡}ðaþ턜gö|¨Âhú dÂ"Žk8O¥+L@t,Ö*!6HóZÕ¼:Çc`*Ä|„©j ð âc³±‡ðÊÒ0¡À*õ4öi6üö¬ÀŸa8>Td z‚¡¤êîâ6¯ÓH[³0§>­M‡<;Aån—´¹ÑÒFùæÉ‚ôÐr€mü<@§t`&Úôtž~’Ž3jþ¹C>Uý­§~40ÃùpÍk¼0Ã,Zì`ŒÜÃø #¹“8$çäCî&Úâˆ:ÐÚÖ¡ûF'Ÿê.=‡¡J2ͱâsC3lô0>A7P¤³á5 "â£åz•±ÿ-EêÈÚO–õ;öÖ¦¤Û²4D—q/ÔQèDFb@à®Æj<]f°éˆ¥SÇOŠõþÙó-»½¬ÔvEÓ]‘œ$ÝÈÄVÂú¨°^a!Üb‘ºz”ÂBsõt»Ûs¸Á ֞ǢyزcÜú¸|A-è=±Yø·N@³´H\ú¬|×Õ§Íóâþ®Åë´`ú|bèLÚ¢¿-›ùüÙùø'î ˆeõí`VHìþ¶lˆDûuâ¡Fz^vçæ4ÙÕû’6*B‘ac¼ÿî†ØjlÛÒV„ýÄþDzIOP×¶t¸«£xúk˜È½Åv/±†ÔÇ;^ ÊÏǾi! 
H‚F_]@O€3÷´)(ãÈE“«Ñõ}¼ Z8MŒâp »‹ôZ­cA_Vâãýh$ÃÈY§îä!î19µøý.I°p©¥@x;V®ªdlfMÀ±Gä±ñ¢°=Ôët÷?)#3ŽŸºN»¦ WhFŒHzPWIhprtã!¾‚Bº(&ÓpR#ôăë®zYuU¼Š„^£—PçÓOÒQüÞcÎB/O÷óØÄ±'m<ËÃgÀ…²3ýʺ•Ä/rûj‡Àz™zj5ë©ëÃ9ƽQ“–}ú±¬)AÜ©zмôò”€<–¾*ôwœHðÅNX›9Äpé3çrî\ÿý绋ÿ»#­ endstream endobj 239 0 obj << /Filter /FlateDecode /Length 2878 >> stream xœ½YKsÜÆv®<åqõaošM¸0æy(¡«Ä”•JÊ%WdVå úí‚"\»`E1¿>Ý=3x-(ÊŽÊV•¹ ôôô»¿î}¿J¾Jñ_ø»=œ¥«wgïÏ8=]…?ÛÃêòêì›×2[¹Äi¡WW7gþ_qé£ôʤ:á­®goØÕm±Þ(.“4ìØæïè«€¯šÕ7þ³³+«âýz“&i&šó}Ù­…MR®{<2ǶuÕvM^Vô2†umxË+ÛÀžgì&/›}‚Þ“3¿›]× TbÈRÌÙ‘±ý'— u!ûVà¬TÙQ¹âû žµ¥ôºžºÅHJ!2f–séÇ’d‰ÒX`Hm„ÿÓ‰(ftʦ ðXú“8ÜBŸ8_ML4wÞĪä<;óÝWŸ jƒIfå4¨OÌΟrî¢Ù'‚?YÔ3nÕ´¨O˜ÎrëùœÝ¬CAÔ¡°kÆÅõyn$Ô%hn«‰Ô"„ÄÇ»»ýøіÕ;HB Õ ¿Ø  Ð˜Î‚…´) OÎ4÷e[Ð+¸ÒF<¸oDu¼‚¯°,ö|®´ˆÈ&`‡‡ÀO©9²É€I{¯ÂË~>¶¥jÖߟ]ýù¿IA7mñ"ç¬ùvU°Ànãå€j© Û—ÀПth†Ãï/xs]c1L‘‡¾õýi|¬CáJ¹ñ)gÛ|¿‡¶ü9‘B—‚HòþDTôb¡üÂUÖh=ïÁrVÄD8].qHª´r¯7G€ÞS™bí’¤P4¼ üùÇW¯‘Të¾_íêØª8Å\–³¢TÕÔ–QÇZÚpLV7]ìj6ú–œe¾ááK½ÌCñ£c÷>îŒe¡éc8¶Å¢â24ƒâ—/ÿñã‚ê€úµS=0,A€"ßQ«Dù¬¢’ÛºiŠö.ô¹LB¤í07}KD#9=3ÒjC-"Veô—’ÓxÐK¸Ï¨R™ðÈúÅðñoü ú9 f0™eÔÆ¯ÙÊËy …1‚_¯Ï5$Xµ­÷X|úLDhr×åò]›þ.>0ÀæÂàð¤°äkvÓ `±Ë›î‚ä'©ªt(#„!VºökÁµÏË#Žh”O—×ë ñLÊöx8äÍÃ5;„÷'ižÈ‘~›…ÿé–Y|I ZP(µ9Æ&âØ+™G@/gU/ï«ãá-bU/´;£=!‹°^*™R4È¡jŸUbÉM¯‹îØTÃm½+è¤î’.ÅF{ÜÂTÒÞ÷1² :àCús説yWTÛ¢—úeÁ½¬œ}È÷Çþ«Œ€©V25t†}ךϻäv&9Äl–HE €Š—”•½kò]YT]¨QãCqb}ÃÏL ]=…Þ'T É6$G&S½d«7â—GÝRgãñ¿ÇÅéÈÛÛÑÜBaã´bÁñ#;½ÍÛžšcÜíCþ‡»®ºò°_9´Iš|Žïܲ‹aKtÚ·yÓ”žJQÉ#W ‡°rƒ´Yµá½‡}6Ì›f`« ü«:¶u(îí±¹kÊJ,à!+Ñ7šc›Q´ÒÀƒ†Ù­=îé¬$Ë•mü ýâãÂäÛÑžKÍ”G›ŠxŠã2£ç=)PAÈ[¢ÂµŠ¢„$ªô)¨DTJ>•Â/•Š“aSœRÛ‹@ïÒ¸C ÂÌÏÅx-¸Â°ã!¾ñë%ÄÀ”w¬pdW‚‚2ï"™sSã™O@¹ ï¤VØž•"ÒÉ9oÉ( Ráa¦¼¿®ØÕm˜çU' ðL±-¿¦ƒ´)¹¿-*ßL1DwÅÁÛ´óH‰l”ˆ®~Ä7)ñ•‹ :¿›˜®ÄbUúƒ‘…Ü¿Í+ü‚ö"3+žì§KfÜê§Âæ‰Äò»Eøÿ³%%D*?­Ä´AÄY\&ŽDÂe£œ,¡ðl0B‰ÉÏQ Ñ0´ßwlßþZK§t6‡Êx :mž  ÎjW4m𢙀äDÆvá0ì"t$ÍX±/EØöÚÝøm¥§DPÛ¯¤üMÏjŒ§¿ƒ ò}[Ó7ÿ‹¨/‘?)Ц§¤ßEãLST0p:Ñí}=¶†À]YtÈ÷ñnøÓ– ü¼äŸ›ˆ…ÚødÂÇ,9B6Žà_M„~Õzd,žÄ…¤wQì¶ßÐÕýúkÒº„@úkø•KaMº‚Ë `ª:Ž«›”/œ¡éLŠg`àÕÜ.Ä2B!̘ɑ ìÃ6± ë5#þáL,¡-û`‚êoõJc—~PS‰|øõ-…ˆŽÜ$ÈQŽð‘N3 ¸«øû¶äPF²ù¶à±]ýì¾ `ìÝ y°Æ[ìmÝìüô:ÌÏ›Åßù@;þ¼o9«¶Ã i¾6 Oï·½8×pÅè¼oiùöÿ­ÅHÓ4. ¿À†ðñ•æÒ²p€ZÜ? 
—@Ã0¾;9¬¼ß]ýþýžý+endstream endobj 240 0 obj << /Filter /FlateDecode /Length 878 >> stream xœTÉ’ÜD½ë8èF‰°Dí ÜLxˆ ÝÆ>hÔÕ=-i¬e0|=™U’ºÛÑ'¤ƒBU¹¼|ù2?ç´b9Åwý¶]FóSö9cñ4_?m—¿­³ï?“»Êi®óú˜%–3.+Kyn¨®wyÝe¤¼ó¬¢Ô0{÷2|ªÉ«œ’&¯ßgõwäíÃÏ¥‚Î2Ò5_Bþmæ0N¸6ŽôÑ]º'?¢¹‚Tš ÇÕÕifôpÒ Nüˆúé‡-² J_²k±eÿàçeì/Ûá࣓€P‚Ð{ˆ§¥mý4—óêÇ øõ¯ëgÚ92ž|ßúõÃÒ·gÃÊÈks^ö]ÈÆvQnx:¡Œ‘’VF(¥¬ÝÁ?òkŠ.ð@Z*-í'‰*¬‡VVª~‚ÌczØ8vdx™C”ŠÓ’¬Í¾âæ©™vk&Èå »ƒá~{™w¥…îÒ!¤&¥eO2F†eN‚I ó&7 Ç1$+ÌcS{.N%ÖȬäLî´üýÔ©«)ãÙ yý y…]’i8/‰<‰ð§5„ºTm+iŽklòGRb ö˜XíÝ•½­,´Xoö?¢¹qÖZ}'¸¬¤`\mÆôcæø`wªa¼âFlÆóu5íy˜@µk1Ö—¢ä®8Ìqè P,Ì)'É‚ÌÃæªoã c8…Nþ~öãÕõSŒ  ´ùù*ý&Å‚[h“bdžðVƹoÆ4®¥ƒÁÄÍ´‘ $3}ãUQÿ™1Ј”0YIëëdÕÏÃ)Cü$HÍIÓ½œœÂâ8‚hÏËÁOÛ‘‹µ­DÞ,_¢Ùhǵø>ü…Ù¿â[ÁíFø25§-3d-–¤çqX‰¡’–¸*8Ó‘‡ÂbS,Ò{ñï†q&!Úq»&©¸dFV&ýv-aX˜,!D^$}9cG…Œ{2u2ÞrlÂxþgý—ê–¹Ù÷Sx-²ÀjŒAXÂÁ]½D%¦ßùµ±’4; 9núÖU_ àF‰ààow¯ €p(Êpêý K¸J+FëÊ•—ÂÑJA°/œbOÞÕÙïðþD¡É¨endstream endobj 241 0 obj << /Type /XRef /Length 305 /Filter /FlateDecode /DecodeParms << /Columns 5 /Predictor 12 >> /W [ 1 3 1 ] /Info 3 0 R /Root 2 0 R /Size 242 /ID [] >> stream xœí’¿KaÇß÷UñÎ<4°ZŠ ¨†¶¦ÄÍ5¨Æ¶F‰ú„‚šŠÆ–¶  © "$Ú„¦h)«%Ë)êòûq¸ûoøðå{ϯ{žsF³Æä`Û8vÀ¾dhò‹®N7tÍÖÞàšýÌÐLÎèšÁ­î˜kˆC'ðLt‡âxRLàO,â×ÅázƒÜsô61Çbö‰ãV´oЇ£bj ]îÒz»èU13n‘;Þ¤æÚˆÉ9êSÓýá¿ —c½j°H¯gô717QßåFÐ!5yÛ›| ç½"ú¢×ÛÒS=0ói4¿ÿ‰¨Y!z?økÄßáWc½ÖñÙIú‹Ê1?ÅÛý(Ò^Cº86™9ƒ«Øn;h:&t5›mâ|õÈü÷tÙaŸ—æMã=* endstream endobj startxref 138788 %%EOF maxLik/inst/doc/using-maxlik.R0000644000175100001440000002261415124514352016001 0ustar hornikusers### R code from vignette source 'using-maxlik.Rnw' ################################################### ### code chunk number 1: using-maxlik.Rnw:46-48 ################################################### library(maxLik) set.seed(6) ################################################### ### code chunk number 2: using-maxlik.Rnw:98-107 ################################################### x <- rnorm(100) # data. 
true mu = 0, sigma = 1 loglik <- function(theta) { mu <- theta[1] sigma <- theta[2] sum(dnorm(x, mean=mu, sd=sigma, log=TRUE)) } m <- maxLik(loglik, start=c(mu=1, sigma=2)) # give start value somewhat off summary(m) ################################################### ### code chunk number 3: using-maxlik.Rnw:147-148 ################################################### coef(m) ################################################### ### code chunk number 4: using-maxlik.Rnw:151-152 ################################################### stdEr(m) ################################################### ### code chunk number 5: using-maxlik.Rnw:208-212 ################################################### ## create 3 variables with very different scale X <- cbind(rnorm(100), rnorm(100, sd=1e3), rnorm(100, sd=1e7)) ## note: correct coefficients are 1, 1, 1 y <- X %*% c(1,1,1) + rnorm(100) ################################################### ### code chunk number 6: using-maxlik.Rnw:224-232 ################################################### negSSE <- function(beta) { e <- y - X %*% beta -crossprod(e) # note '-': we are maximizing } m <- maxLik(negSSE, start=c(0,0,0)) # give start values a bit off summary(m, eigentol=1e-15) ################################################### ### code chunk number 7: using-maxlik.Rnw:256-259 ################################################### grad <- function(beta) { 2*t(y - X %*% beta) %*% X } ################################################### ### code chunk number 8: using-maxlik.Rnw:263-265 ################################################### m <- maxLik(negSSE, grad=grad, start=c(0,0,0)) summary(m, eigentol=1e-15) ################################################### ### code chunk number 9: using-maxlik.Rnw:278-281 ################################################### hess <- function(beta) { -2*crossprod(X) } ################################################### ### code chunk number 10: hessianExample ################################################### m <- maxLik(negSSE, grad=grad, hess=hess, start=c(0,0,0)) summary(m, eigentol=1e-15) ################################################### ### code chunk number 11: SSEA ################################################### negSSEA <- function(beta) { ## negative SSE with attributes e <- y - X %*% beta # we will re-use 'e' sse <- -crossprod(e) # note '-': we are maximizing attr(sse, "gradient") <- 2*t(e) %*% X attr(sse, "Hessian") <- -2*crossprod(X) sse } m <- maxLik(negSSEA, start=c(0,0,0)) summary(m, eigentol=1e-15) ################################################### ### code chunk number 12: using-maxlik.Rnw:338-340 ################################################### compareDerivatives(negSSE, grad, t0=c(0,0,0)) # 't0' is the parameter value ################################################### ### code chunk number 13: BFGS ################################################### m <- maxLik(loglik, start=c(mu=1, sigma=2), method="BFGS") summary(m) ################################################### ### code chunk number 14: using-maxlik.Rnw:473-493 ################################################### loglik <- function(theta) { mu <- theta[1] sigma <- theta[2] N <- length(x) -N*log(sqrt(2*pi)) - N*log(sigma) - sum(0.5*(x - mu)^2/sigma^2) # sum over observations } gradlikB <- function(theta) { ## BHHH-compatible gradient mu <- theta[1] sigma <- theta[2] N <- length(x) # number of observations gradient <- matrix(0, N, 2) # gradient is matrix: # N datapoints (rows), 2 components gradient[, 1] <- (x - mu)/sigma^2 # first column: derivative 
wrt mu gradient[, 2] <- -1/sigma + (x - mu)^2/sigma^3 # second column: derivative wrt sigma gradient } ################################################### ### code chunk number 15: using-maxlik.Rnw:503-506 ################################################### m <- maxLik(loglik, gradlikB, start=c(mu=1, sigma=2), method="BHHH") summary(m) ################################################### ### code chunk number 16: using-maxlik.Rnw:514-525 ################################################### loglikB <- function(theta) { mu <- theta[1] sigma <- theta[2] -log(sqrt(2*pi)) - log(sigma) - 0.5*(x - mu)^2/sigma^2 # no summing here # also no 'N*' terms as we work by # individual observations } m <- maxLik(loglikB, start=c(mu=1, sigma=2), method="BHHH") summary(m) ################################################### ### code chunk number 17: using-maxlik.Rnw:557-561 ################################################### m <- maxLik(loglikB, start=c(mu=1, sigma=2), method="BHHH", control=list(printLevel=3, iterlim=2)) summary(m) ################################################### ### code chunk number 18: using-maxlik.Rnw:601-605 ################################################### m <- maxLik(loglikB, start=c(mu=1, sigma=2), method="BHHH", control=list(reltol=0, gradtol=0)) summary(m) ################################################### ### code chunk number 19: using-maxlik.Rnw:639-648 ################################################### loglik <- function(theta, x) { mu <- theta[1] sigma <- theta[2] sum(dnorm(x, mean=mu, sd=sigma, log=TRUE)) } m <- maxLik(loglik, start=c(mu=1, sigma=2), x=x) # named argument 'x' will be passed # to loglik summary(m) ################################################### ### code chunk number 20: using-maxlik.Rnw:680-689 ################################################### f <- function(theta) { x <- theta[1] y <- theta[2] exp(-x^2 - y^2) # optimum at (0, 0) } m <- maxBFGS(f, start=c(1,1)) # give start value a bit off summary(m) ################################################### ### code chunk number 21: using-maxlik.Rnw:710-720 ################################################### ## create 3 variables, two independent, third collinear x1 <- rnorm(100) x2 <- rnorm(100) x3 <- x1 + x2 + rnorm(100, sd=1e-6) # highly correlated w/x1, x2 X <- cbind(x1, x2, x3) y <- X %*% c(1, 1, 1) + rnorm(100) m <- maxLik(negSSEA, start=c(x1=0, x2=0, x3=0)) # negSSEA: negative sum of squared errors # with gradient, hessian attribute summary(m) ################################################### ### code chunk number 22: using-maxlik.Rnw:733-734 ################################################### condiNumber(X) ################################################### ### code chunk number 23: using-maxlik.Rnw:767-780 ################################################### x1 <- rnorm(100) x2 <- rnorm(100) x3 <- rnorm(100) X <- cbind(x1, x2, x3) y <- X %*% c(1, 1, 1) > 0 # y values 1/0 linearly separated loglik <- function(beta) { link <- X %*% beta sum(ifelse(y > 0, plogis(link, log=TRUE), plogis(-link, log=TRUE))) } m <- maxLik(loglik, start=c(x1=0, x2=0, x3=0)) summary(m) ################################################### ### code chunk number 24: using-maxlik.Rnw:784-785 ################################################### condiNumber(X) ################################################### ### code chunk number 25: using-maxlik.Rnw:789-790 ################################################### condiNumber(hessian(m)) ################################################### ### code chunk number 26: 
using-maxlik.Rnw:815-825 ################################################### x <- rnorm(100) loglik <- function(theta) { mu <- theta[1] sigma <- theta[2] sum(dnorm(x, mean=mu, sd=sigma, log=TRUE)) } m <- maxLik(loglik, start=c(mu=1, sigma=1), fixed="sigma") # fix the component named 'sigma' summary(m) ################################################### ### code chunk number 27: using-maxlik.Rnw:863-874 ################################################### f <- function(theta) { x <- theta[1] y <- theta[2] exp(-x^2 - y^2) # optimum at (0, 0) } A <- matrix(c(1, 1), ncol=2) B <- -1 m <- maxNR(f, start=c(1,1), constraints=list(eqA=A, eqB=B)) summary(m) ################################################### ### code chunk number 28: using-maxlik.Rnw:908-913 ################################################### A <- matrix(c(1, 1), ncol=2) B <- -1 m <- maxBFGS(f, start=c(1,1), constraints=list(ineqA=A, ineqB=B)) summary(m) ################################################### ### code chunk number 29: using-maxlik.Rnw:947-952 ################################################### A <- matrix(c(1, 1, 1, -1), ncol=2) B <- c(-1, -1) m <- maxBFGS(f, start=c(2, 0), constraints=list(ineqA=A, ineqB=B)) summary(m) maxLik/inst/doc/using-maxlik.Rnw0000644000175100001440000010626014077525067016361 0ustar hornikusers\documentclass[a4paper]{article} \usepackage{amsmath} \usepackage{bbm} \usepackage[inline]{enumitem} \usepackage[T1]{fontenc} \usepackage[bookmarks=TRUE, colorlinks, pdfpagemode=none, pdfstartview=FitH, citecolor=black, filecolor=black, linkcolor=blue, urlcolor=black, ]{hyperref} \usepackage{graphicx} \usepackage{icomma} \usepackage[utf8]{inputenc} \usepackage{mathtools} % for extended pderiv arguments \usepackage{natbib} \usepackage{xargs} % for extended pderiv arguments \usepackage{xspace} % \SweaveUTF8 \newcommand{\COii}{\ensuremath{\mathit{CO}_{2}}\xspace} \DeclareMathOperator*{\E}{\mathbbm{E}}% expectation \newcommand*{\mat}[1]{\mathsf{#1}} \newcommand{\likelihood}{\mathcal{L}}% likelihood \newcommand{\loglik}{\ell}% log likelihood \newcommand{\maxlik}{\texttt{maxLik}\xspace} \newcommand{\me}{\mathrm{e}} % Konstant e=2,71828 \newcommandx{\pderiv}[3][1={}, 2={}]{\frac{\partial^{#2}{#1}}{\mathmbox{\partial{#3}}^{#2}}} % #1: function to differentiate (optional, empty = write after the formula) % #2: the order of differentiation (optional, empty=1) % #3: the variable to differentiate wrt (mandatory) \newcommand{\R}{\texttt{R}\xspace} \newcommand*{\transpose}{^{\mkern-1.5mu\mathsf{T}}} \renewcommand*{\vec}[1]{\boldsymbol{#1}} % \VignetteIndexEntry{Maximum likelihood estimation with maxLik} \title{Maximum Likelihood Estimation with \emph{maxLik}} \author{Ott Toomet} \begin{document} \maketitle <>= library(maxLik) set.seed(6) @ \section{Introduction} \label{sec:introduction} This vignette is intended for users who are familiar with concepts of likelihood and with the related methods, such as information equality and BHHH approximation, and with \R language. The vignette focuses on \maxlik usage and does not explain the underlying mathematical concepts. Potential target group includes researchers, graduate students, and industry practitioners who want to apply their own custom maximum likelihood estimators. If you need a refresher, consult the accompanied vignette ``Getting started with maximum likelihood and \maxlik''. The next section introduces the basic usage, including the \maxlik function, the main entry point for the package; gradients; different optimizers; and how to control the optimization behavior. 
These are topics that are hard to avoid when working with applied ML estimation. Section~\ref{sec:advanced-usage} contains a selection of more niche topics, including arguments to the log-likelihood function, other types of optimization, testing condition numbers, and constrained optimization. \section{Basic usage} \label{sec:basic-usage} \subsection{The maxLik function} \label{sec:maxlik-function} The main entry point to \maxlik functionality is the function of the same name, \verb|maxLik|. It is a wrapper around the underlying optimization algorithms that ensures that the returned object is of the right class so one can use the convenience methods, such as \verb|summary| or \verb|logLik|. It is important to keep in mind that \maxlik \emph{maximizes}, not minimizes functions. The basic usage of the function is very simple: just pass the log-likelihood function (argument \verb|logLik|) and the start value (argument \verb|start|). Let us demonstrate the basic usage by estimating the normal distribution parameters. We create 100 standard normals, and estimate the best fit mean and standard deviation. Instead of explicitly coding the formula for log-likelihood, we rely on the \R function \verb|dnorm| instead (see Section~\ref{sec:different-optimizers} for a version that does not use \verb|dnorm|): <<>>= x <- rnorm(100) # data. true mu = 0, sigma = 1 loglik <- function(theta) { mu <- theta[1] sigma <- theta[2] sum(dnorm(x, mean=mu, sd=sigma, log=TRUE)) } m <- maxLik(loglik, start=c(mu=1, sigma=2)) # give start value somewhat off summary(m) @ The algorithm converged in 7 iterations and one can check that the results are equal to the sample mean and variance.\footnote{Note that \R function \texttt{var} returns the unbiased estimator by using denominator $n-1$, the ML estimator is biased with denominator $n$. } This example demonstrates a number of key features of \verb|maxLik|: \begin{itemize} \item The first argument of the likelihood must be the parameter vector. In this example we define it as $\vec{\theta} = (\mu, \sigma)$, and the first lines of \verb|loglik| are used to extract these values from the vector. \item The \verb|loglik| function returns a single number, sum of individual log-likelihood contributions of individual $x$ components. (It may also return the components individually, see BHHH method in Section~\ref{sec:different-optimizers} below.) \item Vector of start values must be of correct length. If its components are named, those names are also displayed in \verb|summary| (and for \verb|coef| and \verb|stdEr|, see below). \item \verb|summary| method displays a handy summary of the results, including the convergence message, the estimated values, and statistical significance. \item \verb|maxLik| (and other auxiliary optimizers in the package) is a \emph{maximizer}, not minimizer. \end{itemize} As we did not specify the optimizer, \verb|maxLik| picked Newton-Raphson by default, and computed the necessary gradient and Hessian matrix numerically. \bigskip Besides summary, \verb|maxLik| also contains a number of utility functions to simplify handling of estimated models: \begin{itemize} \item \verb|coef| extracts the model coefficients: <<>>= coef(m) @ \item \verb|stdEr| returns the standard errors (by inverting Hessian): <<>>= stdEr(m) @ \item Other functions include \verb|logLik| to return the log-likelihood value, \verb|returnCode| and \verb|returnMessage| to return the convergence code and message respectively, and \verb|AIC| to return Akaike's information criterion. 
See the respective documentation for more information. \item One can also query the number of observations with \verb|nObs|, but this requires likelihood values to be supplied by observation (see the BHHH method in Section~\ref{sec:different-optimizers} below). \end{itemize} \subsection{Supplying analytic gradient} \label{sec:supplying-gradients} The simple example above worked fast and well. In particular, the numeric gradient \verb|maxLik| computed internally did not pose any problems. But users are strongly advised to supply analytic gradient, or even better, both the gradient and the Hessian matrix. More complex problems may be intractably slow, converge to a sub-optimal solution, or not converge at all if numeric gradients are noisy. Needless to say, unreliable Hessian also leads to unreliable inference. Here we show how to supply gradient to the \verb|maxLik| function. We demonstrate this with a linear regression example. Non-linear optimizers perform best in regions where level sets (contours) are roughly circular. In the following example we use data in a very different scale and create the log-likelihood function with extremely elongated elliptical contours. Now Newton-Raphson algorithm fails to converge when relying on numeric derivatives, but works well with analytic gradient. % using matrix notation We combine three vectors, $\vec{x}_{1}$, $\vec{x}_{2}$ and $\vec{x}_{3}$, created at a very different scale, into the design matrix $\mat{X} = \begin{pmatrix} \vec{x}_{1} & \vec{x}_{2} & \vec{x}_{3} \end{pmatrix}$ and compute $\vec{y}$ as \begin{equation} \label{eq:linear-regression-matrix} \vec{y} = \mat{X} \begin{pmatrix} 1 \\ 1 \\ 1 \end{pmatrix} + \vec{\epsilon}. \end{equation} We create $\vec{x}_{1}$, $\vec{x}_{2}$ and $\vec{x}_{3}$ as random normals with standard deviation of 1, 1000 and $10^{7}$ respectively, and let $\vec{\epsilon}$ be standard normal disturbance term: <<>>= ## create 3 variables with very different scale X <- cbind(rnorm(100), rnorm(100, sd=1e3), rnorm(100, sd=1e7)) ## note: correct coefficients are 1, 1, 1 y <- X %*% c(1,1,1) + rnorm(100) @ Next, we maximize negative of sum of squared errors \emph{SSE} (remember, \verb|maxLik| is a maximizer not minimizer) \begin{equation} \label{eq:ols-sse-matrix} \mathit{SSE}(\vec{\beta}) = (\vec{y} - \mat{X} \cdot \vec{\beta})^{\transpose} (\vec{y} - \mat{X} \cdot \vec{\beta}) \end{equation} as this is equivalent to likelihood maximization: <<>>= negSSE <- function(beta) { e <- y - X %*% beta -crossprod(e) # note '-': we are maximizing } m <- maxLik(negSSE, start=c(0,0,0)) # give start values a bit off summary(m, eigentol=1e-15) @ As one can see, the algorithm gets stuck and fails to converge, the last parameter value is also way off from the correct value $(1, 1, 1)$. We have amended summary with an extra argument, \verb|eigentol=1e-15|. Otherwise \maxlik refuses to compute standard errors for near-singular Hessian, see the documentation of \verb|summary.maxLik|. It makes no difference right here but we want to keep it consistent with the two following examples. Now let's improve the model performance with analytic gradient. The gradient of \emph{SSE} can be written as \begin{equation} \label{eq:ols-sse-gradient-matrix} \pderiv{\vec{\beta}}\mathit{SSE}(\vec{\beta}) = -2(\vec{y} - \mat{X}\vec{\beta})^{\transpose} \mat{X}. \end{equation} \maxlik uses numerator layout, i.e. the derivative of the scalar log-likelihood with respect to the column vector of parameters is a row vector. 
We can code the negative of it as
<<>>=
grad <- function(beta) {
   2*t(y - X %*% beta) %*% X
}
@
We can add the gradient to \verb|maxLik| as an additional argument \verb|grad|:
<<>>=
m <- maxLik(negSSE, grad=grad, start=c(0,0,0))
summary(m, eigentol=1e-15)
@
Now the algorithm converges rapidly, and the estimate is close to the true value.
Let us also add the analytic Hessian; in this case it is
\begin{equation}
  \label{eq:ols-sse-hessian-matrix}
  \frac{\partial^{2}}{\partial\vec{\beta}\,\partial\vec{\beta}^{\transpose}} \mathit{SSE}(\vec{\beta}) = 2\mat{X}^{\transpose}\mat{X}
\end{equation}
and we implement the negative of it as
<<>>=
hess <- function(beta) {
   -2*crossprod(X)
}
@
The analytic Hessian matrix can be included with the argument \verb|hess|, and now the results are
<<hessianExample>>=
m <- maxLik(negSSE, grad=grad, hess=hess, start=c(0,0,0))
summary(m, eigentol=1e-15)
@
The analytic Hessian did not change the convergence behavior here.
Note that as the loss function is quadratic, Newton-Raphson should provide the correct solution in a single iteration.
However, this example has numerical issues when inverting the near-singular Hessian.
One can easily check that when creating the covariates in a less extreme scale, the convergence is indeed immediate.
While using separate arguments \texttt{grad} and \texttt{hess} is perhaps the most straightforward way to supply gradients, \maxlik also supports gradient and Hessian supplied as log-likelihood attributes.
This is motivated by the fact that computing the gradient often involves many of the same computations as the log-likelihood, and one may want to re-use some of the results.
We demonstrate this on the same example, by writing a version of the log-likelihood function that also computes the gradient and Hessian:
<<SSEA>>=
negSSEA <- function(beta) {
   ## negative SSE with attributes
   e <- y - X %*% beta    # we will re-use 'e'
   sse <- -crossprod(e)   # note '-': we are maximizing
   attr(sse, "gradient") <- 2*t(e) %*% X
   attr(sse, "Hessian") <- -2*crossprod(X)
   sse
}
m <- maxLik(negSSEA, start=c(0,0,0))
summary(m, eigentol=1e-15)
@
The log-likelihood with ``gradient'' and ``Hessian'' attributes, \verb|negSSEA|, computes the log-likelihood as above, but also computes its gradient, and adds it as attribute ``gradient'' to the log-likelihood.
This gives a potential efficiency gain as the residuals $\vec{e}$ are re-used.
\maxlik checks the presence of the attribute, and if it is there, it uses the provided gradient.
In real applications the efficiency gain will depend on the amount of computations re-used, and on the number of likelihood calls versus gradient calls.
While analytic gradients are always helpful and often necessary, they may be hard to derive and code.
To help derive and debug the analytic gradient, another provided function, \verb|compareDerivatives|, takes the log-likelihood function and the analytic gradient, and compares the numeric and analytic gradient.
As an example, we compare the log-likelihood and gradient functions we just coded:
<<>>=
compareDerivatives(negSSE, grad, t0=c(0,0,0))  # 't0' is the parameter value
@
The function prints the analytic gradient, the numeric gradient, their relative difference, and the largest relative difference value (in absolute value).
The latter is handy in case of large gradient vectors where it may be hard to spot a lonely component that is off.
In case of reasonably smooth functions, expect the relative difference to be smaller than $10^{-7}$.
But in this example the numerical gradients are clearly problematic.
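The same tool can be used to verify an analytic Hessian.
A minimal sketch, re-using the \verb|grad| and \verb|hess| functions defined above (the chunk is not evaluated here, as the printout is rather long):
<<compareHessian, eval=FALSE>>=
## pass the gradient as the (vector-valued) function and the Hessian as
## its analytic derivative
compareDerivatives(grad, hess, t0=c(0,0,0))
@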
This works because \verb|compareDerivatives| supports vector-valued functions: one can test the analytic Hessian by calling \verb|compareDerivatives| with the gradient function as the first argument and the analytic Hessian as the second argument.
\subsection{Different optimizers}
\label{sec:different-optimizers}
By default, \maxlik uses the Newton-Raphson optimizer, but one can easily swap the optimizer through the \verb|method| argument.
The supported optimizers include ``NR'' for the default Newton-Raphson, ``BFGS'' for the gradient-only Broyden-Fletcher-Goldfarb-Shanno, ``BHHH'' for the information-equality-based Berndt-Hall-Hall-Hausman, and ``NM'' for the gradient-less Nelder-Mead.
Different optimizers may be based on very different approaches, and certain concepts, such as \emph{iteration}, may mean quite different things.
For instance, although Newton-Raphson is a simple, fast and intuitive method that approximates the function with a parabola, it needs to know the Hessian matrix (the second derivatives).
This is usually even harder to program than the gradient, and even slower and more error-prone when computed numerically.
Let us replace NR with the gradient-only BFGS method.
It is a quasi-Newton method that computes its own internal approximation of the Hessian while relying only on gradients.
We re-use the data and log-likelihood function from the first example where we estimated the normal distribution parameters:
<<BFGS>>=
m <- maxLik(loglik, start=c(mu=1, sigma=2), method="BFGS")
summary(m)
@
One can see that the results were identical, but while NR converged in 7 iterations, it took 20 iterations for BFGS.
In this example the BFGS approximation errors were larger than the numeric errors when computing the Hessian, but this may not be true for more complex objective functions.
In a similar fashion, one can simply drop in most other provided optimizers.
One method that is very popular for ML estimation is BHHH.
We discuss it here at length because that method requires both the log-likelihood and the gradient function to return a somewhat different value.
The essence of BHHH is information equality, the fact that in case of a log-likelihood function $\loglik(\theta)$, the expected value of the Hessian at the true parameter value $\vec{\theta}_{0}$ can be expressed through the expected value of the outer product of the gradient:
\begin{equation}
  \label{eq:information-equality}
  \E \left[ \frac{\partial^2 l(\vec{\theta})} {\partial\vec{\theta}\, \partial\vec{\theta}^{\transpose}} \right]_{\vec{\theta} = \vec{\theta}_0} = - \E \left[ \left. \frac{\partial l(\vec{\theta})} {\partial\vec{\theta}^{\transpose}} \right|_{\vec{\theta} = \vec{\theta}_0} \cdot \left. \frac{\partial l(\vec{\theta})} {\partial\vec{\theta}} \right|_{\vec{\theta} = \vec{\theta}_0} \right].
\end{equation}
Hence we can approximate the Hessian by the average outer product of the gradient.
Obviously, this is only an approximation, and it is less correct when we are far from the true value $\vec{\theta}_{0}$.
Note also that when approximating the expected value with the average we rely on the assumption that the observations are independent.
This may not be true for certain types of data, such as time series.
However, in order to compute the average outer product, we need to compute the gradient \emph{by observation}.
Hence it is not enough to just return a single gradient vector; we have to compute a matrix where rows correspond to individual data points and columns to the gradient components.
We demonstrate the BHHH method by replicating the normal distribution example from above.
Remember, the normal probability density is
\begin{equation}
  \label{eq:normal-pdf}
  f(x; \mu, \sigma) = \frac{1}{\sqrt{2\pi}} \frac{1}{\sigma} \, \me^{ -\displaystyle\frac{1}{2} \frac{(x - \mu)^{2}}{\sigma^{2}} }.
\end{equation}
and hence the log-likelihood contribution of $x$ is
\begin{equation}
  \label{eq:normal-loglik}
  \loglik(\mu, \sigma; x) = - \log{\sqrt{2\pi}} - \log \sigma - \frac{1}{2} \frac{(x - \mu)^{2}}{\sigma^{2}}
\end{equation}
and its gradient
\begin{equation}
  \label{eq:normal-loglik-gradient}
  \begin{split}
    \pderiv{\mu} \loglik(\mu, \sigma; x) &= \frac{1}{\sigma^{2}}(x - \mu) \\
    \pderiv{\sigma} \loglik(\mu, \sigma; x) &= -\frac{1}{\sigma} + \frac{1}{\sigma^{2}}(x - \mu)^{2}.
  \end{split}
\end{equation}
We can code these two functions as
<<>>=
loglik <- function(theta) {
   mu <- theta[1]
   sigma <- theta[2]
   N <- length(x)
   -N*log(sqrt(2*pi)) - N*log(sigma) - sum(0.5*(x - mu)^2/sigma^2)
                           # sum over observations
}
gradlikB <- function(theta) {
   ## BHHH-compatible gradient
   mu <- theta[1]
   sigma <- theta[2]
   N <- length(x)  # number of observations
   gradient <- matrix(0, N, 2)  # gradient is matrix:
                           # N datapoints (rows), 2 components
   gradient[, 1] <- (x - mu)/sigma^2  # first column: derivative wrt mu
   gradient[, 2] <- -1/sigma + (x - mu)^2/sigma^3
                           # second column: derivative wrt sigma
   gradient
}
@
Note that in this case we do not sum over the individual values in the gradient function (but we still do in the log-likelihood).
Instead, we fill the rows of the $N\times2$ gradient matrix with the values observation-wise.
The results are similar to what we got above and the convergence speed is in-between that of Newton-Raphson and BFGS:
\label{code:bhhh-example}
<<>>=
m <- maxLik(loglik, gradlikB, start=c(mu=1, sigma=2), method="BHHH")
summary(m)
@
If we do not have the time and energy to code the analytic gradient, we can let \maxlik compute the numeric one for BHHH too.
In this case we have to supply the log-likelihood by observation.
This essentially means we remove the summing from the original likelihood function:
<<>>=
loglikB <- function(theta) {
   mu <- theta[1]
   sigma <- theta[2]
   -log(sqrt(2*pi)) - log(sigma) - 0.5*(x - mu)^2/sigma^2
                           # no summing here
                           # also no 'N*' terms as we work by
                           # individual observations
}
m <- maxLik(loglikB, start=c(mu=1, sigma=2), method="BHHH")
summary(m)
@
Besides relying on information equality, BHHH is essentially the same algorithm as NR.
As the Hessian is only approximated, it converges at a slower pace than NR with an analytic Hessian.
But when relying on numeric derivatives only, BHHH may be more reliable.
For convenience, the other methods also support observation-wise gradients and log-likelihood values; those numbers are simply summed internally.
So one can just code the problem in a BHHH-compatible manner and use it for all supported optimizers.
The \maxlik package also includes a stochastic gradient ascent optimizer.
As that method is rarely used for ML estimation, it cannot be supplied through the ``method'' argument.
Consult the separate vignette ``Stochastic gradient ascent in \maxlik''.
\subsection{Control options}
\label{sec:control-options}
\maxlik supports a number of control options, most of which can be supplied through the \verb|control=list(...)| argument.
Some of the most important options include \verb|printLevel| to control debugging information, \verb|iterlim| to control the maximum number of iterations, and various \verb|tol|-parameters to control the convergence tolerances.
For instance, we can limit the iterations to two, while also printing out the parameter estimates at each step.
We use the previous example with the BHHH optimizer:
<<>>=
m <- maxLik(loglikB, start=c(mu=1, sigma=2), method="BHHH",
            control=list(printLevel=3, iterlim=2))
summary(m)
@
The first option, \verb|printLevel=3|, makes \verb|maxLik| print out the parameters, the gradient, and a few other bits of information at every step.
Larger levels output more information; print level 1 only prints the first and last parameter values.
The output from \maxlik-implemented optimizers is fairly consistent, but methods that call optimizers in other packages, such as BFGS, may output debugging information in quite a different way.
The second option, \verb|iterlim=2|, stops the algorithm after two iterations.
It returns with code 4: iteration limit exceeded.
Another handy set of options is the convergence tolerances.
There are three of them:
\begin{description}
\item[tol] This measures the absolute convergence tolerance. Stop if successive function evaluations differ by less than \emph{tol} (default $10^{-8}$).
\item[reltol] This is somewhat similar to \emph{tol}, but relative to the function value. Stop if successive function evaluations differ by less than $\mathit{reltol}\cdot (\loglik(\vec{\theta}) + \mathit{reltol})$ (default \verb|sqrt(.Machine[["double.eps"]])|, may be approximately \Sexpr{formatC(sqrt(.Machine[["double.eps"]]), digits=1)} on a modern computer).
\item[gradtol] Stop if the (Euclidean) norm of the gradient is smaller than this value (default $10^{-6}$).
\end{description}
The default tolerance values are typically good enough, but in certain cases one may want to adjust them.
For instance, if the function values are very large, one may rely only on the absolute tolerance and ignore the relative tolerance and gradient tolerance criteria.
A simple way to achieve this is to set both \emph{reltol} and \emph{gradtol} to zero.
In that case these two conditions are never satisfied and the algorithm stops only when the absolute convergence criterion is fulfilled.
For instance, in the previous case we get:
<<>>=
m <- maxLik(loglikB, start=c(mu=1, sigma=2), method="BHHH",
            control=list(reltol=0, gradtol=0))
summary(m)
@
When comparing the result with that on Page~\pageref{code:bhhh-example} we can see that the optimizer now needs more iterations and it stops with a return code that is related to the absolute tolerance, not the relative tolerance.
Note that BFGS and other optimizers that are based on \verb|stats::optim| do not report the convergence results in the same way as BHHH and NR, the algorithms provided by the \maxlik package.
Instead of tolerance limits or a gradient-close-to-zero message, we hear about ``successful convergence''.
Stochastic gradient ascent relies on completely different convergence criteria.
See the dedicated vignette ``Stochastic Gradient Ascent in \maxlik''.
\section{Advanced usage}
\label{sec:advanced-usage}
This section describes more advanced and less frequently used aspects of \maxlik.
\subsection{Additional arguments to the log-likelihood function}
\label{sec:additional-arguments-loglik}
\maxlik expects the first argument of the log-likelihood function to be the parameter vector.
But the function may have more arguments.
Those can be passed as additional named arguments to the \verb|maxLik| function.
For instance, let's change the log-likelihood function so that it expects the data $\vec{x}$ to be passed as an argument \verb|x|.
Now we have to call \maxlik with an additional argument \verb|x=...|:
<<>>=
loglik <- function(theta, x) {
   mu <- theta[1]
   sigma <- theta[2]
   sum(dnorm(x, mean=mu, sd=sigma, log=TRUE))
}
m <- maxLik(loglik, start=c(mu=1, sigma=2), x=x)
                           # named argument 'x' will be passed
                           # to loglik
summary(m)
@
This approach only works if the argument names do not overlap with the names of \verb|maxLik|'s own arguments.
If that happens, \verb|maxLik| prints an informative error message.
\subsection{Maximizing other functions}
\label{sec:maximizing-other-functions}
The \verb|maxLik| function is basically a wrapper around a number of maximization algorithms and a set of likelihood-related methods, such as standard errors.
However, from time to time we need to optimize other functions where inverting the Hessian to compute standard errors is not applicable.
In such cases one can call the included optimizers directly, using the form \verb|maxXXX| where \verb|XXX| stands for the name of the method, e.g. \verb|maxNR| for Newton-Raphson (\verb|method="NR"|) and \verb|maxBFGS| for BFGS.
There is also \verb|maxBHHH|, although the information equality--based BHHH is not correct if we do not work with log-likelihood functions.
The arguments for the \verb|maxXXX|-functions are largely similar to those for \maxlik: the first argument is the function to maximize, and one also has to supply start values.
Let us demonstrate this functionality by optimizing a 2-dimensional bell curve,
\begin{equation}
  \label{eq:2d-bell-curve}
  f(x, y) = \me^{-x^{2} - y^{2}}.
\end{equation}
We code this function and just call \verb|maxBFGS| on it:
<<>>=
f <- function(theta) {
   x <- theta[1]
   y <- theta[2]
   exp(-x^2 - y^2)  # optimum at (0, 0)
}
m <- maxBFGS(f, start=c(1,1))  # give start value a bit off
summary(m)
@
Note that the summary output is slightly different: it reports the parameter and gradient value, appropriate for a task that is not likelihood optimization.
Behind the scenes, this is because the \verb|maxXXX|-functions return an object of \emph{maxim}-class, not \emph{maxLik}-class.
\subsection{Testing condition numbers}
\label{sec:testing-condition-numbers}
The analytic gradient we demonstrated in Section~\ref{sec:supplying-gradients} helps to avoid numerical problems.
But not all problems can or should be solved by analytic gradients.
For instance, multicollinearity should be addressed at the data or model level.
\maxlik provides a helper function, \verb|condiNumber|, to detect such problems.
We demonstrate this by creating a highly multicollinear dataset and estimating a linear regression model.
We re-use the regression code from Section~\ref{sec:supplying-gradients} but this time we create multicollinear data in a similar scale.
<<>>=
## create 3 variables, two independent, third collinear
x1 <- rnorm(100)
x2 <- rnorm(100)
x3 <- x1 + x2 + rnorm(100, sd=1e-6)  # highly correlated w/x1, x2
X <- cbind(x1, x2, x3)
y <- X %*% c(1, 1, 1) + rnorm(100)
m <- maxLik(negSSEA, start=c(x1=0, x2=0, x3=0))
                           # negSSEA: negative sum of squared errors
                           # with gradient, hessian attribute
summary(m)
@
As one can see, the model converges but the standard errors are missing (because the Hessian is not negative definite).
In such a case we may learn more about the problem by testing the condition number $\kappa$ of either the design matrix $\mat{X}$ or the Hessian matrix.
It is instructive to test not just the whole matrix, but to do it column-by-column, and see where the number suddenly jumps.
This hints which variable does not play nicely with the rest of the data.
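As a reminder, the condition number of a matrix is the ratio of its largest to its smallest singular value.
The column-by-column idea can be sketched with a small hypothetical helper built on \verb|svd| (this is only an illustration, not the implementation used by the package):
<<kappaSketch, eval=FALSE>>=
kappaByColumn <- function(X) {
   ## 2-norm condition number of X[, 1:k] for k = 1, 2, ...
   sapply(seq_len(ncol(X)), function(k) {
      s <- svd(X[, 1:k, drop=FALSE])$d
      max(s)/min(s)
   })
}
kappaByColumn(X)
@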
\verb|condiNumber| provides such functionality.
First, we test the condition number of the design matrix:
<<>>=
condiNumber(X)
@
We can see that when only including $\vec{x}_{1}$ and $\vec{x}_{2}$ in the design, the condition number is 1.35, far from any singularity-related problems.
However, adding $\vec{x}_{3}$ to the matrix causes $\kappa$ to jump to over 5 million.
This suggests that $\vec{x}_{3}$ is highly collinear with $\vec{x}_{1}$ and $\vec{x}_{2}$.
In this example the problem is obvious as this is how we created $\vec{x}_{3}$; in real applications one often needs further analysis.
For instance, the problem may be in categorical values that contain too few observations or complex fixed effects that turn out to be perfectly multicollinear.
A good suggestion is to estimate a linear regression model where one explains the offending variable using all the previous variables.
In this example we might estimate \verb|lm(x3 ~ x1 + x2)| and see which variables help to explain $\vec{x}_{3}$ perfectly.
Sometimes the design matrix is fine but the problem arises because the data and the model do not match.
In that case it may be more informative to test the condition number of the Hessian matrix instead.
The example below creates a linearly separated set of observations and estimates it with logistic regression.
As a refresher, the log-likelihood of logistic regression is
\begin{equation}
  \label{eq:logistic-loglik}
  \loglik(\beta) = \sum_{i: y_{i} = 1} \log\Lambda(\vec{x}_{i}^{\transpose} \vec{\beta}) + \sum_{i: y_{i} = 0} \log\Lambda(-\vec{x}_{i}^{\transpose} \vec{\beta})
\end{equation}
where $\Lambda(x) = 1/(1 + \exp(-x))$ is the logistic cumulative distribution function.
We implement it using the \R function \verb|plogis|:
<<>>=
x1 <- rnorm(100)
x2 <- rnorm(100)
x3 <- rnorm(100)
X <- cbind(x1, x2, x3)
y <- X %*% c(1, 1, 1) > 0  # y values 1/0 linearly separated
loglik <- function(beta) {
   link <- X %*% beta
   sum(ifelse(y > 0, plogis(link, log=TRUE), plogis(-link, log=TRUE)))
}
m <- maxLik(loglik, start=c(x1=0, x2=0, x3=0))
summary(m)
@
Not surprisingly, all coefficients tend to infinity and inference is problematic.
In this case the design matrix does not show any issues:
<<>>=
condiNumber(X)
@
But the Hessian reveals that including $\vec{x}_{3}$ in the model is still problematic:
<<>>=
condiNumber(hessian(m))
@
Now the problem is not multicollinearity but the fact that $\vec{x}_{3}$ makes the data linearly separable.
In such cases we may want to adjust our model or estimation strategy.
\subsection{Fixed parameters and constrained optimization}
\label{sec:fixed-parameters}
\maxlik supports three types of constraints.
The simplest case just keeps certain parameters' values fixed.
The other two, general linear equality and inequality constraints, are somewhat more complex.
Occasionally we want to treat one of the model parameters as constant.
This can be achieved in a very simple manner, just through the argument \verb|fixed|.
It must be an index vector, either numeric, such as \verb|c(2,4)|, logical as \verb|c(FALSE, TRUE, FALSE, TRUE)|, or character as \verb|c("beta2", "beta4")|, given that \verb|start| is a named vector.
We revisit the first example of this vignette and estimate the normal distribution parameters again.
However, this time we fix $\sigma = 1$:
<<>>=
x <- rnorm(100)
loglik <- function(theta) {
   mu <- theta[1]
   sigma <- theta[2]
   sum(dnorm(x, mean=mu, sd=sigma, log=TRUE))
}
m <- maxLik(loglik, start=c(mu=1, sigma=1),
            fixed="sigma")  # fix the component named 'sigma'
summary(m)
@
The result has $\sigma$ exactly equal to $1$, its standard error $0$, and its $t$ value undefined.
The fixed components are ignored when computing gradients and the Hessian in the optimizer, essentially reducing the problem from 2-dimensional to 1-dimensional.
Hence the inference for $\mu$ is still correct.
Next, we demonstrate equality constraints.
We take the two-dimensional function we used in Section~\ref{sec:maximizing-other-functions} and add the constraint $x + y = 1$.
The constraint must be described in matrix form $\mat{A}\,\vec{\theta} + \vec{B} = 0$ where $\vec{\theta}$ is the parameter vector and the matrix $\mat{A}$ and the vector $\vec{B}$ describe the constraints.
In this case we can write
\begin{equation}
  \label{eq:equality-constraints}
  \begin{pmatrix} 1 & 1 \end{pmatrix} \cdot \begin{pmatrix} x \\ y \end{pmatrix} + \begin{pmatrix} -1 \end{pmatrix} = 0,
\end{equation}
i.e. $\mat{A} = (1 \; 1)$ and $\vec{B} = -1$.
These values must be supplied to the optimizer argument \verb|constraints|.
This is a list with components named \verb|eqA| and \verb|eqB| for $\mat{A}$ and $\vec{B}$ respectively.
We do not demonstrate this with a likelihood example, as no corrections to the Hessian matrix are done and hence the standard errors would be incorrect.
But if you are not interested in likelihood-based inference, it works well:
<<>>=
f <- function(theta) {
   x <- theta[1]
   y <- theta[2]
   exp(-x^2 - y^2)  # optimum at (0, 0)
}
A <- matrix(c(1, 1), ncol=2)
B <- -1
m <- maxNR(f, start=c(1,1),
           constraints=list(eqA=A, eqB=B))
summary(m)
@
The problem is solved using the sequential unconstrained maximization technique (SUMT).
The idea is to add a small penalty for the constraint violation, and to slowly increase the penalty until violations are prohibitively expensive.
As the example indicates, the solution is extremely close to the constraint line.
The usage of inequality constraints is fairly similar.
We have to code the inequalities as $\mat{A}\,\vec{\theta} + \vec{B} > 0$ where the matrix $\mat{A}$ and the vector $\vec{B}$ are defined as above.
Let us optimize the function over the region $x + y > 1$.
In matrix form this will be
\begin{equation}
  \label{eq:inequality-constraints-1}
  \begin{pmatrix} 1 & 1 \end{pmatrix} \cdot \begin{pmatrix} x \\ y \end{pmatrix} + \begin{pmatrix} -1 \end{pmatrix} > 0.
\end{equation}
Supplying the constraints is otherwise similar to the equality constraints, just the constraints-list components must now be called \verb|ineqA| and \verb|ineqB|.
As \verb|maxNR| does not support inequality constraints, we use \verb|maxBFGS| instead.
The corresponding code is
<<>>=
A <- matrix(c(1, 1), ncol=2)
B <- -1
m <- maxBFGS(f, start=c(1,1),
             constraints=list(ineqA=A, ineqB=B))
summary(m)
@
Not surprisingly, the result is exactly the same as in the case of equality constraints: the optimum is found at the boundary line, the same line that we specified when demonstrating the equality constraints.
One can supply more than one set of constraints; in that case all of them must be satisfied at the same time.
For instance, let's add another condition, $x - y > 1$.
This should be coded as another row of $\mat{A}$ and another component of $\vec{B}$; in matrix form the constraint is now
\begin{equation}
  \label{eq:inequality-constraints-2}
  \begin{pmatrix} 1 & 1\\ 1 & -1 \end{pmatrix} \cdot \begin{pmatrix} x \\ y \end{pmatrix} + \begin{pmatrix} -1 \\ -1 \end{pmatrix} > \begin{pmatrix} 0 \\ 0 \end{pmatrix}
\end{equation}
where ``>'' must be understood as an element-wise operation.
We also have to ensure that the initial value satisfies the constraint, so we choose $\vec{\theta}_{0} = (2, 0)$.
The code changes accordingly:
<<>>=
A <- matrix(c(1, 1, 1, -1), ncol=2)
B <- c(-1, -1)
m <- maxBFGS(f, start=c(2, 0),
             constraints=list(ineqA=A, ineqB=B))
summary(m)
@
The solution is $(1, 0)$, the closest point to the origin where both constraints are satisfied.
\bigskip

This example concludes the \maxlik usage introduction.
For more information, consult the fairly extensive documentation and the other vignettes.
% \bibliographystyle{apecon}
% \bibliography{maxlik}
\end{document}
maxLik/inst/doc/stochastic-gradient-maxLik.pdf0000644000175100001440000043245415124514353021133 0ustar hornikusers [binary PDF content omitted]
û>lý÷rXÈù‹4›0<¥!rÃbû%— »pÚvÙrÝb§xàe-¸Šâ¬r›¨0}j¶ÖØŠ¯ÂE7à’+p±£ÜYn«0=¾ÎMHÓ¤I»”igˆüåAæé…(ýñi$ŸV”åY¢º2e „Ù¾ˆ£¸®r4>*À2·˜wùêüû-;1V#Y#Q¿×Ñ3£Q­Fƒ¹•R8ôn¼Ï*‡‹0ˆÔsµ_SäD PãX×~¸þÃõï}õí@9åRYT¼é<9”_Œ„'™¬”‘-AÝÚˆ¶fï\Z½ ý‡?ú ùõ†Ü#û;›èù$| zÉO·ì§Ã$ºóØÄ)%Áu×KûªŠk5€:x9as”F“H Åã¨"+q“`ž’×__2”€RCÉ?¦d¦6ÏÖ…'Öu·JI—·kê%ôãZrö톮ÚlZh¿ÀÔÆI‡•dY¾WäÃþh.ì‹æ1ÿ:ŽBl܇‹¿O^8mkÇy–òÉöÓÚyôãé$ú[¸6ßõ+ Ó«át_DÝHØJ8pËcsxÍ¿z¢þcQê3úðÄ/èEP”…uœkÞ4Yðàk¡:ŒCMª\Ãn\+X¶‚´êÌ`Â>ýà=ÊT͹¹ë›Oy6ÖwJã îEÝ‚‘[Ûñf­Q}îæŸ±/“+9c¤Ó‹g/œ•6Œ¦Ð4Éb¸ ®†Ï¹p³ˆCƒá|´û#­Ö¨ŒR?Å)'$ý0’ç)öÍÒº«qðpuÑŽ·‚ËsÀ€¸|<¶Z]¤“ŠæÀáØ{ørCãa_/ì¢{h÷EÀÈ ‘0#n ÿ#t›•"Ч3«!M( +NŽIæQ¹€Sðd…P/Co»pÌ]bÐ8yyZy>È£²ª üµ•U¾k[bçÏz•×=6Àµð\³1zî<€£þö‡éXY \@þÙû¼‡£†^~ÉM±o™$E&É`ö·«¢Ñèk ‘ Xn©4S7X'À†ëüKHÉú$Qzʦ%ÊÕOvAhs \zê3?Ôû=â躣¢áŒ›ŽÆ°¿“Ã>5#¹N¹qXG„/¦Ý¼ y ?°~ï¢r^²g«) P±¬4‹°4R6ÒäuÔºW_"/ÕVhªô•ØÞòþ梙>ÚD«j+¶ÆÛYŒJF ¤T¡·¨¬¼¸¤,(òelK)eñôzNgÄ ª*§ÖB±ÿ"/ÉJw¦žüÆòØTozUmÕ9´­A'«…ØðK¶4H(ö#yÒŽ“<º/tŒ÷ëì7Ê·g áÇÏÝpQAã:s¼äqïõëgKⶨ&MÉû x’*ƒ_ïï$Zñóe8Eä ×¢O|Y9œ§áOö¼|_EýPÿðí$xˆ+2¥+ôl.b¿ðáÜïréjm•¦Z{Bz¢È?Ó&± 5mõÌ /¿z’bžÑet]”ƒtkm*…A¥æ¡LRmV;K=u^'Ïîr¸­în^a¬ÉߺÓñ—Géx¯+ÍzœŽÛ$È7åƒ|j™fô\I–Å#ãÕXŠ«A5U/*ËæéX9y¢ìeg²÷ß@XÁ: šd)–ižÃýCsY‰IÖ]ÌìX_nkãXûÀI}ó¶PŠe–•Y²äu.Y~uȰ\ܹT겕õÿZ™¡VºÐÚF? AõÎ:…«À+ñúSýi¾T»Ò)2*_¬“`T¬1¦:s¼…þ¢ja‹p{VKòa.V°Rø§ ¾ϾGöžc¼Ø|>Yê•¥¯KÏÑéY‚¬t!Îî^`ÀÄtm£:B]‚ÚB¶t[èÇ=HÄ{€ž†“`ÔÝŸa„TjPEÁÛä¥.©TðÉ=§´³htŸ|y9z ™¹äØ1OÞjlmü߇хÍÝΖ'K‚.ìw¼&ßíµlÜK‚l뺃è¾UÛñv°ÌY5d ðK¤Å2kž½ðzñzå8váÚd6 Ín+vú¸Wà‚Ë0þŒ+Û]µó#&É|²¡½(™6F•NePKS¸’¤¢DIÒ$äJ·É’%)…­ÒEmk`Ìjó.ä¬F°1‹ðÚý ¾Ë¤FNþŸžª$0X­~ïÁÊ­'ÜqmçWßœsÌ[ÏCJ29ÝÊ“ü3·üçÏsYé™–æ’ ÕДݜմɕ惕I™Kò®lÌq`aBn<Åþ¶ó!LÏHmwÎ*˜œU“gUàI:‹–œFKΠ¥¶"‹Ë™Ua±4Z‡àÈpÔa8Ââ²8Í.l[mÙž{Î^ÄÛƒè}ˆçÊäZÔVµÓ€½M°øm%gaüI¸è Œ/ÞUÝv™IîV>ÙJ-‰@cP4J!·`iÞ²¼¥‹P†äƒÉWÆ”Ñì®Õ^|ÿóA_Äùö+›G¤ùó‚Ášª†¨Šß}“¦ ‹°çÚÿÑÌ«1ìëà$¬ŒemáVñŠø0bI»F“´L{ýÕ÷¡WÂËI`5Ù:é8eµ\͉†ŽD/ŒBCeù2 ž¾5& ÐPu!ûԇɨÇdwUÄÁð³>óYˆ¡üC߯ Á¼{ØS} ûrlæU_]]_—S••™“—öâ§“~„ýþü3ì÷ÓH¢¾4û›µ`Cþ¶T ®"ý¿&`Ĺº Î7ì?nµÛíxê3“F›Îª2S”¯(ù:>Ø<›J°iÒšµ­‰2°´Û)-…®“J‹Ú]ìñÙl¼º¦öÊ=Œ{ÿÜàE=ÁT0W÷ÆÛI…ª|}nµ¸CgÕŠŸF„Ç碰¯È÷%ó‚íÐÃð/ 0)B¿H¸|Ó&N­j\Q­Co)­Ý^YÌ ì,;€ò÷ßuPì{I"q*ß' U”8JŠi¸õOÿÓéþùô_!7ÔÓ»¿„²/#á@x›cd{ÖŸÎ]ªoØ^ÝšA“´*SoÒ= ²òÄYYUâÚ`eE󵹡Eè­h z =W’áøÔ'{>œ {¡>K–òççÓ&Ô›ã&/ú!ë{aBòòÉw4¨"Æe~ó±ŽÜÀctwh¾s£óýÌ‘_tL䤢,³Î݆£í€ªeùAÙíÉÖÊ™Ëflžüî¨m( #s">D,<Øqª”g„lN2©›Åm ^PAî• Ö[çÛèRg¹µPÝhè2úI ÃÓ™Þ}ÔYÂð N~RÂ*A(XWâ±[kÅuiLuô¸:èfÏùó)þ¨«:ø™~=‚öË©a~3î÷{µµÒËóï(ïl6陵¸Bwþ­Ù ÁâhOû~ɼİ”‡s8»_à¶œoo·Z1TŒ}P[ô’¬ä<O˜P¸¬Å„´ð ×¾ÍWÕùøùEê"Y' ü/§ßûçÓ»8¡¼Ü¿2üáíå%§H›/–ÉPÔ z )Í;…VÀ—Qo›Þ®µw>>³z2«å^úEØ? ¾ àëNl‚óà8Ànw3›:v«]kÕà´ F#vÒ73Ū†óD¾èã÷}æ¾½Ãnÿ*ü˜Ó´Oõ1°ìSV$;%¦|S%#ÁfeNjâÚe™3´3¨5$Ð5x`UšUn¦mf Vê<é¸ÚzæØþÊ2_qYU¹ÏU‚Ô7±;žŸ=rÃ8ûc‘'3ÔXQ¬•eò õØÁù½ÞìN‡ÓPþ©È38:ƒAÉOVSà‘ý‚42ÉÐøT<’öE}Õj©[Q­Mkç™XÅæªêâb £Á·ó·`ŒC/ÁXGl`ćÁHý:Pö ñ[!`$í"þvOÒ9%f‡²YY<%Þ†ãÉ:Ÿv…¨ )~µ‹ŒùÍZ·ÓØ ×dQˆó øM>·ÏwøTã ÐBŠš*K”Ÿ™]™¬©. 
˜i;yifhñ´×Óflã)r"Ó͇hj7ô{åV3`˜[É1kmsÓ–l\kÀÔô”Φ±•Û|÷»¼ûÁ~j¯ôÔ*ûNº('=¥6/XZfóºèßD ëÄÿUŒ[ñ÷÷ÿáêåF™N(KeRÚTæ[9ßD²†#r5"iØ€"8(ånð€k‡ßûÜMyX>P­õé%2…ˆÌVäVÙôVƒIÝ­Ëpá©4pÃÊSÝvº|0ÔSÖùÈ'ôÒ9m²Â˜GÿÎ,Õ]Ìr´“YºAb¦"ÂÓ1xM†ºpÆVMG+ñëL´œkÔêµ@‹«Æ¢³ó,,ÙÙä¨8 §p¯ÃU×àêkpÕYø×l1Y;§4;N8³p7vˇ¨¦ƒë3‰£w|·™·¼c4­(Ð!9F9QÄÙéIdz»àüñÐÍà%ˇàøZY /Qì½;RVøWfßÔ꘭_Ôãfú.ê‰Il¿í€¥Íº»ºÉ×ÔR³Kn/'zéz 2MN¡g õ@ƒ¼på9ÍcØÐ17MæÈ/Ã?pød;8`ÛQM=®$“4‰&À<¥ùˆ™G Œ…Ø.oc©^#]z‡¼¨ O%çm¬PunÓzÌíFÍ8HbªèN ûÖíX/’!ÔûÏ$8ãýüª«„yÀ¢)PIe @Yyµ %¾šC+«¶¤¤K ²x:½VÃ,ÇŠJåv‘'Ûžm¢ä¬¨ï4€†Mí½È‚ÂN&å!ɾuÿâ‰[u5*y€—«.”lïίÐP¿Ž~»P&Žä 0îkfmñ×ß&ùŸúàÿ°3, ðå7Ð!»«ø¨Ë3–,\•—¼†'i[X¾ Œ¨×fÙK !.¥þÌúêÜ©Ïëå¢O˜S (²y”4–€ÎâõŽôLíÛ‹ þaW°Rendstream endobj 132 0 obj << /Filter /FlateDecode /Length 256 >> stream xœ]‘Ánƒ0Dïþ ÿ6"$äKzÉ¡QÕöŒY"1–CùûÎ.¥‡fä‡wÄ2TçËÛ%-›®>Ê¿hÓó’¦BõY"é‘nKR¶ÖÓ·_÷Uu~ùû•Ic€æ¯áNÕ§í{yd÷P\'zä©„t#5ã‡yöŠÒôïʶ{bœQò»šÑy5ØÎ‹Œ[ÆàE¦‰ØàÈ2¦&ÆÑ‹€‘1z‘Á4ãäE¸Å*ƒ«½È89'Y89'YÇY‡X@~Q‹\+Ù–³ìdI8°÷"`<áx„sÇ·rÜëQ£ŽÏR(mR¾”Ë.‰þþO^3§4¤~•Wƒ‰endstream endobj 133 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 2134 >> stream xœ}”yp÷ÇW‘-Ö`dFĆd×iBK(‡Mš¡ÒƸ¡8á0W Æ>uZÆ–/–å•ÞjuxuÙÖ±6 ø$`H‡@Ò4¦@IËL¦ ”„^Ð$ÌoÍz:]áÐL§Ç{ÿ~oßû|¿ïI°”'0‰D2-oÆüœìäíùùþé'øg¤ 0gNXS!] é)¾§ÓüO¢Ødž…4³±T‰¤Áâñ‡ò4Z£¾¼´Ìµ;û¥ìbñ”“•³råŠÅY˳³Wf媔úòâ"uÖ†"C™RUdª²¶jŠË•ã£-«Ë íªeËjkk—©ª—jô¥?KFYœU[n(ËÚ¢¬Vê*K²ÖjÔ†¬E*eÖT®K§.y•¶Æ ÔgmД(õêe•¡H_¦Á0lÖš7Ö7ØR¾úåU¯Ì•Ïðç°ÅØl¶ û%ö"ö:ö 6›…ÍÆB§°Ù" ,;"9ùD4Mz6¥0u]êuÙY¥Ì!û`šnÚ=üOÉ'œÐ.qh¬_Âû•ýäw K‰£e5àõ¨5³!Ñ„ -GÓÑô¿~²ù7‹ä7Ño¯ÁŸðkù…tB¸-«›ZuEÁÑEF¬š¡ÍIQ&¼Kp)Î#šr1' )d·â{_|i± '…­Â-E8 •¶Ð íÀ€Ë¹Œ2ÐfÙŸ£¹û._$L'å„þ ‰˜ž—_¨p2mÉ…t$hGyh~ÆÝËcî>¦öÓÁÝ”¯…¨=U ¥P”Âéôçl&¨µy(Ÿ“䬚À¨2´Ú'ç g3n kì°Ëÿ)db!ƶ¸ZI äQPÃ/õ£ÍÍPg¿…"Ngìtx)0O•&Ì›ÌxLÊ,® “¸Ê@‚ŒçàqÀ}!ˆ¬Pw@Ÿ—c:Èð™ÐÙÐôÂ3.pAmA¦jŸÓj6‹ƒà"ŽÁqæ˜á}ŠK&áIÆ0ƒ•ÜO7³C¸|bõwD¦óW°ÃLà.à}í šÁNVÀJªT°†Q=.ÿ¿‹óo<Öš/–õ ޮߣª w\@'©Z­N{ƒ(_¶W ¹€/‘ý§r¸œoŠ¸ã·ŽsüN‚°‹sR¾žŸ£p‡Å8><ÒÄ6ZZ­„ÃÞRÕf´élÚVm¡°.£­qßkù€¿e9Õu~ðÙùŽøï~²rdçâ«Å.?íêôGîë¼½ *õ­Ž,~Ö¸vãË/¨>ìJ$ˆæ¦]ÅoêÊ•ê=¢Ú…LýX'„ ô³þDñ~­Ûb¨hصsüÀù¯¿¾ƒR„|b| èÅ_WøEúo0¸'HGið7‹@ËD åPÆT$¶&ú¡üÐ85@9”2¹9CÐñÑ–ˆ“6@¦0OVkï¹ *—dm)®¯\Ee¶šu6O[;9CÌQ„aêèc•ýVÚLnvµpía(ƒ?”án‡Ç‚\“73|õŒƒ…óˆw“È;z‹×ö£·#œäì—è·_Jy)ÿ¼â¦,æ]czò§²8’¹ãá«Þ`&íꈃCÔ¸·ÍR¸!bŠîåFOíéÞ_¾Þ¸PE´¤,Õ“øZYù‘ý£ä%Ù«“¤qQ}öÈl=ÔEN1ì0tàÝÆ`N]£,¯~çƒcFˆ Ýh&í] 5œ8[ÎrèŒhÕÞƒ_IùüC…·‡ñG–‹Ä¯"),,oX[RBêtoVæ.¤È²YqŸC3®À}üfÎEá)BýÏÿod£uª‡È\a€«T"I·=ø}z]ÚK^B‘T4Cv'²µ¬±¥ªÕ@Ú4ÖP‹ J‡Ð†î‡ÅvrZ6›ƒÐçî«7^aãÆú½ï¹HWXL# Ÿ¯ë×úq9ú‹9iy4÷¶äƹç¤hd"Ukê4hµ:m]¸¾§»¯»øùdº"Ø+úøH{̘t÷åÿr÷AÛÁ6 Ù¢+ð†}]sÄw–ýêîZÀú­@Y€j´Æõߨæï>ä w,ÎËHIœŒI¾à‘tboW½>ð€•5S°5“ï a³½Å¦L0µÛv^|‘²¹ÄFÆ,Ø<&/9É!:UŽú„íqôê Êç¤h/­¸¤+©¨Õk´1ÝÑ`ÈÍú ÆEÓ4à4ílÊ-ÛTQAŠ“Ì.Úܸñ9J#Ä(ð@róm;«è­îQ«««ÕêžêÞÞžž^BÎS¸5&ù5ÿ””cb·Bì·8z’9ÛlT+E÷þ±Á. 
ÎL3k „_—WÜø7¡*ŽÖsü²˜5 ož¸> stream xœ]‘1nÃ0 EwB7°$ÆE\’%C‹¢íl‰ 4DgèíKÒI‡À³DŽçÓ¹–Õ}‰_´Ú\jêt[î=’éRªñÁ¦ׇiש™áø6µïŸF–/PÞü}ºÒðè'¿5Å%Ñ­M‘úT/dÎá!g4TÓ¿#[ÇœWÃ+*ÎqeݣºQaYÿ+8ǕգÂêE*¬At‡ ëNtD…uåLÐ\\àLÐ\\àLÐ\ÐÜ„ kb â‹ùœFæ•Í=eã½wª«®W×'[+•þ^ -Mº,c~*×zJendstream endobj 135 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 1518 >> stream xœMT{PTe¿ËÝÇÕp¶atw›Z)²£«‘<ÊL%0+]r•…%X»«»ËCPY<îòJEdMZP×1FBÊG¥öRË¢Á²ÔÒÊtÐsñã.X3Í7sæ;sæû}¿ß9g~"J@‰D"iBb’:rìöÿˆˆà¥¸Gô#›$HC ¸6\úRn Æü \3™¢E¢‚wBnžÝ”™a°¨VEFG®‚Z¥ÖhfOWEEFjTqF½)s­.G•¨³ôFEH²Uɹk3õûø“ç KÞÜ™3­Vk„ÎhŽÈ5e¼0†2]eÍ´TIz³Þ´^Ÿ¦Z›cQ½ª3êUãD#ÆcB®1oEoR%æ¦éM9EMŠz.zŽf^jÌ´§ŸTS”‚b©‡¨0j Nm 8j5‘ ¤‚©J.ȦĔ:'rÈZéú :?"N_–ÌàrÞ qxøo_Hß0¾}Õä Uð¨¸ÈÞ¹‘È€›a(U9‘ÄÍŒJîwãí]{í{ e•°ÅÉí93à?ÌÕÃs5óÞ|>%QIRÈ;¥¥P æ0Å0/“îÃx‰IºÙèx{c‘£ìXÌØ§>œâãuö^¤±5ìs²ŒÊÒ8n}¶´ËÝÝÂ9]ξÀÇ}ëeq•õ•—oú¦ò+e¨)Üä„’2ŽœÍ”dËÐëc‰SŠN~‚D@^·þÄ%-:Ò÷×JÚ~ UÜå—£–Å47 µru¡!ƒÃ2Oy‡£ úúêŽgµcK%87+­›Þ².„µ ­]Õ\殄0¡¬PIŽÊìPÖPër×Tqõ»ü?B?xµuö]é®4x“QÜ,5ëÌiéùZˆ‡¤c'Ý[Ü[êéhô¶¶xß)68´Ñ_=ƒb¥â.2·n ByŸðDZÑÒÝgéÁ½=–¾PÅ8›×²Ú÷+öC#óé™îoΟ^‘”¶r©k)bÏùöÃYæZÔñÈ©sã"ì®ÔîU\}‰ßä-`·—åD'O›òØ­…Ã(¹>t÷`é}7gk.ܶ¸“™ªc1(fŸ­×¯†ÅÅ®ÐD'|6t¢ýôO½Ü8“ƒ×.í៨\Yt Ó®Œ5ŽÆoØ,Ùð¸vYÁr×þ<®§®­ÑgÞ¹®Ð¸)máÀšŸE9†c–Rq[ˆì˨”·ì©«¹5©Ä2$ø—§QÌ)î~ ]Ý{N0dò l!h!ûs»úà( @o³ÿ½¾C;À‡Ð‘Ù˜º=«j h™9™ÏÌâäø>YÞŒ±w0÷NÈÏwl-¡Š¿1…—³mæÖœ³9'§ÕÜÖÖÚÚÆ)~³Ò,äBÌâ8G™05'㨮¨®?7ˆL[å…å{^~~cÞÁjlÝÊÕn­wÕÕŽ-"ïoþ±_„ zht,bɾÛ.LeOîÞ-‘ìÆ°NŒè}22ƒIÀAvgx<6(VŽÞÛÀjm‚Jþ:ym¸_)RŽþ.-º_i&ü0q7ßKöˆþ:Aã%â`Á ®‚FŒ!Y¸ p`ü!ŒÇ Þ±\àfj+ÜåÕ/ÅkÜ$ „Ð0bêɤ‰ä úyÑ6`jªÜµÿƒÆB»Ý,|_Š“RP²ƒ¢Ïl¦¼¢¢Ü •MvåŸÓŽ“Y@ô@æ§“Éd²¡d¼MåU5Û®|‰Š“ÜG°i¸Ä²ç4ã‘NüÐ#â%4? Xl—6ü§´c‡´š¬V°s¤ƒtÈŠÿSŒB&-›ÇM ã{Õ2òp‹èƒ!¬¢ùÕxž…Á̓† Úš]:X ñùk"²lŽy[¥îšxîyëØ^|×Üs½ý«êïá†dól¼æµß(=×à$œ†³uMøÀ·ïz¡ÎÚ›¦mK†ù°báåÒÅEÓ“Œ©ÿ ï÷õÊ» øÌȃl+¸,\ââYL¶à&'¡/3ºS–á,QsYÙÒîËÐ)œËp`Ü}²djg½_)ç&Šg{'PÔ?°zùÖendstream endobj 136 0 obj << /Filter /FlateDecode /Length 210 >> stream xœ]±Žƒ0 @÷|Eþ€ yé-îTõú!8(CC”Ò¡_Ûnx–^lG¶«Óùëœâ¦«KYý/n:Ä4|¬ÏâQO¸Ä¤êFÏÑo&Ñß]VÕéÛåÛ+£¦ »ÿ¸;V×ÚZyª÷&¿ÎøÈÎcqiA5c 0ÍÿRýÞ0…£ÒÃN;5 Æº´'í,ÆPT£¥ cŒåbÛ‚@Ú²v v¬=¤ü•@ d°cž‘·=–ÓþY ¦MN"+ó¦1áçjyÍÜ¥ õÅ(i°endstream endobj 137 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 1438 >> stream xœm“YPSWÇï5X ÁêdÔ)Þ—:ã‚¶ŽÕZk¥uqpCŒ²˜äÂMB!¬a †%1&a {nPÜ­´¢ØN§®U;v¦U«Ó>8^<×¹ÓNuúÔ—³<œïÿÿþ¿ó‘DÌ‚$I”œ’²cË꤉óþ=’O˜Â/ {“ø&u*ÄŠ 6Æ•0õ¯Ù¸ö]ldžYÄT’,*u5y“™ «VªŒÒ̤µIÇ£Ëjéêõë×-—®IJZ/ýL§`ÕÇåziŠÜ¨RèäÆèE+McŽ«FËä“*£1gêUf³y¥\gXɰÊMU–KÍj£Jšª0(ØLŒ´ø¹Üg´é+Ké#*’Áa´ð)ó o~€#O)ñ›¥ÿ*b1~8A¨Ÿ_,Qž´¨$Žt_‰f°;5ûKK1ÅžO;“ H˜½Mˆ×ü'Ew¿¼ß3hÐg9‘oM/QÒjaAÑn¥N óËÁfçö @ ´kTÙúŒý#ª[£‘ ¾ªC~=ž}Ç÷·A‰ÍÅ´>)Yw Ââë ´ŒœŠÐ®Î¶?꽞±poê¡ üEQ„χ×LÚ¿‡ã\ã"þG>NRï'Ô¢`aP{À–™ã rm‡ó7ÙÍ/7i •Uv‡¡@hOõÕ¶[½?uݧݵ.8ýöcÑ7nÏÛí¹e…tÖÒ¼T8ˆ„wþ,¼Ù1Pס!~óέP ŒÓÜÜ×â Ah’¶êÄу#ÊŸñ4<ï1žÕ…<4áwÇÏâÈ+xªˆñ¤¤Î P‡úr›óÌ–’¢bÊêÑtî‰FüE¢°F˜+ÌøMöôÚ9“.cËXëÐ;t*F®Ò@À¹uÝg´ÂD¶sdÇóŠ{/0À±=Ô6¼ERa… ›}ÿáL(”¥Ž ÷ãXO„?míôm0ÃQU•v:ú„ý¼–ùv³Ñ‘mÇÓD~á#IuUe1TO£œªÖ;†5Îó_<Ö?‰Ò;‚cð <÷®vPÓAs=CßÃôkÊð !Qx_ ‹•þZ&1°`Tq½W Å× Ž3ô@X¢id Љ °O>áÓ—àÌ%º±¨Ö  Ìâ´´š]€V~rý…¿>àtÓž›}xnóÅ>Lâ¸q<P$hÉÒ™²h1£”ÃÃÃx˜#/¿Æç^‹ø‹|±žÈíÞ¸ô€°™:¸B+$À$,~´ãÅÏÏŽß¡~Áñ!ü> stream xœ]± †wž‚7(Ô¨–ºtÐõ( C)Á:øöÞ]­ƒÃwÉ\¸ÿª®?õ).²º–ÙÝa‘!&_à9¿Š9À“еôÑ-_ãê&›EÕm~¼3H|aõ‹ ºéfÏGzmr³‡g¶ŠM#ˆV)Ó†`$ÿwu\†ð}Y†Q «hwØH(…UU“Ö†A­I†A=6†AmH½aP=O²ýICQ¼-t¯R -¼ÎHÑb‚ßšòœ©K"â5Uf“endstream endobj 139 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 945 >> stream xœ-RLu¿ë±öF1Øe·ëét („ÉØ`Jj0Qº –M]¦–³íXi½­˜¶†±MºW5a®)ÐŽ_nÀ2ã2AеsHÆd87Fb¢Y‰þ¡ä]÷•ÄBöÏ'÷yŸ÷ƒ¦’TMÓô†êâµà9å)ZÙ¦R¶3@’ã#ñÜ Â@JRÛ£ÃéèÚŒæMX›F14Ýä èíŽ&Éj¶ÈBn—Δ€¡ ¤¤8W(ÔéJ„r›(YMÆÁ`”-¢Í('Èqá Ýdå¦õ’R‹,;öäç»Ýî<£Í™g—ÌûÖTr·U¶Õ¢S”\bPao…ýF›(¬ÙÌ[½Ýæh”EI0ØëD©¢(Má‹»Ë^ÐPTµ•ʤx*…ÚL¥Slb@*‰rP«ôiú†j‹êCÕJkª2 ¿âòÊâ}s[–åc|³ª[›øš[Ní/°¤Rýïâ`old.:ó,±øÄÓsd I+.Êó›ÓŸ‡ûÎO¾Áüt«kØ¿ÇËö*êÍZrŠœh>~°g*:u¢—û+…ë_ì£'þÂàŸ æà< ms—o\š˜ž‚~¸ÒrH~×ÐÀJçûûƒ¡¾ëÆñlò,É"O’zò¦–¡ 
SWq+fìüƒlÜûöáÚF·‘[Ü[P×çºêéoƒ»|ýçùkMïFø°0BTƒÑQç´Ô¿Øu3q&a$†3ß2ˆ¸ÊÝ»<63ìòÎñ¢*ÐçÖtYƒn>èjû(j9èÌ>ϰ„AÕ˜ñϤ¦ë¾«ºÄûÎZBÞ³,)^âÞë°÷À0{óÎðì½™#¯î¨õûýOOòú ·—^ðŒÙà@æË»kvå—Fæ&¿Ÿ½È§>¢‘Å—FéÛq™‰†¿sÝ ;Á§]ÍQûœà”º¡Cž8¸ö0„#2x´«Ùj ’3 íÚ„IîÄ»£øKˆV GÅ„.£êÇÙ^-‰bLÝa§²—'1Ó<Ö=§ÅSûÀŽ@÷9~m-½q¶—ˆƒ¥g9@•ïþÑÛǯ®ìM<ƒÈ½H=è Íj‰‡LqGa{Á…˜ƒ;p·c²óþTç×0ãÇ‚U_ è¡Ê¡ÜWéÖ½ÚhZ¿ )‘¡«!Û0ø[<ëñìü땺3ïkÖ„a&{ÙÿÚ4ÇÀ—Å‹fu0€›>†ÀôÖ®3c);†DM´_Ô¦òÉIÅ¡”õ?=6Äendstream endobj 140 0 obj << /Filter /FlateDecode /Length 216 >> stream xœ]An! E÷œ‚ X£Ò‘"6é&‹TUÛ 0`"a™,zûÚ¦©ª.Òƒoa{:ž^Nµìzzë[üÀ]çRSÇÛvïõŠ—R•JÜLÎx MMÇshŸ_ 50 WœÞí³“+;Šâ–ðÖBÄêÕÁÈÙ+¬éßÓ2 Öü'ÉXð¤ÖÌêXÁL^XR à`áÚ™‚³„gÖ§àc\$uô‰V 2ÆIŸŽ¸eþ1«Ž÷Þ±î²!Ù^*þ.±m«4¡¾c‹mendstream endobj 141 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 1073 >> stream xœ%Q}L[Uòñ@†Ó¬‘Fxl0ŠHe„‰’9>L„°¥+ £-y}ØZÚm/ïµ}…–vE`XÈÌDÂŒš¸©!‹‰Éœþctò‡c\L–û^/_!7÷žsî¹÷÷;çüp,= Ãq<«¾¹µ½\›rOI/ãRAšT¨mÉzÙ™rU 7]LÊ/ÂÛ/@ñy8yËÂñ‘±À||õn½yÐÆöõ8ºS[©Õ+G9]^]]UJ¿ªÕVÓŒ Û§×™èfg`Œ:N èV³¾ál‡_j 7øÆ¹sV«µLg´”™ÙÞÚJ)míã ôûŒ…a?bºé³‰£[tF†>*¶ìÈÔ›ƒCÃÒÍæn†5éXÖl=¬Ç¤»6 cuÝ)B}«`ºÍ†aÙoõ+=±ƒ¹vëÂØ*Æâ>àS8å(9,ÛÁžâ&<„o¦iÒ®¦}«ª‘¨(½~Ò@Ù',€Uà|‚Éɧ¿½WŠŽéµµ=[s£ÔdÀÇ»1ôá€Íóù»ƒEPÿ/¬|ÒúðÔ-ê×OaöOà/⻎{úÐ '*AÇÈ<¸†„Ådk‡Ïmª`…|E1«ØläÁkY6`‹ÇA,FæÉ±éE©YyvZ²ª$ \W£NØaxBÍØ˜Óáæ½a;QR¶Ãáõ·Æ1㜠ñ|$BÂe鑞÷‰A ‰„¦Ã<ð{”G°¼ýkØóÁùÙ@pŠ÷ûH„]Óv×ä¤Ã‘b‡Ã›øïTɧá°:ñ¸ S{™Ã6`µÆ@”RZíÏðÝg*Øu²l2Y,&Ó²%‘X^NyÉ›)É•ü‹Ò¦¼æ½ÒàjR$°G@(:ÅÇüäÒ?ß?Û¿Àˆã‚wêjûŠ5žˆO„ËìÞ馭‹Tܘ0Ü»NìdÚŽªˆQyòÏý‹rú#\^”‹Õ¼ (Ò¢oŠ#÷29ŸÏ=)¸yÿܯG…ñ(ÐDg ´"g¨¥íL´º÷E†‚rÐv(ËcyQ÷þßjô¶¤q/LÐøÀp¸JëóGÇ”ÐCLãb0: “ÁÀ|8‚í{ïä‹#0 æSàsÒåd]¾8ü€'f\3v—{bÄ•šêë+ÐIèZÁ¤¼BÅSÉ=Êô`*Ũµ 3è,º„š` *V4jgàYx‰D»hGêP!¤àyXiXk[ˆ(t^¹§Qª¡~„;êõ½©¿—Yøl3qç.™Gæ¤WÅs³1ìªrªendstream endobj 142 0 obj << /Filter /FlateDecode /Length 162 >> stream xœ]O1ƒ0 Üó ÿ „.­„XèÂЪjûà8(NÂÐß—èÐá,ïN>Ë®¿öìÈGôø¢Ö±‰4û%"Á@£c¡j0ÓÎÊÄI!»›ïO X d7~×ɧº¨²R[½¡9h¤¨y$ÑTUÛXÛ bó'íÁîNU·Õi8ÿ¡äh.qÜ\b$N¥ii’ 8¦ß3Á‡œ‚â ;øS\endstream endobj 143 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 355 >> stream xœcd`ab`dddwöõõt21U~H3þaú!ËÜÝüãýÏ Önæn–Iß?}ÏüžÃÿ=S€…‘±ºqÂTçü‚Ê¢ÌôŒ…Sƒd a¨`hii®£`d``©à˜›Z”™œ˜§à›X’‘š›Xää(ç'g¦–T‚µØd””Xéë———ë%æëå¥ÛLÑQ(Ï,ÉPJ-N-*KMQpËÏ+QðKÌMU€8TB9çç”–¤)øæ§¤å-)Id```b`ìb`bdd©ùÑÁ÷“£ùÔ÷™§¾Ï8Åxì;ï÷Cßy™¼ÿQ"úÝé7Ãnë˜ßLÕ¿åµåÜ53~«wsüÖ{ìþaÿûß¹ŸÊ]zµõ»f÷gŽ?ÂJ¢ó.Ì9µö袻èæX»¡È4´.¼Æ[¾6X´!¾Ô4Õ/+46=±›#ºxç™}³/Nß#Ï'ÇÅR™ÏÃÉÀ0V†>endstream endobj 144 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 802 >> stream xœUohSWÆïI“Û»£ív›\7¡¦b)Û˜kAªâ:PÃØÜ&ÁüÛÍmbmmî}{SmS6çŠ)tƒ EY¤ û0Töa hQMТ"7Ø{Òs;vÛÂ`_ÎyxÏïyÎK8»#„Ôwtï=Òê_•ïз m´¹ÄÚÑq¢êg8퓵Ǜñ§¼¾ç7q!¹üù™ïî<èH¦”h8¢JŸùÛü½ÖÑ*µïû¤~@Ú—•ho0!uÕˆª–‰IŸ${£²:°ö䃈ª¦Ú·oÏf³-Áxº%©„?Z¥ø¤lTH‡å´¬däÔ™L¨Ò`\–Ö붬_Éxª_•©;’•DF>©Ê‰P:šÇäTP‘1¹OÝ ¯éµ¢–I÷ÇW«XDkü?ЦS±àÇq禆CG8¢ ãÄ %Ž·6ÅÙ9w…x‰fÛH‹.|b¾ªÐ;— n[¨Ãmæïîd*³pù½[ *™~Èz\ô…µÊŠÈ?¶&»ÕÑ/ÅšÏmèåü8³|ij¼<¡Zɾúéín´1{z @áf¼ˆ¸·T‚Ât†‡49ëñ$æídác¬QemÀÚåX[Ù©ïB׎ƒ>=|17Uþ—þÜJß¹²ä¬váÏì(öŸ¾Ô÷CºR¸ Áù³›34ï|äVÏ/̆ë©2V³“9Ù ëbû5M+BQÈ—aú+¦Êž—üÞû¾Í³¸ °Mp-Wÿ9u{¹¹JhÀ¼âfhîÉëŒ5)ÃŒ§¶È?AáðÊÇ]v%r£iö.æ‡<.ÑÝ“e£M0¡Ÿ+³_éáF*ñÈÙAaŽoÃ!ÇÊ"ŸÒõ˜×%:æÄåâSÑþúG"Úÿ´´7àC Úºe«gFßû‚õXô%ž^ /&§,úù&0 T˜6ûj ô&oÁ³ÓeíÖÈÓÁߎÎåî}>ßnå° 8ïÙq^]/žÓ‹pÎÐoW"æ×æ_ùa}ýC.Ï›ö¤ó Žû‰o~2endstream endobj 145 0 obj << /Filter /FlateDecode /Length 162 >> stream xœ]O1Â0 Üó ÿ ), U]ÊÂBÀRÇ©2Ô‰Òtà÷$iËÀp'Ùw'Ÿe½\Ù%èñE ¬ciöKD‚FÇ¢9€q˜¶©2N:ÙßtxA6]绞H>›ó©®š5„ÞÐ4RÔ<’h•êZk;Alþ¤-0ØÍyÌÎ¥2ÿ®”h)±ß\b$NµimR 8¦ß3Á‡’‚ ñ.áS)endstream endobj 146 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 323 >> stream xœcd`ab`ddduö 21T~H3þaú!ËÜ]úãíO=Önæn–IßÏ }OüžÀÿ=V€™‘±²¶Ç9¿ ²(3=£D!ÆÀÔ H*ZZšë(X*8æ¦e&'æ)ø&–d¤æ&–99 ÁùÉ™©%•`-6%%Vúúåååz‰¹ÅzùEév StÊ3K2‚R‹S‹ÊRSÜòóJüsS@®ÔÎù¹¥%©E ¾ù)©Ey Œ ‚ LŒŒ,é?:ø~Ú”®ùÉ´†ñÖûï×Þ3ÿxûýŠèåîïË~Ïü=³û÷2g§îß ¿Ïü>³ûûÂ+r@Á“¢iÝæ3Š>ÔìºÔ}·ûj÷…ÙG–Ü¿1gy u¾ë̈n‹î€n÷n«ÿB-ëšän>9.óù<œ Le}éendstream 
endobj 147 0 obj << /Filter /FlateDecode /Length 353 >> stream xœ]’1nÂ@E{ŸÂ7`mðÐ6¤¡H%¹€Y¯#Ø–"·ÏŸ¤HñWzàѼÙÕáør‡k¹z_¦ô™¯e?ŒÝ’/ÓmI¹<åïa,ªºì†t}Îtnçbuxm篟9—ü ÷w~kÏyõQ‡J?U÷¢4uù2·)/íø‹}qß÷±Èc÷ï¯õ£âÔ?>­sTBàYì7UTBàI\G…¸vÜD…¸q´¨ÍqâÎ1E…˜»¨;bSG%žD6iÔ¨ñFÍ6*Ä-4‚¬àV`T ¯ë Zx-(IÂ%ÑD…Ø8Òr†;ƒM FP#êC#ÀGõ¡à#€úÐðÀkƒ®~uè£BäÕïoà '‘¾&gsg£¯ÉÙÜÙèkr6w6úšœÍ Q!‘ú¦ÌG0ê›FàéOþ|[}ߣçÚ”é¶,y¼jÙ´L¾CØÿöqžf¯*™â²$³òendstream endobj 148 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 5376 >> stream xœXytTUš¯}¹¶¸@u qÞST\Zm—ÖÖAö°HHBBö¤R•Ô¾½Z¿W¯Þ{µ/©TeO±dc“E ›€¦mµÕvº=½Œ}Ú~…3Î-Ðéž>3ç̹'d9©{¿û[oì¦I²‚‚‚[VÍ_½ðÉ'žx"ÿÍŒÜôI¹» AÚò­ñjùÍ0¹&ß$Ü­œ7U,"ßñç;e“ ¦Í|¥©¡YÝVÕzÏÒ¦ÊªÖÆ{¶•×o¯ø§ŸÊd2êåÆÒ¦Wšçµª´©jÚË;¶.©XZYU]³rûªú†Çe²²å²çe%²ûe+dÈVÉVË’­‘­•="['+•Í•­—½"{L6Oö¸ìUÙ|Ùk²…²Y²E²§eKeËdwÈîÁGÉþT-¼­ÐpÓÝ7ßsób#q¡è-¤ºå¹[.ý¨åÖ×&ß:ùW·9oÿÅícw\½óã)•SŸ›úüÛ§MŠ?OCÓ^ü® jDvûw“þ}¦ŒÉÚ4bG6·±}jÎwyö'Ë=M>–‹‹:¿Ž[ ¥P³Ý¥ò"a5{ô6Á%Pò?Çø§Ç‰./@ò±}°ö¹Q0ÌÆx·@3T± è|É÷8¾¢ïø ÉØEä:íï2òÅnÆ nh„ª§U[¤Øµ×•ª­Me+&–…vDs!ÝÅBš”w  wЋ¢„ü€ßïõO |4³R{(_‘/Nâ±Þx/þêWú?p^? GV!.?ÊHÝŒù”BfÍäÍä6.{ÄïÖ!ËáUðèž1­õº<.p";ï ›Åûà2 ¼ÖgDö"¨µÔÚë6I‹•t‹¹TÈ4%‡øž O[:/þí¦ØOìlkž_?»m!ek±´B3*ÛÝ8öÞð' ’ñû8à¯H ýv­s»ÞEš«[VnÔáŽÅÂlé:Ûs*y¶Xèæz¡ß<¼zõ¢ÆõÍä߯è*•*OçæŠçýsúfÍõÙ9cGÞ"P•y N‡ÛmÙðü‡w³0LÊ· ÂP~v Bžö²,°ÓÂ¥¼!;cdÊßP¨ö³¦÷›/7¾ïèTzX/ ~F 'Êřʽ‡¼ˆ2)d€wóvF·ލ¼Xä ,v‡¹íP¶Q nlÑkŸLiÔë mëÕ;š³Þz³–cý(àäì½ ä¢j}µ€,v§b¬!oPˆ8œ{Ç3:×§6¤Ö)}N¿È]ä¶[lNZzPzQ¹yݦ5ó¬&O+m@³Î€7a&È $Á<ƒúˆ€—ñD”çÃIÛÂRfžAR©Îh|ßaå¾Ã{ßÚôÉäR±Â]EW[«ÛçTüä'z´´ýØ*\ä…ÁÄÙî³Gö§;ûƒ€Xb¢kY…Ë‹AOá}§\¦É^},[ >òyáÕŠ«ж“óûK`&<¸rþü§Ÿ^&ÝÒ$Ýùž4Cü¹¨ ÿ$¢Ãdd'Ãùðùн‚ƒ¥A Pg×m{äáG¤›ÍXüÛoÄ™ïŠËEåÔxß‘ôa@ߌ<ú‚t=åuzh!+NQú\’M Ñ%v*ÜÛíÛmµÆíŠòMz­ª£Ð&õ™,Å„ü!_hX¼OØêÎŽÆbÊd¢7Ù h_tÃ6h/¿£¿\À¬»Z-¸ºà“BÑ|u¶ ÿYyw˜Ìé^¯Þî²y”4ýÚ¨µ¢*¡ØN˜éëPä¡,~Lˆ¯GÄgE ˆ›á¯kÅI·& ¬ €¤w‰¼z¼(·QªUX¤§Hi«ZïÑ{õ€ƒÿ¨” ;ƒÿU\èôÇDyNPºÙ}€¾$F`—k·zCW©o]Â}œ,¬ÉÍÅ ·<÷°Bí4t€éÂúÌPWß›K"ukk[6W’ö½õƒå0Ú¶•cÙsæeOðð]ÞxÜOFÆã—Æn¨›w(1á-`¯ùÒ ÒƒÏ 60D¬qïpݾ“¢^™8÷Æç(@tBƳä÷=ó;Q(N:¯èëM÷w¢Ûuõ ¯\j9~ùÒ·(>ÁuB' 9÷ªvU$6wηÀyîð¾Ñ·`'ìÒkÕ­Ú!bN$úûN¯è[ÿÂ/–?%)ÅÅárî«•+¶­íµ`ÌÞ\9%²¹™7h•û쌂SwWÅ ¾8ïÁ÷ÜØÅu²i!èL3IÀ»Žr¡åå]~³ÊÝ[°°mgk°<:i3V–µ“òtèhKOÙ*µ uÕÛÖµ—â+,\ø…X0Àp}Tø/G1O‰Ëyæô—±Ç®ÃÌÓå™gÿ2OurAÿ ˜•,X0kV‰tûu_‘Hñù·Ï¤Æ’Á€ ò#^r~ÄʬXÚ^ ®Ëݯp6Ú馭ÒóJ‡Æ¡ñh³hóÀ¦¾ñéÑn’õû„ëäå>TƒYš1 P›3•¢„"&åïôwžcJ6É( _y p€Â Ó~M{¦–Ñðhp4¸SÉl^Ë»,ãvG]‹ƒ¬—nóØ«ŠàÀæŽ&«ËH¹Š|ö€W¼çIJϰøééÂÜíGÑê~Õœ„ôéØùêg8ðã‘ÜØ%Ö·V—ÞV­]T HíêïgYj¿8Oè쀞â¸9lh¦Õjš\ õšÊªV¯ÄŸNT¬Y†óýç»Ï{#ýX=oÀ#ØÒJc*Ô ó`˳–µ§ÇN†¸èô:(É u†ZK#¥^Þ´£—¼~îðpÏð@^­, ÍÕ{óf,jó²àqÙ…"ÓçÕëè—Ž’n¿¦Ó/2Â*(¶6†Cs.Áû&¼É¾‰75ý$ßâÅ©†ÃÆSòtØî³’*BÝÊ$:ƒ}|%NÎibÇ’ì÷áˆ/)8ý8]•B©»ê1Rë!/#noï|þÃ|ŸÀó]ƒ-LãQµc‡ìˆÛ¦p߆gªvªš¶ç*¥i?´ÑppøBöb÷;T #`µ…Ýí#yDs âQS)ÆÝçˆtyû<8½PB0ãñÔY)‡Þ¡sè5ó•Ž6g ´¢UKÿ@ ñÖÐqY¯x»?ãÏ@7ê£÷ÔæcLôµüîæ¶§Û§ŠOœ?=%7eBá#b[ÞÔ쑀ݢ²÷·±£É“põÙGëÚTj•—]ÌÌo_…·ßlo¢[4/ÕI“_l¯ ÆaAþ"f8´+°ûˆ¸Y·DõÍ´¦&Í´óšg;ç[qDª¿Ù‰VvìÝ{){©÷]ŠO qH#ùêýe/U?³Ú@bÇræ+ຘ˜ŸŒŽuOPgЪ­7ÖßÄX†fÝôáØhßh’ü{tè.eç ůßV$¶¤›÷ÃÈ|>ÂøÙ<ïvmðXmÒòºîÅ­€´tWœ A†Osé·E&ž?Ì„QÊÒ·™´ínRÕ ”c-˜'M6Ô©·Ja=,M/ºÐr‡pØ9ÇvfFÐq1©ðZ\·µFºGé6yô3­3êÍFƒA©Ó5·7ªÒî?LùF‚#Ááø©Äé½o$SÝ©>g’ýÝËê±Õœ?õ=nþاHóIèAƸšlôj›¡½~X;r°oh¤ŒlÝÓ²Þ‡£ïeÎø8–¥5îMj·V–®TgØ‘ÅjÂ&¨ð¡è¡È¡Ø‰øÇg‚ˆ#tE¬jìõ8Eë#†ëÆôƆä¶òªÆ*-©ïï_†©ôµÖ%6;6&w é G82qhì—oÚÑÙ´]ëÒºõ”eƒ©Ì\f\£}ô5ì¦zÞøGˆ<Å¢bÓ…Bј+V°<–¸S¢Q >†œ¼Ýnó€lr7zš#´Yšjé¶kj¥¹¬Ã¹P ³°ÙÊ»‚Tn5‘nßÛÁä¡äA.NC á²A·{ZŒ²Z*\0[’z†°Y™pDHø"”8%ç`ãl’Gºw±=€v]—\ª€Çñ‹Y +Ü+¡0|Hžf\.Æ9=/4ÿ(5sÛó¹0· w›¢¹­½©=fJ eúwœX3´VºWºIš%ýø©c³ÿñ{ûÞ§8NbÏðyZ Á†f‹eËÖU Ë+ÎjÁx—Èl`€J]îÿðÂþd¨KèÂ29hŒ4âÏ,´´ÊØA«&s£ •ˆ__½ Rvm` ·6.=‘HH0Fw[è"â'¹CÊp*š u!†0.Ñ-Õ/ùéµ›”r®Êê0–LW´ÇIc ÉKÌy{–sL,ÊĦ‹Å„ü ×BÓ„¼„Ã^8=äbýÏ6À.<"oŒëÂ3t+ùi›F!ø¸à•×g$Í$šV™Ú¥c×@I[ìf§Ùƒ Ó+¸‡”ñ½ø3­ø¬»_¥ýRz˜°ZÝc'tQ~"öê)w‘§Ö8Û¥›¯­WšË •Ë]ÈL˜mNÍѸ€sш/:ýÞg^µ3^ÃûÄNÂN„ØxÿKMp;›äð‰Ü&%ãâ¾Ø?vë$Áš•{>§P8qktñÚÞ–h#¬…²š††’åU³à9XþÏšsCÞÐÿÐ7Áá·WƤû`©¥qåÁÖÕG‰wånô¾˜) ЬÝáð€‹lÀ„k𘽠t 
›ß!cD²ïú‘Áî¶yìÒÓJ§‹Šé—ÿ·ºÝ«´Wš5ª&¬kz}“ëZjç)ŠÁªîúõqʯÙë’VRG}Ïn±)#¾ÐíÅ~C\#ÓäWįó½înãCùlu/š=ü8éý/ÆÏ“,ËøE¸ Úl í"7¯_Yñ* í' íôó^`©ß‰Å[(ù&6dKIù—Óëš.Ý'•(äx?yâË+ÓÇ{êï“~â]ðý.¤-™|‹º7_-±?|9¡Ð-vÎÝ´NÓZ¯Ùž¯<*PùÛxÌŠMAJ¢²É«iÅä[sÁ3è yÃ%Îðg²»&Ò»„=ÁÑßåÒ`ƒô&Ì‚T`Ö¹;4#Í]Û`T¶7jæ¬Ü4žD/½ôo{FzÉwÅn…Çä2»MµR1¶–í[›LV­]ÈCl5…F©Ø©Ô™ûñL¼Øãé] u¸§à®òÏèÉ=rºPÔaî³)>Å¥&Ä¥/ÄE!ˆ÷¿è«OõËhµ­Õ¡¾÷ÚÏ•ÖJkU¾(‚ ÜÈ$º‚=}yLlJ]>ûée(fˆ±@MÓkMóÕK(kƒ±j‘»¨|wýøåÑ/OÄH`_^ÌíÝám3¹HSyË¢2@:ËFƒT|o|OdŸÐ€AôþÆìÊU[›«ªóiäÃy2M²âÑŒøVvjNûÕâv}9Mþ7ñ¼ø©B, Ng+Êpºqë(kÕÍòo*G«ê®M;V|y|Ï‘·â$Žª~\”xGû_"=c  zÏh†’ÿm"ztâ®`‘÷pÕMñ¯²bш7ÁWšåGÒ(Vn—Ép©¢«Ë£×Ù4ù´xç5»½žU¿œO‹˜çf¬HAê{Þ¼{P(ÊÆÃ6ŸÒêf&Áû GWñ=o1ŸQÄ/v]H^ŒM(¹ êôqüŠåù5êÅ55TEE‰iM>†<ùþ kóYLþ©¸ “b¡­ö†Ò¡G W ëïƒ+d,híh³6ÙÚ(ím™vƒ¹VW 5¨"]qŒˆóüÎãG®¿pùo¼pÑ5ÖŸ‘:í õ{¨Î#©ÃGÂÙ䨅ö´¯&?kmîŽvœ´1Cª·'ÓKÊ/¤ŒñRþ©Cmú;ÍÐÕ' ÄáŒØ½î–ñî¯Åâ­'Ô+ú(o`þÅ)åÆXUNâøÙGŸ~úÁÞ’ì`óÚIwƒ×åÅ ·Ø]ÄÐ<Öß@€ —aœ^Ï[¾"Þáwš6j:Èö·'6áVwç éN©¨´«ôh ·'Ü ç­ò¤.Û†«ÀãÏ?4ëõ ‰á&²½Þ[jw0 Ý^öÑ8†xmÛ:©J‰{5Æ4Âiߟ9 `¼É¾3áz~uM{ÁïNŠãW+V8æø¡ÙqÅ$£DjÀ«ÓZ[íÔsÒ¦uêePVÌ”l9H péC‘Q>­ôGø$„óI Y:®$kµv!.H&s¬{_ò`x/%Þškññ¸ÊŠ1@q‰Ï›ç“Á@â7 /á9Z>¼‹X ‹ê[Ê)Ž|†d[H ¯‚æK©×妯?¸‚A?ÇûÈØþÈço`̳^¿‡w(…¿?´Ï—²ß? }Ìäz{LãÖž_½õ›ß@ñH º¹ÜTiÝJ•Ikoü]1£Úâ€üÄçzˆó•\› ¤ºO»|)_Ìwº5Є¬EÖEË5K­Æ¨Gö"e¶aÎÁøÅäÛ>ΉNÖ®aÚ´8òW´,+Ë š‰q†½ ÀÊÄwv뚈zù~èCç¶Œ–¬˜·mмÁ†Œæ¯×™pTüÅ7Š:Wk#ªŒ>½+;´“äŠäén·6®ž.?꾎MòG…áÙ“o‘Éþ v)©Uendstream endobj 149 0 obj << /Filter /FlateDecode /Length 162 >> stream xœ]O1ƒ0 Üó ÿ ¡jÄB†VUÛÇAp¢†þ¾$@‡w’}wòYvýµg—@>¢Ç%°ŽM¤Ù/ ‹êÆaÚ§Â8é dwÓáý «ì6ßõDòY벪¶zCsÐHQóH¢Qªm¬m±ù“öÀ`wg}i ”Z9û%Gs‰ã&à#q*MK“\À1ýž >ä¬_5WSBendstream endobj 150 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 285 >> stream xœcd`ab`dddwö 641U~H3þaú!ËÜÝýSá§k7s7ˤïk„¾g ~Oãÿž,ÀÀÌÈXÝ8Á9¿ ²(3=£D!ÆÀÔ H*ZZšë(X*8æ¦e&'æ)ø&–d¤æ&–99 ÁùÉ™©%•`-6%%Vúúåååz‰¹ÅzùEév StÊ3K2‚R‹S‹ÊRSÜòóJüsS îÔƒPÎù¹¥%©E ¾ù)©Ey Œ – LŒŒ,v?:ø~êu/ø©8ŸqæO!æ¬?˜D¿·~¿ÃªÍö;à÷Ìßaßg²*³}ŸüûÎï…˜X_³}Ïù~ì{Ñïc¬ŸÙøä¸XÌçóp20 Dc\endstream endobj 151 0 obj << /Filter /FlateDecode /Length 183 >> stream xœ]O»à Üù þRE}HKºdhUµý&b B†þ} $:ÜIgûì3ë‡ëàl¢ì½zA¢Æ:añkT@G˜¬#Íj«Ò¦ «YÂú› ïOŠ`ª¾Ëس9×RSMÊkX‚T¥›€tœ‹ÎAÀé¿V[ £Ù&[œ#“îxœ#£Ô¢¥FyjEçÈyù¾&ßɉ÷€T­1‚Kå­;§µ~Ÿ²‹"È3é]«endstream endobj 152 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 900 >> stream xœ}‘]L[Ç便¹D]Ö„*뽉šð@ø01ˆŸq˜Åq0ƒ ËZÊ-”­½Ýí-e½ý¤-=ý¸ô‹[.…¤ØRæP&Ž™¸ÍŒÙ"Ñ胾Ý‹ñÁÜ — Æ'_NÎIÎùç÷ÿQÖ (Šªºtº·;»g¥§Q©±F:¡ßþ‡{¯×Bꔩýw‰Ô“¢éqQÿ¢@Q—'ÑEÙ.Óæá†h{¡ÍX-íD{ggG3ñ|[['ñ†…¤ÍFƒ•ИÒb`ªÃE¢—2šIæòÑÉ+# c{©µÕét¶,öŠ~íP¥™pš™â i'é1rˆ8EY¢Û`!‰#Ì–£ÚEYl†¤ 5DÒVA”L·Ù‚ g‘V䤩­:C”ˆmEo×|#Eê¥.ߊôXE<¹€þ¾«£®þS5ÏC)Ï‚ oVmˆM1.ÎARSpçô„ëRP¤ùÏÏË'±çTã,8Ü3PÀªzdï$íVÓ³3|WT|-<ÏBÄr´ÔéþÞ>ÀÆÜ|%›,Ƴxý~#T$±‚îíJ÷ÕÙëÂÕã7+N‡Ò>|La3Øáý¤vêž M€ƒå£|;:74¢¡ˆWFÞjSÒ\`nªºª‘Oü‹S,–¢á".?u°î']–W#š ;êô¦Ã)ü*l$¯ÁÜ / g³œ÷ļøé8,C’À%ñ¸”io÷ÙôGӥ鷘ûâ kë·vîþØÄ9û{zYqÅ€{©°¿ÊûÁÖèíÿðêÕÏ…óÅEŽ/ÜÄgÖ6ÿ‚; À'¦UoÍ@;ôõ\0~~S臡·ßb ~ÛS=0Ys˜ÜgªXÿxI”SÚÙ¯æo} ØrÊa÷›‚6> stream xœÝZKsãÆ¾³|H|µ“â-`Ê‚1rð:޳yUÙVURe呈]’påõ/ÈÏN?fð yãì%©=¬L÷Ìôóën~·.r±.ð_ü{\ëÛÕw+Ao×ñ¿íqýârõé×­C¬´ëË›SˆµP*wÖ¯­q¹Tf}y\}›½ƒÓûì|¤³øÂf×›‹t¬ªW77iã•» ¾¦ BÚ^þicÄúò/«Ë_›½ÙƒbÅ£ ã¢6ëËÛªËá”x›B‘D¥Ç.»+Û¾Þže 2“ÁÂ6’N.½%Q Xh10ÚU}ÕëSÝÍøú*»­ß*ŒòYuJÄ@Pöåùé×jªp©tne¥_î@ÅÿÀK¹¢ðVñâ™u„Ü€/&_¾‚5RM-Ä\˜á[d¨@UrŸÍ¥V¤µW›tó¤@>½;ÞûútKW¸6äði}1ÈÉQ=2 ª ØZŬPoÝysV¡ÀFŽéµÓ:%9¨\†l tÙŽ… !k6ÈB’µ /'Kkb†–¨Q•h¶Ô´È5ûã”ÙÓÙ=[¿U½¡?µ‡?QÿªÐtF¸G[%Æ2+ÛÉŽå¡ã{ᡚÉû´Þe§aƒ§­›$fÒk«²kN mù~ªT`-èiè„Ñþ ©¬ýPè@®¬Ô¢‡CXÁèq,Ùi¥c®*/œÌNM„} ¥$nËëCy6ßUà ›½hú¾9'S7SS76wÎ%k«p3à +AU ö)e.´OŸ¤Mt& á¯6¿‰4°d¤ã„s¹Æ /¥!Wù;ÊgÓw2qÿ=:(h@QÄm„ Ã:•…ß³]ýØE º –>~ói®»ªÁG€ú5?"ÄÄ€Ê{ qÏcÃÖ… UÖ´#ÏCÕ™ËØO»ÑXdm­ó%™&÷½G‹©â³ÈÎ]Å® &†é&zoàû—Ö} »%÷Ňgn¤¼§°Ø7iiˆq"ÒRÀMŒ 0üóÝhù«ÄS ‚¹ÌÜ2qY‡Äû1°î×@9$n߯»…¡¯ËÃàDÕGM;Ø7$“SúÛ¡TÇð0ÈïΘ13ÐÍóE«Ã?‹ ÿS«ûÃÆÃ €Ë=pwgmܶ›#¦u˜ë i`+£MÏëqÌZ@²¿Ž¡žU±ŸD×Ó¶? 
¡° Ô9å{jÒ2›uhtrcR9ÙÓ#IÝUùS63Êß÷5Fé Gß[!°K*8§7g<ã˜M™€þö19Mö ìɉ‰Ÿ‚£VwDÇA‘"0}›¿ëëcýi“¢7”¥vÈ›®IÀ/öÕá.žqi2½È°ŒcüB^ãPL,ãĬl ¤ƒzÃiÍè8`Š#%1.iaD\Žív¿€¹¶â¸g•¸Úìe?”˜sc–aÚ„}8íyÁBÐÙ«jÛ×|d…ü7# †ÝœO[☠ª²=¤T¦9o³Fm¥Ékrkˆ€ïÕñ?I±“¤ÏöuD v¯ºøÂÍq“Ï]’)|¨Ä1›ùج-ïHmAQ G£„/£Çœ;ª¹joÙËÒQ0‚–ˆ ¥I“€â!ñçhÏ'Ç®ú}ÃñÏC»¤ß+Hðí|K«½#´ñaD‚#  öˆ\OƒóïºÄÂ?x -å>nË„²\Õc¶À,,$A^ƒ^öñ‰å+É¡ÛꙢ²Ã¡¿Úä›_½/ú¼é¨°Q“êD“wx9~VQu’Sý²DãÊÛãú, _š!ìÝwi»ì¾îy0óºòxG`M ‡”x™ìúÜÿ"è÷å/_OE‡¨9Ih§zÈTpØ=®‘ç £]}L©L'ØsvÏ÷êö/q¯É¾¨ÌITÇ»º­±Ð}K/òÂÚìõÄ0¿%'º?%I>H êÆÛê±Ni©œå“D0+æÀ@ŠðåÇü^“¦º}ÙÞýˆ£‰À0@N ÜhÊœ®º×#€k—Ð9T°J‹8wö`E¹š"sÃP ¹½Ú¤M}%åóŸ&*Ü(Á J66P\g¾gŒë@AM#]?ápD¤!H펅KrEÈ­õQHÚñ1±G¸¬5Õößó*r¢G¡òù êP•;6_>QÌ`åX*M (Æ©bš`éáØCi3ÁÈU{Ó´ÇòCJ¤ŸJ+£M#µõ„gY¦os}Bz¤œÎ)b¹Œ‡·ŽÓ1$”¥+o()Zÿ†·fÉó#ùtÇ-' §°Ð¡’aÇ Ñ„šÉq^»~üRFí¢ÒÁVç.ÐA¸=LJÕT¶$ IbV”Ë.Úõ¼yv£ØÎÂ3ã[<¬€q"…Yƒ@áìHar¬GŠ\+í¢¥ $1²„4b²‹Î5Àk¹ž‘\e$0"±p0•{§ÇN͇t$ËÊ-ž)­1Dä–ašâÎ4#AG–J!ûu¦@§>‰aÖ·t¹0r¨í[\,¡$KÇ “ãù\NËÇû0v¾„51à„ “ ˜g~¼$±øÃÂ… öàÌøÿKñ>à,Œ‚ßo FÍå*‡Fö=g(5‡ÍuÆ?G\EÀൠ•+áàÍŒ"À¿œ÷º~³ cJü‘ gw^ÜÍc&S%Èü{íѲ„f’f†’„d¦R­ðÁsV'àžë jœÂjfŽ€­UŸl,˜»¥sá a 9Sü”RkLÂxFb§Œ¿ÂØEèct,`âµXH•¢`»ŽØÞÒI®2 ²ƒÓ@8ÂbÞÄ€céV1Ð2–™j]†ÜÙ1G“Æt(ãöCV ‰ÑWÃfT#B±2Û76^¸ÀjÈæ0Qˆ€ ÙÒ)î„¿¸]il•‡×\@pÏ Õã-TH˜T´äz{߯¢F+Â^TwjA(¹Œ¯a‹ ÒÁ²¦ÝqCXnh5i-à×Sw¦ŒˆlèÊ„u4g˜*fQ¯Ohµ(Š‘€$±ÂÓmÇôz8ï ?å Âv˜  x$Á!(ÁYšÖW2? ¡œi·qN¼3fyZ†à S©î6†j/“5C¢öÙv¿ˆU®!¢$JêTòÜ勈r<7ÎÑ¥Ivéuˆ0ô™øN¤&Áͨ8…ŽÕÑÆé×Ũuá†"s¨"[I°0Jq J1 ˆFƒÕ%˜1ö+Tµ±l&}t15ÌáFo'ÇÊ  åXCå0¢–)˜¬5ÎÐavÇ[O°ær=9G°THïI.#¨Ï:nãa{”À"޵HjdK4äBžÛm5S×U]Q+‹9€ó%<–ž—ñOÎð1­«M¢\œ¿E†çª6Îà êe7-:êpFìOŽ«YýV*]º75þȨ念ð?ãX´;óôP8 —4=4<_šO­å)©J+pÚE%DŸ`™fTõ°¼€PÅÏ©Š‡]©Å  š¦_Ú©4ø@8››½tz7vÛRç‘2 N7oϧST·"«º­dª“µŠýÅ&=HŽt±8æ2µ?©-dgmêænè×γɢþ•>à0ÆÕלc,„…åÔ¢=·”YE9Ä$”ÝpÌÐ&pé ûök|ùvV¬íÞ‰iL2œZÚ®¸CoZIvÇ+%†Ô||z[äk‡,“ZÛ’ÔKq$™e†ã›Ôq+Ð3÷Tâ‚óÉ™5JfÅö0EŸömãÌ×Ó„éq o‰÷lì-hõt´ ¥æÉ ¢å‚­Èá¡Y!hÀ×7<¦U°Šj±€"·Þ¡yi(HÝr?«†f¼–IŽê 1ÃÖP²·ž‘¼ça.UhZLëŒg x.ëÉ3®¤‡˜ ¥QzsX}ó䯜äÒ¯œ€%HX:ÙÓ¯œ^çÌ^`ˆô©ÅôõB±)L.!v¬«B9ù-Í´>šéêcï¥Tz)Âe°)&D2÷…µ(£- ³TrCì z©?”$%ðÇ9â›$û˜À»7I’ÿÃ&Ép·wo’ $?©Iòô†O6If.ønMŸú*›$Îý„& Vÿÿ#MGÓ"6qšs•©GM—Kl€`ËC[ªAJ^ƒ¯Ã™‰æ‰&lbäÕbÝÓŸ¹iÀt.vC¤Š?dØréÏ{¡z:OÔ #CD_q´¬s…{0×ãá$ñˆeèð7¬uxd…J¿ÝQR,¸ãS,4÷“Rãbø%²&Ì®¹ÇSŸHÅð=Œm—×Ö< <+»I¬Ý´ƒÂyÿ¸e€Ú3o ¡aÅ´±nW}7)ÐÏé§]8r¯£Tp¥•³ß+P}—Ðo•°û-Xy[ˆ,Ö®Q7ÒIjeAIy÷«Õ¿ß%lµendstream endobj 154 0 obj << /Filter /FlateDecode /Length 162 >> stream xœ]O1ƒ0 Üó ÿ e@,téЪjûà8(NÂÐß—èÐá,ïN>Ëþz¹²K Ñã‹XÇ&Òì—ˆŽE­À8L;+'„ìo:¼?`5Ýø]O$ŸõùTVõBoh)jI´UÕµÖv‚ØüI{`°»³iºU)Uü‡’£¹Äqp‰‘8•¦¥I.à˜~Ïr Vˆ/4S?endstream endobj 155 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 175 >> stream xœcd`ab`ddä v ò5400qä~H3ýaîîþáÿ#ˆµ›‡¹›‡eÒw¡‚où_ 0012Š©:çç”–¤)øæ§¤å)åç&æ¡ 2000v00”€t°;ýgôu`àûÏøùò’å Ê…„|¯û(ÖYš•ŸQßÏ¡òÐà;çg¿Ê /xkÿö7·œð¶ö®6i>9.æiö<œ `D<œendstream endobj 156 0 obj << /Filter /FlateDecode /Length 177 >> stream xœ]O»à Üù þ€G:dˆXÒ%C«ªí0C2ôïË£©ªgùlßéLÆé<9›0¹E¯°±NGØüàëãX[•>¬VµÊ€Èx‘áù €ó˜Æ¯rrg=­#ÖDÊkØ‚T¥[ ”ŠÁÀé¿U׳ù¹,àœq†Ž‹ ÎiŸé)·”æZÜ]1.DXí1‚Kõš³Ä³¾¯Š g 7ï¾Y8endstream endobj 157 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 578 >> stream xœUOíKSa·Æý`kô²{Á™WÃl29ˆ>8©Ö ’—íêfÞmÝ=îzÍ´­¤é™[¦ëÍ‚p£2„‚ˆ"ŒÄ!Ô·þ ©@*è¹·§mû‡çð{9ç`d­Ac›Ïèm¯tûŒÝØØScìµ€×,™§¶@­j­`üÜAÉvªl£a²b<:‘+øbq] „‰Ð'¶‰Á2´-nw{“Ð*Šná°"«‘ ü ËŠDÊÈ#2Ñ«O˜xGs³¦i.II¸bê@g%¥IÐ"$,œ²š”C‘X”=’" Õ;]UôÅ”ø0‘UÁ ÉjT’9JB–.ÔŠP=z„§«ï!+úŒ8idìæ¤¶nl®cƒnXè{ÓS33Èr…4\ŸÔÃÎcJ¿wuòwnÏ>xæL²®AÖ˜–ôNÝÝ+N Ãd*t®°<·ôÄ9“[X|ü¡¸zëÕÜÒKzàÍQpkï”ÀÙ¨Ö?ŸOtÄ•Ôà•tF':¤aŠ›ÌgnæsYÈ;³P¼Qîû×3 RþãC|(Ù:!89×÷"5?½ÜÅÓ‘‘ôrâéeþõÈsxí¢ÒÚºypµí.ÿi™îü_¸µ¾7n‘í 0Ç~§ý—ƒ&W0Eæ!‹9F“u E(–Œò¿7l—Pá>o7 Þ:[©²ô­ÅÄÿ)ƒ¶{´n¥ˆŠå•^Û_†ùl¬‡5±Fv”uS«§Ý<õýÃ{mLVvŽÑΎЊ^Yendstream endobj 158 0 obj << /Filter /FlateDecode /Length 3710 >> stream 
xœ­ZK“ÛÆ®ÊqÿB|Øœý˜†´rªR:,Ì£»§ûë¯{ôómYÈÛÿÅ¿›ÃMyûtóó¤··ñÏæpûõÝÍモòVšB›JÝÞ=Þðx¥uá*[YW(moï7?½ZKY–E)Å÷«5üÕÆûÊŠ7+ü¤)ƒx½ZëÊ!hÑv<È:íÌ¿ïþ‚›é|3_0ö»ÛÞˆCýþmûÓêîÇd2¾&(÷ƒ8e«Šz³Ò%ìf¬øiUá/®~jV×ÛÛP„JU¸ ha #ÍíZÉÂKÿñþ.M–pÓöíá´oM·R ¡¬‡ÕZiïWÊ Î‹çé×>4Z%¶í¯›>N-]ÃyfpâxÛCûŸ¦¾Z-ˆ,m(¼Õ³­¾óúó²>{ÜÁÓIž„1Wˆ‚q§µÙŸ·kÚƨBky –*‚µ2ªÞ=­Ö¦4 ¼‡ã“¨ˆô¢›ÔχûÕ+¶ ¡u·]R(¸B?ëóz[Y+S^*dJM2»&=ð; ”Ίm ¦|Gòè šyüߎtl8¬Ttlôd<4ãÉ‹mR°“ Ͷ0AŠ74}Tèip Ñ^R»ÂkíUé(m³ßIo4x›Açf€mitÒ¯Ð^Ã.AÔû".·ˆ‚“ nšñ ãMÒ+p1zÄ ­8«µ…õñ·.åý*=ÃÐ;4Ï“â<`ÀÀcIÆ8>âo2hñªâ¹ª Žãާ 1&ƒ¥”Û!N)5øÅ0î?¤¡Žl2‚µâ99îê1Ž+=nK¦´ (Mi«Ig­M Ï=AÔ§­ZY‹¶±õ_iŸBüá§<ûŸ?¬t»c?4¯NW…öjvÐo¿[pOŠ â+FtˆåµøsÓu3CC‚ 8¢TZÜ­ð¬K¯&ŸÒ‚Š€¬ÔRŒ¯âT88_I¡¥-βâáÆ?ܵ«Çsߤ9š=P+š(ÅaŒ¹†¤»|ù‡zh7IN5ùµ¾\q’ˆ"¿~Ÿ¹A"[l1YÈ‹:û=94Dõ„×Ö]¤˜I\x©,KûŒ@ËaB~/»WøRSX>õõöUî1Œu?¦g@›îïCöysìÆþ¸¿_-Ám‹ùóíxÞÕW_^»8ž‚¯aìü[É)÷aª³Y—ÌH€ €]Ðñc³™à×H1 2âñÜÁ§c·o¥‡xK  ±nº0ι—ŤÇémÄúÒ9ʳÖ1½,„-dÕ%sÿoB g‹óƒ,Á'ÄXÓs¨#â_¹1ú'òñahúw+‹8V£´%TvëO%TU¸”(“k|"¡âþ™ô´1„O?ùø¾FN}xp¹t°ò ‘Êk°9nÁ£ÙNu_C^‰¦ª ΟáÀAôÇCZ¿J©›'žA_@î˜Hu¡­ºBÿÓœ†`ùSLÄ0uö5«XFrȦ á3že {çfÑDð¥ûÀHDW¶í¦ÞSžóͪÈFxާ$ÈO@?ƒª8ÝÇÔ@+Øy9<Žy¹,¹÷¹4€Óçº_gŸÈª@´%Ø•Ó=#‚Ã| f¿mr˜¤¤ä8wÌ™µaæO8ß ›¾}ˆç¹yú–£‡˜’ÐUcI}þp±ÝiÏ©¼ºœ|‘†øUFW€IÑæ¨8HGI¶ÂƒË,xŠRªÚ}šd¯µDv|nYkˆ }"H!ú`‚ý¬Âé)Faá˜ôjÙJÖ’ÏŒìõ#Ä\üŽó”ÀΡIÓ7Ïr@KŸ“  suB«qHå/í_£» “‘¶ñö9p*À‰þ22:½”òÁ0öõÈæ^;åC2\`ývQ‰G5±ŒD"ËB,F6¸µVŸ­‰LQ…™£‘+ErrêkM“ˆ6HÄ’Qݹö¦ðÊ«ñ¼Ç—¸€.$ÖÁàÕw;,3òÞ8²µ3)"µ¶)b…êÁíhˆiˆ¦BY[añ4ÿ¦ä4/»”Ù§ÅŒé¢ÿÕí¡z¥ƒ¯(ÞL…T? ¾KŽh ëmøŒ#‚ྠ“#’ïñº:BxåI¾ÙÃ!O_³jùJ©È3‰]¬N èØNåºMƒ!ÝqœÖض=˜!–v1¢kªuÁ~Ìwa¬«È¯'J$§$Ñ84ÊMl`™ @F­ô‹ô¨,ŒõU²ä=ï¸Ö¼›e‘ó.ðÆRßÔéwÛ=æQçÈÙÄJ„õ¸ÕSD{—£~»%“ã.eH $=žrO)Á¥SJ^%Øç(¡Ól;þÙ„Sì_Í@5ˆ-¹Ì„4AEI¬I­¨#?ôç.­bÒAú2¹JüŽ9¥…ê’×eÊÓŽhÚª¢ úÍcšl>i&“w¼½¹ûmò,•¤ÉϤbFÎòÆÞUžIð­!¼"¯b#"¾>1ÚÉý K75¼h¹Œ£ßY{„›(,IƒTʯzÎÞ×cú ¾‹¥/‚%Ô†»´àõ)oNĤËE²t&D?ºÕqZˆËÐŒ¹PÈÑxb8·cý°_( 4–,À´Ýø¶y×,ÑMÈ26˜"äŠ=¢±¼÷؉R¤ úñ#S–iü†½„i5¶$•ÙÃxIœ2|$BŸg8 X¥é.1…Õ0G©“¦ƒMHÚD%¾Ëó?UeSJcžo‘\çÅê¨elbAVœ»Kºk@Ø„$#†Y~8ïÇá7/ Î\öÈÉ¥G„Fæ«€‚7²kUÒkbÇTlüæÉ¤¦ÂÆ‹’Â~›ÖõfӜȳJ‚qê_ôÞSv/²ip€¬Ö7\3)ê¤eKs¾[ë03B|Ñ©çv¡J¤‡U*MÏT²9îå^æt~kgBB4§¤§>•XYa{ o\Ù€Gtì_Í•'×4 ‹rb|¡Ø§»&‹±¸X¦(_n*SÚn ò‹Â(– í0‹šj-ªšBy]„r§!9&÷é %ضÍzŒabîøû¢‰·9÷ÜUL ÅCÛµõÈ©ãm÷RqûšºÆåܘÐFÒùd.HŸ£bteâ"v’ØTð`ÌKå IQŸõh+ómL΀ÒÚí84ûGêý*Ö}¤É,€¡[q>ÅÈrµÄWIÑ6ü€¥Ó:Kš¬ ù‚'àÝ6H@ ð-èôL5Ÿ‘œ¢Û1~Ëy<ö}<£ãë.£¦ó2-ÏC6êém¦N¿Q;Þ»çq’-"’¹À‹§\²~d†ïæKb^ “PB”‡Ç`Me1å™ììnó7Dså¹Ïô‘¾€cðå-òÕÍBjI«±òuä†=K|}ŽCð`‰ƒ«˜ˆ‘Ó&%ëý€N"w :¥#d%Q´ô°x0îˆOÑ+,\÷á†â£š†S›¼ØÁTå@úº€!±iâ^xEzL[Éù,ùEÊ’K‹k>œÞS@âï”ylê—îÛm}qºðX#þZ²Ï•¼M½Mßt*¡V jûÔñ^BQI=?V/q­Eâ µ‚wŸCTˆ:7~èÊÎ(K~—oÕ6åEõ$Þ&Lb¢:æšpÐ\ä8I!ªgÏû¤âÞ<]¶]å·ö1^¨tMBê<ߦÖ&œÃ8$XBÍá„×'ñèC„t§fHGš|Š||¼©Y?ÔÃLFªŒõóÅp} FiëånÎÔÃábªàØÅy“5q°9p"Z3ÜVÎõY`œ¼2,N±¹ô»ï”¼ÀH|2é.Ym•«íŠ ¢sê´þ ]ÀÁö¾Š¦ —mYíüT˜ü+ÈC‹@¬ag=ÝæBðõíû¨’“ñê> stream xœ]”=nÛ@D{‚7ÐÏ÷gÂ6vã"Aäµ2T˜h¹Èí33²S¤#‘À{ƒÅ®Ÿ^ž_æómXÿX.Ó¯~Nçù¸ô÷ËÇ2õáÐ_Ïój»ŽçéöÙôœÞÆëjýôm¼þþsí^è§{ÿ>¾õõÏmÝÚÞ?š.Çþ~§¾Œók_í7›¶?ÚªÏÇÿþ*»q8}¾ºÛ5e³ÁÕš‚j¬ÑÔ`}h êëcSPYǦ Ž¬‡¦ X§¦ N¬Ç¦ Y{SP;ë©)¨ÐØ|˜ÍOÔmSP·¬À7) ø&£‚ySP6&#£‘eSP“r&A£ AÎ$h4È™‚“‘ÑÈ`c22lLFF#¯‹ÙÉìàu1;™D.*'•ƒÈEå¤òj j±‚ÈEå¤r ¸0œ†ÃàÂpa`T×°Îa£††  Q©+†– .X1´dpÉoˆ9Èà 1™+†– .8¡Ã< Þs9Àb2CAÈÄl©é’Ó%SIÈ` 2 ™LA&!€)È$d0™„LŒš69l‚7ÅœdNlœÚ9¹s?¥THà§’ ü”BR!1yjöäì ›”QÒ¨0yiöâì¹’`Q° W, äJ‚EÁ‚\I°(X+  r%Á¢`A®$X,È•‹‚¹’`Q°p´KÇ»x¼ ®%ߢoÁµä[ô-¸–|ñäEóu£ðÎáíõuY ÓDzôù¦+NWo®óÜÿÝ‚×Ë•_ Èê/y&?èendstream endobj 160 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 9081 >> stream xœÅz tTUºnŘ]DPŠ’ ‘sPp•AÂ<#aÈDƪÔ<ÏU»æy’J„!L"2¨PqbÄmº[[¯m÷.î¡ß}ûTj÷½÷Ý·Ö[ë­“J 9çì³ÿýßÿ}ÿ>¬Ûocô[2sÉÒñãÆcþ32Wr[îÞB!-Éý¯ës‹à€B8àvϽ–F¸-„Šîb݆O{bzC]c‹`Sóˆy ›šëG,•4n5W3¿Y²e}½`SÅ?Áb±fO­_9­¡±iFóLþË‚–ÙBÑzñ†¹’ó¤ó7-ؼ°²jqõ’¥µË¶,¯“?¡xrÂSO?{ÿÄ‘Ï=8ùùÆ<ôpÙ#«]óØãëÆŽ“g±îg-`Mdd-d-b-fb-af-ea-c-g=ÌZÁZÉšÆz…5õkë%Öã¬2Ö ÖXÖLÖ8Öˬñ¬Y¬Ù¬'Y¥¬ ¬§XsYO³æ±žaÍg­aqXkYCX\Ö=¬¡¬bV!këvV °†³HÅêÇêϺƒ5™5€õ<ëNÖ ¬¬A¬)¬»Xw³V³³Jq4YlV¶@x›ü¶…+ ÿz»ªhzÑ9PÁ¾Ÿ}‰¨íÇé·©ß_û'î˜5àÙ—îœuç 
¬4qÐëwUÝ=ønÛàÇâŒ2uÈÏÜË÷TåMòøÅã‹õÆ—Ü[ÒTBßëþôð²Œ"¨ö÷8qßùû9j¤{ä?h|à̃“GÝ9J2*7zíèÔ˜;Ç”ŽéC?Tñп?,ùÛ|Á˜ÿnËn¢ç²¹ûDƒÑ/gÊÎ åBÏ÷pwçö‡ìEèAàYÔ „RPѤ©?ŽØí±zJZC¡D\j¡8ûõz«¡Ä¸0ï'8‡ô?Ôýhé2o·vY“–ÖZH´€J¨kùet¿µtÿÕtñja™´Ì¨ 4A“Í CÊ,%š†Ù7Œ»$ ¦(Ρ”Ô+"¢‰Ü¿£®À®"º‚º¯èæÐÙ$Ësm.› º,q¥O ‰ù@u6$„2 9eŠQûÁJ:[´ˆ*±8¤n¥Ð…âQuXD•g··ÛÝí;Æ _~óÂUH8@gRY#2 M"ª….1ñ | ¿x+”IPÕF¡§@:êKÇA µØS®”#•Dýy®­împáf[Þ-Û5®€kÅê‰_Ã;’µ‰sRhyî×)ñJ<ÒøÊäŠøJ»Ña€FÂ̶èõz“E7ð ƒB ÉÔÂæ´)J}8å.7ŽM"ÊÄ&•ûŤ AhéûôÒ"ÊTbŠS. +S$'åñØÜ%™`0‘PX¼f|È÷óä¯ÊÈ^•¼Á3»Ì^è&ìl›Çãv¸Fó¢iÿÖ8$ÜÀñXHFB©ŒÀ¡ —Á`3”ˆ¥j!^ÕÄIxŒ~`z ( Ò¡`‚⸘ÜÀ£çsãQЬÂ!âßZ3a­ÍÎÍ?·îüPÎO¨‡â¹Š¢* «EÒ€2I¡ø³ïÛ+ðg˜]|n˜à\;Ûþα+Ã..~ûa’–¡R%FuI*zCÁíFÂ"tàüôÖáªy“§¾2•šŠöqw µEhà\ÛsB6çÅIkfõoÌ¢³ßœGç ÑG¹¹\š«|é!Ä„ã¢;È @ÜàÏ舸ÄÕW~¤‘?Ó+¸ú‰ GŒÄ‹Õõüáì_¾§Üà]ݾep!4YÐÔ·ÂtSÍ¢ Ù‚Üêó…HžÎ…›:ˆ˜&ÔLN™FÈO’8Ÿ¼Ññ_çãNT±£r—ô,FÃþ††"η‹®Ñ¬Uë%M›($ áꞘõðC8Ñ{^§\l{«»Õ‘v„`Ôl=vêÕ· qv×ÜEF+õÏvá|aîž×¹Õô¦¢@¨P‹ÄafüRFbqU@D­ö¨#æˆ~‘³ólnWD«Ð)¯µªdÍžŠD$è{é"únzèã=£Û½–îÚGíDå\c…¹²¥ºnõª ‹ 1¥úƒSüôÇßÛ¾p©š­fê7¼³/‹ÖesºÑ‚¾å_ÄJ¥„â4J‚Ê6’ÓOÞÜÐî‚[©À{<›Çæ"%÷IêÊi[,B‚ŽƒnÔ¤€UkÖ@-!À4ÒeKÅdðC{ЂÁbkBê“À&¨“›j,:‹s=‡y]2³Pß[ò—ms¤RNÒwÂæw ¿8!÷ ÉÃÌ¢‘C9!ˆäoÀ ²K{0ïFÿàÚ® mÑ$}'cÖÌ3Å•<ó<øŠ<2eA9ƒLk Z…á‰Z£%[S,ˆÕ¥×aÔLŽNw§«ÓµÍ¹=йã“K=€D}BžÊ$SÒÁ>:ÛÓTZZ»ví­§A5ÌWZÖÍÕ@¤RH1˜z´…ƒ ÌU"j9Ø…jŠÐ0&Tj*~ÒÙ ;Ú¤k›g›cæ„"ÐReªá“ô°Þ“á ”Ôƒtÿ­Å Iò&Îæ£™¨zx~v0º;‹”=C914z'wgÝ!íAHü õÛ÷sZ›TÅ©ÊÃó½Ë½5n^¥{‰î$ÞFEžíÛ™×™¬fh"9;4NáÖŒ/Ðæ"½Çí½'U‰Ø,*Ò d™OêÚ¥6)$¦Ñ‹1dýÙ¹¡œk¹!Ý\±[ÚÑtït’)4ÈqDì‘âHGüi¦DQt0@½MkžB!0(ߤ3VN˜‰gjë2*<‚îýù¤<)˜…©…î2ÂĆ«òµo§­#å Óˆ×[ÍŠ9—b²€¼E¾E¦%9×¶ÕmŠ–—Ð3FÑjú!ºðuz8AtìŽïh'~ù‹¨<‹áÓˆªs€«œ®ž¦šnQ™UV¡fO=¹ùÜ'ß¿q ç9!uÉà:Ý$i&8Ýu’jiUI]m ;êŠÛ#TðÃð‡þ zÄÓÎŽ„2 ¤èç€+íhu¶Ú¼/ôm8"M:ßLª¦™uV‹? ZpžKªô-º]šEÙ¾¢7?j¶f–z–©Yß›ˆ‰å'Žì '÷’¨:Nræqq$c¸¶*qm-ëæxžÝçðC?‘EA¦ºÌR¡¡YR$¨#@½H·H³ÈPaª©¯¯oªTn‚„´ v{Þ £Ž¾ ¦$ ùL y*¾‰ œÃç||hð•j‘«ˆY(t¤0€’²€P§³B=õ,ø4§(Bý@®¬a]Xj§èÓ@„«†üÍBæúçç}<÷ž7ý}a ºP´ ´Fü©¤ÒßL¸Hêlú\…ËéqAÒ!“ ÍJ»6y(ÔvŒŠ×¶îÄÑÀÊÌŸ kÃrU6¥]iÇ-Í´_Ðع3ûi$v€.xA¥cô,í4ÂfA½`Üè?ºóÚ¬8 &ñ©))‚œræ’’û€X¬ç·Ä´I×Ý”ø ËìÔ†„¼XSÏnvbÙµ±ZUS9ã©HÔ ßÜ )[q.y½6OÉÏ óáTÅ’àÖÊ\È"Ð'o˜Âm(׭חϹqk F5'MÑ.G&m'{ ñ¯Æóšïˆ÷5×Î>–ƒauHa#2¡¡…2±eX’èÝó«ÈɾN¹ÿ'"§Oa3«QÖäW€¤·ðJƒŠÖ›’8wç¿¨Š´Ü+©3ÉëúTE Aëò ¤ÃdJák¡è—ò5UÕÞ$7Z„ wp®¶/žÄ¨rWù«½U»éq<7ßÕŒ?-<› g­É"“4kDM«ëγº}Î œòVQ@R)ßÒ\ÞÕ¸ï—ãèŽËûÉwÑ]§Ñ=gÐ@§ßÆ@kÇüëØ–¹åÔF~ãÜ*iQ(ñ”R°¶•l¸¢ÛËÑ‚¢yWÂHpå¯|bfÂüþÛ¹ÂÜ‹¹ܾ…ãÇ%m]‘Ží¤ñÔSörHTzˆš¾¾‡  ¯NF¬sç¶ÞMº¼N&ÿ¤Ø#­SmóÉiƒ®//8zù¯ß}ûC2e±ÞbÕžœ¬§=Õ»lÊ00šU ‰$€+ÄwdRáQJp%LJ5ž*Wm±w¥o…w¥gÏ!v5wÂ4 Ç1ž;y{/¸´óŠ+à Ø1fÀäÍÿí ñ깡ócq sM\…ÕÕJPåFÁšÚRµ`MËje£¼IÛ`ñØðy†C<Ý íqí Í)žé Ì]‰Y@,Ëëã_«:ç3/4²Ye:¡H"+~ðéû'>ø´B­Õ™D„œm×zõ> ‘: 3æˆùOamÇp%ZEnYµaóz3I'"þ‚üõÿs"’Áv"¡ð(HúßjŒú-ºÕ<«; #!ëôyO¹o骥'{Q8ø,V&‡rŒâk¤ŸªÛV[ ‹é» ]@ßa Z6œÖþòÅÍIm«) Á}©=Û¬½bÃÄæˆ¥Žˆ!Rr×Zo+®äbjè5çÖðìyTX’r“€PlhÎkI'âêŸß‚SîH”Öž;ùÍÅ_~ØÝ-ªQ­‹oh±Z …ø<ÎíÃc-.Lù2Ù†W‹Sžï'¤ã~œ³©¤Ü/$ùL"l¥'í¢ŸÃ_ÅÎf¯ Óml‡Î¸3óqn5;“˜=FtTÌÓ†ë(&‘Ïpö1˜· ‡Â©QdæÃŠ#ÚÃÔŸîà£þ¦´9]¥XŸçRµõž{ ÷žFœlÁ×çж³…èI´”‹º§·Àv;·ïÿdËô0šGßbºèÛI¨ðÌû'PŸ; ,œ-I™[¡Y¢”7Õn”n€ÄôÒ£_R¨ó4—Ž€Ô™‹§ÎCâ­}¥:+¦ô¾‘5Ùœ7[ðÅôÞ…BôñõB®Ò¢„J+=\¢¤IÍVN‘N†/:6l´U%ÛjŽ™1«gá‡Î½—v¼ü2x1ðAèx2½1'^ã«„ÕøË5NOÐVhW5Ö5Ö–™ñì_™c¿òº÷ˆçþÔ‡9ÖÙùšm$B î´¦„Q™½Š!#¼Í·ë7´@T€Ì8í33áf{E TŠû$Òk Çê@35¼²Œ‹¥ÜìÖø}pµÂŒ/õPø¾.&j댵v{ÙÞ´`vò:R)0óMf!Ϭµ`I<ZTJ jeÆãfjØ­G™›Ef\D}õçWã˜W^hhÅtÁΧêÁ64·Ý ° )øê¼éø+Tu¢Ò¶›¿ÀW+%¢°&M¦ÍÜ}È\ô9x ž²níRÙ•P_xqÉKú›Æ^’ESß:y­>í ÎM>5”ó=*<Ì]!Z;.&ôlÿÈ[wþ"ò°± DÐw†§’Ï#Ãþ6B ¤šH"æ yÔ ´Šwë·a ôÝçßÇÌqC”²:Ì.èÀBÀéßåKÙ¢˜2‰ÖD*½wS¨®j³FÚ@™,f³ |•_ë$8¿H½§ØF¨Á#?;Fìy¤”ÝdÇåêrƒJ/3I Š]b¿Œ@ïÓ·sgÐýk§–-ñîØLj¼º Ã]ÛwvE[ƒm¶ $>G¾³Ì¤ÊÍ5,*[4åÕ;àÊë>uôÔ;8ØUµ‘úMã·áÿSk͉©ÀK¡!·Ä„6(wc¤ÅÐ$(~%î•B´—±ÈƒÑßÎÎDóèçÏ[ÎåìE3~GL⨊‘8ã¾Èƒß ‰t,ÂòJÔ+ýG¼aÐļpB³DiVY””±¡ˆ#0KÌBѰòÄò÷޽þæí"‡¼E«kÆŠB®i”A‚sXmŠ)ÎÞÃñƒ‡ÁV}«2aðkpµÀʶ¾ññûþ˜qŠ. 
é™ #KU§z9j7]Ãe~…¯ë;–95v5ÔÀÒ9ë+e”Áßau¶(…õÈUt¢¯ô½@¤‘É1R£©çAI,:=3£‰À±Õ½Õµu/’c1„ •‡ÀRUJÒc­ªÒ˜e„‘Åd«×‡…DÅÈÈm6‹Z,Í„™Ýœf:2©ébsζ SÍ$çªÙb5—ü–Üçdø qº¹¯ÕUœÂ¢… ¯"ö;ß^˜¦&n£9*ø#Î~ßÙsûß§áá-î鄘Ý7ÆwØ2اÐà>÷“ûå|uƒXGVv­Ž”Aâaš˜Iß>éØšÏ…ÔEñFøaa›_˜ºt:$æNmûÈk÷A/•»¤Õ°Ih"¥Z5&T‹BêX[°#é¾ÕCÉQhìü|µZˆ““ó˜î«HÅDæD8ïk¤ä-à=LoD…ç8£|·:­N¬»¼Çì~»Ïæ/v°£j¬ÄnŒõÐ 3‹Ô3xV½;„¾ Ú½Ñ6;¾ÀwÆíñß­âFU¢ð±¡œkhM_’.Ì7ìD!æa&€ÖX&b¬kÂW¾½x[ÄgÚõ-ü”1F¡»@{›;-p*I@e—>Üx°þ`óa C8N¦ëët;޶¯-™I¶%’1žÇårâ´ß…QHaÉ‚-võrµPÕ¥©Vz>ýPäóÐáÏ‹C—ìAÓ^s²9—bf¹WV¹¦g<È< Q*¤’°*ñk™33úk–³h"žq‹±Ö°¥Ž.å*u›`%œ¶{Î…º˜!j‰Á¿Â+_m½æŠ;Žø>¤å9·z¶9¶A2ŽQœî„ŒqFgó½2¦úËt,†ÓÔV`ª7ÔëèxšyÆçW¯æ7T4n¢þÝã”mKº’ŽäN´‘çÞåÝíÜý¾ãÞx PeÔL¡_@Š¡; ¦;zXŸ†º”ÛÐÇHD¦Å!'é‰@.°ÖIýØ÷Þ2,Ì̯ðs_¯ëabò¾9'£§é)Qzª]ìn‡D œ„ž7c{#îI4äâÚƒo¤!XûíI¢]â’Nw¾1/’¨D’jœ¬ÛýÌ{kž,ú’]•#‹2¿Ǿ °•+Eç,Ó)ømgsÀÉ¥y LÏæv6C¹H$Z¡(‚ù=œïh6å5—8¨NSŸ1BŽÙ•HEB L,ø†©¼íEø3^ð\õû —X.2‹1õóéM͆&KsñošfÒ ñ)ì–ñS (øp‡´«Õ™N!ÏÕéî„[ Žë°ðè¢u•‚úzR¹§>¼VÁy£¸|ó…×cyþ<ž»íz)WdÖKͺðó,JVfX;ÛÄENvxãŽ$*¼>–g8K‡Š[äai ç¸Ålµ”ÜÊFµ4`Ë5®‡K+ÀÉ}'öÜûÝè¶O!âŸÍ»JóÆ>üÊ£u¤EkÖB јR¤ÚÓ©vŸÞ }¤=|¦ë$²gj§Ðýé’I/-§JWÏY]ºJ£Ó©¡Š„ÔØ5ÿÚ`Èëȸ¶Ýlܧ¥É£°ÉÒ·º³×Gbó5Êrç·h–j‹èA<õ¬g+˜}8‘Z.“†”Láü:°7NÛÛ\™4ÎK`õüø>¦é¸:( U`£c¦ îÇ5çyâÙÃŽ ¥²o uMMK„t½Ö &¸?ÓLæYfTÀÍIQ›ˆ¸IÈÞƒVgçæ-;Ï´['¡«Ü£Ý§Á/ˆ«í{qÖhÅ”ZÒ¤1«¡šhLÈ[3ÉDÆNb‡‚‹4ìøÔ›ÀÓeþItge«d™^AU½4_õ¼Alm²Â:BP·’=€óÓÉÓs'Ž/}nó*O{=eti½Ðc…Ü0IdDQA¥º¦éÄ®ý±Ñ]@w«ygÙM`XßA¬ln8Âÿ†^àJhVѦ! il3eö°ÃýÔqÐÀõ±šó€³Ë³ÃÕEx/y gdy-Ô$àŠÛ“ŽÄA”äÙ#®ŒI9æçj° j…‚ÆÕ ye ʬž˜/ÁJ¢)c’»²\Ô…~\BkÒK-E··½MÀ©5ë\!ªÏ±¸ÛÑ‚"ÔŒµ‹k—0 ;˜ÄE4*ج®­~ô§¡»ïþÉŠïèþ7¨ä›É[O´ç4‹ ®G,ÒKÑ÷ÜW_½üÕž´¿ÍÓ#0cu«+jÕ*(µ RLÛ­+²µ£gùŽt)=^B?!£ÌXB>\žvxÃŽúÊWáWð«·÷|îðlwl'Âlø]éG£]„,ƒ«Dëj 䡹\ý³3Ÿ{×ýuÇ/P6—ÝesC—<ñƇ=‡W,Êï)ôq°º9N£5ÙÁçÏëÎ"ÍùÍ8÷~AZôwJýä åhB;ÿ‡E_# ÿ Ýy¢âȪ©ujŒúþÓ¹ä'G?öÝc¯?EªÁÃ&zÐËð>¸äð¦·EXš£µô}\ã³–< ˜ë\¼^ .ÿðÖù·ÏWÎ$w 6®nö K±(üSÙôÎÓg®:sèBó|3ã»úý›½†“-}ý6î £kŽ©Ü´Ñ¸Á´Ñª³0;ŸvŒüê«ÝhDwÂ?×ü•Èìâ6c =„&FÐCÇŸüã©ýÉ'¨z†k–ÙeF™xÖÔŠÙXºbï™ï¯^ùúÍÎò5~Ê®q0j×̸Ò>ÂH3ï- qY4 H™[Ä Z°Jc6×\Ø/º¨,XK+š „j¹\Ó´S{€ºBQ¡Ùh1`õe°I¢*œ‰ëÞ”7ãióf¼[yvŸ“i«ÿü_™ôïŠ}†mÙ,¼0˜A!õ"8ŒÆõömB€Ú ìAWÈt§ÜiOÊp3­¸4¨éåB©®4ˆ "£¸Ø¤2bÛ@Ðìÿ»ó ë¸Xž0£"6¨¦ûÝ HnE .DsÓ¹5¦†:XEÔ¶ó»Hèrµ·;º»«)SK.¢ßæÖšêëa5þ{ÓÒ ¶;ÛÛÛ±µêjl¯%_Ñݨ4ôæ4‹É{¾"êDZÍ¢—ÜG/Å?–®ŸÂÓÊåb(!ÄQY‚´ƒ´Í›ðÇÐ@Ô„? ˜)¼áPF Ρ„<*&9—ò¥‘³Ÿî²qK5•àlbu|qÏ;{:ýšèÔ|-¿áåÎÑT>ƒÿ°*ºøw»:uDúö\²•.äZêÕ«ëå+ÓhÂw”›:¢”—mïJj‡„ <Íò^ÑËsn®¥VSÇç›Ìø`2ÈnvšÝ<“ßÂk7Û±3p`[ÄnÃ$‚î&y>Éúø©0§Æ1]‰ÞâÆ¼QÔ‡’h /æo÷Á4UD¥J…F¯&çV½TñÒÆ—*Ч¬Wéz‹¬ØÄ†M‡[2ÒO–~ºü“%~}P‚D,‰Æ¥>…Ÿšqxö‘—Kâ€,ܗϸöm¢G±ä*GwçÆref*œÏ­nv£$úwLxÓ5³×tƒðü ÙJ4Óôž«$¤ø\l$ðéîDƒTÌ—Œ¨#¿;û·ÓÊMà @ª—ªeÚG–ñoY´eITÛ¬…B–ÇH'Ú>¿  þï ~ò©|e;x“NL~sÒqGåWBBª”ËÄ1mXK]snå¹5QuTQõ 9Wx=‰#ÉÈΠ*ýü•••>:‡é{!Ýï þ2կǑÂq F"1…G壞¯úøˆÿAƒ¿j‹ð‚ž€×˨2(5[ ÕÜ—ªó„×ãùTý&o.šð ‹V•PÒt—çtƒ£!ýÌ#ôôð‰rÚåPPD­„Ä`$ö¡ÑßýM‚hgëQÑÄ~ô‚ÓíÑhݪÎÙßH9TÔ µƒè·)ÖkŠ©½Š ÂFXH$ Úô15•‘·IÛU؈6i¤MÊ-ržX-ÓáØâ3#Á Çí'w'»ã»RæjM¥¡ÚÂlÆcJiÂ{{¶9I×vg—k{»4#m—taUØŠ3;™L$[ùi€jŽðcML˱Ý×kmð¨¶øL¹J¥7hÈJÑfI•бӿ۽Óîc¶Ì‰¸Ò¯ØdÞTg"[LµÆ-ÄME¸Ax}}߯~!\Y„¦€ˆÝψŽ+#tɘ½„¼5us‚•ZS þ1gÑ[uØÑòÛ°ÞCovuciOֆ䎾±~¢åY´ ‡o?ú†KâfS£Â¯ˆX(ô,°ÈÌr³Lñ ÏÚ+ô$e¼Ãžèt“qtã4mÁbl{#mŽŽ ßÃW©¬PE¶ˆ3š÷v¿#ˆaP…¤M†–f3)¦o³¨,*«ªøŸµ=ZŽ“DfÖb”A™Oì’gé™GéÒÓôÌðúPy¸Ü! 
*b8C‘HR•6&(m»®C×þ ¿[…Æë:´ø 8©”Þ¼«žû–[B÷¢žâft!Úaçªk6TUkF© ¡<(XˆóßþñË=üÃk(3Ø´¥®¢>Õ¸Û‰qî¤þ“%ùöú@.]ð‘Eô ·hdPNpʱÃ2»Y^í£n†<½ïn®¿Ô×À'Úøvl>ŸfÙh õÍÌš(¼Ø¹ÞäžùùÔh²rå½Ä#Á6ŠD»@:Œ‡u™¢Ã€3E¯µéJ Ì Ö_ ñÝxªmhðÿ ¤Do`Áü÷ë…\zšx-‡ôÄØ}ô(Ä!ýM‹}"¾Ù„FÓCɿЮž&–Ñã؈íÓ žØŒnÿÝ} ÍFœåŸ·Ó·—ñÃûÖðÙžë#³èUô6÷tÊÂÿv ÝÅ žùÌyœÙ®ªœ§š›M|}/O5ûEÕJHè”o­[{¢ã]>3&Õ8wŒ0o¡F1 10J¥Ü[3Íx•`£mºîcì˜ÍÕ†FóaG˜éñŠRüÔovd®æó꣜€;óÆÓE¾B-{yS¸ çT$b‹” ÅÌ›†.««$ŒÄpÊ0]ò æÅ0|\Ì­ºù6FL’ub¹–¬[WQµLK4«”"Ê̶ÉCš(Ó¦ØåØéHÉþ…ôc±þ7 ºendstream endobj 161 0 obj << /Filter /FlateDecode /Length 387 >> stream xœ]’±nÂ@D{…ÿw» ¡kHC‘(Jòæ|Ž\`,Eþ>3¤H1'=¸Ýñíbx9ŒÃµ^¼ÏçüY®u?ŒÝ\.çÛœK},ßÃX-Wu7äëƒtæS;U‹ýk;}ýL¥Æ…Òßù­=•ÅÇÒ·úiy/Êç®\¦6—¹¿Kµkš´ëûT•±û÷×ÚîÇþquU’Ô48«Ýz™¤¦Á \% ¸"®“\C’€“ŒÀ€º ÚÀÚ€º ÚÀÚàI:±Osï".F]޼Ñ$ªQd£ˆ&Q"EKЀ†ñMŒ #˜Æ0Žap59 ®&g£³ÁÕält6¸šœMΘ×4³qfÛ$ ¸!n“ÜÛ$[bN0»$;"^Àô ÆW0| Ó×0~ ÇsRMƒˆp®€Î€Žp®€Î€Žp®€Î€Žp®€Î€Žp®€Î€Žp®€Î€Žy]3ãäö<ׄ‹Ä•|n`oó\Æ«öV{ÉuÆò·ÚÓybU U¿êìendstream endobj 162 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 4439 >> stream xœW TS׺>!pÎqlKš j{‚U[µj½V;ØkQŠ02 BÈ@&’?óaždÂä-âÐj+Ö¹µVÛZmµv¾í½¶ÝÁã[½;b{ߺ뽷ÞZIÖJÎÉÞÿù÷7ýÂßàp8c"CWDÿuÁüù¾/Ó¼Sü¼Oq·°q÷gŒDÀx.Œ÷·=ÅD¢'г!âqÂÃaæ®eå¤ä…¬%§äe‡¬íJY]T’œ’ü ‚˜ž½B´2'7/MA¡xGbÒî䔩ié;#3¢6ïÚ’µàÅ…‹æý• 6ÓˆMÄt"‚x“ˆ$¢ˆ™Äfb ±•XA¬$VÛˆÕD(1ŸXC¬%ˆ7ˆ…D8±ˆx‰XO¼Ll xÄ“Ÿø ñHÌÀOFPDGÀÙããw“Ë=íßÐO†•Ô êz ½LĘ÷Æ Çžwiü߯WMÈšðýÄEë'^|lÕcß>žøDÐ…³c爿!&þî—s0x¼cÝÑ’ç~zqPâñîônä )Þ•¶–Š,û1Å»*Dà!yçBÑë$Té«ÀErâöÝSÆð- ë|x5™)èÕõwå¼a…5°lGáZšwG8¬?9ñw®À†7DGݨňf ÇÐ{(hz*ˆ÷˾‘•|á¯~˜*ô¾B}ˆï<„Æ[jLu†ZÑSÏÎbÎ4\®ê¼Bó¼ö:S}ýd¸óö†=[›6£€f_ý¯ >:Š6Q­Ðªo‘Ó¼_Žç6oŒš 1Eññ¢Ì¢T}¾ëeô9µ %TìÑ·ÊðZ ãâ'Claœ0/]œI@ã¾,ˆ#4}ÞÉnNš€ìè1.ªñÆò—)WÆ…æ%Kã`D4l:8ûAÑ%¸5 =p þ]8§ÅÃIžØKëVãý&²õü÷(8b8Ù×ÛÒÒ[Ùƒ'ƶÊ(1H ˆÎ¢ ¾"J”VR– É\SЕï†Ú\wÎzBîñ÷$gÐÔ Þ%ïê‘üEÊ¥%«b_g9€_ ;¾÷ƵWи Dz ÐìÏhm¥:»”)'¥j­T ¢ŠÙñ³V±c`ÌÜŸp'çõ%xÎX/vž:üÅß÷} Èî,j 1›ä& h'Ym±T xýHF9uNYIVšDÂÈå:½NW“´ wh,[z75…Í»¤%¨ôô(¢ØÍ4Ãí ê ìE¡,ƒÊŠ&àîõª™âý°§¹"Sð Šâýò6QdoÖáÝG€FhüÇh÷Úo쬨­ÅBÁ1Š}Jà}šÕð5ëÊÂÂÒK”%»Þ±´gH€¤”¥ûƒ£@ÿÚÄ>ªÆE¨¸Šˆš?ªøK?§Ÿ¡q¸ÞŽñÙøÑ-½?R)º­¯íÚê^“Baeá¶­©‰ÅoB$Íòn?‹fžìiê=°V4‡ï¢®µâ%¸oõ‹’š»Ô^b,¥Ù¥äe¦å—–ˆ90rV~„À9+|D©^ôÊœïæÜ âÝê}ĦK²é®Â…Ù4¼…_YÐt4ŽÑSeåªÝPL§·åw3V²ËÜÔ ]ôñ­q1ÂÌ1£–«e:ŒÙ[zäÏr4x³#{u¯Tì]¾ƒ¤ÿ"*Û—|1ˆ×…¢1Z—ª^Ɇ9ÀNEÉuï¦6Hk‹ ¢¢‹+èÝdlh.¸®uÅ@+H©V)WÔ ̦@«Wƒ†æí‚rc¹CG›IØ,6K}u°Ùd±X6à_êKkð=]å­­Ú=Snü܉ȫ‡Ö Þt±”ÎÐï_3÷ *X[É”Y´•Œä¹kM¶*Á L3Xh^Së—ˆAÎ(¡Ø)5I,…&\ Ï­RëUS0 Æa mo ‡ƒà]€‚¹# T¿îaÿ)D!³I(Ö+JDI[Ûa¼Þ´õhøé²»p‹n§Œç—÷½=tév÷ @cá»ùµì8ƒÊ¨´ëèz²Òn¬`d»4–òa(H©2ªmstß›oËvÜ®%/o-€aÑÞ§×]ÍD~pÎV 0™,6³áa+NG·b¾VŒ<ÎÁdŸrPî Š”þŒ8Bšp(ˆw½6ò8ÿµ$vÂT&ƒâ}ò)Û@¾´?Ñלý°V_ 5ŒPH¦¨Â¢bÊOfPšAñ=ì IÒJãöÒÇ8ˆ;*Ò܇ÝB5BC…ML³åd³jröF¨>oÃ…¦–Ó†ÅÁhùP!™_`êxDZ!TŠ2@ØDµ5Wd ØlJ¾#b9¤C hNé› šht ß5J :±w¶‡Óêà ÏRF•YeRÕì îYé^ѽ²zWM–k—³ÈTÐ `­·79MŽÆïQj°£ÁÑèhpu˜ïía ©èÐ[´ŽµF©ÓÆ‹$ÛÅ ŠÂò¹˜}¹ƒßñ`wð{æ¨ï)Ÿvº‘ÿ 4-ò™Ñ7h±w>ß@µZÚL{€>}4k¶ “ÂÒÍò ÿÂë‚Jò¸i¸»nÈT¦-³•5dªèz˜t™U.½D " ;ºfåQ0œ÷Vd玦xK<ÆÚµ–ªX˜SŸãÖldWgsƒ²¡Ä)@ëÙPþ Ôšèøp†÷Íö¸½C_Þën‘Tª”‚2›ÄÝjÚJöìisÉ>aJܰ±,5V¹#7B!¢~ÃÁƒÛ.KÎÃðQG÷y“Í^º“Q’0ìVËò¶'ånÁö5A‚¨›Wª~ý§·¸Úô­šG[ú¼ãÜœw18äì²i0{:::Ú÷›€þ’TʲrÙ¶ðàØ°Ø7b¢CƒÕ*•tY•ÖÁxŸ$ëPl€ÔÆib22ve'TìzÙáníkë:7töȹ㗂-V‹WV¥´Ê˜7Ü*$%½òýô°ãöÑŽ77㎳‰TÊÞøº\} ;îuv;áÆT4ãí£µ½A4âSCÖåëb9ÐK—Àûàá°â&þQ N™/ëk©ë°wÀA8XÔ–aÖš55ÍÊ1|A­Uç$F‹ba%$¾«>¯sB¥Þéƒç|(r/Zò04pÑ”ÇGgÈj«Ýõ[ô=vá›%é ‚-;r×ÀZš†ü_DÏ~üyïµ½ŒÑ'ˆfÚYn•ê ŠeŒ(1F‹#ƶÆäß_êÎmtºö9*”:%('å7”6µÔ54·ç·å˜pÈJ‘,»X«Ój4¸‡ »ÚDóPMF’5eÊ¢iÂgvVïlɘ*ìì«z EM±Vª“B¤Ö¤uäÓÚy|;u¸¿~ù‰<wõŽZK½>µ8bíÜšÌQtŸï¦›ÏÞü zëÚ¨²7Wææ:ëK˜Òz}SE3ýß‚<&Îò˧ÝCI>íüå  {pæjišÿàÜ¿3×ÿ;ůƒ¥É…ëõèÇ`MðÙ¾² ï5" D¯³ šÎúùBΠÏ0;ÖÕá¦F ö˜ñŠ2Ïzïól=HÇç]JSåˆÒ&§t¦fldŸ­½úèìÞ„GÇ‘—ÚÏ9óºòNÝÎQê-¥II;Ó‹c æŸyý«¼ùõ€â=٠̤_]“òÆó¬¢ ˜ w@¥Æ,c¤\£VØtÊ 6i­¾ Ó'êLد-àP8´'²ÏH‡á\j ·ìÀu_ËÚ¨öVm¦JÙ'x@Q ú¹Ë¤e&“œ©­rÔaGæõä¶fdäæí\ó©äÄÕ¯Ýr0F|nPE{‹Ø©ÔiÜT_K!þ{Êø²·=tç§á‘Å| «À’m:f~ß=лÿ#ö—á·ÙÙBï*;Û{û4y®jZçÚw™2™´‡” 
>x®p;>¯¯4{C¹£3Û2ª’}½´MÂFŒ˜‹(¬'š:ú‚jê3¹«]zp2¶JHUC­¡hw‡V,x0ƒR/Ø:‹]tÈŠãh¦À;ÿWõ'À’ÀC>$clz„âK¢Ø 5žÐKùÆÊˆ(Â% ñpZ|w^§jA¯2+«rƒ]™Õ»ª3/²/\`çãÏÊ|KnÐv²,-¶†OÑÆÏЦ›hCe»±Æmn65C³/³È„T;«Ì—YÊ•Rmy8;+”¹žÍfzåÁ_ûÒ wïêÿž·¾±Êý1@_ý3òß}4@_EKHpé]˜z7Nݼ}¤2v3ö>¢Þê½ä À­ë)îÙd‹Á¬\™\°‘Ö“Š«kZC!¢ ÓwÈ¥²ín¬˜zä7:pûôÑ€Æz•ÎI4 mD|.âyOó»»{þðýÍs JÌN yyÁ¼ŒŒÕEâ¼ebV õÑ©_‘ ùØO_"í<|lßG': ÈFØ1ÙñóÙ ,µ£;¯g{W__nGš‘ÝXvÀ{§«ÄT´«.=ÄûÍö^ÃaÎiuØ+­m®.Û^¨[“ ñ´’‚âÅkKÓÕÛ ¢¥6]³Ÿä}3 Ãó³ õÙZ°ra|TUÛ.¦ßÒØ Ýô>QW²p—HÈŽAÏe]ûö;÷ýŒ£ÉÒ-ðEøÉe-RS™¹Ì@ó~þ¸ãʉ/&#‚}¼{þâYi³äŒ†JmËhoèËoËø£hl[¤Ç› DÑXÖùᢻÐ>êgI–#D×<³ùƒg¶7Œ¬QYË9YJ±¬tÛúà5/†.\ûbØKÁ@IÚù8½êÐåö25ºº|?ú‡ûgÐ- |ñ– " âÝGc~â÷u¶¸óºRÒ³s’žû.ü¶ZPMâ <å4ž‡'͸ÊÒÞoq'OΦÑ, [”¾Ûùõ®w?´9pPrÒJgÕšÕ8"–«Êu ¥ñÊ8ˆ…h{|½Ú¨6© *ÐàÁŠfÏSr³¬¶ÁÙTåböî9Pµz ­´>»ZdL… zõšøoÓ·nn?”ËŒ’±YŒ¦K87Ñt.zÍø?Õf´ƒÞ •¡î—Þ¯°)õ¸‹~ÐÌ „ÞIöiïe Ì+Xi—¶²„Q‘2P)Ê5›—*WcÅdÆr+—ŽCÿÈkÉendstream endobj 163 0 obj << /Filter /FlateDecode /Length 3797 >> stream xœ­ZK“ÛÆÎY§$UÉ!•DZcÞ3®øà¤Y)•“X›“•JA$v ‹$(”,ÿ‚üìôc.´²«R:ˆæÑÝÓýõ×=ûö¦,äM‰ÿÒÿ›Ã“òæþÉÛ'’ÞÞ¤ÿ6‡›?Ý>ùü[éobr7·wOx†¼‘ZÞ…g}¡´½¹=<ùNlÚýåpìVk£\QÊ/Îçº;­ÖÊ1:'Úã¿ZÑ·ôƒý®æ—›ö€cCQ–VÂØú¸‚…b°^ô/¥í]Ú¡TãÜSu®u_ŸóîJ¼[á¶ÎFQoúö¼ú÷íß@¥'ꨛ) *ÝnA_¯ÖeácÁñà™î ­t¥Êƒ‹ÕÚ«ôò⯫ aËÒ‹–ö7 4jXÓ¦ku¼YKYDk%/pªúæu³oz’3ª(>Àd+™(Þ7ýŸ|Ñ¢=ÏüXJ#ÚSßšAu‰VPAœW$‰svi‘2Šæ˜”–vz†6Ò'¥Å¡úáEófuûýµºôõ!fuAÌ$NÕ¾kÓSP¢Úlê}v¢Ê’Ѿîêó»•5¨ q{\£•K«£Tôìj6’Œ¦Ö\éPõçæ4*¨j¼x†¨ù1)Î-Ïñ¾K£¤¾r<«³ãÉäxrêxì´mÞM¥j¶—jŸ–+Ý¢´=Qñâ*ùüx$wDý­^<¹ýÃw¼·Öè6~ê¼ø '9¯—Éyáw)HÒCß¼ÊùÀÝniÇè1 Ä™}lnÄ[ô«Ðº—? ÜNBÔòâŠ#fƒ¿ß‘@1’“õÓuïÏÕ¶ÁYž´èñK€9âîrÜ Ùò"VTçûË! ¥ID2HuÎkÎÖ¢« ¸¾‚÷u¼N»¢|æðë+ƒDrü»ã‚Ó˲06¸ìô_¬ÖVƒ[”!‰á=éüûs‡ªyšˆÛÐ]ð™ü¢1(ç£9J£3"™SF¶ M×â0¼¼ðªžéuò^ÀÚÂ1š,"gœù÷×€÷Þ”׳‡ò,G‘̛ȇ{[Ž` pŽÊd¹/í*>a9sTœ  ªcú ¶zpÎKGTáUôØ€¼ÚêV ` /µ&®ð5JDC‹ã Á¹Ùd¡,nëò£Íí=m2q¤cÏ¡vq°b}ím†pÂóäp` ã` Üi³¿lëm6g(‚»6gsÌXG`À@;4ÇæuÕoX#Å®HÆ'æ%YÃ!˜ÁEœNBZ| žXK‹@tšSÌh3kÀ”É_GëO]3¿5b³o»a?Ÿ&ð¢?Ö烄YÀò8 ‚“îêã¦Nr€#MäU‹Ì}p p轌² rˆÓЩ-‡µá”úHXCNp&Ñq´WóðÑ®´v e\ÛP6]Ú²{W÷ùÁ°5’h |r¤ôëÁO?¤—Vlë»ê²ïѱ!—¥ÏïÆùÍ„¼ï rŸ¦ð†àhWIü/³ÖCÅ¥œ¢í0‚0ª7$…6LR”?Á!JœFÐm;È9R@¬D¢©Tñü_£þ %¦yô°„sÌ*¾!Ž·ÿ°ËD Áú>E‡¨9)þ±h+ 0ı~‘,É`GÎbJª†1–yðHع{.cK¦*¯Ä®¹‡ªw6òÜÑR«õ¹¯†øp¦—L©”溠ß5ôÈiRõñ!ͪ1Á®v4Ä0 Œ³%Á}H‹D©r‹IEs8?G¹¬&¡»Ë$­ä×<·KeââÊ’Mªý} x°;ðg$oÛÌ ‚äiŒcŒ¸< ‰L^ß‹æp¨· äÄìgPßPæTê¼¾ôyóÈHƒ!…µI“*.Øé2HwW¹ýü[=ŸyYzZYªZs¬,ÚZb|.±‹PØIGäÐbÙ¦¢¥@'6”Ôo1R”˨X3Dˆòù¡sizÏû¼†O|Ru®ûË™<…ŸÙüÑM’Ó³®ÏcÔX"u˜ JM.Í'1çA”NR[ÝuÕ¹ëYB(+:ŸY ¿¯¨€B¾ á>n›| ô*5›y 2 ‰R”}7X6)lND{UþÑÛ0¦,È!½ì‘=)ê<¨Ô2ÊOl\ãä³LÒƒ-´L¯xùì?§,×C¨’çµIò)™¯W¨¡‚ž®÷²x¸&RÆ”y|# åÈÂ_£÷/œ&¸Ä’›ø©Ãúà]ºô%™ G šñ2”jºz÷›];Ðü÷yM ð3æòéµñýM£&hD’2à@ ¨D†¯PrJË»PMДÓ[p%Ö]°†ãÍZÞ âéÜŽçöÂjá0®¾Ó¨æx‡…LŽHÝäó`ù%å}zuZ+Xu•yy]-.#¼võÝeO¶ h›Ñ;›sMÅ è¼Î›˜•ÊD¬þ1·p¨­Oƒ·õ¸¯<é§Œk-wQÖØ¸/¡LÚ)è}º0©™ò÷iÏ„;ðçŸÕøøH£]#‡Ê溥À#`Þbàx§}}HW"إʔ),ŒMðáW›)® ÎÝ‹ÀPÚ!‹cw†ÒŸ4N-uÞÊÂ#¯;oyŸ ÏRCKÂŒIÿÍH䦽bn 뫺M~iu.68žÒ¼ƒð*u×8$‡è¡ÍmkŠËáÕê)‹è+KPØ,ð‘ ùv[-vUÔ;J›ÉMÑžœýkºÐ²$R2Ãs|I-òL³¥ÃX+:;+a@¯H?9ÂI¤™Í¤æaiǽRËi"D:šc}Mh¤%åuŠsaYC=ƒáZC•t}¢I×|}BÀðÕ*7+Ðfü h—Èbì–—ÏSµx]uØÎ0Ā܃ëƒu{Ä »CœˆpùöÐN,ÒC¬×Ûñr'9}5TÝ9ˆ®P¤›·£‹KFˆ0 ºX ÄñC_„†ŸöPÃpÛŠÉÞØ¾ âe=U„Rµx'ÙàÈu÷ˆ¿À„ÐmÁPk,h´K瀣b˜Uá—frÌýxç0õ“É@ZO®¥ßsµ ±m ø€Òž„4YÑÕ‰ft¾²¤ìuâm©K®’»‚‹×T#ÎQý¨Â¦Ž„ïË7š¾,œ-Ãx£ù‘³,´wjzg@¢çÁõç•ày¿8ÝÏ.„Üü­èÁ-`E„=Ë4‹ÅRr*V€Ug>U&ÁgûA§ÌûýÍï Ћ?±Çëñíüòz¾ žsºÌrÁ8Ȥš·—tj-iGÛöœZ2ȕۡ}ËOUhJëžï>sBAx@|¤~ZjjÍl&¹°ÝØñ¯rêÝè{Yr8ì$Ãä^?€7iŸÐ?Þ@…É÷ú&`£¨0@|MçX%#Ó6v2‡jp?œÈg’FCú‘aÁøRÚ‚˜³ ¾|p^zÖ,¬ CŠù´xö) ¦2ÙÂ+ðËٌωjc’.ÌPl©D¢.cø”Dó»W`Z0e2#ÝösÈLEÑŠFýÓ%ÁóçÇ êÏÇk¡WÇu3á±a¡G¾õi鹋ù¨ô¶ü¸sþ‚|òT ž ª*‡S}Ü‘àžVŸpš™afN#~·p–Ègé‡{¿û$­^B1pÚèÕx%(ÓAÞ• vF\5Nþôú´¿^á6äœ+c lÎ$šãJ¢/ýs#È–À*í9 ¬M͉^Îî%ÝŽîö<Ëù9„içm{WS³Á'¢³@„ÍÁêÔ& 2k_|„ ;k&'¢ îŸò­²ñ’coˆPª¸4÷šº{\šî­N¨ô )mº°[¨åb˜õyRúæØô ýõEúúËéÖ;¤{õ>[Al𯭨hÇüj)¬#(f´üR°F§Ãb,• ™¤|4¤|¡É¥‚M(K™CÖÍ/Œ çs|„Á_{°Ó?U3^QF~Þg? 
éÚ(1\¢œôduº7Jwíƒká…Үꗑ,A&ýqü›j ”K+3÷%·b=PàrÉÃ$0¿ k› =cñ ÿ™†%gÊE„–Ä$Ì\§c»žÐ<(KûKö;)Ó­,FÜþ@o®Ӱ掙=Æ;DÀš¿¢€å/·Oþ ÿþÆÊÓuendstream endobj 164 0 obj << /Filter /FlateDecode /Length 159 >> stream xœ]O1à Üy…I×(KºdhUµý1&bˆA„ ý}!$:ÜIöÝÉg9Œ×‘]ùˆ_”À:6‘V¿E$˜hv,šŒÃtL;㢃ÃM‡÷'dÙ:ßõBòÙªºjj½¡5h¤¨y&Ñ)ÕwÖö‚ØüIG`²‡ó’Je.þS)ÑRâ¼ ¸ÅHœö¦{“RÀ1ýž >”dˆ/&6Sendstream endobj 165 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 329 >> stream xœcd`ab`dddwöuŠ041U~H3þaú!ËÜÝðcéOeÖnæn–I߯ }ÏüžÆÿ=Y€™‘±ºq‚s~AeQfzF‰BŒ©A20T0´´4×Q020°TpÌM-ÊLNÌSðM,ÉHÍM,rr‚ó“3SK*ÁZl2JJ ¬ôõËËËõs‹õò‹Òí@¦è(”g–d(¥§•¥¦(¸åç•(ø%æ¦*@Ü©¡œós JKR‹|óSR‹ò ˜Y2tðý¸Ü°þdzõŒ;¾‹|¯ø.Âücé÷Ë¢oº¾7ÿnýÝÚõ»IS£çwõ÷Öï-½ß«ÞËýnû½K4½;xaÝ•¦ýÝŸ»u_î>3yÿô'§.îÞн¥b^üœ´nÅnËîànƒ–ðZ=ŸÚÜn>9.óù<œ Ð<endstream endobj 166 0 obj << /Filter /FlateDecode /Length 4612 >> stream xœÕ;ÉŽäFv>  ؇ ¤OŽœQѱ/Ì¡%Ø’ µO’m°3YU”r)‘LµZ_ï÷^D &ki{dØèCÉoßâå^‰ ÇéÿÝñ†oîo~¸ôv“þÛ7Ÿ½½ùû?Jµ U°ÒnÞÞÝÄb#”ªœõk\%•Ù¼=Þ|Ãî··¼ œoþýí?áR1[j*œt°ü퀿eíCP6ÏÏQ•wÚŠ ü—˹ruç ŒUãÎÛÅÎÂÍ€]¥µà2·ýöVÁk¶;Ÿú¡>m¥¯8W ŸnWP3ª2ð5oð#fa¹P>Ù*8åØV\£6·¢ÒA × ¸ÆoµHÌ1´•õ&èÍl ûÛíÛï’0—…ª„æ6"Â~!JT+a¸óÕû%Ç_Oà‹W‹”½V<³ ¢ŠÙïQóí4ÈJH³‚(Ûv jGÂþ Á]ðÞÛ•ã…£pÿš„‹Ê°B—E-1òiºÜÓ«¶·Fx $°¯ÏIƒfÃC“$;ž£&JJ&á55Çæ„`e õH–õÛ[lš=)´g—ÇüQ³v{+Ø Ÿ-SO'vÍ}{>ßn…U•ä”UTÁQ}߸7Ðæ yw$¥Ú»ïê}›1rŽ @˜– Ñcoú´,X<ÿfû3ã¹eïO}»o>…o^ÁÇÚa‚ki¹€¿=«ý9Ÿ©YS÷mÓ寚E> §ç|êú\‚T§ÎC&!Ìq9Ö?¡Ð€—AHËÚ#­\³Ë1Q‚gŸ]âÐÈ—¾yûk0ãòZ "°wtÇ}š‡z+Õ^Éè¸ÚJÖž;„ç„õ±þžNƒõ¬÷q v1RĆËñv¶¾ùaŽí¥}|LÂͶPÈ Ç¾þâM<ÕèÐôyw!Ùcw>5Ò°ó]FȱûfÚÓ}Z Òè‡Ën›9ø}ÚÃdNÓQ51¹q8OÈìê‚8RÍóãKkñà÷ç¬Îà’‘Ú¼ÊÀtЧ‡wik ‚¥·¬o†Ëñ3má û^–Ñ–¡®}4¦¡;òOØ%“53“•ÖVRdWôõÿI’A$£/ž›7Ðcäèâ>˜ƒü 2.ºä”^×¾¹«/"E’rþ¸5àà´•¿þ*ZpŸþæžñ**#Âi‘7 qÃ7@&Û×@T´E“ÁÒÒ‘ß$¥-,hbбÈ^öH—/ý¥>> ¬©¸BŸq×t*­¯0œ§¿ë>1SÌ®²Sxy³5€µß×Ç튿”x¯Üœ£„އÿEFÔ–þ„}îšÃ!¹cØû5$¬¨”•£w'’ýX}¨V±*Brñ€>Z‡¶ AJÐt@R#’ U äVD“*16(·¬FéôùÙ&=…$îÐÔÝ)!~××ÕÖ\¯P,U ГÊ#¬U¨<l-@Ö][Ÿvz(£¡ûqÆñø™JñWí]þª ÿN&:šûî||Lb§’ dBz¥ éÝ¡úé¨d—S èñm”&y|ú 1 å‡J±÷i t¬EÀ—BP8¡jL±âLðà,€¼éÉæó-})Oåá‰Ò‹Q4®ÂHŠKbÈÛdÏÈ’dMîãV…è3ÉP§…àk“ƒlÏ’_ 1¨¡ÑD¹g‘à¤r”mÓÞ?Dâ°•\|‚©Wc^}?Û†Dލ€áû_‰<¡¡ußþXÄ=I:’›•¨q#HæE<@òV}ŠSÒz§Ø…d€ßÐ2º& ìaTŽkr$ò$‹q3R…縑`žãí¯âþ†g ™>̹r2˜ÉHáÈ ·h$)3øÄöY{ðXt?jÉ>\M˜&³Áä@›d6ø` .ݾoûuM{:ÿ«S\GÊs MbâÉDù‡l$åw)Çt-os•¯Å°õܠѯ=ü-{ÿÐt)2!A”¥²RkÀé$þ)]º’YH’S>M.…õí±=PF n‚#¡¨¼ÎÂÌO‡sˆS1ÊæM®å)“fî‚+úäx˜‡7ž¤dl(W©cÞ€‰\ò=3ÍÊgÁwPIJEuéúaúDLÍÈõ ä²ûÉ¢‹rÀJC³˜¬˜Èï~­Ÿ!!É‘^礨G}€”E¯uZ`[+ý˜@ýnYK[SÖÒ†ûÜŸà+Y3˜†1#’yÝÃ*«Çr¿£#A V­â§ ZÏ?Ï•óî¿…Ÿ©œô¹Â‡šÓ?dåíB¶r’íç`y—a¬.{eÏ+ >Ü…§º6EÉT~s++­°aôDo©ìYHg6Å‚+!–ÈÀfbÒŽÿ½Ò‚Ì€-ŸØEƒ|ü”b—ÎËM±â#Z4IÚàW*«“°mÚFMÒþ·œàÛ×C>‰v’b7 ¸°â#<¡Fiþ,d%ëÆÕè#~»ª/–W˜§ xÁHIì N*gžâ¤™­A5nëoAƒc~…ïB ~ÅW:¶lºÍuìªéfʆ*Ç–fIÀ5Jªèj‹U\±äE6‰å!Ïëf^Ž/èþÍöR‰ØËmJ‡nM¿®Mé?ëý«9æ¼x‘cbÉ1¡Kb®¬ç9{~ÁkEGAžKƒj­ówadϳÑDœCK]‚ø¶¦ÙϰF'„[ÁV_˙ҬxÎ脟Ðìê1=XÖMŸñ¹h%<Ö]}l†¦ÃÜÄ¥ •Åâ–êîóM‰Töù‹"ü„„²Šz‡ó)-2”À›ã¹û6Œp?6Q¨ic¦_l1¶‡Ø>®brVtgR×YQ6lÒÐ+½<©G®Q:˜ê„•¦ž€ð­r̳¯cWO¬$âBλP1Î鱃—j*cÅB ©îfÖÔCªÊ¦^¨R^bÖ!1G?s”гºܳAGa3C„)—¤æHyVBÔÜ'Ó•Í ÔJj¶°yã½|Ù§þß< ÔåË1géA_æfeu 64!!_Fámå_:B ¸Ë×PY1öÃ_t­kÜš;Õ_&ÞØÀÃÇ `ªuÜ k‹%¾¬( œ€CxÝûZœUøË4e9qBtÑZû'›B gç±]™zί( bNñ§Bõbä ²n®Ú|Eò±Mï5š¡ÒLU©ß|­j\í¼Ð˜¶ýÑ c?F7P!®3ݵÜí#b­Ó&e€\I.Ê;EŸlO±s>ÆpG¢S»<¦Ù>váBºS•!Þ7=¡ÐU¥Ž\N7z,zÖËH@¼]üë;•¿\•IÆóUÎs1êOB@Y„€ºq{U„<«µÒ«ÜXÿÕŠÍ@ö õ½"{«×FŠ û :ô—¦¿aAGƒNé5Wd+¥ù4,tÅ(Y^‘HÚÙWj ›:D¸4;ÿáíÍ`™›ûþ¯¥6ïoøæ‹å5@Éž8¨#¼Á,Ào7_?=ö[(]û…-¡¤Ð°8§qì÷5NzÊóNºp¥/9(pš’â|Jö~=ÈÐÓ”ºŒ?)›qA-yVgœ¡†×Õ콬J÷ýÎÒÝÝ“ÓÔV˸,Ûðë©ü¼:yÐô»SdÃ1 Ð]&rnhøÌÀ%”x R þ™Å¹›XË¥-¨‘êZ¿tP…\<ÎbéT0¤iG#dì2ºØrÚ%ÞYb$ i$Þ`“]ŽM×â4a”˜€lØ›Bb4âÐÚŸS©‰±„YÞ¿¦É¸c¾:Uq0±ý™*ÊT ¥©CIeÒrê0–\—¾ÙO·ËT׬õ ð3û—cýÖx+•‚Š~ŒÞ¹¡ ¾,(cÏ!´ l¸Ê{íó%”pŠ}š ¸q-©b>àx&ލ)m5ö ¡¨ŠÿúÕ2×PDXÐÄHy¿Z=Œ“…5a»qûì „^W²R£Ú؉Pœ=]¡ÀdŽ¡S ¸hÍÎëŸá™&óãeš.Šeæ¾Áy„8äâhNYá0Úµ@Óš4©»é,Y“(TªFè ½ùã rÒVò¡y©Ž»Ê41ìxîqÂ"j5TÑ~¡Õß°æ§Ù`Æ4FF…™V3¬5 Áâ¤Ýý¥˜¦í!wr&rG‰pÕ#Tåíî qX›ûà+glA! 
¥ÑŠ»Ëig™p-µå_Šr.ÀˆaSƒ.Ðhö|Îã¨8аtÚþÝ’éplæøÝi…ÝØi1¾(t=݌ݫèÇËãtÙl(äÌ’4R¾æ1–†±J¾k'²Áˆ³“Z+õ „N>º¸L Öíi¼s‚àFf<Þ£ƒ÷Šì²Òø³6ô¡(̸ù/ì‘·endstream endobj 167 0 obj << /Filter /FlateDecode /Length 3118 >> stream xœ•Y[ÛÆîó¢ÿ }Ü‡Ž ‹å\9cÔ6E °´»AxÓ‚+ÑkJÚˆ”/ùõýΙŠ’•4õ>X"gΜëw¾3úaVrVÒ_ú¹½)gë›n$?¥ÿ–ÛÙ÷7ü‡¬f¡N¹ÙýÛ›¸CΤñE%ÕÌÙªPÚÎî·7oÄín¾0Úˆvû4_¨ªÞ‹ýa^âƒ4Öˆ¡ÞÍñ¸,•-UxcÄÝ×·‹þ´£Y¶¿]Ò[]„ÄrMž6Ub8ì»ôÆ+±Úý®JéEÛÏ¿¿ÿ+©l'*«Ò6¨}¿ºÁaÿ~¬‡åæ®ý±™ßÿçÒ>§ŠÊ*¿Å|Q=´¸ß´|”*N¬š¡9,拲(­R9:x¡¼)Tp³…”E°VF!Ûv×`«ÖZ ›†?@b%Hhš•bôPë9}¤c4yïŠE9›-úÛ·¯^]±D ŒÊ–¼…(>¹TS-œx{ìºÅ¨Š4!«R?=öõ’"BiĦ`]`›…q6~Õîê®û4÷©…žS8ÉqJw]ûn®^hD/\Ée5ø ¢>êUKñ¬;z(¡Šæoá‘CÏòû¤ö1WBåEÒº‚zu?´Ë¼7o¥o! Bø¥Ø5Í*-+­`a¬ï&ñ»ý›ÉpØcŒ¼°|t&´àð%×z'öoé3<¼XÕC¿)ñ êÃú¸mvÑ»* ®Zú–)†b÷Íc-ÐP^V.úaÃ×î²ZT„«æpÒ…lŽá‡ënyìê!ØM,r)I9±›¦/²ž„…6N‘ ©CaÔlA'é£*fW•%…ãËõ@Po˲Oìim+ÈutœCR9'ŽÓ}½n槯/P~Þs¤^‘u§7MMÖ9hé´8ð9F|žn_ÿÔ‹¾GÚMÄEQ_1Îl Ò…—RÏî_ÝÜÿ–’lSVܱPJ‹ÜèEß•÷¦k(ÅEàÄ·Oñð œO<ƒÕ1µ|¡}¢ÿþÉå¤äªÑ>&R³è¡\†üÐÛTJ†\-Å‘ý†w#ˆ,¼–Z ϲ­?„¯ä’ ²•IËêÝ*­9“äùª:‰º]!ÖWd‘þÎæ<åÂãgˆÀ¾{Ï•Gžl­Dr¢ˆR”ã+qhÖ‡&."/OÊKä}ß¼º£‚0F³k€h]³%XG`bn»a³?®7´†0×Ж¼ÁÆ|CQ ù3kÆKMëþ]úfø°iM@Øïš,¬B+Û"v@ÉøòÐtm“@“9ш³í„x¨„xNˆ‡ä› ^M ¸4¥â.ÙBkí— Y£¹„Y÷hØ\„>°—(Îñ%@¬©ûOS\\0z´îãÀÒq©=¶QÙRÂ7«¦ƒ¯$ûÏÓ9ž }31Yf“9¡MY±W¾ØÃÄ-ÊÀGÀúv·¦ž[+™L8,§Œd"Ñ BØýpî¤ÈˆÈ}3œ¤äÐñVBWzþ2iO?Þ‚“†¡Ù> ‘w°Þûû€dkVírˆÑ&½¢QüÖ‹-^Ö»ü’mJ©BQ{:´Ë1s,„™Ú±(&T¡Ò¤bêo}^­ 6ŸcÐîZd7s+kÅãć EŠ ©€ñ#AÚªµÊ~f­RÒ‹HâÐØ·Q,ZF‹òi£3¨ÿEÂ$§j8ÿËÔÈ› Ò¨_Ú§aÏDÀØÀ>ê˱}nN‹ëLÓhÛ¦Žœ”A8û˜Û³É\‹ÄQV4³pthNé(Æl êÄÇi Ö†‘!PJh‹wL(²ýÙÔ‡ŽÊ[y¢.•8´ï¹—šª‚Ÿ ÊXvPˆX¯¸ÿ¾Ÿ[j†ˆi}hëÇQ-¤¾¥¥>¿˜S6G)pÃa|Dâ L H-¢SÁÅÔ"C£xC"OÛÜT¢QmÛsfdK“ʈZÝI_JV¯$è˜<zB+¦tHG\â¨øìë÷g¾^e‹{%ù 64}Ö†Ðîtr{y=ÝK%c$™{w7¿Ò¡G‰¾›7?Õ‰óhR8FcÝd l]aJ“»MÈPø<ó(à ׉¬,ãŽp¼<4'²%Ý9Gž·ë݉¨mëáЦ¬“ ZÉì3æ`%ŸÏšG‹«²ôN_1Ÿl¨F3#l§³›É9w\ÙãgŒ0UQ4H2R1 à®&»7œ‘ U8/ó¹ÜOT𡺢£- 4ÑYp>&vºb¡K¨l¨Ù㡲QèŸç NCŽoOÿ´ Ï±ü?l棚író võ¶éÄëÛ»»/b“!²€½âåËÓ¾g€ê9M Áñþ–ä´ŸœùÝõ3Fã™E çùož/Úïÿ/‘Ëlj»Õƒx¶$Bøì¥|ž¡@¿#Cdi™üî$¨^­â"¤ˆà}õn¸vø§ë‡¿¾=>5¢ýþZYq|hBˆMýĵ”­NQY¦t™7”Gðz>¡#´ˆtŽÀÁû}(zEi OŠ%©O~IeÈÈä²@jq‘$®ÃÐU* ЧôÜÙDñâ·*ŽÒyˉ›Y-N3ô»Óæ5Q‡<阇k|§î—Í ¤†M{™‰»žª2RǤ'R»ªòÎñ+*{Y¾©’šJ#TL É»ýÇ“ž‡-¿à|§!µ<—’µDåº Šî¤eYXk]°ð3+\¡<ÆY®Iˆ•T¬{á|]‰¢®ìQyѳVmŸ5ò‰¿ á¡´–óÄ×w•åà¤ò¹°¤2†EèN*‡ÂšJ-eо´‚سҕ´FZö,ûƒæHk-°¿äC®ﱫ—ï8ؼ©€ S.×#¢…« ”ujrú‘Õ¥ÓÊ\íœt1o¿Ù‰¼«w9í­è›±V¨z¸FL*º–ðy†æä¸r‰C2Nì…3(óˆzRÙõxßfg10xÞÏ|BÁÅN%Ë •—Itößê¨ýøm¼38§XNo(G>7DVøxÚŽføõD¡”G2«Ë¤—¬*Pžˆ®©Wü-¢9m0L?n2Cìºa䕉ÁG)æ|i¿Å¸šÖJkšHÕxzlשåx-?Ëš,b¼S?cÊiÄò§Ygåfä$º0{{ãèbÍÚ Q)•@òž¶ÊÂUVkÇJÂyg{>;Fɳ[q cÎÎ7â%ÝH˜ÂcìB77ï¯ñMWØr¼L/(¥Úмà/pýnß5qø§4 b=ÏT;|zÔÙ´5¤ŸJ…¶!1¡q¨â ºm†qôE^®x¶U*^É\̶¼%ÄÙ¶ŒTìîëÛç1N4Wl€ Õò«@¿š»&)ù0ê·?œÝy§K•(¿ÝeUÎ*ËÙxh.ø`7¹ñv´?vÄcT Úšêkú!ÞOO®k>Mê9Ý1_¿nÿjîN”`¦s“Có–Ï’ÙŸ+YÍ®~ßó"NèÉÑFr-/÷Û§#ßžÒƒ\†tÑÿº;»ðŠ¿*ÐAž.*ßóòŒJ¼•j¯MŽ·•iÕåuå98è”`‰cƒŠ 8S¯ضt×öb~%ó¥†h6ššò%þ±YééàTRn(Áu²ôøõÏÖUÀè†W“ºR•/*`̓`4AJÉk“©)tп"”CYR¹Kï D€úTsq¶çç!K’aúSÉ™þ¶„ŽLA O09#jhпy˯h‡GNˤ“²“-Ð äg¼â–´Ø"¥¿j€ÔPt¶8;àC)íùy?ýä« ?q†kô%u±Å 2$>Ìye‚»‘:V*•£pôæËû›¿ã￯:1aendstream endobj 168 0 obj << /Filter /FlateDecode /Length 162 >> stream xœ]O1ƒ0 Üó ÿ ¡¢C%ÄB†VUÛÇAp¢†þ¾$@‡w’}wòYvýµg—@>¢Ç%°ŽM¤Ù/ ‹êÆaÚ§Â8é dwÓáý «ì6ßõDòY]겪¶zCsÐHQóH¢Qªm¬m±ù“öÀ`wç¹n ”Z9û%Gs‰ã&à#q*MK“\À1ýž >ä¬_2'S7endstream endobj 169 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 269 >> stream xœcd`ab`dddsö ¶±T~H3þaú!ËÜÝýÓù§3k7s7ˤïó…¾§ ~Oæÿž ÀÀÌÈXUßçœ_PY”™žQ¢c`j $  --ÍuŒ ,sS‹2“ó|K2RsK€œ…àüäÌÔ’J°›Œ’’+}ýòòr½ÄÜb½ü¢t;): å™% A©Å©Ee©) nùy% ~‰¹© `gêIçü܂ҒÔ"ßü”Ô¢<ÆS&FF“|?3»üxqƒñû™Ì? 
maxLik/inst/doc/intro-to-maximum-likelihood.Rnw0000644000175100001440000010672614077525067021327 0ustar hornikusers\documentclass[a4paper]{article}
\usepackage{graphics}
\usepackage{amsmath}
\usepackage{amssymb}
\usepackage[font={small,sl}]{caption}
\usepackage[inline]{enumitem}
\usepackage{indentfirst}
\usepackage[utf8]{inputenc}
\usepackage{natbib}
\usepackage{siunitx}
\usepackage{xspace}
% \SweaveUTF8
\newcommand{\COii}{\ensuremath{\mathit{CO}_{2}}\xspace}
\newcommand*{\mat}[1]{\mathsf{#1}}
\newcommand{\likelihood}{\mathcal{L}}% likelihood
\newcommand{\loglik}{\ell}% log likelihood
\newcommand{\maxlik}{\texttt{maxLik}\xspace}
\newcommand{\me}{\mathrm{e}} % the constant e = 2.71828
\newcommand{\R}{\texttt{R}\xspace}
\newcommand*{\transpose}{^{\mkern-1.5mu\mathsf{T}}}
\renewcommand*{\vec}[1]{\boldsymbol{#1}}
% \VignetteIndexEntry{Introduction: what is maximum likelihood}
\begin{document}
<>=
options(keep.source = TRUE, width = 60,
        try.outFile=stdout()  # make try to produce error messages
        )
set.seed(34)
@
\title{Getting started with maximum likelihood and \texttt{maxLik}}
\author{Ott Toomet}
\maketitle

\section{Introduction}

This vignette is intended for readers who are unfamiliar with the concept of likelihood, and for those who want a quick intuitive brush-up. The potential target group includes advanced undergraduate students in technical fields, such as statistics or economics, graduate students in social sciences and engineering who are devising their own estimators, and researchers and practitioners who have little previous experience with ML. However, one should have basic knowledge of the \R language. If you are familiar enough with the concept of likelihood and maximum likelihood, consult instead the other vignette ``Maximum Likelihood Estimation with \maxlik''.

Maximum Likelihood (ML) at its core is maximizing the \emph{likelihood} over the parameters of interest. We start with an example of a random experiment that produces discrete values to explain what likelihood is and how it is related to probability. The following sections cover continuous values and multiple parameters in vector form, and we conclude with a linear regression example. The final section discusses the basics of non-linear optimization. The examples are supplemented with very simple code and assume little background besides basic statistics and basic \R knowledge.

\section{Discrete Random Values}
\label{sec:discrete-random-variables}

We start with a discrete case. ``Discrete'' refers to random experiments or phenomena with only a limited number of possible outcomes, and hence we can compute and tabulate every single outcome separately. Imagine you are flipping a fair coin. What are the possible outcomes and what are the related probabilities? Obviously, in the case of a coin there are only two outcomes, heads $H$ and tails $T$. If the coin is fair, both of these will have a probability of exactly 0.5. Such a random experiment is called a \emph{Bernoulli process}. More specifically, this is a \emph{Bernoulli(0.5)} process, as for the fair coin the probability of ``success'' is 0.5 (below we consider success to be heads, but you can choose tails as well).
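
To get a feel for what such a process produces, it is easy to simulate coin flips in \R with the built-in binomial random number generator. The short sketch below is only an illustration and is not needed for anything that follows; the sample size of 10 is an arbitrary choice:
<<eval=FALSE>>=
## ten flips of a fair coin: 1 = heads, 0 = tails
rbinom(10, size=1, prob=0.5)
@
Each run returns a different mix of zeros and ones, and that randomness is exactly what the probability model below is meant to describe.
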
If the coin is not fair, we denote the corresponding process Bernoulli($p$), where $p$ is the probability of heads.

Now let us toss the coin two times. What is the probability that we end up with one heads and one tails? As the coin flips are independent,\footnote{Events are independent when the outcome of one event does not carry information about the outcome of the other event. Here the result of the second toss is not related to the outcome of the first toss.} we can just multiply the probabilities: $0.5$ for a single heads and $0.5$ for a single tails equals $0.25$ when multiplied. However, this is not the whole story--there are two ways to get one heads and one tails, either $H$ first and $T$ thereafter, or $T$ first and $H$ thereafter. Both of these events are equally likely, so the final answer will be 0.5.

But now imagine we do not know if the coin is fair. Maybe we are not tossing a coin but an object of a complex shape. We can still label one side as ``heads'' and the other as ``tails''. But how can we tell what the probability of heads is? Let's start by denoting this probability with $p$. Hence the probability of tails will be $1-p$, and the probability to receive one heads and one tails when we toss the object two times will be $2 p (1-p)$: $p$ for one heads, $1-p$ for one tails, and ``2'' takes into account the fact that we can get this outcome in two different orders. This probability is essentially the likelihood. We denote the likelihood with $\likelihood(p)$, stressing that it depends on the unknown probability $p$. So in this example we have
\begin{equation}
  \label{eq:2-coin-likelihood}
  \likelihood(p) = 2 \, p \, (1-p).
\end{equation}
$p$ is the \emph{model parameter}, the unknown number we want to compute with the help of the likelihood.

Let's repeat what we did above:
\begin{enumerate}
\item We observe data. In this example the data contains the counts: one heads, one tails.
\item We model the coin toss experiment, the data generating process, as a Bernoulli($p$) random variable. $p$, the probability of heads, is the model parameter we want to calculate. The Bernoulli process has only a single parameter, but more complex processes may contain many more.
\item Thereafter we compute the probability to observe the data based on the model. Here it is equation~\eqref{eq:2-coin-likelihood}. This is why we need a probability model. As the model contains unknown parameters, the probability will also contain parameters.
\item And finally, we just call this probability the \emph{likelihood} $\likelihood(p)$. We write it as a function of the parameter to stress that the parameter is what we are interested in. The likelihood also depends on data (the probability will look different for e.g. two heads instead of a head and a tail) but we typically do not reflect this in the notation.
\end{enumerate}

The next task is to use this likelihood function to \emph{estimate} the parameter, that is, to use data to find the best possible parameter value. The \emph{maximum likelihood} (ML) method finds the parameter value that maximizes the likelihood function. It can be shown that such a parameter value has a number of desirable properties, in particular it will become increasingly similar to the ``true value'' on an increasingly large dataset (given that our probability model is correct).\footnote{This property is formally referred to as \emph{consistency}. ML is a consistent estimator.} These desirable properties, and the relative simplicity of the method, have made ML one of the most widely used statistical estimators.
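
Before moving on, it may help to see what ``maximizing the likelihood'' amounts to in this tiny example. The sketch below simply evaluates the likelihood~\eqref{eq:2-coin-likelihood} on a grid of candidate values of $p$ and picks the value with the largest likelihood; the grid resolution is an arbitrary choice made for illustration:
<<eval=FALSE>>=
p <- seq(0.01, 0.99, by=0.01)  # candidate values for the probability of heads
L <- 2*p*(1 - p)               # likelihood of one heads, one tails
p[which.max(L)]                # 0.5: the fair coin explains the data best
@
The numeric optimizers used below do essentially the same job, only in a much smarter way, and they also work in problems where such a brute-force grid search would be hopeless.
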
Let us generalize the example we did above to an arbitrary number of coin flips. Assume the coin is of unknown ``fairness'' where we just denote the probability to receive heads with $p$. Further, assume that out of $N$ trials, $N_{H}$ trials were heads and $N_{T}$ trials were tails. The probability of this occurring is
\begin{equation}
  \label{eq:general-cointoss-probability}
  \binom{N}{N_{H}} \, p^{N_{H}} \, (1 - p)^{N_{T}}
\end{equation}
$p^{N_{H}}$ is the probability to get $N_{H}$ heads, $(1 - p)^{N_{T}}$ is the probability to get $N_{T}$ tails, and the binomial coefficient $\displaystyle\binom{N}{N_{H}} = \displaystyle\frac{N!}{N_{H}! (N - N_{H})!}$ takes into account that there are many ways in which heads and tails can turn up while still resulting in $N_{H}$ heads and $N_{T}$ tails. In the previous example $N=2$, $N_{H} = 1$, and there were just two possible combinations as $\displaystyle\binom{2}{1} = 2$.

The probability depends on both the parameter $p$ and the data--the corresponding counts $N_{H}$ and $N_{T}$. Equation~\eqref{eq:general-cointoss-probability} is essentially the likelihood--the probability to observe the data. We are interested in how it depends on $p$ and stress this by writing $p$ in the first position, followed by a semicolon and the data, as we care less about the dependency on the data:
\begin{equation}
  \label{eq:general-cointoss-likelihood}
  \likelihood(p; N_{H}, N_{T}) = \binom{N}{N_{H}} \, p^{N_{H}} \, (1 - p)^{N_{T}}
\end{equation}
Technically, it is easier to work with log-likelihood instead of likelihood (as log is a monotonic function, the maximum of the likelihood and the maximum of the log-likelihood occur at the same parameter value). We denote log-likelihood by $\loglik$ and write
\begin{equation}
  \label{eq:general-cointoss-loglik}
  \loglik(p; N_{H}, N_{T}) = \log\likelihood(p; N_{H}, N_{T}) = \log \binom{N}{N_{H}} + N_{H} \log p + N_{T} \log (1 - p).
\end{equation}
The ML estimator of $p$ is the value that maximizes this expression. Fortunately, in this case the binomial coefficient $\displaystyle\binom{N}{N_{H}}$ depends only on the data but not on $p$. Intuitively, $p$ determines the probability of various combinations of heads and tails, but \emph{what kind of combinations are possible} does not depend on $p$. Hence we can ignore the first term on the right hand side of~\eqref{eq:general-cointoss-loglik} when maximizing the log-likelihood. Such an approach is very common in practice: terms that are invariant with respect to the parameters are often ignored. Hence we can re-define the log-likelihood as
\begin{equation}
  \label{eq:general-cointoss-partial-loglik}
  \loglik(p; N_{H}, N_{T}) = N_{H} \log p + N_{T} \log (1 - p).
\end{equation}
It is easy to check that the solution, the value of $p$ that maximizes the log-likelihood~\eqref{eq:general-cointoss-partial-loglik}, is\footnote{Just differentiate $\loglik(p)$ with respect to $p$, set the result to zero, and isolate $p$.}
\begin{equation}
  \label{eq:general-cointoss-solution}
  p^{*} = \frac{N_{H}}{N_{H} + N_{T}} = \frac{N_{H}}{N}.
\end{equation}
This should be a surprise to no-one: the intuitive ``fairness'' of the coin is just the average percentage of heads we get.

Now it is time to try this out on the computer with \texttt{maxLik}. Let's assume we toss a coin and receive $N_{H} = 3$ heads and $N_{T} = 7$ tails:
<<>>=
NH <- 3
NT <- 7
@
Next, we have to define the log-likelihood function. It has to be a function of the parameter, and the parameter must be its first argument. We can access data in different ways, for instance through the \R workspace environment.
So we can write the log-likelihood as <<>>= loglik <- function(p) { NH*log(p) + NT*log(1-p) } @ And finally, we can use \texttt{maxLik} function to compute the likelihood. In its simplest form, \texttt{maxLik} requires two arguments: the log-likelihood function, and the start value for the iterative algorithm (see Section~\ref{sec:non-linear-optimization}, and the documentation and vignette \textsl{Maximum Likelihood Estimation with \maxlik} for more detailed explanations). The start value must be a valid parameter value (the loglik function must not give errors when called with the start value). We can choose $p_{0} = 0.5$ as the initial value, and let the algorithm find the best possible $p$ from there: <<>>= library(maxLik) m <- maxLik(loglik, start=0.5) summary(m) @ As expected, the best bet for $p$ is 0.3. Our intuitive approach--the percentage of heads in the experiment--turns also out to be the ML estimate. Next, we look at an example with continuous outcomes. \section{Continuous case: probability density and likelihood} \label{sec:continuous-outcomes} In the example above we looked at a discrete random process, a case where there were only a small number of distinct possibilities (heads and tails). Discrete cases are easy to understand because we can actually compute the respective probabilities, such as the probability to receive one heads and one tails in our experiment. Now we consider continuous random variables where the outcome can be any number in a certain interval. Unfortunately, in continuous case we cannot compute probability of any particular outcome. Or more precisely--we can do it, but the answer is always 0. This may sound a little counter-intuitive but perhaps the following example helps. If you ask the computer to generate a single random number between 0 and 1, you may receive \Sexpr{x <- runif(1); x}. What is the probability to get the same number again? You can try, you will get close but you won't get exactly the same number.\footnote{As computers operate with finite precision, the actual chances to repeat any particular random number are positive, although small. The exact answer depends on the numeric precision and the quality of random number generator. } But despite the probability to receive this number is zero, we somehow still produced it in the first place. Clearly, zero probability does not mean the number was impossible. However, if we want to receive a negative number from the same random number generator, it will be impossible (because we chose a generator that only produces numbers between 0 and 1). So probability 0-events may be possible and they may also be impossible. And to make matter worse, they may also be more likely and less likely. For instance, in case of standard normal random numbers (these numbers are distributed according to ``bell curve'') the values near $0$ are much more likely than values around $-2$, despite of the probability to receive any particular number still being 0 (see Figure~\ref{fig:standard-normal-intervals}). The solution is to look not at the individual numbers but narrow interval near these numbers. Consider the number of interest $x_{1}$, and compute the probability that the random outcome $X$ falls into the narrow interval of width $\delta$, $[x_{1} - \delta/2,\, x_{1} + \delta/2]$, around this number (Figure~\ref{fig:standard-normal-intervals}). Obviously, the smaller the width $\delta$, the less likely it is that $X$ falls into this narrow interval. 
But it turns out that when we divide the probability by the width, we get a stable value at the limit which we denote by $f(x_{1})$: \begin{equation} \label{eq:probability-density} f(x_{1}) = \lim_{\delta\to0} \frac{\Pr(X \in [x_{1} - \delta/2,\, x_{1} + \delta/2])}{\delta}. \end{equation} In the example on the Figure the values around $x_{1}$ are less likely than around $x_{2}$ and hence $f(x_{1}) < f(x_{2})$. The result, $f(x)$, is called \emph{probability density function}, often abbreviated as \emph{pdf}. In case of continuous random variables, we have to work with pdf-s instead of probabilities. \begin{figure}[ht] \centering \includegraphics{probability-density.pdf} \caption{Standard normal probability density (thick black curve). While $\Pr(X = x_{1}) = 0$, i.e. the probability to receive a random number exactly equal to $x_{1}$ is 0, the probability to receive a random number in the narrow interval of width $\delta$ around $x_{1}$ is positive. In this example, the probability to get a random number in the interval around $x_{2}$ is four times larger than for the interval around $x_{1}$. } \label{fig:standard-normal-intervals} \end{figure} Consider the following somewhat trivial example: we have sampled two independent datapoints $x_{1}$ and $x_{2}$ from normal distribution with variance 1 and mean (expected value) equal to $\mu$. Say, $x_{1} = \Sexpr{x1 <- rnorm(1); round(x1, 3)}$ and $x_{2} = \Sexpr{x1 <- rnorm(1); round(x1, 3)}$. Assume we do not know $\mu$ and use ML to estimate it. We can proceed in a similar steps as what we did for the discrete case: \begin{enumerate*}[label=\roman*)] \item observe data, in this case $x_{1}$ and $x_{2}$; \item set up the probability model; \item use the model to compute probability to observe the data; \item write the probability as $\loglik(\mu)$, log-likelihood function of the parameter $\mu$; \item and finally, find $\mu^{*}$, the $\mu$ value that maximizes the corresponding log-likelihood. \end{enumerate*} This will be our best estimate for the true mean. As we already have our data points $x_{1}$ and $x_{2}$, our next step is the probability model. The probability density function (pdf) for normal distribution with mean $\mu$ and variance 1 is \begin{equation} \label{eq:standard-normal-pdf} f(x; \mu) = \frac{1}{\sqrt{2\pi}} \, \me^{ \displaystyle -\frac{1}{2} (x - \mu)^{2} } \end{equation} (This is the thick curve in Figure~\ref{fig:standard-normal-intervals}). We write it as $f(x; \mu)$ as pdf is usually written as a function of data. But as our primary interest is $\mu$, we also add this as an argument. Now we use this pdf and~\eqref{eq:probability-density} to find the probability that we observe a datapoint in the narrow interval around $x$. Here it is just $f(x; \mu)\cdot \delta$. As $x_{1}$ and $x_{2}$ are independent, we can simply multiply the corresponding probabilities to find the combined probability that both random numbers are near their corresponding values: \begin{multline} \label{eq:two-normal-probability-likelihood} \Pr{\Big(X_{1} \in [x_{1} - \delta/2, x_{1} + \delta/2] \quad\text{and}\quad X_{2} \in [x_{2} - \delta/2, x_{2} + \delta/2]\Big)} =\\[2ex]= \underbrace{ \frac{1}{\sqrt{2\pi}} \, \me^{ \displaystyle -\frac{1}{2} (x_{1} - \mu)^{2} } \cdot\delta\ }_{ \text{First random value near $x_{1}$} } \times \underbrace{ \frac{1}{\sqrt{2\pi}} \, \me^{ \displaystyle -\frac{1}{2} (x_{2} - \mu)^{2} } \cdot\delta }_{ \text{Second random value near $x_{2}$} } \equiv\\[2ex]\equiv \tilde\likelihood(\mu; x_{1}, x_{2}). 
\end{multline} The interval width $\delta$ must be small for the equation to hold precisely. We denote this probability with $\tilde\likelihood$ to stress that it is essentially the likelihood, just not written in the way it is usually done. As in the coin-toss example above, we write it as a function of the parameter $\mu$, and put data $x_{1}$ and $x_{2}$ after semicolon. Now we can estimate $\mu$ by finding such a value $\mu^{*}$ that maximizes the expression~\eqref{eq:two-normal-probability-likelihood}. But note that $\delta$ plays no role in maximizing the likelihood. It is just a multiplicative factor, and it cannot be negative because it is a width. So for our maximization problem we can just ignore it. This is what is normally done when working with continuous random variables. Hence we write the likelihood as \begin{equation} \label{eq:two-normal-likelihood} \likelihood(\mu; x_{1}, x_{2}) = \frac{1}{\sqrt{2\pi}} \, \me^{ \displaystyle -\frac{1}{2} (x_{1} - \mu)^{2} } \times \frac{1}{\sqrt{2\pi}} \, \me^{ \displaystyle -\frac{1}{2} (x_{2} - \mu)^{2} }. \end{equation} We denote this by $\likelihood$ instead of $\tilde\likelihood$ to stress that this is how likelihood function for continuous random variables is usually written. Exactly as in the discrete case, it is better to use log-likelihood instead of likelihood to actually compute the maximum. From~\eqref{eq:two-normal-likelihood} we get log-likelihood as \begin{multline} \label{eq:two-standard-normal-loglik} \loglik(\mu; x_{1}, x_{2}) = -\log{\sqrt{2\pi}} -\frac{1}{2} (x_{1} - \mu)^{2} + (- \log{\sqrt{2\pi}}) -\frac{1}{2} (x_{2} - \mu)^{2} =\\[2ex]= - 2\log{\sqrt{2\pi}} - \frac{1}{2} \sum_{i=1}^{2} (x_{i} - \mu)^{2}. \end{multline} The first term, $- 2\log{\sqrt{2\pi}}$, is just an additive constant and plays no role in the actual maximization but it is typically still included when defining the likelihood function.\footnote{Additive or multiplicative constants do not play any role for optimization, but they are important when comparing different log-likelihood values. This is often needed for likelihood-based statistical tests. } One can easily check by differentiating the log-likelihood function that the maximum is achieved at $\mu^{*} = \frac{1}{2}(x_{1} + x_{2})$. It is not surprising, our intuitive understanding of mean value carries immediately over to the normal distribution context. Now it is time to demonstrate these results with \texttt{maxLik} package. First, create our ``data'', just two normally distributed random numbers: <<>>= x1 <- rnorm(1) # centered around 0 x2 <- rnorm(1) x1 x2 @ and define the log-likelihood function. We include all the terms as in the final version of~\eqref{eq:two-standard-normal-loglik}: <<>>= loglik <- function(mu) { -2*log(sqrt(2*pi)) - 0.5*((x1 - mu)^2 + (x2 - mu)^2) } @ We also need the parameter start value--we can pick $0$. And we use \texttt{maxLik} to find the best $\mu$: <<>>= m <- maxLik(loglik, start=0) summary(m) @ The answer is the same as sample mean: <<>>= (x1 + x2)/2 @ \section{Vector arguments} \label{sec:vector-arguments} The previous example is instructive but it does have very few practical applications. The problem is that we wrote the probability model as normal density with unknown mean $\mu$ but standard deviation $\sigma$ equal to one. However, in practice we hardly ever know that we are dealing with unit standard deviation. More likely both mean and standard deviation are unknown. So we have to incorporate the unknown $\sigma$ into the model. 
The more general normal pdf with standard deviation $\sigma$ is \begin{equation} \label{eq:normal-pdf} f(x; \mu, \sigma) = \frac{1}{\sqrt{2\pi}} \frac{1}{\sigma} \, \me^{ -\displaystyle\frac{1}{2} \frac{(x - \mu)^{2}}{\sigma^{2}} }. \end{equation} Similar reasoning as what we did above will give the log-likelihood \begin{equation} \label{eq:two-normal-loglik} \loglik(\mu, \sigma; x_{1}, x_{2}) = - 2\log{\sqrt{2\pi}} - 2\log \sigma - \frac{1}{2} \sum_{i=1}^{2} \frac{(x_{i} - \mu)^{2}}{\sigma^{2}}. \end{equation} We write the log-likelihood as function of both parameters, $\mu$ and $\sigma$; the semicolon that separates data $x_{1}$ and $x_{2}$ shows that though the log-likelihood depends on data too, we are not much interested in that dependency for now. This formula immediately extends to the case of $N$ datapoints as \begin{equation} \label{eq:normal-loglik} \loglik(\mu, \sigma) = - N\log{\sqrt{2\pi}} - N\log \sigma - \frac{1}{2} \sum_{i=1}^{N} \frac{(x_{i} - \mu)^{2}}{\sigma^{2}} \end{equation} where we have dropped the dependency on data in the notation. In this case we can actually do the optimization analytically, and derive the well-known intuitive results: the best estimator for mean $\mu$ is the sample average, and the best estimator for $\sigma^{2}$ is the sample variance. However, in general the expression cannot be solved analytically. We have to use numeric optimization to search for the best $\mu$ and $\sigma$ combination. The common multi-dimensional optimizers rely on linear algebra and expect all the parameters submitted as a single vector. So we can write the log-likelihood as \begin{equation} \label{eq:normal-loglik-vector} \loglik(\vec{\theta}) \quad\text{where}\quad \vec{\theta} = (\mu, \sigma). \end{equation} Here we denote both parameters $\mu$ and $\sigma$ as components of a single parameter vector $\vec{\theta}$. (Traditionally vectors are denoted by bold symbols.) We have also dropped dependency on data in notation, but remember that in practical applications log-likelihood always depends on data. This notation can be converted to computer code almost verbatim, just remember to extract the parameters $\mu$ and $\sigma$ from $\vec{\theta}$ in the log-likelihood function. Let us illustrate this using the \emph{CO2} dataset (in package \emph{datasets}). It describes \COii uptake (\si{\micro\mol\per\meter\squared\sec}, variable \emph{uptake}) by different grasses in various conditions. Let us start by plotting the histogram of uptake: <>= data(CO2) hist(CO2$uptake) @ Let us model the uptake as a normal random variable with expected value $\mu$ and standard deviation $\sigma$. We code~\eqref{eq:normal-loglik} while keeping both parameters in a single vector as in~\eqref{eq:normal-loglik-vector}: <<>>= loglik <- function(theta) { mu <- theta[1] sigma <- theta[2] N <- nrow(CO2) -N*log(sqrt(2*pi)) - N*log(sigma) - 0.5*sum((CO2$uptake - mu)^2/sigma^2) } @ The function is similar to the function \texttt{loglik} we used in Section~\ref{sec:continuous-outcomes}. There are just two main differences: \begin{itemize} \item both arguments, $\mu$ and $\sigma$ are passed as components of $\vec{\theta}$, and hence the function starts by unpacking the values. \item instead of using variables \texttt{x1} and \texttt{x2}, we now extract data directly from the data frame. \end{itemize} Besides these two differences, the formula now also includes $\sigma$ and sums over all observations, not just over two observations. 
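
Before handing this function over to an optimizer, it is often worth calling it once by hand as a sanity check. The call below is merely a suggestion and uses the sample mean and standard deviation as trial values; it should return a single finite number, not \texttt{NA} or \texttt{-Inf}:
<<eval=FALSE>>=
## quick sanity check of the log-likelihood at a plausible parameter vector
loglik(c(mu=mean(CO2$uptake), sigma=sd(CO2$uptake)))
@
If this check fails, the numeric optimization below will fail as well, so it is cheaper to catch such problems early.
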
As our parameter vector now contains two components, the start vector must also be of length two. Based on the figure we guess that a good starting value might be $\mu=30$ and $\sigma=10$: <<>>= m <- maxLik(loglik, start=c(mu=30, sigma=10)) summary(m) @ Indeed, our guess was close. \section{Final Example: Linear Regression} \label{sec:linear-regression} Now we have the main tools in place to extend the example above to a real statistical model. Let us build the previous example into linear regression. We describe \COii uptake (variable \emph{uptake}) by \COii concentration in air (variable \emph{conc}). We can write the corresponding regression model as \begin{equation} \label{eq:co2-regression} \mathit{uptake}_{i} = \beta_{0} + \beta_{1} \cdot \mathit{conc}_{i} + \epsilon_{i}. \end{equation} In order to turn this regression model into a ML problem, we need a probability model. Assume that the disturbance term $\epsilon$ is normally distributed with mean 0 and (unknown) variance $\sigma^{2}$ (this is a standard assumption in linear regression). Now we can follow~\eqref{eq:two-normal-loglik} and write log of pdf for a single observation as \begin{equation} \label{eq:co2-epsilon-loglik} \loglik(\sigma; \epsilon_{i}) = - \log{\sqrt{2\pi}} - \log \sigma - \frac{1}{2} \frac{\epsilon_{i}^{2}}{\sigma^{2}}. \end{equation} Here we have replaced $x_{i}$ by the random outcome $\epsilon_{i}$. As the expected value $\mu=0$ by assumption, we do not include $\mu$ in~\eqref{eq:co2-epsilon-loglik} and hence we drop it also from the argument list of $\loglik$. We do not know $\epsilon_{i}$ but we can express it using linear regression model~\eqref{eq:co2-regression}: \begin{equation} \label{eq:co2-epsilon} \epsilon_{i} = \mathit{uptake}_{i} - \beta_{0} - \beta_{1} \cdot \mathit{conc}_{i}. \end{equation} This expression depends on two additional unknown parameters, $\beta_{0}$ and $\beta_{1}$. These are the linear regression coefficients we want to find. Now we plug this into~\eqref{eq:co2-epsilon-loglik}: \begin{multline} \label{eq:co2-single-loglik} \loglik(\beta_{0}, \beta_{1}, \sigma; \mathit{uptake}_{i}, \mathit{conc}_{i}) =\\= - \log{\sqrt{2\pi}} - \log \sigma - \frac{1}{2} \frac{( \mathit{uptake}_{i} - \beta_{0} - \beta_{1} \cdot \mathit{conc}_{i} )^{2}}{\sigma^{2}}. \end{multline} We have designed log-likelihood formula for a single linear regression observation. It depends on three parameters, $\beta_{0}$, $\beta_{1}$ and $\sigma$. For $N$ observations we have \begin{multline} \label{eq:co2-loglik} \loglik(\beta_{0}, \beta_{1}, \sigma; \vec{\mathit{uptake}}, \vec{\mathit{conc}}) =\\= - N\log{\sqrt{2\pi}} - N\log \sigma - \frac{1}{2} \sum_{i=1}^{N} \frac{( \mathit{uptake}_{i} - \beta_{0} - \beta_{1} \cdot \mathit{conc}_{i})^{2}}{\sigma^{2}} \end{multline} where vectors $\vec{\mathit{uptake}}$ and $\vec{\mathit{conc}}$ contain the data values for all the observations. This is a fully specified log-likelihood function that we can use for optimization. Let us repeat what we have done: \begin{itemize} \item We wrote log-likelihood as a function of parameters $\beta_{0}$, $\beta_{1}$ and $\sigma$. Note that in case of linear regression we typically do not call $\sigma$ a parameter. But it is still a parameter, although one we usually do not care much about (sometimes called ``nuisance parameter''). \item The likelihood function also depends on data, here the vectors $\vec{\mathit{uptake}}$ and $\vec{\mathit{conc}}$. 
\item The function definition itself is just sum of log-likelihood contributions of individual normal disturbance terms, but as we do not observe the disturbance terms, we express those through the regression equation in~\eqref{eq:co2-single-loglik}. \end{itemize} Finally, we combine the three parameters into a single vector $\vec{\theta}$, suppress dependency on data in the notation, and write \begin{equation} \label{eq:co2-loglik-simplified} \loglik(\vec{\theta}) = - N\log{\sqrt{2\pi}} - N\log \sigma - \frac{1}{2} \sum_{i=1}^{N} \frac{( \mathit{uptake}_{i} - \beta_{0} - \beta_{1} \cdot \mathit{conc}_{i})^{2}}{\sigma^{2}}. \end{equation} This is the definition we can easily code and estimate. We guess start values $\beta_{0} = 30$ (close to the mean), $\beta_{1} = 0$ (uptake does not depend on concentration) and $\sigma=10$ (close to sample standard deviation). We can convert~\eqref{eq:co2-loglik-simplified} into code almost verbatim, below we choose to compute the expected uptake $\mu$ as an auxiliary variable: <<>>= loglik <- function(theta) { beta0 <- theta[1] beta1 <- theta[2] sigma <- theta[3] N <- nrow(CO2) ## compute new mu based on beta1, beta2 mu <- beta0 + beta1*CO2$conc ## use this mu in a similar fashion as previously -N*log(sqrt(2*pi)) - N*log(sigma) - 0.5*sum((CO2$uptake - mu)^2/sigma^2) } m <- maxLik(loglik, start=c(beta0=30, beta1=0, sigma=10)) summary(m) @ These are the linear regression estimates: $\beta_{0} = \Sexpr{round(coef(m)["beta0"], 3)}$ and $\beta_{1} = \Sexpr{round(coef(m)["beta1"], 3)}$. Note that \maxlik output also provides standard errors, $z$-values and $p$-values, hence we see that the results are highly statistically significant. One can check that a linear regression model will give similar results: <<>>= summary(lm(uptake ~ conc, data=CO2)) @ Indeed, the results are close although not identical. \section{Non-linear optimization} \label{sec:non-linear-optimization} Finally, we discuss the magic inside \texttt{maxLik} that finds the optimal parameter values. Although not necessary in everyday work, this knowledge helps to understand the issues and potential solutions when doing non-linear optimization. So how does the optimization work? Consider the example in Section~\ref{sec:vector-arguments} where we computed the normal distribution parameters for \COii intake. There are two parameters, $\mu$ and $\sigma$, and \maxlik returns the combination that gives the largest possible log-likelihood value. We can visualize the task by plotting the log-likelihood value for different combinations of $\mu$, $\sigma$ (Figure~\ref{fig:mu-sigma-plot}). 
\begin{figure}[ht] \centering <>= loglik <- function(theta) { mu <- theta[1] sigma <- theta[2] N <- nrow(CO2) -N*log(sqrt(2*pi)) - N*log(sigma) - 0.5*sum((CO2$uptake - mu)^2/sigma^2) } m <- maxLik(loglik, start=c(mu=30, sigma=10)) params <- coef(m) np <- 33 # number of points mu <- seq(6, 36, length.out=np) sigma <- seq(5, 50, length.out=np) X <- as.matrix(expand.grid(mu=mu, sigma=sigma)) ll <- matrix(apply(X, 1, loglik), nrow=np) levels <- quantile(ll, c(0.05, 0.4, 0.6, 0.8, 0.9, 0.97)) # where to draw the contours colors <- colorRampPalette(c("Blue", "White"))(30) par(mar=c(0,0,0,0), mgp=2:0) ## Perspective plot if(require(plot3D)) { persp3D(mu, sigma, ll, xlab=expression(mu), ylab=expression(sigma), zlab=expression(log-likelihood), theta=40, phi=30, colkey=FALSE, col=colors, alpha=0.5, facets=TRUE, shade=1, lighting="ambient", lphi=60, ltheta=0, image=TRUE, bty="b2", contour=list(col="gray", side=c("z"), levels=levels) ) ## add the dot for maximum scatter3D(rep(coef(m)[1], 2), rep(coef(m)[2], 2), c(maxValue(m), min(ll)), col="red", pch=16, facets=FALSE, bty="n", add=TRUE) ## line from max on persp to max at bottom surface segments3D(coef(m)[1], coef(m)[2], maxValue(m), coef(m)[1], coef(m)[2], min(ll), col="red", lty=2, bty="n", add=TRUE) ## contours for the bottom image contour3D(mu, sigma, z=min(ll) + 0.1, colvar=ll, col="black", levels=levels, add=TRUE) } else { plot(1:2, type="n") text(1.5, 1.5, "This figure requires 'plot3D' package", cex=1.5) } @ \caption{Log-likelihood surface as a function of $\mu$ and $\sigma$. The optimum, denoted as the red dot, is at $\mu=\Sexpr{round(coef(m)[1], 3)}$ and $\sigma=\Sexpr{round(coef(m)[2], 3)}$. The corresponding countour plot is shown at the bottom of the figure box. } \label{fig:mu-sigma-plot} \end{figure} So how does the algorithm find the optimal parameter value $\vec{\theta}^*$, the red dot on the figure? All the common methods are iterative, i.e. they start with a given start value (that's why we need the start value), and repeatedly find a new and better parameter that gives a larger log-likelihood value. While humans can look at the figure and immediately see where is its maximum, computers cannot perceive the image in this way. And more importantly--even humans cannot visualize the function in more than three dimensions. This visualization is so helpful for us because we can intuitively understand the 3-dimensional structure of the surface. It is 3-D because we have two parameters, $\mu$ and $\sigma$, and a single log-likelihood value. Add one more parameter as we did in Section~\ref{sec:linear-regression}, and visualization options are very limited. In case of 5 parameters, it is essentially impossible to solve the problem by just visualizations. Non-linear optimization is like climbing uphill in whiteout conditions where you cannot distinguish any details around you--sky is just a white fog and the ground is covered with similar white snow. But you can still feel which way the ground goes up and so you can still go uphill. This is what the popular algorithms do. They rely on the slope of the function, the gradient, and follow the direction suggested by gradient. Most optimizers included in the \texttt{maxLik} package need gradients, including the default Newton-Raphson method. But how do we know the gradient if the log-likelihood function only returns a single value? 
There are two ways: \begin{enumerate*}[label=\roman*)] \item provide a separate function that computes the gradient; \item compute the log-likelihood value at multiple points nearby and deduce the gradient from that information. \end{enumerate*} The first option is superior: in high dimensions it is much faster and much less error-prone. But deriving and coding the gradient can easily be days of work. The second approach, the numeric gradient, forces the computer to do more work and hence is slower. Unfortunately, and more importantly, it may also be unreliable for more complex cases. In practice you may notice how the algorithm refuses to converge for thousands of iterations. But the numeric gradient works very well in simple cases such as those we demonstrated here. This also hints at why it is useful to choose good start values. The closer we start to our final destination, the less work the computer has to do. And while we may not care too much about a few seconds of the computer's work, good start values also help the algorithm to find the correct maximum. The less the algorithm has to work, the less likely it is to get stuck in a wrong place or just keep wandering around in a clueless manner. If this happens, you may see the algorithm getting slow, failing to converge (returning the ``maximum number of iterations exceeded'' message), producing weird-looking results, or reporting extremely large standard errors. % \bibliographystyle{apecon} % \bibliography{maxlik} \end{document} maxLik/inst/doc/intro-to-maximum-likelihood.R0000644000175100001440000001224615124514324020737 0ustar hornikusers### R code from vignette source 'intro-to-maximum-likelihood.Rnw' ################################################### ### code chunk number 1: foo ################################################### options(keep.source = TRUE, width = 60, try.outFile=stdout() # make try to produce error messages ) set.seed(34) ################################################### ### code chunk number 2: intro-to-maximum-likelihood.Rnw:237-239 ################################################### NH <- 3 NT <- 7 ################################################### ### code chunk number 3: intro-to-maximum-likelihood.Rnw:245-248 ################################################### loglik <- function(p) { NH*log(p) + NT*log(1-p) } ################################################### ### code chunk number 4: intro-to-maximum-likelihood.Rnw:260-263 ################################################### library(maxLik) m <- maxLik(loglik, start=0.5) summary(m) ################################################### ### code chunk number 5: intro-to-maximum-likelihood.Rnw:501-505 ################################################### x1 <- rnorm(1) # centered around 0 x2 <- rnorm(1) x1 x2 ################################################### ### code chunk number 6: intro-to-maximum-likelihood.Rnw:509-512 ################################################### loglik <- function(mu) { -2*log(sqrt(2*pi)) - 0.5*((x1 - mu)^2 + (x2 - mu)^2) } ################################################### ### code chunk number 7: intro-to-maximum-likelihood.Rnw:516-518 ################################################### m <- maxLik(loglik, start=0) summary(m) ################################################### ### code chunk number 8: intro-to-maximum-likelihood.Rnw:521-522 ################################################### (x1 + x2)/2 ################################################### ### code chunk number 9: intro-to-maximum-likelihood.Rnw:612-614 ################################################### data(CO2)
hist(CO2$uptake) ################################################### ### code chunk number 10: intro-to-maximum-likelihood.Rnw:621-628 ################################################### loglik <- function(theta) { mu <- theta[1] sigma <- theta[2] N <- nrow(CO2) -N*log(sqrt(2*pi)) - N*log(sigma) - 0.5*sum((CO2$uptake - mu)^2/sigma^2) } ################################################### ### code chunk number 11: intro-to-maximum-likelihood.Rnw:647-649 ################################################### m <- maxLik(loglik, start=c(mu=30, sigma=10)) summary(m) ################################################### ### code chunk number 12: intro-to-maximum-likelihood.Rnw:769-782 ################################################### loglik <- function(theta) { beta0 <- theta[1] beta1 <- theta[2] sigma <- theta[3] N <- nrow(CO2) ## compute new mu based on beta1, beta2 mu <- beta0 + beta1*CO2$conc ## use this mu in a similar fashion as previously -N*log(sqrt(2*pi)) - N*log(sigma) - 0.5*sum((CO2$uptake - mu)^2/sigma^2) } m <- maxLik(loglik, start=c(beta0=30, beta1=0, sigma=10)) summary(m) ################################################### ### code chunk number 13: intro-to-maximum-likelihood.Rnw:792-793 ################################################### summary(lm(uptake ~ conc, data=CO2)) ################################################### ### code chunk number 14: plotSurface ################################################### loglik <- function(theta) { mu <- theta[1] sigma <- theta[2] N <- nrow(CO2) -N*log(sqrt(2*pi)) - N*log(sigma) - 0.5*sum((CO2$uptake - mu)^2/sigma^2) } m <- maxLik(loglik, start=c(mu=30, sigma=10)) params <- coef(m) np <- 33 # number of points mu <- seq(6, 36, length.out=np) sigma <- seq(5, 50, length.out=np) X <- as.matrix(expand.grid(mu=mu, sigma=sigma)) ll <- matrix(apply(X, 1, loglik), nrow=np) levels <- quantile(ll, c(0.05, 0.4, 0.6, 0.8, 0.9, 0.97)) # where to draw the contours colors <- colorRampPalette(c("Blue", "White"))(30) par(mar=c(0,0,0,0), mgp=2:0) ## Perspective plot if(require(plot3D)) { persp3D(mu, sigma, ll, xlab=expression(mu), ylab=expression(sigma), zlab=expression(log-likelihood), theta=40, phi=30, colkey=FALSE, col=colors, alpha=0.5, facets=TRUE, shade=1, lighting="ambient", lphi=60, ltheta=0, image=TRUE, bty="b2", contour=list(col="gray", side=c("z"), levels=levels) ) ## add the dot for maximum scatter3D(rep(coef(m)[1], 2), rep(coef(m)[2], 2), c(maxValue(m), min(ll)), col="red", pch=16, facets=FALSE, bty="n", add=TRUE) ## line from max on persp to max at bottom surface segments3D(coef(m)[1], coef(m)[2], maxValue(m), coef(m)[1], coef(m)[2], min(ll), col="red", lty=2, bty="n", add=TRUE) ## contours for the bottom image contour3D(mu, sigma, z=min(ll) + 0.1, colvar=ll, col="black", levels=levels, add=TRUE) } else { plot(1:2, type="n") text(1.5, 1.5, "This figure requires 'plot3D' package", cex=1.5) } maxLik/inst/doc/stochastic-gradient-maxLik.Rnw0000644000175100001440000007613714077525067021144 0ustar hornikusers\documentclass{article} \usepackage{graphics} \usepackage{amsmath} \usepackage{amssymb} \usepackage{indentfirst} \usepackage[utf8]{inputenc} \usepackage{natbib} \usepackage{xspace} \newcommand{\elemProd}{\ensuremath{\odot}} % elementwise product of matrices \newcommand*{\mat}[1]{\mathsf{#1}} \newcommand{\maxlik}{\texttt{maxLik}\xspace} \newcommand*{\transpose}{^{\mkern-1.5mu\mathsf{T}}} %\newcommand{\transpose}{\intercal} \renewcommand*{\vec}[1]{\boldsymbol{#1}} % \VignetteIndexEntry{SGA introduction: the basic usage of maxSGA} \begin{document} 
<<foo>>= options(keep.source = TRUE, width = 60, try.outFile=stdout() # make try to produce error messages ) foo <- packageDescription("maxLik") @ \title{Stochastic Gradient Ascent in maxLik} \author{Ott Toomet} \maketitle \section{\texttt{maxLik} and Stochastic Gradient Ascent} \texttt{maxLik} is a package primarily intended for Maximum Likelihood and related estimations. It includes several optimizers and associated tools for a typical Maximum Likelihood workflow. However, as predictive modeling and complex (deep) models have gained popularity in the recent decade, \texttt{maxLik} also includes a few popular algorithms for stochastic gradient ascent, the mirror image of the more widely known stochastic gradient descent. This vignette gives a brief overview of these methods and their usage in \texttt{maxLik}. \section{Stochastic Gradient Ascent} \label{sec:stochastic-gradient-ascent} In the machine learning literature, it is more common to describe optimization problems as minimization and hence to talk about gradient descent. As \texttt{maxLik} is primarily focused on maximizing likelihood, it implements the maximization version of the method, stochastic gradient ascent (SGA). The basic method is simple and intuitive: it is essentially just a careful climb in the gradient's direction. Given an objective function $f(\vec{\theta})$ and the initial parameter vector $\vec{\theta}_{0}$, the algorithm computes the gradient $\vec{g}(\vec{\theta}_{0}) = \nabla_{\vec{\theta}} f(\vec{\theta})\big|_{\vec{\theta} = \vec{\theta}_{0}}$, and updates the parameter vector as $\vec{\theta}_{1} = \vec{\theta}_{0} + \rho \vec{g}(\vec{\theta}_{0})$. Here $\rho$, the \emph{learning rate}, is a small positive constant that ensures we do not overshoot the optimum. Depending on the task it is typically of order $0.1 \dots 0.001$. In common tasks, the objective function $f(\vec{\theta})$ depends on data, ``predictors'' $\mat{X}$ and ``outcome'' $\vec{y}$, in an additive form $f(\vec{\theta}; \mat{X}, \vec{y}) = \sum_{i} f(\vec{\theta}; \vec{x}_{i}, y_{i})$ where $i$ denotes ``observations'', typically arranged as the rows of the design matrix $\mat{X}$. Observations are often considered to be independent of each other. The overview above does not specify how to compute the gradient $\vec{g}(\vec{\theta}_{0})$ in the sense of which observations $i$ to include. A natural approach is to include the complete data and compute \begin{equation} \label{eq:full-batch-gradient} \vec{g}_{N}(\vec{\theta}_{0}) = \frac{1}{N}\sum_{i=1}^{N} \nabla_{\vec{\theta}} f(\vec{\theta}; \vec{x}_{i})\big|_{\vec{\theta} = \vec{\theta}_{0}}. \end{equation} In the SGA context, this approach is called ``full batch'' and it has a number of advantages. In particular, it is deterministic (given the data $\mat{X}$ and $\vec{y}$), and the computation of the sum can be done in parallel. However, there are also a number of reasons why the full-batch approach may not be desirable \citep[see][]{bottou2018SIAM}: \begin{itemize} \item Data over different observations is often more or less redundant. If we use all the observations to compute the update, then we spend a substantial effort on redundant calculations. \item The full-batch gradient is deterministic and hence there is no stochastic noise. While the lack of noise is advantageous in the later steps of optimization, the noise helps the optimizer to avoid local optima and overcome flat areas in the objective function early in the process.
\item SGA achieves much more rapid initial convergence compared to the full-batch method (although full-batch methods may achieve a better final result). \item The cost of computing the full-batch gradient grows with the sample size, but that of the minibatch gradient does not. \item It is empirically known that large-batch optimization tends to find sharp optima \citep[see][]{keskar+2016ArXiv} that do not generalize well to validation data. A small-batch approach leads to better validation performance. \end{itemize} In contrast, SGA is an approach where the gradient is computed on just a single observation as \begin{equation} \label{eq:stochastic-gradient} \vec{g}_{1}(\vec{\theta}_{0}) = \nabla_{\vec{\theta}} f(\vec{\theta}; \vec{x}_{i}, y_{i})\big|_{\vec{\theta} = \vec{\theta}_{0}} \end{equation} where $i$ is chosen randomly. In applications, all the observations are usually walked through in a random order, to ensure that each observation is included once, and only once, in an \emph{epoch}. An epoch is a full walk-through of the data, and in many ways similar to an iteration in the full-batch approach. As SGA only accesses a single observation at a time, it suffers from other kinds of performance issues. In particular, one cannot parallelize the gradient function \eqref{eq:stochastic-gradient}, operating on individual data vectors may be inefficient compared to larger matrices, and while we gain in terms of gradient computation speed, we lose by running the optimizer for many more loops. The \emph{minibatch} approach offers a balance between the full-batch approach and SGA. In the minibatch case, we compute the gradient not on individual observations but on \emph{batches} \begin{equation} \label{eq:minibatch-gradient} \vec{g}_{m}(\vec{\theta}_{0}) = \frac{1}{|\mathcal{B}|}\sum_{i\in\mathcal{B}} \nabla_{\vec{\theta}} f(\vec{\theta}; \vec{x}_{i}, y_{i})\big|_{\vec{\theta} = \vec{\theta}_{0}} \end{equation} where $\mathcal{B}$ is the batch, a set of observations that are included in the gradient computation. Normally the full data is partitioned into a series of minibatches and walked through sequentially in one epoch. \section{SGA in \texttt{maxLik} package} \label{sec:sga-in-maxlik} \maxlik implements two different optimizers: \texttt{maxSGA} for simple SGA (including momentum), and \texttt{maxAdam} for the Adaptive Moments method \citep[see][p. 301]{goodfellow+2016DL}. The usage of both methods mostly follows that of the package's main workhorse, \texttt{maxNR} \citep[see][]{henningsen+toomet2011}, but their API has some important differences due to the different nature of SGA. The basic usage of \texttt{maxSGA} is as follows: <<eval=FALSE>>= maxSGA(fn, grad, start, nObs, control) @ where \texttt{fn} is the objective function, \texttt{grad} is the gradient function, \texttt{nObs} is the number of observations, and \texttt{control} is a list of control parameters. From the user's perspective, \texttt{grad} is typically the most important (and the most complex) argument. Next, we describe the API, explain the differences between the \texttt{maxSGA} and \texttt{maxNR} APIs, and thereafter give a few toy examples that demonstrate how to use \texttt{maxSGA} in practice. \subsection{The objective function} Unlike in \texttt{maxNR} and the related optimizers, SGA does not directly need the objective function \texttt{fn}. The function can still be provided (and perhaps will be in most cases), but one can run the optimizer without it.
If provided, the function can be used for printing the value at each epoch (by setting a suitable \texttt{printLevel} control option), and for stopping through \emph{patience} stopping condition. If \texttt{fn} is not provided, do not forget to add the argument name for the gradient, \texttt{grad=}, as otherwise the gradient will be treated as the objective function with unexpected results! If provided, the function should accept two (or more) arguments: the first must be the numeric parameter vector, and another one, named \texttt{index}, is the list of indices in the current minibatch. As the function is not needed by the optimizer itself, it is up to the user to decide what it does. An obvious option is to compute the objective function value on the same minibatch as used for the gradient computation. But one can also opt for something else, for instance to compute the value on the validation data instead (and ignore the provided \emph{index}). The latter may be a useful option if one wants to employ the patience-based stopping criteria. \subsection{Gradient function} \label{sec:gradient-function} Gradient is the work-horse of the SGA methods. Although \maxlik can also compute numeric gradient using the finite difference method (this will be automatically done if the objective function is provided but the gradient isn't), this is not advisable, and may be very slow in high-dimensional problems. \texttt{maxLik} uses the numerator layout, i.e. the gradient should be a $1\times K$ matrix where columns correspond to the components of the parameter vector $\vec{\theta}$. For compatibility with other optimizers in \texttt{maxLik} it also accepts a observation-wise matrix where rows correspond to the individual observations and columns to the parameter vector components. The requirements for the gradient function arguments are the same as for \texttt{fn}: the first formal argument must be the parameter vector, and it must also have an argument \texttt{index}, a numeric index for the observations to be included in the minibatch. \subsection{Stopping Conditions} \label{sec:stopping-conditions} \texttt{maxSGA} uses three stopping criteria: \begin{itemize} \item Number of epochs (control option \texttt{iterlim}): number of times all data is iterated through using the minibatches. \item Gradient norm. However, in case of stochastic approach one cannot expect the gradient at optimum to be close to zero, and hence the corresponding criterion (control option \texttt{gradtol}) is set to zero by default. If interested, one may make it positive. \item Patience. Normally, each new iteration has better (higher) value of the objective function. However, in certain situations this may not be the case. In such cases the algorithm does not stop immediately, but continues up to \emph{patience} more epochs. It also returns the best parameters, not necessarily the last parameters. Patience can be controlled with the options \texttt{SG\_patience} and \texttt{SG\_patienceStep}. The former controls the patience itself--how many times the algorithm is allowed to produce an inferior result (default value \texttt{NULL} means patience criterion is not used). The latter controls how often the patience criterion is checked. If computing the objective function is costly, it may be useful to increase the patience step and decrease the patience. 
\end{itemize} \subsection{Optimizers} \label{sec:optimizers} \texttt{maxLik} currently implements two optimizers: \emph{SGA}, the stock gradient ascent (including momentum), and \emph{Adam}. Here we give some insight into momentum and into the Adam method; the basic gradient-only optimization technique was explained in Section~\ref{sec:stochastic-gradient-ascent}. It is easy and intuitive to extend the SGA method with momentum. As implemented in \texttt{maxSGA}, the momentum $\mu$ ($0 < \mu < 1$) is incorporated into the gradient update as \begin{equation} \label{eq:gradient-update-momentum} \vec{\theta}_{t+1} = \vec{\theta}_{t} + \vec{v}_{t} \quad\text{where}\quad \vec{v}_{t} = \mu \vec{v}_{t-1} + \rho \vec{g}(\vec{\theta}_{t}). \end{equation} See \citet[p. 288]{goodfellow+2016DL}. The algorithm takes the initial ``velocity'' $\vec{v}_{0} = \vec{0}$. It is easy to see that $\mu=0$ is equivalent to the no-momentum case, and that if $\vec{g}(\vec{\theta})$ is constant, $\vec{v}_{t} \to \rho \vec{g}(\vec{\theta})/(1 - \mu)$. So the movement speeds up in a region with a stable gradient. As a downside, it is also easier to overshoot a maximum. But this behavior makes momentum-equipped SGA less prone to getting stuck in a local optimum. Momentum can be set with the control option \texttt{SGA\_momentum}; the default value is 0. The Adaptive Moments method, usually referred to as \emph{Adam} \citep[p. 301]{goodfellow+2016DL}, adapts the learning rate based on the variance of the gradient--if the gradient components are unstable, it slows down, and if they are stable, it speeds up. The adaptation is proportional to the weighted average of the gradient divided by the square root of the weighted average of the gradient squared, all operations done component-wise. In this way a stable gradient component (where the moving average is similar to the gradient value) will have a higher speed than a fluctuating one (where the components frequently flip sign and the average is much smaller). More specifically, the algorithm is as follows: \begin{enumerate} \item Initialize the first and second moment averages $\vec{s} = \vec{0}$ and $\vec{r} = \vec{0}$. \item Compute the gradient $\vec{g}_{t} = \vec{g}(\vec{\theta}_{t})$. \item Update the average first moment: $\vec{s}_{t+1} = \mu_{1} \vec{s}_{t} + (1 - \mu_{1}) \vec{g}_{t}$. $\mu_{1}$ is the decay parameter: the larger it is, the longer the memory of the method. It can be adjusted with the control parameter \texttt{Adam\_momentum1}; the default value is 0.9. \item Update the average second moment: $\vec{r}_{t+1} = \mu_{2} \vec{r}_{t} + (1 - \mu_{2}) \vec{g}_{t} \elemProd \vec{g}_{t}$ where $\elemProd$ denotes element-wise multiplication. The control parameter for $\mu_{2}$ is \texttt{Adam\_momentum2}; the default value is 0.999. \item As the algorithm starts with the averages $\vec{s}_{0} = \vec{r}_{0}= 0$, we also correct the resulting bias: $\hat{\vec{s}} = \vec{s}/(1 - \mu_{1}^{t})$ and $\hat{\vec{r}} = \vec{r}/(1 - \mu_{2}^{t})$. \item Finally, update the estimate: $\vec{\theta}_{t+1} = \vec{\theta}_{t} + \rho \hat{\vec{s}}/(\delta + \sqrt{\hat{\vec{r}}})$ where division and square root are done element-wise and $\delta=10^{-8}$ takes care of numerical stabilization. \end{enumerate} The Adam optimizer can be used with \texttt{maxAdam}. \subsection{Controlling Optimizers} \label{sec:control-options} Both \texttt{maxSGA} and \texttt{maxAdam} are designed to be similar to \texttt{maxNR}, and mostly expect similar arguments; a minimal toy call is sketched below.
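As a small toy illustration (a sketch on simulated data, separate from the worked example in the next section), the following call estimates the mean of a normal sample by SGA; note how both the objective function and the gradient take the parameter vector as their first argument and the minibatch \texttt{index} as a separate argument:
<<eval=FALSE>>=
## toy sketch: estimate the mean of simulated data by SGA
x <- rnorm(1000, mean=2)
fn <- function(theta, index) {
   ## mean log-likelihood on the current minibatch (sd fixed to 1)
   mean(dnorm(x[index], mean=theta, log=TRUE))
}
grad <- function(theta, index) {
   ## 1 x 1 gradient matrix in the numerator layout
   matrix(mean(x[index] - theta), 1, 1)
}
res <- maxSGA(fn, grad, start=c(mu=0), nObs=length(x),
              control=list(iterlim=100, SG_batchSize=20,
                           SG_learningRate=0.1))
coef(res)
@
The same call should also work with \texttt{maxAdam}, as both optimizers accept the same arguments.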
In particular, both functions expect the objective function \texttt{fn}, gradient \texttt{grad} and Hessian function \texttt{hess}, and the initial parameter start values \texttt{start}. As these optimizers only need the gradient, one can leave out both \texttt{fn} and \texttt{hess}. The Hessian is mainly included for compatibility reasons and only used to compute the final Hessian, if requested by the user. As SGA methods are typically used in contexts where the Hessian is not needed, by default the algorithms do not return the Hessian matrix and hence do not use the \texttt{hess} function even if provided. Check out the argument \texttt{finalHessian} if interested. An important SGA-specific control option is \texttt{SG\_batchSize}. It determines the batch size; \texttt{NULL} means the full-batch approach. Finally, unlike the traditional optimizers, stochastic optimizers need to know the size of the data (argument \texttt{nObs}) in order to calculate the batches. \section{Example usage: Linear regression} \label{sec:example-usage-cases} \subsection{Setting Up} \label{sec:setting-up} We demonstrate the usage of \texttt{maxSGA} and \texttt{maxAdam} to solve a linear regression (OLS) problem. Although OLS is not a task where one commonly relies on stochastic optimization, it is a simple and easy-to-understand model. We use the Boston housing data, a popular dataset where one traditionally attempts to predict the median house price across about 500 neighborhoods using a number of neighborhood descriptors, such as mean house size, age, and proximity to the Charles River. All variables in the dataset are numeric, and there are no missing values. The data is provided in the \emph{MASS} package. First, we create the design matrix $\mat{X}$ and extract the house price $y$: <<>>= i <- which(names(MASS::Boston) == "medv") X <- as.matrix(MASS::Boston[,-i]) X <- cbind("const"=1, X) # add constant y <- MASS::Boston[,i] @ Although the model and data are simple, it is not an easy task for stock gradient ascent. The problem lies in the different scaling of the variables; the means are <<>>= colMeans(X) @ One can see that \emph{chas} has an average value \Sexpr{round(mean(X[,"chas"]), 3)} while that of \emph{tax} is \Sexpr{round(mean(X[,"tax"]), 3)}. This leads to extremely elongated contours of the loss function: <<>>= eigenvals <- eigen(crossprod(X))$values @ One can see that the ratio of the largest to the smallest eigenvalue of $\mat{X}^{\transpose} \mat{X}$ is $\Sexpr{round(eigenvals[1]/eigenvals[14], -5)}$. Solely gradient-based methods, such as SGA, have trouble working in the resulting narrow valleys. For reference, let's also compute the analytic solution to this linear regression model (reminder: $\hat{\vec{\beta}} = (\mat{X}^{\transpose}\,\mat{X})^{-1}\,\mat{X}^{\transpose}\,\vec{y}$): <<>>= betaX <- solve(crossprod(X)) %*% crossprod(X, y) betaX <- drop(betaX) # matrix to vector betaX @ Next, we provide the gradient function. As a reminder, the OLS gradient in numerator layout can be expressed as \begin{equation} \label{eq:ols-gradient} \vec{g}_{m}(\vec{\theta}) = -\frac{2}{|\mathcal{B}|} \sum_{i\in\mathcal{B}} \left(y_{i} - \vec{x}_{i}^{\transpose} \cdot \vec{\theta} \right) \vec{x}_{i}^{\transpose} = -\frac{2}{|\mathcal{B}|} \left(y_{\mathcal{B}} - \mat{X}_{\mathcal{B}} \cdot \vec{\theta} \right)^{\transpose} \mat{X}_{\mathcal{B}} \end{equation} where $y_{\mathcal{B}}$ and $\mat{X}_{\mathcal{B}}$ denote the elements of the outcome vector and the slice of the design matrix that correspond to the minibatch $\mathcal{B}$. A quick numeric cross-check of this expression is sketched below.
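The sketch below (plain base \emph{R}; the helper objects \texttt{mse}, \texttt{theta0} and the gradient vectors are purely illustrative) compares~\eqref{eq:ols-gradient}, i.e.\ the gradient of the batch mean squared error, with a finite-difference approximation at an arbitrary parameter value; the two should agree to several digits:
<<eval=FALSE>>=
## sketch: compare the analytic OLS gradient with finite differences
## (full batch, arbitrary test point theta0)
mse <- function(theta) mean((y - X %*% theta)^2)
theta0 <- rep(0, ncol(X))
anaGrad <- drop(-2 * t(y - X %*% theta0) %*% X / nrow(X))
h <- 1e-6
numGrad <- sapply(seq_along(theta0), function(k) {
   thetaPlus <- theta0
   thetaPlus[k] <- thetaPlus[k] + h
   (mse(thetaPlus) - mse(theta0))/h
})
all.equal(unname(anaGrad), numGrad, tolerance=1e-4)
@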
We choose to divide the value by the batch size $|\mathcal{B}|$ in order to have gradient values of roughly similar size, independent of the batch size. We implement it as: <<>>= gradloss <- function(theta, index) { e <- y[index] - X[index,,drop=FALSE] %*% theta g <- t(e) %*% X[index,,drop=FALSE] 2*g/length(index) } @ The \texttt{gradloss} function has two arguments: \texttt{theta} is the parameter vector, and \texttt{index} tells which observations belong to the current minibatch. The actual argument will be an integer vector, and hence we can use \texttt{length(index)} to find the size of the minibatch. Finally, we return the negative of~\eqref{eq:ols-gradient} as \texttt{maxSGA} performs maximization, not minimization. First, we demonstrate how the model works without the objective function. We have to supply the gradient function, initial parameter values (we use random normals below), and also \texttt{nObs}, the number of observations to select the batches from. The latter is needed as the optimizer itself does not have access to the data but still has to partition it into batches. Finally, we may also provide various control parameters, such as the number of iterations, stopping conditions, and batch size. We start by specifying only the iteration limit, the only stopping condition we use here: <<gradonly>>= library(maxLik) set.seed(3) start <- setNames(rnorm(ncol(X), sd=0.1), colnames(X)) # add names for better reference res <- try(maxSGA(grad=gradloss, start=start, nObs=nrow(X), control=list(iterlim=1000) ) ) @ This run was a failure. We encountered a run-away growth of the gradient because the default learning rate $\rho=0.1$ is too big for such a strongly curved objective function. But before we repeat the exercise with a smaller learning rate, let's incorporate gradient clipping. Gradient clipping, performed with the \texttt{SG\_clip} control option, caps the $L_{2}$-norm of the gradient while keeping its direction. We clip the squared norm at 10,000, i.e. the gradient norm cannot exceed 100: <<>>= res <- maxSGA(grad=gradloss, start=start, nObs=nrow(X), control=list(iterlim=1000, SG_clip=1e4) # limit ||g|| <= 100 ) summary(res) @ This time the gradient did not explode and we were able to get a result. But the estimates are rather far from the analytic solution shown above, e.g. the constant estimate \Sexpr{round(coef(res)[1], 3)} is very different from the corresponding analytic value \Sexpr{round(betaX[1], 3)}. Let's analyze what is happening inside the optimizer. We can ask for both the parameter values and the objective function value to be stored for each epoch. But before we can store its value, in this case the mean squared error (MSE), we have to supply an objective function to \texttt{maxSGA}. We compute the MSE on the same minibatch as <<>>= loss <- function(theta, index) { e <- y[index] - X[index,] %*% theta -crossprod(e)/length(index) } @ Now we can store the values with the control options \texttt{storeParameters} and \texttt{storeValues}. The corresponding numbers can be retrieved with the \texttt{storedParameters} and \texttt{storedValues} methods. For \texttt{iterlim=R}, the former returns an $(R+1) \times K$ matrix, one row for each epoch and one column for each parameter component, and the latter returns a numeric vector of length $R+1$ where $R$ is the number of epochs. The first value in both cases is the initial value, so we have $R+1$ values in total. Let's retrieve the values and plot both. We decrease the learning rate to $0.001$ using the \texttt{SG\_learningRate} control.
Note that although we maximize the negative loss, we plot the positive loss. \setkeys{Gin}{width=\textwidth, height=80mm} <<fig=TRUE>>= res <- maxSGA(loss, gradloss, start=start, nObs=nrow(X), control=list(iterlim=1000, # will misbehave with larger numbers SG_clip=1e4, SG_learningRate=0.001, storeParameters=TRUE, storeValues=TRUE ) ) par <- storedParameters(res) val <- storedValues(res) par(mfrow=c(1,2)) plot(par[,1], par[,2], type="b", pch=".", xlab=names(start)[1], ylab=names(start)[2], main="Parameters") ## add some arrows to see which way the parameters move iB <- c(40, nrow(par)/2, nrow(par)) iA <- iB - 10 arrows(par[iA,1], par[iA,2], par[iB,1], par[iB,2], length=0.1) ## plot(seq(length=length(val))-1, -val, type="l", xlab="epoch", ylab="MSE", main="Loss", log="y") @ We can see how the parameters (the first and the second components, ``const'' and ``crim'' in this figure) evolve through the iterations while the loss is rapidly falling. One can see an initial jump where the loss falls very fast, followed by subsequent slow movement. It is possible that the initial jump is limited by gradient clipping. \subsection{Training and Validation Sets} \label{sec:training-validation} However, as we did not specify the batch size, \texttt{maxSGA} will automatically pick the full batch (equivalent to the control option \texttt{SG\_batchSize = NULL}). So there was nothing stochastic in what we did above. Let us pick a small batch size--a single observation at a time. However, as smaller batch sizes introduce more noise to the gradient, we also make the learning rate smaller and choose \texttt{SG\_learningRate = 1e-5}. But now the existing loss function, calculated just at the single observation, carries little meaning. Instead, we split the data into training and validation sets and feed batches of the training data to the optimizer while calculating the loss on the complete validation set. This can be achieved with small modifications in the \texttt{gradloss} and \texttt{loss} functions. But as the first step, we split the data: <<>>= i <- sample(nrow(X), 0.8*nrow(X)) # training indices, 80% of data Xt <- X[i,] # training data yt <- y[i] Xv <- X[-i,] # validation data yv <- y[-i] @ Thereafter we modify \texttt{gradloss} to only use batches of the training data while \texttt{loss} will use the complete validation data and just ignore \texttt{index}: <<>>= gradloss <- function(theta, index) { e <- yt[index] - Xt[index,,drop=FALSE] %*% theta g <- -2*t(e) %*% Xt[index,,drop=FALSE] -g/length(index) } loss <- function(theta, index) { e <- yv - Xv %*% theta -crossprod(e)/length(yv) } @ Note that because the optimizer only uses the training data, the \texttt{nObs} argument must now equal the size of the training data. Another thing to discuss is the computation speed. \texttt{maxLik} implements SGA in a fairly complex loop that does printing and storing, makes complex function calls, computes stopping conditions and performs many other checks. Hence a smaller batch size leads to many more such auxiliary computations per epoch and the algorithm gets considerably slower. This is less of a problem for complex objective functions or larger batch sizes, but for linear regression the slow-down is very large. For demonstration purposes we lower the number of epochs from 1000 to 100. How do the convergence properties look now with the updated approach?
<>= res <- maxSGA(loss, gradloss, start=start, nObs=nrow(Xt), # note: only training data now control=list(iterlim=100, SG_batchSize=1, SG_learningRate=1e-5, SG_clip=1e4, storeParameters=TRUE, storeValues=TRUE ) ) par <- storedParameters(res) val <- storedValues(res) par(mfrow=c(1,2)) plot(par[,1], par[,2], type="b", pch=".", xlab=names(start)[1], ylab=names(start)[2], main="Parameters") iB <- c(40, nrow(par)/2, nrow(par)) iA <- iB - 1 arrows(par[iA,1], par[iA,2], par[iB,1], par[iB,2], length=0.1) plot(seq(length=length(val))-1, -val, type="l", xlab="epoch", ylab="MSE", main="Loss", log="y") @ We can see the parameters evolving and loss decreasing over epochs. The convergence seems to be smooth and not ruptured by gradient clipping. Next, we try to improve the convergence by introducing momentum. We add momentum $\mu = 0.95$ to the gradient and decrease the learning rate down to $1\cdot10^{-6}$: <>= res <- maxSGA(loss, gradloss, start=start, nObs=nrow(Xt), control=list(iterlim=100, SG_batchSize=1, SG_learningRate=1e-6, SG_clip=1e4, SGA_momentum = 0.99, storeParameters=TRUE, storeValues=TRUE ) ) par <- storedParameters(res) val <- storedValues(res) par(mfrow=c(1,2)) plot(par[,1], par[,2], type="b", pch=".", xlab=names(start)[1], ylab=names(start)[2], main="Parameters") iB <- c(40, nrow(par)/2, nrow(par)) iA <- iB - 1 arrows(par[iA,1], par[iA,2], par[iB,1], par[iB,2], length=0.1) plot(seq(length=length(val))-1, -val, type="l", xlab="epoch", ylab="MSE", main="Loss", log="y") @ We achieved a lower loss but we are still far from the correct solution. As the next step, we use Adam optimizer. Adam has two momentum parameters but we leave those untouched at the initial values. \texttt{SGA\_momentum} is not used, so we remove that argument. <>= res <- maxAdam(loss, gradloss, start=start, nObs=nrow(Xt), control=list(iterlim=100, SG_batchSize=1, SG_learningRate=1e-6, SG_clip=1e4, storeParameters=TRUE, storeValues=TRUE ) ) par <- storedParameters(res) val <- storedValues(res) par(mfrow=c(1,2)) plot(par[,1], par[,2], type="b", pch=".", xlab=names(start)[1], ylab=names(start)[2], main="Parameters") iB <- c(40, nrow(par)/2, nrow(par)) iA <- iB - 1 arrows(par[iA,1], par[iA,2], par[iB,1], par[iB,2], length=0.1) plot(seq(length=length(val))-1, -val, type="l", xlab="epoch", ylab="MSE", main="Loss", log="y") @ As visible from the figure, Adam was marching toward the solution without any stability issues. \subsection{Sequence of Batch Sizes } \label{sec:sequence-batch-sizes} The OLS' loss function is globally convex and hence there is no danger to get stuck in a local maximum. However, when the objective function is more complex, the noise that is generated by the stochastic sampling helps the algorithm to leave local maxima. A suggested strategy is to increase the batch size over time to achieve good exploratory properties early in the process and stable convergence later \citep[see][for more information]{smith+2018arXiv}. This approach is in some ways similar to Simulated Annealing. Here we introduce such an approach by using batch sizes $B=1$, $B=10$ and $B=100$ in succession. We also introduce patience stopping condition. If the objective function value is worse than the best value so far for more than \emph{patience} times then the algorithm stops. Here we use patience value 5. We also store the loss values from all the batch sizes into a single vector \texttt{val}. 
If the algorithm stops early, some of the stored values are left uninitialized (\texttt{NA}-s), hence we use \texttt{na.omit} to include only the actual values in the final \texttt{val}-vector. We allow the algorithm to run for 200 epochs, but as we have now introduced early stopping through patience, the actual number of epochs may be less than that. \setkeys{Gin}{width=\textwidth, height=110mm} <<SANN, fig=TRUE>>= val <- NULL # loop over batch sizes for(B in c(1,10,100)) { res <- maxAdam(loss, gradloss, start=start, nObs=nrow(Xt), control=list(iterlim=200, SG_batchSize=B, # current batch size in the loop SG_learningRate=1e-6, SG_clip=1e4, SG_patience=5, # worse value allowed only 5 times storeValues=TRUE ) ) cat("Batch size", B, ",", nIter(res), "epochs, function value", maxValue(res), "\n") val <- c(val, na.omit(storedValues(res))) start <- coef(res) } plot(seq(length=length(val))-1, -val, type="l", xlab="epoch", ylab="MSE", main="Loss", log="y") summary(res) @ The first two batch sizes run through all 200 epochs, but the last run stopped early after only 7 epochs. The figure shows that Adam works well for approximately 170 epochs; thereafter the steady pace becomes uneven. It may be advantageous to slow down the movement further. As explained above, this dataset is not an easy task for methods that are solely gradient-based, and so we did not achieve a result that is close to the analytic solution. But our task here is to demonstrate the usage of the package, not to solve a linear regression exercise. We believe every \emph{R}-savvy user can adapt the method to their needs. \bibliographystyle{apecon} \bibliography{maxlik} \end{document} maxLik/inst/doc/stochastic-gradient-maxLik.R0000644000175100001440000002063715124514350020554 0ustar hornikusers### R code from vignette source 'stochastic-gradient-maxLik.Rnw' ################################################### ### code chunk number 1: foo ################################################### options(keep.source = TRUE, width = 60, try.outFile=stdout() # make try to produce error messages ) foo <- packageDescription("maxLik") ################################################### ### code chunk number 2: stochastic-gradient-maxLik.Rnw:163-164 (eval = FALSE) ################################################### ## maxSGA(fn, grad, start, nObs, control) ################################################### ### code chunk number 3: stochastic-gradient-maxLik.Rnw:375-379 ################################################### i <- which(names(MASS::Boston) == "medv") X <- as.matrix(MASS::Boston[,-i]) X <- cbind("const"=1, X) # add constant y <- MASS::Boston[,i] ################################################### ### code chunk number 4: stochastic-gradient-maxLik.Rnw:384-385 ################################################### colMeans(X) ################################################### ### code chunk number 5: stochastic-gradient-maxLik.Rnw:392-393 ################################################### eigenvals <- eigen(crossprod(X))$values ################################################### ### code chunk number 6: stochastic-gradient-maxLik.Rnw:404-407 ################################################### betaX <- solve(crossprod(X)) %*% crossprod(X, y) betaX <- drop(betaX) # matrix to vector betaX ################################################### ### code chunk number 7: stochastic-gradient-maxLik.Rnw:431-436 ################################################### gradloss <- function(theta, index) { e <- y[index] - X[index,,drop=FALSE] %*% theta g <- t(e) %*% X[index,,drop=FALSE] 2*g/length(index) }
################################################### ### code chunk number 8: gradonly ################################################### library(maxLik) set.seed(3) start <- setNames(rnorm(ncol(X), sd=0.1), colnames(X)) # add names for better reference res <- try(maxSGA(grad=gradloss, start=start, nObs=nrow(X), control=list(iterlim=1000) ) ) ################################################### ### code chunk number 9: stochastic-gradient-maxLik.Rnw:476-483 ################################################### res <- maxSGA(grad=gradloss, start=start, nObs=nrow(X), control=list(iterlim=1000, SG_clip=1e4) # limit ||g|| <= 100 ) summary(res) ################################################### ### code chunk number 10: stochastic-gradient-maxLik.Rnw:495-499 ################################################### loss <- function(theta, index) { e <- y[index] - X[index,] %*% theta -crossprod(e)/length(index) } ################################################### ### code chunk number 11: stochastic-gradient-maxLik.Rnw:514-538 ################################################### res <- maxSGA(loss, gradloss, start=start, nObs=nrow(X), control=list(iterlim=1000, # will misbehave with larger numbers SG_clip=1e4, SG_learningRate=0.001, storeParameters=TRUE, storeValues=TRUE ) ) par <- storedParameters(res) val <- storedValues(res) par(mfrow=c(1,2)) plot(par[,1], par[,2], type="b", pch=".", xlab=names(start)[1], ylab=names(start)[2], main="Parameters") ## add some arrows to see which way the parameters move iB <- c(40, nrow(par)/2, nrow(par)) iA <- iB - 10 arrows(par[iA,1], par[iA,2], par[iB,1], par[iB,2], length=0.1) ## plot(seq(length=length(val))-1, -val, type="l", xlab="epoch", ylab="MSE", main="Loss", log="y") ################################################### ### code chunk number 12: stochastic-gradient-maxLik.Rnw:565-570 ################################################### i <- sample(nrow(X), 0.8*nrow(X)) # training indices, 80% of data Xt <- X[i,] # training data yt <- y[i] Xv <- X[-i,] # validation data yv <- y[-i] ################################################### ### code chunk number 13: stochastic-gradient-maxLik.Rnw:575-584 ################################################### gradloss <- function(theta, index) { e <- yt[index] - Xt[index,,drop=FALSE] %*% theta g <- -2*t(e) %*% Xt[index,,drop=FALSE] -g/length(index) } loss <- function(theta, index) { e <- yv - Xv %*% theta -crossprod(e)/length(yv) } ################################################### ### code chunk number 14: batch1 ################################################### res <- maxSGA(loss, gradloss, start=start, nObs=nrow(Xt), # note: only training data now control=list(iterlim=100, SG_batchSize=1, SG_learningRate=1e-5, SG_clip=1e4, storeParameters=TRUE, storeValues=TRUE ) ) par <- storedParameters(res) val <- storedValues(res) par(mfrow=c(1,2)) plot(par[,1], par[,2], type="b", pch=".", xlab=names(start)[1], ylab=names(start)[2], main="Parameters") iB <- c(40, nrow(par)/2, nrow(par)) iA <- iB - 1 arrows(par[iA,1], par[iA,2], par[iB,1], par[iB,2], length=0.1) plot(seq(length=length(val))-1, -val, type="l", xlab="epoch", ylab="MSE", main="Loss", log="y") ################################################### ### code chunk number 15: momentum ################################################### res <- maxSGA(loss, gradloss, start=start, nObs=nrow(Xt), control=list(iterlim=100, SG_batchSize=1, SG_learningRate=1e-6, SG_clip=1e4, SGA_momentum = 0.99, storeParameters=TRUE, storeValues=TRUE ) ) par <- storedParameters(res) val <- 
storedValues(res) par(mfrow=c(1,2)) plot(par[,1], par[,2], type="b", pch=".", xlab=names(start)[1], ylab=names(start)[2], main="Parameters") iB <- c(40, nrow(par)/2, nrow(par)) iA <- iB - 1 arrows(par[iA,1], par[iA,2], par[iB,1], par[iB,2], length=0.1) plot(seq(length=length(val))-1, -val, type="l", xlab="epoch", ylab="MSE", main="Loss", log="y") ################################################### ### code chunk number 16: Adam ################################################### res <- maxAdam(loss, gradloss, start=start, nObs=nrow(Xt), control=list(iterlim=100, SG_batchSize=1, SG_learningRate=1e-6, SG_clip=1e4, storeParameters=TRUE, storeValues=TRUE ) ) par <- storedParameters(res) val <- storedValues(res) par(mfrow=c(1,2)) plot(par[,1], par[,2], type="b", pch=".", xlab=names(start)[1], ylab=names(start)[2], main="Parameters") iB <- c(40, nrow(par)/2, nrow(par)) iA <- iB - 1 arrows(par[iA,1], par[iA,2], par[iB,1], par[iB,2], length=0.1) plot(seq(length=length(val))-1, -val, type="l", xlab="epoch", ylab="MSE", main="Loss", log="y") ################################################### ### code chunk number 17: SANN ################################################### val <- NULL # loop over batch sizes for(B in c(1,10,100)) { res <- maxAdam(loss, gradloss, start=start, nObs=nrow(Xt), control=list(iterlim=200, SG_batchSize=B, # current batch size in the loop SG_learningRate=1e-6, SG_clip=1e4, SG_patience=5, # worse value allowed only 5 times storeValues=TRUE ) ) cat("Batch size", B, ",", nIter(res), "epochs, function value", maxValue(res), "\n") val <- c(val, na.omit(storedValues(res))) start <- coef(res) } plot(seq(length=length(val))-1, -val, type="l", xlab="epoch", ylab="MSE", main="Loss", log="y") summary(res) maxLik/inst/doc/intro-to-maximum-likelihood.pdf0000644000175100001440000130700115124514353021306 0ustar hornikusers
AÃ!È`ÇážC=;÷ê9Øq¸çPÏAÞ0sˆ¹°²²°²²°²²°²²°Åâ&‹š,$*¦*¢›€Û€Ú‡Àƒ Aàx4‚CàAÐ ° ¸ ¨ p&<š —•"À0wÞAhÁ݃wÚA°6¸:¨:°6¸:¨:°6¸:¨:°6¸:¨ºäךþbS_l2k:o*o2k:o*o2k:o*o2k:o*o2k:o*o2k:o*o2k:o*o2k:o*o2k:o*o2k:/Wýÿ¿þè: t¨|!Ýòq½¶õæ“Ç'‹”ÓÚþN—óEOuÔæ/½Ú%Kendstream endobj 156 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 7997 >> stream xœ­ZTTWÞˆÌ{¶h˜LÀ’÷Œ)ÆÄ–“hŠ5bÅÞ‚Qa€™¡ L¯wzcè3ÌÀÀPý5ÖÓÔ‰fÓ³š;ìåÛýî’Mv÷l¾=ç;Çiïþÿ÷÷ÿ•;F½{QQQ}–ÎZ²àÅñãdzÿÚ+2,:™ÿªéPÅ€þÑ oǰ‘Ç Â)¿DôŠŠzü™™Y¢Üáð™›S„‚áK236 þéA‚ fMLÏ\=#kföÛÂY9¹³EsÄ’y›æç'/(Øœ˜²pËÖmKR—¦-K_ÎÏXùÒ„—'>úÊ«¯MšüúÈAÏM5lôcxcÇ¿8”&ˆÄBâ)b1™xšXL‰îñ`ç›cÓ^{i^Â3……&PȼÅØkx²74“ ¡Ôæ7SËyì‡QÜ6G¾M,bœðæÂW€ÈMr@=ò÷¨¿Ü&Áȸ£$6t †nÅq÷AYdÅO~ ŠB1wQ/8öÿá[KsÛa¿!…ÓÜ}:­Q7 DÞ^’ÛþaR0æ“ý S\´YÄ@æª'ÏÛ")irÁt°ê”ü¦Á J ^ê uD…£à­;ѶA~r?¨Ñ6×­?:ñè+Ç^qʼRPH¡×H 6©€’Ú´M—Bóùp_˜ÏY—eÞËÀWH`7ÚTyK]å'þtòî±/ZN·U½k¢Â(•OÖ†•©Œ^ª’©dÂäxÁA’`Mæºx­R­Ô)(#GçÖ85®ŒÃñ®®¹œ|MçP;‹²¶êfSPö0/õj?BîŽ:ñStäNÁ“¹å^™{cóŠªå€BËÐKhJBËàh4¾‡ÂÞ?AÚ˘lÀ l”GïPæÙ6Kt´øÙ'¡(0ÌiZslÉ…‚»à6xßq5üÞîïœùÀAàÔ׋‹\“ïT©Ùîf ƒt]réš7ÔzºÙhyŽöGF£\¥·£ëX`W‘ õzŒ«rk¶N@ç²°ÍádäX·3&8­fÓII }÷€ýÇ>@}–%ålÚÊ\ aÒñ´Ó7¯¤4œ `s g»Ê£qƒR ¾A¯Ö:öþáÓ?´ ¸™Z 1jÙu ]íÑ Qð‰;‘Ç~ˆnþ#ˆOB€ýΞ‚Ï–ív´½Žß@ þBS(ÄC,ê¤xR9<E£GÓ2”ÀÔDNn¾-ì09€‰(Hà2ºñí,³‡œN#°Ó9WÀeéáÄãsK^¯Qø* 6ªX„ ¬ûM«šnÁ²«Ñ‘gy&´ f‹Ð°‚îj™™'?r†Š…Yó„'W¶$‚Á(v^DÔû Ô8x.x À 2øÏC‚ë¿þ"ä4»L.pìŸë^L¡—I°pþšÍr¹^f”ƒyæõW¥çô|±ë¨OfÙòL6a°» ß F7u¼ùÏÝ ý¶[‘ëäè‰r¤ãe#QT&_nÄ<‡ãHDæ†î‘-`§¾Š_"6e!¥ÂJ!®}¹lÃ"Už!äêmhà4ûL>öçœ×ÁÎ ɶö$*ÈÉ„"ÌXQ:G7KF=¸Ô ¢eGm˜#EF Ò³l˜!“·«!ï[«­´‘”jT…xïrË ktT}UepWŠë¢+–+ãÏ,)šª0LàÁ0Üß]ûò?®^“E ÀX¨ÕÖý 4Ü"›ÁnC• TdÊÙ¿»R±.Q•‹ÍÔlèæ4W›q±}8GÁ9ÃŽÍ;’­‰Ž•ÿ(Ö¸šPø#³%>IlÓ!˜È9ǽgb¢Û¢Û¬sÄE© à ÐóTâ²ÌeSV¿SD\Fn€J§Q3è%Ò¤µèmFÊÌ)å­%ÊÄq‚:¡O]¡©4Tê³öÞ»˜t|N³ â®SÜ}ï]*Û{èý¬‰J“Êã¶:Ý6#fú'L™v½SU¨â²(ôʬøåö|‰%o(·]£5j†â•«Ï³Íó‹Âp$ö/W„A84ÇýŽŠtòf УÏÐ"’ûÑäá€4sZ¹ÀŸ| `/ >-¿wŽi’@jbit½â­EK”—÷Ñ0GÜa4. {˜¸ž Ö†®=:zïÑLóÁp*¹1˼‡•ð(ª$“Su[é4öqŒ—b/Ûø¶L$Lîl´7{½Fà¡Ãè ¿™ÜSgØÈ : ä®fëvº©‹c1¯É‘¸ív,œ½º»½K±Kɨ7Ø3°Z)€œBI$H3ñ+uNÉ€}`wu°Õáv¸-%¦ ñðOLXJ¦eXö1XÆþF¢çä1Üö_=‡œ–2éµ¥‚Ò‡X½àY\}TjÌ/D6ìnt}OÝ¥&l\ZÂÚ-Lg3™’®I“JM@†ë>&‹ñ<ãúÒí;˜H3ÙÚlkö–A &»IÇ#ŽŒ GUu³òa`•„§|‚ÖÆ—§TO €Pu­¬õLŽžõ7žX¦&òI° §XS¤-f33ß¼­Wj•VŸÔ5òeÍ$¸.^ïÔ9“²bi ›ªñûÈneê¾[e÷øfQpfpVí¬À¬†Wã­rKQ) U?(½ämªYòLRg¾KV%+KÛ»moÚ^eI¼Ál°-Àî°[¬áñ•þò€¯:ÚY¾‡mÿ,>Y­u*0E Ò³ rÕJ©+8(ªQzD¡mÛA(ó;½¦n0áÄ 9Úí¬°òVt;d˜ÎïÈ€Ù[Øcæ’NçÆŠ=èkHÀ~œ{¡ÇV®nÞÆÀ+ØQtž.I»1c‹0=Eœ °©沦Âʨ®Žw™Š3×÷Ÿ„D-Š}£K²YW‘Õþ›aü#ŠT’¡.‘TM.F"E•àP1#1Km ŽŠì"wm·5yÜFà¢ë»h†õü‘z泓‹__‘²õÚ‹¢ ^•C”Àh4£Z¬(%’w €Hì²’*Kµ„(à—VŠÌ¿0_¸úcâ\;nO7\‹o¹³_Sñj[OõLwMM¡°XS¬•1‚„ìY ]Ó­ìžnŸÒŸ}1φ>;;v=è™ï¨CË1õÃ&8FŒmºàJÌŠEâxÙq!Ȥ²ªòêÊý!ÚN*®¿XŒ-Â#/ GƒPÌçˆy/l¿Rǘí6*‡Ö¦–*SŠÕtÎæM9ëŒÃAÚÏÚ¨’»|\ûÃ^nÊÉi;åõB ëhW6kÐ{Ὂø?ný×í°sj6`x‡Éu‹ì—]&lÎSÄe»%NÉà+hÁ54ÿšg•ÛDeOb™ÙZc+¹~_…‹Î [¥©óF|rº&½¨¸›dw…Éÿ "ÓÉ<½¢etžA‚ÈEyž%¦»S¦`„ë:†÷UŽ&Æ9æón(ÐxÀ² c5š3 x=6 Ôj4˜Š(…Cï¦á=N\ˆA¤NÐNzA‘k\@½Ì¹þýïo|ç¯o;*(—Ê¢¢ÿiB°¡¬x?žb»RN¶Ôé3F´%g×ßZìŽz þ)ê ãîjbæ’ð9¬M$?Ãr€‰tèv§‰WF‚®.ï¯.©qÔêЕäùÅX\Š4™Å8¥(²7o¬Ááií%ùeC)(3”þ#<±{3,™ê•RPn*TC^ÊtÖ’o/Ö®ëÞ› ‰öKìÞ8™ˆ’,Ï÷м8%Àþhûסpb½s[.&òy錭 ÷¹˜O¶ÑÎa:§‘èo‘Ú|s̯€ÌðÁiþ¨Ðíhœ¹¥<¸›ã±9<çÞC½ÑÀ ( õwdÒçÉL¢N”…Óηյ›,8Y(‡ãP“&×ÐÂõI‚•` H÷eÕ%ï×_ïR&¯}÷ןÒ6ηJãSî…,ûxqmÇkµ8 žø)²ñ'P#dÇ ÞKÅS‹ÞNœ™>Œ£ÐT\É"¸ ®‚Ã`œÏæÔyh(œ‹Öb©×jÚ¡(µó^ €q¼HqÛËBaWãÐÛàÚ¯×-s΋Šçã47vU‘Nj(è0 Ž’ÂØÚU*Ù¼üba7ÝC»? 
F_ІÑqoòo¢[œT>ƒ=1ЄÓ9G&™Gå^PBÁ 'ŒúÀ8òÄŽKÇÝ¥“ÎÓÊóA•]]¨­ªö\Ü4~îÌçénÕCkà’ÓŸ >àÁÁü˜èC8ýæcqÜoå°_ ÏH®Q®_ÖPcàÓéí· ÝðÓ…µûVÐ[=[L)€ÏI·A#e#M^G­xõ¥ò2m¥¦Z_…áÇïÝn.Ú•ågLd]umåv`a/£R£QƒbªÐ[T^QRZ^'ògnK-eÓz½N§3bÆÒ•Sk¡¸‘—fg83†¾ñʲñiÞŒêlƪsh¾4Zƒ®X¢‚À/ÝÔ ¥¸å)ÃyNòØþðqú&çÜ×Ê·¦ç/˜3®á}ƒ?è8ìäyÉÞ7ÎÏÛ¤šüf¶°Ä_@K« }€úÍi;v°×%˜r%º‘|ÙZ«Kb:Íìù€OtJ–.“Îé>Óù/Ntš¤ 9{W{×âÝŸ»9o©þïzë;Bì ð¸늹°õ§?޹û~5Æ.È!/‚3)Ž·¨×È„ú—á°{Â×´têÝÊ|ËJa!-+Ì—åw€ê¢z§ìf¼üJîñÑMUŽjk•ódðÄ>ûÊÉâ {wŒ£ÀTp&Ž>ãA̵nWÐL]ç|òg1z 1¨—|ƒ¡ÃÛÃöê9øØ6é|øãìKqÜ»­Ý^´¾V·–A ä¶|ÍB¼Ôô°„Ìα42‘~ä(áqÛ…z‰(kˆ ˜ª«õ‡ë…Lš{_o0ê‡vï‚8Ü1>uâ<¼|>R‡xçÞ9$nÇÜÚülMûëMLbpŠaEcr0|ê£KG®~wù‘å´EªÏÂÙÈcr¹˜A: •TžY¬¢5˜âtŠM»×ÔÏÔ¨g÷©¾3>mµÆ§=Y|²(0Ã&µ)5uÕŒWG> ’Úè2º.ÊAºµ6• RÓ(‹T›ÕÎ2OÈë¤í.‡Ûêî™UVîÿÖÓŽ¿<ìjÇ»ÝíhnÔãvÜ"A¾)äSK5ã¤ÙŒ®±”ø€ª•çÐ:Nnž(géÙœíçÏÞg"22ñ[…m¹©!ò?5—ïFŸè˜ÌËC",HÖ£–«áææ–S–ýÓºHeóEr²fk1–>P‘áÎæën6ZÔWØvÐAÎ~pJß¼-œj™ieól^WžýÕu IW~ï¶jõÿv ²Â¹œ ¡usŸ¾]¡@¥«À+õÒéþ4»ÒY dT¾D'ŨXf&(tæz E>a‹p{vËÖ#ñ˜µ]ÀJáv4øK=ûwÝwžõ7sùd™W^”±&#KÄdd ²3„T$§'WbÀÄuÿŽp·HµmAÝ&¦³‰èûèQ8ÆÜùñ!FH5ðj Þ"/wË‚Oî=­É {äÈeáÃg,>~IBË[;À ~…×ataô«å— ÙýŽ$]´Üô"3€ Ǻ“é ðR£§=ù»óâdö'ŽûÍ:2þ䂹¾ë˯Ï;欥‘’ÜšaH£¥¿' ÿ¼˜“‘eia ˜­†¦œæì¦ ®d0¬HÉZœwu]p˜“Å (î7]Ç˽£µw¼‚£[˜<¬OÊ9´ø Z|-±Y„˜ÙÍœJ‹¥Ñê< Ç„cÀÑ—ÅivaçeËñäÚs÷!z/bö#Ú•oQ[ÕN–çJ` ØJÏÁ§à³pAÉnߎ+ìÖná“-àP±e3ÐÔR_°$oiÞ’…hA¼2S‘¥Ê,jVì·$Ãþ[à€M°¿Þ£fÕ®á$á¬àבû€KQ3 g?PRìiyº¢´q‚1£iÅiÊT¬)†­GW‚x'^ºG¾[ºÛheH( é¨=ê{¬0ÖDÓG‘cQ‡Àõ_Fß„y#­G®ê«=ß·`Ê’9Ì’Ù‰óÏQ*u¬b²‰óÁ 6¬µ­^¨dŽÀˆ/°¤`‰¼|1>TWWW¼qñ«kFS0æ…OQÌ c¦ºÓ÷4Ôøê$µ|쀶G“Å¡.ÂV|Œ~õøúý)Ø‹ì†Q‘v^óãeÇuºnáêùâI³%L®f‘«»ž4©F' J8PŠ÷ê·µcg0RÒ8/sÂD° Ì®aÅ× ¨Æš`8”W“ie4œäüLþ³êsH<ü’÷߸úR9ýp½öý÷þ4äóñGǼõÜÆÑ鼺ºšê†°¨šß³HS GD’XxàæŒkqÜBp2VçlâVÒE| sŠ»Ýuz–}óåwáç"ËH`5Ùº«²ƒZ®¦Ç cÐScÑY¾LŠ¤Æ¤*æqïC}„Œé${vERæ‡qŸ…Y†=ü]RæÝÅ6åSØŸg3°‡óT½ÏWÊ­ÎÎÊÍKúÓÉ?Àþ øq¶þ ÷ë$°.[W’M@4â^]œ°Úív\̤Ѧ³j äåkJ¾ŽÖdφRìC´f­Ek¢ -ÀEK¡¤Ò¢v—xü6jj«ÚËÐ?¼¨7˜t¯¼•R¨Ê×àQ›w¸à+ ¦‘‰b¸üa¬öù¿`áœõ ò3(LJ Ð/.Û°A§Skq דZ‡ÞRV»½ª„î*?‚]ò÷ÞqPÜ»)"Iß/ W–:JK¸åñÿêí÷ÿö_!7ÂÓƒQ{¾€²/¢á`x‹g䀻ÖÏ_®oØîkÍ ©¸:KoÒ= ²ó$ÙÙÕ’ÚºªÊæë á…èõÑh1z=Q¶Âùð‘!ö~0öAý/áÏÍgL¨/ÏM¾€œï`” N¤óÉÙÔ/g}ý‘Žnà1º{¾&âÈíZ0½Î¸ýyÇ$^JƪæÜc8Ö¨ZNÔ˜Ø Ì笘±túÆ7Þ» Å€'©Œ*Q=mæÔš*| žªd ޏøÀ[]ZUZõŒwz6³JÈWÁý°,²ëë©_:Ð1OÜ!Ñ>lÆò á<9={æT0‰’“öçþ2ÿÏ€úr.Á¾í)'¦íedN¹«ØŽ þ™gv}4äΫïyiì‚áSO¬:» #{{×j´Z‡€+‡Oœh±šmV®´!·²xgÎ÷IàmêÕ©«_ŸñÖþÏå8Gø#ƒy.òRûößK™RBIt`ñºõzƒN§Jí+r­lšW¯~÷×eÃmþÈ3µ±{îλ7Õ$Ò ^á™8`¢qÔÒ·3ÓRs׃TìÌ QuÁÒ†`AMf®¸0GÏ(É)WùïÃi7áZ8>"€[Ь oÌ, ¹”Ùú̬!ˆøÇû~ïžÜyºŒ6B.o+©›Éáe@=¥‚ñW/Y?¹¸ƒ)sVX+õÀF,íq†~qd;¥wÆq/øÈt¶1¹@H «áº/Ls[k%¡tš{Y¯ÇVµ÷þ§°Ïâ§û]û‰hЏ£W÷G*|qÜz˜PÅ3kYª é‹×'pƒ€žÒÙ4¶Òº~/Øí=PûŠO¯ÄwÉåf¤ÖæÕ••Û¼.&)2îÔ}SvZâ[¹5l!óþó¤¶VáÛî¬èµâ³I·ÿeÔX`Âå’X_ûÁþŠãÞ¯ˆäò¶šÀm9<ÑÖfµb :ØïÕ½4{kžŒ&&$LG‰‡Ôp;üÕMa‘ŸŸ_¤.’uqÀóö»¿{÷á?Êÿµíñ÷§Ùj¯t(yEÚ|‰L†¢«Ñ[½ МÓh9‰úÚôv-¦\»Ånõdùä^æi80¾à‹Nj‚sà 8Ènw³GMÜV»ÖªÁ­5ŒF¶µíCY„øà‘?öÄ=ÿùÁonÅqÛ¾ŒtòšìõÛÁ~eåV§Ô”o* d$بÌMÛœ´4kºv:µš:£'@¥Yå6R6ŽÍlÁšB]$×ZÏ?PUî/)¯®ð»B ŽúzüÎ'ß³îešû‘È“n¬¬® ‰Ê³hHu:Xˆ@õ[ou7äzÚ!Å$”:ú,n…Î`Ðcê“Õxd?£Al3^hbš‡ú£þju±û>­Mk§Mœsµ¯¤ÆÂØÃð-ÀþÌ_ç¡gá ¬"6 0âË`üGæìæ·BÀ~A·\Š~ðÛ ß$sÂ9œÌl ’·àD2ä×.W@Á ůf‘uš™BëvÛô‚,ñ~䀀Éïöûœn< AXQ“Ke‹ò³rªrëj|åA3c'/Ï/šúbúôm´Bl\ê×yë†~±Ã5n·?n¿©†$ÿ§·à¸ôÇÿa¡åF™N(GeRÚTæ[9ßDrF!r"Ø€¢x(õcð€ëGÞý“›òpüÀ§õë¥2…H€ÌVäVÙôVƒIÝ£Ê0ñt”x‹`Õé³]þ(î-Šèüä/œÖí¸Ç`ÜÃe_7{û…=ðGˆj:âý&IìÎÏ ã{ž*ïÇCË tHF±NunZcʉœÃà¸x"Ü^wÙò!¸¾R@NâeŠ»ogêòÀª¡Üv­Ž=SŠéÍcspQIÌ0l-;¬{|Mþ¦–šX û8ѳÏ1kHe€ =V€hÔ ñÂç5¿x¶†ŽAÑYDùžÇ'ÛÀAÛNÕYEnNÑl.(0ö;€›l2+0b'»ýîé:éÒ;äEy*9½^°\µŠýï½Æ÷léËÄsܵ«qÜOþïûªý7‡Wdõý3 ÎzÿtÍUÊßk TÅ2 Æé$»2¯¶¡Ô_sxEõ¦ÔŒâ‚lZ§×jØ£ ¢2¹]äɱç˜(9g4ê¿ b`SO¯r  °ÉH}@r?¹÷þÉOB5*y« e ä»ó+5Ô¯©¬ë¤­cë%8ï+6Àÿõ?³õw ýÿPn';¢~¶»ª€Ÿº2£aqâʼ­«iéŽÄŠmàe€úl”=KÁ0Šç¡´Ÿ8_ž?ýYc½\¤…¹‚"{‘GÉ`~î8Ý7Ú3¥‚ø_?‚ãendstream endobj 157 0 obj << /Filter /FlateDecode /Length 570 >> stream xœ]”±nÛ@D{}ÿ@”no6ŒkœÆE‚ ÉPÔÉPaJ 
å"Ÿ™‘"ÅI\Λoûôüõy9߆íõ2ÿê·át^Žk»¼¯sýå¼lvûáxžoÎ×ùuºn¶Oߦëï?×>ðýt÷ß§×¾ý¹ß´»ß4_Žýí:Í}–—¾yÇöx:µM_Žÿ}…z¿ãpúøé>Û]ãî¡Ñ~iÖ8òJûÐ,Z;5‹v’›E;Ë›E{”íÍ¢í´…™¤qä•v×,Úì¾Y´{ÙÒ,Ú"Í¢ ÙÚ,Ú*‹fÑB–4mÊ’¦˜¨ˆ¨¦˜¨ˆ¨¦˜¨ˆ¨¦˜¨ˆ¨¦˜¨ˆ¨¦˜¨ˆ(?ŒBæ geæ geæ geæ ge& § ¥ FÇÅFÇÅFÇÇ85‹–+¬ì¸ºçªž+V‡¬ YÙquÏU=Wæ­Î\•¹šE{å3«Ÿ[õ\pªÄ¿Šš§Â“¡ÉàTx24œ O†&ƒMÀm@m€MÀm@m€MÀm@m€Ûƒ7mܼAø¹ì î ê ܼAhƒ`|B[„›„ši`"˜ˆ-ÂMBM‚-ÂMBM&YÓM¦šL²¦yS¼IÖ4oŠ7ÉšæMñ&YÓ¼)Þ$kš7Å›dMó¦x“¬iÞo’5Í›âMÂ¥S€Iš4QŠ(?À«^øÏ7[ï¾N‘ÏCc˜ß×µ/75>Jt‚œ—þï4º^®ºk 6Yü%endstream endobj 158 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 7444 >> stream xœ­z xUÚn…@U±(š²‡@´ uFeSQ@vdß Š„M¶„lÞ÷=>ÝÞ÷5I‡$Ye™°É"d û°(8ã¸à¸ëi¦˜ûßSÍ"sïõùÿçÿïÓýHWÕùÎ÷½ßû½ïir°Î°œœœ®…“æÏ2xð`îý22OæšXIæýÛ3º€¹ GgדÏÀ ³¾ÿeÏ™kï-¥YùbB¤QÈQCœA;K†¼‰ˆ* FW•ã§¥e;/~GR¼–¾‚) /¥sŽœõ¹Ðš™Ác»k&ô}I-QËæÜxv¤‡Ý½ç¿>¢æýíH6Ÿ•ò@­ÅjÉ ^ûôrÛ„7Ý´ŽûÐÕøXçâ3ú«äÝE´éŒ;³½#73(s–·¶yIì@jpöEíHv$`ûö©kìhø¢‘tãßüzvNª}U«œ¥U&FS8oy! 8ûˆë8|Mþz >‰SgÌrѵëÆ ›k£Qþ‰HèÂ"¾ªDfº·9¶2 ¥á©ßr\{¶õ5Êqož›hY¥ôH\,Ó Ä!-Êñß÷ßiD9¾ò;9^Žo*Ù*ÝHHüíHü:âìcEEÒÒu ¬egò ãÈÉEWŽø­~àg€Ï‚þ ¼ï£»‡L6jËýȬé¢L[:ïZŒ ²ï¾–iãÝQV©^ÈPI@• ©Ý.'BˆSzTÜGÂ>š §³Cv5.RhÅ…ž¡vߺ£äe”Ÿ0¯}¶ÿà•,ÃvÖ0ÿ–‡óíqÞö3°­x-ÓóC[\HH”±Ü§Œ1p*žòùb1µ_ÄfŽàÍ —X¬Ð—ÓR‚úfÓºe‰Ål÷—ú±ÝØœkl.ìùáîºÍ›6 _ä`P‹`pýÔåë'Û¦ÏtÑsˆñ“gεðô5íC†ÛÓpi:ïVüÃÙ‹Qûoe÷]Lå:C©Äa5Ú÷>Ôu®‚ Gmî(Ô¶ÀE^%’¿@*1•­&`¢g±1(„½`´è,£Ñˆs}“N+wC“Ä)“J¥DFší—âÔ~hžƒ—FL±:Ÿ¿¾B$«#%ôÇp¾R'Ì6ÌCÑÞøerzA:¯ND¡z Þæm&´Jš}—(t"Q@‹Rïú”MÅ©÷jµTæW$ªÁá´: àÛè—¡°%\Ðs42”5ªñói–€þù^Ø >JS AKѶ‚d<’j›Z5enñâ¢ÀbîmçýÐû |õ¢¾ý!›­µ„eKP¡šÄë9jRûDÌX ðÿj6 ÐÕÖ»©HúýIú‚ú$Z­pÉ ¨o/ Øqê£Áj,@Œ(f¨o…a}ÝÒ˜'Á®ð…™íyçÒ0šîEE ÓÊÛR¾[‹úãÆ¯ÍßÃ^ì\0%Ä”»–A3Ø}ÞÝbóÕ ZÀ‡×J’j­tYä­Mµ±s…ˆk‰:µ‰^,u­äp¶‹ðö5xå"è˘‰éý«…ˆV‚Å Ñ!’ZŸRƒPiAiµTʼÊA±7)ô«bõÎxÔõ·JÞâøR‚øí¥ÌÞ`¼J­–ˆÃ¨á¿`¸kü‘åä…k{®3~¢QZ+ªÕÚ@,)®)oÚCCND“öFÔB†óW¼Iî”H䆲ËÌçxœÃ5Gá»oep[C€2«0RÖ¸â€nØvù6¥HJ"ökêéOðxØ“Œ+¼f¼'¡bz4.Ö”K}ª$s€]ŽS»GÎ+³ú=Gš†uDÂ-£_GL¥¢i}‘Œ2Åhcâ,0Ø._«’)<Êp Ûñ¤7‰ªýUˆ 2JØoð"!]PncØv\¨Uʤ~uý½‡EE™®بƒ‚‹cÏu©ÁY žëÒŒ£”ÖÅU>>ó—!ŽÅA”Ú >þùÑßu áI_ ÊPޤÌ#¦9äXŒ£pÊ ¤/¨RhD•Iš» 7Ç÷n< Óý?›v†Œ²@c+˶ôD\¤SÊå^Åo€ä”Üy$²:!$³Å ÌdY½<úSúò°Ú9Ã&ñ'¼CóW«ŠÀR²ß5¶+|êÌéÔÉÚî²{€›Œ*=RZ‹‹€IdTÉ‹W*ÖòåIo|vüÒ7‰DMÍý¥¬íy{NÇaX$·#»'ˆԳémµJš%±¯ðz.TU@h4r¸²f޵µþâÞ‘·" ¢o¹Äù.¡³¤´çFWÒÏo8_!y¾6èÚBÖ;‚üÿì‰%ÁaРȤ?jf^5ò¥ÒÂ|á$ÁdÑd³Ö¬µhÉÜœÒlÐ7(HÈHçËw)le€œƒ 4èé~UÒé²ço£×Ÿ˜•暦;"ð×_uÔÐákÖ®,¤È­LÑ™.x,noaÎ ·D¢4ÈÌô|6FܺõÃ×ß ‡½ D%¬¢Ñ ­RêÊ™…âK‡´©ÁïkDÍàµx=} §8¬¢NÝ|:Xv¢IJ/% «b+ÜîùJ¶;›sñYØIÇxñ6°+þþû¤‚ Ê„>uý)‹º“ …W̼ âqmOS;Q¦ŒSp© é¿*Á°ï²ßò€©5yùØÕk´ßon+r½K."€ÆªjrÂì)ÓG-½zÆðï(F‘}yŠ÷¢Z¿ÌV¼Œà£Ü1ÔzÔñÈ)µfÒWxßM*<"¦ G%,¬[}»÷,ñKîd …G± •ßlG™Œ§‰à¨Í(3§"Hn øj%zl«8¨MÒMw¸ÑoTû—C½—A²øó|Õ&ÓFd æsw‘@‘ —„îqw<è…'A*“yò\.¤à|Ü'¢ŽæGŠ·3k>X_€rÚå™!l£#l÷_Á€ôâ‡l%6íq£]€0™º¤¥‰LK‹JV)Wƒ`æ!Ñ5ÒzŒ;±³ý:8vέôo&çØ ˜B¹üv.o¤n²CÍŽ¬.K@9sl³# S#/˜OòϨ‚í›Zwì° .“0—û,ûõmúƒÊ¶É‘w1(A>w§±,×›»°ìvì²7ÖnŒ&ë·Û62€·‚í UÕRfŸæ’lw̯ó××.6¯B-´)o¶Ø6Wo!rœ0Ɇé(ÊéH½ß³ÊU*•X5D™ŸØôë8熹_j”÷õÄ^<î Dâ*¿@oà\ð0˜†ùx,ˆ¦µr†}ü0¾og|G S;i·2¤2N4œì(óÕ•¯þÖ]3ÒМ¥esGîÃ@B³2x<’E²€)†3`_üïéëà pkÌ•?ºHÖÌ‘¿XÖ%™Y3šaâ#Ó‰"ð&X\²t‰¸BSÊï@Vr Ž?|è|÷˜Sœ×tTÌI´-õ¢~ÒÀO2/ò,¸ ¹Â½Ã^ŸñR¹«4TÅØª&«‰´ ~”!(óI•$õÍJqñÂE}ž}–~ÝÖæŒï¢w'Ú6×~Hº ø8›ïOÏÂMÖj«ÉJêI%¼6¯ÝË4¨RÚ¦¬×N|24¨ë˜›™fàÙâŠÚk%EÄ@,`O’bëŠèºÕïÊÅbš5ÝÑò€ÝbvòÐѦ=È/½Oƒ¯L®q¤‘k–W•jŠUkª×‚Õ`µsm¨Ò·.fn!ÑÞöBˆüími;Þf޶ʹˆÔ —-~‹6ã‹íïm5 n¢ÛsÓ9‡;`ýÙÜLƒw$åÚïÜç: GŒût‡Ô­ ­°®d1^ÆZJÊÊLkÌŦe–¥h’ ²zÞò,Œ,i—¿¯Û 6r+Žt Ò1aP?-Rà×jw2ª15MÐP4ª6ÈSäÚy†z J‹1`ŠJ)À•r³LëÖÖê™@U°ÊÇ'«ðð¨LVÅM›ÌMÆ]½¸v5DÖοè?…O2Œ·€M–ñŽbÓHÛ;֕覕¤”SjƯ\÷þ³DûБË7w!ˆèùçÿ‡+pîÿKBÜa‘nãÌ]ÞÍ8¥c"œÅ¾‚Ù{ó!båXÕ忆ÒͬÔr¡1 Äi2e°ø+|q{9jù"‚’Ís ú¢mÛž£)™S*•™Ji‹²†Ä$µW`Þ™b>!jvò›×o ©=û’Ù÷’.\œ÷ñÄD‰g=(F43qž( ­c¾e‹y°˜ˆ‚¨5 €KêÄD›Á¦z²šEhÊ«t| <Ì'àú·â„8/+r~†Î>¤ÙǤæ%J‚ãŽsY%åŽì ÚPˆ\”Ô!¡Ù?á2Qu…0¤¯GZšxÕr’ÂhySK²>ESß$¥Jiêg34¿1-w”±ÿÜq²uvßÌ;°ö°æ#@~ûÙ¥ïÏ/>=¡™VÇŽàWòø‰Í‡ï_3=B»*Ì•[¸Ã"d8a_¤œ“Ú ˜¯«›iêò†¢Õ±ù_˜úüÔ?Ï;RÅl•í2n×ՙ꫚͒†UJ§¯˜9m`óßš‘I,©Ki¶/!ô*‘œLÅí÷OœÄéL냄È&ä—@a•# 
ÌU¿:@,±Ù$´—h”Äù‘´lü5 QüWо·î²»4€™Fl”Ö""ŽËÄ&¾Ì¯D€{ñ¿˜¡{Âöàæv>[ˆæö6Ø)‹¯B‚ÚÁ•;kþàH<æ †³@/¼Š_¹{°ƒŒwOBErÎã „Q]œ¦vdçûUœ¸ÜwA*࣑›ÕJi!.ŠU¸xך]Å»Ív“© +Ü_^†G-² -ȃyI‰GbÈŠ7¶.Rè„Ùƒ§=8;”}± ¼I—ÕF]JåGâe›Á€ÄËl\¡TÉÅAuüw]/CHFFô(A?àa»;‚dD@èê›ñàÙ>d‡ár¡¹‰ Ïi‡'r02Á.…‡a÷îE}ŒDú3¼ b‡X§CH¥Õ¸Tj(¼Ü¤Žµ°iÜŒ(;$õ±XªfíÎ?ðòóI©G|wspŽº¯¶‡6§ÕÈí p=Ê„ˆ3H~MÒ²‰ /þ?º)¿‡µ¶r)¿TJSá2~rUA1(– „÷šs–èvT|7UžÛÓx"³NŒ7ì ä¿#€Þ¢:²" ®§kQÞü~[˜¼=ên\d½*&¬1KÍÚÊ ©y͉çÚy¬?¾ÿäþc‡Ž9t|NÂÇXâôÓCØœB6§<*l¬'êCjšY T×öÕGR®F@^¸¾lðs|sâ\¦¤r}EIåÛËóJ±ÈòdM˜†·J«ÝÈ´ c´\"3–Ó‰«¼Ìã+PKÝ€2<+±©a‹ó$¥NWÍ™»hÉ"=³Æ4Ù–“fÂ* 蒀܆Sg.Ü\=hÂèÅ–¼åo]ÏÔ tZÀåõi#R’¿XͯZÚP‘Ñ»–ŸUۀݸS3ôêX||¢Î{[ß?ðQŸ[Ï· žÀÂÂ`esC<Úè1{Œ÷Ï›2š“ê“yÔX8-“á­4•TòKz½hHJ#+Â45¶ ÷m¯%kñ:GµèEÃUSA[ÇáV†V†ä4¥A.æí•Õd5.âÔK¢*+¥57Î<…æüÎW^€£y,6…àN ³:ÿNIZ7Z6¤ðÆû†Úô”ˆž ±S¸89Ó”dM“y þŠGk¥ôj¼è%r>[ÄväkeZà“¢$fŽÓ-i¬„ßwa¿'î×¶¤áÐtÎg°ákü΃¥È’¹¹cd UÑRùäh¼lÂ+Ôú2†m ¤á†:g$¢›ë·ú·òúÞ)_~cá˜U+ƒM|Æä0pÞq‡×ë²A¯ªKJûÃÎï|ò ìtöðÐAâ§a_±—¯Ð‰W?@ÜÒ‡·ç87á*¬;˱îcðKžë‡K°ìŒtòöj¥Î ¦¢Nž¬ÇšÎMh™Å¾Ö—}Àò6±óà”_¿û ’¿ ûKÎ["Yõ.âU7K!uéüÖS´ï0´MSÈñs'Stæ# !€³ÆaqÞk6Ÿ(s4F%…z∛`gy,o»°3H a“†•Qî(nEö"A† [ÒZgOÆàЋp$ áó…Ü FÆtä/âÔam¸²@ ÔU!ûÈ™Çw¦û`Ó¶4\’ÎÛÕQØèG/êG8~Ê›Á³¼D²8$'þH{ðoí0§ <Ü|sçäF’úæXìÄŽs}À­çŽ ¬%µø ¶ó¼þS.=%d¨'±ÃxâÒ•¶3'/¾7ÚEëˆ1#O8mû%%JInG)9u¥õð±ųݴ˜2r]á¼É­Çôôý<@1*G:ïÒ”W&œCµÈô‡)žiðl¶G_ ZN\äyœnàeRUa_"®u•ŽøŽÏÂÞ«á v‹=?”íÚïâ@Hšh~ìˆîßJjá`¨¶Tƒj’ÊXuv­®yâ„39vîñËŒ‹¨í8|ê8 ¶Î γmF`ºW¤(3QáØ3º‘ë)Ñ E ˜\“ªØIoDn´’›+—ÑìÑο÷ÑÝ]e4èAÏÁ£<r¼A÷µý×÷^Ûò$Ü I†U!™R­5¨iÖÁ*J¡ª·J¯Ö9ûu}RU§L*!ú0ßoðkƒ€DíNŠÜ 3|߈½#ö+Ý*¯Âu/pø.×í/¥ó¨eí0À“›u* $~9òÜ §ƒû~èž {ã!MPfcîäãf€^¾ØYaÖr˃Š$º8iø¿ð:_ Ò†¸K{¡K«¹Kï­t@?Cø}šÙΑ잌Žf—ÎZ°^mé””G‡Ùñ³‘Ó?ƒ›ä‡•禮®,_W\_º•¦Îø²;µäÃYÊ< ñj…A¥UêG,þΈ·Ñ8Ó#ñ"("!¿Ïå§>õˉ_Ü~r aµGåî{üé“ýNª]jŸ2•B.LèC:æúÛ×Þ¹¶(¨hC†{«ÌÝŽ¡ZŒïÈÍôCóMU#Wiôl·—§²€}†”N¶ßE¶?|lœÇä1y¹LC™Kíež>?æä ï#HÁÜ >§ß BdDå—šk€ÅLÿÛ­ˆâ:¡%”5:5PILÒþ4êÛ°y€¥æ²³Ø¥N¥r´7uxÛ©Ó»Þ§Û¯´| ¾°ï4øÛõ¦Ò' IPqjÊy—»¸”œ åð¯4¶>RC{÷*)VIä²€&l`õªzyJÛ»†¨ÔVÉ«TŠ|¹A¥2´š$ô{Q&›c-‘æPXHe TEj aPkp'"tˆ›k¶€ÍÍ’Y³äøâX8‚hÚÅT„ªUáJoo4½õáú@c(?ì ø@:&¾ êRÉzY©l¶¡ç!"¢Oð%J•ÖÜjVZé\.‚»ŽÐ{{9§Ba%â§&¸KÄ»ý |…@AÇ*Ð tÊhª){2Œ4šHj^Ïük%a×¹”Kô†ÃqjmVqFãΆjŠ©ü¢ûZ»«FPòÞ…ÏåÂ÷ky¦RaE¥B¦-±€ €@¶'ÎÞ¸´·üà+³ŠCj’ßjb\¸‹ûJÌöG¨ÌF¨¼ý(ï_ϰ¯"µJÂPkùIšÜMÈŒ¡^†ô‰½…¹½’0{ •Mz³#p*`ä¾‘ŠŒ|¢Ä¯NÜ Îʾk`´ð”f½Uéî$ ßÇ“~4¨q߯DpjlöL–ÅhyÀYðñœó~—³î‰.wÛwtûL8ˆ›@ŠÌj8Öœþ°ÅA” ˸0ã"_"•-+ f SãOlMmØÔZռЦnh€Ö¬­þí@ëŒ8go܆Ðo¿ËcŸ•²“Û“dŸø˜ _¢Ý8ìl‡cìࣅp2ûtHgÕšú½Ø«¼ Îø.ì:’ØÐR¿ Ùó³zû,ÆkÆ®š).Y_¼ÂBÖàÃÀ2ØUu½&Â5¡‡¾»¸‹6dBUÙM1ïÎð_­áþ{?jh¡©-¡5T—q¹ŒÅ”¾*f`¦Oè‚¢*“@¦£%kÊŠWh« •°Î®Îwªæ ]øVG*ÔRG>JwËõéÑÃþ7–!endstream endobj 159 0 obj << /Filter /FlateDecode /Length 352 >> stream xœ]’Ínƒ@ „ï<oÀò»‘"_ÒK­ª¶/Ëq B}ûŽ'I=Œ¥e­¯‹ãéõ4[^|¬süJ[>ŒS¿¦ë|[cÊ»t§¬¬ò~ŒÛƒXã¥]²âøÖ.ß?KÊñCîüÞ^RñYÕÂOåýRœût]Ú˜Öv:§ì‚†Á²4õÿŽêÇnxüZ½*pop&`¾®P•QÀÊqgplpÒð´ñÓ¦6 X;öF{ÇÁ( |¤4*T Ú[‰·´¶o%QÀÆ„6ÄmˆGdæÏ+È*Ì+žW:£€c4 áWèYܳ`0ÂáˆG`_A<‚bNÊY©ÏJa_A=‚¾2‚z…}eõ ûÊêTª#ì+#¨GPØWF@õ'¾­¿¾ïÑsmòx[×4m\6.“ïÐ8¥¿}\æÅoåPö õƒ´oendstream endobj 160 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 5058 >> stream xœu˜yxSu¾ÆÓ“ƒ£ÄŒé­sâ.¸â‚ë(ŠXd+ )¤´¥¥û–¤Ù·“åœoÖ“=MÒ¦mÒ6t£-‹¶U6ÙdÄÆe”¹sï3ë½úüÒ9>ãý…êÌ\Ÿ{ŸóWòôéù-ïûù¾orxsry999s_+,^õÈ’%K²îÈäfnÍkáÓ̘®ƒòà†9ì­?W.D[ ü›þ<Ÿ—›“sË=/5Ö7IÛ*[n[×XQÙÒpÛª¶²ºÝå?ù–ÇãI–7¾ÔÔÒÚ&•—µïT”¯«¨\¿«zwm]ý£K-~h ·žw'oï.ÞFÞk¼bÞ&ÞfÞÞr^ ï%ÞVÞ ÞC¼—y…¼%¼•¼U¼Õ¼µ¼u¼"ž÷sÞ\Þõ¼›x x·á-ð¼£9‰Üù¹£ys^˜3q]5ÿAžà8ášûâÜ?__þ³ô ®E7vÏÛrÓm7Ÿ?¼à¡…+…¤ðÒÍ«nž¸ù÷¢–ïs*Gyó¾Ï}TÄ3$2$r2ÛNæ¡ô½È0 Ÿz žåRífÆJ[ÁB˜<߈Ý ï“Qð(Â$€}©v·FL5ëj •Ðú´ÑOOÂCAͱ '¯^±ç»øÃÞ]M…uËÚVIŒÍúh"J‡Æ>ùüpiw9Üà&–r™–Ý*+©«j~u;í¶p8àôWâ vžî9=Ïv»{¡—8»}¤¸xuCI‰×þîž}À CL ±ôÂÌçWk¿¾E8‘F3"‡à’] þB¸ƒ°¯mlwwyߎm@pGø&†2‚‘0øh/™àw÷0*ƒEG$œð;·¡ÂÕò<äãmÛ­@èM&>Èx%¨‹/lïˆ2Ñ‚ßÀåÔÑ“ƒ#½“0Mœ*þèI’kÖ‰ÐZþ{}E%áÄmÝÏåh4vÐ’ñ &‡¿3˜ötK¦ÐV§ßé³ûó= Q÷g_ô|抹Â=NÍ㟷=øÌº_>×&‘[äŒ ˆyßçüý»nåžGÁóyè¿2E"î껸»€¿œxü2ÀG·¿E÷¿ÞöùÝäE._d«¤ª 
UòÊï¿_E¬“™–޶Ÿíï8Ý}zú@W,ÕÑ„“¸³¨ÜÊØÀ&Á‡ù~O–žy0ƒîû*o¦|&GÔö^ajÜ÷¾ZXøøãEÜMÀ- ¸ùsw g‘åý SdpÐîvxÀϰf'R€Z“r×}‹ïãæqÇš«ß¢{>@ë‘økÉxrºk ˆoGxFJZ%a,4fÂàÅ·àáûv¿Ã{ÕŠAWÈ$œÍš”š›ÉÙãàŠ³·½&{( 3EgoD['Ev¾¿zoÛ0èÞ+¨ôgkûXUµ¼µYb4Q#e¯ì¯„Aq·±‚±a9[±œ­Þ¸=r’þñèù1 b¬LY®Þi¬”<Âýͦ¶)Íê|J© • aâkhJ f§ÙÃ~Ž$!IšèÀ¢p¹WŸea($4_ `BÀ:¼6ˆ]ƒ@VÙv›vk4˶©­íÍ@l“žJKì~—ßáAwнCþîôÞpXíèö±?´uCá;ÊIºyYf96ëúÌb‘Ô¢n)¡ ¨©ä;kƒµ›kš·W¦}uýe°ä‹eØÁ–¬ƒYÚÓÉD".28¹0.À«¶yÌbÉ®È,ø)äúÅ´ÉŠ­ê ! €»Û›|©ÄgÞºxÆKxù1HXÃú¬}VkïʾSœF[ä9èõÌ]"Kƒ±jÜÉ=-6ËÌ2ZFXÛû¶»ôÖ‡ºI§ËÁ^ã ã1;ÌP:îŽÇ€h³ÄãV`cûÅΠ°Øé‡hˆùÙCq`=ÁåÒR”d-ß»7°×·×7(¶³NX¢SïÕì6×6›É:îFÚd7h ßf» ˆ5|­Áª‘X“—a³6ÎÓ?žÓÃ'ó2ó¦E¡ªTë¼]'Ãoeÿ©ËîVûì*±Z¬*c•buRk*²'ö¦%Ð ¶7¸zò#º€º‰’J)r%׫-­,~¿_î5¤aΦÎvŸöõSXž,ã¥Yc—XÓÓêo°ã ýfÚB›1Ø(¯•…N`Øhp4yü QF]«®Ñ7H¤ëWÃâŃož™ééË‚6ôJvËå]ò…hÉ)ôÐIl½Ì‚Ã"?¼ãÙ~@|FâÞ«áCÑ÷à(‘4í­mk•¶2$(ÃzŸ6ÐîiÅk25RͲçj¹žì«7YÍŒb>ÛGüCÞái´]чTM”¬"u[+š–YVäÐZS¸ÝB¼Ú¾oß…ô…Þ$ž8.Bxq¤õ@ésUK‹Õ$6¶%kl¯í´÷…]dh¬ûðA–ˆù Š:MÝì¶VB±ìжKc{“{£ä? ÛƒxgóÐ_Oˆ:vt5€sø*0mw9³êñš”š6iRÿ¦ò—;PP‰O O—»ë2cù¸ö×ûUmZ…ÜFJÓ²¾2¸Vp7¨k¥;Ô[ Öu­>×ì·ù„#ƒ‰Qâ(ŠŠ½Uo3Ts·‰mZZeÖQJJ§Q«ÅJe“¼ ˆJÅ)‰cÔ7ê‰ï8¹ï­h¼;ž´ctè.ªËáìñÔõǤ¨Ë‹BÓD¤d£h‚fâÍ)ÅèÛÉÑ$Ü9Ѽ>C'N9ÜNxðÖœ”ÌV­¦HÅΊ-%@Ôª÷¤±'œ’Àdh28>ùì”pó; j ¤‰xΫ‚êk¼ykktWYeC¥‚T¥ÚÇ‹0o¶¼Ò²–6c~˜0olÞ(ºÉŽÉ±O±'Ö¸[aUØTýVm©®T³IñÀ+ •GçÍâäý¯f7‚<ØÚ‚c¨ñ\ÒdòEΘõ8ÒˆÁa·ƒƒ`-“ÉHƒ‰l´5д†VÛAæl9Ÿ‚Z‚Ðj«Úªy;,Ö–hJàu¢pxÙçhÁ_ЋhÁÙª/ŸÜ¹«µ¡D,ÙªM5ÆÝÊUÒ6–éŒjJ…Y¬;6. £þ‘쉋}{"݉Ôÿã«MAïˆ^Œ‘ut³Q*3cG§t%•on6å—jG÷¾ßû~× ‰¿/ÜibL6Q¼¼úÑ×ÿ¡uеx¶TÄC†'zNLÑáVk´µ†fIÛÚúBXI¬ûxÿ;ã·›õaÞ94K±èAݤ×ïØùZýz (~1«9 íísÄ?M]:w êïd;¡ú5Á|§yúž³ß KÈf$é… è¯37âiíÜìÝäÞáîrwwpwÛ5¶¶.èoŒ ¢Ï3“â@<÷wv¾f­rjíÃßÍ Ý•³fCA+Ÿ¢-–—pƒ.›„n{8d |¾p‰XÀç´ nÌñŸÏN=QC‡À‰æîD 26±g°‹ÅÀëá³·ðcqQv‚»‡ßT«VÎùĔޤÃÞamb5ZªBO)ж”8D¹ô$·˜o0ØÔštJ\ü®.F%± h9%³È¹ë¾+ëÊÔë­„Ž¯3Z´”›b%Bw(èœÁëÄ Æët¹g×žk„cx(9£nü ~f›ØvGá ‚׬I•y:#Y<6ÖêQô6‡`3”V××oX_ù< ëJšY¨ÿ‹j¬ÙeªswÂjRAá<èKz’ôo™Ù0çÉ· ¼”Ód6Ó`%ë±ÍêiÓn‡vÂè²É0?š¼¶e0ÙŒ´©œ{\l‘a”ÈÕú0ív±©B'kmÄ4S©e˜fµ­ƒÇ%vÌ2ßHçå ´à²óÈ6ÔJ~Ðg7jL gº4W"üÃá­é-¤ðŠÕÂX ¸;¹ "á>‰ûÝ•‚ñžúº;¹û™•Ùüw%Oššy*™]Ââ«™²ßfÝøÌf'oåžå À•Ãû9ÝRµÆm1¸ _¿}æÄŸ÷+$ô <“ÐõkÐ]ÜÓ¤ð ÅÂX Ð#-Òp÷sOrÄÃÕe­Ûhì.· ]÷"Ï¢*>.^ôÄãÎ΂oƸÜg `` Ù#y4x 0—ÓȃÓ˜yIôLVÿ&¬=‹Õÿ!ŸÆ¡Ç*“qyâöEŠ{[+ô/½‰¡ªñüÝî®è#:UAE­¢¡¹l¬uüë$¾'ûE×÷"¡û»€8õcð ±¾ „ñí& žàû§¼SIï^÷¹·!“ßÐf‘) …PU=©®Tj¼2UÆåʹE÷´“ŒÑfÂê'jÍ܇;¥³Æ÷ÄÝñè^ìð»C˜y¬ÕýPÜÑúa4‚ÔØb–ÞþݳbC…¡2›ÚÁ 6B˪;û|=I7î—ñOOñ)äÛùcÞêÆW ¥k%†zMÔ6AÙpÝø§{w,L‚ÝîÈŽ “›jgÚ´VR[Ö¼º5 §Cý¾>Id_d"¸ŸMyû Ÿøäô«¯ílª¬"gOv5ÙØ#ümÃJ\e¬y^½e6Eá´aõ¦m¸_!Ã>C{›¡ÑØ&QlU”*¶êj”5PM”w•!YþYÏàg‘éku×5[w©j›ï)RM…CIÒ×#‰MǧbÓtt mãÅä%T#j³µËñÈU„ÕñÞžD/)<×DÚIá4ÍØ ~:67!¥hÂxR AeQÓJB/ØÞ½é½K‰ÿœ ‘öY¿xqKS8Œ8>êËU+Jq,³Æ™Â/€>¯3…ƒ:œØ’, ê:§XÖçö²ÇÞ>üÍÇ¿Õkl±J¸Å«‚ÙÒ‘ìmêÙÍñ^åÕ¾H²CþAïÐEt<›i]³™Ögv˜`7TQ»im– rBƒÃéâÇb˜ VÝNµ[Ü­ß‹ì|ªŠ1ÒïéKxÈÀ¾àTp:4•?ï{Þ÷ZÜ|f6És~<Ϭm4¿àÂë09-~2Ä÷1J…¡Å"—<É}­}]Z¥ùö²¾G`ùÝ]“Á½ž.±+è‰B »,–Â]µýZÔ6T)VᨭՅ»÷Gßì“ Ÿeš<™½ùà¡q?ÊŽ²G|?þLÒ¾1|úPÙÈf ¬übX]×\jŽÓ8v©£m~)¼ ²¥ú-ŒÕF]ë]VŸÏåö8ÈðàWoáÞåd\4î]ì?{—¼KÿлUØéw÷ö°Xä-=½ûå—?ê­j*ÓVvJJ¹Í³—oWácœ3ÜŽ~¶ç *»; o¼ûØ‘O/njƒ6o54auûzÙ:Cƒ¦ê“ÀU‘®‡30~>zÂávám^‹Ó$³·)pì.o.Âþ ùZ»%Àq8ì[O¬óHçáÈQ¶×ƒ[/qfÇÞ Wìz½õ‡ -;_"»”@O,DW±!>¼E¨E|"œÙ7=ˆxÐ G=Wv^¾;HøœžçæÔßk–[Ú±$"uÃW&Ðõ1Ó°Úlµ˜I›ÀnqZx]v“ÝHl)]ºUFÇ;¾ü =ƒxÇ^ßÈå?ñø²dצÖíÛßx£|ƒj5cc²˜ XìJÝ%8BŽÁ`ý€œ¾£êéÓ÷| Sÿ;‰o–Íf_£‹2Yl&©Ø´«èX Eýk£•¡è(&ëÏ$Ð/.¦å9ßîAæ!Íáë'©d÷@BPl6Õ½üÖÊß {Ð\´‘Ô~µèÍííM»%S¨¤ÇÐkMBþGð«¡w&±²/öX“ÑL›É]š c”A¹¿2NÛm¸>€VÑÔzÎ*¶ÈõxÚRN³?ë †Éþž±ØþDdK¥D+€:}­¹Aµ¶mÝæ²VE½®¾Ê©$³Åœk—ýMu­­=0}‹ð\†™1Š”fµJkàJ¸Ç9·8=,JÞñ«e.‹‡ÂEÕãr»ƒÒ$‘Ô2ž‡sD·Àq±ãRçÇûÑÓ—ÑJt+Êñx‚Lná¨Ûê6c14Ãüó}j  /¦óP¿ËÊÐYfëâR¯zÏÿËØKUK¥Oíäy™»Ÿpµ¦º´„Åeõv~ÈÑ Q)z ñÐ0 -ü©å~bu[Üf`€†!f™œ}sLJÐóߊj­- 8õ·&T]CéAÒ-vuÛiðíBfo21#ÆtîûèýÓy™gqkš‹¸¦&n¾¾ÄPŠK²ž‡QNö×´ù¾Ëháз±câ®s±Ëðq±åß¹\ò÷sõPãC­Kä/¶ß»~«¼½^Ù» &m}— `„f‡ƒ';ðÿ8úëwŽ'z†C@ íkZm3c‘Ì#¯Ï ,»a.÷?Œbåµendstream endobj 161 0 obj << /Filter /FlateDecode /Length 282 >> stream xœ]‘±n„0 †wž"o€„ô$äån¹¡UÕö 8'† Ç 
}ûÚNéÐá³ô~âú|½\Ó²›ú}[Ã'í&.iÞè±>·@f¢Û’*Û˜y û¯i ÷1WõùuÌ_ß™ 7P,þ6Þ©þh:ÐG¶ …u¦GmcºQ5à#V”æGöT&¦x´,´Sƒ¬„…vêD#ÚèXm À¤zBÚ¢*¬-k;¢Ð(Ÿ¬A”¿)w‹Î¨ð)çºT¸²r"U^å:Tœ„tÜè´ÙIsÏ?/4ÖJŒžóöš™k5x>¼dö<çuÖˬç¼^3s•ë;îInRvr¬À„ç¶QÚuqºÙÇ’èo·yÍ2e˜êƒ¸endstream endobj 162 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 2853 >> stream xœ•Vit×Ö c&K•ZYf|š4+Ó”¶uHB° $!@À Ù’±-[¶¼j³¤ÑܙѾØÖjK–±…q©!$!$Â%éIJKCCÒs²4)ç34t í9ý›³½3ï¾ïÝû}ß}˜dò$ †aS‹W­Z9·hâõþnŒ¿g¯t»Àý8eÜ:¦Kaúdß=Ó·£#·¡ÄLDß*Éðf‹'ÐÑ•Ì䊵µ:M…ÚP¸±h^Ñ6ñ6·pî‚ógþ¦¨hAáòj•N³­¬¦pU™A­ª.3ˆU…/k·iT†ÆS« †Ú…sæ444Ì.«ÖÏÖê*žšˆ2«°AcP®QéUºz•²ðYm¡ð¥²jUáM¼³o>еյ; *]á*­R¥«Ùª2”)UU†²Z^SQ]¦ªÕkª´5s«ôÛtšZƒD"¹ãµÚƦ—Ê×.^´pÉÓówÏÜ"‘,”J–¬—lÌ‘<'™'¹W2C2Sr«$‚Ñ` Æbæ–üBL˜d2v;vrÒy©}ò¢ÉßOñÈÖÈR²+S§L}jê×ø¡¼¥y§§Í™ÆÝò0OåÙq,‹ñüù|´ò»ÜÀ(Àïu™ QÂ+Y4+Å?–Âø'ÎI÷ ùr›ÆÕ¾p“Œ6ÓOöAF² ©‰Pž$Bh]¾@x/šïë%#5iõ;€#݆@~µæÔÒõ*”•¤åЖd9l£ÒÚØ¨i[ n d˜”—ˆ~”8<ÚaS(³³‰íO¡pÊCŠ‹òCc™Þ\˜cïærÿø2uܸ8ÝÕN¹¬¢zC}³ô`êl‰»›•€/Û4»ÞÁÙÖãeÉøþ1ÆIˆ·wÛFZB°*¶Ì/66Ï_ðÜ õh¿A8?ˆ¼D÷PßtCÐÚ[­5gÿf ÝÑçqzlV—½" Ï¿`¬„hôÖû4‘w=¸ ¬ž?>zÓ6Æ)þ‚<8Ë~Êáž0ŸY jª4 æ*ÿÏ*üäa:T;€rµÐ”0ùZiPÇÇm1š1€B¸KÖ`½I(Ïq°’²lkÚ¾RØÍ´±}By0ĉºƒµëÊ Z3YÊÚRãáB¨€ï-pÛÁå± öÚ¥_ç @€óÄD}· e©øç|mýúr,…½{¹"å¥üýòK²Dð†’›È'eI$s';>ò†x´3 n1‘ñÆÍN‹ZpCÌß™I ïßÔ½UóbãCÕ„½ž²è…ÛñgeFÑ%LâÈS²e×ÈÆGšŠ^…S´Mcl?ÈA'ÞÝ6ÖÕìP½1ªÿý{#G†cÄÀñn4ƒñ‰è>ƒ,/ìÆø¥ür_Úí sv—XMü»}Î%-N¥³Ž¤.Ñq;×±þN–èC¿cÜL8ÅvâÚA™hºÝ¸Zxº€v9ZÁ…Û=_ñty‰ChÎÎzð=²48mvÊDS©ÎZË)s!(*ì"üxûÒ,ô‘iÈ0iÑ¡ÓtßD¾3ÐïJœ êtmÀÖ‘5œƒ.`ÀãîiS&ùÁv‘GÒñù¼Cöú<ÂCÖ€™rA{qí¡Ãì°9À¤“ßrðâ@A¤ðP  µ{L^òZ 1SòQ¿°.‰–]ÅÐÊ”mfä§t{”• :mm¢nW8â Žep†¡[—«WWV’f³HR  ã }ú Ê#Ä(p»tUŠÖ¾+Ïè{jjôúšš}&ÓÓ“!òyJà’?¾œÀÞç)å_ß(y%¶À˜ÛÛ);Eßþ´Ê!zÐ sÀŠp¾¨WœøP•D/¦ø9 5£äždp­ ?,Û.ÌX'Ì(+²µˆ„jÇ]¶H¿?½;Ltì6ò6~£ó¡ŸÆ°ác(}BÊwŠ€þ¿‘Œ2áQA*,nyüÈ3_ iHf ÙQ8P5J Zá'ùëÛrçúÃ:Bîýðh‡Øïþâ_Yµ±aæ5RY²®N â‹ lÏ¢û²Ø›¤èT/ïŠA´Ã$Z¦ð„¬ç¯ï}xVíÛÄAµL¸S¸ëAá1aÒ…ÅWOž qtÈêrÝJV5”7‰'˜Òõ{?òOœ`øßŠÅœ—¾<‰ªz¤]ßÈ]Ewxm^Ç¥Ó„ß—ˆûýÁÅ™åça†º“Á7÷@稀•¢i›•xáÅò±…¿Ñ|ðO^v?ç!YØDâ³ÏЯÐ@sð(|PòäraÁBl°*!„[ð“§ålèHwåõJŽ >ÍÂ"a±P¼a=™OL›> stream xœ]‘Mnƒ@ …÷œbnÀOÀò&ÙdѪj{2˜ˆEDÈ¢·ï³CºèâYú0ö<Ûùñ|:§i ùÇ:Ç/ÝÂ8¥aÕûüX£†‹^§”•U¦¸íä1Þú%Ëoýòý³hÀ:>ù½¿iþY¶­*ŸEqô¾ôQ×>]5ëŠBºq”LÓð/Uí—qÿõPŠ«(•¸€•áA\Àƒa-.`m؈ ØkWQ f]ƒºÆk«%> stream xœ…–yTSWÇ_ˆy÷jEGžŠú^[ív¤­Ú±Ú7´Zi[QÖ [@ÈF’C6eG –'*£¸¢¨Ómm¥µ•.Z­ÓE[ÇjoèõçE™žž™sfÎûëmç÷ûþ–ï犨Q”H$±fUØ+aaþ›Ç}“|SÄ(÷·àáh +FcG¹¦<Âá?OÀãoþ‰ ‰&=µX)OÏÊLÌxl•rKb†â±5Jù&Å<¤(JªPfD¨2³²7%lILŽJ“¿ôò¬Ù/RÔ›Ô[T5ЦÖR¥b¨EÔj)A­ fQ«¨HŠ¡&RRjDMR¤¥£î‹TcöˆCÄ–QâQu’@I%=Î >»G¯Ý7F;f辨ð5î~@öYÊÆûÆz‚úðŒEƒ‹†pîP0s­Ï·H*Û ˜Ao›ùmŽÔæZ‚ÊºŽ•á<>0göG‚z¿¾{ºæíH–äƒ$…%•-xðžnïâ° 6«7¿%ãPTeZ‰æ%g®€Ì5ëOëŠ%B蹑”®ÝÇyD{¿Åï]ãzß)ZBÏE„ƒ:à"¡xü|<a Âã†\øQè˜#ð& eÉ3Ä.En«¹ÑIÇÙÃ=žÝ|¢^uk’WæˆAQ”ueB¼2uãöMè/è­#_AËNTiÙ ÇÝ[=¢û0žq>âb0sùðˆèó¿‹¾<"ÚƒB¤µ"ȼ›’îà¹ÏªµÖ¡Zx©bkZƒ©@e½º©¥¦¡áo [V®ŽŽÖ°Ìe+ˆ} øò ÊÖ…÷ñ¸›êÇqÏÿ€eÁÌþáùR¨@•¶J»¼&ÇÜ0ZÆÐ{% 1Å”Â2w4[ádâAR[óŒQ–à°þ'À£s]Ze¶-eÀ¢Nܘ\P†5]““¶É,Cp¶Óµ;Y<…¾€.™»Ã«eŽÍŽ-°“–ãl˜0-Уäñªœ 8”܉Ÿèf>Åó|÷¤ó ý4«Ì¥ïH)Rw¤V§7o>šß‹xÔ^UßÖÐn߃ºaw2@6£6-ˆŒÚ~ê8‹«p`ñ$ ¯¢¨Õ÷È~§dâÃuøìÁ:”‚æn‹‚Û¡EÂÉ›€€óIß÷4½çam³ñse¾ÈÓì9æ ö2³PÂ|&=(–#óÀ«þ–ôŽë¸ŽµT£T …¯ü×RÙ¾¼Èë[,•Uj0Woíy…ŸµwÿZˆ«À‘_êc—³KqWHSs÷)y°°xf‰Dêkó³¶ ŒN¾<ë£ÄxòQiµv—¦FwXÕ_Øç÷ù›_¸ñ(X Ð?"/ÍjÎsªíê’ðê GFIfùÆzY:ÀýåÅž ètuI% ô/d5‚U¶òRË€ËRV˜­X¥/fóvnß©.ßZ_/4NLDádôÒ#ËÞWrMÚfc³ñb^H‡¡ËÔš»[s;ßúÉáCoØ3k)™ x"ewNKkcC›7³JY2’˜€u)ïSȸre6^KÞºxk/ž+ŇfÓÃeÀ‘–•!á>ž.@ùÖá0q+YáÜÃ]Á< ðÅ‚ sižª0S­eK›I¦>CB¶g‡uf#*„Ý•RæÞå£$÷¨óV{}ã<˜ùÆ›ôþÏñ> stream xœ]1ƒ E{NÁ @”Ρ1MŠd2I.€°8"ƒXäöUS¤ø;ó`?»6\/×à3e´˜dê|° ÖeK蓤Ôz“Âjf n:¾?hi·ó]ÏÀž‚K> stream xœ]kLSgÆßÓrÎy!ÚcÁ(iÕŘ̙”é6M¼ÄéºÁ–v—®ZZ@K/á¨z´ÿrñ¸a³i¬àz&×.1» SÃP c`3cLÔ˜-nh ïÁW£5øÉçÛó|y~ù1(E‡†I-ÜnÍ·¼i±`mœäjüÌ_+Ëþ°Ûz¶ÁøðÄùÉóãCS#p“WhÎEš²’¦½Gy{¬&®žŒ©gª;œ¦9O¿F{ £„%uÓˆ>[˜&¯iã㑟_¸ìÚ´£ìí<ÑüIýî8ÃI â)N¸{c¸àÕ¼òÜÜ"çJÌÝGOD!Ž8s”ù]Mƒ{ÇÄè‡~¥72Ñ5ö\ƒ±/.XÕšHÕñêf,L_Vÿî&úÅÿÐ̳–t¡“¢}1ï÷j4ÖuÊwÚõ‚0<@$4‡×@fo¿u# øYo$WÖs¶½’½V Ã!S)aùh ;Ã]m`üaC"àgÓæë$5ASµ]üwE4Iœޏ×~üþ¢\š¶–f®¦¼,5 
Ÿ=fî’Nò„¥Oø¹coŸÆu“Œ;ª×0y¯4A¢ÿg H:I1ÆcêißIG¥Û³²„Í»O摬›$‡àU¨¡È*ÚËÍ$Ÿï’:å® nç®ÁŸ±Ñß•d  7ñ!Ej nä|¾:¿l?bk°A1ì9öi»Ô$7K€ƒœ0I0½ÊÃ×Mòq 7sPÚ”ozçÚ‡?ìX¾us9vlï©6ÏA· x<³«“²:of }džöŸ±”ÿ •_£øñQ¾r;Xt@`¡ªô/ôýÒÌ’BN¨lLÊóÂðç¶Ö^³6ò\ÛAÿ¡Ú:ÑTä(|8Ô¦oÛœžŠÐ3u¬{(endstream endobj 167 0 obj << /Filter /FlateDecode /Length 376 >> stream xœ]’1nÂ@E{ŸÂ7`mÌL"¡iHC‘(Jrc¯#Ë@‘Ûçÿ¤HñGzàÕÌÛÕnÿ²ŸÆK¹z_NÝg¾”Ã8õK>Ÿ®K—ËCþ§¢ªË~ì.wRíŽí\¬v¯íüõ3çäáÆoí1¯>êªÒOÕíPwêóyn»¼´Ów.¶)Åv¢ÈSÿï¯õævâ0Ü?­»PRBö¡{b˜MJJ¨ÀM(À ñ)à± ØѤQ£†4iÔ¨a£MJJ¨ÅÖÐÄÔÈØÈêP€5q pMÄ9ÓYÓYLdšÊ8•Y(@#z(@'b^ÓÌÆ™í9à3ñ ð@Äø&£‚a|“‚QÁpO¦»2Þ• ¡ñ[Ç“0)¡!çt :ä\‚NA‡œKÐ)ès :r.A§ CÎ%ètȹ‚9— SÐ!çt :ÞÇõF¨\—Ç^ps¸ƒ•+»ë²äé¢EÕ"rÿÆ)ÿíò|šyªDŠ_¿9Ãendstream endobj 168 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 4591 >> stream xœ•X tSUº>mÍÙyô[DÏFPæŠ:*Š@+¯B‘ÒRh›WiÒ¦m’æì46m’>“¾Ò¤)-¥¶ ¢<ëtäê5Â0#:¢âìxO×õî$UgÐ5³î:§YIÏ>ÿþßÿýÿ¿ˆÛ‰„„„i›Ÿß”¶ôñ¥K£?æGæ&FîI‚\Öÿ<òÍnœž§ßV{Ï]Ó“QþlÄÌüû,"1!á®+%¢ÂRinñ}i’½¹Åâû6ID{Ä·ü“ ˆ+Ē¢ÕÅ%Ò5¥²µò=eÙëËsÒ{s÷íÏËß"ŠîY¼äá¹1Ÿx‰Ø@l$6 ‰ÍÄ"ƒxˆØJl#™ÄsÄvb%±„ØA¬"&v«‰ç‰ˆ5Ä‹ÄZb±žH#Ò q1ƒ˜™p;±ÛD¢5aiBcâ’Ä·’v']ºMsÛG¼ Þ$i!#àcJ1­jÚ×·§ÿlÆÏ*¦Ë§_¹#ïŽ+32gæÏJšU2ëÒìšÙ—“ûé,úUú ÍÝyíÛ„ì;‰ß&Íí ¬a”ŽdÊúûøà¸÷)ù¶‰âO¿Ûá±»Ù±»`}#œƒl¤ZÕvM{zj¬N^'«Ñ9´PK‰·Y$LêK[+=J¡Mß´e<‹6¯Bºc»–B£T¯XË=µžûí:î·eÛRK^̩ȰP(•óÑLôšÉ}À›ñmbú|ÂTÉ"KɈzwõM¤ÐC‡"Ïñ ZAOÀ¸‹å:@v‘e;CK =tÆÆ»IÒÒs&Ë€°ÄvEëÁøØåë'·¿Äp ¯Ð$fäd|ý¹øú3xýPXÄ…¶~å¶ôU„ŠG¶y^†éð™‚Òõ–ë©ûLQh†Ð„"÷z>F¯]IB¶È&>Tó‡Ü|Š»Íz D¡iWQ âϻɥ2ÜÎÁ‡utÁׇ»;<úbäÑû·˜Ós_a E99pÜøºú³zÌ^*n;· Ö"sCÉ(q¯¦Ðc}Që›@¨Ç´—ƒ‰e'CWbkÆâÖWF­Ã—ØûY+zÚŸ{NvíÄû݉’~…HŽ¿)CžÍž(ÌéùÆ'¶ýüAHm_96Ê¢ß趸¡‡:1>òÖç£xÊõƒUZêû•\A¡?b" Ïð­ÜZÞþRSÆ”*qÇVž‰©"ÂbÛ 9 Í{¼±-¼Îá’ç-àfq³o>ˆ¦½Ð9ÚÅr!t¿Àßuýñ|[ƒßéƒgàé ué·À—VíÚW]m®6«!¥#Ÿw¼ò;,½ÉŒ¯¨³.íâ÷dà hÁ5”Jùæ·ÇØ«Rz Ø[ §À¯R[¡š)ˆÜ#ÈÃ’êÚsw½¼åióHq÷’EÊÚù>axÐÜVÒ*µ ¡˜Ò†P›P  Ø",“ÊKrÍzÈ>ŸÕÏ Tò=xÍÜû+O=Ï‘G…H1’c¸$´üã'¾³f<…˜Š_0hzùôb§ ¼eã!’¤ /a¯õ„ª,*H $ ʧÀ¥ÁÏÿÌTWë•PIIÚí­mÃ{|û6mغMÍЖ›Û¼‘Ç4ÈÂÎäo uHx*…þëßúø½âÁªaH}p½ÝÆÖÙÃ-[¸;õp‚:õÏ«gŽŠVÕ2Z«Úë±740ô §Åé¼û*¨3ÔjÊ5%Ø]f²ÒQU_n¯´UÖT@jÕ²y¬Øö·d܆~v/¤V§É6?»õ‹JÆTñªJKµ†åfh´ ª¶U7¸\ Nãt:kNG휨‹R£5=ŒÒäÉ(aBB÷SèOÑ#‘Iþª|îg   o^áH(¨4K:²V†Ôû®\`û…VY«`µ[½bC†òÔQùÐCaî!”¦h­½F½½1XŒ_ðx6 w AC;‰1§·ê žÚW`(P*­°sØ‘0PÁªhòÅÎC,ò¢qÎ ²ó{Iô)©†ÊØScÀi­…N6R…}n·Ö3aîHtot—¾}ËUq㨠õÙ™@˜Œ'xuOdÆA'¶›ŠSÛDŒÚ< †ÌIVC5ÅmPd7v¼­ƒ‡ápSÏ:ìH{ƒõ‰Tô‹z^¸ ˆ%ö–Eà–¨yôDôÍ;YîðdÖÓËòv¹Žs Ä…—ñtvË"‹‚6y2íóGVòéãàMè( ¾ðêâC‹_]ì¬rTµÀfèô8½ƒ×Gþ>|½ýXçÁ7!Eû€>žcâ­ i_¦‡ß>K›ÒVJ”VidÒí²ÌÒíºr]¹F±ïÊÞ÷~lª5ÖÁ:ÊlmAkëwBÒL¼‡¢î˜·ƒ$hcDUè8J@Ë®¤Ða´ç±èè7f±“)VXPA­+]¼,{Wk¸„ uù{YZ,툊åûù·¬ñ?]}ýª—©ñ@|Q‘L0°1N’nÆû…×ÇxüÛÅ1Ûà >lw¼m‚®5î_Üÿ°]m¯òš)釿³Ù÷FªÿDÛ ÿ \½NOèEÿŠ®|©N…£Ôñ2O ~Ò0ðÅàçƒ7¼ƒþÁã sO€.KWe+Dš’ÒÍ¥›ä›LZ£Ö¬¡,¤¡CÙ¡m/¾€¯óæZCÔ-Ýa L¼_’»l<Œ‚ô›öÛtv-Ô™dÊb¬jo%¾teš2C–¢÷Wûµ>é9éÛEgMµ¦¨;púC8ÊT`C&§JV@2ŒrºÐ¢@òȇH<ñûP ­DwGîå[AocÀ…ËÑïF3Ÿf¥Sòªnöc7 ÏŸ8Ûy¢‹µ’{ä{õàqæ´p(2ì Œ˜òžÂuÂênêó^ûMÆÂŠ'K=…ÁÞV_W›ª­ØÁ }ÜbþC€>¶zkÆó¯¼:bdìâ¨ÎíúÈAÅÑ£Æy1ü9bøóaضÀ6K“®EãQ¸+Ï=“ÚXÖ'€8„¢ŠòÌMYibÜda¶ôØx’tÖ›˜,1Žv]·¯¥Åãõ4ºñ_­ËUç¬wik«ë4XŽSëÒâtR*4JË”\/¯c GcÔ  Ê­Šº2—²UÙ”s8ïõ¬aK¹Ú(+€u=oÓëg†Ç&ðö #ßnïq’vlÁØ÷€&sf (Ú'VÈ´U¹¡Éa¶m¼˜ªuUEèm®óX§ÞÅ_2Ř­™ª•' ÄqÔw%©/JOSÕaòS´¼u0kDr×ïˆ¼ŽøhÚ"DpüÍÛe{÷²è\À'M|S–.K’'Îß#Ë‚Ôî'ON•ñhxNM ŸF·…8j9¦8‹.ZÇÝ{0-F¾qbÒè?b_0Z'»AŽÀ¯VÅëäè‰ÕIͯ•÷swHD:!BêiR¦pô°‘~phÀfNÄ(-Ú ìDÞ½…Rú`‹®i{fê¡¥CK-u•¥Ö˜¬&h¤LÀ¢7êLúʼTi¦èù5°V9•Î¦ÚÆzØEuÊ›¥ÅåriÆqÉð#ï0Ýg|5òõØ—sšù^;ÿƒd›xÏý@C=^™¼LU¦S°«¸_k:…V1GÓ¡êPw,Gi©¦:cmœƒ|±4‰KXgâ-Æú'LÌŵD¬0ÚݤÐ#I‘ù|¹V]唤­¬7àíö½·Ì›ngT$w×¼…Ü]ÜÌ› ýîÅþ‹¶¦ÖéÂÂ]8cËl²rS¼ûI&¤ÈüìK\îE$¢ƒæÊŽBÒù¾X|Ãö&£_ôE^ÿ(…‹£!Þ´ôõöáˆA=;Y^ܨÏbè2ŒÛÑ 6Þ?Hº,Úç†Á+›êÞqYë¡‹µF¬Ny½¼N>g‚[ó>—v‰[CÑe¥CÖbiŸÓó{´îÑ{mÏÓáðY'GS¡ÎªÁ{n¾¾€¡GUJkõÜø.ã¸ÙüenP@µÁ P¥¾È=¾š{l-·\_¡©0VÈGË”Î@woFwãÏêÎT½GW_ ©¸£­¡/˜p´û“N`¤¨¸'y!ÒþvÃ…ký5íSëGäƒÜ틹YK¸éN 5”ºÞPÏ ÿ%›QF¤aµî©ÔR³J!µ gÇt|ÿ MwÔÖÖAåQÖh˜[Ò ÷¬}ãIèèŸÀ`—yËeƒýý»|Ûpó7“ã-âøÜÌ/ 
Çû‡úÙõ-Á¶ˆD¶#lMšù^ßõüiâ@[½ßუðµ’Îl›Áf´ê)N2—¿¼mÅÖÓ—TŒ¹ÑÒhnœ2¸:Œ²»Ñ£t{tÌ9Ezœµî£•§ö°ûû·7cœpÓ8âWÜ=ùnzàýËGÆ}¬ÕfµaªÓÕjËL•†)Þ³½hÜ 3Ec²C¦Q8BY»ùõ'ëÏžîivµ8Záy8’Q¿Òª‚ªUtïUµSLFKÑíÜŠNÃ蚆½¾¼àCî2™'2 ˜ª©¡hª •^ŠMQB-µ±è÷hz˜›y \hýø}W½Ùäb*µZ¬ è¡’VyWOs{ÇØVöÃ+Ö,`âE‡Ë ¢ç/L‘2è–'÷œ.ù½„Ç…ÏÐ~þnÍÎl¸“š–K/}„–tÞD³¹9­2ËÈœâP?ëV·£ÕUc‡õ:§©S4 uùÚñ¿öTËÛY“]ï€Ê{Ú×ËèuÆ(0UÍ-OKXÒ&ÚŸS^VʘÌFƒâ*i„šZ¢¿òˆ%u‚¹O<²m±À-h/fmF‡·¥µrC¥IeVA%Ìõ–u (Áý|úk›¥Æv÷›§Ú3äûðÏú†(=€9;…MAÕ~c\vi/`Ño ŽßN¶\xÿ\hëš â-k‹Š|匢ÍÔnê¤þe”Å0Ä ß8*Ä ßØ¡([@ože'±&:܈j©=BK£'b¬û÷Cíg¯2 Û°H`Ȫ HlmÜ7À•ââ ôI*†ÇÏgÇ5[h”(÷ÉãðÆç”Þ}ËuNÍ)B¼A+tƒ¤…'bSs×ô±ð€,EU|‘±¼S…~Io_{wC‡»K†2™,¦¹ñÖËŽ \}®^K:õÍr¾œ“ãÌw³] ÷‡ûNÙF!uì X ØÉcÌeÖàa™“‹‘é¡v[sˆ< ßÓúsåÛòlùxä’ÇF®ê+P¶%Û2Ú÷o½£»Ó6Þøë™©Ê¨{ù €MnG½«ÉÝèir×+¼•îÊ`Îét<VUéStü¶w¿y8NÇéP~åTîØž±¼cÍ®ÆÆF÷Øà‰± ц,ßÄû ¹·vÀÝ Q 3E’RVX,*”P ø!߬ŠÁÀPTo-™ì¤ù;½sæ<¦šü~hü =‹wÀC¿¤ØÖÏ~“~J}Ëá‰8z§ÐÃ?y~"ÆãP“E¡và{¨­ãGþ볓M;Ó®ò{¨ÅÖÇ¡&‹:møÿ½^DâèyK€½–pe°dd[cô¼åYQIN[ÍÅUí`|.¯4ýûã—ï²S™%:R£íŸ­ÀžC‰?"³X2ÄÝSuÏ‘p9ÚúXt­½Þª|–ëž²þÖµcáR )²÷³àæi*M> stream xœÍ[K䯑ÞsŸìÞVêf–¡¦˜ïL‹…0,£ƒí|öÀ®bOSSÉÒxüë¯ä£†ÝjÉÞ…1‡&‹™‘™‘_|™óý¦*Õ¦Âòww¼©6ïn¾¿QôëFþ쎛7w7_üY¹M*“×~s÷pÃ=ÔFi[ÆJoBåK¥ÓæîxóMqÜêXV*úârÚ§ÃÇí­©ª²ò®y©LñÔïëûöÐmÓ¹ýß»?Â0ÆÏ‡Q¡T0pµ¹ÛßÕöî;h¡Õ¬…-STZZ|S|¹½­ÊbŒ~Eœ.ƒ.Š8Çâë ð”’ÎâÎ]ž®/êí­NÆ E¿½Uð[P±hOï ~I®­*S²Éhh-´ŽÅž+‹ýHD E}Ú¯­8ùÒǤþmVœ{Y'¿Ûb¨Û­$ÅT4ßã4*g’Ò°í5|[™Í­¶®ÔVon•*“³á'-2V•RúG©×V©LiB YÞ‡Çæk±öE¡½1Õ¶Ù—Û[ Œ¯‹¯ÎðžŒ*>ÐS ±h~Øj_&LÑtŸ£‡Æ¦Ýö,»æg0’âtò`V<@^><žùµJs³¶Êñ!çî#+WY¯‹ÿ]“;êFyzÑ,ÁùÆâ<}¡ùWh–ä›)ç;ÆÛu«xqt²?N±¶”£ v×àRÀéqÅçS“_½Ç5k #ó¦NhëÒ¾À¸d9¨7Ð"8HÑ´¸"1—…(oKF#ø ' î“·]n°.ò1·ý¬ë?Æ™/ž&_ܯ iRtLYÌiÀ£­Y³*íd£´/õÃÐtyá°skc¸PeÌb úLö™1Œ]YW¥fòÒ^TÛ­1¶ô:{¤7×koðÆÐ¿906îçþõdÜдÇÝMVlÞqmmès‹(K/#lejP¨H`¨Þ‡ö=Y³N`#ð!‚J]*Àr¬b¿êÏ2 Ä­PjtÅg§úgˆ¦Ø³õ+§ Ú꣋íá;ÅâÐ À­!µŽõ0ªtåö×7 pÉZuåo.°'Ú T`a„ûÿ ŽÞëw-zü ¾¨‚+è‹!£ØŸó³fÀÐÎ(¼Ÿû)J‚ÆÒøå"HÀòîÜž&IˆIüEu‹{kU ×¯kžµF\äqUÔzT‰N8W¼CÍÚà …`ðÃÔŸq0÷o÷£h0Ò>‹ŠÅ/æ;N¸úËÙ/¨Ç»ß|#k™²D•Dj; Ê7Ï`Ù"ºÒVNHüË™âÚ“&ËyCñÇA÷Ë™Æ"éöQBcEfže¹ŠÆ]Ø O !¥ÑL¡Éc=äo2qžnZ zšfÐmG½w$=Ó4EnÉz¹›Ol 41ÑâÿLníº5ªñm3üެ»~¨»!¿:ð€¼B&X˜ÕžG“{IìO¼ð‰×ÂhÔ;â´`ÍïÖ`[³Ri O¸4{ÖB»V*7F”VŽÿª9í™YÌtƒ^übbˆÌ6MË2‚ê¡Ó;æp¶êšvzCæ·Š†fÀ¨‚’Y(&h&.h¨IZAë?® ß’€f\Š?KUÈ<‚"¬`òªYO´œçÔl†`6D MÑ5»¦ýlÄš,΋«{¶>˜0hùQ˜4‰jŠÀ>dÚ6N.žòÎâ;ð„k¶F+1Š™× P*šà0Zö䧸2H]†öØôÒPK¶ZúýØVk/+c3ÿqnÿÉî-Ä]íÞ·y<èAIû¸Èq“ÂÂŽæÙÑb&ðdªè_=­õ(ùÛí8“£3e Öç4îË-@b öýJŽ@n?IÒL6$ð. Šbe+SG›ôþEïSÞèøj­Y²ÙÌg)¾Wf$¶·Ú—M×¾b˜«`× Øtýž×ùì‘ÜD¦G§± ÎËœ(Ënw¾È' mØQ’(£‡z7dI˜ãÕ³·E€³*Q¢L!Z€TJ—ò  ~ƒèt€•4YšÂYŠ»’ÇåIïÛÿj~¢eFh4‹iЍê¹Û7]ÿ<[½“iTÏC}4Lñ#aÓ÷8l †2´ÌÕ©!„Ú%WoÏ ">¢dí L­!QGn…XW9Ö¥¼T›<±ðc"1®‚¶÷2A ý³¸º°mSyÈý³¾%ÓÇók>îÁmU•VÐf-Ïšûøè0_û û èñ9:,0ho}ÞÌÄrú¡k˜^sàTl£Æ$ 5-ñ+Eöºožx3ªà!;£tÝRf äTŠúØârZË? 
(œòˆá#ZÖk®L7xýs¹Žõžô—sžh ÷ÉÏBÇp=àÂHö§ÔÇ'.¹P}…ý«´ßB°WþòX³ÕÃxé}PSè[Ä€/È^¯úÿŸ[Ü+#ç°ç¿ù1žèU*ju§ úò8¾Ÿñ'Ehü«c¯ÜÏä ×>ö’.?)£^U—tiM®0~[(ýiH‹C£BÀ3Áxúª«Çyá†DÄ Qz¤ùîD–lîq³Æ.7uW)b¤p,Ô§1Ér'ÜÚ*LìŸ.Cž æ ô£oA_%–Ü®<»9<1éÀ¹äÊK>‹AëÁÇ`8£Ø»esZ‰ÂÁͺf^c`€Öž‹CƒYz˵œ!Î7öí>¿øe‰RTFâ°ï÷{ Ù¦`3„9˜Å—‚z€Ä¾Â ( h9M_Q)u|U I‰ÈÊù¾oº™¬é˾jŒó–©ÝÐ0äÔ$gÉ‘"S†eªä˜ é­^ªvòFù¼ñÁBkO³ŠS×›YkòÂ|Y[ËçóaXÉ$XaeÿÔt5—5¨xvAa mVÓΑ¥(Ô}îŠ7Mw:_‡âÀê‚Y˜xø'Îô|à/r¹`í¯Ë¤ XãÃ,ýí‡-–C=κkëûCS®Í`¢BÿDa,¹ï^ªp­‘0†}3+ hfj½´«ÌT@Œ&²ÍОêNà|ì7g3ȱ}Ð×u›Ñ.2¸Òa`ZàB"áÒ+Èd»Ë¡¨$J§íͭ•‘4³ÃÎ~õH&âí SúâøTÕs=>Ÿ1æ÷qhk‘Ó* ³$PLÇkÅ:F©ë‚A›8Î T`^¾ôü •G>«t íÁ.A€F~F ÉÁ@û Yfp/‚™án~>䩨~á´nÓ<‚^6\ɳæÅG&\š d+-òGy•Ê@ë))(îë¾Ùço’d»Pº„ÉlUW¸6Rt[|%‡•BÒ)©¤ÔˉßÕÐО3“Õ"q|¼t÷ØÎ²–‰å”cù>u]~FTœZŸeöQódžÑ©ˆÉKÃs!ZZ6¸ßÒñ7›ã*¾@«OA>ÊP¯ÍK _½Ä«r÷Ñ5*Äž4Y?³ú &ð]Ó:[PE{xF£à‰Ñévå°!{S¥i&«Æ/ÂN¶ÿÛ%ê| ‚§s\- %IaÕƒtí0¾ZòÙü\ç3¼Ç¡ÔU1ý OiÁî.§xrtäúú0&¬-ò†y£ºPb‹<®päo:—7"û„Hb›Y‘DAm3“޼‰”2aGç—ÙØçõB€¯~ ÐLÁÛS‰%uMâßJž`Á…ÑŠflÊæ°î­x¾!1[+Í-—'2ÓЮñ hî[Y>~ŒÏ¹`}®â¾òr˜¡Úùý¸ò±ºˆ3ñ–ü*ZªTn…Ϧ|G Ì*¹:J W>scì™o† îŸ@™õ>Kµ’tÉ(nó8k åŒÆpZ -‡£Áç—³”qcQ`âTJ]\xyjwŒBŸ@Ý”Aå›;’ouͯæ¹Únü4ÞýÁ·Êp)J¾€ i”ä“×@yG,嬧ÃG²C—‘ðÆUÿßAÒY~`2Cœ-H éiD‘~jf^[¤ÍÝg>ŒÂ)}8¯Ö#”)C®Aj0´G +—°`»ü ³†ù²—\Ïχ?Š¥±æ mh¥Ävâ‚ÆL4íç3>ããÿŒHÍ t¥†Ͳt¼¶pÈT©šè(_ÒÂÛD™ÉiÌ7° €I÷á2&K5i:#Ï«ÿzQ”ü[{¼`ú9ã]%Ì·¹˜.¹_+Þ$Uz¥ÂXŸúúmΖt_®ÙZ3<ÊŽ£•(Yr°Ïy ™Ïì:Úe7BâãÔ|¸Øt¼TÿŽà$°M­<Ð鹑·ÇöïMŸÛæ|J:¾ÚjyR“ÙL;ø‡aJø†‚ÿôØ~å‹|=ßmdK\×¢+../îÙ=jDú>f‰Ëäø2’¤¢xƒ5B4çD܈€Íqœ<úBùŒFïµ=d¹xÒ·]Í–ë8ã‚Èð4á}Óá}ÛÏçáNò;bvC‹Y_‡¿q½€|ç(b¥œ1Ÿ–fQmÁÌ$;”Ð}äötÕvZD+.Ðó³pè,ùãʆî’å´.Í‹Žþ8žŸe||8ImÎŽ{?›Þúª€ ÃtÞÉÍ1äÂ$=Ýq¤Ëc|éðCBPÐÑ|â‹^b‰3ïó….–1?‰±©×9? ² 'ôr•Ž«c|wiwîºf7`ò#TÑl@T.JgZ¡´Îo 8·v~Ô~}{0X6·ó†wtcÐAÅRW×%‚¹ÙÀŽ>±0/•o!¹q€`Ç7å[ ô)âÖ?Ú];Œ6ýQ>†ñ¢Î”œŠ¼9PAδç1©]>•0Y&¹2Þ±ÞË–P¾~‹Ï~¼™±rˆ™ˆ\Qå+|yõxîs¹–.Cì›C®À/œ1c[°¶ƒ=ÀÝ?iÏ9[úÝÝÍŸpCÝæ]ƒý6nªÍï!µFèrlÛæx£=X=lšüp¸ù˳—óuÚÀJC¥Ö.ç+ BÞ9¾œ?ž;›€ö$Ú~VwÂqºFF5ÔD7;é2Ýì4^‡€ýB˜Š(¹~ð‰FYÈúËaa¸ ùÛlPºg@B&0®©~²LIø©[nF[ÓtOèʇ^€»™–˜×;‘Jëmå&àøþ÷qL°äÖ­ãÜÒè•aãP‹ãÉÞÝ¿Å#1¼˜3¿3ß [‚—ÞÂÞªñH 8„Öâo¤c¼²:º+ ©M1¶äb~”›³ŽÓ«®yh=ö²> stream xœ]’Anƒ0E÷œ‚à`yœHÑlÚM­ª¶c"1ˆEoß??I]|KÆã'{š—Óë©L[Ý|¬súÊ[=NeXóu¾­)×}>O¥Úµõ0¥íA\Ó¥[ªæå­[¾–\£ w~ï.¹ùlç§Ý}Sš‡|]º”×®œsutNã¨U.ÿ_íþ¾£¥íNç°÷Ê÷†e€Ã^`ô8ÆâV Úx¶òÖÊ·Ê[C¯ Ðe€ÁP”ŠaT aäiåÍÊÃÈÓÊ›•‡‘§•§Õ  pJ“ ( ,VPX¬X (”“ %Å$FB+1+‚PCLC’2Àdˆ®ÂÎÂΣ2@<Ì1âb"/'ÚåDœyn´scVƵCâ+>ŸËÔFã9 uº­k.ç‡óac1•ü7b˼خ©~gé«~endstream endobj 171 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 3089 >> stream xœ­VyTSç¶?!QQ8ž‡ã ¾úZµA®U묭RëŒÖyˆÌS2K˜6 2(2 "BE¥Vm­ÜÚZ½XµWÅ«V­…îàǺëÀõÝ·Öû÷­“uVÎ:çÛ{¿a[@™›Q@´~ËNGÓ¿÷3Æ™fÆYB šážáT ˜ „ æù3'fÚ`§5*&¡ÏdJ(Ä'kÖ‡GÄÉýävn‹|ù›£ã²eKØ-rpXf·6T* ôõ³Ûâ-†zËù‡;—pß@©éq2UéEy99ååœZ ê²òï3Ù: QÜù×Bë’‹%A ~Þ4ÓçRä\rizSõ©‡h–ç¢ærR€èÈ+—¼k!_‘©‚äCœ2+%5K)­ñD ‰•ÔÙ% 4üx´„yÔY“q#‰/w»Â€-›ý¸g É`ËyÐÃX\r¯ý‘«ÙþMœ2#Ñ‘e15ÕÚºV·®"Ö E&sÌ ™òr.Ò8½'¦€2U••¡âÂì7%š1~étKÆÒ„êpÞ†§;µW6šÇóØÃ8ï¾áÐo É ? 
I=ÈbƒH¯ÝËÝ/x Ð çk Ÿfúà|£Õ–¦©3SUi*.`Ž#¤À>ð­“·„~½ÐL[ ¯€zf[|°øÒë_îB+´üãÜí¦¤s{k¸/kvÀV¤ç笇çëìûÎ]è<ÙÒ¡‘\=DsX¼ûÓ}ó$VX7j%œ8dskè³^[æ)~fÜÊ;“‘®ÉØ/!úw6jÈéQWA4©Nd›€ž•×*a^çO² 2]xHtDd„>²¾Æ kàF# Ú(ìâgÛX2çÌ9¢}³1ç‚Ú8«j‹é¬€3«á›ïŽf×QjúÙ2gÑ _°ÿ¶uSÎ9õq¨†óïê]‹›Åütñû«E»wr$éÿåÛ=<žÙu4ºˆá!Ô‡4'iƒ*vÀðÜ/Múum‘¬7¯ð-4swPLf›¶ÉœÝ(/%èÃÁÁ×õ6݃èùdÌ!L;ô»Ò¯¦¡Èñ±"k.ré¶©øTu\u@𠲕\嵞–K@?9ýɲåûVìÚ"!»HB*ˆšÆ Å¢\g!e†¦{¦$¦§}Ѧþ¨Œ®ù‰·ïœJ´|ýÁ¦Í?þ—¬~cË Oã {ïø¥áúžÓÅß_½mu>¶¶¾B_Ûí‰\ýÙÞ’f O}혬Úé) u TʲӲ£Té–ž )43¢P䃖ëÝkÙ8ÌÜæéµF{9@Ò”Y¯‡“tsDEXTp’§ÃÓÏQ€VOžüÎñ©áÞæÛïéÆ D³ßp³mûmº_ºÖcì#Þ\ÆÝèÎâøe¯åê•àÏáqEFczß1'ýø¤ä°2/=[ÊLILêþgð÷|7]šFÅãA§@Z‚„œÇAÚÑ|µæp.WTÚÒs΃Á½ ®ô€ÚöÑÌ[€mQÞQ~"ÝyÚv^ˆ¿¬ÉÖdݨ5TUÅ‚’ÒÝ_ŸææO¤ŽŒd´`ãVö?iãÑ|Œ~›Û2#8ï²Ub<Œ“›ÏUµgê¹D_ð©JÔ—Öi½âùÕR2…Ì ,‰”0oIÒŸ>Eá³ðë˜EωcF6½1®4Ž'·ØÍz%ãX\Ü2úvëǥ°¥ž+;{a?¸Â¾xÿÐmî™ò1†uÆk}‚‹†gBc„qˆ=RÌwM²DÄ T%Yå´LD|, "Mé‘«%Åù‡oC £¯H‘°‚¢ƒÉŒ "ÿâqóèì„3…Ü91|­ùº '·òèÃŽïh“ƒ‡îꌉ6‡6Ö™„ûJÙV£›EcDYddD\×:âš“˜“V Ó A“—£¥[‰[UVU[[UUËñý×oPË£×-ïÀêy·-ó—ÝY÷º¬ ¥¿½ÖþÓÏßïY¿ÓÏu[§Odo´œ=½ôÀ¢o>ød­}œÚ£Ý+Jn‘âiæÍްÅ.s§ÏþÝy-žõÿÙ¬8#mçbu …_´Ñx³8ieMlW(l¶vÍže‹×שáû]c•4ܯ4Î1¹iý›t'ÄŸØ`19ôžûŽøÝê\GA­¶>êXtBhªŸsÏdù.;ƒyQOÄ?,Úµ?ÎËóñ{ÚXÿ}šsÌŸß©öÊK4™¼ŠMw¹×Ýpz K×r¼ûdItBc Ö£88w3¸ÓKç;™Z%Ù­Ã5C>ÄC«·e^ã.£Õÿ’y;"d!V~±6=½’NÏËÊ+ºqéZÕÍÝq‘‘Úˆæ<5ääpù9Eê‚|þì,Ãimhß&¸8ü‘px=Þa•CEE,$IFž‹’b!&¦ŽIŒÏÈ—ìѱ7‰’‘ˆÇÞ5Éßxã©Ç›~ä§­Æ¿ ÏfŽYdKÝ<å²c…ñ\Di\~v;jX¸­À‰»ÐbNZ|ŸÈ2²²2” *“ü6÷âD dõò)™Ll%b™‘›u¸ðñÈ\æÎ¡Y1 á¾ÉDÓövA›-Û„X@4lÛü· ^´‘YÿLóØ.Õá™6ì¬-Ú„Æ0Þ4ê}0iÀFÑQ(‰Ø$Ž4’FqÒ;X‘%AlE”åþW¢ñ|"§áT–Ìò}ñÏó}q–È$\ýðT½ µ ø¹Ú fáN怛îÏ—•zÃ6Xéc¼!s%,‡5¹Ž§VŸþäç“ðþ¦ëxÖp=ï6ßí‰?ù™õƒí†¸çŠ+0—á{è-è)Çñ·Ž zãÊçºÀjØ k`£â‹Ä;C=F» ·ÔwðèFÜb×ð¶ ÔrnËN tˆ¸‚u¾š9&öW&;rÁ¦¡ëW~ànƒ_ÿ5t‹•E-+ÎÒ|IÅ„qõßç°™Šendstream endobj 172 0 obj << /Filter /FlateDecode /Length 206 >> stream xœ]Mnà …÷œ‚&QÜH›d“E«¨í0  cDìEnŸ¨«(‹o¤Çü0oºÓå|Iq‘ݵÌîbòïóZÊo1 ÒG·ü©Ýd³èNŸ6ÿ>2J*ÀÐô—°ûu¬Oº5¹Ùã=[‡Å¦ŠA)3„`&ÿ–ê[Ã^* †¤6 5ö,Á4”g©)Ã4ÉeõArïLE)Šbèijß&ÛºÉö'/Åö67Ò­¥`Zê ªG¶þŸ)Ï™»$!ž¹f~endstream endobj 173 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 884 >> stream xœU‘]L[eÇÏK± AgRÓÎyzva¦ ë ›YP"S„ÌL4\RÁ˜¦œŒFú±ö°®íJO9í¡íáíéÐv%ë$Ý–Ì,èDâèÍ.ôÎè#˜xµ¼çppñt¨Á¼Ïû¼OÞÿïÿæAD{êµõX[Çåç‘|¬M~AÆ”åƒ' SíÒÞÅgñÒœzsÏz„±l寭A7ès^šbé ë+V‡¶õÐ=}}g»é^«µ~ÃÅøœ»›±³SŒËÎjÉ4=êq86øøIÿËz_=u*Xì.¿Åã»ôzK¥›8Ù)ú}ÆÏø®0“ô°ÇÍÒïÚ] }àÕr=.ï ËøèÏ$ãs3—gœWìÓŒÛÁ0ÓŒ‹q³>ûd‹IDÇ;Ú_ôz â$Ñ$ÖJ#@ ©]íÄ@“(Ž~kj»-'»”¡™mùÏm$_ÿC‡Ï?0lýRýné«»˜º‡;·ï——ò…;@Þ‡#A!‚9žƒ…ÊZ©v“ª-аAny6F?9Í¿ô2õZß„zþê¹à0?íz/½üqdÖë‹A Sâ¼”YÍT?¥ªEQ¼Ñø2}wîÛàfdóÚ&ü?}sçóúZî&,’RÒa? QÞP*‘ á?Ÿà£i¸ã`[y»æÉUl0 ‘ôl’ìRj…Uy¤Žð 9 “Mø¶AÀãÑrJäÁ‹Íqq1UâÌxBýHÕÇ¥R7qŹb^Ëe ká:Ä´$äÀTÎJ"d’YsZTOãÛ&¾€/³¹•¥lnAÌd@"K|!ÊÏÏs¥Ñqhý,cr‡ ÕÔëA™÷ïéCAjP5wá[ØöýúP‡Çð¢¡éo¸Ý~¿ÛÝð7›F³%òÙ:¶´&úZ9©SÞ”û Õëÿ*í(Õ¡b–ìW[•ßõáÀ? y_> stream xœ]=Â0 …÷œ"7è…P©ÊR.¦N•¡iÊÀí±Ý–áYú’øÅÏY{>ƒŸevK“}À,}‚×ôNdƒ¢(eïí¼W;š(²öbâóAâp _ÍÙ½,K“zxEc!™0€hò\7Îi¡ÿ»:. 
[_vš•çX÷š…¸'¬5 ±&ÍBB§YˆøW£ÐF±•"+UibEˆ6Š­°Ò`Û4#¥ÝÂIûN ÂÌ+áÈ”Ôøm-N‘º$J|ã©j$endstream endobj 175 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 1320 >> stream xœ]“[L“gÇ¿¶Ø¾(âT0ÈWGœ·â¦n™N(*“NAÁ(-Pz -=—¶= JËéã¤x`sa( lN“M³M“mYf²˜]½5_÷õb7»|ß›ßóžÿE¤° ‹•ZZPV˜—Ÿ——|ä&²Ù‰m «^¥¾Ú¹Ò8–âÙ¶æòfœ± ³7¼ÜH°Y¬Ì‡EBq›¤¶e{‘¨¦¶¥i{¡äRc}õÿ~ ‚@M¢V‰¢º¶ž NÅDqš¨ >$ ˆãD*C'xÄ#Ö!Ö»™³‡NIOyðš¥yI¤¿fŸ+%ô”IšØGmNHff\Ç“ø¾9b k‚í1Q·‚l·ºÒÖi5‚é|VÏpg,â&}“‘¥Ð ¹]‰ô<›åfÅ1º?Ëj°èÁ`oêB0nÊŸÅW²¢«ž=t+ÐÐxBx¸­H oÖ4ƒeˆÏÜ}¶rse©ìrÚ]àD~ƒÝ@ê¸:›Uo5˜Y×k:DEç³ÛÁÐÕÈD`LŸ§~€y´Z:R\r¤¡²…d‚ìhg‚$Þ•±0o­pìDŸÎQÑ\z ÐÛÐûSyx=éãâœF8p6z\ý‚^O>§sù¦÷D4ëH‰Dþ©J¨¼þö¢ ›ç~:ŠS–ïÅÃ#= ÷®q¸ *À 63JÍúü†(Â{pˆÚŒßZÄ9‹™rœö=ÀÁŠ*C²zEƒðÀ¯Ò{«?Oþ4LºƒÎ„áNÝTy°ºç„YÿrñæçF¿½ªiê!å&¥Ú* ŠQá!j±hðã½ûJwÕÅÆ•äŒøE>@_=Twª¶ÒCΤfÕè“©7Là3LðÊD._²O‘/Íïl7), ¤åUŒž_Z^X^ .‡ÜÈÎóëº ­ iדÒËWJÏj5Åû¢Î^WTð-öÙý¶P¶>æºÀes øÝÝÝz—Ú`°QPÈõOz'}“Þ‰,‡Ï|ê:S½ØD éµV´°õ × æ.3 ­Î¤1:^«€9Pêz‚>.ŧG$ÅÂï<ä$6Q|UAãÁªÛ5C´BK chÀ ÊuôˆÉ“Z "T<Û4CÚ¹x#<ÇìaÌzŽÿÌr]Á® rò‚LO…Úf±™Å}‰óÜxÝ×篣ßdŠI¿ñöÝ?]¥æçn·/ °ól>O6*Õ—êK%e€ eK3ã¾q׸ÀKñ£?Þûå7@×d5ytšiY¸…è–¾ÚÅ”è2£ÍQ}í!e™Õl51Êk6_¿u(Ì(?ŒDýþ,¿§ÇòªÔ­zæty¹ü¬¼\UeR›U62ñÎÜl¼ÍØöÐ3µºkwv1uBž- ÈÀ"îTvÔ6•VRšB!çˆ{ú¼}½Ó½Ó‘éî±È\E³µ,6°¿c!_b‘J¡ÉÂê>ÒÍ'ëG}Œbd:¹–8–Jÿ d}Dendstream endobj 176 0 obj << /Filter /FlateDecode /Length 4930 >> stream xœí\I#Éu6à[$:ydÐ'%åa*cC µ %@R>h˜ÍÊ®J5ɬa&§¥ùõ~KDn•É"{Ú2,}(æ‘ñâmß[¢¿Z¹Xø/þÝîŠÕãÝWw‚î®âŸÝaõÓû»ÿN˜Uȃ•vuÿîŽGˆ•P!wÚ®\as!Ãêþp÷‡ìMÙ®7Jº¼>;7øÛæÁûì±<–§í¾ú¦ŒÏ —µOéB¸¬üózSä…ÚÊl{xÞ—i°Ë>¬á¥œÏº›2{¨úÑÛ·ë Þ…Íê5ýr!ûšÆY†ãÞÕ'¾( øÐ1ͲíémÕž¶§¿Ä[ÖfÇîÃçýô0?~ ï:+O‰Õïèw¶««ãú÷¿ºÛ­Åj#DŒv«ûØ¢TÏM¾ÞhXÕ4Íù€KS8µ‰‚Áò<ñÊdî%ì8î}Iy"ü||Ïû¦‚°` dŽý;ßy·­NDzi¾ Ÿ•æðÙ‡§òTÆ)…žì°òÙŸÎM›¦²‡òX·ÝBýp¡>{>Õo7ƒE íD±«û7w÷?ú°§ÚW--­°:£†+™µuü 8•;œ洰²bö)Üg|ÉÓRžÊíC“.eö¡jŸh·ü;)†RŠ V$•6ý'À&§øå‘Hø¼"½ŒÜÑŠäã?×À‡Î' úô9’ ” ›móh%(ÍÛ–i ¤áÜ&Â,2ln‰0Q°6}õ7¸DBnf‰BæÊzÏ/gí©Úî›Ï×÷šN ‹‘®p‹“‚‚8åñed‡V ŸðŠ’‡ü‡ØB -çÖQä^¹ ‰«ÁÅÅô мU<å@ºXè4mnb¥’°tñan›l·óázŠ\IRÎð˜{ãàÞÏrlV£!‘¦¸¸¨  L&‘Àkn·Õ¾‰ò‚âxÏŠ¡HƒQ1’/t ɇ§…“Q‹%©PûÄú­HÄëhËT¶ÛOÕñ1M©Ð DªÃ€PDaCªe®•öLÂ÷¦2?–¾"76€qr¹ùžÝÜÊ¥2ÂÂÄ*7´Å×±À>J•Щ2 ÙZÌàrc¬4+ Rîl\Ì?]\¿Çýn¡•-É/Ô|´2Rs$ ¡ ; ƃpŠ(î`µ4òVá›+QÐU"˜1*À–ÇÏ(;Z™1°¡°y‚¬ ù2#ÓnQd"òV5à|ÁºÎüCYïí ñø«0fѺÖ3±n_®»•Ìì‘ʽó ß·îéüpHçT¡¼sfDø\¨ñ&ÁIXÜK›F&”pE~W[q5³ab•[Öl@®­ºšÈlðäs®ÔH8°7>Óí)3Úþ"$È/Xò…*úBÄJí¬¿ e¾ÞZ«'†ð…¾´í ìƒ!l£££I°DÓ™å×åž:箕o›"ü/É·SzF,n´—å;äÒK/®“‹EOñW“‹Ë’žVhk-ü QÿÈ÷! Ò ÷Ññ#’‡^ÀëÛêXÀŸâtýÀíDÿË®* iƒï5çá„{ö‚Œ½sW;8ÏþÍy±ìàp¾èà®M6Ì¡¦ëü›ñ þm$Õ°|éMˆþ-‘ü“‹jxßJÀbB{9OÍè3 aA€Dáù×ñü?»¿û-~Ĭ›;'«(þüN ÀÀ•- ¸+Wø4p¼^º³¿ûýb89fS 'aJX|X™@ŽÃÉO‚4Fô"'Œg× P=4d3; šÔ2ºtUÈ«MÞÈ¥¿N9 Çm”#4 Ë)ï_xì\Z •LÀ{‚ö4¾l¢öCˆ ‘+Ù1„¶u²¥ïvõù¸N:k:`] û„öTUʨ„NI-“µDØ—ƒ·é¢&;lùk…h5é¶uľåÅ8 <ûX¹IÓ‹ì©î¢ûxW‘UŽñ Dz}ã þi=`Î0 µ ‹ö|:¦uv~N*º«}ÙÕ´Õ~ŸÆaøÛœ÷-Âý9ÕÄ-F^7çü;0ò¾€Ff¶"²!íÑDÍBŒl7ˆœ®^]pnb@£ աm@#>Ò_ÂQþKÜvƒ¢ý®:øîµüzÈùª^£/سF·ŽÑÉŽöf.µÀ+³kí¹È=(xzù'èÿ|î ´”ë9u38U ²¹H^0èÉo—ˆe§5õ@Jªñ––-f— RE‡ÅXePv:L¢$£~ÊlpÓrÚ)ZØ0G° ÿ3Ë»LBŸS@ãqý9¢ ²ºiª·”DôœÛÕ1…§¦ð׫㶭êcÓ`"o;c Ö\zF 6, ‰ÅUd*‚¤QÀm`eN¿¹ƒ}üáô[(µ8) ¨}â5p:¢…´<à–²¸„ à\aÕ ïe Gé+ÎcxÖ«tŠûª17ˆ»N‰›•GÊèЖë¬>¦IÂ0kZ·Oé¾&Eål©Œ'†ÅTgÏÛÓöP¶åiF Ì\\•âóèÙzõc¬êÑ÷°m·ßgÀŠw°îêXc–&p1(yà<Œ/Pf;7æ ÖfÖ8؃òp`Þ^ÓΩ?Vzd‰¨—Û$ ˜Rò;é½YÛ¼Ø\: ¶e4dÉxEÉ­Ÿ}5´½gR·´Õ"ýÝeLpúûœR“)‘Œ“–MÃå ! Ø£:ye³}õ¾Ïï«§a€ ¡„J¤4ÙÃ÷GÒL‚ìÀ;[ÃHűû¨ß6åi\>À·@¦QX8kü×ÚÃwžðTz„…ÀÖ5mùÞŒÀ1‡r’>üÖŠxj&“«Þhtk`ºz̓í(Q¢u4£õqN$-§¯S˜mÔØ’óô€ ÐÑÄ…¤ì%_ mM†ù/x¶É€±?UKÈFHŒ£—sY—V…ÞX“H+üìÔ¤w>r Ë_ö®^crÉ<,&²=VPлÀ–>ô¯½Â2ªrldB_tbÍnSª]½')WÌV¶7ŠÃa¡ôH¡»áœpá§e À²Ë6Ã"ö´ëô2,—j^$*\lPŒZÛa zjœËã.¥ £y¦QžÖôÅ\ÞÆt(˜Ég†„I|C–l眑§o.‡T˜‘Á|L®çß/†T,èüGØàå„Ù¸41©º!°&a8]ÿÊ)Y/_ËÄL6¶7µÝ4ŸÊ`¬ù1†q€ð]Ä0·W .fQú2ž/&\ Þ—÷û#Ò(Ÿ¤L Ä·Oÿ½LðmÓ¨à[•DL8.(.lØB³i’Iî»’m¹C£ýt¬v„ÐQ+)!˜ÚCY<ò¦Fpx‚> ¼7#l› kûx6¡’pÇ ð8½Ã"P!:¾®¤~Üt€$\Â#é£àËlã5z-,Ò3p i.ÌP,L–=Ìu¤ì—äg´åÊ+,/^ª)Æ6;ÔǺ­aÇÒcp+KØÉA¬|Ü¡…ÔÁP!ô°†Ì‡ZöχþKH‘¶ì™E¼ãÅíá!"8|nö¶ûþF.L\6b¨+‘"8ÇPJÜÅŠ,>à. 
ªøû@û]z`³fK­q|ÅÐ-tå_¯‹n|.¿\cì…Ò\¨ ¤Ä ág×SJ1°°LGGEá˜ó§(fꀬ¾°óßSO?ÍñÛc ÞÁê-ì7›?!Ûæ|#5ÀºTÖ£‚ŒY5çÁàaìZ€ÍQ³¤|[há1k"?´¸"ùòZ « ¹ZLÊ)¶ElÑ é±…æ~ö€™…‡˜ túqÎM)‘[ßgþ6á"Êá­<í2ðÉÓ×"ó,%¼7ÏÒ0fi_éx:î2Až:kpêføxe“ÉÕi œ÷Æîþå“IqL 'º\\Ú¿½‚¹À’Ò-ó X‹YÙKa',åݘ`a•²×i –g ÊN/bz¥Ò7 1:jØúÅÁS¯Âg©À 9?‚ϾPλ%Òä>`ÑŸ/±/¦ËùX0ÚûÈ Q'ôÛuÅ üâ_ƒ©U¦ Ç[6muضÔïªçµf;F8¿Ün1ɤ{e—v¥žœÝ§Ô}J!ÂA$Š€púû±ORs¾oŠ«oÊn®/ΈqEæÓO€?±ö4ÍÊš»cOíù¸mKDü K“¿ÂD”Àt?£dx¤0ž”è{Òî¾Û6XšÀ²Vü¨¢ä£ ;è<Ž­« èþëz$rø•Ê·Ëÿ:†u1*Ñ5…ð`\®˜©$0+0ÿŽ©^b…ÂýM!É1Þ,|̪шýÞbJŒ/\Й…Oêcb£íŠÓíµ jÁ/*þróp–³jj+Úqã‰yà¬=Wmjl(Yj¨á9 ¬ vfU†äëÊ–fŸ{íúæâŒ>Õ‘”Ë ~Y[éb\ãAÑ…‚j<ÈŸØÛîQÓ†j}ªêsšOȾÀ¦êG_V3sæH¿ô$tEå °j3Íúw}FÆQT‡¬Šf ÿ¾Ov|`3ã éëûŠ >œ´§¥8M6k·Æ}…ø­> MãÐÈ…ãü¾¡ø™n¢JUÆ9w2—}÷Ç(±&b|’Lº/+ié‰YÈçCà †rêÔ¯S®ÏMþ‹ò¸‹58U„Ü:=aÀ(+­±[Ákny ‹à²êñXŸè!7=p.•¸ýÙ©!)§{D‰LC©eú‡KÊÅœªÇ§.÷ÑÆûØ=Î ÍD4Uÿ^fL pÝf7釧òØîœPl/Ta- ,0gÀÊR° éó €f¬ë º¸˜Ë@w¥Ø<ýþ¼ët‹¡…#1Ý>ƒnw]T¡¤#Ä×@Ó…):­‚·U¸”Ó§".GxÅ Ô{»k«]‰j„¼(@m3±„9˜¹’>UxöÖi¸!¹…/Õµ@‡b2h›¦Õ&£#|‚Ócx…ÉŸX\e±rׯi‘÷Ôì¨ûœI“ž¹ô}N±ÕïÚò˜¦”Qi5¦L¢X“yŸ&H® cb­'fH²ûÊ[H•·S9âùCùÙ±L•›0¬º¸Eù˜Ï2RHõ!<1[Pö2è™ñâáùìÌëÉ’ÿOõ u˜ÔÔ÷‘u˜(ĤÈúºBÌ``!FÞÐÔ«•’Q"F®£!Ÿ8LtàÞ”¿:L¼±½±C¡Wr„•)æ0Ñè«âDœîÿ Ìòjœ¸Ø¡‰x¬M'šQœh“åˆh±”‹¦˜GRsV>h£ÕŽçôû§ØÜ[<·Å¿±Ðþ>¾Î¿áEß'4õþë’J…þ9Zàq2¼7­ÊBAôpeÃÀDHÇ‹œ>ùëÓâAve¶7ÕÅýç.ì¨,¨K7{uÔ@ðmn¶7 N'dj®§•";ì aÔ´ª?Éïþ#Wß lžïVÚz·ÚŒÆ\¶lèaq ú¨0xeCýB33u¢Kðt9p^"m°í\ P«îÎÍmçâ”E:Pø+Ûί80±ÇÎagþhÈ'¶˜X.¸%±†' ÐÊzìkL‡:^ñ["w°ã·r·Ï(\â®’Ýõ»à—s>ž»J!T¹‹É4È£ÆöË–V`3¼é ­ZI†ÖöL๗5OõyX Ųk‹”Üd&‚6çÓó©jJGUžJ7õ±ü£ A:õš  Öô“ëÑqp8¢ ˆX0Tïf‹'ÐS;*ÀpÆÓá4ÀuY”î˜{<ôݵÝÇxÅAì{Ú>OÈÆK0¯å Ñ:²Ë¸áÁû¶*È©ª;>üâø:ŸÍ},Ûœ×5ªˆ§¨ö7±ç_xê¿óÜ•\QEW¥^Dþ͆TÏ¥ ËÌ —²ö”Ò$V¥t¥ç^yîÊòÌwn…¥!¦=Ÿ¹ì9æbnhL)9™šÀÁ[½DÞ`$ŽØ™<èÇÐ>pªäMÙþ0~¶;²Îšq·ØAY…åºiÞ¥nÒhì`‹ iñ%PÝq€îâTîÒ™}:2V 溥’€æ-‡¤“›{Öµ\lZW Më^¸.£}ݱt<ôÔÕ„87-ó\6ÎØu‡Ör‰÷*!œ7ûå…̫ԶF‚pXæUZ@:^ww.šW·2x€sd^…29zpQ ­‡®„È+DŠCñ(¤¸:"åWlAb¿êCõÏå óä@¶„à¿­¶]˸À~&kr£S¢_ú–qƒ7T®ïs·Œô¨ságæSÀ´iÀãìxäãhjKÛ‡±ŒÜ‡3h¶Øå;I)ÃýD]þ#x-bþ9:/5³Nã*µ¸ÎÉv×ùyÜfìJ)yÏ£‰7:ž9J cÃÏg|SžêÏû%³ª¶f ú ü0¬ž¬Ô‚«ze¥œ:Ÿö—ièpë¦õXë 1 õoïþ›ã7endstream endobj 177 0 obj << /Filter /FlateDecode /Length 199 >> stream xœ]PË ¼óüA·µM.õ҃ƨ?@éb8”Úü{°<Ì$ÃÎdwȺþÜ[³òìægõÀ•kcG˼y…|À—±,/øhÔúU‘Õ$˺‹tÏ·CNÔI_å„Ù½€&>å)¤æ'zi_ÈZÑj-ÚñoT§À w'Š„ÃP Ö–G@L’&$‘dUŠ€*˜ë“ˆ fmC¹&f‰Ãê}I¸"ôÙÏçjóíKÇR¡‹±øû7»âö9ÀcÛendstream endobj 178 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 1213 >> stream xœ•’ûoSeÇÏ¡]9šY„ؤ•ÑS14†—‰pq"cCd€S.Î®Û k;»³víº^Y/çéi»^¶ÞØÚ]z6è( w$!L5ÄÄ  AýÁ¼§?x6þ~yò¾Éû|Þçù~¿8&\‚á8.ª©¯ÿ°jáôJq%^¬XR\%ÛãÆùƒeP.€ra¸¢¬v²,Gºe¨íyLˆãf[ R£ë4éÕmí”båÆJ%_6(6TWW­U¼QYY­ØªQéÕÊf­¢¾™jWiš)þÒ¡Ø­SªU”i±åvŠê|{ýz£Ñ¸®YÓµN§o{w²VaTSíŠU]*½AÕ¢¨Õi)EC³F¥XtÝb­Ñi:»)•^Q¯kQéµ-ªªÃ0Ѷžu£Ã^Ú°õØvl#NcKù5ù©Iü‡%.Á Á÷ÂE¯øñ&( ¯ (WÀçïI¢§˜ÈO@d‡ ôƬÚ÷< %ÍÐf¿ÑßÅôÑoî¾!ñƒï™ñ1>p ýËGœ]ÊÝ]E–è4¹²tüé(>p{,œ¬ÄJ]jŸk3f+awtòf&É4œ„4=Bû†¥ “tÕëÆDv€øa ˜¼Ž’RtW´zB‘=-e óâù•vî;T÷G‚Å/>Dþ æ?‰C}xÏ'à ³ñ@ˆ¬9Ié´]­MW¨Kçg'çÆåc{/›N–^CËÙ¸¼}´Ûæ! 
µ;÷íBiÈÍLŒÝc‘€¹un<Ïz2}Gœ6˜ˆî´et4?3ÚšœjRü¸ EÄ;ùsñ†$6“ÊßfˆPÒŸpGäahõ¨¡ v»à«ò—ݼÖ8%/Ði èÁE»½v/m“rábÚ•öñOeÜ*QuѨL&K{2$÷bé„SeÖlöÊŽöÑF{Ä&óp*8 cpÕ3¶ŽÅB©„Ío'w00 B(˜B/£Rt®ô Ò¤c KBÇqqƒ-’,’ü‰#áã7èÖ|™$eËê:µ_jÍqCn2ÇæäÜç¥ÄcuÛÀLì?­ºü÷Y$O>ѵßî‘S›}Ñ„ÑÏŽ†‡£Y2–Ì NDÇ ÷‚CX`bÄåLãš\…yAÀ„?4Oü~¶p ˆTÔÕIõ[ôõ¤xþ-^ÀÎàEsñ_Ix’ d™ÿ$ŒÀ5~•q×¹>F mÐ Ö£û|v)Ÿ©X’‰¦yí.KØÀ$/q"=㣆ÜûRÚãq‚›ð0Ždœ ȯ¢7ó1´ˆ9Ñ”ŸvÒ q'9iiÔÙêu¬Y›º½Œ=ÉN€%Y˜ò³ÀÂ4°tŽžò倈$!›p@GÑïï!Õ_† L0ÁK™²±ókfðk?ÞÏ£¥·èbñ ÷ºªŠ“Àjâ¥_k¡çî£ÕèÕ‡û¾é˜–·Îm¹ç‰xbrvšíµeåQwÁbìXvdÜÊì$mÌØ ¶4jØ{¢áäN2Ý)avA Qý)·²ªqOá·ë3h+RLËÅòg…U#åÏ`Øÿ‹†endstream endobj 179 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 1108 >> stream xœU’LSWÇçœûeˆ>‡0 “W]»kEyÖÜ¢¾À¨\ŽZ¬ósnp¹â|.u`Ô'¹\C¡ZR æ?£êÖ£IºH˜ô‘2E¾!×ÉÏdk³èœÞ€·Ul‘žp—‹ôZ}¢{ùÂi·¯:;+öò“§œì3À•­Ûã¶9ýÞVkçjN›§Ýý${°FYÓ,\_ðìþ#]„Q`bLœI0½LÓÏ$ÉÚìDõd9Fþfü̽œ÷st¼ÎªFóÕ}€FÜ7ÂøU·N=ü›i ú‚üÞØ–ŠiJfš¾ó]ˆö„£ÀJŠâ3wå!Ge |lbæÿ@6¯›Æ•˜Ô¡(‘ÞB7PþMzè‚H›yOž”ô \Æ(r|þ’¤ !Í­Ù6 mN·TÇ=úɽr\N´œ¯¹Yƒ„VâéxTàè.¢¯ÜÁM³³ÓÓ‰Ä!ÁF ”¡»›§ ôÚ¦L#d,)¾›0I¯ŽH_4]²…â0 ìpµzïsK%iÎ0§Ak~Ñ©CÜR©)®¤Â1`G ‰Xj¹³}ªáÛZÌ¡ë›# Öa!c5îÒØ=Ë•áÄq¾Ô"Pónj?L‹$Z ´ høKšÆ Ú=a;ßÊ`h¸k Â>E¿§Ñ_y¼¨µv±¯Ñ~ÜrÁŸŽôËÙy‡CøT喝lžtÌÔß 9Ú8ê/RÌnj¤çh Ý#Ër¢l8ƒgâ0âŸ}„æÐ>Å£¸ °’}rÎjÝœNý”£&P‹+T&ñjû˜x©i*4$CØÐ*õsûÍš›Zû•ôô-z2& £vºH-ôʺiûv«UŽþÇŽÃÙ³<.ൿÔFP-l>—;Îýçô.3œþOÍ6á:¼•ë3Ø^,y‹æ¹Úz^úˆÖ›Y4¨ýêýä@<}³ŸÌ´,©_ðyø5˜’gN̹s`¼ëûÆÉ*vÜ@×ád.G› (Jô¤…8¦~öØQ”9›yA(¥à¯©“~Æ4®ê{bm›ià=¥ß•ç‹)cŸÄbÉ>~7 ÿ#êe‡ ™6?º¢lúº‰r(ÓÀ×›mÅ«éY‚žaþºòlj»‡4\z{Ü–èé‡3ÀŽÅbãæ|~­¾Óc\CÈ¿>ñVUendstream endobj 180 0 obj << /Filter /FlateDecode /Length 171 >> stream xœ]1à {^ÁÀÈ‘RX4Nã"V”äø8, ¸ÈïØN‘bW:nͱ~¸ Î&ÊÑà 5Ö鈫ß" p¶Ž4‚j 阪âaý]…÷' Í4û<ªÙSˆ¦>5{ ¼Æ5(À¨ÜŒ¤ã\vÆH‚Nÿ­Ä^˜Ì‘l¯²Šó줻´²Šóì¥~ËO…éD °Åˆ.Uð Vx¬ÃßmÁ‡Ò¢Yä ÒVKendstream endobj 181 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 616 >> stream xœ¥ÏOÓÇÛmÌ¢ Š qCÜõbB`˜ƒ1C¢’0h¼è¶Héèº ?º­ßµ0Æ $ 3º+‰ñâÙ£ÆxÐÄ£ü¶Û0±Ã?Ãûæ½Ãû¼ï÷‘„É@$iîw¹ºëÝuí2©µ´+F úµÒ׋Sº"_T½Íêèyuøa$ÉȬÒÏù¼wÌ#Ð/œÝÎ]ºè®žž[íôM§³‡¾Ë2¼wÄ=A»Ü‚‡aÝ‚>ŒÓO¸/#„OVîxÁw»³3 u¸YÇõÖ)ítÈ+xè!ÆÏðAf”¾ÇMôC7ËÐ'6;N´Ÿc}ái7ÊðA<%ˆD7aÔ3&ÒehÕMÕ#”Õƒ²ºQ&ÕÆê°±úL;jÉ”äÌ7¼…Œ%%VÖ±BÉÒF$èO `÷ƒ—ô‚?Åã£åš^ €Š†ÁEÖ‘Ïî#¶ãÀ,’‰¹¡ZØzÜlþ¤úþm§&Ö.ïXEV{AñÓàÄ•…µ=»Ž<öRyè*åëŒ7È'vA¥·ñnkSI?䈃—¥2ºYÉ~VsV­Ù6OB‚¨ Z¥d|[šÛÚL){«öCõÜûj(•0gC6IÅÎÔZKâËÄüUØæ $)¾/Î!Z_Ü,”ìESE”°/•êI (%  VêI^!âèG}˜\S±Ö° y)G5ÙÏšY â@'T-endstream endobj 182 0 obj << /Filter /FlateDecode /Length 175 >> stream xœ]1à EwNÁ îUŠXÒ%C«ªí˜ˆ!€zûIªªÃ·ôlë›õÃep6Qv^=1Qcޏø5*¤#NÖT[•vªUÍ2Ö_ex½Ò¼€f㛜‘=ÚÚâ›IyK £t’®iDgŒ èôßè´Fó³YÀAd<‰ªŒmFàb×êµÃW—ˆG"ªÖÑ¥úGÍYâY‡ßWƒÅE³ÈðY8endstream endobj 183 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 556 >> stream xœcd`ab`dddsö Ž4±T~H3þaú!ËÜ]÷kçi¬Ý<ÌÝ<,Ý?N}oü^Ëÿ½B€•‘±ª¾oÆçü‚Ê¢ÌôŒ…Sƒd a¨`hii®£`d``©à˜›Z”™œ˜§à›X’‘š›Xää(ç'g¦–T‚µØd””Xéë———ë%æëå¥ÛLÑQ(Ï,ÉPJ-N-*KMQpËÏ+QðKÌMU;TL:çç”–¤)øæ§¤å%å—ƒ]“X Í,ÎÚ˜ÁÀÀÀÌ ÈÌÀ°Œ±‹±ì]-†IŒ?:ø~ |/ÛÃøá§óÏšïe¢³w/^RÒ]%ÿç[uqwqÑ¢î9ò|2,E|—Þó]f>ã/ ²ßBúU¿Å»»8j¦vÏœÛ×=g‚ÜÄ%³Oï?tôÄÞY[gLìíŸØ=¡{jko]ÿo•Ë·Ooé®íæ¨mì®éœê±ÇM~Qæòü5Å·#ÙóãîÚ?¤æ3~Ÿy’ùûõï½¢w×o˜¹dú¼ÉSú»çqÌ©›UÙÒÙÝT/÷[Æ`ÖwÞ¹Kr»%ë+šK›ús{å{«&9÷–ü–~&Ñ8±£¯©›£²®¦ª|zÛ”vùïÚ>–éiµEõ’ÍMíÝU3jæNêíž2]î»Ì³êߦ¬¥óš§mZ8¥}e×T¹Î9-—;}ç±–XZTÕÍ1wƬ9óë'4õËó10q‘íCendstream endobj 184 0 obj << /Filter /FlateDecode /Length 172 >> stream xœ]=à …wNáð“ª]¢,é’¡UÕöLÄ@„ ½}$:< ›÷¬Ï´®ƒ³ è#zõÂÆ:qñkT#NÖ.@[•ö®V5Ë@h“áý Ù€fëïrFúÍ©ŽøR^ã¤Â(Ý„¤e¬ké:ý÷%¶Àhvç9;‹çMGÚK~1–k‰Ʋ©0 ÖÑ¥ ^Á uø»-øPRE¾zCV8endstream endobj 185 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 604 >> stream xœcd`ab`dddsöõõ´±TH3þaú!Ë\ü;ìÇœŸ!¬Ý<ÌÝ<,“~Üú^$ø=—ÿ{¦ #cU}ßTçü‚Ê¢ÌôŒ…Sƒd a¨`hii®£`d``©à˜›Z”™œ˜§à›X’‘š›Xää(ç'g¦–T‚µØd””Xéë———ë%æëå¥ÛLÑQ(Ï,ÉPJ-N-*KMQpËÏ+QðKÌMU;TL:çç”–¤)øæ§¤åå'e”0000$002v10=ÇÀ°—1éGßU¿ÃÖ|×^þC~9ã¿›ÌûóŠ6gw6'wsÔÖwW4Mi› 
?µ»¿ïj÷å÷ò(÷Ïêž7³¶»>º{â”Is/­üÎ)?£dUúÁnŽïüßE¾+}W}ç}ÆÚ?:)7O¾ì’GwcwzwQIuNI\CDwGÓ¤öÉ3zú—N’›±oî‘-ÝK&•7¶w6Ë_®0§»wÖÜ;¶-Zßͱw^T\Z©Bn€|aht|L7‡{õÍI½½Ózúäù~¸Õ/ÿ©±œñõÕï+™¿[|:e*]¼¸ëô’ {o-¿Û}‚cWåúØß|¿%~_þÐÐÝÙÓ$WS›’QSSUÕÕÕÝÎÑ:©qr_o÷ŒYr‡öNõ_n|¿[rc÷ÆyËæÎš¶wC÷4މmÓÛ::Zšå’Ò¢Sc»9B/žšÔ=·w¾üä¢S¬ý®²úÎôU;¾sìêæX<µ"«¾ÛÂæ7|˜•½Ck+Ÿ‹ù|Npbü°endstream endobj 186 0 obj << /Filter /FlateDecode /Length 170 >> stream xœ]1à EwNá@"Uj¤ˆ%]:´ªÚ^€€‰b!Co_ I‡ßÒÇÿ£g>\/Wr ø#zýÂÖ‘‰¸ø5j„'G¬iÁ8vW§žU`|¸©ðþ„@»ù»š‘?›îTŸš­¤½Á%(QÑ„¬BöÖJ†dþVíVížlϲJˆ<³ídU¶]­ÁòSa:@¯1"¥ ^Á #üÝ|(-Èb_†0VUendstream endobj 187 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 397 >> stream xœEŽ¿KaÆßS‹ËŽ~Cfö.AQèD„BØP‹E—½èAçÉõ–HdXaæ+Wô‹†Úki¨=èOhiŠƒø^½K^KËÃó χ€\$B]$6=l—n«C°|«Óɸ _ßÛuLr2Éudá6H¶ÂR3,¶ § ä6+=“3ÔdŠâyyPNÔ"„CáðP?å0׈¡&”4Ž)4E4…ÖÆ žÑ*¡¹¿ËhŠÒÌH0˜ÍfжÐä˜MéÇY•¦ð4Y%Æ:YÆzšâ)E#ØÖ ØѵÌ%ŽéËÄH#„=½5 ·-çŠßZ¥&+ÄÍ«Ÿ™KáóÙ ¯ÜôWYõ \ÉŸuñNð¨0Å Èà¡ãñþü¬Ra‡âáÞQac6¾½ãç^Žue\bœ¾ó(øúÄ&”jÐ'0=ìcêcàž{“_rL,”v‹{l¿ºÕ˜·ßðIÆŒßòFî‹/äóå2+ŠEs÷ôâáñäØ^À×e ‰M~·kèRj@褢Åendstream endobj 188 0 obj << /Filter /FlateDecode /Length 2755 >> stream xœYK“ÛÆ¾óîStàMÃcž˜QÅ©rbWE)YI$Vå u•°$v‰ˆ(ôzíÊOwÏ œ¨"DÌôôôóëîѧušðuŠÿûj•®VŸVœV×áŸ}µþÓnõõ[£Ö.qF˜õî~åOð5*±©Xg©I¸pë]µzÏþ¸Ùj¡“4•ìÍ_ð·Jœ3ì[ÿ;å†ÉÍ»¿®8OœVÙz÷zµ»™ŸÛ-ŸËèÜ×o¹ž ?…‘Í›âçÍ6MRÍ•¬±ÙJ›WÁ7"–™e,f~kvÌq5Eî?Ñ¾Éøt¿oâoÎųš¶ 0¬?Òž¶ìÔ\ª¢&ßpð]–1Š“©ÿ¹±h"Î^\°}^‡Ó6cù~_t)ŠÚòm§Pà -÷-W´x 8ÇÇú@ƧÑba)SìÉG–t¨H‡¡å$Yõ¾i#S täW+L,óz_„M ñtl›ËÃ1.-CPg“ \&2e»ÃнÝìþuö ¶ÔÜxŠ÷q›öã$ºs>^‘&…HŸèQ¶MM¦F%•#SkÇÉoïn+„HTš­·CÜÌïĤ¢,óŽ‚ìòØ–=íI4`œð1d Ø^ƒÓÆ”IA4J”HhÊ 1ØKͱÓ_/€ðGöá{$1/nÙùvI,ûu’.²ü=’R”žÝ×ᔕ¸?â–ßç[¤Xȳ í¿Ëe¾¥¤Sdö¬ÎO§' }qƒÁ§1ÞœwXT“” . sòp1Àx Ðe!Àªüç×`«Ï£ œj•T1Ê&x¢,iOp¤2òö¾©Î—¾wƒŽÞßÊ‘ …KðˆÒ²Äž²‚½Š·gx…Ä+«­K.]I2Êó©@$ÎO!5«K±”ËäX@‚K‚ÚâÓ4w.e[tþ.Ä~HˆI:ŽÛy»`àm³Eæ1“pÐÌ™1~ȧÝFYy?2t®g¬Ùë€qJÃÇ8G^ÿ¾ëË*Yæ|òF[?hÔÿa“¨˜êïC­"ƬjÚ"\œfÐßôyy*ó Æ¿Ê®Ywu>å5IÚÝn0s…OûÅm(·óŒClç –L2Œ-Ae\`l’* ð±OŽm„§¼n#ü*¶;áWRlWNå!,ƒ&-а¾øLIãHz’Ž>ÅPCè˜öýÔv©y A«¼»FÉi2±nè÷p˜Xõ¼ž@±m›¶‹dœ=¢$CX‡ *±'ôÒHˆÑA_h$Ÿ᪘Æ*#“ÎÕ&§eVQ©ŸuU^$å+ ðb{€'9× µEðI¼¡AÎ(:'g‡¸µk8•IKY„Nä[€ÁŠ+[)žÑÜú3ršyY"dgfG¾Áˆ° (§DöUA‰±ÿo°Ðís±pƒH2“iÒI/d’M8wZFvÔ—S‡JÇ–0°.û2?ÅÍk»#s_Ø<•c§¢3†P-8±v8H/΄ÆcÏ<EÍÀìØt>èÞ×›®+ïN‹n0q碲Á£P¾ÕBç–Ñ© Äì¾m*4ßV ™(kf ‚2¶ÅKoÞ/ìåÊ»6oŸn#ÆE™uYná`5þŒýš½¢B ¾fÄï6$ú‹H!}â|á¸ÜÈM®è.Uåe ¤Ÿµò‰ã6Ü.üÙD€\Ú$‚  ò”Õ¥Š[¬3¾¼pÖFU‹±œÄyèMñØ7õöm~>v[¾“ÍfV)«²ËCïš21**< þÀñmÑ_Úzìµ÷Í¡ ¾Œ¿EyhóCYÔ}\¡÷„âqßLFâ_ŠÍPÎÛÉú-±é›S°6^ÿz£¨~sð7²­hµFÉ¥ÒÏ¡*¥:Å̽o‹ ëP'ºkˆE¹€îìù àÊßueŒ$£vïzìg‘3AûDÏј?ùúwþÞÞúØ n—¯{Ï_üJhhûÓÁo‰LÓñÙ ùª”‹ó‹`PW3TZ_€ Ð^Z…—Iê«oè: BÐôú ò>C Ý X°g:jôáæææù¨Ü’nb£ËgdÏcèq¤âãׇqËâ–öùÌ–ùøñaäû|üÉ—òæÿOÀ+Ä›çõ·q À.oÖ#…&†ä}_¨Wã4ìgõT GÎl½äšš¸¥r‡˜½Bì±Ïä®Ox/»x)”ÒDb;Gƒºa»´ãÅådT¹”±Ï—††ÏaÐ9ŸÛ&߯7ǯF]]LB;E‹¯ôØ CÞÐ%>Qß«`Ô3hÑ«)åXä‡.<7PÝ¥wˆù;Ãoy£-à PýWˆ`]<†½h×ø/s›K·Â\ »z‚ ‚L0ì‡QÁ×þ%#`r‘øÅîW]ç÷Â;&ñãWó¾Œjº¼æc VÐ9ô•êñ÷Œ{^ÁПpLè'=¥„¤³ô´},T—æÒEq4ŒÿE—ÄT‘kÐK*|˜½÷EQ\%„×Q‚e >…ÿLâ(íŒÌõF¢éŒÓ…øÛÀÈK^*•µmMøÒ™ÌhÈ–ThY;¬q‡Š=Þœ§pOa®T|{çw2C=Ëï¦tå©ìIlŒŸè-Gµ‘`zW=É„(wþ ¦aÇ7|$+§'É « 1ÅÆ`»„I%¬7”µ™ê˜ˆÇzá\”ŠH²} M²àÖ®·€[–ί|DŒñŠé¹‚jZǸÍð*ê‡eô4®ß¯*LÃÃ$>û(„¥GìÀêPvû¶ˆïˆ¸Ò‚›*R@?ÛN˜ãÓï‹ÌØp2h‡)4†”~Ä.4€Ë,±ª¡q¨Mzï„'Ëëé)|iFýŒ^Œiãg=¿T>‡äü§r“R ~+Ô’`ßÇ>À§0¾{Ç-ñŠ®‚XºpÑðÉë½ñÏç8ãûy]ûgoö]Ù-µÁêš? 
ð&|ðâ~á#ƒõf©pÑCÙ;Õ^ÔÕÖ²¿ëܲÏÿØÔCàìÅèÿÆÙªŠàî\AÄÛU¿Êd—èÆaûµ æ‘Âzmß7Ó¾å¶o¦Û«ÅÕÔ'J ýßÏ+Èi‹r~O{÷Qé˜È>’ <ÎV±ÀqñL1Ÿ(~Ù*@*lO 9œäQú[z­èKq lFš©¶|7?U:¾Ôß>x×è=Kw[”ŠCGœ$?ç0S•&|)‚ÖÍ—söw¶8s²¿}ð®ÑšqKíýƒß.ðÈA‘½¤‹ÃlUakð¦ˆ[7sPûj«èªŠœ:Ùã>¬Bt‚=,´~SŽ)ŽŠì'=vdI\Kt$0¼ã…9 Ãÿ©ñ5çËŽ ­¢‹»Òé{á© çD:Kú 3ÎVáYâ)×ü‡í§ó&[ç¶ñ- 2|–=à@Sñø0'eØÚÖÝ`\§z@9bh+ºa³u|ÛSuG"g¡¸êá¶Òç·å²º¼~;³¢unçX‹”àšñ ÕËûá\8ŸöyfÇKÏeÒ‘œÝ|õ–`¯Ù/øpö–´ eõGqïÜ}r$Çcûýš,_üùVc>\:#ÂéõZ軪c4»}?؆Ù*NŒx>˜¯l|A©ÆõÍ /£z„<,BtéÈ#vÛ‡yݵ'7áèS¯Ï5f*oƒt4@Ù ¶ýS¢ùø¨W :tdp©môåܧï É(Sr\„ÃÁS¼;rq|Êùñ”Ž wµx Ÿúz<áÐË.ßÓ‘Q¦ô¬HîÜ1ª|x>§ö@sM“?8¸pœûË ¢=‡ïäî\øtèkë{R–0#…"çi8ò\>Ä–‚ôö§ ôTôt%9MãÙ‰Ô÷ê¶Ò´z¼>¦ñÀCÎx½‚#õª‹ä#ѳÏA–ÄG‡“‰çiŽw·Â¸WH .òáúðíãiçIݶÓ,ž.æ ŸˆžxŽ2U¥ñ´3gÕ¢¯ÛÞIV²µ½µÌ7°øz.¬ БŽoœ-yPáèÄ 4‹íš“q¥-Aùp}øþñàDòs˜Éfúìr¤ºï|¸Ö{ vH˜}AúRDEr ¾’ïA¶ªä;vXCÇk—WqQ>=ûg+­<ª¼©ªÛ ºòYÜ'Cûæ°§µ<\KÏý<ÒI6¶ß+w4`ËGÁÃrÆ3ú‡ è¼ÑvW rÄ&çøC|6ͨ’µÀ¯Rû?ª +b}O 3Õ>àiöÓ× ÿàpÆ.šŒ$Ä’Áƒ Q¦Ò&Èq¢¼ÑíA-|ÂUÀ•crÒm¶çÉ_ØÙ÷n#åsü¥•1%@<î¡'ìö¶¶˜ˆ,4Ú’€»©™˜­Àu½=á9Û¡9ÁK÷’:®Rö¸˜áÀ‡žp[Y&’ý%”ÆD¸21ÆêŸ‘d‰Íª©÷Õúרakh•£‹ú˧ã)ÿéîrõÒßI@àªÄ KÅIÏz«Ë_.BÁÃÅ-l¦8ÞI©Äôûãl©zò8`V•5ƒt»æØâabà`‡Õ±ÿh¦½õ¢:¾rpñJÄÃrJ?17gcET˜—f†ÙR¿Ÿ{¬_§v’,~bç¨PâO¤G™*V|¶Ãæ>­½¯¨÷n{J8´ç³Œ5já'Ä~Z–.TKøšB'á'„¶"óf® ¦™ ñk5¶X<ü!Ö°!ǶE'sÌ)ÂKød«-Âéõë` q`‹þ©BN‰Z 3%ñe<û , ï³=ª‰¬ˆ8å|ô(% =Ö´· ù*˦Ä-u8y´¯žžT‹ §õãëUaaˆý¡3U<óð Tt³þßKÚ%*Ì1Ws˜)‰bè„8mù¶g0Àü)ãË]=@1LA ¶ø§>ú#ÏQ¡„2®&2¥rRˆyP,<£^c7o3È᥆Eô mÉnì(=׉$¯ýÜ]hxCÐÇx*Ä<¨i›xÚèÚúSF…).rej%óòûrǧˆÇN¨cc|Ó"t­Ëu£·sÝo¾MaË´¥ZŸ ÍMÖèÐRú^³£‘pаÎè>¹žtØ»2Ï^¦§¶„X{ †<.)Ý+ýî¢ZŠ%*r}n”©à ·ÝœÖÀï}$ K4Ø”>™´êfÜ;Åì=gú¹´Y²5¬¬e½‡Ù’G£—ñ¸®Ëg9CŽ$<¢ Q&"ÚÖvÆÞŒ;5iH‘(Ï´³nûåˆÛÿD·²=†ì֧и©òA§ Á¶ûÞ r`¯-p˜)ˆSÌjQ¬O=lOdЇ(ÉÖeÛ½]PÂZlÇq¦Tîá/äîÑîbEmž¤\FBä‡H)Ƭ)µ²cwn^3\ÄN9®}Þ)Ø„v½±ÿâ5OLœŽ2eIÏ£z"^ÜÍÅ·?YüÔôFÂS¶ãl+> d]+ªÐÌ™¬ÌºÚC±irDB¨ÐP·VØCë½—&sµX®·Gj"–B]?FÔD·å÷ùðÆåÕ]ª‰XJÖzTCbÌ8nð±f~?I¼$l®óöÇZ\wLt‚û+’dIb]WçG™Êa+Š º1vÅé4E²”CÛѶTB10¾²?+›Rɼ@r#‰“ù1c …’ÆnÓ9œ¤HŽr ü ÅÀ27çÜXX°zK_Êh$¶uÅ|”©:>б«/Ã2óÈ`˜¨IÄ·\Òa¦T4a0Dó›£)ÏÓOŸ"AÊ¡²‹æ! 
†jŽÓ½Ù:¼éB3¤*?<÷»ºÒ½î|X½D’šߺj>ÌÖCdcWóîŒ]·ÓãÉl$¼åš3U)çQ0”s±å $G’@Ù£÷C9ßÍ#À–Ò)ÊOÒùg, µÛ­MSeø.‰Ä»®œ2õ(Ç`Ôû_pÇ§×ØH^“^O5†Ù*BåÍZäqbÂôa@db|{£âxñQx*3•7hŠ3\ð½l}Œv‘ÊåX„«ƒl ˆ,ª°?i'ÑN~—S9 ĹeÛ¹|-/ã¾ÚI$^­ÁÞ¾)cÞÕg“7)µ-»Ìär²óA_‚q¦6à½-Œæ'“\Ob|§&c ©A Þm³¶ n¾q7·¦$z9¾gf0̔W}n ï¶g¨¯Ûæñ|Ç#©BcBàs¿Õ™&@Є4§ ®ú‡Øªÿ„4Õl­ G5Íñý/éfŒºA;66’쳓\I%0gD?ÌT޹QÈÒ–ÎüáB“D Ë›’x>Fï 7JæÚyÖßI %Â÷S³Gï‡×¿í©š„ÝÒdòhŠYö‹_·HjsÊÀ 3¦žnÙ–ÝO×SŠœ6g ùAÌ+®<Œ›q޵("¼œ1<¤Ç1ˆüÙ¾ýù®r–›ã|ÇHƒ,¥ß¶_o}GñÈÀßšëk¬pÊp5ÒwŠ4ÊTý’¦PÞØvsêˆîx:šå¦hŸ[”¥"Kˆ;ÆL»ïªÛ "Ìö~2 Ÿê¼¡Ì¼c|çš?,ïÞ ©¸„€|T1½ïº™ì›&ŠT\S§1!½@K‚=4ÚZ½(gÞ’‚ôo¼}ñ§gÚægޝwÅɲ[Î{òŒ$Ö„9%Õ=Õgë Ä…äøìzñ}¢YΚS~ã n¥úMŠù ”‘ƒjš]ëåˆ+çÍ’óðé3¥´,¤F>+{y_z²¤irNŸœÑ„tˆëÂ%NK[aOp4MÎ)Ï/>æ 5++{·ÃÝ´æ¶’§8çf+ç^B9óDgЩÉmÎHÆZRÈò|]HÏû²ÝžòK–,© ÏJþ{pµÏÊבaË0%Ö$9&Ò/\2 Ìþ:ì!ìø–D ’$K–ôKJ3p®“•—]‡*2[Én^Ìl­2JH¹ ãdçÛºúl Íl%¿ñ…eJ¡YH„l#Í3¦×ÛMƒ—wgK’œ“%¯y ‰ú‰ý8l¶fk’$K²T§Ä)yqÒµùãº|tž$¶9Áá–g«~ô)y>\I0×Ý«ÿrf+éC³A–4-J™‹å2»œ/Ϲ6Žño¿³úÛš³|Êq¦í}&ANö ºn^‘Räò9Qz¥˜ï$ÈËòhÉÓá*“ωÒC“ §nE\¼Œ»is¢ô „Ôå‡zä¹ðù2ìÔÌ^Ò×?G™zHŒbò‚Ôóâ>hϺ@–Ä>'8žÆ³õ…ôÅž™ `F69Ð×ä^RæFÃL{+î3ÞÎ ˆˆÛî®~óοåÕ‘ éÒ–|§BN%O×s×E„ÈÙ’CÉ !½l˜m3õšð•TM–ÇDÈb¦ÁL±ð\EI–j‰!%/ ‡“m/¼,ÎS© ç7ÌŒ†™zx—bò‚tsa;Y?âU,Ç1ä(S©QL_l‹¶´j²Ñž9KJE,ÈŸþ0S²ÙCT ô3Óý¾Ö@¤ |¹œ1¥úሬÉMùM¬* ä´ËÉcH©¼ßÇYór̨TÈY¿ï=ÙÁa?}³O+wßäSÈ÷ucðúJ‰4™O ßìÃLÕ›=%k€š'ÿrfQdó’Ð9 f+¥k(C¾ijyµÎIîžSº±–”~&~ø¶¸Þ¼m WIh’2͈ٚœÄ¾Þ^D3K’ÐýF4C‚õÃÁO@Œíåð¶vMÛsæHs˜­‡ô-¦X˜¿±Qv]Ž^³yÉÂ|]G™’‡“5¬ÂEÇÓ²ê" ätŽo¸ 6CÀJ•¯-/"µO=TU ý»Æ”ï=µ™šûL UrZøË&Š©xæþzð¼0R3yIç^lt-û˜óWÿùJC² ‰“¤=ø«_%¢©|Jèœ7²T$‚1W³ìíF}zû“É'7í󎪤ùðÐ[Ò9&‚ÃLU÷#ïÃÝ1;si qò}éBrCOªCæ‡JÔÉÔJØš½/K’>Äë1óó2ÂÍLµ'Åw[#xÉT˜¯9Ô…›hÿÜúªîZ†ätˆ‡ÙzØü1_CwáfÇías &W’š!)'ï£L=ä‚1_³}Û‚K€yñv•BÒÈ9“Áq¶ŠÝwŒ_Üô–©a!ìžq°ÜõvíAòÃ'ý‘ü9Þñ†~EK¡¾¤Ñs÷ü!w?pÁƒ™ò‘½*¿¤‘qSLþ,<¿Üý§[r‚ø ­ÄŒ œx1Ý{†÷qIÊê92•_¦ÍÇ„„ŒÍ3llïuîU&Y Ѭγõa¶^§˜³ý°L>¾_Š^H$9¯ãÛ4Ì”¢áÿ ŸöÄÚÎÚzQ¶"9Gt4ò?[P¿wYVö‚¨ ¢9¢£áÏüdøFÓ˜™â©‚ˆäˆoC’x»³eëLE IäFÛ*O‹œ¤a¾Š î=ž©d‹œÈ9 f«NRò‘êää]oH$Â…$x|àãlå-ž¢x°½•ÙšE†”¤TI‘¾/iÈ*}Ø#¼nûïé÷É‹ ’3OÇ¢ŸY%šÿò¤¥×—« ’Ï®y_üÉo¼.Zµ#•Ü-í¹-ÿÁ^`q‚JAÝð/<ÌÒÃvÙ*.b¨XÞ˜™e Ihùh†™Êy)ä(Ÿ°6¿çшp‘r×Á¦~¦¸þuͳڀ$ÿºYÉY0?bÌL-©²U¹iê=P3( ’¼>«Z)™bFbÔ‹UT I8W³U9ŸÄb ÇêÍ!?"[HÎé¼z˜­:ÀHYeûv{†í&¿w~Ýw¿Ã<ù')ñdÆ:Ê’’õž‚¬{àÚþöì,‹59ƒåÙÛ¶˜Ë²3¥Ä¹Ššôö*ô<"Ø|*:=Ši.ýMmÁ7¼Ê59f`³ZËD¦“jù¼ú3ÖÜD2ßg¡'e£NéwĤ‡W×rMÎX_Ä­úíIé(Vvbcà+lSÅ&§¬|HÃL=¼;1µ×^NŒû˜¶^Ã,ªd¬|{ÆÙÒ\7íŸÜ=÷Àh;'̰Wƒ¶sòûTUð‘Ø¢¬àâŸ}”U!/IòëÒDÈlQX€ž£fköUå%É~=Ù ©­%;- Ç§h¶æÙMjRN´£˜‚þï~‚ß/ø IY½Ž`”©‡d7¦£¶¦ëFû¯ÁÄ…”SÖ—1ÈV ¤|ŽpÆñ0¼Ì¸€$eåófJQHmõÛÄ‹Åj¹^RN’{rú˹­­¨_®jAºÏõÏzæ¿NûCrûÃÎR›Žn¶Vj£"øHüDûC† ¬qñŠÝ¶M9±Pir븘­‡=3T@ú“í-m ½¯ai4‹ågë!ÿ*úzÛƒ¶z÷ •irëß(SJüC® ä?A¼™oUÅGÒa¯ ¹.ÿÅʰéè·l«ä“óa~á˜ì¢U}1ÍÖîRh¡ù¤„¸áXúSTpúv>2dj¤ªd±LGYªw{ÌPm='Ÿ•Ü#šB²‘,Öyÿ(S5ÆHi+&gMŒ¾Ú¡L¥Š¤¶.î ³%ZOL à  ´mµö'Q$mx-é;¡ös¼'V¿Ä¾Q$éf*3êÁ⛳ôñ§"ÿä¬ÛÅžÙv»ýôÝn—1®_ïFŠ–¡øJëÂèSE›¤ëôí>ÌT½ßS²P¿ÚÀÜ·€W¨69Ÿ÷ʇQ¦rVŽÍJŠ(»áʪdî£mÕyKÌË-Q?{VGó³Žh÷û`ßQó¼³Lrw|Öq¦´ #äøöpZŒÂØhê·‰\•d/¤™9 )nˆü³!XŸ,rUÎÞ™¦ÅÌÜâÊ…0°ÅW®È­röþ‹Ò³iR|øvlxëDÖ«$ã~e ²Õ>æq~õŸ/¥+¦ÓXY¯DoOR”ÊU9ãö‡4ÊÔC®³iÛ©.lÁ—Jô*I¸¹†™*2õ¸wPöáÍó6»ˆ¯Ï¢ýÎÄö ÉÝë²—£ìáön·î.še]Mrw†ðË…÷®½¿œþ°oëYÏKUW‹ÚÛ«‚$¤ø¨ 9ðõæûèY¤5‘˜ôÅÕão[ìªSiMd€ÌC>„´›£Îã⦫oÉB “üÝkFF™ª_Í”šcºÂÌÔ|>}T˜ªa’¾ó³U'A17·Mßö“.DíKÕ°œ¿»ƒeªªêøHó½ªãÄîÚ8д¿²Ài>¤„M°å *~e)€»>¦ùHûôZ|¸q«ø•¥€:0L©9Š1&^¿N¯©>Y±ÊÙ;Æ0SyÇ{YGÈË¡¥l Ýæ»Èz•æî^Ö1ÌV ÅÄÜ+Û0{¥m_’Ñ«$wçÓeI‹:>3|¯é˜q\̽&P„/Q¼¨#dø(ê°K`ÊK³î¥"Ó¾˜à#í»ì-n¶¦^(º—ˆ¾Ù”ù/¯2ÿ˜ùB™ ó,/KU’¼{‘Ç(S¥{Ï™9ê2d[FêwèˆR•³w>žq¶j¤“2sH“7GRN=¡©J’w>ÿa¦´Â#äø¨ðÀ7ÜçërWU¯,x…GÈñQá±Ú‡„)Ö7ªWÖþ‡ï¼ i¾Î.âªê•ußðëúÅŸ½Æ#dæ¨Ì@޽0»•Ôªœ¼{Ç KõvOy9*|œm¥ ±*çî^ã1ÊTNË¡?b`uÛ{ûkVŸ¨R9umKvv ÎáŒN\EiâB³Bá’þ=!æüí³3N†ã5÷e=<›3à€—Iu ãl‰¢õƒ÷“ß­C¤«™YžËƒ×»# ™‚ÍV+ðQÕ9ü­Ü±ÕÛO~ε=  m÷]$›iëg¬QhjQwóÝ>ÐVZÎ,|X¨}"ÛÅ0ÊÔäUÄ‘,¿(}Q¬°çeïå×[½-ô:Ñ3¼æg”©!$hØ»mJ\|yEÃ=÷Ö(S/kÔ*L½Ø´6üŒõC…ˆ—ô ¾«ÃLBHÜ9f»…ÓÎsJ³tàšnËî™sŠ4R×ÝõDê4çké·›©Ö˜µ¯ b…=®¶±8‚îœ µ1  å£X‘x—;•ÙÇ(jc4º|Ñ[XßÚb’¼vÇqˆê¨@˜U/eªJæ²¾€Ê‹÷Uû¢ú`– 
4K®é¶êž5§@#µž(àÐmÐiÎ×Òo7S¬1c#® `…½®¶±8‚îœ ´1Lå#X‘x—;•ÙÇ(hc4:|Ñ[X¿Øb‚\»ãtˆ u Ì(„Ë€F™ªŠ¹Œ/@¹sñwí‹âƒ‚àf ;†µ|kŽÓ²ÛOkþßÂQ{·'{Ù ËÃ7oF© Ä1ÎVËÉü¿íð.é!ý‘Dôóƒ{@ôs[œÏÛvÅÿ*lQ ‘‡5ð Õ %o†œ¯•¡¼þØ·d| ©û ¯ÏÑžvå%<ÛA‘U~~ûWŠA‹¯Z²ç›¨sRà/σ¦ 6¿Y·/¡a)®éàÌ¿þÑɈ @>´ò5›ôœ},f”Å•H£LÕ':(^õ€´ûÑß(ª( ‹£gÃl…=bÏ|m¬|·`Éÿ½!8f³ÞÍó˜HŒ§ÈcBbùçK•Ré°q¥ŽéÁÖÈ Á̘é¹Ú™BÇR?8á(áxET¨c“ýAÍåÌÀ—ùÔ±c3£Wt·lˆ-¹Ê™Ÿ~l®üìü~€bœßÇØ‹ÅI^E&­q-Ó0Siçám]ÌÂûüºÿ{A>#Zxmñx¶ù½Ì‚_*¢ãz§a¶j,(Á5v˜Í^;gïf†tøeJõNù1¿`#àloŸ¨þWÁ¡r½S@~lAwì˜b€Ëh¨‚C<øÁä ^Y?÷kÁ 4TÀ¡Ž}"=YöGïàÛÉËá;y…Nª‚0¢ã!}˜¥–U`£m¼á*»¡×‘(™\aÙ9÷¬bœõqÏ0[uí˜p{­$‡á–¼?TANÁ‚f+»—v#‚nkwM ²‡¸ÚJìP` ¸ª OïåkBƒ Ú™ØÚEIÇƱý›Õ…Ý‚aµÚ¬)"K>:çðuðS. ° OD"´ôáhXüä¸û)PXà'?÷¼qûôsá ;dÖéí*èÓåZÊȈçuœ-›"´ÍÅÎJø¸(‚/Ÿ*°‰ï+ÀAÎvÁ7oùãáJ°  dD‡1ÌT™H'8=ŸEí¯œ-¤Ðq†Œ|g ³%Ž –qŸgˆÕn²ß®:—zàXj²aWBØÁlUS~…ŽŽ"5Ζ"Yi[šñ×è 3K2)ÃyþU¸V­ ˜ôqjn ìþDñíŒkqDÌÊ0’–Ö[W‹¼z ¸3®Å°ƒtF3ï kÿ~:¼x•ù6W^Ðgªøªá«1Á‘–zßKùTЇb˜©Jq÷…«œc@½Ûv8ñ+Á3 åê„Ûk…s2ûF×ÍÓå5ˆàÎ ›~ÀÇ_L ʽךÏÇä[ÀÅ÷âኘŒÅûÉ]gÜ+‚Ã_5ˆ‚˜år Ìí8ðn†ªœõe*˜¶X€¯"ÀØðæíäv“4g()¾›A(¢-ãl½ÀWa‚Äû+™XudøV@(¾ýa¦¾Š`´{1'›–üLŸP°àY\ÐVúaבØV¶\Єò žEh"bU€&N´9™%ßìò žå%VˆIvd[("¯'’d}_À¡¬$Ù/ß.—·V |+PÏÄ(KßW÷s "¾ôáÄcŒ$˜îŽ8eTV1(×ö ³UV£ ^ÂP¹Ùð붉oïN(PÙ Aq‡Œ³¥â¾BÁ‰MôÑûÞobQˆ7UÀR8jßÝîÿ³X6ZÙÚz. óé1GÁ%ÂRãlIÑA((qYH[‡ÍçRøtª\±@(Œ%ZYéî¼÷­Â§3Nõ"LŒ¸6’Fnd°Œ* ´ä ò(S¹LÞðžyç=¯E¥Ÿ1*q}¨UÀŒÌÃ5—7¹Þ\phÁ•\ñ8ÌVí"f^7oÛ5¿|ëB ªÄ-5Ê’œÿˆ=qæž½§Å¿¢ÔNýEE°ÈpävV‹äv‰qdE–3 D(jœ­Š Û0°ý}öKÛVQ >vǽ"6Uë*îäû cŸIOè·`SD"îôáTt:•¹O´ô[°©aeÄÌ´Œv¼B¬3VDiœ-A™"¦}Ïæ™ÒDR¾@¿wzñ, B¾0‚ZôÜS•넹ªr”©dŠ8Ї“Ü7G\}îàÕ‚=ûj­¼ýS)i¾eº¹eí>6âÚŠ}çróÁJ“Û¶3>x¾À«‡¢{gKe•°‚¬rç¶Ýú¥ÈDA-†¬˜Îû( \Kn™Üc†µì¥¶´¨ÿEÂq¡/w×åkî΢mjÄ•#9‰ve‚Ñ ÕO ,Gì¹ûŠa¦²«H`ÚÑ'®‰ÍÉ!Ô¢uìÞé„ýè<çµßc$Àr†Ï\1:ÈR‰"pÌU¸ÇÍA0eÁϼ³¥ Ï_  ìÁdÅç­ÏDTx:cq\ÍŽAK ¬¦8bZÑiÐ0 à˜÷:.°µy]¤è´h~Ü«öîlAÙ;%×~‘‘€Å~q»³õ²á#°äiXI{³7H1äËÍ1†²‹†Ó^hz9T¯ã°ZD¾ÜÐeEó¬œÐ1î’a¦TO@4×S"xÙ¼À¹;¬ˆP'œ5©1§‹Ê'… ñ‰xP'»/z„ãìíÙ^þ¥H@u‚ì xð—ÉPÆfj PgÌ®ÃÑ©‘<d-œ¶×ÎÙ–3ˆFðm˜©égÐÝþ2h }0 ©n9Õ0ÃÊŠ¢ñõŒ³UÖÞ !û|›»­Rîc 3¬,(OÎ8[*¨ h•—Í¥2 «'¢@î½O™AfD‡Mü¢nëô©™vSåA¢aÚÊËÀ6ˆAŒ‹uàÄÁ×so|+݃‚½ŽØü_†ä PîaåkÁà³ëzш¥' *Ï™ç™.¬ÀÄÎFg8Ê’àsÃlª• ÄêSˆÿȀݻ—H¢ÝEå´z¹S`ígc¹3ÎVYIÍ6Ñ.Ä?÷â‚´ Òæ,Î K*ýâqðè3˶õð™Á Ø f÷<14CÕì’l+i…Ìšbì³fªÀçÂv4ÓËE÷°MÄʼ "ò—»¡‹xœ—Ó@õLjFXˆÁì\3ð8¨QortëN¸£ 2d÷"Fò¢Ùòx 0¼DYÑ{ÙèVÆÙ€.ÂqæYf\’Ü–`ã솂 ȈÝý—0´§Ú­qsΊø‚³9Q:ÌV]…$ ã)vD!»VÅgn&?ÃlÜVƒ ‰_‰Xœ·üƒZûý¼™ È`ÝÐ\ÏìÄNäíìo>(Hñ{Øͳ¥ZÑÄA+ŠÊ‹0}$Ì>ƒu|Qb3ˆÄ»çÕ¯-0{áÜdhÄÅ Ýœxs€]ÀC+8»`gDÝÆÙA6«1[á~ ›¹Y²î  °˜+7ï›âbfè,ˆ@ÿÿ7õ«~Nǰv¤nýûµ/gûjœ-AèCq>"œÿ©ŸÁë¥úþÑ‹˜† 0S[ìܸ¾[0öŒËÑgK½àA_й.XïA¸^@>×—úÒ‹\ér³·»âå3[ó¿öÕÌuí®d¤^½"6Å,~»Ì#¨î;~ -ÁÚ#Øh‡~º¸<-K; , ú.€¤Ã¡T³„¬%¼=ænßÞª‘0¢ëWq5œéŒƒ~õAÖit)ì(SÙ=¬®… 8âçÿLîQ|=b®…c¨>èƒlàyΩkœY ‘'xœ­J û6:}Ò÷š éÉåŒ!_õ‹Ï›¯`úŒ":h Bš·µùú³Ý¿JŪÂôF|å¶é–wðö=Z ëþs%ë(SX]É =`±J XtŒ¶žá?W²³U‡Úƒ7óRþ\úXaÎþãgK¥¬¿0!”¬;#³½GÉAxY¶µYO–©뿵 µÀÖID½€á#Rÿ'QBà /L'n?™¸uV»•ÌBÏs›®"‰Ä ÇÙ 2"ŽPôb–”+B!(*I4èLüp΢-^[Vô0üà-H#}íîè¾ýÿ*Ì_PBúa¦_Œp" 6,Ån·Åó>$p2âø‡7‰à‡·  ”ôF"%&t°x˜­`Œ0 E”hûõÜýºÇ‚LÈP!7Õ8[DTÑ܉yu{a“½ÊKdÜñ”1vh—™@ྸZ± 2XH”q˜)Eãž´m{¬„¦ºÌ¢ýù•™m[yœK×AEÛeÍë¯×wfÓ»xä.ˆ ¢y—m¦õöÊúÝ>‘ËH¨£»‘zà•bÚžùR‰€ e˜)q-ODÏðÊöŒç‚ÍH˜ã»oI( $›;¾–N»Ó ™‚¤ã8[5Æ@rX–¥"—ð6¡!XÈ=5ȸ•ˆ(~ž^ö¶¼¯ 4"êH7‹@4—ù|2Û>‚¢@"VˆYÈû`2Y!*N¤ïfJpȈ:b«l¸ËÎjz&§DfBIâEutE'ƒµË”ʦãªùl¿zu<ÈdèØÔ×~`>‹ »dO† £ÄUÇÙð#¢¨ìOb¦Æs‹Ë’VGÀ~qT`£L½²)H–ŒµvdõßïXˆ}Zê+bçpr¡]âD%®:ÌTök Eµúsƒ¸=ÿî3' È*!­¯¾"CŸ@ÀW{ÕèoóӜɅGÈf«?ö‰¦—ƒØçqxZ¢l‹à£äiÆÙ*œE@R¡ð>Y¹L½ºQî&£­ÕÁZ¹X(ëwæÑ6XÄ~­Ï¯˜œÈöüIÜDhõC"e³Ÿ´}G_é_é`ÆÙ*€Û¸éí`ðBH¬QA›ÚŠ¡|Öã÷¯‚rKçqZÛÆ->#;šzM&4‘b¹ô^§5˜íD«/l-Ï\)ŠPnßaYrÀUó8óÉ€»r¬°2; }%l;ÆŽ`¶¡EÉ|Ñ»?7)A$(î»ßJ°ª½$ë¿òQ´³S6Bëdè• í8[5“ Uó5¶š€ o¦ò¯#ðk§GÙR¿• ZcpS“CöB ˜ëº¨Ñ  ¹9zÚð_ÑÛ°âdÓ5·qø½9Êe–èí0S‚ÞF¬*òÕ}·ëR¼KA ŸŒçb‹X­9 »`ë ­Ù'Ï á“ñ\¥Ör{f™G|õ6C IÉ~™¨–¾b˜)Áp"T‹é1 bSû[”ɤO†sÿp^ýpøë5ÿ{š ¢FXz‹a¦j¡X„W1Iˆ td!2S“Xn«A† 7±ZèÈ©a2daýðõËúd<÷ô6â«Þü¡.Öaçƒ_„‡É,ÑÛq¶t%Z‹ÍùZ[‰Ó3=¥uÑ­ÎDo 
ƒ;ÌÃ2g(É#ø®mŽsx„åzµ$ò\8ÓâžüŽG¥ež%°;Ζ8…ãB>ˆªmžÌ…ãÉPï„N„_Á]îÎ6Ça”–ˆ–„Î8[5¦“ðWì†÷tƒ¼3ÑÒ/ 3%ØnDr!.‡'lolï÷ú ÏÑÞwp7Á¯6(N8Ú³_<¥ÄŒ@´t ãlàn„r¡/g5Ùf å/4À½%º±\{wmWµCÕL{Z°>‚÷2E‹ @~û·Ndh„€#H ùä»avn² s2Kxœ-€#àkþb¢kã?”Ê ð1ZèÃqM’ŸŸQÉЗð8[/pÄhÑ•°q"âê•Ñ—Ûdœ-A3"v5NÖn ½ƒ²Cßx—#ü‹áa·­f;Ó[¿(CHˆ .³UxŒ%©Á¶³çS?”áæwT'»õÚ¸#{œ&©»·z!µàÌYd þb!Úï@bHñ/?À“36™•䇺š ¡0fn²4È—x94¦T"©üÈ´%~+Ú5±Z>€Çž\žÉ,½;uõÓ5àÔU¢?~Uûõën *áÕº‡™ ;ÂÚh%ç°Åér,ª@øúîgàú×?¤;bѶ»^Áj‘gæîRn+ãÕDºÇÙzAº#ýᜲÛyB鹕ñj"ÝãlHw¶½)Á8ÕÉæãýû_Á“ ôýcs63EP&puŽJX+• G³UÀÑqkÖm`;#™CÝÆ­oD@êNøt‰FGìٻ㠙®Ó›3¹¦ð4¡Ã=:<áV,Z;Á!ìZ†§;•–š~PcôÌŽ€øu@!&È2=Ì C‚HGü0À Íž [A­eŒúÿ1c̳ÁT¹Ÿx |˜âÊô/ãlÕˆtÂŒm+lD¨,wa7½òa‚+Ó¿Œ³U@MF“ÂéQa÷ ±àÖF]æžÖ,M0…³üWã›ú-¶|(µ–è·Òsu;͘œ„‘EÇâ×WlXÆ¢‰b3%Î%bÖhR@h«ðð³J~e\›Þ%bÖ†7rÏ6[«-ȯŒkû6È h°€ãŒBˆ5ñV‚DÂcGœKD« Nð‰ÙÓµ3%çE/J`e¬š(÷(K‚qGD"ÓMê†ÎõéI™ ÔûÝq$»½i´ì)þ*#Õĸ‡™z¸# aZ(Iži|™¿Ê(5wá3zR”–…‰‡ÍV8è!”Ëp÷øvD ?œg†ñÄÛ♾RW‚Rßg«@«"š¦$\ÓÙ'º,X¼KÎ+‚ÙæV#"a™#6•Ëp÷ƒÿ$dÙr¼Sa0 ŠD”U>Ó 3% bÔVïžð<œ‹,h¯ c¿³\ Yþi;°Â_¹*AŸ‰[³UÖPYÆûßü4w½nAVeô™¸õ8[ÂsE”Bˆ"˜©þ-˜/A² ø§_ÓeÈ!ÐÎÛΪ+ø*¾*£ÏÄ­ÇÙªÛ~`jО³¡^m!6ŸØªÜ—@Ù×Znßþ¶;iíÛ/ØÜüÐ,½"b üLàzœ-®#Níj'»ÛÉÖaâ΢N°ìÇs%lÙÐæöŠPí[£ýðf‚?“qgKëˆS8á ?iÆkC”ƒ,»›.æ;EpÙÊ Ö ½R&Ì™àÏô]ÃL rqj+Ï ô¶Uðéä  ”ý‡SŒÐ2Ú’kçâ‚o¥ä~¦SgK•ˆŸ`Äàî¾Àg±ôž`,ï˜xB­-R¶ïŽ‚c»üråäÙ¦Wg«nù"àèYéÏ™¯¿â÷Jþ‡OŒ¨µáؾ“ OÇUFN€mºÄa¦[ÞN…_Êd1’—ÆÛ%È×E,ÏÑöŒ‘—¨UÀ}A‰dÓæ0ä_š°e»è/Û›$bwû¡Bh?Ê¿ û!ØiÂq¶ÄÙF¬0ëÄå³|†¤ ‡Ïïðý¿Ð @vÈ~yÙ“åu§sx™ç žøý8[âh#`Y è/ÛMm̹'i˜AýÂÓ6/…Av4°xï¡]5Ë•‰>â á³U#m dGðIØh¿È#ó|‚ÃÁfªj… x=za !iïìrˆUCAôßaöˆ„ûl0¬òrtNOX>AËÉ3¥«`uô”l6ús·Î[NÛ^O¡w8ª¹øâ{m£Qœ-añöK—mÇáíÊ ^ߟ.5£DýÃÝ# ¢4^€Ù Î1#Ø|Dâ ÁËhO¾ød²L Zÿî5"€Žî‘ƒÝ¯Æ‘Uö.ƒì燙2oӂϺô]V€‰=b(y—Qvg“‡Ù*ðùˆÆ#Šœîê÷~­0±/ÓÔyã&7D‰HÎrÒýôaôØÂ?‘ÿˆÍ£¸™APE¶.ÓwßøfªÀý#Êþ• èô\bVp™ (A¼ˆà›«iÎÇÔ:Äô•Ì(¿ï%Àþw7äe½˜·_®S)(½ŒÍÓÛŒ²$'"ø†¸´wa÷[Z­uûàj¡3Êÿ®4H°;šVúɯ„R/ót7ƒ „ÓéÁ€#;Í+Ô]FáGÚ©:VToþd¹ùìz=ït0Ãùïà}‚×ѧt·ÌÅk’‚¶Ë<Ïý8[UÇÊ/TWŽ–X&¡Ê œ_žûÕƒþ»ÿà O,í n/£ùþïÅë]÷Ùk7—Í{ 1eeáe™8ž8þ(KãÐÞÕÁVxMûí¨2{ìÿÁãE Hëm[îj"ù&tœ€ñ$òÆÙª«´xy0šºmô¶£ ¤œ ñLu*ؼØC 2qï>0´à÷Ô…ðÈnAà<1;‘°¨Òq“Èc§jR @½m‚^´þOh=ÁòßQª„­ÚÞÌ,æ æÉ£àmãóíYz¾ÀïXƒq¦·(½í¯vH} |? ™qü/jquCÚ½Œn{’š}e 3òNÌ~”%‘">o~ªç6¿€¸§ðÂÿƒm ˜:[ºYzŸ“P†‚»°f*/B‚ç-¯v%Ö´ûÅaX™ übë³ Ì:š´7hцø¿r‘»°gK*¡Ï÷Yœ+—a¡ˆEyÍ áÿØGHÝ|W¯þÖ›™EAF ìNÀ~œ­ªý&àóÎk߈ޛ5—ü¯$63†ÿdPus`¼s¶¹«Õså"y'd?ÌTÙÇMotÀ„a4Èb&‡ÀÐ ȵñKGzŒƒûÜñ¿ä„2ÿ‹ÙGTÝä|ù\ªgšSwböãle$îÃ+D¯ƒ æ÷*›à“#ŒID;†ûZIUÁù}_sëúŽŽ™Žµ»Q%B3>Od”%Áõ#Š)èåjkà¡ JUþW‡œ‘wF7Åyý}Vz¯ @3êOº`˜©‚.ˆäš~vcYÚ2L^ï ç©Â×]Î'KØewé&xú<#%*™ÙÝå8[BRDJòÉÍݲyvü¤g¦-:Å™.]ÿå ÚÜ1_s/-D¥ð t„£,‰#ŒÄ„9žÏϧ-(ÏL^üá#€¾¢ýiP^½w,Ñ”J9Ð޳U“‰NøüLß³¸3Ccâ)…q ¿fªêP ÄZ”v´õYúB4E)ÏÌ]¼ óÿsDv?ß>k\™ÊÌ Ðe 3U¸¬H •褟ÞNj9*Î3S ¥[ˆô’*‡j·ƒèTÁf Åàß;wEÔ@Т¡mlæþ™†^€„Â(KB(DúÀjŸ †Ûóû$è‚ÓŠáÝ-DÔß0aód? zÁCfb€„ C5¬˜èÛ÷D¬ë‘°+)”õq¶ —¹t@ÒlðÓ‘éL¡ˆÍ›;¡›žk±Ôaw¬ö~æè…DÓI/¢XlU°°ƒD²çcë™Ñ?ÙŽÈG cßn[¼œS®R8 ²ãlUíMÜ@{T§xœª®Ä§ %ÝØ ´™p~ñd‡À/ÆÈ¼§0 þµóÕÏ‘•À˜×Ó¯¶ö’¢¢+sAÆc˜)¡<"ÃaniÙøtVøÄ$a>3 òÎs&V G'OùuˆZËL\òfªÔ€dZâC>{òþ[ïcS63Pd<ªú—¿aáÉhM{9ÛÌ™^)š9wÆãËKxs=йåp¢¶à03wAÆcŒå‹ü†í^¡-@¿j¢ C’£(ÏB"$>ÿÏ6ñìË41Oß&^ûÞ~ryf&-Èv 3%+p%oñµÍ‹²‚²MôÇ—ì„d4«OL\»hF¸ÕLZ0IJ$*Ry3 · ÁeS–6³ï”lâ#5Ƹãuÿý¯âU…² ×1Ì”p‘Ù°”ßÀT{ÇÍéÓI+È+ìs¦eb|ÈÌ—p\ôå2Ò·»ÇÈY`ë­¼'¥}/6a ±+´ä0S9kJìLJÞ.—Šòህ y§CaañrŸÉV=÷°DrW :È1vên®Òºv¼Ûu×ìîŽXx‘?(áÈTw±óê÷¥ypŽX,˜ÝÌfÐK޳¥‘»C;þóÑ"™›™R*cìä/n²^¤ræ…eâÎk‘ ü¦bçm…ÁúZU¼Odf¬žÙVÝm_r0uA)gö†¼Ï8[U{[`yÐI‰m˜êܳðÓ™ *ÝK¤y̽ØÅ6önšWrg¢üt¦‚Üïg“~(`îïx‚ߎ—Ð>ûÜüÀਗ਼šå”PÏBòfJÜP$ƒ ¦²¹?'×iö™˜™ÇÂèÖ:28heÚ9óÐ`^ Â?g’‡~h”¥LÖ¸Tý˜+[F ¢YѶ 6(r?hˆ;X€÷ò­ ­3=TÂgËî™×Áäãž,ü0§U6:1ÖÐL‰²èÖ6*ÞÕ¼{±¨²FtAãl.(’K¶©vL /Þ]™Éh! 
˜lG ³?<0Soù_ŒÝ€Övé׳EGŸñy|ÏÌ.ßÔj³f e NÑnÖ f<߀A+ʾçH[B^E® ­`'‘³Åë %à3õ$±‰b‚ÔöðÔÃIƒ‚'Š.t ­äþ #j›¦O{¸žØy®Àj¡5ba:¿ð¬‚˜âë•ÛNT”}¥–ŒíÞÃ?;SžÉt¡«HÃ3¥ }⵬†jQØN"1z…ñ3÷õ¶FžéÃ1´‹µùÍ—W/\I¬a¦–eò¡»†X{÷B Õõs¤¶ÌW»Y ’`”y€Ð_]‘’²M‰{Ê­ÏÐHíÖŠ\×3U÷ã/±…æÇHÌ|;å¬Úá¾þ`º"e~pB¾ß¼žƒúÊê+_E¦kœ­‚éŠ@ïAê·¹ šºn/4lî ãÄ}•TW$¶ÐZÉ*t¾{k¥Ê÷õeº"õaÆ€¹;Ww¨¢È|ÏË(Kâ…#±e€‡E´'¾zk¥è2÷õ‡Ü PR(píéOm,¢a­dFY±A ·0òmÆ”C›½vp^‰¨„ûƒîŠ„”·ëò–ƒÉï)1@¦¬Hv²$TW$¶Ì±ÕÁøD‘dîë¦+òQ.º¢ÐÒ j# 9@æ¬Èu³U÷T~¹-(ZVìý~uF!.Èì×\W¤£Œþw‰‡ ó>Hÿ« SV$»ÆÙ’eÈÜos£m!ú•¥ªþëËvýPRæ» ±@-â—FV´}¦­Èv 2$lWä¶>œ÷Š"gÞ½¯S5‘ÿêvu l"¤Ðlq™Cùñ]ÊÝ'Ί°ÿ0K)C‡¶ÔÁûËUsš³'þ ³V;im™6:Õ Eö!8+ÛvëAàl_Ÿ.ÅLï ¯EBl˜)q–þ²ÂÉÿMm-x»¼ „"#¦9»çº|êª†ß ±H[YØÞgâ]Ëá)¦ŠµEw:ÈPÕùKm¡)ru²úè3‘ûÕ÷ŠY‘k²ð€üÏxfºý/|}é0SUSd ­l#¶t×’§yñØ$Je¶¾žô‡l2î©­¨¥~ÛK"ÿ…Œ¢`DæÈV™m…ìŒ×=û`ƒBA-_ÄÜ9&t/.ði]ÃYþBCу3%Î1ðU˵Ĉ!db^é2§õ‡s 4Ñ™ÉÝñzáü•‰¢oeI´4E™rڟ¸X «W-@.ÄfxkSO”ÏîŠØä(KUke ¸£vìÓtw–_hÿÌ‚9ÄB²éÙ_ÝåJï7Ò+ÑR.¡-5ù ¥ë…¹"Ï?ÌTá#Áe{q›éHìšIòüÊügìËó[ZnôüTµÙ› …³FnîÄÖ':¿“ü#¬ˆ‡Œ„›1#‡Ax"”óRÎ×qÅË^Ïûå¿ãßÙ_]õ‘1)fJȵH¥=¸X[€³«§2é/dÛ¤¿ÐȹsD¹iüè×2WŸ2ž»Q–2ãaަL<´`ø@ãEûAâÍйÐþëÆ0°À¢^˜5rr£,UM£CÓ([QÚ!]¤”¦èÞi³Hl¡Þp Ý ‘áß…ü"m6ÎVA›E’ faaÔŒ»ÚN¸|!ÒòÞv®+²QŸÿg›ÈH޹68×VH Û¯ý›ƒH+r]ãl ט-ôN0m®)4™ýòõ½zýܵ؛²¢ËàcÜž« a¯È{ ³”B1¨ÒÍXur>/ºÀ@r™4@ŠkÄ.¦Boy°w˜˜)Tõ·Ã³ǮB%Ù+"ãl~d¹Ps¸mšÖ^a)¨Ÿ™0–36>ï|žÐK•uv,2X¶iû@ÚÕ½U¥_È,é±q¶ æ+] É7Žùµ«žèVE !dØ‹ôÁ€‹…Xk"+þù27Uè!~’jû²º¨¹;Üv:]]œ †Ì‘Xfªj´ýåÒÐg»0%Ÿþ^%…È|Û+»–ù/cÄvÜxÝü¬W••î sdôâãlG3;C–^i»*Þ›è"Ë·ÕÈr¤ÓŒÁ°4ÈLÏ$©@B·/¿80såV…ØNgƒ MC¦Éȯ²$üZdÓ _»&¬@{ T*º‡Ì·½ÖÙûíÀm_ʯh±B¦É\Ã1Ê”ðk‘NóùŒíllw¿d¶=åö‡Ä!`ØS+ïÜfÓ…R!etá£,<j,bzV—ñª˜!qnï [âÀ,±°ž#Oey_bÖ KF;ÊRÕªûK¦¡S±¾-ÁÕçugECæÛÞٵęߺàÁš Üø®T„ å ãlìZäÒ ÆSo‡À]–U¶}`äÀ0kçâd‰öº* M檜a¦VÓ°õÖ"æ+‘ë(gŠFÎÍœeóÍ/`ef¿ÍNä ™—ëj„bÐs$ËÐ2»“²ÙHDâDV 5úËa¦Ä_FJÍ U¯¶ Ó¸J¢h·‚89|ûE"̶Ÿ A­ß¢ú¿ÿ²¡Ê¨GeI(¶È¨¡õõÑfćýd…‚°nD`"Þ‚–¯“ßÉ®¶}š,*’-Raö«]éØ\Ø2÷â,Š„.#Ï6ÎVÕ¤˜5oÒå–Ùÿó»³!qoÓæ²…Ȇ™3œ1¥¿Ÿ/Sˆ„1#Õ6ÎVáX³Î)Z[…¹d!cÈäkûXÉžK„¾¤Ðÿ9!È3cÓŽG|Ú|"“ª"ÁÆ"vŒq©‘„ûp s{ámMVoÝW©ƒu}½‘rð³Ojì/Kî˜6:·‘% °‘šfJ¨¹ÈÄaZÛj«­ÂâCnUÉ lÝ«OÍX^nks2vD€ $ɹa¦„‹\œW9­ Å*Ð+Z†Ä×ýÁÎE á{Mb™"ï_,Ô‰f£Ófªj lšp‘åo6Š^¡*dÆ®ãóÉ1«w€ñ56;©Sè Fâm”¥‚w‹,›9¸öÈ-/³EÀK*eᾜÛ7fTY;ZøJÇæÀ” }FÊm€aÜ"¿ÞH{{ÈÉÕIªRÎW0µ³%V °3§;½Y¥™8#ã6Ì”ÀÍ_3¿×Vq·­~ù…m•J!qpïn/±bè…Ý ØátöV´‰9cÄfªÀ7#šiuÍÐËàÂA*âY!¤ê"†F'R(6Uää¼oÑdÊŽo”¥ª÷—˜C/.`̶ÁèB¨ ä]&„ÐÒº­Nn”ö·¿¾m<‹Ò'â+ ˜ˆ:«ˆÚI¹@¸o®$RI‚yô©ãl4`$ýÐÀ p°­ñI¯UÈ2/躼P¤X\wç6ãzjžÚ»>ç ól–àíÒ'8Ï$_Hú†î‚ú-¦È`&ŽÑXÇ‹PâlóžÑOg׉ïø7>½OyH`ßmI¼ˆ„¥18ОöúTh62©é{ã¸ÝCô‹‹~iFDZµ·6¤ž^d™ˆ$²2ÆŽDŠÀCõÛ}ç}ŠXC¨ÊÊCúɈÔ!š•/*#Z˃  ¡]5ÌVÁ~Dòãšy+¥/ïG-ø¡*ßÑêD Ú&³¹•(ûOEž!¼!uÃLU­Ö^D«5úæ7CByu‡J=2ùÎ &æ%Ù‚"ßRYŽŠ}†{Tv 3UõC$?4^XTË…ÔCˆÂ?hÁHÜ™;3Ïb©¯Õ¸œ« !÷H ޳U0w¨óîÏéÃê³ >2™÷Ô ‘_s=uŸö©º ¡àè‡YJEïÝ“îûå¬Hæ™×œ‘•·EY<¿VGâû|¹§¤u5GdàÐy<³ÿÚ€ŽN4ÂÒQÍ1ÐÖ3@nÝ|òd’`TK•H?Œ'ÅH‚¶V³Ã…$ƒ¯!&su¶Cïƒ%c‹Tª"Cø<ãlU]̿Ė‹¹b,U4*îrÐbA»ÑÒ…Róy›ŸâÉ˱ +¸†/b<…¸# @þ$ècšHzÍϨ‰,ÊÈ„!‰ÆQ–ªFéÀ+¢Sºym[„‰o¯Pwdêñ¦1r&¿ðÜ|>×ÿ8Bõ™.$Ñ8ÌT‡VÑöð2n»Ÿ»œU}‘ˆÇ/Í™@«¡ÛûƒGi?¡®SD™,$Ë8Ê’8ãH¢—é´ýŒf+ê á}-Ÿ”ß^æ@ð¡yu2«cRÑ„€¤ÇÙþ0²….ןm'œ—g°ªÀFñ1|ØW“¿Ñ½o#ÕM$"â(KB Fº Í@Ô61pyCsÖ_dF1³?Ì`äî,ð†¼2ÃôyY5‘Ù=Öù£,ÕÍÌ_Л™…kÀˆ_0OøN &âÎ*%Ój Ûô XT5¡äÝà8[… , *vE[‡Ã3W`QÈ\ó)¤W'ý“ÎáK FöqØš¶Ë¨¤PmE&ø˜m 3%ä`¤ќ̷­Ìîé«*0„.ì«ÞgNvÜ.ówosZ~\¡šÈÉÁq¶„Œ\ 9L{϶0ýz€J‚‘ùÂ⤴”˜¼t ðl¶3~:bë·gå„p|ô˜ÃL‰ËŒ\ )\÷„UX¼¯9«0„/d%õ[7[š‘°,Çwž—^G”(>‹ßÇEB¯…«SËE4!4 Ýê8[Ugsà ½µ™v¥Š[B)ò{FPúÐÙ/.\ÆŸÑ(<|kULdÞÎt˜©‚ Šp)ä㌄p;3pgr(ª¥›ŽÔ¥Œ¼ðg3÷²ø8ö,ëzóÕMgÂ}ÑÇs:wºÒ¬ÅPR’pœ­‚ÏŒì%*¨›¹¬UR{¯ ¢´CÎ?øÌH:"1XÈ6›Y^9%‚ !&Éh޳UõZ YÁBßlø.¼Ùý°˜|ø¿B ’é½?ȼÀ·YµMpñ–-Î>]T™“#™7ÌTÕ¥¨;oëÇÁñžÕk¹×Ùý¾ `z¶u´KÅÝ›Sš-ÜX[ɇº6USDÅ…{á¶jÎ1±‚ÆÚàæ•ŽR×JT‘™CÊ1ÆÙ*8ÇH1¢§ü&@ႯJ¡‘YÈ/é™Aô~ÏL<¶ÕY€¬¦ÈÜ!q A†ÄµGöÐÀ@ÎÔÃó²¨Ðe$†±Š@bøé*ŒDùõy27sЕ^µ™$¡8ΖP…‘D= t¾-Ã기ª2„<|G°™¡ß%±ÌŒK…”B?R…ãlUmßDÛ7ø¿ÍðQÞz­º aµ]yï¡éZáýÛa”ôKÌœ¥ÇÁLö@ÂÜѹ±S5oÿÒ{hÞÞñöùU…l#3€ˆ4'g$] 6Ì|ÝM]…(-„·£Fc˜©Â]F~ý«H7+,×§5ª62ø¥ü"-g^Ž÷úa q„I!¶HÔ9¿a¦„ô‹úª6s?í™=­.D™|×`$Zîósƒ±óO…B˜;r~ÃL å >s˜¼ôb»wJW*±EæßìDËÙÆ:XÊÙVÞ- 
¡îè/ÇÙªº¬ËuäÍݰ߼•µR[$&¶ÏËÀiûôzƒ}Þ€òxå÷ló¯OF¥ƒsÁDl‘‘Š‘øÃt¬‰˜kûÝë%™¤´b”¥º‡û‡CD÷fIi[á~½´ˆ-„füƒUŒ¼Ÿ9HCk,D´$´¬H$”$©8ÌTÁ)F =Ü·U­mN?`*·È4ããj#£‡,'ÛÚ:\Qˆ$„õc˜fJÀíH~8öÀ«g·ªÊ-2{ØÕà}æ0Ë"Òyh¨^yÃÊ>qšJ$„ñ££fJm$­<µËNm#4_°pò¡¨-„„ãli 6£38 ©=öÑ;žEo!,Ü—s‹ÄÁùÒ­1RiD"ÏèÒGYÖ-rl€Ëf¢§¦Ss¸,I,„†ë–®ê>0òbŸ§?_|!=!ºáÎèÇÙ*Έg‚!"`µmO§X–XäùN»%bÌö–uÌ¡>ž}šŸŠ„<£\bœ­FŽ$›9ÁÙñômóU.™ˆË‘æ‡Pû¡½Pî8’`H! ¢zÈÔÃ÷;…ó ü¶(¨¶½I´B±ÑôŒüô#1›üÌî»n'ºgÛs&¾-ã³VÑCFto8ÎÖbúÏýxaM )Pç#gBˆöÅl¡[²ŽK±+eD¦úÈ3Up„‘Dg+Ñz €”5Â~½{dñf>Œà¦nZ¥ÂôyÊ>Ì–„‘4¿¢ó{»Ÿ;• ¥E¦ 1 -ž:ð”ýFR~è>´`ûðº'l«†B(Aª/†™’ê"0‡¨goævÛÌ“R¨1„]|­2ÛgŽ{ºÐÛùÌdT…‚¤‡™*B@ä !àÄìÖÍàΗވC¸Å?ؾHÉ}xYþÕ¿kVGdÎŽdß C×™=´Qc«o÷z{ $B‹Lþ5ÑÉÌÆ?wø(Š}íÊQGdÆŽs˜©“ŽÄžyLÎÛj«àÝÈ•Î"“_YEäã>œ46%#~¢â¡ì(«fJ<æ³gОuó)§V‘…êû×?B&.Ï‹åK “&V›tBבçeIh¾Hê}(–8ìuÚÀ+‚zYa!¼_åyö#dž5±ñãi€(D™‡c8fJ¼È×Y<¾1w -Âár¾B]‘9=ú¨À×aºH´æ>0äDó¨}z=2¬3÷‰'o±/޳h9ÇBª#ê4¾HéÁÄàÙ÷:VfQ—ýB:‘Y>>á0S…Žd „C_ÃLÁ…J02[øN&úÎàÄvŒ´aüž*œPŠ’‹q¶ GüËZÞÕÉuçæ+4Â~ ÂÈá!Õß0™(Ù«]‘NdžUÃ0SBF>Ð<±õ±!{soÒ`_Ø×²èÎô÷Ná'mW’ÖTÝDføÈ 3%ž8踙§¯kïÍV F& 8²»âHÞÙ¶ê€JËl8 7+"„Þs=Ç Kâˆ# ˆÂh¥toõn…¸BˆÂwZ0whBÆøL˜!¡ b!÷(“g«  ˆ¾ë…˜üÊùb…nBxÂwV0wæ¼æã÷çÃÇZZ!÷H ޳Uð‚´6ÏÔ×­óÓW”'üƒ‘º3ÏeÓUÌ—5S~ý³Š2½Gbp¤-ó‰·5³œáž6ûV²<Ú¨è,#Yh¼ˆ#CÞ:ù˜›B`‘E_ò K¾}Á_î Ó ¯„Þ|É 9„ð{$‡™Eäî¬lêò˜éEY!ü^4ý¸Ê̶Ž^üÞŸËÉBÑB(#GÅ8[—÷ËÜ¡;zæ6XnÇFTW!ìÞ\^dÛ>¼‚„R¢R!äHå 3U0y‘·CPž˜*´%¾èEU‘©½?ˆ¼Hµa ßlÈH~ÊT ‘Ù8òxÃLÎ2²v˜ê8Qdø\e˜eJì}=fdÚÌ3m°‡5iÀi‚aãèåÆÙ*x¼HÛ ±¬c3iÀB-…¨+„ÙsÛé:ÖD¼ó¼ŒnªvnŽÞp˜)2#néSM ˜\gçƒT8!ئcl4Ïj¾Äl-GŸ(«Ê‰D2MŒh±Uì6Wãd8?7B‹ûMDŸq2÷J(º?O!ˆ24â8[Ecv ÍÿÚ[åe÷fUÑVdZñ1Ò|±2ózÞi¢[«ô™ $‰8ÎVá#ehÞPr.¸¸BZ!¬â;‡˜È¾… »'”ó3?"Ê!„¤g«à#uˆ¶ë‹èIJ95¡ÚŠL/:e¼ŒŸLÛ3Ûgü_û>í@í ¥}„2‚äÇÙ*¨·@´}Ø Œ¸ùvÊ# ­E"ãˆû^+w©žGAånϺĠٿ´ÛéAˆ>*NU²‘ŸgK‘xv31Y–<Ëš aìÞ%‰C³òJ_¶nL¢*!EfÐH½³U@žà´"jr…•pà(‹2}gß"AuÏÊp4íd¬ …htžÃLì[äÚ,+ аÙ/§H2„{gßEæ¥jïözŸÕ‘F£c!Ü农fòiI‹à´²4>BÏ1iUºÄØq‘v‹±ø¸¶•~ÕjÆùß~¶Zi犖ٴM¹Ÿ«_œX)-¢£ûÕq¶^¸ÄH÷™d– ý`sìA5B’Je©‘7Dë«ïó>tZÕB-~‰ÄÀõYžu¯þò[«UA‘ù@”a¦„H Ì¡yvVõxd×YˆCØE_ÏBS‘(¿ß¦ê~Un¡È´ éÄa¦¤‰ì!êÛ›¡¾þãµmª²Hã;˜(?ÛWçFèdº)·We„°‚”T 3¥¾*чŠ3¡Ro)/(D™bdAùCdÜ®á$ÛW¨U~‘—Ãü¬…HÂéIw¡ îŽ_cœ-ä•€ÙUϬ ú~¨@tcᄂ‡#¬ªÙºðr0Òw&¬ÀìÆæ 7F˜Ji‘>rƒÃLÔ`¤mÛÂwqèGE´!lá—ŒôF‹Äê¯Å“ QZÃçŠÂQ¦Ä³&Рà ›°=òÎÕ+d,ì bÔ}¤ïÐA=ÓÙ¯§ß§b‹LñÑ«3%Ü`d­híOO! ݆°…pƒ¿ûUú5ûTùˆØ"S|Ä|f0ò€h²vü`Þ|ì”h6„*ôÐ|äÞ¥àÓiR‘’rœ®•'¬¬Ý™ÑbD½ÆŸ”c$¡ÈÜØ•q9_ (2qHÊqœ­‚r £wfÓÁv@6‹12ùJ8&JÐHÂI›Kô¬XõJÒ»Ž³U¸×È/bTÁÆö‡yáX¼J9ȯw´ j\žy½¼¸S…P‡t¯ãl ÉÃ'߃˜»˜TÔÂ/ö­š³퇆ê‰?±û‹}Rm–Q5HÆ8[B*F ѼlóH3¼ìäùz¡ÉÈ4£›F?\%›ˆ?ÐÙ;SÎîk %EæÉ*3UŠ‘DDtñ©Û³Å,t™hd–KW,rù} Ë3Š­ #?hQ¿f“ÑÌÏ <\‡H¿9ÌTá6#Õˆî‰úP«(y›Mod6ò{Œô ÕS÷Ê/º/žê¨ÞB(D*5†™*¸ÇÈ4÷xùЂéòÈ$Ú a#¿Ücd-*Ûßoïp:.Ç{Ts! "«q¶ î10æ9—}é-C`ò]H32é¶Ó=ª‰4ÈÉšm9­"‚ŠÌ ’zeI\fä1æú&2…ÃLDa¤?¼ß¨˜ï;UndÞÐaÆ¡™>ž9²x蘆wß­ ºÙ1­Ú‹ÌôËg«ðœ‘4VåØ ¦­O²Ï2! 
Ÿ€‘:ópw| o(ä™Ä#ý7ΖÀ£ µ-e5:ƒ$|«rCðRÇãmhžw:É µªŒgR•B-ž·‹0"Õ‡ÆØÍ­Ã[UJ!t ]ï8[VæZaiŸ}Ze  ­02õ§-ÄBýµª22±ø©>Û¦÷Í Öme"¥:<â8[‘iCtr7Äeà«*#‹ÎÏÿ.X}O$ìúðä‚jŒ| P«!úH'É“b‹L’lfªp¾?¼!¶âí‰êì×ÿ¨hC¸EG’ Îà§Å!6|ºMÚ7n 9›É¥J-”¤g«àè"#‡žÒ…E]Û{¨n#³v_Ž.òh†ÐæRw¿*£®IÐ8["Àˆ¤œAhßíegÑ!4‘ddâîFdÒа¼2ͼz'©¨(„l£þb˜©É^>ôŒ§ñ ¤Va¦T9FÆAßåÄ¥A4áÆ¹ù¾ýéBE‘ø6BÃLN3Òr®Él!¡­ÃÕg&‹C¨»ŽþþPpÏ<ù@’¡@Z@©›ï6UQF—8ÐTdaêäÈœÛÛ¹K.2nè’ÆÔ϶Z§ÏÞI¢ ¡ä¸T«.øpe[ËGîûÆ[Màç&{‚Ýî)\\"!2Š(µx|æ(S%O˜¨<ãön”Òs;¬ª¢²ò‹A† ÏAèn­¢¶×ä·ÎUrŒÌ~={¤ñ00ì¶Œœ££‘a‰ˆ"3}ôë£, C Aôbáj¥öÈ;G{¨#s†]zQŒÆ<ú¯ç4XZÂõ‘%gKhÂH ~¨¬¸N®Âäó+²#ó†…S'“¸<ßW ¢òæBpÕPßGžp˜)a /øáu46Üä²›ÚMcwøÎç%ÆÍˆkÈËdÁÿ*e„r¬á†™*|u$ïØI ¶îêÓt+E ø*:ÏÅ‘q3çµÀ%5ï…=Y𠌹¼1v /²v衾íÙÇ`E5!¬“ÈVXòagYæÄ¦û~vdã¼ ÅëµïBPÂØ±hk2¯B¬›R¯¤¾)û #ýgî´½ã4l âŒDö7óÓM”.Svæíû-˜gWh'2­GBpœ-ñ§‘þC¯æ_¶eðZ±bEøŠngÎîÃÛØO¿ˆ€(2­G:ÊRv§‰úC)†ÐÖ`‚Z¯Pbdr†[>óŸ^„´(X¦¥jDj§ýôpµ: “ÅQ¯Q{j'#ÉE&zJ1ÇÕ¢Ÿ" â8[… Ct{ã‚äÍÐ@Èl 1F&‹¸íÝÞ‘æ3Á„•¼öÝ SñÁ"¡ÈT 9Äq¶ 1R† µ8´‹“DT9F¦¿,bäùPå²7aº&eXˆ(2H7:ΖF«Dúœ[<·W¡…"CxE·½÷:¼ã‚¿LŸÁ‚6 Âéè–*(„ $8Ì”ðˆ‘54OkKaù»—-´B,vG»üëŸv§8º™¾™gœšªvbüÏú e©¼fªP^DÒS®V@Q-n¸#P-†‹EñÚ9ÂÀã!®”g9'úÒˆÄõ‘#e©à#%ˆˆþ«0S⣋L¾“„‰ÇûNþ;l¢%é"ÈTÝá(K…3ü!ÑʽaQpe0ÅY`¡¤a½Ë‰gq·9¬É0äs÷X•HÑG·8ÎVABм"/j‹À;P µEæ Ÿ`8<ƒúZ¶ª¥eòœë*ª¡ù臙?9<æZx=àV¿`¦PXdžï=EäÝÐÍØl=¾Î4gY„Ps$õ†™*æ3ÿ2xkïÛ<)V ÐWd–ïR/Òn¶³Ú2£¯éZ]Ó)’aæÈé 3U8Â@à¡­Ûl³«š‰©|"“|Þö ˆ¹Ó'Ñ™¼“z‘w³È|ϼ7Î:páÏD!Ô£,”^äð°ÁÔ´¥š™'f……|Né]T‡_^N°ï:n~Muû¥ýlo‰½*#”˜#<=ÎVá0#´âÂ|°ë©ITe‘9¾¯¨"’nݯœdÖÎÚäS³4Bˆ9Š*ÆÙQE9-ï†y Vr¤Š¨, J8.€ãv‹OÛuÞÄz ‘……ïÎ7²wh§ÞmÛJ—<ˆ,"|dGYjá §Ý>û(Ê@šóµMç«ÀŒ3 ,„*|'wÙÏAÉÔuùDYUE¿Gß;ÎVA F"šI<][ÏpUa!\áÌ` ïÌr† Epi*y~lÞ@Kü¸º}&¦ñ #ÓÖìÀ¢5£Æ.S’T‘E \-Jóóš6w¾¾3‡y îvåàD÷¨ªˆLñ¹óf«`Ý"Ɇ†Ò}‡~UXçœ(øs»oå<¨säÏ\-Šë§ÚNäœZ•NÇÆa˜)açg~uAωb%ÂÈ|Ý»ä"h½™y'—²nB6rsƒ µ—»™Ó³O¯¹"² ºC|}|i!Áô³¨iJNü-¤Ú¶e˜‡ªnB86’m%ºùÝ¡g›ß\J"íÌûΗG¡­Æ¥Fæõþ`ñÑöñ›¾P¦ŸÎ”ªº"sqÌFY*ž;ô<–#¶EX]­ aõh|š,úðóôæW»„¬Îê<Ó´LduÚ—Ç/—ò‰_…ÅŒ2U2Ž4š°Å`°_ËJSªžÈÔ!ECÌTmÙ_vÑ΋M2ák¢+‰€|²îDõaª’;›ÂALUU™ ô(2ÈÒôoz°#}ÍN]«ëv '2=/Q"RŒ)óÈ·ºšIDBBúÛ©½#+ˆ>-gÈ,U%†0‡Œãl çÆÀ>ΙËY¨:„…|…Â3/ˆÇ,Þ~¥]!ÆÈÜ!IÇa¦DÄ8F+wl„‰œ¡éÈ$ä;-˜ˆ;KGö˜m³ÁëëE‡!Ü}ÿ0SU;v Ñ½ð€œS¿[%™&¬<*%‘·3g8y®¿x¯Sc(±G Ç CU3v`ÿ|θÍw¾ú´Ò,ëzðëV#]‡º–7KM÷섪,„Ò£cg« õg€b IÈ– ø…×1†°ƒnzEéÍOÞÞ•9;CÃÁëÙÛ#TA!¼µãl‰ üŸU¢¼ª­ÂéÄo!ÆÈáŒ`díloµÿFê÷¤UŠÌì‘g+Üv50Yl¥Ñù¯#µ‰9Œ‘YCÒÃL ÝÉEŒt<@’\}À¦ 1„|w²‰´=fS1áO¿\äB’mfª`#»hγ¹)›dË0ûä*bdòoL” búÎ2«¹£“êIÑOdÚº‹a¦ªæì@/¢;{g¦a§€wÙˆC(ÈJxá\b ü0 ì1{o{!ŸÈ” £ä C“yïU·kU®cöÒ®Ðbnщ•Ŷ?úÿý¿ÂõA»ÙoîW;¸Lá Eæ f³µØÍDÃN§Ë8c\¢t™]4p `[¬ÉÓÑldòK7FB}ד7ž}ð­ -2iHºqœ-é Œ¤ 2n\†~A^!ÚȼáN3PyhP=™AÏ3§Ç¨ÎBØ>úÌQ–ìRcô^ÏôæÈ ZdW›-\Þ¯¢ÄþÁFïÙ)8ÏçÁ}[è,„ë#=ÎVÕu8At]/È»wk•f#±†ŽÕ!Çé$‰*Æ|>ãñÝç#íÁܬ7ÛYTj‘A/>Yªš®AÛ‹Í"´´=IŸJ6„5t(ÁÕ>=ÛŒ¤yÂÝ7cs]6žé)¹ùÇÿ»&oMŒ !¸“gßWߣeé²ÇÙ*øÇÈ6š3nÇ©ÛÞK«B’ÉÇ'úÏ@¸ST¿· ¡_ÆÙõG„G-X'Oâûµ¢É*E$¦>Ôi­]§…˜©KƒüŽ…ë¾ï.QوЛñIÇùF4|o¼T ¹¬Ý;¾“Ò#S’ˆŒ±Óþp5öÙÇe†sdg­Åðpíù]¢‚‘Lmº» ÈÜÍÊ;Òw ÿ—3ÉNwFBJ§Å]¸•$“–¤;ÇÙ*D -!EJ¶Y Ý½Á<ëJ„Ú|'2Ûè%góI*É„$Å£,Uã¶Dã8ß¶ +y'QŒ±é¾Û€Ÿ™FR¸šy7n"÷X=I¸aœ­ÂEZ]¯Zæ®¶ ”ƒˆdD˜Ã¯ƒdfŽ]þ7‡T‘ ?ºçQ–„Ó ž¡}÷MDi÷û -Hfù|-«‘ø‘uC/õÄœ~îY~Vp/GFo”¥štK¼˜½ùÒ_Ä?^Ϩ áÎè®ÆÙÚí‡eCç4nß,|R¾-" !âxˆl¼åi#Àü~Ê §¾OýLæZœÄØŒ€EæØHβÄ/ÇÏÞ\˜8o®¶“½}'Ï‹R#“uoÔ\æÏŒQÛáipÞÄ‹8T¹86ª*¨¹HÄåq9Td. 
‡,‚ˆ\Ý—™‹Ü™×’@s¦‰t– „^c5ÌTÎ|Ùz2n[D÷Ë}³`!3uÝ´ÞQ‰34=ÿaØÁÎò¯Òdn¬Ü0SBÊEsè/ë§§Ö¢Vš®ÐÄ®ÄImf»ª-ö G¶ôT"1jâ„q¶ ºíK®¡–ñìv;]°’-dfíñ†‘ÝB}ÅÌkj'ˇº«Ø 3` ÃÃLÞ02e„ßn>¶«²p!“i}kõÐ1µÀn¡Ïµýdñ«Tg ¹³q¶€N¯ÍKÀ“]'i$>ÍÚÁÝ ¡¢B5+„'£ÿe©`Î"ORbÆÕ–ׯÛÕ*-B¢Òž˜‘¸-C™ø5pN9R5ÂÑc޳%Î0ðd˜:Èë¹®õv—ê„K{w†‘Þ²5‘«0›wíªh@(0RgÃL7ŸF¦ÌŠ”ËQ‘¶Ò¬QD d‚‰À`8"1¹…LÉ)¶D‚ùÌ‘w;úÖ-¤B”Ñg޳UuAÿjh‚nÈÖêô&Q)çVì™NžE‚ SÞI#ônÑ$.ÈÑìQ– ò,rehoÞýäœÞ4¬2áÓh|†Vkþjµ"W†Ö¤ Óð,EaФ¿ðiN ìóÓ†ÒSô8.&«¸÷ýZ:—¥ ʃ¹èa˜­Â)G¾Ìˆ€ …a[æç–j8§ö4DŽ 2vä<ü ¡~‘%FAÃ8["hˆ€²™ùÜp%Ḉ¿\¼Ï¼¶û£Bö*ô4Ÿ¶øhÔåñª_H$¹¯Q–ìš`ÛÈöég%ž{¾¶0`‘×ÍgIBˆÌ‡ù›óZ‰yB»ÜDŒâœúˆx‘/;E>sœ­‚׊4–¥ çIw³>·n%)„]®h U*ÉÛ£šÃÿg^ (Òa­D¼”*̤|?5Ø—ÆÂ(ù ÉŠe®”çü¬½M®$KŽ¥9ÏUø ^뿪­ æÃB²š<›tïhá95%Õ'%HàfÄ wº™ª…üI)ʲÔEl˜©*IÌVíÊ¢2{ tÃEÍD’ÍÜÅcÙòçé3“ŒåÁl³²¹…xWU#d©ËE²a¶²ä‹cs±<”êóVz•3´¨n¾üÞvY”?ˆºöÓÒ¢à…)ÁìW¿óLJDcP6Ζ؀[ŸÍa}pmUý‘,é`T⌚FcÆmؽ϶҆¬Ö½2$ùÌ[©÷gûuµ(!Il”æFYz‘æ¢xf+éƒñYð‰lÙÒzØ(ͳUµD5-Ñ+_õ⮦*.HŠ]UIÀ£'ªcX*Ÿ€i=€ègt’ƒ .2¨lè‡>áþ–O¿’4׈÷¢gEõÊÞœ—ô^K¿RXÅú$pý­(:a€Õ†1óor‚JìI˜bD5Ì”~é$`y3¾³w¨z}Ö¸Üôü§ÿ¸ûmŸšZdO‹ QÍåW‹Ä.º=À0S/²S†Ð¿½yÖ̃¶ÒسxDÙiœ-Qç£Ìd)€¥'öºX.'r}”¡ÞU§¤ ÙYp‚ìà™Ås‘ލº3UhNAaòŽX°tË}½F#ëðY„ú‹äE!SËy'Âa¯ç`#«êçY8¢ä4ÎVᬢÀ„¾Ø“t´e# A½Èñ"Bý$§( !«á!5-»ßU¨:ºHG ÇÙ*D§(1™Ãšÿâ«ËͪÊgŠ"ùuPâ±YÕœ¹lst÷û7vb‘ýtÕ#t¼.NGOEm‰Ÿ`˜)‰£Ä„ºÊ—5_½J…{Q¡ŠHa]]ò ²­¿öì΃Á+ÓHÛE:¢è4ÎV!:E‰Égá[âì*”û Añô‰ëÂVJóäWï­c1Ó²›ÓÝù;¦Di…u«êUÒ—P%·ÙiÛäé“sU¹ ŠQý8[…ÿZöŒÇ›ËLùCŠDÍú‹v&Sé÷•ptrl§º½ªPT¯†™ªgƒZeÚ}^{ ~¥µ–¨¢åÚêùÓ.Mïͳ?‘ ¯(Lß äv›…ü,DѲT¸â WA ÷èØ¶ù¾dI‹,n'Ô^ž~åí¦%% Ðéðùc“70©¾/ju®q¶D犲¦™£´l3EÀ'òI€H_qÀQŒB3ìOj“ûßD»WÁŠxœ­ânÎ l™ÿQY±‘õ6Z-ÈâW²u+ÊLaÛÅFä{‘¢È!ÇÙZv 逓?ýlçC=õ,J[è¿]¹&f‹z‘¿ ÖÑ=l¤ŒÙ?/¸R·U?kVt°£,U¸AÚBîÇ4ÏíòQYUÁ@V¿h{™, °Ÿ½{%([²Ü ‘ÑÆ{…è‹úÅõ)5FàŸ>Tó"µw¼aÁò§E&,Qš•—Øÿ´påì¼¢Ò «Z^¢0ʤ®?c/¿š-hË/ÀQ ûòºZTïß÷Ieƒhh?Å,jZ–âšr,Ê&Ò¢!ë^TÌÆÙõ1ŒiŸ-skßzóÁžEmCÒÐþRÈõ*ï–œá“'ÈZ5-2 3…».ñÃ}{Ü'¶—ö…ÁõÜç7KaƒˆcŽxÃyo™½}\M䃟ßÁ’¤eÈÂ~TS¨qi-BV½¨— 3UÈeQ3×>]æ/ñ°¨àkaCÔÏŠõâ WR¡0Z·ÀZtKß+µ"TQŽfJó‡$ha-nŒ‚ì›rP°6ˆèåŒóç­qE "áÇN£“ÆÜû¥zUªîØy­B½Šb•¹M›mbæÝ\EåA–³~Š;Ò»Ãç% þ7*8ŠPæ­Èý:·yw€–ˆNųb˜)©Eˆð!nsnOfòM¨Õ ¢Sõ¨˜Áµ#´Æî˜òùøq¯Õ"/±Î`˜©,yZ8Õ¾R§¦¿©_Ͻßõ«¨0¡håà ‘Çæ¿êþ¢BQ¿g«ê„ jZa9ÔJž;ZC ŠÖ_¬ 3arÚLÀj7ÐQ‰\Ÿ¥(*X£,VP¬ÌZ¶i¿™œCºVµ^œJT¬Ðx¢ï½%i¤Â*’gQë§aE™ C‘.ÞâÖÂpVZ©®-RÔ8[Â:#Ùô–$ÈݳgZ…Hžéç‹>Ô0óEmÓBaiÁ!Ç–‹d.‚™›.»WƒŠ°øáÝí“W%©Ò-B¨a¦j…,iX¶´èráø”Uê‹Nfœ­~;nÞ8€2fÞEOÆS5³“Ë8 œÐÆ®ÕEYû‹”Õ®/ogš!ê\NTO‚•´Q–êþÕ‡n†’Š2ùäZT!­'iQ’u‰YY–…ü¬ÇŒ…Z€ã)šGEvêí€C«ä2©Z¦ |ßOk/ž™^”¯ŸK‹J2LŒiÖo_(ÔuQ«èÒÆÙ*OTµ -ì¶í‹/>‡P¥ú,|eÛS¦ž…&´¡¬Ÿ‡ÏÄ=)QÄ\cì”3ë@ööïZÒº…¶.ZUùq¶ŠfЇ¦„nЙˆµ‰Q˜2}Öœ:Þ,”¡¨Ý|9HÁýÚ…^•×Eßñz¯a¶ªnЧdëàÀí›U\SVP­>kE•2äÊ|To0bùÕaÃÄ—ª¾® •ùq¶ m(*Ah½Ìåmwéy¥ÕgµÈa:ú"OïŽdú’ôtÚ(Ë?»_dK§èí¢ñÐu3U¨CQ 2”raZèf¡,óY•îE0rãçê »³” ÓàÐ?˜wí¶ƒÿ­”t‘r?Œ3•N´/KÊ?ÞµÜ3/Ò”ë  aÌõáÏéôä¶Ðî³vô¥(j9++„›ãI"‚»ê=TŠÆÙR¥( CàÏœ¯jƒ€øà“x/ÊÑ»N””;P•Žm›üÖ^ÊEí¡#gKi|p¢"ˆha÷°BE÷¬ ýEŠ* FJ_˜zfŸð T­<+9dÃLU=¢OÅÇVÚ±ö§€E•w•„\Ân°Ÿýv» ÷XØÖâT×ýf²cgI觘zÍ>—9j(Üð;ºÿøÓî<#§|j4š[ÌÐ~Ñ¥ßJ¨O2ŽCÊ1†²t奠X°½&¤”¢ØdŽÊœö¢6Ï܊€$ýäŸ(Р÷t2úja‰³¨ô"âPþh 2`Oo>Iصlý’æ©Áƒ·g³ú|­ ÙèÝ·'!MªžýÝCº -?‹=<½ÆÙ²™Çð÷\ÜNè ÃÍ&,øÕ½R…£"6t(j4Þ=dïÊŽ³QƇ‚î(K…°Å‹3Úû°Of;0ÿ­´þ,|æ¬ýÔh0ûä/¬°˜ªêóYÇ¡4ÌT!E¹}¬‚›ó3ù˜,ûErÏŽZ?þ<9C) 4¨w²#{sû.øª:ž%úÕa–RÍëC…Ù0Ó«c}ÊAÆáÛ¢¤ëpOUèì"ýüjPgP8Ž) ‡ñö(ª6. 
Ýê0S"ýD¡Çü%WÕvö!9•̞ŠwU=ª3Ð?œðó™z¿hã¢àPúf*+,ö®Û‚?À½g/ÝZD˜Á¦ª~Ï Õ \Žþê¼N§=*i‹žó®Þ$‰ÅgO/ŽÏx»“HÑ¢ÂÐ… 3U¸°(× MsEÖz^=O)Tí,é¼(8A¯AûØJŒrù…wY&AçÞóIa±èÔÎ*†Þ¸©TE[Ñ`玲”EMKæ•@t9™ºTÎ0Ê.F®<ó³Ç2±SQTb‘f¢Ïyt*¥ÄÛ 7‹-Lã,ڬ늘Ⅼƒ,åxÏE˜(“`À·÷‹´§ÌTN}0Ï{cÝ}ˇïKúòâ 9`ñæñÍÌì½Òœ³.QDzÝ%GÀ÷Þc±7–¨N¬Z]ò8[íüÙ1v¿¥¿° Ì#O'O{LB É9+"‚Z`§· 5@¸ñWBqV¨D 3U(ÌQxøò>n+?9û±WhÎ"N¼kI/0?jb¼¦ …R,šÑ8[ʋڃ­¶uµ<÷ì2DvNÚDÖ˜Í{y‡_ à쨮-î¥4"Õ&EÁ«tZú´Õ}'{Ëúõ‹…0åÃbýÍ8Œ©æ+âÄOŠˆrÁ—ŵþ²t:$B­H Ô"ÆÙ·¥ ±Âqºؼà?˹"OüE¼‚Á¯ 7¿óŪ+¢‚kLÃlÕ=&IFørv/B/ÜJBiV(OŒ°RµÖ­u87£þÞI§jkÖ**e»ë¢x`’è2á|¶0„LEÒ,0P™fªê® :VÁÉXÛÄi×R³Ü*ZÅ‹2„óëL¦kI:q±J”Q¬øIA>0OdáðA`ãä*U“Ä@§6Ì’'“åñÖÏÏRšHtªà*é -«mª%lTÄ­ÂùŒ-7?6^Ðb†ÛÆáÏþ¨ô˜EJ–ãLY)ß„óu¾ËaBL‘À;L*ƒ-$KWn{æB*ZŠA2ÎVáU¢fò¡Ó¾[{µó¦gTtw½3I˜%Û7þÌÉJ¶Ì2Šq¶ªŽµ G emfTd;È[ÖD͚ŋ_‰ZšvzšvÞrü“ˆ YŽ`îðHʵäòâá6qAf­Ñ‚ÉÃë졘D#Ê ˜[òòeL’ŧY!Â+ÇY*d†(*`€«Å}íÉ]ýnEÕI“îÐMþÜ?ïî´  ¥ì"7¶÷àWÚe}3kT†™ªJ’l…ʽ72¨¼)’…Ñq¶´ï, è=³§noÌg®R©ˆý9¡y¼ýtˆh¿|-‰²ª¯OŸB^h”¢Ð5 3%âf$<è*@T}øÈ†,wfôâu¢$ê÷Ý2òöt½M·;E¶èëâóçþÙ6øDT,œÿ|؆Õâ”Í…HÑ*ƒê@­b¡{e­ÀÔS¼ä…SàT¦=ç8[*}%ݳV[+í…­=%VÉ3kïbD’ ìj[÷B|¸ðÀV R:a¦ªµ ¦s÷ÌB¤QÅðUÇÙ*DÕ€ëÑ%æÊÇzˆÎš€¾{`ä=üyóûHÙáÜ£6»AInáB¿L$Þýä(S,Þj;‰°™¶>7ªB3¯ÿÑùÈÏÍ[£¼Áìœê—ÂØÝ­³%t>²x X¹)¶>JeNáõï6ñs[ZóDÂuNXGªKfÀî….c MåÝ%~n¯¾Eî¨ä·ž}V ‰l˜;åÆ!fª®§ÀáÑõ´sûm«Š«ø(¬þÌ'xŽ)¤ÔÈ Þr¨³è…Âש43Uµ=溞k[z×SV…Ô3gä-íûç<bzܰû\?LÅ PPþ;¸Oh2}(ÆÕÓ Uõ¿Sg«jU ˜­J@›%h½ªD˜Qþ ¸˜ÞAsØöí;~ÕUÖ3Êÿù@Å¿Ì%Ìגʨ¬—©9]À+EÿQàêÈ)fxkëì ƒÙOÐ{7ÍIÿKh@ 0¼÷ yºˆò‰B«`NÔ>ÎVͽ ÇäÒs¢}›ª—q9Aû CÚ~˜:ÚfHýÆy¸R^æîeïE‘ƒ#ø¼e[\Oú£ò›°r wãlÕ­E¦ŽÖ"'^†Êxˆ”'ÜýeÿG¦n”Ý>§¹’µÏó)¤¼ÌÝ™$FÎF2͇×Ù^¼î?;)‹¼¤l²ãxÿXåõ{Im¤Î¸j˜©‚˜G>nîcÚ‘•®> W…;!ènzïàuê³±2ÔF'–ÎcÇ«Ü&à›È|œ­j¹6V î…Á.çUÕÛ„}“š³¥Ô<1r“+)ì/šõN8z§æÏ¦§ælc¸ãŠgoKži— n¿É¼†™ª[~ŒÜ[~pÔ­N©*í.st—9$ǾÙ0âvÚ}²|="Õ HçÝ6\«°ÝØÝxq]!íe"N–>ÌTÕžÀ9Ú“Xcp.»S«J(pÝsðnÔY#a!™ €Ú^jû6 Ë6áÉN ,% Ävj}ىşÖwFÀ2º'\æµ¶¥âwd‘Nس;©Q¦’ãçÓÎ4l¹/ZÌò^äØ_N‡œñ¨Öî\DðÖý“÷"{XWÛD>[ªÒ Ÿ¦¾7ÎVFs86;p<žçüðîn$ùË xØûõ¯›$ä)l¦«cçQGšŒQÆ9ŽÝ-Ã^Ç„8“S³UõÅ4mgˆ±,¸¹—¤‰Ð&øºÇ¾E_Làɦ„­œ­;}Èy¡eæLX=ÌT!ª4¦˜¥-PEQ‘-±ë©*rbÛЯÕ:ûxŠB¼–ü“ªÜE1Ýæu kŸ)"z“`ùÃL‰P9±ù¾Óaäï&@‘®„%»º~X wØìíÓéÖCoW¬=ä¬È€Ñp1ü?6ÿ†I—HL9k¡´C|ÀM"Àhã\-"FçSeU§%¦Ÿc§`Ë‘$£Õ„eÎç|xmN!qeÚü¶鯩P;JÑK•ïnKº”b¢åa¦ªV“À‘m˜¸}ò9ø2È— æ?1²qÁmfVÑ‚º‰^A5§Œšdù‡€mË\/Ô¾ñ„HE"¡ÄwmËÿ¹™‚-”Œ¡w IíÛnÿ°ÊV´&¡Í]YÚþÜ?Ÿ $?ú‹®âàÑ‹ÉT @Lii˜©eéí­·ƒ‹Õú» j* "¦´4ÈP&ˉ#£%¯¿½+ThMšßÉrb¿>·÷¢÷(²Ð‡„sû³Uµ„ŽŒ–Õþâf=%‡ëH¢5eÖÌ8†»_^àqÚš9ï1Ê z6 ÇîG´Q ¯³ˆÄ¤½šß.—=Û)ÅÙ¥ª[ ·¾I¢Â LðoûÇä2޾‹šS¢ÆÄÍCÌÎ$€å/kcí–‚ú¤Ÿ¬\%ôì†×õÏý“Å£‹ƒc 曑«Ø$¸˜2Õ8[/2UDÁöâ/ lz¿·,êMŠ)S 24éä©”-ì<6Öä[ÏûÉDºè\!f†S #øD[ZÛð+÷L!7 *¦P5ÎVÝò@ÊÞbwžÖâÅÛ‡Dºììî$ eth¦Ž>ÅP…ªLù9w¥öówGÁƒ({}+‚ùÕY¸r4ÎŽ˜ï"XfÌÿïÿüå –}ù³[Ë ‡Û‰,%¨˜f˜©‚Gî‹êVÏ‚§Ý§R‰Ä%h˜™×qд$jãí9¿º[岟ž«Gº‹ÖŽÝ’"ÌFa¥§*WB€ÉŽÇÙ*©pæ¶vÀµÜ™ÜúùÇWd®írq3U\ÀW.z©w:¨v@²©³ÐÀ2%îîèÑ'âL8a[4K/ö—@$fêT¢\e´K¢8ÌTÕ'0úDàŽ7sSÌèTJÌ÷é. >\Ñì}Q*€ þu¹k¶|ž?/ïD 82’KØ·mö±Ûª+ebK_1Ê’ÐÈ:ìÐø|¸\¦Óg Uæ!µJ8ؼ]ý‡÷4y¹A¡PedìRrˆ81aò´} ¬±³Å»Dèõa8ìYUÒSfÁ¤ÈÃLÙ#‡|Ÿ7Z®¿ÄL„DRRÖKB<ÌTbÿˆ#V˰+Ïû¢Û‚$'Ö‹: \ÇÒ¶øáÍý* ¦¨4ÎVA’#7FƒÆ„êÂãsõPVu¦Ì–;`ˆmû'OzŒ)+=-šßÙå`÷ªod˜u&Á¸/»,2ZÌl91½â02Kp&ÂŒpܵdÕ{½pËëºò’½BLúÊàaœ­Z`IlÅûûžÚ*§9UKþ$5cçšF´ ?xp+Nk/'™Dð'Áé8[U7@@¥èX¬Ž¯-¥•{®\§VðÔ»"ßİ0º Éö køE(É ”Ë0S<¨ôËŽ+¹;Zóñv€¤¹$˜ú‚N#(ýòr‡í¤e€PÑE`êFÊiÑù‚c§ýæêe"•dJ?6Ì”~íHL1©`GEËñ™=ªRÕE¨*?¦æk÷ƨê{:ò(á_½„ÿIBQu¿1~°4!&ºñîBëZâ[Ý$rNâÑ1vZ<¼±¤ýã#K–£´»ÿæN 2BÀÔN›KmGio»¨4š U vÓj„œ¬u8ˆï5w…®’A(3Ÿq¶êZÿ0…87ÙÊiObã¶-4ªäVDh°¬Jâ †iEªj³÷ãíëQE¥EÀ«cÖ;‹ýì³ÓŸPÕö>;¨áú½nWД€W.«°ˆP½|qúÛåÚ¶¬Nèüùù]Zø£¥_^øø±[3Ûg?éD²¢ÊÇ9ÌTb#x5Ö"»öÈzDxãªj@‰Íƒš—C×qÉ”¶d݃‘X)þÚ‡Ù@˸f—m²´#<•aœ­Ìo ü¼¢p_zÞüÂkRÅÊ>ÑŒbŤˆüUêJ¬4ÊRAkã@< gÒ^üÄïWÈIÂo ZÛû"Pµ¨zÃʰ㕙X! 
t%®g«êPt{1ûf&w^A¢rR&¸Î×t•˜þ²NaÊZ’Ü[Ö@ö>ÿÃ;e¢ÎÁÅ·áDæÏæ\z’WÈ6³šûf­yt "äd8ë Ð K…׉ u˜h€oOêô®•„ÆÞQc䞌1õÇ÷o¥È¥d(Õ…Ž!BO[ ûAÿd¼‰œ-ë1ÂE]eªŽ+Ê[\~…VDÕ™È<1nk²µ‚ᇅ+ÂE¹ÇÇÙʇ4‚™å YµRÌ +ŽTÌ·U¹ß2M–rÄxG€jïg¶1fkëJM’r2a}‘M€Ã7À]­œ *‰ð ⺈b¿oÞc‹‚‚‡ˆ"‚kï}˜ð©åÜûÎŽÛ>×W5 !¬7†™ÊÁ—K–üùÓˆK $bVLÚxoSn•¥ A±\ÃLÕ7Vó©íhAÆ{]Ìï B`,Õq¶ª:ùmQ'Ïâ‡ÕÂcO‰Ü!\—Ï4rO{¦0Ûfr*n‘22}A¡|MMf÷¢0PÉ™ŒòH™^ºÒN6?Sb¤|Kƒ hú©¿›lŠ:ª =@ ´}§9ÓfÔVGïÁþqOœ‡`ÙÄ-„²¯=±¶ö´ E‰âßÊÏÁD¹ðÂ}a5×l)ED('µ”q¶j>&|êŒá(¶Sxç´Š"‚9韇™ªJÑŸ4Ôü«gÇá •¼’‰é;MӘ慎©e&Ëp(|ye9ÊÆz™ð’^µ)&fmÔAwªUè!B:©¤Œ³U0ÒHDQµÜžÖä[¡­djJ¾hç—±¯¶kZìÊ’ UR„ˆ:ÿ´8p¿oŽŒ°ÓŽþæ½l”Ðq'­Êzˆ:6Â#àÏ®U&ü‘¤e¿/ŠªÔ–Œ( ÆÙ*QD™H)WŒT;ú@*ßdÚ馟ƒS.¯ó l%è;GOî“sYÕ\„?ºz1ÌV攘‹túå*Ÿçå*J.#^´EùñkYæûtÊêŒH_Þƒ,éͨiíÚúÞQ¾¬ç”{Uq„evr©“U[ÄL%NÔœ¬É{S•áÌ=ÇÙªÊÞ§„Ƈ ›¯—k|YÅ–I Q¥½±Ùcž}ã-p…B“h¦»#\ŠÕ5{]z$‡ùŸ˜<µïÛҋɳ¦‘á¢k!ƒ,P2"HKz?¶x7kBðK×EJùc’Orˆ¢ðÙ‡3ïý¢c'„.2ƒeé…JFpˆ†‡“ƒZ"Ë¢;d¶È54Ê’qF*ãçM#D«#yÉd)‹_ —ua†¤ãlUà¢üä.lïp¡7AТê¶×›æÌ²§˜i#±¶Œoò˳Ö;ÙÿéÐ[ìó`ð¡&o–W½"2Ê—ÏÈ mã,—‘ÈíØ> F ¼Ÿ±â"F·ÌÔqî]-/X>‘@"ÄA†Êm“éïζ-+MæÚV&Ÿ 9Ö8[/¥ÏßAæ==ä½N*nψppœ­æ¼g[ÌÍ£½Ë(B"¬ï”IXÜ}]·{û‘#)£~bE8ΓqÂv8A.F|A+¢BÚvÎvËt_¹ã ι1ŠÃqð“‡øsþì¸èG(1³`b0°m^1¦t_(æ¯Ð:òÂ/G\"*é½ý’¦xË™ƒlevo•%<Û|w|UåØôÙ[\fÊªíŸ ùËL^Y )â0S/1‚>ó©Õ¾åÔ» „É ä²gk®â_dœÔò‹:£EÛŠ÷ á9¸ŠBߪ¡¬Cm¼mƒmæˆ×JÈ4Çʼnêó»G%“û·ë8\÷š´~êõÒQ6Þ÷(–~8dXæÝ~³ì>ZU¾@:K£LMq(Ä=pŸ‘Û°E †r­Óž2¤7ETæ•ÃÃlÕ-0[ž"Ï~EaAÈ3+ãRe)'ѨžY9gw¢ð¼±þÈ#*C š h|8J0·à4òq¶ªÒހݼ´ l9ïáã‚Ì3šóöt"³û©„ÝHÙ"SóaTøÍB O¨º@·—͘J²&æG-îCÿ¯ìöƒl‘„¡Òuá8Ê;EPΜhÓ¦Q–^ètaö@{)ôì-”Y`ùô8[5fK ÝahŒª”‹KW8³À2êq¶ZˆwðP´íÂqa!ág—Á)BÜoKn±àß l RózØ.Þôø¡—óU0:#µú)—mŒ¶ [ËäEö£„ý6FS–Šm@"¬8ºD1²À+n¶ì¢!4{®>¢! )a \ ärö³`òë6+»âKfꥄ5b)(p+ŸàѦ²fAW¤Ôãl;#Ò+”°ÎLç³7 dÖœ—WÿXàƒQôܧEά0ë…*ÇLÌNUkšØ¹uYWp札9õH˰NJáèíSî}‚ƒ¢æ€½~T9‚(ˆ÷¸û°¹EvʆVyë8[óòÄ~ÝMIšxkÜO° Õƒ±õrðY(žå•©£Le$ކò‹_»½ÛGŽZçÈ´¾yt¢Žrù‡a…àcÁ^ÏÃLH92h{Hä7rò‡0fh/*B'¼ˆÓÏý¾Í™…J9*x'KÒ•G“Íf!ƒ`©”3¨tãu"NF?Ü»sÏW4k*å_ÿ®¸þµÏ>¢¼ܺΠ,Ê#¹z=ÊyîýóøM'~PÔ¦O¼‰®“‹¶È8YHO,ý?·UÓŸDh¾ì8Ÿã³(gˆÃu5ÊRÑW *B'î´éò vNgä¢iÈ&íxû¬¢oÇć²m†gh´¸ì@"1¿;÷«ÂÑÀzàävÌ/°¥ÔkÀ3œD‡9ÌTU6°ÊFO´Íöÿ±®sg²Ää?R#ÔÕm¸ìØN|žâ‚¹,9G:­˜‘?þaëG؃rÔî†hèV ‚œ÷þ8[|äÉÑÌuk~…åB£….¹í‹E½x'€”b"š8p÷à=Y8ÂcC^Ö9ÌVÜ'ôãÓW.Ì„ùøu@Ê…qi 3U"£u0»‚Ó¦uñ„OÑmæ>$FÃLÕ•<ä•1LK¿êT °$†÷Át5>mK™i†:tóÃLM± ¹“žcPò‡Â0¬] *ÀÆ«G™ªNÄ,¯r²vï9JCÅp[ 3U•dƒ²@FIvؼ,0ƒÕLuÞʱAY .ln¶ú Ú¨Ó˜sç¹6/Þ/Q|¥tÅ1«y lf–H6§,Ìö#4B¶Yçí«ì½îL¬€%>§|“kâF_N·›°¡Ww J„-ùK8W/àq’aE^30d{®™ÌV€kXãl%^ˆ‹³ÊVw¬ÿdjP ù:LìÝ.>šÿäeä˜Èºa'Öþ¶uÜ‹3ܺD.5ÎV] ÙÑ—P«ßg×+·¼Äso”%åRi ¢DÒ°¶XSà̩ފŸ05‡+J+9áôÁ¿l¼Ü`µy¯^‹œtN>ŒÈ#/mXFû €'¤ãPxÆÏkeø–˗ÜZð°[íý´‚`ÃðÛŒ³¥®!¯Èës+È«•¿ ,ñê¾a¶R †øæò)rûïz¢» ó’/cã`lnc½GÄL+H…˜©— ¼LÐ%¨ÙN¹Í©¡Ð UN‡™ª+ð~ìåËI4våÀnûÑ1i"©‚g˜55Opy©QtÂ*ܘѠ² ½íß;¼áUh«`œ^é ²‹Ý\u³½÷9›‚03¡ùÕ¾Eòå°MÜëaóÆÙO(¨Q€ QË8[/2€LÒ\]ô™èkÌ0ÄË(Y*e„hÕ^­î·­?iBF9ÎVUÕ€‰WµmØË}·YÆ–™© D^bÐUN› ©òеL.#Ry(—XÞpµmvá5Ë‘•H Rq~÷ᣳF ÜG¢÷»\' Ë'L'µ_0W„(¨ÄKÚF˜ÉÑ"¦Ù#D_ÄÑ»«“0É——\ž~÷¶OKÈøPH ßð0S/%o’ ‘z¢oï*O¤<޳…ÀƬM6þš…Ì1‘{«y ÈÎÁöfèüê–‚e˜eäúe1o³tøÁXàÊLe^0L„.¶½fˆÍøîC$ b™ÁÌ£êíIJP¨¶RÚìS„2 Kñ’·1†¦r\¢$lÃEù‹-]~IAŒJR¼˜m˜­—j¶J Á°ÿs²cKW£°B˜a¦Â$äbÞnGës[J÷¥a"3–ñ‚¬oÒ:79¶8ŠGõžK`ÆÞ„]|j‡ÐÝ^’ñ¥²z»˜Æy3'ê3æ‹`¤¢5‰§xgŸÇY=íÊgGI› ²ˆq¶Z˜Ä»pÛníScúR¤Šw3mà è€eÃÅ4­Œ§ "'ì,oœ­º´êÁ(iy›M\ܽ;ó½Ì1åEFaQ^‹¨lvÖÞÂ6Ž]Pt—0Æ ·ˆ”Ã>8hömû‡U„ î2Èøqº8@ Ó†–Ëë¦ Ð&lˆnˆ™zO$€‡9S‘97¯CÊ–‰_Ë(KËbÏzkqYK/ ±[¿’B ¡1@?é„y˜S/'.XœPR¼q¶ªB¨ÀP…Öë¶Òæ~íŒp=ayž`}3—ñÖ}¾r=<ÜgÈëF½ró£|[~®Hot øuðe-(íbJQ<*¦žØÁ,_L>è å2Vðb©Vz³1„"<‘øŒ8ƒ¯9[Øvm‡&Ädø}¸}ÂööÚ_Á§l»dó[‚2³4áWÃl½à¾À«y¶Ñûe•Ù › ífª€a –j1ú×–ãáù¬‚=ÁõÎMt} gw óvØŒú`¼p½ˆ¾ìTÅÆj>~ó Y2é4á " ÎMØáËðâ´š2&2áaÚÄÛî×®·Æ-â÷ûð‚‘“ÝH†‰bŽ¿½ìlãØÚ]ìVˆý:„˜Ð„— °’p†OœÄÁÞ÷ŠxQœw¹fv¨T‚8c˜©èY—j-<žšƒbÛŠ°CÁ „ŽÃLå#9ÐáòáöqF\‹¨›¸«[Xá`RQeF/@#à T‡]Hû÷õžÖ§2#¦˜*ØžmOmùP¨pbŽÜó§iöÀ |9©Í.Yk¯÷°%t'¤€AÕ3Ì<îŸNbþ¥•ÔævÙâ”± ¯veêív–þÃÃzLºžœ|_9A^æ;ÌVU–(ê’ÁïËΈJ!œ€„är#ÌÜ5mg1ÏS'yÏ q‹¸Áòâ¶ø­ŠÁ¶&M)‡ HâŽcöïu=ÇÁØe󺞌ž„xÐ(Se—“ûÚmgÛ)io~f‘¦ 'ô(ãlµ==#¶³Œe箪ˆ]¡>ˆ->é„![1G}» ÷.0¬Kr|?lž ÓmðÈËd|¾EÎò½þ~uöÏYЮL˜œ~”µò 
ïýºšB¶gú1‹£9D83*ÉǽÎf! ?8cO?¬«jÔüÖîÛ¼XI1õ‰OÅ’¤ük˜©º_0$ÚhòÃÉýÓè7ù œÊÉ8Óøq¶ªbœ´{1ŽÉ»µ¯;ÄЕ{/5ßî1j\” „pv ì`8Þ×ÙìÁÅL „θ±jï÷Ò+4HðÉbjoçÇd±E3½ú!'ÔL’ÿ_ªsr”Û¬LòZTy0ªËÈ+¤íÜc̬Vc»Ù½Ù§W0$8û2$'Ú˜üE÷Xþ™‡I.Nœ3ÌÔK³`L´k­ÞÆÔÕ¦‚tI2Î,~œ­ª'$íæ3-Xµ­4ãzó+j&‰=ÑHÌÑQŽƒ~ømŸ>>z£Àf)IÛc’nÇ—ÒÙ§Ók¸ –ù—´=$é¶®'ô§ÀÁœ„X’I"ÿ+ ‰1*t°ÙwËwø•’åäÙ%¢A–ê¬;eÆèa;áçLgm§Ð-ÍžùòÇÙÊ ›)Ãö7ƒÂË~Ř@›ã´©XöÁÚd¯œR°œd3`f*çÝi1¡ h÷ż¹˜«4M2ñvÓlÔú°gŸ;þ[Ð4MÅ™~Å4ÛÒ/^\½ÙqÄ/¬øLRqßu˜§ÊŸ—U 2úÆ9õìÁ¥b¦˜C{YÔ3;K5 éðR™¶ûÁ¤•¥Å;£ƒû=¡r’íÕ8£LÕø*åÏè»^üì¸þaY›@(I±¹:†™ªÊaB&ŽÇ°!è³NkŸ•yRNÖ™êÅÜ©Þé–NRÒ'Iþ|{÷”Õ¢6ceÙV‹)P‘Ì÷®óek»¿:të…¡1{ «Ræ‹Ò¼ñEÛV~ã­PÉŽù°ÇÙÊ17ZxߟÌÞêb Û™‘ßnV­É3³À<9‰~¡:!‡tÊÏfûÓØ•ó¤@½†:)ÏÆt )ÙL1ãååæÉ¹øì„Ô×ë<¦g€[𙜻1ÈRµøRJ –…õ‹åÁŠmE3’ö2lgkY1½uÿt¡=Qž—µóR/À°cu»®{¸M8’»òË„\Ò’Ë ¯a³€£4ÍH¾É@&‚,[ÿÎÛÜi†S„|•3ûÌWï V¦’S>¯§diª®GÉÙÑP ’œÎ1c§ä+1I³´íƒË[š;ùÝPJI$‘#_g«à+1áC™Äf—ïµ¥æÅÒ \$'äÄ|ëÊG§D’F!BsÎ~±Ë81²•˜8¢Þâƒ;w·ÎÝ•¶HnùÂVbâÊK-Z¹ÅgÚ"¹¥¿öt³CJHv\RfÀ‰ˆÆú9¹|i„|ÏKŽgc%ûâÒ a$'ôRa¶êšô”ð¡çñƒ½Ï­PÌ"I!ÓÉq¶^MÌå0>b1Ç Å º”³H¾GB3ÎVUh– -&ÌçØÎ“³µ f#éã[¡EH ¿Ü`{;žµgˆ£é#LKgfXÄ(‰ôag§ÒvŸí5Û!ç~7ÉhG’LO)Ÿwg÷i!óô ÌyÜOKì„$S”œúe*ó „&;>Üóo©eÌþPÏð¡Kméõì­;‰´h†è¥Ãl½Ð—ÿZ¯V¹Ú¥ËCe(’#2hfª*Mx¦’žX\X‚3㊙ätÓ‘Á‡ñäÝz!°Æ„%ò󸛥h>l@ÁJNJ)hL¡à[/^ãE½^ ä‘^ 0ÌTÌÎÐí·ÁÒç|ô…=ƒ¹­—d….óíÍžÿ-È‚d‘DÃL«&æ‘(øð7–©ú³JBRM®š‰Óâì²6¿­3fï\614§á«r³á~Ùaf’$­z»-õXyÓñí½”?Hnw‹ýƒlÕ‹"¥nÉ´òöÄ>.¾` ’Þ9f«™šœïnÞJ™‚Þw%ÿ‘BÉG,ÝÞåÄx½’,¾D´1~µUß–’ES6è,•T¤÷…KÄ´*ôjµ›ÛÖ<”€ ÉLï5˜²À/;»)T-}ð…ð…œ)ºÚ?Ê”­=0Rþôñ™ B‚˜í…7ã~mrA$[d*?ÈP%?5(ãVq¹Ù'TÁ•?äTŽypÌ®ì-·PÒþbo+®øCJÀ^R¢˜Ù y"ºÙl@ O[h%IzˆÙ!iùòÛ"®O¿DXƒ$6ÎLÚšpEY v/ÒðÄrUÿtYO™ F„|PŽ:õÚÍŒ%$õá[dèÿúŸsóÎÿûÿk|jÿÇZ9SâF¿“í©ý7'rx¯èÍnÿ¹–Ù Qðßÿ›5„HxúŸhÏÏzˆo ý¿óßøïÿúýf·q¯?{e4&)ûKÜ3oxøgšñ³_R.lL²s¯êáPºÓ.KöªžæsÁ~‡™Ÿ½9)¥Fò…ýìXX1•dÝîG™šR‡÷æå!Y,ñVÍ>àN)•&Ô^.1ÌVŸS²l‹­=»+âsŒÚQI>Íu;ÌT±ºcÞmOáàÕ j’´ûŽ3bü¥ËÆô?Ëù:„žH®ìšþ(SY8ùé€öÃáL21ÕãÙ” Ûnonr7¯}½] KΖ g†™J5c¼wÞ&®lÖ~å-ÑG“Æ4ìV°RJŽº“9³äx³O¦=9m©»Ù7žý‡áõzz³­‚ÉЙ?[ ¨Ö;ÜIº¿QÊ«Q#pøtd?l•ÙHêíå£LÕ+,eÕ_vb œnÎk@pŒ$ÞÜÍÃL±tM6Û?޹㗷B‚Ÿ£€·×o«”ú“x—¥Ÿù2¦ì¾"gO ëä”ú¡ë‡×åxÄø“3'%1’»°?ÊTÖƒNïS ‰®}ó&Òþî-Ò b$v€>ÌV¥Û?²f ,Ý–ón©È°F2k¯Õ}†tvV¶pÈæ`Û• {H¨£CÔwúAömsè† БÀì…ß„<=¢³¬›Ý(ÆF9:’ê{˜‘F§<õr§­(|îÝ•©-KªÿCC1õþ• ´ßÌtá‘ôÜËFÚZÙpyg; ßúK]×?÷ÏƒÍ )ýÆÓ?y÷âŒíß Å™ý CšàÄñwn·õÑyG†J9á©^ÊS0“WÌØèµ¦HIR_y+Ú ñ³€±ý—“, =}ROÁr$üw¤;È_<öô-õöÚ_@!]&kÂh$ð÷òúq¶lƒNvî½H8á«¢ÁÉiUŒþÑ€µÙê·Àiú:ËQ–ŠÄ"¦ö¬zèß1 y$‘ÖTƒ¡HL#0 ôĆ˜{^‘6çLƒ'î²ú N¿Õ8Å¿86f–™ 1Ó]q˜‚dŒšE0ýhËîõºl¯ð~ׯ$þïs”RŠàeh»½ïær& 9µ”,‚ùÇ0S•8ò…/‡Ø!¶E>|6š¥æœ‚QKÌP»Úsn¶. ¢Yªäá7„ë_Þ’ðÙ_°¼HsËÒ»ð;ÊTŽql#ó‰µG}öÑï5úŽA=Æ­íüE¿6¾È,%ðç6g*¥ 8º&š¶ Ó»-|ÿ9šV¿Ð:eu^ñÌ"›1`{³O²¹†œVÉ4¸¸Bñõöd‹Ç¬¯’‰…ä§’jø: Á˶јc÷l#ì±½ÿiéÍé÷$†¼œJ+?cõk² ÄÝ;·¬]2ÈüCòSÉHn9zŒ©ÈùÍ1Ù@n¤mkiém/9A•|„Ïz˜)@û{]ZÌÇú›²HÑæºYé’ïü˜êJúÃÄ6f_:\`¨§ä’ëæd£N-R"aesœXÛ>Œ´ˆSs®‘EçûÿÃÛNõB!M}%=xŠÎL›Ü?ÛƒDºÃz´ r2b¿½&g¾úûûf«Èâãwí¹½«öò½Á»Ê£c*Qg1¼‚*1aúßj*Ž{òœçŒŽÊýp‡Ž¾œ\ï×›ãô"ÅÔe›¶·½ºÖP¤l9î¾ÅÏ1¦*)„ç‘NÐk¿Q³? 
áýY…ðó†¬w[ûÜMþ$€÷Ò4¯Â[.âaý,BÔ qè´0aÖá“øsº&‘9½q¶&´ÚÜ?O¿õ2†ÔX†Ùník]ÖÄño•‰IØÍnœ­"ÈŽQµKn– ­çÎÕU¤uy{ÀÂjL[ѺžÐþ­Ò: ½I\ M¡ÂØåSíILN—4“èÕQ¦r4ÒGy…ÈÛÁSœý&’fåèÕåÙQ¦Ð×jÅÕBIR””н :!Ê… ƒ6¹Ínèäî(²± 3®ŠQ®ÅUr²™®DŽY¤b)fåJÌ“¿~A•­øsî÷.åäÓOÝë1dà?Ufˆ;œço;À‰%b“ ú¡;…0÷Ë©àx-\Ú<¡ÊIW…]wfj÷«?çâ=Â)IÍKaêóús,loñ¼lË6osôå'™™ÄËtãli¤Þ’ëS¦®‡W çôMï7m(ö°ÚG²‹ÇÚÊôÞlMé$`¾£ŒGLñeÏÈs;{¯ûMx”ÑkCC” ™æÂT†ó7©ù•DÂ.Ó ³e >E¶˜„6S5>—./J:%Ñ/Æ0Súc|8mÿ³½Â½÷.jú”b¯ ô—×ÐYÔd£2pûE‘>I°}?À±¢‡sµÿ´[ÍÇîPJ$¨åGfª’ B°ˆPYÂf-ß D‹#”ôlö~ÎÓkŸ9Æ CZ6wW¿¨,Gü»9Þgë$wÁÔ(N„MùOQ^tùà¼Y„ß?EË—x*jÌ/CÃq¶–½}?lðÉï_L‘ù ¤Ñ":å°U·µß,¨Y†F”ôŒ1ZÄsç}n›dL>øÿ3º³ß­8ç7ÕF~ ÿ²T®)¶ÃiïÒ^×ì±½Fûþññ 3U±ødzÊzí˪H$Hd b@Œ™™QI½Mð139o8ñ‡ÆÐÍB€æO¬¢Ã +rPc»_$ƒ-Äk›lç%ólÍ"$sê>ÊÒnuûv¥õ1{r ò}›&´•¢, Ù”)¸v8hš!‘ßß8[E ÷x9îL4×uë·I*"QÝJü" %Ö‰Ã=ìúŽÛ?ç@/†uhBB­ôf7ì9 Î9ŒD~þÕîV)áb”"ŽÓo?·Å“(ï%sº>ÒÖi{…Ý\R)Yè‡qŽÆÏÁÃrŸJ8/_Ï0SúŒÑ˜½û·dlÍør,â÷±ýâ³B}yÎÔö .oäËj³Ÿ 3U¸¿ÃIÅ„‹L#ç•ÎïæÕCÈÔ¡zf¶T»™š}~ˆ„»÷ÝN4…XfŽ£ëÛ7ö^Í"F•0Œ°w -«Íºì™ZU wPŒI_ÚLbtƒæm¤[ËCüAKX¬C –ÔãàÉ»CTÉ91;t½þüø™ý#ûhí°DQÐ|8È€W£¤ÃLÙèSt·ô”iÖ'VŒ],²m¯ÚtMîãì‰u%¾a”<ÌTÅOC~ºÛ;ÞÖéÓïjˆXB%¬1‚à;k·µß½š#â'ÕQQŠl¡Ùu+-ûHõ¤qÒƒÊ>#³Õ¾°iéÖWwøØ¸"çàÆ™ìC9üÀ,t·Õê÷ÄK$,Ê`K)Ú@üq jûø * œó[z=ù¦ÚܽJäc"n<ØÖ´e#TVRðX‡@)àñ ,Ö˜¼úˆº"`Ï1Q€ú¸‚~ÏXˆQÌçÚÅÐ6tF¥ì¿E`,aŒ£ÑQ¦pÄÇàÛ…ìᙿó[$È•ÅÓ›Q¦Šð4Æ1d ê?–ÏÑ —$`•XÇŸàF€¾Î½Z;‚¿°/†(˜l€_éð{VÊ>ùÑïº/%9 é †™‚Ç‹§Ø—èY†rµ î….哎øe”¥å°=h‘ÂçÓ¯Œ'þ ûˆ§&Z…-ër×+æè(Ÿ¬þí‹|úvŠØ/ºx‡ð!»œ¶{/oT•ANLƒ µo÷akýyÌ;Â’e¡ )?Èá˜Ó¤¿ö¢®Å“Ù?§//Í/î®ñäÀEN_?Ÿ÷+Ÿ‰ÁzØ¡ÁËóœx þGšm6¹+KÏê|˜ü€ôÉ8é¾¡Ïæsô M~›'ï0KÉÏ DõÃ\xÚ9;§Båœp7zòˆîWW¤ör*„3#V$€sÇ}•[ËçÄïTˆžÛ¾OKhÚ©¸m!->¨6Ÿ½É»óë ³doÉÊɯ£w$ïÔ]üy\옾´= mÑž—ºžàêï¹0ZhŽô<údžäDùÎùþ4I¯'úé/ÇSØ¥6õùãiv:-Õ—ó9 ´õ9á£w[büJiù½¤/Á#‚¶-Øå³õÒGæZœ&]Qt<æÉ­ÏÆÃgê"itvö'[ÜdrÝÙҢͫó) ‘®t -›¼aõ†öy9]!=ÔÚc$ßé­ú&©Ì-° w‰ÿq¯^C³ñV”ëÃÛÄ‚ ûeÑ#‚ÝîJîçͤôD¯ÉÜcœ­¶Eí8nOõìׯþâq£E Ö‚â“y;<_.Χäƒ{Ô¤'ßéý«ø×ö)¬ºõ¯¿“.z<(ƧÅXû±lž˜Éq%N‘«m˜©§-ˆ½O›·KP«Ââvür@«U_™ÎqÏZK^7>8}”©´—ãGMq@Šv%ßÏß{~ÚqîuÑ&ÉŸv¯Tfûð‹pþ‹­ÎÝR/ŸÆ•O#õ\¥ãlå'øeãœÍyö^äÕÖ1s£Ù,¦GÁ+I6\͇3p÷ûòÂß{q?ÑÙ|Yój÷©Í«×¯WN=9$w¦á³å3‘ä˜`[¿Ämï©îülÇéÄ˳kûÑÛxm¿éþGûB¬jP—.‰Ÿœ-T&î6ðz¹3´¸Ë^ÞFt]_1Mm³ÿÅÁ†âÞèÍò]êÉw¡úåY›U"ïôy{ªóléêŠkWµžÞÇûÛc¹oš.qPôl£,%g”¿X<@ä6[ÐáFòÙ!îé>ì-·µ\ša~XÖîŠ0?¯³{Äà,¾¬dne·*i:ÄìÈÅгd©ÙÌÚ~Þy”6 7î±Æ~ÙŒè£ûßßCàε§„‹Ù|º_6^œy;¥µv-C‰›ÝÍ^Iö p/ÚÏï’¶"]*ã'‹‹|rzúvþãÊûŰ]í0ýê‡dMÉžêü©wêý:A~˸IǾµãmbÜ$þH¶Êa¦$ÁÌß§³ͦ®œ>iFœVÞWÜE;ÿŽÂ_OÊÂúák8ðÿ »ßÌò\ëÓ¼bÀ@œÊ~#o~†Q†–ce—óç.´‰«ŒÇ´ ñäÚ/ǿƭæ§ÌáâJ^Õ È¬Äßþž˜H¦¹Î ÁÙï,`ðÖŠðÈKˆßh%Ú6 ÝEÙ¢òÖï@Õ6–}JñÛùNÛm˜‰ýìÂCØWx-¸ f³n×ÙËòº“½ç eC©ÖÞÂyu&rXæ¼Ì¿i€ÇŠU†Ÿ6Ï–¤+¬Fh"Þ-õ#Ù[y½pO4e•_æÅûÕ0ù!ßwÔ¶•L®Ä¿ýu|_N*²»vaê 䄯:¾*¾ 8*óÖHâg².™üÂ<ìB¯@ûœ=óŸœ Ù?Õ±{}g~ÜüXûÑUÐlœß9~Cgtæ m¶±G—ëbãaM³¶›T¹âgàN;e†^l£·×þéãïzµ”½iqê]¸ ÒÈþÖ¾{§…Êv|Û°¬ƒ”ß*î>ög–£yòÙþ©å7hÊú5mÙÏ.v¬ ºõ[Aµ_¬LbšÏÁéÞ~ã'b[,S¯ŽmyÌ=hªxñ Ã~¬ÆÑæù0Ü ¤³ý‘{Fž‰Ê·V¿ã‡—bš¥½÷ˆëÓoàõaÆ/ŒO‰1£‡õ‰³¹„ÿŒËÅc±a¶ŠÇ¾2Ôi_uß–­'ªò|óS©ŸÁuZ¹Á±Ý×€ ×ê´ýù‡W—EX97Ýc¨øö᫹«³ƒÈÆ¥ì$À“qæÝ†n{¹ºø­²s³¿xö¿íãö ¾YZqõ½.¤øªý#Ú¨‘­üëÔ<õãüç*WƒÇÁ£LU»2¬oI´ŽÇm^ºü˜Ÿ­¬¬7Çô|Û^„Žwk¤hZ7D^¾åG™*Aüz_˜öÒmázÜÁÓíÏxN(ËËQÁ½tà•0ýwßîû‹=š–×ËŽlgó~ñëS 2 cS"l:‚;¸¼·âò K…\Õ¢_WóêcY.6×ï÷¢Œ[+m½ß—G[ûù{?ÅŽ÷ƒfÛcíw·wÁ[¯OÎû«h”¥bÅ]ƒ=ŠÂùöåûUô²¬tg½<¸ØqX~pØÔ‚}î–‚GÐýpW‹Ž1U¤“ñûb\@¥ö0Oò`»"ÒÞù­ÀÈÖr4öŠÄf”üØî–ä©òþz;àÂ’wHb3ImxÓÂW£&m ?âF™*6TØ>0½€Û¯Ÿ×¨.&l±—¯¿LXô¸ÉûtŸlÞ<&WqŸŠOH~£ûQ–Š/ÏqZûza^;Öãn³V¶¥û¥è_¢ zÙOy×{Éù¾Úof/´Þ>ëÁ]?÷0ªä¼xm”©ê` îŠI¥Í^pÅEwXö2/>%l{³m£lðM¼˜X£ºOF™*|Jü~Þüh— ®-ÀÙiœå±fûªŒ§‚PsÞÛãì³Åi«“yyœq×#Ç•—ÇÚG$ž6;†;½cªXRу HÁ5ªí¬|œ•£Í^Æ_‡ s½Ôý¹ù½ˆ±}ÞÃnlØÏ¸ß˜¤Eö’=†ÙªÂÖàM·BÖÜ×Îw«S'{Ü—§` íƒ;ˆ.ŽŠì'=vdI\Kt$_öRÛF8JÅ#¾æå|a¹3>4/ƒ<Û¥+X0Pœé,¹K‡Ù*®Ž3Fáš½ˆ.{y?|‚ Çáƒké›­^_ÉÙÍW»kÍþÐÝf•l«¿ Ÿ"¡Gr<¶Ûu×ò 1—ŽÃ3-«]žHïyŒf·ïÛ0[ʼnϨæ3´Åu¿¼ð#£z„¼<„èÒ‘Gì¶$óºkOnÂѧ^ße×Q¦¤ª0 »³Õ·pô #õÍÇGý¢CG±™j 7ᾜ{âô=!eJŽ‹p8ôÖ/ûºV7Á£1Ÿr~¼e#ÁŸ#È]1?Þ>õõ;x¡—]¾§#£LU% Á£RÎ=Û¹úŒ«ùÚéýÎ>l;»|/,eJ^SôN¨œñâÚ_öüSŽùì¿^ÎÊx! 
¿¸Ã[¸qzk¦øð|N½¬æš&q»½ŽÅ–ÝG鹟bƒ¾ÆÙ*žB<ñIâ²ÜÃNž5zðË©è%}ãl¥'ïuœóÊ Ê[ÂçÓƒÛŸ™îЉtÂú9NO¯áÁ¹»]}²…D$r–Þ.K8¬/;ÌÛW]ôµç{’?K˜‘B‘×ó4yޱ¤Î^K–==dINÓxv"õµº­ýâ`èñúr˜Æ9ãaê©W]ä  ‰ž}²$>:œœHÇoV>Æ”ñ¸OØqçíÚ|Üæ@!E[p55Vbl ?>÷û@“òz—Ôit•"°×‡|ð„•eâ ÙoPþŒ‰$&p21Ær‰gä,±Yçݾ.'/?¬B«]Ô_>÷HùOw—«—þæ€H§ƒ,'i<ë}d†BÁsúÂŽ>¹ã—”J<@¿?Ζғgà€°¢¨¬ìc]r˜c‹—ùo¦4Ó›Oñ-ޝ\¼„ñ°Né'ævÏ÷‘¨0Žf†ÙR¿Ÿ{<n'»Ù×OìJ<à‰ô(SÅËŠ/Â{×áÓÚ~õ‹ ý¶*;´ÙC-±F ~BáíŸ0´,TKøšB7ðÂï„°ñ›k¿/®Š_s¨ñ²ÄâáX³2>8ñdŽ9%@¸ÁÏ [EhNЯƒ)ıú¥srJ„ÀÐb˜)‰/ãÙ²À®»ué7$iÄ)ç£@!(Úc½×û:î’±`JÜR‡“Ç9å7Õ¢Âi}|½*, ‘£¿ôq¦Šw”Šn¶Ááÿ½¤]¢Âñi3%1@ §-Ö§·Î§_®TÄ—9ºz €b˜â£ø©S[ST(¡ŒÓ¤Q¦'…˜ÅÂ3ê5Öyó6ƒ^jXD‚»Pqtòʱ“´x–î.4¼Œ!èk<bÔ´M<íÚ1²{È'QaŠ‹œL ²4¡¹•?ãuz<7Æ7-ò™Ø }ìá6Kb¾Ê¨K~ -Eh!|››¬Ñ¡¥ô÷´Z‰„S„õrFÇðÉy’5¶cÏ ÃôÔ–ëe ÄÇ‘Òg¥£÷+¦rl,Q‘ó¹Q¦R€cïiZP¢héÑyÏŠ‘°AƒMé›IOݧ}à´™½~Å Æ™‘‰ßª™ã©šô…X Ýn£Ož›²çxêeAÅ` ›ì˜4Û½ß[böP½Á¾à|9Ę‘Ã/†Îq¶Aû†Ù*"ôâxÛª]g×Ò¬ŽÐr £ /eJñ\–€ç.<5ßÁ.“"¬Î•‹{Ï` ÚÞŒSÙš*ü6ÆEK<õòÒc€ÓÚÅu8B“È7A7dË^7N“óž”C04”Æpº@œcßñq3%/=ñNÀDíVE'H9Š–0À£¼gä… Ï›.ÈÈK‚蜽„ÌvÉëÊ—ÀšøãøP±þ½(‰|StÜcæq¶ä¥ç°ñ(þ<>¥Œ"öÍ¡ƒÆq¶$j 1"ÂÑK Øÿ’âh #_ž@ Ç o®tG³ŸÌûjÄÆ0ÌTëÅhÌæ«Mºq×>kµÃîƒM¶ôéS9bc¬7Ζ¼¨Ù=æû,ý2ä"ÞÏÁ—ýÇ‹w­A…!]Vüư*à9Áß@ä#H<\ì,ÇÇtRÕH¾¡ØÙ¡ûÛŸî±ôè%ÈÑŸ³ØQ¦rhg_yEѱÞw¯ÿFÛ*ÇøÜÍø¹ú{Ÿ<Þ²ùè6‹ÈÖ†_yCÉì†0dcGwŒòçbÌ©‰D’ kbTha_ ¸ôbó"‘ȱ^A)’}Å%8,o:˜Ò‡ì9ÈeJ†ÈáæÃȇòÌEöüqŽ2¥¨5Ä„@­8GÛ‹Z{M¡¦9ntÝú¢hsw`jö-Éràø´ú ä€Cq©XûO."| ö~huŒ­"LŒ‘Ü—uÞv…œŒç5ÂÏÁž£ÕQ¦*´ú ¿uûÁö¼Û#$[ÀуÙ­^nëîËéBë—ž9œqVð¢ÕŠ?{F Žñº.3Zå 9’ðˆ6D™>Çц-v5-ãNMR$Ê×3í¬Û¾qûèV¶×Ã]Âúú7U¾è´"Øö€bß{á@ì5°åfJ#â³ZkwYÚ/&¿Q’=»\çS†µXŽãL)îá/p÷Žhן¯ž¤\FBä—H)Ƭ_Nâݱ:·>\[c§×¾/„l‚]o쿸ç‰I‚“ÃQ²£,éy”BÏ/G³Í¶~ÚY̩鄧 lÇÙ*^W|Ⱥ0\΄W´ÚK±É‘Dú8üêÖ„=„±Þ{i˜k±)°Ꚉ¥P×^;jWúÍ–Þ²ÜÝ¥šˆ¥d­Gu1$þr²»˜¥OÒ+/ ›ë¼=Ʊ×à~'B’,I¬ët~”©¶¢h`Á Ó®.¿PT’¥Ú޶¥=ÄÀøÊþ®®‰ ½È$NæÇŒA0/ÆÞ‚LNR$G9P~I…bà ÌÍ97¬ÞÒ—2‰m˜2UÇG1võÇ`7œÛ‘Á0Q“‰oùH‡™RhÂ`@óJõ—óôÓ§Hr¨ìÐ<„Á æ8Ý›­Ã›.4CÊ¡òË{±«“îuçËêµ ’ÔäøÖ©ù0[/‘qŒ]QÌ»3vÝÎ~AoÎl$¼å3fª"ç(äŠØò $G’@Ù£÷ƒœï¸béÄ Ê‘r ü†ÎŸ±ì—÷íÁÑÚü`†ï’ØH¼ëä|”©—@9³¦´Ø‚;ö™Ãš×ä€×Sa¶ŠP9D³yXXcÞc]y‚¯?`Ϫë£ñ2Tf*/Ðg8ðµißv¡gèšÊåX„O3ÙYTa¿±û@©ßåTNñ—@.FÙv.[ÆâÆýHh'‘xõ vÞ‚ó '®‰]ÜUalw‘ìÅ|ÐÁ8S/‹?ðÞFóI®'1¾«&c ©A Þm±Úìq[¾íˆûpiJ¢—ã{fÃLézJkÙ'.ÓØmÍ¯Ûæñ|Ç#©B-Ç„4Àç~!ª3ÿìWUKBšS§þ!võŸ¦š­Õï‘•4Ç÷I7cÔ µcc#É>»’+i£æŒè‡™Ê17 YÚ£³ø«Ð$QÂòÁ¦$žÑ;”Jæìþ™ånwŒ)¡Dø~jöèýðúw›hn»¥yTÕŠœ0å7×™¸þåkfç}EN(ÙÂûšI!¼ÏÜBOKÇ2U6—Â|—JF™zÉb ÿ}ô5Zå¨_6<‰óïç:È– &!€`ràÂc»Š‚ƒ‹d1ç .˜„|ŠÉÁ¿ÙöØÇçÆH²˜s†—!ñ®r0^{ü*žú·b2ÈÖKŠ£x<‰™‘§Íë"åx)Ðç3eI5“g:É÷«µw591Ò\QR—LB:€ú\[~›¦äŠ’2¼‰&!ˆ7ÿdßü]ísOÞR‚—}§]Ãl½¼÷ÄCè8}[.ÖÕöo•áI Ï§:ΖÆJ)ŒÿÚ•›+çý-»wàn-ÀAÈÚ^vé3EfªÒv 2y4ÅØ­Á‹—zå¤6§ :c:àé–-ÙýtžRä´9cxÉb6€XqåaÜŒs¬EáåŒá%=ŽA<ÄŸí¿«‹?9ËÍq¾ËHƒ,¥•ß–_oýEñÈÀ_šë=V8e¸黊4ÊT½IS(o/lûpêˆîx;šå¦hŸK”¥"Kˆ+ÆL»¯ªD˜íýd@>ÕyC™yÇøÎ™?,ïÞ ©¸„€|U1½€ÞõaJ°oýÖ䜊k âjLH/Ð’`/¶V¯Ê™·¤ ýãîÖO¿»u7?süùUœ,ûña ¯Àù® sJª{ª=ÎÖ›’Èg×}Ä÷‰f9kNù qƒ,Õ;)æ3 #iÚÒ¯Ô¼Yr¾ýa¦T- ©‘ÏŠÁZÞ—ž,išœÓ'×hB:‰ëÂíÏ‹ÝkÅGÓäœòüåÕÇJÍÊÊÞíp7­¹­ä)®s ³•ó /¡œy¢3èÔä6g$c-©Èò|] ç}Ù>žòK–,© ÏJþ{pµ¼‰ðh¡Ì2sÉ0%Ö$9&Ò·\2 ÌþâE¼ø–”@%I–,é/)qÌ\ s¬¼ìªÈl%»¹5³A¶Ê(!å.x';߬Tß/3Ï™­ä7þ`G™RÑ,$B¶æÓ뻯һ³%IÎÉ’×¼…DýÄ~n÷Õ¥’$K²T§Ä)yq¥kó×uùè–`Š…ç $YªCJ^ N¶¼°Xœ§¨ ç7ÌŒ†™zÙK1yAº¹°¬ñ $Áqr”©—Ô(¦/¶D[Z5Ù¿hïœ%¥ r†Ã·?Ì”,öÕBú™é~ïg h@_>ΘR}9"krS~«¢œv¹òR*ï7ÂqÖ¼3*%9ëâ÷ýL¸V}êÇ>­\}“O!ß×Áëi2Ÿþ¾Ø‡™ª{JÖ jžüË}˜E‘ÍKBçé0[)]Cò‡¦–»uNr÷œÒµ¤úè3ñ÷qCà²Þ%Â$9dš³58‰u½ÝŠfF’ÐýMÑ Ö—ƒŸ 1¶Íámíš¶ç$Ì%Ía¶^Ò·˜baþÆFìÚr/ÎÙ¼daþ\G™’—“5<…‹Ž§e+är:ÇnƒÍ°’òµÇ‹H`íS• DpпkLù~P›©¹Ï”P*Ó¿,¢˜ªAÏÜïÏ #5“—tîÖFÙ²9ÿé?ïD0$k4q’´õ«D4•O ë̓,‰`ÌÕ,{û >½ýfòÉMû¼£*i><ô–tމà0S•„ûÈû á´qr¿ ºÜГêù¡u2Z [³÷âet!ÙáK¼3?/#ÜÌT{SÜÛÁKvøBAb¾æ¢.ÜDûçÖ»º7² Éé\ fëeñÇ| Ý…›·‡Í-˜œþ$š!)'ï£L½ä‚1_³uÛ‚Kóâí*ÒÈ9“Áq¶ŠÕWŒ_\ô–©áAØ=ãÐr׳ÉßèGòç ôŽvø-}I ¢çî!ùCî~à‚3å#{¿¤‘qSLþ,<¿Üýo [r‚ø‚VbÆx1n€5Ãû8 D’²:—œG™Ê›ió1!!có Ë{{•I$šÕy¶>ÌÖËvŠ9Û—eÒðñýRô‘伎»i˜)•†Cþiø´7ÖVÖÖ‹²ˆäÑ¥áÿÙõ{p—ee/ˆÍ]~æP†?h3S<0ˆHŽèâmHÒ Þî,EÙº¦ÐB¹Ñ¶ÊÓ"'i˜¯âÀ½Ç3¶È‰œKÁÃlÕÉAJÞ©NNÞµñ†ôA.$Áã g+/ñÅCÛ[™­YdH$¥$EúþHCVéÃáuÛÿ?ý>yÁ 9ótYô™U¢ùï ž´ôúr¥ 9ñìÌãøÃŸüÆÛáЪ 
¨äniÏÇò¬'(ºtÿð0K/Ë=f«PpƒbycfÆ’ÐòÕ 3•óRà(Ÿ°6ÿæÑ¸H¹ë`S*~?S\ÿºæYm@’ÝŒ@rÌ3SKªì©|hê7P3I^ß©VJ&!1£ 1êÅ*ÔBN—«‡Ù*ŒœOâa Çê‡C~[HÎézõ0[u€‘²ÊöìöÛ/&¿w~Ýw¿Ã<ù›”x2ceI•õžBY÷ÀµýíÙ_X†59ƒåÙÛ–4—egJ‰s5ém+ô<lžD§§Q1Í…Bÿ¡ƒ¶`Š ^qMN…˜Å¬Ö2‘é$-ŸWÇš›HæûzR6ê*ýŽ˜ôðêº×äŒõVüÙªwOJGñd'6Þa››œ²ò% 3õ²wb6j/®mNŒû˜¶^Ã,ÔF2Vîžq¶4×M«Æ'wÏ=0ÚÎ 3$lkÐvN~ߪ ‰-Ê .®ñÙGYxI’_G!³EazŽš­ÙTÅK’ýz²R[KvZ@OÑlÍ·²›hRN_ØQÌG¡þï~‚ÿÜâ@’²zÁ(S/ÉnLGí™®í߃‰ ”SÖ›E ²Uç)…#œq:LjÙ}`Wæ;’ã¶Ug81ƒ·”þ\ì]mw¸˜»v(5_ÁÛÍ$ËÇggJK7 °—Ó¢FQS¿MÀV^rrx”\| Ì&ÖúPFû3-$˯ DBæ»Ý[¸;^ËN²|ûË…}×ö/çDì›G…ÖÝÆâF%p‘Òݵ& ÖäÀ×›?GÍá0=Œ0uæ?[ì¿S'À +'ä²xT„\¼ýtõ%Yp3Éô½ºd”©zk¦$sf&ñóéCÅ”›I¢Ïw4ÎV.Å,Þ}[OvB` )™r³œé»ƒeªªÿx¯ÿ8±º6Ž>­0Y†|¢º l‚-oÕPL–¡W}Ht Z|¸q+&ËР S²‰—ƯÓ=ÿ'³­œçÓi 3•W¼€„ Ôecè6ú¨€L¶4Ë÷a¶êœ)¦ð^‡)-mùRC²%Y>ßþ(KZþñd^ý1㸘{õ  2á^þXÊ?ìú˜òâÁLÈ0AŒ( âe»¸Ùšz­ 2Á¾Ø4,wC@Há!#ùƒ2æY^†Z’æ{9È(S¥{Ï9<*8–n[Fê·íÓÊy>_Ï8[µø“rx@̇WN=¡¨%i>ßÿ0SZ r|Ô‚àîóu9ïU>–1€×‚„µ «}H˜b%DÁÇ2ð2ã£Ežw&Í×Ùq¯ò±Ì|Á¯ëþìÕ !3G p¬…Ù¡UâZ9y÷jA–êåžòrÔ4øàÛ¾¥ ¬•sw¯e*§å • Kmíí÷T?áW9umKVv ÎáŒN\Zip¡YA¸$€ÿ Oˆ9ûìÇŒ“á¸'Ĭ‡gs&M`3) PgKˆBä¿7¿[/IçžÏeÆà•1 `¸!S°Ùª îaö‘Î dð]¹c©·Ÿüœk{8Úê» ¥l¤­ÏX£`j‘»ùjh+=Î >,Ô>‘íâ0ÊÔäUàHõÒa…½/Û—~œ·àuÂ3¼:h”©X–ÃnË€ˆ‹›Wžð _Z£L½lÖÈ*Œ^lHZ“ÆJ£â%žÁ½:ÌTBâÊ1Û-ô˜vž{8Pš¥z[vÏœSÐH]y¸*6TtÎ×ÒïASÖ˜ÙˆW Xa¯«-쎠;§‚6f ÁP> ϻܩÌ>p¡ ht|Ñ›]l1!¯òqáÄKzf áC£LUÉ\æ ¨ñ¹øwï*åƒAð 3Õ²æµÿ#wŒEjq×çk·ß æüô§–Vo…“U‰G'Wö›[ª˜Hä¶.N ³!™‡/÷Ì…mŠŒ³õ{«ß¸>$&=–ßC°ëgJ³'ÄÇR‰p‰­ écñ|~låý[ñKÁ/·Tk€"“1oÍœ¯•áB0ãL_>½oHç}üØñøæñeÛ—àüö›ƒyîžOtåeBÛÁb±üŒOte;¨ä?Ÿ±ýÉŒ”ÐÆË¥ìN¬ìØ*ØÉ1C@Jh3a»Ÿõ7q+lìÔ!ÓŸþ£ƒÕH‚P0µrñY±=;KfZäµW£LÕž)‚ //·@úpW)Xäp˜­ßÊ|¬C¸›™ü¶õu”Ö¡­Ì¶ÆæƒJ12€ÿ¼±«˜pLí¶Lm†çPæšÔ]";ÄRUÍõ@U^Íçq°}´b·™f…纫íttwõͶëýh÷sc¯‘.À¸]íO&ÂÈsÜ>¢F!Â’9 ̶æŒÜÝ"G6:D8C²¾5¯?üÙk!¶òZ Y\WŠ+dË+ÄF™ú-ö¸ºñž/f.}:`_ïqu{'ûì¯éöÄWŽ, ðrñhaóû±…+/óº³a¶jÒ–`Ø—~3ú3 @œ™Ë £LiÝYàj¶AlŸí†9Ø¿lÎìÍëÎW³ºc•ÁöÌš½qE¬† $7`¿ž½`Í‚Þ:YF(·ìwÝY€a¶ö—Ã×þŠzµ '^æÁÌ K?ÏÖâ*,¸7ŽeW÷slÇÊI„«·gW÷º‡aC šW«X¶ÄU®Ì9S8¯¨f«Îåw³…p¡ä0ŽÌ›_: ›s¹b˜­û}%çÅùˆ¶©Ÿ/tzÑÅz.矜¯UU¿67½Ú³Û q±DDɸðÃw‘xž¾‰†Ëdšk³MZZ€Î"~a~x¾m -ŒlÐÇ$þÜ“Ö~H¦áà—c€ñ›ãÓ=‰`}ˆî[6xüöÓ}Kz¶­«ß+ÞO/¸Ë0^ ½Ë8[‚ #DÕÌN–q\lx(Ä ò} ÷å|ò9^Ë—A¿@?º˜a¦Ê"=ô÷n,`mÿðÊ9R ÿ3ôó•5Ì–8‚˜$ïeí….ÒBBN¤ÿB"#,´aרÆlù)“…ÿ‰"ÇÙR™–¥og&]±L³t Z3R÷§"“u…c Ž¨pä{{ÀîOT¡Èd’ë RG£\-âµ*Cüf¢(2™ŒM2§ý‹ÌÀ)2ľõ¸ ²X!lõ®÷bÉÖú§ÿìn+bS_b³šs>}‰e­"£U/öeê…Fº ñ`Ei…ÍA]È;UªÈ”ìtœ--É|M쯓iú}%©*B=‰’"Ù3žðÁT§ÝDG'D™úÇÌ/’=ËüÚ¿5}h«cÊʄпèK¢çðP”·þœ<:º/‰ž^q÷º‰©ßÞôúPTéýýÍ/¬wÏr)„zyæ(Så^JœïË‹|™ð]‰F! 
ï}¡õE×wXxü.yT!A·Ñ0SU]æí¹…¼½í ²>Q2üó–ç9 ç®,ö‰®÷£ç9¢$uáF»Oš‡úÎÓŽ†fjò¯[ÀøLü¸#͈Y¿ˆÞ5O?Ryy¥ÔÇ/-ày†t^Ý9ÊTÚ0íaÃEP`úám÷v39gr)=Ï0Ôhœ­ I Ñ‘¸â‹™MFÝÓøö‡™ ¡›Ër`g6}ûžf°¹p9>ÐÝŒÉØõ6¶”mf hâÛÂå¼T4´ç5KSËQÚ‹Ø.›†NƔɰʉ–9û÷|KzN¨tÙ¹³#šCZ{=O¨xÙŸ¼€b ÁOw´›«K§³tj¿|^>@€¸ <îµQ–~Kâ±Ñ"Mûr27FgLŸÎ×2µVâæ•¥Ãl•™t‚i~8›Ð6ÇÇ{c jQÝ8[ZZœãDß¿ïýÆ Eà²ý©EèÕ>»ÝSi³Úl:âÆÕ?8‰ÓÏ2cDjãlIÂêbq©M{›ÏO-ø}†l^/Æg­ÌÒwÞOXñûÌØ^Êb#óB €°• ™,SwÁbŒ)†™Êyi¢gÌÄ‹¢æ÷”"ó5q}øZà]æáš‰ËÛ×¼ÛA¨»01¯·f«ö‘wA·Á ñv5ߺ w!b\R£,ÉþÜ ¸X¨‹o~%ìÖþ‚Ñ"è2ÞvÂj‚]öA®T<Ã0b´q¶ Œ–#ÍÙ/\­¬qù±;³‹\­®ê ÌÌׯ>³D  ÷ÂÕÈ%"3ûrz?ÊÜ'/ ¹®öRÖ‰”¹‚IbãÏø+Úž©q×8[»"ÝB­Øæ±ÕĈ‚Ü {ñ,‰H,͈éP3 ÎSž®d]Pž£›€é¼7w­Í3)%ë‚ò|»Wã"bC©kïì]û]‚­Ãq¹³õ²à#bC±ëi<¦½Ù¹~f×Âà­Ž1t»ùäÔáæþËÉ£ÃÝ|rêÞØ¹öº?ʪ¢DíÜó¤3,Ŷ‰'²Çu5Ì”ÖÍèu³8 mvæÜ]\¤ë‰2ï5eyñdüd!ÉÂ`c°_Àõà;DŠ(ÑÞ·ÝgçŠrkA×ntÆ(!Ï„©y›{÷°ÂõL;JO£"„CáÔÂÉ“ P<ƒ:¾a¦¦ÇÐÇã‹u¨>$Óª«9á3#q%uNl‡Ù*óûD᾿ñ–÷Ÿ‰ ©ãÎgK‹ZÑCQëe“׬ÆÂ+X¯ ô{Ïò„3,·¡hå°éw¬k;}‚¬ÝZÇò)ux måÇ€ r« ÁsàL!Ñú%ÿ¼1(òÁÈN‡nÿ¼ñ——tâßjïŽ0qB£<€1–#1)A@b/ò€?‘=”æÎo:Ïôy¨OðÞs”%†b\kzQ[ÁÉUÊdŠøîVØÃAz±’µ½æ`…á±á8[ezО-"8˜îöÿ þã¡2Ê’Vâþ !Ž€™™ázøÀmU$þE3ˆ`ÏPŸÝ0oyö±PFTð/ì’Á0S4 ËÑL/ýÉ6j6Ñ|%ž"ŽÌ±Ä†znÔhµ{ÄÒ¢NHô"Ì QÞû¡Ô¸îÌþ u"sÄ—êÞÈõHÍ–* ñr•„ýÑ­Œ³%Ô02Bó,3no`ã8“BŸÈñM“L`ïËA«swŽRUAàŸ÷³U': íabËŽãÊî$òµIWÈìËj!ñ+zŸ)ÀØÚ/·ÎE&ˆá…‘èÙŽpò·½¿ùì,„ú‘޳¥Å·¢Ì ©Âtöe„„Lù¢"Å3 ã¯óê÷èB‚>w¹H6¢7”¶N¼vcòŠˆþ ž#ØgK@äx–”Z»¿™‹UuA}oªA o^Øúùð‹øÌdaÿBçBìÿ¹©çYõØý‘¿a-ìÝìôïw&eø/ŒŽëjœ-‘ B6ަ/ìÿ©ï$]ÿ 4ŒXsA©-¶o¼`^ÀF„†ãl 4 Œå·u„ÇàM¢!GôòÛ Q~{QÀ]>EP°~‰fkþÓ>š¹®Ý yR¯žB[A1þvÅä#·÷?ЖàüÈ3mÓOO‹ÒN’(ü–eZ»ëY³´H(9ÎV %#9¤èeQ*b oÝ"ÑE®©A†Ä­Dù½;VÚÛò>ÅB‰˜2æ‘1kD žOÓ³eô¬êˆi#þ¤o¹sã)ü6¤1áHÌAäÞ<Š_à«ZˆKz«a¦uF°‰Åµá®I{´Ó='Š?‰¤"ØôJVøöN ñCà§»•ÔèèZG‚‘¨jFt‰èœ‘j¡Xd`IÔ9Ζ Î6QÆÆ«ßæy÷ê?~¾Ë¨‰H¢°Ù[m¼5û#T³ÈÔ’¸sœ­Þ‰¤-ˆ^;â *]YµféKk%a´‰úhWlø®«"düù;Ž´c¦½êÍ<¢ÍÏ𾆬Z²¤Þ1ΖÈ6Q}2e郃+$óO²È6¿¼ˆëÜh‹£? $óÏ›v&‰zãɳúøe*„Yrß³%û>²MÛø-·Éìî¢=2þ|‘85ìåÆÏ&ÞBªÈd‘Hr˜)óLËŸþóF’‘âõïL£[èöñ×/BEf‹\JãlɶdÂN€¶ò 7ͳ NªGFá‘HBïFnÓöeøT¨jIÞ9ΖNL|ÓK£±-j¤¼%ºGf ÕvH¼£'VLP5Ë]áÈ*ˆ0ÑŸæ±B®EÆŸ/vmÂkõ;äT¹HêF×<ÆÙÍ#¢Ù/'Ÿ ­ëƒc Dðm‡µÅ“ÀSQˆ=ù^ýf•„¹Ò 3õâ "Nµ%°oÜ™ÆÃX{-*ƒ WêÃl•°6áTxoÌ„ÅI:Ý]lIgÈÈ•;vœ-ݱ‰Îzöd'õtþsß ˜D‹ pŸ¹PÎ|ÐÕÀí°öqªÌ…r惷pšETSõ Ò5»tµýÕ«3'+¿Ç6¸örll*{Q„À’ÝŽ³%€%’Zˆ'¹¬.*Qr2ÌuÊödµ€lþôþÖÞ%Mv]GÒí×(Öö§·äãÈiŸ÷f $Ààѹl”gedl¬p=@Ð~8õ½Pr²žÛÕÛïÁ,Q_µªÕ¯ˆ½¹“»ÿw –Úí°P9&¥Öv¬ÛÊgeÚ}êG!‹%5÷gvÉò*TöÕn5:ýýÏ@G%XŠ·ãbÕKÒWÑìsP_=/d”èˆK4.V‘^‚Z ŸúɽÎÔ÷Cʇ²¢[½X+/„€•·v±ÿ¬Ÿ¡PÑ¢H”þ„CQ¾ýÖlö“öÜÑ[ ˆG$^&˜q± q8>ôöbðàV\ *6Å£­·ñ_%—É+jÁö·õTo@R¥z1³WÔ‚MÊ;ÑXË}r^ QQ.îOXöÁ×2Î|r‰^9 ZéQRx) ‰#ºpT±É¾˜ÝB‰Rü;o%éÖn’õùÀáÙ± £,ïR«Öo’|k¹Æ®&DÆ‹ÿ‚‰ÄÛQæ¨Xš·’ìëÒÔäX@@”Ʊ*Š560N Ÿv.ü½*Š5ê§Fûöª?“Ï+…8ЏÈÈ}-Ñ~"–r©,ôR!Jâ¨à ¿ÚmÚí $o¶P •5cÊxQ¶´bGçŒ5ûDbÁPY3ö‡`õÜî9úQÃÅžC7Мå>ÅŒDæevJt¢(c2—½©ý[4ï(*KÆ$˜(á~9øšÿÝÍ>•—ùeX¨Ú¾%\Li¢‘u‹¯(™%•—Õ @Eb‰z0ìðtV™z±¾§§½YTÖŒÿPˆ£†ë=,˜c…>TGXOÖy©‹¥W")Âx`ªkWâôÚPÑ‘¨ÆÕ;‘bÓùËÚˆÌ) ’DCö?›ƒ/n|w­ì<Î ùL~z«¢‘€)‹%I!JÅ05bPn»ý¸,åHYNþE‰:ßåéÌ!f~D&4«Ö’Æ‹§a·©Æ¯²@àÈÀÌ ÃB‰~Õbxä‘ ÛÛû‰ÝÂ’¢¢ü[@Nï—}–^íÙbSø#20“¸X…€åbØäYwMö0Ј (I$åRAŽz±Ý»öTµ—ª…öµ K¢)Ç¢.–p °bý:_5]¬àðÏTÕ¬+…¿Ia:JÇ0ÐOþüÌNL È”åe Óãb‰0ehË0 œÁ¸š>LEU–ªñ©¨ÃçŽC Q.½bÌ$ê2eéq±~èÒQ9F;ÆÆù”wq œIÔeêÒãb‰bõxð.îÖ21Ÿ(³ÊÊoÉ;ŠÒåö±«Ù²ÀÖ\Ô$Â5%ïq±ŠîX¶ë0{VP«,‚ÿVŽ’xlr²’²¯ÓVöØ”3SÙY ÍÁY`Æ…Kòa¢ÑRpù <9G*[«o 6StÂæ›ÕÌÒR&oâåò›b±„Îü•KòVÍ1”cÅhûÚ“ûÜ3ba½µW÷ƒµ¤wy #îÏn 0–4qŠéÃB‰˜¥sôÐsôåt¹ÞU¨ˆ"¯÷wàú×?o5=êÝöt-<ŽÙVž™O—·¬‰SM뇚õî/gÀ}œíP®/[ÖÄ©¦‹U¨éQ<÷æ #½“M+ü÷¿‚Þ‰¼þ‡äEiK63­YfÔu&dL„kJÞãb’w|4MO7AŸ+™ËéFü7ª¬.§' ¼T¼£¾íí€&L×éÝžà©Ny2ÊÛ'O¤[­¢ÁËxÇu©Ùâ¥L£?bvÍÄ–* ›¨×Ì0ƒ‰ê5n+$7ûò”å |—uð?òKÔ¥1{û^ë07Õ®™_ÆÅªUï¤KÛ£°QÓ²Ú…c”¹‰vÍü2.V!Nͧ¯ »Wˆ¿K:xY{Z—8åžØ°ÓZ˜·Øº¢ø.!¾2suÝ;JÓ˜1…áNÇâGlWÄ-«×Ô½‡…’äUn4[`hWáfÀ زÎìUnS(ù̶X«r-[VÂý1È Aš†×oEÙÄÆD»¦è=&Ž$—¨o›áó˧k¹ˆ ÙDÿCñ¢4ì×^Xú¹‚ÅD·¦à=*R­wGULõä“yWÊÄD¹æ5,T‘V¢Â…æCå¥p×Úš€×Dÿ-y'Qúëx¯¬YÀ+®ùú‹UHÞQàÆópà`t»ÎW……‰$¨(8A‘¼x›>3×U— ’àd¿É£O@ê§Ûl•ô ›yL¬™V'“¡9kJ€›hë~5<»Çcáj·å‡V Ì,âyB²¬‡SIItô¨šÃ,;£=ß”¿>h*ã6QÖ§š$u£cb·gMÖ{c„‘e5œ:ú°P?dô¨tcî61÷àÂÌȲN }H}·’\ŽÖ‹¥ŠM•8˜SµeIý 
=ªÜ_Ž~ðhEXÓ %œú¸X…¾s4_ D›Î>‘¿ mYT/¹ZÌ=…\ŒÌñ¥ Ú²¤~+FI‹¶ªpÅÌÒ"ì &r5À°P’¢ªmûù}Ã7<çZËÂ÷o’–´èWû„m5X#(½šJ÷¸Xå®+kѸÿ›¿ÍÝw\±¬WSéKXZÔµa¶XaÍ™úPå‚®‰ö]FýÀ× EÃrFæö®º¯°bbY¯¦Ò=.VÝŽñ¶VgÓÉÚ…Ø|®ò5¿ÿº£ÝþúnlýrËÓ-Aÿ2+.VL¬)u‹%RwT¶Ý{egsÙu˜ød õûÎ\I6}ºÝ"èÖpÂ|#¤Mk2ºq±DëŽÊö—ðð“öÕxì‹R;Q¿{èb²U”£M0À1@Ý+]rÂÚD±fîJ´î¨lÛ†žÒp» >ù]åC¿ÿHŠQŒ†]o!Ž;·¡+ÄÁšIq\,‘`¢â‚iŒ»ç[WAQe~«èIç¶•ÒŽÕ†ºüp¥x¢…3+Ž‹U·¼<š9:^V¦Àsæí¯ˆ`ÒÕÿȉQç6åÛŸdSàñ‡*Ã)œ)qX(ÕÐã#oo…ªekä‡c”/L|€úçú|VÕK+JæÎk°E²AÑÎ…5&Yý‹­ÚEŸÜÞ²D;›;„öôx0!ÄÇÅ’dÕ}³/ŸÕ3ĈŠ3è‚ÿ¿ÐŸdy˜‘yX—Õu§S¿LEº§â?.–$Ú(ñc¦úävó>sâK3(2mËRx°¢,¶ï¡´#ˆù`e4¨Ò=Eÿq±jm.ÉòX|gh6âÂIÉdP”{jþÃBU :AáG‡Ž)$íž].Ê cð[˜Úù—„Wy9:.(ú:‰â°Pz’N—ͦ¤îÖAÌÁÄBU¬Â\Ôá`M_ü™Û˜kº2u8Û µ•è¤Ûä_%Ë%±ßbû™pÓqx‰âFýb¤Žš¨Òy€Ÿ«iTM”Ÿêÿ0"þG©ß•Ü»öÍá–Y£à€ßI&*ôh9Øôk8ŠÀWñ`Vñ©þ eÉ/>où?Jô–f´ŠA‡öFé`–ñW‹U€(÷cÑ9}eØû C‚3(«ÚyãCn…Ÿåd¶êcþ+ÖpäŸh!ŠÿØ<|Xp™®E˜ù ð’…a¡ °1ZjNÔ«Ó}]3j(5¿(ø[ªiùÉÇì@DÊ3ð¿{ÇŽa¿IcéM¨Y/–ù—a f˜¥|f›Q‘$áDÁßšv/ì8SÛš}|$¸ðÇ ~[’J>ˆ•ycòÃv„fŸéfP  ÈÛýÞ°àAvvŽ,l0‹ö#ãTM4AÙ·|²|xìlDo×ĘÕÿßZRãÑlŽê¸½™‹oa .˜{¾÷ãbUM4oe·“?q!X³*bõ¿|®øùWx¢P À,þûßüY|{ì9;Hó–²y%†Ë,<;:ã;Qï)ûŠ$ªÐøÝ~lû´iÿø~UA p€?°_Ôå!Ì~ìQFºšÈê„Þ‰vOî7.V-p%]þ˳¢Ñ™n3Ê} _`x"Üý TÀ¿¨ïÃj2ñß}²j…üTü“&o‹ÀybÈ$UT¥w¢Ù“û‰SõÍ]ß‚¾]±nb>™Šôÿ[ÔJR¼‰ó-Ìb jž|üØ9ö]úQëq Æ…™?Šúö|µ—Ô¯?OÂ+³ìÿˆüQ†7aÞwÝÖËÃNŒY¨§Ä?*’hNQη<ÕëÂ@™TpeVüÿ€“A‚GçLÏ#Ko½Â(2=õýa¡òEHj¾ÕÕnõšv?’­Ò6“â_<úÜf­[.3´Õ†¸@Ñ¥¨ôÔ÷ÇÅ’PTóûÒ•—a¡çE1hVüÿÐ÷£o¹«ïþÖ+‹‚]ŠJO}\¬ª¿'ÈùŽÁ?X½7ë^ù_ÉA³äÿ‡ÂDxK`<þ·¥«ÕkE—"ÔSáªPøãCoô`ÂDT1“+fèÕ†B[kþe¢z>æv!}îøŸÄTBXEó$þ(Â[‚œ/®uËòBEE¨§Ä?.Ö-å%áîËCZ¯ƒ…æsHPÐíèlb³z? \•¼ 0}içuäk ÈV(‚¿Åt¿¤í£ƒhfîYW1kVÿÉ FEj挠·¬]Ÿ´][á?ówÖõ£>´þõÛ_QÖ¬ý“Œ‹%ù;êVðÈsF“]‡‰{!¶¢mý$3@­ÐrŸ×šH¯ŠX3U ލêoz£´7mPا}¦dP°ÚŒ'þ€XB\q’A{”}0aX3S Œª€=  i7†Ó.Ãäò€UÅOvOîx—Ý¡8´Âãé3œƒ&TÚ³ë¸XÂ4"Á€}òánÅ?; ¤š)G¨ð©´ÏÞ1õB¦mž¿¯¹á *x‚‰pT$I„‘cX"\‘Iñýùm  šYlj0Òô9íwÃôê½l ‚*¡`"«f‰>|_mLðÌ¥1QPÌWÃBUSc ejG›¡U;_¨fÔñ›?$Dð%ÑÄf`þøDvå #0e U¤¬HÐÚt2Oo'"QÍD¢L ‘6 örew;(fðSˆ„ÿݹÓ)BèF ÷íÁæV!SKÁä£" ˆ´Á¶J·ïïó² *DâwZˆÀ$dËd/ ¾À–™#? 
T«‰Øsð™(ÝyE–B&ÆÅ*RBDèv:!¼Ù誅+D¦ŸB+ÞûÅÚf2ú0ƒ-VBì.ñ~žÙ‹MþÞO2ž—µòô¢§;Yö f¸^”ÉØ:a JÏ^ÁUX5¢×?qJ¨ñ7×óß/* (Bœ2.VÕ è ´à‚ÅÍã´z%«BXJžð 6‡œò<ÙkãŽd°*ˆÅÿì|jwĆ{ú©ä¾ ©xhB#D*ÃB S‰ÅÙ²ñÛÙÆÁ§D Z͘å7HMØ P'ß›së¸ÑLFÈT†…*M&™{| Ì'ï ö¾:Å¥© TõS€b šqS»9ÛÌ9fuÍå7RyÀ‡€ü·N‚ Hšá‘ʘ8…ŒŠ= ²B»ý‚¶&È’×]¾ ‰x|ÿß6ñÝ?–ibe¿Yyó±ß™.ï2ÍT„8eX(ÉXž˜V½mð6߯L8ñ•‡¦âŸÎêS"×îÊx›©iʨH’ ";éœ w[4xÙg¼ò›ù&àá²48þ÷¿ Ü !LJ`JD'¶I0µÖîqKúLÒª" ^‰êiÐJ1eæÍ8.ætWO“\úõ^ÎôÉ%¤IëÊkÚ7a©°f!-L©ÃBå:+™/+^.^\:’[ ³ùMhC±vŸ Ðîq"oÆÂ”:&NÝöâ0°*°Ëߎ*g¢`kA5PêO §´gCy[Îç Ê6gÀ¼:.–¢™ô ™|h£Úv.®T ®.k“‘l3«)ÉLä0&P·­Õǘɘ9:CqA5OÖŽèÄv~v™ýOº„e ]aÚJÊ ˜XÒöáÞv Å…âUbÜ^¤$Ʀ´“™×¹O±3aÆHDÃÈJ0 kłԾ=¥€‚†gœR‰»“¾€7¾Ï{»5Q lA „ßãb <‰¨£”1™µ]‡Íw°ŠÃ§ü†'qØCfuÄÆªÒá€-„è{T¤ª±-Ðïl;yž" ‘ð T~ó“„8,kµ+ˆ!{So}W~1¹÷°P…i ÝÉ·ö­vÁ«3QyøI$ÆXzµ)T]Õ½˜ aµ¿ ²PYëWµa<3³TÁ#*÷—T ÁdŠ}yÒöÎKW/”§ 8ëW3w”…ÙJ‡]Eœ}1M+¸‹˜lT$ÁdŠÙÐS{—®[8=s³ßù?²,{ž–™Ä¨=3ì¯Ržq1Ù¨HO¢ŠiÉ͵xoÚÿÜ_x(å¥/‡½œîÞž£¥ þ ™­šZ¼Nå€@Åꢿ Uµ¾Y›··ïxοgB/<.\Þ­eI T°6žiòì“VûãôYÜ•­_ÝŠE†eû¸m¡ÚО}!/p}æ\$dãbU†‡¡©¥éÙÙ¹¾°ÿÌÌʘ¥0;öÈîfË|ž°”ýgh_ø"|é)ïws;^¯F||Vž3Êú"¾ã¶µÏ~ªB cÐôæÖrã)FhDoÃBIrŒ Í=›ñtòÊÎ>15» Æýá!ˆt }h;'bš„-GÜ 1;ŽŠ”A˜GØL»²ß§ÀþËFÇ*H[äjèf<(Uôna!Èè­—\Ÿšä棽lQdõ¯¤?¹þ@x ¶ÙšÛTÜ«y÷mµÂyrLZãbI+‚;{¨ö™>‰ûØRý÷ÕHó^|Ù>¦0Þ!A {xxJ"ÜI{÷…÷Õæ °nk ŒËϰöö´ƒ\¾éHËO†%¸.lLû—Z¹ä„}³ÕÊ®êü&]D»#c {Œ¨­‚'K—Å÷nêŸÈ4ò®Þ!„·úðšË NasŠÈ,=.Ök• k’­=6¤×‡‡\¯#­I_Np…¢gÞ‹õ_Õc1&šmî|ôV81„tþ43$öh|«[wŸ 1»5"»'„OÒw1,”2—2m»Ù² Š KD,Ja2ìü­•GLøådÌeÛü@ØÂœ!(‘ rX¨ÇC6ùœhÞ\e‡ÌþwË#Vdp¦Ft•ÕrFd™–¸ŽƒU¦A±X‰ðÎ?èfdöã° Ô°ë=ø%º?¦$ܦêÀ}“L4à.Þæ»Ô>"°ó´á£åÚ [£–Yâ¨ñC%Ñæ¸XÚŒ¯”ýƒî€–dêúøžÌFÕÑS`gÉ6#ÉD{/·øó§·÷ªC%ÁÎmFúøeáƒQQWOÚb,É€’dsT$ÉÛ‘dš É%í_½½W,*vþáH {#œüÄW"˜’xsT$ñ£š‰)…3sÚ¸Àƒ3sÄ "Äó¾ ¤·Œó(Éï÷Gf”¤›£" ÛŒ$Ó+%3\Ü)>’ ;ÿ@›@º/âK+µhŸ-üRnŽ‹U÷õ>0¦§Ï~?¦p“dÜùÜŒüÑüîé±ùóýêÉŒ’ts\,¹ f¢ùöci´]ˆ~’¯š>x>xóÅ -w™Tƒ-•Ÿ¥Zù42§$ÞHðf„™_Ž(Æ^mÞ½·XMxö¸:Ã8Htð\–P^¹KÍ R’nŽŠt×li€jß ÉåK¯ÙÒ.àË!?˜vÄ*®Ú$DЉd«½›íÂnLj'Šú3 XÓÔõ Æ¸¯w³lv€ú$3JÒk ¤VìútË©] '…—D(jŒgÏy—q‘6õÏ~½¸í:÷NC¦þ73dÓ*ƒ}¦2¸^Ū±$ÑOfìAªfÞ7ÌD3ïêijõ bOÞÙ.‘‘Z™yÀ„jÅta)žÈt=,TÕ̰£=¹­¢¶úl^|ùwŠ’É'Y¿`¡±Ãvå€û©IÉP"0‘‰z@ï#m´$Ýv×3n÷ìó; WJ&’~s÷nd„èº]6»ƒ¸0’Fd’J²ià&`¶Ú‹«ÔÄ¿ò¤d&ùG6 ˜D{¦évw">%‰L¦£"‰ÿ$ÊËØ öOá¸%;RD.”¦î ˜³2¼%¯×jù’H¯È¨HUKpŽ( ½ìÓôé¾1zd&”´êÛê>û-¼¼ï ×qÕ‡Òm÷Ó‘ñ‹@&FèÎ0ÂÉg©?C0"ÃBù4ÒF{x»¼nç»ÒØ¡VL$c‡m̱CMnñ&Ómb³A²g$ÿFwuŒˆ")5ÒOã‡Oû¸ûxÔä!„ԯ㊛½ž·Z÷F‹®Ö1)ì$P83?ÒÓ1,”pËH)o­®]€³[ø²ËC8枎Hѱ¼sÒ¿M™³9#ÃGRËQ‘¼a’e¨öa"¨­¢È0 s3f>Çõjù-ºfÄDkNûÿýÄnSÅ™!˜“€tT¤ªŸ:ÀPôS³çª=Tdwêñȼô7ÃŒ”{  öÊROû„H2Ìq± †‰%±…›µÜM¢bۙߧ‚‘Û}ÿß6/ps{?‘ŽÛ¦m­íwæ,e{¤‚ãb  Í®'i´í/mB…e$sB¿¾WWz A xÞ9`rñ kµcç#!éYÿãjU}3«™×ËÙW{<$ˆãÃ\¬2@¤–dM‡ãEe_áÉÀðw–M"ÆÇÕðÙ—ÇÂã‘1ùà¸X¹ˆ8ûÀgNÓÚw}Ê222 wn¶ –ç-p}…ý¨ÛJçék®Jã#´÷¢O^=!V~• IÇÅ*a$‚07l¬Wí7fnñ¾5üau1½f¡(ÈI¼}‡‡*ü//ƒÌŸô1@BÛݵŒŠ³§«›QÔ°’A" ä°PUÓú:¢g}áÆbZøßUÖ— &bÈ îØq´Tî{ãÊ’a"Šq±Š—9¾¨&Á/<êÕžzÉ?>ÀQ(Á ˜¬%øÈ õXMf¡gÐÔ­"dò‘Ú{o[#ûÁ騼0˜džH9*’€ÈˆM%¼&\v8UX,%LþÜÕgVøîfo”¿&>ÌÉ!‡…¹£SmïÆöéH~a“¸G-Ä3µòŒÏm¦$P˜@2Qd ©Hà‘<¢ŠÙ]¸ eÙž!XòI‚bÂÕÅ)-í'LU⪞È,8,Ô]ÅY%¶ôæ;Ö-ñª0‚Z$“«ÈÝA‚¦£ lWac¹ZXJ2¥Œ EÔ#¾÷ábEë E$Ðÿ¶»Içê¹”¬3pI•ÇÆ,2m>rM=&‘]wމ#Y;òÍ/³·G¤]ÅÕ'm¨QEh¿C¨ƒøÙ‡·0‰nÞË*Nֲ޲ÁDà%©ç°PB=#äÄ8ÆÕ^Åv¼ ¡p•ý™¶3›@ç‹`ù‹ú”˜A„_’{ %à3bNßzµ]6®óHá+I(ôðé$*„@ÉÊWøZ7Á$ùªêh íØzl6y¤pdÚE˜‚QFŒh›0H•-ÎŽ¿ ëF"D”£"„2òHKpí+·ÄfX2€(¯|èä‹"Tl¯þ¤csµL½4N"l2’Hð†ö%'wŠ©Dh¥_ÁÔS™ø!šÄgŽo=z«¾º62b$›JTó@"-ﵫ¸Û£~ù‘•$ÑÊßi/ñC_&¡[ØËéœ[l‰1N Uˆ®QbµÍÖ%—ÁmŸâ¶’Eˆ(#FD—Ñ‘ :y€X72jdâ©j:IØø ­¶‹pQ!/< -k!3É–VnííäÞí_ß6¾‹œÂ)ÂæOü™¥mºÚ›rÁš°¹IKÍ1™SÇÅ*ðg„è;‡bÙ®ñɬUA2 ;£´²•pÝl,ì%ï;£°òGgvìú*Y-µ±»fÛöO#•šAÙ69C’u¤çìAÞû5¢Þc Ï^Dg›ÍÙN­Åïø‡Ë€íuá‰ÜùÜŒøÑÒ™e«•mSŒQÚêeDI¸9.VÁnôöàõ†*)œ1IÞ›ŒH ½/uŸæ«‰Ì‡£"ݺRR‘lÙß½Zÿ¼ÞËÒ—³¼è8 ¾‹¾ø1-ÏÎ(üÛe\¼„W+Lâœ~ƒÞs»ï%’G4³Ï`_™©QÜ+B'é{뱦ôÑ’ëæ3i“Y屦ìžô±^ìCÍf•êâ&<ŠÇÖÑ®îìªmá¯Éõç2–©¦½ŸƒûضÆÒ ªÞ!ŸD¦ãbUñoD ìbé‚%µÁFõ ± ƒm鮵ù„GÂ-$­ÅŒÏÂ%’ÂSaƒ‰V™?‘lÀ¦>Vxp¾‡°dûJF«D²£"U½÷À¢ù¾= v&޽“!íL6RS3ªøFa>×ÿ8ÔD+¬É U¨ú¿Ú3¼WnŸûˆzõ©$DûÙÈLmcNÌAí'4ÙŠ½$cUòØQ‘$}GHŠÞµÓžg|aÖÉâSŽê×òî8z·Ç°‰®öÕ!^l¦öŸÄ¦ãb 
maxLik/inst/tinytest/0000755000175100001440000000000014077525067014372 5ustar hornikusersmaxLik/inst/tinytest/test-optimizers.R0000644000175100001440000006220314077525067017702 0ustar hornikusers### This code tests all the methods and main parameters.
It includes: ### * analytic gradients/Hessian ### * fixed parameters ### * inequality constraints ### * equality constraints ## do not run unless 'NOT_CRAN' explicitly defined ## (Suggested by Sebastian Meyer and others) if (!identical(Sys.getenv("NOT_CRAN"), "true")) { message("skipping slow optimizer tests") q("no") } if(!requireNamespace("tinytest", quietly = TRUE)) { message("These tests require 'tinytest' package\n") q("no") } library(maxLik) ## data to fit a normal distribution # set seed for pseudo random numbers set.seed( 123 ) tol <- .Machine$double.eps^0.25 ## generate a variable from normally distributed random numbers truePar <- c(mu=1, sigma=2) NOBS <- 100 x <- rnorm(NOBS, truePar[1], truePar[2] ) xSaved <- x ## log likelihood function llf <- function( param ) { mu <- param[ 1 ] sigma <- param[ 2 ] if(!(sigma > 0)) return(NA) # to avoid warnings in the output sum(dnorm(x, mu, sigma, log=TRUE)) } ## log likelihood function (individual observations) llfInd <- function( param ) { mu <- param[ 1 ] sigma <- param[ 2 ] if(!(sigma > 0)) return(NA) # to avoid warnings in the output llValues <- -0.5 * log( 2 * pi ) - log( sigma ) - 0.5 * ( x - mu )^2 / sigma^2 return( llValues ) } ## function to calculate analytical gradients gf <- function( param ) { mu <- param[ 1 ] sigma <- param[ 2 ] N <- length( x ) llGrad <- c( sum( ( x - mu ) / sigma^2 ), - N / sigma + sum( ( x - mu )^2 / sigma^3 ) ) return( llGrad ) } ## function to calculate analytical gradients (individual observations) gfInd <- function( param ) { mu <- param[ 1 ] sigma <- param[ 2 ] llGrads <- cbind( ( x - mu ) / sigma^2, - 1 / sigma + ( x - mu )^2 / sigma^3 ) return( llGrads ) } ## log likelihood function with gradients as attributes llfGrad <- function( param ) { mu <- param[ 1 ] sigma <- param[ 2 ] if(!(sigma > 0)) return(NA) # to avoid warnings in the output N <- length( x ) llValue <- -0.5 * N * log( 2 * pi ) - N * log( sigma ) - 0.5 * sum( ( x - mu )^2 / sigma^2 ) attributes( llValue )$gradient <- c( sum( ( x - mu ) / sigma^2 ), - N / sigma + sum( ( x - mu )^2 / sigma^3 ) ) return( llValue ) } ## log likelihood function with gradients as attributes (individual observations) llfGradInd <- function( param ) { mu <- param[ 1 ] sigma <- param[ 2 ] if(!(sigma > 0)) return(NA) # to avoid warnings in the output llValues <- -0.5 * log( 2 * pi ) - log( sigma ) - 0.5 * ( x - mu )^2 / sigma^2 attributes( llValues )$gradient <- cbind( ( x - mu ) / sigma^2, - 1 / sigma + ( x - mu )^2 / sigma^3 ) return( llValues ) } ## function to calculate analytical Hessians hf <- function( param ) { mu <- param[ 1 ] sigma <- param[ 2 ] N <- length( x ) llHess <- matrix( c( N * ( - 1 / sigma^2 ), sum( - 2 * ( x - mu ) / sigma^3 ), sum( - 2 * ( x - mu ) / sigma^3 ), N / sigma^2 + sum( - 3 * ( x - mu )^2 / sigma^4 ) ), nrow = 2, ncol = 2 ) return( llHess ) } ## log likelihood function with gradients and Hessian as attributes llfGradHess <- function( param ) { mu <- param[ 1 ] sigma <- param[ 2 ] if(!(sigma > 0)) return(NA) # to avoid warnings in the output N <- length( x ) llValue <- -0.5 * N * log( 2 * pi ) - N * log( sigma ) - 0.5 * sum( ( x - mu )^2 / sigma^2 ) attributes( llValue )$gradient <- c( sum( ( x - mu ) / sigma^2 ), - N / sigma + sum( ( x - mu )^2 / sigma^3 ) ) attributes( llValue )$hessian <- matrix( c( N * ( - 1 / sigma^2 ), sum( - 2 * ( x - mu ) / sigma^3 ), sum( - 2 * ( x - mu ) / sigma^3 ), N / sigma^2 + sum( - 3 * ( x - mu )^2 / sigma^4 ) ), nrow = 2, ncol = 2 ) return( llValue ) } ## log likelihood function with gradients as 
attributes (individual observations) llfGradHessInd <- function( param ) { mu <- param[ 1 ] sigma <- param[ 2 ] if(!(sigma > 0)) return(NA) # to avoid warnings in the output N <- length( x ) llValues <- -0.5 * log( 2 * pi ) - log( sigma ) - 0.5 * ( x - mu )^2 / sigma^2 attributes( llValues )$gradient <- cbind( ( x - mu ) / sigma^2, - 1 / sigma + ( x - mu )^2 / sigma^3 ) attributes( llValues )$hessian <- matrix( c( N * ( - 1 / sigma^2 ), sum( - 2 * ( x - mu ) / sigma^3 ), sum( - 2 * ( x - mu ) / sigma^3 ), N / sigma^2 + sum( - 3 * ( x - mu )^2 / sigma^4 ) ), nrow = 2, ncol = 2 ) return( llValues ) } # start values startVal <- c( mu = 0, sigma = 1 ) ## basic NR: test if all methods work ml <- maxLik( llf, start = startVal ) expect_equal( coef(ml), truePar, tol=2*max(stdEr(ml)) ) expect_stdout( print( ml ), pattern = "Estimate\\(s\\): 1.18.*1.81" ) expect_stdout( print( summary( ml )), pattern = "Estimates:" ) expect_equal( activePar( ml ), c(mu=TRUE, sigma=TRUE) ) expect_equal( AIC( ml ), 407.167892384587, tol = 0.1, check.attributes=FALSE ) expect_equal( coef( ml ), c(mu=1.181, sigma=1.816), tol = 0.001 ) expect_stdout( condiNumber( ml, digits = 3), "mu[[:space:]]+1[[:space:]\n]+sigma[[:space:]]+1\\." ) expect_equal( hessian( ml), matrix(c(-30.3, 0, 0, -60.6), 2, 2), tol = 0.01, check.attributes = FALSE ) expect_equal( logLik( ml ), -201.583946192294, tol = tol, check.attributes = FALSE ) expect_equal( maximType( ml ), "Newton-Raphson maximisation" ) expect_equal( nIter( ml ) > 5, TRUE ) expect_error( nObs( ml ), "cannot return the number of observations" ) expect_equal( nParam( ml ), 2 ) expect_equal( returnCode( ml ), 1 ) expect_equal( returnMessage( ml ), "gradient close to zero (gradtol)" ) expect_equal( vcov( ml ), matrix(c(0.032975, 0, 0, 0.0165), 2, 2), tol=0.01, check.attributes = FALSE ) expect_equal( logLik( summary( ml ) ), logLik(ml) ) mlInd <- maxLik( llfInd, start = startVal ) expect_stdout( print( summary( mlInd ), digits = 2 ), "mu +1\\.18" ) expect_equal( nObs( mlInd ), length(x) ) ## Marquardt (1963) correction mlM <- maxLik( llf, start = startVal, qac="marquardt") expect_equal( coef(mlM), coef(ml), # coefficients should be the same as above tol=tol ) expect_equal( returnMessage(mlM), returnMessage(ml) ) ## test plain results with analytical gradients ## compare coefficients, Hessian mlg <- maxLik(llf, gf, start = startVal ) expect_equal(coef(ml), coef(mlg), tol=tol) expect_equal(hessian(ml), hessian(mlg), tolerance = 1e-2) ## gradient with individual components mlgInd <- maxLik( llfInd, gfInd, start = startVal ) expect_equal(coef(mlInd), coef(mlgInd), tolerance = 1e-3) expect_equal(hessian(mlg), hessian(mlgInd), tolerance = 1e-3) ## with analytical gradients as attribute mlG <- maxLik( llfGrad, start = startVal ) expect_equal(coef(mlG), coef(mlg), tolerance = tol) expect_equivalent(gradient(mlG), gf( coef( mlG ) ), tolerance = tol) mlGInd <- maxLik( llfGradInd, start = startVal ) expect_equal(coef(mlGInd), coef(mlgInd), tolerance = tol) expect_equivalent(gradient(mlGInd), colSums( gfInd( coef( mlGInd ) ) ), tolerance = tol) expect_equivalent(estfun(mlGInd), gfInd( coef( mlGInd ) ), tolerance=tol) ## with analytical gradients as argument and attribute expect_warning(mlgG <- maxLik( llfGrad, gf, start = startVal)) expect_equal(coef(mlgG), coef(mlg), tolerance = tol) ## with analytical gradients and Hessians mlgh <- maxLik( llf, gf, hf, start = startVal ) expect_equal(coef(mlg), coef(mlgh), tolerance = tol) ## with analytical gradients and Hessian as attribute mlGH <- 
maxLik( llfGradHess, start = startVal ) expect_equal(coef(mlGH), coef(mlgh), tolerance = tol) ## with analytical gradients and Hessian as argument and attribute expect_warning(mlgGhH <- maxLik( llfGradHess, gf, hf, start = startVal )) expect_equal(coef(mlgGhH), coef(mlgh), tolerance = tol) ## ---------- BHHH method ---------- ## cannot do BHHH if llf not provided by individual x <- xSaved[1] expect_error( maxLik( llfInd, start = startVal, method = "BHHH" ) ) ## 2 observations: can do BHHH x <- xSaved[1:2] expect_silent( maxLik( llfInd, start = startVal, method = "BHHH" ) ) ## x <- xSaved mlBHHH <- maxLik( llfInd, start = startVal, method = "BHHH" ) expect_stdout(print( mlBHHH ), pattern = "Estimate\\(s\\): 1\\.18.* 1\\.81") expect_stdout(print(summary( mlBHHH)), pattern = "mu *1.18") expect_equivalent(activePar( mlBHHH ), c(TRUE, TRUE)) expect_equivalent(AIC( mlBHHH ), 407.168, tolerance=0.01) expect_equal(coef( mlBHHH ), setNames(c(1.180808, 1.816485), c("mu", "sigma")), tolerance=tol) expect_equal(condiNumber( mlBHHH, printLevel=0), setNames(c(1, 1.72), c("mu", "sigma")), tol=0.01) expect_equivalent(hessian( mlBHHH ), matrix(c(-30.306411, -1.833632, -1.833632, -55.731646), 2, 2), tolerance=0.01) expect_equivalent(logLik( mlBHHH ), -201.583946192983, tolerance=tol) expect_equal(maximType( mlBHHH ), "BHHH maximisation") expect_equal(nIter(mlBHHH) > 3, TRUE) # here 12 iterations expect_equal(nParam( mlBHHH ), 2) expect_equal(returnCode( mlBHHH ), 8) expect_equal(returnMessage( mlBHHH ), "successive function values within relative tolerance limit (reltol)") expect_equivalent(vcov( mlBHHH ), matrix(c(0.03306213, -0.00108778, -0.00108778, 0.01797892), 2, 2), tol=0.001) expect_equivalent(logLik(summary(mlBHHH)), -201.583946192983, tolerance=tol) expect_equal(coef(ml), coef(mlBHHH), tol=tol) expect_equal(stdEr(ml), stdEr(mlBHHH), tol=0.1) expect_equal(nObs( mlBHHH ), length(x)) # final Hessian = usual Hessian expect_silent(mlBhhhH <- maxLik( llfInd, start = startVal, method = "BHHH", finalHessian = TRUE ) ) # do not test Hessian equality--BHHH may be imprecise, at least # for diagonal elements expect_stdout(print(hessian( mlBhhhH )), pattern="mu.*\nsigma.+") ## Marquardt (1963) correction expect_silent(mlBHHHM <- maxLik( llfInd, start = startVal, method = "BHHH", qac="marquardt")) expect_equal(coef(mlBHHHM), coef(mlBHHH), tolerance=tol) expect_equal(returnMessage(mlBHHHM), "successive function values within relative tolerance limit (reltol)") ## BHHH with analytical gradients expect_error( maxLik( llf, gf, start = startVal, method = "BHHH" ) ) # need individual log-likelihood expect_error( maxLik( llfInd, gf, start = startVal, method = "BHHH" ) ) # need individual gradient x <- xSaved[1] # test with a single observation expect_error(maxLik( llf, gfInd, start = startVal, method = "BHHH" )) # gradient must have >= 2 rows expect_error( maxLik( llfInd, gfInd, start = startVal, method = "BHHH" ) ) # ditto even if individual likelihood components x <- xSaved[1:2] # test with 2 observations expect_silent(maxLik( llf, gfInd, start = startVal, method = "BHHH", iterlim=1)) # should work with 2 obs expect_silent( maxLik( llfInd, gfInd, start = startVal, method = "BHHH", iterlim=1) ) # should work with 2 obs x <- xSaved expect_silent(mlgBHHH <- maxLik( llfInd, gfInd, start = startVal, method = "BHHH" )) # individual log-likelihood, gradient expect_equal(coef(mlBHHH), coef(mlgBHHH), tolerance = tol) expect_equal(coef(mlg), coef(mlgBHHH), tolerance = tol) expect_silent(mlgBHHH2 <- maxLik( llf, gfInd, start = 
startVal, method = "BHHH" )) # aggregated log-likelihood, individual gradient expect_equal(coef(mlgBHHH), coef(mlgBHHH2), tolerance=tol) # final Hessian = usual Hessian expect_silent( mlgBhhhH <- maxLik( llf, gfInd, start = startVal, method = "BHHH", finalHessian = TRUE ) ) expect_equal(hessian(mlgBhhhH), hessian(mlBhhhH), tolerance = 1e-2) ## with analytical gradients as attribute expect_error( maxLik( llfGrad, start = startVal, method = "BHHH" ) ) # no individual gradients provided x <- xSaved[1] expect_error( maxLik( llfGrad, start = startVal, method = "BHHH" ), pattern = "gradient is not a matrix") # get an error about need a matrix expect_error( maxLik( llfGradInd, start = startVal, method = "BHHH" ), pattern = "at least as many rows") # need at least two obs x <- xSaved[1:2] expect_error( maxLik( llfGrad, start = startVal, method = "BHHH" ), pattern = "gradient is not a matrix") # enough obs but no individual grad x <- xSaved expect_silent(mlGBHHH <- maxLik( llfGradInd, start = startVal, method = "BHHH" )) expect_equal(coef(mlGBHHH), coef(mlgBHHH), tolerance = tol) # final Hessian = usual Hessian expect_silent(mlGBhhhH <- maxLik( llfGradInd, start = startVal, method = "BHHH", finalHessian = TRUE )) expect_equal(hessian(mlGBhhhH), hessian(mlgBhhhH), tolerance = tol) ## with analytical gradients as argument and attribute expect_warning(mlgGBHHH <- maxLik( llfGradInd, gfInd, start = startVal, method = "BHHH" ), pattern = "both as attribute 'gradient' and as argument 'grad'") # warn about double gradient expect_equal(coef(mlgGBHHH), coef(mlgBHHH), tolerance = tol) ## with unused Hessian expect_silent(mlghBHHH <- maxLik( llfInd, gfInd, hf, start = startVal, method = "BHHH" )) expect_equal(coef(mlgBHHH), coef(mlghBHHH), tolerance = tol) ## final Hessian = usual Hessian expect_silent( mlghBhhhH <- maxLik( llfInd, gfInd, hf, start = startVal, method = "BHHH", finalHessian = TRUE ) ) expect_equivalent(hessian(mlghBhhhH), hessian(mlghBHHH), tolerance = 0.2) # BHHH and ordinary hessian differ quite a bit ## with unused Hessian as attribute expect_silent(mlGHBHHH <- maxLik( llfGradHessInd, start = startVal, method = "BHHH" )) expect_equal(coef(mlGHBHHH), coef(mlghBHHH), tolerance = tol) ## final Hessian = usual Hessian expect_silent(mlGHBhhhH <- maxLik( llfGradHessInd, start = startVal, method = "BHHH", finalHessian = TRUE )) expect_equal(hessian(mlGHBhhhH), hessian(mlghBhhhH), tolerance = tol) ## with analytical gradients and Hessian as argument and attribute expect_warning( mlgGhHBHHH <- maxLik( llfGradHessInd, gfInd, hf, start = startVal, method = "BHHH" ), pattern = "both as attribute 'gradient' and as argument 'grad': ignoring" ) expect_equal(coef(mlgGhHBHHH), coef(mlghBHHH), tolerance = tol) expect_equal(hessian(mlgGhHBHHH), hessian(mlGHBHHH), tolerance = tol) ## ---------- Test BFGS methods ---------- optimizerNames <- c(bfgsr = "BFGSR", bfgs = "BFGS", nm = "Nelder-Mead", sann = "SANN", cg = "CG") successCodes <- list(bfgsr = 1:4, bfgs = 0, nm = 0, sann = 0, cg = 0) successMsgs <- list(bfgsr = c("successive function values within tolerance limit (tol)"), bfgs = c("successful convergence "), # includes space at end... 
nm = c("successful convergence "), sann = c("successful convergence "), cg = c("successful convergence ") ) for(optimizer in c("bfgsr", "bfgs", "nm", "sann", "cg")) { expect_silent(mlResult <- maxLik( llf, start = startVal, method = optimizer )) expect_stdout(print( mlResult ), pattern = paste0(optimizerNames[optimizer], " maximization") ) expect_stdout(print( summary( mlResult )), pattern = paste0(optimizerNames[optimizer], " maximization,.*Estimates:") ) expect_equal(coef(ml), coef(mlResult), tolerance=0.001) expect_equal(stdEr(ml), stdEr(mlResult), tolerance=0.01) expect_equal(activePar( mlResult ), c(mu=TRUE, sigma=TRUE)) expect_equivalent(AIC( mlResult ), 407.167893392749, tolerance=tol) expect_equivalent( hessian( mlResult ), matrix(c(-30.32596, 0.00000, 0.00000, -60.59508), 2, 2), tolerance = 0.01) expect_equivalent(logLik( mlResult ), -201.5839, tolerance = 0.01) expect_equal(maximType( mlResult ), paste0(optimizerNames[optimizer], " maximization") ) expect_true(nIter( mlResult ) > 1 & is.integer(nIter(mlResult))) expect_error( nObs( mlResult ), pattern = "cannot return the number of observations") expect_equal(nParam( mlResult ), 2) expect_true(returnCode( mlResult ) %in% successCodes[[optimizer]]) expect_equal(returnMessage( mlResult), successMsgs[[optimizer]]) expect_equal(logLik( summary( mlResult ) ), logLik(mlResult)) ## individual observations expect_silent(mlIndResult <- maxLik( llfInd, start = startVal, method = optimizer)) expect_stdout(print( summary( mlIndResult )), pattern = paste0(optimizerNames[optimizer], " maximization,.*Estimates:") ) expect_equal(coef(mlResult), coef(mlIndResult), tolerance = tol) expect_equal(stdEr(mlResult), stdEr(mlIndResult), tolerance = 0.01) expect_equal(nObs( mlIndResult ), length(x)) ## with analytic gradients expect_silent(mlgResult <- maxLik( llf, gf, start = startVal, method = optimizer)) expect_equal(coef(mlgResult), coef(mlResult), tolerance = tol) expect_equal(stdEr(mlgResult), stdEr(mlResult), tolerance = 0.01) expect_silent(mlgIndResult <- maxLik( llfInd, gfInd, start = startVal, method = optimizer )) expect_equal(coef(mlgIndResult), coef(mlResult), tolerance = tol) expect_equal(stdEr(mlgIndResult), stdEr(mlResult), tolerance = 0.01) ## with analytical gradients as attribute expect_silent(mlGResult <- maxLik( llfGrad, start = startVal, method = optimizer)) expect_equal(coef(mlGResult), coef(mlResult), tolerance = tol) expect_equal(stdEr(mlGResult), stdEr(mlResult), tolerance = 0.01) expect_silent(mlGIndResult <- maxLik( llfGradInd, start = startVal, method = optimizer )) expect_equal(coef(mlGIndResult), coef(mlResult), tolerance = tol) expect_equal(stdEr(mlGIndResult), stdEr(mlResult), tolerance = 0.01) ## with analytical gradients as argument and attribute expect_warning(mlgGResult <- maxLik( llfGrad, gf, start = startVal, method = optimizer )) expect_equal(coef(mlgGResult), coef(mlResult), tolerance = tol) expect_equal(stdEr(mlgGResult), stdEr(mlResult), tolerance = 0.01) ## with analytical gradients and Hessians expect_silent(mlghResult <- maxLik( llf, gf, hf, start = startVal, method = optimizer )) expect_equal(coef(mlghResult), coef(mlResult), tolerance = tol) expect_equal(stdEr(mlghResult), stdEr(mlResult), tolerance = 0.01) ## with analytical gradients and Hessian as attribute expect_silent(mlGHResult <- maxLik( llfGradHess, start = startVal, method = optimizer )) expect_equal(coef(mlGHResult), coef(mlResult), tolerance = tol) expect_equal(stdEr(mlGHResult), stdEr(mlResult), tolerance = 0.01) ## with analytical gradients 
and Hessian as argument and attribute expect_warning(mlgGhHResult <- maxLik( llfGradHess, gf, hf, start = startVal, method = optimizer )) expect_equal(coef(mlgGhHResult), coef(mlResult), tolerance = tol) expect_equal(stdEr(mlgGhHResult), stdEr(mlResult), tolerance = 0.01) } ### ---------- with fixed parameters ---------- ## start values startValFix <- c( mu = 1, sigma = 1 ) ## fix mu (the mean ) at its start value isFixed <- c( TRUE, FALSE ) successMsgs <- list(bfgsr = c("successive function values within tolerance limit (tol)"), bfgs = c("successful convergence "), # includes space at end... nm = c("successful convergence "), sann = c("successful convergence "), cg = c("successful convergence ") ) ## NR method with fixed parameters for(optimizer in c("nr", "bfgsr", "bfgs", "sann", "cg")) { expect_silent( mlFix <- maxLik( llf, start = startValFix, fixed = isFixed, method=optimizer) ) expect_equivalent(coef(mlFix)[1], 1) expect_equivalent(stdEr(mlFix)[1], 0) expect_silent( mlFix3 <- maxLik(llf, start = startValFix, fixed = "mu", method=optimizer) ) expect_equal(coef(mlFix), coef(mlFix3)) mlFix4 <- maxLik( llf, start = startValFix, fixed = which(isFixed), method=optimizer) expect_equal(coef(mlFix), coef(mlFix4), tolerance=tol) expect_equivalent(activePar( mlFix ), !isFixed) expect_equal(nParam( mlFix ), 2) ## with analytical gradients mlgFix <- maxLik( llf, gf, start = startValFix, fixed = isFixed, method=optimizer) expect_equal(coef(mlgFix), coef(mlFix), tolerance=tol) ## with analytical gradients and Hessians mlghFix <- maxLik( llf, gf, hf, start = startValFix, fixed = isFixed, method=optimizer) expect_equal(coef(mlghFix), coef(mlFix), tolerance=tol) } ## Repeat the previous for NM as that one does not like 1-D optimization for(optimizer in c("nm")) { expect_warning( mlFix <- maxLik( llf, start = startValFix, fixed = isFixed, method=optimizer) ) expect_equivalent(coef(mlFix)[1], 1) expect_equivalent(stdEr(mlFix)[1], 0) expect_warning( mlFix3 <- maxLik(llf, start = startValFix, fixed = "mu", method=optimizer) ) expect_equal(coef(mlFix), coef(mlFix3)) expect_warning( mlFix4 <- maxLik( llf, start = startValFix, fixed = which(isFixed), method=optimizer) ) expect_equal(coef(mlFix), coef(mlFix4), tolerance=tol) expect_equivalent(activePar( mlFix ), !isFixed) expect_equal(nParam( mlFix ), 2) ## with analytical gradients expect_warning( mlgFix <- maxLik( llf, gf, start = startValFix, fixed = isFixed, method=optimizer) ) expect_equal(coef(mlgFix), coef(mlFix), tolerance=tol) ## with analytical gradients and Hessians expect_warning( mlghFix <- maxLik( llf, gf, hf, start = startValFix, fixed = isFixed, method=optimizer) ) expect_equal(coef(mlghFix), coef(mlFix), tolerance=tol) } ## Repeat for BHHH as that one need a different log-likelihood function for(optimizer in c("bhhh")) { expect_silent( mlFix <- maxLik( llfInd, start = startValFix, fixed = isFixed, method=optimizer) ) expect_equivalent(coef(mlFix)[1], 1) expect_equivalent(stdEr(mlFix)[1], 0) expect_silent( mlFix3 <- maxLik(llfInd, start = startValFix, fixed = "mu", method=optimizer) ) expect_equal(coef(mlFix), coef(mlFix3)) expect_silent( mlFix4 <- maxLik( llfInd, start = startValFix, fixed = which(isFixed), method=optimizer) ) expect_equal(coef(mlFix), coef(mlFix4), tolerance=tol) expect_equivalent(activePar( mlFix ), !isFixed) expect_equal(nParam( mlFix ), 2) ## with analytical gradients expect_silent( mlgFix <- maxLik( llf, gfInd, start = startValFix, fixed = isFixed, method=optimizer) ) expect_equal(coef(mlgFix), coef(mlFix), 
tolerance=tol) ## with analytical gradients and Hessians expect_silent( mlghFix <- maxLik( llf, gfInd, hf, start = startValFix, fixed = isFixed, method=optimizer) ) expect_equal(coef(mlghFix), coef(mlFix), tolerance=tol) } ### ---------- inequality constraints ---------- A <- matrix( -1, nrow = 1, ncol = 2 ) inEq <- list( ineqA = A, ineqB = 2.5 ) # A theta + B > 0 i.e. # mu + sigma < 2.5 for(optimizer in c("bfgs", "nm", "sann")) { expect_silent( mlInEq <- maxLik( llf, start = startVal, constraints = inEq, method = optimizer ) ) expect_stdout( print( summary( mlInEq)), pattern = "constrained likelihood estimation. Inference is probably wrong.*outer iterations, barrier value" ) expect_true(sum(coef( mlInEq )) < 2.5) } ### ---------- equality constraints ---------- eqCon <- list(eqA = A, eqB = 2.5) # A theta + B = 0 i.e. # mu + sigma = 2.5 for(optimizer in c("nr", "bhhh", "bfgs", "nm", "sann")) { expect_silent( mlEq <- maxLik(llfInd, start = startVal, constraints = eqCon, method = optimizer, SUMTTol = 0) ) expect_stdout( print( summary( mlEq)), pattern = "constrained likelihood estimation. Inference is probably wrong.*outer iterations, barrier value" ) expect_equal(sum(coef( mlEq )), 2.5, tolerance=1e-4) } ### ---------- convergence tolerance parameters ---------- a <- maxNR(llf, gf, hf, start=startVal, tol=1e-3, reltol=0, gradtol=0, iterlim=10) expect_equal(returnCode(a), 2) # should stop with code 2: tolerance a <- maxNR(llf, gf, hf, start=startVal, tol=0, reltol=1e-3, gradtol=0, iterlim=10) expect_equal(returnCode(a), 8) # 8: relative tolerance a <- maxNR(llf, gf, hf, start=startVal, tol=0, reltol=0, gradtol=1e-3, iterlim=10) expect_equal(returnCode(a), 1) # 1: gradient a <- maxNR(llf, gf, hf, start=startVal, tol=0, reltol=0, gradtol=0, iterlim=10) expect_equal(returnCode(a), 4) # 4: iteration limit maxLik/inst/tinytest/test-parameters.R0000644000175100001440000002451414077525067017643 0ustar hornikusers ### Test battery for various optimization parameters for different optimizers. ### ### ... 
### library(maxLik) library(tinytest) tol <- .Machine$double.eps^(0.25) set.seed( 123 ) # generate a variable from normally distributed random numbers N <- 50 x <- rnorm(N, 1, 2 ) ## log likelihood function llf <- function( param ) { mu <- param[ 1 ] sigma <- param[ 2 ] if(!(sigma > 0)) return(NA) # to avoid warnings in the output N <- length( x ) llValue <- -0.5 * N * log( 2 * pi ) - N * log( sigma ) - 0.5 * sum( ( x - mu )^2 / sigma^2 ) return( llValue ) } # start values startVal <- c( mu = 0, sigma = 1 ) # expect_silent(ml <- maxLik( llf, start = startVal )) expect_equivalent(coef(ml), c(1.069, 1.833), tolerance=tol) ## tol expect_silent(mlTol <- maxLik( llf, start = startVal, tol=1)) expect_equal(returnCode(mlTol), 2) # tolerance limit expect_silent(mlTolC <- maxLik(llf, start=startVal, control=list(tol=1))) expect_equal(coef(mlTol), coef(mlTolC)) expect_equal(hessian(mlTol), hessian(mlTolC)) expect_equal(returnCode(mlTol), returnCode(mlTolC)) expect_silent(ml <- maxLik( llf, start = startVal, tol=-1)) # negative tol switches tol off expect_silent(ml <- maxLik( llf, start = startVal, control=list(tol=-1))) expect_false(returnCode(ml) == 2) # should not be w/in tolerance limit expect_error(ml <- maxLik( llf, start = startVal, tol=c(1,2)), pattern="'tol' must be of length 1, not 2") expect_error(ml <- maxLik( llf, start = startVal, control=list(tol=c(1,2))), pattern="'tol' must be of length 1, not 2") expect_error(ml <- maxLik( llf, start = startVal, tol=TRUE), pattern="object of class \"logical\" is not valid for slot 'tol'") expect_error(ml <- maxLik( llf, start = startVal, control=list(tol=TRUE)), pattern="object of class \"logical\" is not valid for slot 'tol'") ## ----- reltol: play w/reltol, leave other tolerances at default value ----- expect_silent(mlRelTol <- maxLik( llf, start = startVal, reltol=1)) expect_equal(returnCode(mlRelTol), 8) mlRelTolC <- maxLik(llf, start=startVal, control=list(reltol=1)) expect_equal(coef(mlRelTol), coef(mlRelTolC)) expect_silent(ml0 <- maxLik( llf, start = startVal, reltol=0)) expect_true(nIter(ml0) > nIter(mlRelTol)) # switching off reltol makes more iterations expect_silent(ml1 <- maxLik( llf, start = startVal, reltol=-1)) expect_equal(nIter(ml0), nIter(ml1)) expect_error(ml <- maxLik( llf, start = startVal, reltol=c(1,2)), pattern="invalid class \"MaxControl\" object: 'reltol' must be of length 1, not 2") expect_error(ml <- maxLik( llf, start = startVal, control=list(reltol=c(1,2))), pattern="invalid class \"MaxControl\" object: 'reltol' must be of length 1, not 2") expect_error(ml <- maxLik( llf, start = startVal, reltol=TRUE), pattern="assignment of an object of class \"logical\" is not valid for slot 'reltol'") expect_error(ml <- maxLik( llf, start = startVal, control=list(reltol=TRUE)), pattern="assignment of an object of class \"logical\" is not valid for slot 'reltol'") ## gradtol expect_silent(mlGradtol <- maxLik( llf, start = startVal, gradtol=0.1)) expect_equal(returnCode(mlGradtol), 1) mlGradtolC <- maxLik(llf, start=startVal, control=list(gradtol=0.1)) expect_equal(coef(mlGradtol), coef(mlGradtolC)) expect_silent(ml <- maxLik( llf, start = startVal, gradtol=-1)) expect_true(nIter(ml) > nIter(mlGradtol)) # switching off gradtol makes more iterations expect_error(ml <- maxLik( llf, start = startVal, gradtol=c(1,2)), pattern="object: 'gradtol' must be of length 1, not 2") expect_error(ml <- maxLik( llf, start = startVal, control=list(gradtol=c(1,2))), pattern="object: 'gradtol' must be of length 1, not 2") expect_error(ml <- maxLik( 
llf, start = startVal, gradtol=TRUE), pattern="assignment of an object of class \"logical\" is not valid for slot 'gradtol' ") expect_error(ml <- maxLik( llf, start = startVal, control=list(gradtol=TRUE)), pattern="assignment of an object of class \"logical\" is not valid for slot 'gradtol' ") ## examples with steptol, lambdatol ## qac expect_silent(mlMarq <- maxLik( llf, start = startVal, qac="marquardt")) expect_equal(maximType(mlMarq), "Newton-Raphson maximisation with Marquardt (1963) Hessian correction") expect_silent(mlMarqC <- maxLik(llf, start=startVal, control=list(qac="marquardt"))) expect_equal(coef(mlMarq), coef(mlMarqC)) expect_error(ml <- maxLik( llf, start = startVal, qac=-1), pattern = "assignment of an object of class \"numeric\" is not valid for slot 'qac'") # qac should be "stephalving" or "marquardt" expect_error(ml <- maxLik( llf, start = startVal, qac=c("a", "b")), pattern = "invalid class \"MaxControl\" object: 'qac' must be of length 1, not 2") expect_error(ml <- maxLik( llf, start = startVal, qac=TRUE), pattern = "assignment of an object of class \"logical\" is not valid for slot 'qac'") mlMarqCl <- maxLik(llf, start = startVal, control=list(qac="marquardt", lambda0=1000, lambdaStep=4)) expect_equal(coef(mlMarqCl), coef(mlMarq)) ## NM: alpha, beta, gamma expect_silent(mlNMAlpha <- maxLik(llf, start=startVal, method="nm", beta=0.8)) expect_silent(mlNMAlphaC <- maxLik(llf, start=startVal, method="nm", control=list(beta=0.8))) expect_equal(coef(mlNMAlpha), coef(mlNMAlphaC)) ## likelihood function with additional parameter llf1 <- function( param, sigma ) { mu <- param N <- length( x ) ll <- -0.5*N*log( 2 * pi ) - N*log( sigma ) - 0.5*sum( ( x - mu )^2/sigma^2 ) ll } ## log-lik mixture logLikMix <- function(param) { rho <- param[1] if(rho < 0 || rho > 1) return(NA) mu1 <- param[2] mu2 <- param[3] ll <- log(rho*dnorm(x - mu1) + (1 - rho)*dnorm(x - mu2)) ll } ## loglik mixture with additional parameter logLikMixA <- function(param, rho) { mu1 <- param[1] mu2 <- param[2] ll <- log(rho*dnorm(x - mu1) + (1 - rho)*dnorm(x - mu2)) ll } ## Test the following with all the main optimizers: pl2Patterns <- c(NR = "----- Initial parameters: -----\n.*-----Iteration 1 -----", BFGS = "initial value.*final value", BFGSR = "-------- Initial parameters: -------\n.*Iteration 1") for(method in c("NR", "BFGS", "BFGSR")) { ## create data in loop, we need to mess with 'x' for constraints N <- 100 x <- rnorm(N, 1, 2 ) startVal <- c(1,2) ## two parameters at the same time ## iterlim, printLevel expect_stdout(ml2 <- maxLik(llf, start=startVal, method=method, iterlim=1, printLevel=2), pattern = pl2Patterns[method]) expect_stdout(ml2C <- maxLik(llf, start=startVal, method=method, control=list(iterlim=1, printLevel=2)), pattern = pl2Patterns[method]) expect_equal(coef(ml2), coef(ml2C)) ## what about additional parameters for the loglik function? expect_silent(mlsM <- maxLik(llf1, start=0, method=method, tol=1, sigma=1)) expect_silent(mlsCM <- maxLik(llf1, start=0, method=method, control=list(tol=1), sigma=1)) expect_equal(coef(mlsM), coef(mlsCM)) ## And what about unused parameters? expect_error(maxLik(llf1, start=0, method=method, control=list(tol=1), sigma=1, unusedPar=2), pattern = "unused argument") N <- 100 ## Does this work with constraints? 
x <- c(rnorm(N, mean=-1), rnorm(N, mean=1)) ## First test inequality constraints ## Inequality constraints: x + y + z < 0.5 A <- matrix(c(-1, 0, 0, 0, -1, 0, 0, 0, 1), 3, 3, byrow=TRUE) B <- rep(0.5, 3) start <- c(0.4, 0, 0.9) ## analytic gradient if(!(method %in% c("NR", "BFGSR"))) { expect_silent(mix <- maxLik(logLikMix, start=start, method=method, constraints=list(ineqA=A, ineqB=B))) expect_silent(mixGT <- try(maxLik(logLikMix, start=start, method=method, constraints=list(ineqA=A, ineqB=B), tol=1))) expect_silent( mixGTC <- try(maxLik(logLikMix, start=start, method=method, constraints=list(ineqA=A, ineqB=B), control=list(tol=1))) ) ## 2d inequality constraints: x + y < 0.5 A2 <- matrix(c(-1, -1), 1, 2, byrow=TRUE) B2 <- 0.5 start2 <- c(-0.5, 0.5) expect_silent( mixA <- maxLik(logLikMixA, start=start2, method=method, constraints=list(ineqA=A2, ineqB=B2), tol=1, rho=0.5) ) expect_silent( mixAC <- maxLik(logLikMixA, start=start2, method=method, constraints=list(ineqA=A2, ineqB=B2), control=list(tol=1), rho=0.5) ) expect_equal(coef(mixA), coef(mixAC)) expect_equal(hessian(mixA), hessian(mixAC)) } } ### Test adding both default and user-specified parameters through control list estimate <- function(control=NULL, ...) { maxLik(llf, start=c(1,1), control=c(list(iterlim=100), control), ...) } expect_silent(m <- estimate(control=list(iterlim=1), fixed=2)) expect_stdout(show(maxControl(m)), pattern = "iterlim = 1") # iterlim should be 1 expect_equal(coef(m)[2], 1) # sigma should be 1.000 ## Does print.level overwrite 'printLevel'? expect_silent(m <- estimate(control=list(printLevel=2, print.level=1))) expect_stdout(show(maxControl(m)), pattern = "printLevel = 1") ## Does open parameters override everything? expect_silent(m <- estimate(control=list(printLevel=2, print.level=1), print.level=0)) expect_stdout(show(maxControl(m)), pattern = "printLevel = 0") ### does both printLevel, print.level work for condiNumber? expect_silent(condiNumber(hessian(m), print.level=0)) expect_silent(condiNumber(hessian(m), printLevel=0)) expect_silent(condiNumber(hessian(m), printLevel=0, print.level=1)) maxLik/inst/tinytest/test-maxControl.R0000644000175100001440000000465714077525067017634 0ustar hornikusers### Does maxControl stuff behave? ### ### do not run unless 'NOT_CRAN' explicitly defined ### (Suggested by Sebastian Meyer and others) if (!identical(Sys.getenv("NOT_CRAN"), "true")) { message("skipping slow optimizer tests") q("no") } ### test for: ### 1. create maxControl object ### 2. SGA_batchSize NULL ### 3. negative batch size ### 4. 
more than 1 batch size ### SG_clip: NULL, negative, more than one ### ### printing: ### * #of cols, rows library(maxLik) set.seed(3) ### ---------- create maxControl object maxControl(tol=1e-4, lambdatol=1e-5, qrtol=1e-6, qac="marquardt", marquardt_lambda0=0.1, marquardt_lambdaStep=3, marquardt_maxLambda=1e10, nm_alpha=2, nm_beta=1, nm_gamma=4, sann_temp=5, sann_tmax=100, sann_randomSeed=1, SGA_momentum=0.9, Adam_momentum1=0.5, Adam_momentum2=0.55, SG_learningRate=0.5, SG_batchSize=10, SG_clip=1000, SG_patience=7, SG_patienceStep=10, iterlim=10, printLevel=3) ### ---------- SG_batchSize expect_silent(maxControl(SG_batchSize=NULL)) expect_error(maxControl(SG_batchSize=-1)) # should fail expect_error(maxControl(SG_batchSize=2:3)) # should fail expect_silent(maxControl(SG_clip=NULL)) expect_error(maxControl(SG_clip=-1)) # fails expect_error(maxControl(SG_clip=2:3)) # fails expect_error(maxControl(Adam_momentum1=NA)) # should fail w/'NA in Adam_momentum' ### ---------- printing ---------- ### ---------- max.columns, max.rows ---------- loglik <- function(beta) { e <- y - X %*% beta -crossprod(e) } gradlik <- function(beta) { e <- y - X %*% beta l <- crossprod(e) g <- t(-2*t(X) %*% e) -g } ## linear regression with many columns X <- matrix(rnorm(20*15), 20, 15) beta <- rep(1, ncol(X)) y <- X %*% beta + rnorm(20, sd=0.3) m <- maxNR(loglik, gradlik, start=rep(1, ncol(X)), iterlim=1) ## print estimates + gradient, and hessian ## should print only 4 rows for estimates, 4 rows + 2 cols for Hessia ## should give message "reached getOption("max.cols") -- omitted 13 columns" etc expect_stdout(print(summary(m, hessian=TRUE), max.rows=4, max.cols=2, digits=3), pattern=paste0('reached getOption\\("max.rows"\\) -- omitted 11 rows', '.*', 'reached getOption\\("max.cols"\\) -- omitted 13 columns', '.*', 'reached getOption\\("max.rows"\\) -- omitted 11 rows') ) maxLik/inst/tinytest/test-methods.R0000644000175100001440000001040614077525067017136 0ustar hornikusers## Test methods. Note: only test if methods work in terms of dim, length, etc, ## not in terms of values here ## ## ... 
## * printing summary with max.columns, max.rows ## if(!requireNamespace("tinytest", quietly = TRUE)) { message("These tests require 'tinytest' package\n") q("no") } require(sandwich) library(maxLik) set.seed(0) compareTolerance = 0.001 # tolerance when comparing different optimizers ## Test standard methods for "lm" x <- runif(20) y <- x + rnorm(20) m <- lm(y ~ x) expect_equal( nObs(m), length(y), info = "nObs.lm must be correct" ) expect_equal( stdEr(m), c(`(Intercept)` = 0.357862322670879, x = 0.568707094458801) ) ## Test maxControl methods: set.seed(9) x <- rnorm(20, sd=2) ll1 <- function(par) dnorm(x, mean=par, sd=1, log=TRUE) ll2 <- function(par) dnorm(x, mean=par[1], sd=par[2], log=TRUE) for(method in c("NR", "BFGS", "BFGSR")) { m <- maxLik(ll2, start=c(0, 2), method=method, control=list(iterlim=1)) expect_equal(maxValue(m), -41.35, tolerance=0.01) expect_true(is.vector(gradient(m)), info=paste0("'gradient' returns a vector for ", method)) expect_equal(length(gradient(m)), 2, info="'gradient(m)' is of length 2") expect_true(is.matrix(estfun(m)), info="'estfun' returns a matrix") expect_equal(dim(estfun(m)), c(20,2), info="'estfun(m)' is 20x2 matrix") expect_stdout( show(maxControl(m)), pattern = "Adam_momentum2 = 0\\.999" ) } ## Test methods for non-likelihood optimization hatf <- function(theta) exp(- theta %*% theta) for(optimizer in c(maxNR, maxBFGSR, maxBFGS, maxNM, maxSANN, maxCG)) { name <- as.character(quote(optimizer)) res <- optimizer(hatf, start=c(1,1)) if(name %in% c("maxNR", "maxBFGS", "maxNM", "maxCG")) { expect_equal(coef(res), c(0,0), tol=1e-5, info=paste0(name, ": result (0,0)")) } expect_equal(objectiveFn(res), hatf, info=paste0(name, ": objectiveFn correct")) } ## Test maxLik vcov related methods set.seed( 15 ) t <- rexp(20, 2) loglik <- function(theta) log(theta) - theta*t gradlik <- function(theta) 1/theta - t hesslik <- function(theta) -100/theta^2 a <- maxLik(loglik, start=1) expect_equal(dim(vcov(a)), c(1,1), info="vcov 1D numeric correct") expect_equal(length(stdEr(a)), 1, info="stdEr 1D numeric correct") a <- maxLik(loglik, gradlik, hesslik, start=1) expect_equal(dim(vcov(a)), c(1,1), info="vcov 1D analytic correct") expect_equal(length(stdEr(a)), 1, info="stdEr 1D analytic correct") ## ---------- both individual and aggregated likelihood ---------- NOBS <- 100 x <- rnorm(NOBS, 2, 1) ## log likelihood function llf <- function( param ) { mu <- param[ 1 ] sigma <- param[ 2 ] if(!(sigma > 0)) return(NA) # to avoid warnings in the output sum(dnorm(x, mu, sigma, log=TRUE)) } ## log likelihood function (individual observations) llfInd <- function( param ) { mu <- param[ 1 ] sigma <- param[ 2 ] if(!(sigma > 0)) return(NA) # to avoid warnings in the output llValues <- -0.5 * log( 2 * pi ) - log( sigma ) - 0.5 * ( x - mu )^2 / sigma^2 return( llValues ) } startVal <- c(mu=2, sigma=1) ml <- maxLik( llf, start = startVal) mlInd <- maxLik( llfInd, start = startVal) ## ---------- Various summary methods ---------- ## These should work and produce consistent results expect_stdout( show(confint(ml)), pattern = "2.5 % +97.5 %\nmu +[[:digit:] .]+\n" ) expect_stdout( show(glance(ml)), pattern = "df logLik AIC +nobs.*1 2 -140. 284. NA" ) expect_stdout( show(glance(mlInd)), pattern = "df logLik AIC nobs.*1 2 -140. 284. 
100" ) expect_stdout( show(tidy(ml)), pattern = "term.*estimate std.error statistic.*p.value" ) ### ---------- estfun, bread, sandwich ---------- expect_error( estfun( ml ) ) expect_equal(dim(estfun( mlInd )), c(NOBS, 2)) expect_equal(colnames(estfun( mlInd )), names(startVal)) expect_error(bread( ml ) ) expect_equal(dim(bread( mlInd )), c(2, 2)) expect_equal(colnames(bread( mlInd )), names(startVal)) expect_equal(rownames(bread( mlInd )), names(startVal)) expect_error(sandwich( ml ) ) expect_equal(dim(sandwich( mlInd )), c(2, 2)) expect_equal(colnames(sandwich( mlInd )), names(startVal)) expect_equal(rownames(sandwich( mlInd )), names(startVal)) maxLik/inst/tinytest/test-basic.R0000644000175100001440000001027514077525067016560 0ustar hornikusers### general optimization tests for the functions of various forms ### test for: ### 1. numeric gradient, Hessian ### 2. analytic gradient, numeric Hessian ### 3. analytic gradient, Hessian ### ### a) maxLik(, method="NR") ### c) maxLik(, method="BFGS") ### b) maxLik(, method="BHHH") ### ### i) maxNR() ### ii) maxBFGS() if(!requireNamespace("tinytest", quietly = TRUE)) { cat("These tests require 'tinytest' package\n") q("no") } library(maxLik) ## ---------- define log-likelihood functions ---------- ## log-likelihood function(s) logLL <- function(x, X) # per observation for maxLik dgamma(x = X, shape = x[1], scale = x[2], log = TRUE) logLLSum <- function(x, X) sum(logLL(x, X)) # gradient of log-likelihood function d.logLL <- function(x, X){ # analytic 1. derivatives shape <- x[1] scale <- x[2] cbind(shape= log(X) - log(scale) - psigamma(shape, 0), scale= (X/scale - shape)/scale ) } d.logLLSum <- function(x, X) { ## analytic 1. derivatives, summed colSums(d.logLL(x, X)) } ## Hessian of log-likelihood function dd.logLL <- function(x, X){ # analytic 2. 
derivatives shape <- x[1] scale <- x[2] hessian <- matrix(0, 2, 2) hessian[1,1] <- -psigamma(shape, 1)*length(X) hessian[2,2] <- (shape*length(X) - 2*sum(X)/scale)/scale^2 hessian[cbind(c(2,1), c(1,2))] <- -length(X)/scale return(hessian) } ## ---------- create data ---------- ## sample size 1000 should give precision 0.1 or better param <- c(1.5, 2) set.seed(100) testData <- rgamma(1000, shape=param[1], scale=param[2]) start <- c(1,1) mTol <- .Machine$double.eps^0.25 ## estimation with maxLik() / NR doTests <- function(method="NR") { suppressWarnings(rLLSum <- maxLik( logLLSum, start=start, method=method, X=testData )) stdDev <- stdEr(rLLSum) tol <- 2*max(stdDev) expect_equal(coef(rLLSum), param, tolerance=tol, info=paste("coefficient values should be close to the true values", paste(param, collapse=", "))) # should equal to param, but as N is small, it may be way off ## rLL <- suppressWarnings(maxLik( logLL, start = start, method=method, X=testData )) expect_equal(coef(rLL), coef(rLLSum), tolerance=mTol) ## rLLSumGSum <- suppressWarnings(maxLik( logLLSum, grad=d.logLLSum, start = start, method=method, X=testData )) expect_equal(coef(rLLSumGSum), coef(rLLSum), tolerance=mTol) rLLG <- suppressWarnings(maxLik( logLL, grad=d.logLL, start = start, method=method, X=testData )) expect_equal(coef(rLLG), coef(rLLSum), tolerance=mTol) rLLGH <- suppressWarnings(maxLik( logLL, grad=d.logLL, hess=dd.logLL, start = start, method=method, X=testData )) expect_equal(coef(rLLGH), coef(rLLSum), tolerance=mTol) } doTests("NR") doTests("BFGS") ## maxBHHH: cannot run the same tests method <- "BHHH" expect_error( maxLik( logLLSum, start=start, method=method, X=testData), pattern = "not provided by .* returns a numeric vector" ) rLL <- suppressWarnings(maxLik( logLL, start = start, method=method, X=testData )) stdDev <- stdEr(rLL) tol <- 2*max(stdDev) expect_equal(coef(rLL), param, tolerance=tol, info=paste("coefficient values should be close to the true values", paste(param, collapse=", "))) # should equal to param, but as N is small, it may be way off ## rLLG <- suppressWarnings(maxLik( logLL, grad=d.logLL, start = start, method=method, X=testData )) expect_equal(coef(rLLG), coef(rLL), tolerance=mTol) ## Do the other basic functions work? expect_equal(class(logLik(rLL)), "numeric") expect_equal(class(gradient(rLL)), "numeric") expect_true(inherits(hessian(rLL), "matrix"), info="Hessian must inherit from matrix class") ## test maxNR with gradient and hessian as attributes W <- matrix(-c(4,1,2,4), 2, 2) c <- c(1,2) start <- c(0,0) f <- function(x) { hess <- 2*W grad <- 2*W %*% (x - c) val <- t(x - c) %*% W %*% (x - c) attr(val, "gradient") <- as.vector(grad) # gradient matrices only work for BHHH-type problems attr(val, "hessian") <- hess val } res <- maxNR(f, start=start) expect_equal(coef(res), c, tolerance=mTol) expect_equal(sqrt(sum(gradient(res)^2)), 0, tolerance=mTol) expect_equal(maxValue(res), 0, tolerance=mTol) maxLik/inst/tinytest/test-maxSG.R0000644000175100001440000001635114077525067016517 0ustar hornikusers### tests for stochastic gradient ascent ### ### do not run unless 'NOT_CRAN' explicitly defined ### (Suggested by Sebastian Meyer and others) if(!identical(Sys.getenv("NOT_CRAN"), "true")) { message("We are on CRAN: skipping slow optimizer tests") q("no") } if(!requireNamespace("tinytest", quietly = TRUE)) { message("These tests require 'tinytest' package\n") q("no") } library(maxLik) ### Test the following things: ### ### 1. 
basic 2-D SGA ### SGA without function, only gradient ### SGA neither function nor gradient ### SGA in 1-D case ### 2. SGA w/momentum ### 3. SGA full batch ### 4. SGA, no gradient supplied ### SGA, return numeric hessian, gradient provided ### SGA, return numeric hessian, no gradient provided ### SGA, printlevel 1, storeValues ### SGA, NA as iterlim: should give informative error ### SGA, storeValues but no fn (should fail) ### ### using highly unequally scaled data ### SGA without gradient clipping (fails) ### SGA with gradient clipping (works, although does not converge) ## ---------- OLS ## log-likelihood function(s): ## return log-likelihood on validation data loglik <- function(beta, index) { e <- yValid - XValid %*% beta -crossprod(e)/length(y) } ## gradlik: work on training data gradlik <- function(beta, index) { e <- yTrain[index] - XTrain[index,,drop=FALSE] %*% beta g <- t(-2*t(XTrain[index,,drop=FALSE]) %*% e) -g/length(index) } ### create random data set.seed(1) N <- 1000 x <- rnorm(N) X <- cbind(1, x) y <- 100 + 100*x + rnorm(N) ## training-validation iTrain <- sample(N, 0.8*N) XTrain <- X[iTrain,,drop=FALSE] XValid <- X[-iTrain,,drop=FALSE] yTrain <- y[iTrain] yValid <- y[-iTrain] ## Analytic solution (training data): start <- c(const=10, x=10) b0 <- drop(solve(crossprod(XTrain)) %*% crossprod(XTrain, yTrain)) names(b0) <- names(start) tol <- 1e-3 # coefficient tolerance ## ---------- 1. working example res <- maxSGA(loglik, gradlik, start=start, control=list(printLevel=0, iterlim=200, SG_batchSize=100, SG_learningRate=0.1, storeValues=TRUE), nObs=length(yTrain)) expect_equal(coef(res), b0, tolerance=tol) # SGA usually ends with gradient not equal to 0 so we don't test that ## ---------- store parameters res <- maxSGA(loglik, gradlik, start=start, control=list(printLevel=0, iterlim=20, SG_batchSize=100, SG_learningRate=0.1, storeParameters=TRUE), nObs=length(yTrain)) expect_equal(dim(storedParameters(res)), c(1 + nIter(res), 2)) ## ---------- no function, only gradient expect_silent( res <- maxSGA(grad=gradlik, start=start, control=list(printLevel=0, iterlim=10, SG_batchSize=100), nObs=length(yTrain)) ) ## ---------- neither function nor gradient expect_error( res <- maxSGA(start=start, control=list(printLevel=0, iterlim=10, SG_batchSize=100), nObs=length(yTrain)) ) ## ---------- 1D case N1 <- 1000 t <- rexp(N1, 2) loglik1 <- function(theta, index) sum(log(theta) - theta*t[index]) gradlik1 <- function(theta, index) sum(1/theta - t[index]) expect_silent( res <- maxSGA(loglik1, gradlik1, start=1, control=list(iterlim=300, SG_batchSize=20), nObs=length(t)) ) expect_equal(coef(res), 1/mean(t), tolerance=0.2) expect_null(hessian(res)) ## ---------- 2. SGA with momentum expect_silent( res <- maxSGA(loglik, gradlik, start=start, control=list(printLevel=0, iterlim=200, SG_batchSize=100, SG_learningRate=0.1, SGA_momentum=0.9), nObs=length(yTrain)) ) expect_equal(coef(res), b0, tolerance=tol) ## ---------- 3. full batch expect_silent( res <- maxSGA(loglik, gradlik, start=start, control=list(printLevel=0, iterlim=200, SG_batchSize=NULL, SG_learningRate=0.1), nObs=length(yTrain)) ) expect_equal(coef(res), b0, tolerance=tol) ## ---------- 4. 
no gradient expect_silent( res <- maxSGA(loglik, start=start, control=list(iterlim=1000, SG_learningRate=0.02), nObs=length(yTrain)) ) expect_equal(coef(res), b0, tolerance=tol) ## ---------- return Hessian, gradient provided expect_silent( res <- maxSGA(loglik, gradlik, start=start, control=list(iterlim=1000, SG_learningRate=0.02), nObs=length(yTrain), finalHessian=TRUE) ) expect_equal(coef(res), b0, tolerance=tol) expect_equal(dim(hessian(res)), c(2,2)) ## ---------- return Hessian, no gradient expect_silent( res <- maxSGA(loglik, start=start, control=list(iterlim=1000, SG_learningRate=0.02), nObs=length(yTrain), finalHessian=TRUE) ) expect_equal(coef(res), b0, tolerance=tol) expect_equal(dim(hessian(res)), c(2,2)) ### ---------- SGA, printlevel 1, storeValues ---------- ### it should just work expect_silent( res <- maxSGA(loglik, gradlik, start=start, control=list(iterlim=2, storeValues=TRUE, printLevel=1), nObs=length(yTrain), finalHessian=TRUE) ) ### ---------- SGA, NA as iterlim ---------- ### should give informative error expect_error( res <- maxSGA(loglik, gradlik, start=start, control=list(iterlim=NA), nObs=length(yTrain), finalHessian=TRUE), pattern = "invalid class \"MaxControl\" object: NA in 'iterlim'" ) ### ---------- SGA, fn missing but storeValues=TRUE ### should give informative error expect_error( res <- maxSGA(grad=gradlik, start=start, control=list(iterlim=10, storeValues=TRUE), nObs=length(yTrain)), pattern = "Cannot compute the objective function value: no objective function supplied" ) ## ---------- gradient by observations gradlikO <- function(beta, index) { e <- yTrain[index] - XTrain[index,,drop=FALSE] %*% beta g <- -2*drop(e)*XTrain[index,,drop=FALSE] -g/length(index) } expect_silent( res <- maxSGA(grad=gradlikO, start=start, control=list(printLevel=0, iterlim=100, SG_batchSize=100), nObs=length(yTrain)) ) expect_equal(coef(res), b0, tolerance=tol) ## ---------- 0 iterations expect_silent( res <- maxSGA(grad=gradlik, start=start, control=list(iterlim=0), nObs=length(yTrain)) ) expect_equal(coef(res), start) # should return start values exactly ### -------------------- create unequally scaled data set.seed(1) N <- 1000 x <- rnorm(N, sd=100) XTrain <- cbind(1, x) yTrain <- 1 + x + rnorm(N) start <- c(const=10, x=10) ## ---------- no gradient clipping: ## should fail with informative "NA/Inf in gradient" message expect_error( res <- maxSGA(loglik, gradlik, start=start, control=list(iterlim=100, SG_learningRate=0.5), nObs=length(yTrain)), pattern = "NA/Inf in gradient" ) ## ---------- gradient clipping: should not fail expect_silent( res <- maxSGA(loglik, gradlik, start=start, control=list(iterlim=100, SG_learningRate=0.5, SG_clip=1e6), nObs=length(yTrain) ) ) maxLik/build/0000755000175100001440000000000015124514352012616 5ustar hornikusersmaxLik/build/vignette.rds0000644000175100001440000000051015124514352015151 0ustar hornikusers‹•R]KÃ0ÍÚnº2Ø«yT¤ý¾ c"*ˆúàklÓö²6MJõÍ?îêKQ™ú›Ü“sr.‡<Œ! 
úñ|<ú,\cÓ€ q?¡+j–ì ʺ ˜ór)“èV4–v ´Œs¦4ÄaV±¸ÐFqóO¬q­@d/,îš]³¤Ž5HqJ›œi ŠZgºv¶ü“»ó3 ŽFçœ>21­Ë8•©‘#ÏJޝ¿=F9Ž]2£§ 蜮æþO‹$ýCk–„Á]³ý©[ÒÞ¦Ì-g×M‘É»ÍÊŽü°ÿrxXzöõ¾`%Wör`Á`ï÷ ?ÿf:³Ç^7ßÖ”/¸H”m·/ùs#+ì]£a%›¨3Û1ßöKÛ¶¯_'Š ¦º‰:p”0Í¢´B=vË7tý²ømaxLik/build/partial.rdb0000644000175100001440000000007515124514264014747 0ustar hornikusers‹‹àb```b`aeb`b1…À€… H02°0piÖ¼ÄÜÔb C"Éð¸F$7maxLik/man/0000755000175100001440000000000014077525067012305 5ustar hornikusersmaxLik/man/compareDerivatives.Rd0000644000175100001440000001076514077525067016441 0ustar hornikusers\name{compareDerivatives} \alias{compareDerivatives} \title{function to compare analytic and numeric derivatives} \description{ This function compares analytic and numerical derivative and prints related diagnostics information. It is intended for testing and debugging code for analytic derivatives for maximization algorithms. } \usage{ compareDerivatives(f, grad, hess=NULL, t0, eps=1e-6, printLevel=1, print=printLevel > 0, max.rows=getOption("max.rows", 20), max.cols=getOption("max.cols", 7), ...) } \arguments{ \item{f}{ function to be differentiated. The parameter (vector) of interest must be the first argument. The function may return a vector, in that case the derivative will be a matrix. } \item{grad}{ analytic gradient. This may be either a function, returning the analytic gradient, or a numeric vector, the pre-computed gradient. The function must use the same set of parameters as \code{f}. If \code{f} is a vector-valued function, grad must return/be a matrix where the number of rows equals the number of components of \code{f}, and the number of columns must equal to the number of components in \code{t0}. } \item{hess}{ function returning the analytic hessian. If present, hessian matrices are compared too. Only appropriate for scalar-valued functions. } \item{t0}{ numeric vector, parameter at which the derivatives are compared. The derivative is taken with respect to this vector. both \code{f}m \code{grad} (if function) and \code{hess} (if present) must accept this value as the first parameter. } \item{eps}{ numeric. Step size for numeric differentiation. Central derivative is used. } \item{printLevel}{ numeric: a positive number prints summary of the comparison. 0 does not do any printing, only returns the comparison results (invisibly). } \item{print}{ deprecated (for backward compatibility only). } \item{max.rows}{maximum number of matrix rows to be printed. } \item{max.cols}{maximum number of columns to be printed. } \item{\dots}{ further arguments to \code{f}, \code{grad} and \code{hess}. } } \details{ Analytic derivatives (and Hessian) substantially improve the estimation speed and reliability. However, these are typically hard to program. This utility compares the programmed result and the (internally calculated) numeric derivative. For every component of \code{f}, it prints the parameter value, analytic and numeric derivative, and their relative difference \deqn{\textrm{rel.diff} = \frac{\textrm{analytic} - \textrm{numeric}}{\frac{1}{2}(|\textrm{analytic}| + |\textrm{numeric}|)}.}{rel.diff = (analytic - numeric)/(0.5*(abs(analytic) + abs(numeric))).} If \eqn{\textrm{analytic} = 0}{analytic == 0} and \eqn{\textrm{numeric} = 0}{numeric == 0}, then rel.diff is also set to 0. If analytic derivatives are correct and the function is sufficiently smooth, expect the relative differences to be less than \eqn{10^{-7}}{1e-7}. 
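As a purely illustrative sketch (not code from the package; the object names are ours), the relative difference for a single derivative can be reproduced by hand with a central difference:
\preformatted{
## check sin'(1) = cos(1) by hand; 'eps' mirrors the default step size
eps <- 1e-6
analytic <- cos(1)
numeric <- (sin(1 + eps) - sin(1 - eps))/(2*eps)  # central difference
(analytic - numeric)/(0.5*(abs(analytic) + abs(numeric)))
## typically around 1e-10, i.e. well below the 1e-7 benchmark above
}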
} \value{ A list with following components: \item{t0}{the input argument \code{t0}} \item{f.t0}{f(t0)} \item{compareGrad}{ a list with components \code{analytic} = grad(t0), \code{nmeric} = numericGradient(f, t0), and their \code{rel.diff}. } \item{maxRelDiffGrad}{max(abs(rel.diff))} If \code{hess} is also provided, the following optional components are also present: \item{compareHessian}{ a list with components \code{analytic} = hess(t0), \code{numeric} = numericGradient(grad, t0), and their \code{rel.diff}. } \item{maxRelDiffHess}{max(abs(rel.diff)) for the Hessian} } \author{Ott Toomet \email{otoomet@ut.ee} and Spencer Graves} \seealso{ \code{\link{numericGradient}} \code{\link{deriv}} } \examples{ ## A simple example with sin(x)' = cos(x) f <- function(x) c(sin=sin(x)) Dsin <- compareDerivatives(f, cos, t0=c(angle=1)) ## ## Example of normal log-likelihood. Two-parameter ## function. ## x <- rnorm(100, 1, 2) # generate rnorm x l <- function(b) sum(dnorm(x, mean=b[1], sd=b[2], log=TRUE)) gradl <- function(b) { c(mu=sum(x - b[1])/b[2]^2, sigma=sum((x - b[1])^2/b[2]^3 - 1/b[2])) } gradl. <- compareDerivatives(l, gradl, t0=c(mu=1,sigma=2)) ## ## An example with f returning a vector, t0 = a scalar ## trig <- function(x)c(sin=sin(x), cos=cos(x)) Dtrig <- function(x)c(sin=cos(x), cos=-sin(x)) Dtrig. <- compareDerivatives(trig, Dtrig, t0=1) } \keyword{math} \keyword{utilities} maxLik/man/nObs.Rd0000644000175100001440000000230615124512772013467 0ustar hornikusers\name{nObs.maxLik} \alias{nObs.maxLik} \title{Number of Observations} \description{ Returns the number of observations for statistical models, estimated by Maximum Likelihood using \code{\link{maxLik}}. } \usage{ \method{nObs}{maxLik}(x, \dots) } \arguments{ \item{x}{a statistical model estimated by Maximum Likelihood using \code{\link{maxLik}}.} \item{\dots}{further arguments (currently ignored).} } \details{ The \code{nObs} method for \dQuote{maxLik} objects can return the number of observations only if log-likelihood function (or the gradient) returns values by individual observation. } \value{ numeric, number of observations } \author{Arne Henningsen, Ott Toomet} \seealso{\code{\link[miscTools]{nObs}}, \code{\link{maxLik}}, \code{\link[miscTools]{nParam}}.} \examples{ ## fit a normal distribution by ML # generate a variable from normally distributed random numbers x <- rnorm( 100, 1, 2 ) # log likelihood function (for individual observations) llf <- function( param ) { return( dnorm( x, mean = param[ 1 ], sd = param[ 2 ], log = TRUE ) ) } ## ML method ml <- maxLik( llf, start = c( mu = 0, sigma = 1 ) ) # return number of onservations nObs( ml ) } \keyword{methods} maxLik/man/sumt.Rd0000644000175100001440000001231314077525067013564 0ustar hornikusers\name{sumt} \Rdversion{1.1} \alias{sumt} \title{ Equality-constrained optimization } \description{ Sequentially Unconstrained Maximization Technique (SUMT) based optimization for linear equality constraints. This implementation is primarily intended to be called from other maximization routines, such as \code{\link{maxNR}}. } \usage{ sumt(fn, grad=NULL, hess=NULL, start, maxRoutine, constraints, SUMTTol = sqrt(.Machine$double.eps), SUMTPenaltyTol = sqrt(.Machine$double.eps), SUMTQ = 10, SUMTRho0 = NULL, printLevel=print.level, print.level = 0, SUMTMaxIter = 100, ...) } \arguments{ \item{fn}{ function of a (single) vector parameter. The function may have more arguments (passed by \dots), but those are not treated as the parameter. } \item{grad}{ gradient function of \code{fn}. 
NULL if missing } \item{hess}{ function, Hessian of the \code{fn}. NULL if missing } \item{start}{ numeric, initial value of the parameter } \item{maxRoutine}{ maximization algorithm, such as \code{\link{maxNR}} } \item{constraints}{list, information for constrained maximization. Currently two components are supported: \code{eqA} and \code{eqB} for linear equality constraints: \eqn{A \beta + B = 0}{A \%*\% beta + B = 0}. The user must ensure that the matrices \code{A} and \code{B} are conformable.} \item{SUMTTol}{ stopping condition. If the estimates at successive outer iterations are close enough, i.e. maximum of the absolute value over the component difference is smaller than SUMTTol, the algorithm stops. Note this does not necessarily mean that the constraints are satisfied. If the penalty function is too \dQuote{weak}, SUMT may repeatedly find the same optimum. In that case a warning is issued. The user may set SUMTTol to a lower value, e.g. to zero. } \item{SUMTPenaltyTol}{ stopping condition. If the barrier value (also called penalty) \eqn{(A \beta + B)'(A \beta + B)}{t(A \%*\% beta + B) \%*\% (A \%*\% beta + B)} is less than \code{SUMTTol}, the algorithm stops } \item{SUMTQ}{ a double greater than one, controlling the growth of the \code{rho} as described in Details. Defaults to 10. } \item{SUMTRho0}{ Initial value for \code{rho}. If not specified, a (possibly) suitable value is selected. See Details. One should consider supplying \code{SUMTRho0} in case where the unconstrained problem does not have a maximum, or the maximum is too far from the constrained value. Otherwise the authomatically selected value may not lead to convergence. } \item{printLevel}{ Integer, debugging information. Larger number prints more details. } \item{print.level}{same as \sQuote{printLevel}, for backward compatibility} \item{SUMTMaxIter}{ Maximum SUMT iterations } \item{\dots}{ Other arguments to \code{maxRoutine} and \code{fn}. } } \details{ The Sequential Unconstrained Minimization Technique is a heuristic for constrained optimization. To minimize a function \eqn{f}{f} subject to constraints, it uses a non-negative penalty function \eqn{P}{P}, such that \eqn{P(x)}{P(x)} is zero iff \eqn{x}{x} satisfies the constraints. One iteratively minimizes \eqn{f(x) + \varrho_k P(x)}{f(x) + rho_k P(x)}, where the \eqn{\varrho}{rho} values are increased according to the rule \eqn{\varrho_{k+1} = q \varrho_k}{rho_{k+1} = q rho_k} for some constant \eqn{q > 1}{q > 1}, until convergence is achieved in the sense that the barrier value \eqn{P(x)'P(x)}{P(x)'P(x)} is close to zero. Note that there is no guarantee that the global constrained optimum is found. Standard practice recommends to use the best solution found in \dQuote{sufficiently many} replications. Any of the maximization algorithms in the \pkg{maxLik}, such as \code{\link{maxNR}}, can be used for the unconstrained step. Analytic gradient and hessian are used if provided. } \value{ Object of class 'maxim'. In addition, a component \item{constraints}{A list, describing the constrained optimization. Includes the following components: \describe{ \item{type}{type of constrained optimization} \item{barrier.value}{value of the penalty function at maximum} \item{code}{code for the stopping condition} \item{message}{a short message, describing the stopping condition} \item{outer.iterations}{number of iterations in the SUMT step} } } } \section{Note}{ In case of equality constraints, it may be more efficient to enclose the function in a wrapper function. 
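For instance (an illustrative sketch only, using our own function name \code{hatfFree}), a single linear equality constraint such as \eqn{x + y = 1}{x + y = 1} can be substituted out, leaving an unconstrained problem over the free parameter:
\preformatted{
## maximize exp(-(x^2 + y^2)) subject to x + y = 1 by substitution:
## the wrapper maps the free parameter x to the full vector c(x, 1 - x)
hatfFree <- function(x) {
   theta <- c(x, 1 - x)
   exp(-sum(theta^2))
}
res <- maxNR(hatfFree, start=0)  # plain unconstrained 1-D maximization
c(coef(res), 1 - coef(res))      # recovers approximately c(0.5, 0.5)
}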
The wrapper calculates full set of parameters based on a smaller set of parameters, and the constraints. } \author{ Ott Toomet, Arne Henningsen } \seealso{ \code{\link[clue]{sumt}} in package \pkg{clue}. } \examples{ ## We maximize exp(-x^2 - y^2) where x+y = 1 hatf <- function(theta) { x <- theta[1] y <- theta[2] exp(-(x^2 + y^2)) ## Note: you may prefer exp(- theta \%*\% theta) instead } ## use constraints: x + y = 1 A <- matrix(c(1, 1), 1, 2) B <- -1 res <- sumt(hatf, start=c(0,0), maxRoutine=maxNR, constraints=list(eqA=A, eqB=B)) print(summary(res)) } \keyword{optimize} maxLik/man/maxLik-methods.Rd0000644000175100001440000000326014077525067015463 0ustar hornikusers\name{AIC.maxLik} \alias{AIC.maxLik} \alias{coef.maxim} \alias{coef.maxLik} \alias{stdEr.maxLik} \title{Methods for the various standard functions} \description{ These are methods for the maxLik related objects. See also the documentation for the corresponding generic functions } \usage{ \method{AIC}{maxLik}(object, \dots, k=2) \method{coef}{maxim}(object, \dots) \method{coef}{maxLik}(object, \dots) \method{stdEr}{maxLik}(x, eigentol=1e-12, \dots) } \arguments{ \item{object}{a \sQuote{maxLik} object (\code{coef} can also handle \sQuote{maxim} objects)} \item{k}{numeric, the penalty per parameter to be used; the default \sQuote{k = 2} is the classical AIC.} \item{x}{a \sQuote{maxLik} object} \item{eigentol}{ The standard errors are only calculated if the ratio of the smallest and largest eigenvalue of the Hessian matrix is less than \dQuote{eigentol}. Otherwise the Hessian is treated as singular. } \item{\dots}{other arguments for methods} } \details{ \describe{ \item{AIC}{calculates Akaike's Information Criterion (and other information criteria).} \item{coef}{extracts the estimated parameters (model's coefficients).} \item{stdEr}{extracts standard errors (using the Hessian matrix). } } } \examples{ ## estimate mean and variance of normal random vector set.seed(123) x <- rnorm(50, 1, 2) ## log likelihood function. ## Note: 'param' is a vector llf <- function( param ) { mu <- param[ 1 ] sigma <- param[ 2 ] return(sum(dnorm(x, mean=mu, sd=sigma, log=TRUE))) } ## Estimate it. Take standard normal as start values ml <- maxLik(llf, start = c(mu=0, sigma=1) ) coef(ml) stdEr(ml) AIC(ml) } \keyword{methods} maxLik/man/condiNumber.Rd0000644000175100001440000000645614077525067015054 0ustar hornikusers\name{condiNumber} \alias{condiNumber} \alias{condiNumber.default} \alias{condiNumber.maxLik} \title{Print matrix condition numbers column-by-column} \description{ This function prints the condition number of a matrix while adding columns one-by-one. This is useful for testing multicollinearity and other numerical problems. It is a generic function with a default method, and a method for \code{maxLik} objects. } \usage{ condiNumber(x, ...) \method{condiNumber}{default}(x, exact = FALSE, norm = FALSE, printLevel=print.level, print.level=1, digits = getOption( "digits" ), ... ) \method{condiNumber}{maxLik}(x, ...) } %- maybe also 'usage' for other objects documented here. \arguments{ \item{x}{numeric matrix, condition numbers of which are to be printed} \item{exact}{logical, should condition numbers be exact or approximations (see \code{\link{kappa}})} \item{norm}{logical, whether the columns should be normalised to have unit norm} \item{printLevel}{numeric, positive value will output the numbers during the calculations. 
Useful for interactive work.} \item{print.level}{same as \sQuote{printLevel}, for backward compatibility} \item{digits}{minimal number of significant digits to print (only relevant if argument \code{print.level} is larger than zero).} \item{\dots}{Further arguments to \code{condiNumber.default} are currently ignored; further arguments to \code{condiNumber.maxLik} are passed to \code{condiNumber.default}.} } \details{ Statistical model often fail because of a high correlation between the explanatory variables in the linear index (multicollinearity) or because the evaluated maximum of a non-linear model is virtually flat. In both cases, the (near) singularity of the related matrices may help to understand the problem. \code{condiNumber} inspects the matrices column-by-column and indicates which variables lead to a jump in the condition number (cause singularity). If the matrix column name does not immediately indicate the problem, one may run an OLS model by estimating this column using all the previous columns as explanatory variables. Those columns that explain almost all the variation in the current one will have very high \eqn{t}{t}-values. } \value{ Invisible vector of condition numbers by column. If the start values for \code{\link{maxLik}} are named, the condition numbers are named accordingly. } \references{ Greene, W. (2012): \emph{Econometrics Analysis}, 7th edition, p. 130. } \author{Ott Toomet} \seealso{\code{\link{kappa}}} \examples{ set.seed(0) ## generate a simple nearly multicollinear dataset x1 <- runif(100) x2 <- runif(100) x3 <- x1 + x2 + 0.000001*runif(100) # this is virtually equal to x1 + x2 x4 <- runif(100) y <- x1 + x2 + x3 + x4 + rnorm(100) m <- lm(y ~ -1 + x1 + x2 + x3 + x4) print(summary(m)) # note the outlandish estimates and standard errors # while R^2 is 0.88. This suggests multicollinearity condiNumber(model.matrix(m)) # note the value 'explodes' at x3 ## we may test the results further: print(summary(lm(x3 ~ -1 + x1 + x2))) # Note the extremely high t-values and R^2: x3 is (almost) completely # explained by x1 and x2 } \keyword{math} \keyword{utilities} \keyword{debugging} % is it debugging? maxLik/man/maxLik-internal.Rd0000644000175100001440000000101314077525067015626 0ustar hornikusers\name{maxLik-internal} \alias{checkFuncArgs} \alias{constrOptim2} \alias{maximMessage} \alias{maxNRCompute} \alias{observationGradient} \alias{print.summary.maxLik} \alias{returnCode.maxim} % Document the following: %%%% \title{ Internal maxLik Functions } \description{ Internal maxLik Functions } \details{ These are either various methods, or functions, not intended to be called directly by the user (or in some cases are just waiting for proper documentation to be written :). } \keyword{ internal } maxLik/man/maxLik.Rd0000644000175100001440000001320014077525067014015 0ustar hornikusers\name{maxLik} \alias{maxLik} \alias{print.maxLik} \title{Maximum likelihood estimation} \description{ This is the main interface for the \pkg{maxLik} package, and the function that performs Maximum Likelihood estimation. It is a wrapper for different optimizers returning an object of class "maxLik". Corresponding methods handle the likelihood-specific properties of the estimates, including standard errors. } \usage{ maxLik(logLik, grad = NULL, hess = NULL, start, method, constraints=NULL, ...) } \arguments{ \item{logLik}{log-likelihood function. Must have the parameter vector as the first argument. 
Must return either a single log-likelihood value, or a numeric vector where each component is the log-likelihood of the corresponding individual observation.} \item{grad}{gradient of log-likelihood. Must have the parameter vector as the first argument. Must return either a single gradient vector with length equal to the number of parameters, or a matrix where each row is the gradient vector of the corresponding individual observation. If \code{NULL}, numeric gradient will be used.} \item{hess}{Hessian of log-likelihood. Must have the parameter vector as the first argument. Must return a square matrix. If \code{NULL}, numeric Hessian will be used.} \item{start}{numeric vector, initial value of parameters. If it has names, these will also be used for naming the results.} \item{method}{maximisation method, currently either "NR" (for Newton-Raphson), "BFGS" (for Broyden-Fletcher-Goldfarb-Shanno), "BFGSR" (for the BFGS algorithm implemented in \R), "BHHH" (for Berndt-Hall-Hall-Hausman), "SANN" (for Simulated ANNealing), "CG" (for Conjugate Gradients), or "NM" (for Nelder-Mead). Lower-case letters (such as "nr" for Newton-Raphson) are allowed. The default method is "NR" for unconstrained problems, and "NM" or "BFGS" for constrained problems, depending on whether the \code{grad} argument was provided. "BHHH" is a good alternative given that the likelihood is returned observation-wise (see \code{\link{maxBHHH}}). Note that stochastic gradient ascent (SGA) is currently not supported as this method seems to be rarely used for maximum likelihood estimation. } \item{constraints}{either \code{NULL} for unconstrained maximization or a list, specifying the constraints. See \code{\link{maxBFGS}}. } \item{\dots}{further arguments, such as \code{control}, \code{iterlim}, or \code{tol}, are passed to the selected maximisation routine, i.e. \code{\link{maxNR}}, \code{\link{maxBFGS}}, \code{\link{maxBFGSR}}, \code{\link{maxBHHH}}, \code{\link{maxSANN}}, \code{\link{maxCG}}, or \code{\link{maxNM}} (depending on argument \code{method}). Arguments not used by the optimizers are forwarded to \code{logLik}, \code{grad} and \code{hess}. } } \details{ \code{maxLik} supports constrained optimization in the sense that constraints are passed further to the underlying optimization routines, and a suitable default method is selected. However, no attempt is made to correct the resulting variance-covariance matrix. Hence the inference may be wrong. A corresponding warning is issued by the summary method. } \value{ object of class 'maxLik' which inherits from class 'maxim'. Useful methods include \itemize{ \item \code{\link[=AIC.maxLik]{AIC}}: Akaike's Information Criterion \item \code{\link[=coef.maxLik]{coef}}: estimated parameter values \item \code{\link[=logLik.maxLik]{logLik}}: log-likelihood value \item \code{\link{nIter}}: number of iterations \item \code{\link[=stdEr.maxLik]{stdEr}}: standard errors \item \code{\link[=summary.maxLik]{summary}}: summary table with estimates, standard errors, and z- and p-values. \item \code{\link[=vcov.maxLik]{vcov}}: variance-covariance matrix } } \section{Warning}{The constrained maximum likelihood estimation should be considered experimental. In particular, the variance-covariance matrix is not corrected for constrained parameter space.
} \author{Ott Toomet, Arne Henningsen} \seealso{\code{\link{maxNR}}, \code{\link{nlm}} and \code{\link{optim}} for different non-linear optimisation routines, see \code{\link{maxBFGS}} for the constrained maximization examples.} \examples{ ## Estimate the parameter of exponential distribution t <- rexp(100, 2) loglik <- function(theta) log(theta) - theta*t gradlik <- function(theta) 1/theta - t hesslik <- function(theta) -100/theta^2 ## Estimate with numeric gradient and hessian a <- maxLik(loglik, start=1, control=list(printLevel=2)) summary( a ) ## ## Estimate with analytic gradient and hessian. ## require much smaller tolerance ## setting 'tol=0' or negative essentially disables this stopping criterion a <- maxLik(loglik, gradlik, hesslik, start=1, control=list(tol=-1, reltol=1e-12, gradtol=1e-12)) summary( a ) ## ## Next, we give an example with vector argument: ## fit normal distribution by estimating mean and standard deviation ## by maximum likelihood ## loglik <- function(param) { # param: vector of 2, c(mean, standard deviation) mu <- param[1] sigma <- param[2] ll <- -0.5*N*log(2*pi) - N*log(sigma) - sum(0.5*(x - mu)^2/sigma^2) # can use dnorm(x, mu, sigma, log=TRUE) instead ll } x <- rnorm(100, 1, 2) # use mean=1, stdd=2 N <- length(x) res <- maxLik(loglik, start=c(0,1)) # use 'wrong' start values summary(res) ## ## Same example, but now with named parameters and a fixed value ## resFix <- maxLik(loglik, start=c(mu=0, sigma=1), fixed="sigma") summary(resFix) # 'sigma' is exactly 1.000 now. } \keyword{optimize} maxLik/man/storedValues.Rd0000644000175100001440000000323514077525067015257 0ustar hornikusers\name{storedValues} \alias{storedValues} \alias{storedValues.maxim} \alias{storedParameters} \alias{storedParameters.maxim} \title{Return the stored values of optimization} \description{ Retrieve the objective function value for each iteration if stored during the optimization. } \usage{ storedValues(x, \dots) \method{storedValues}{maxim}(x, \dots) storedParameters(x, \dots) \method{storedParameters}{maxim}(x, \dots) } \arguments{ \item{x}{a result of maximization, created by \code{\link{maxLik}}, \code{\link{maxSGA}} or another optimizer.} \item{\dots}{further arguments for other methods} } \details{ These is a generic method. If asked by control parameter \code{storeValues=TRUE} or \code{storeParameters=TRUE}, certain optimization methods store the objective function value and the parameter value at each epoch. These methods retrieves the stored values. } \value{ \itemize{ \item \code{storedValues}: a numeric vector, one value for each iteration \item \code{storedParameters}: a numeric matrix with rows corresponding to the iterations and columns to the parameter components. } In both cases, the first value stored corresponds to the initial parameter. 
} \author{Ott Toomet} \seealso{\code{\link{maxSGA}}, \code{\link{maxControl}} } \examples{ ## Estimate the exponential distribution parameter t <- rexp(100, 2) loglik <- function(theta, index) sum(log(theta) - theta*t[index]) ## Estimate with numeric gradient and numeric Hessian a <- maxSGA(loglik, start=1, control=list(storeValues=TRUE, storeParameters=TRUE, iterlim=10), nObs=100) storedValues(a) storedParameters(a) } \keyword{methods} maxLik/man/numericGradient.Rd0000644000175100001440000000541514077525067015721 0ustar hornikusers\name{numericGradient} \alias{numericGradient} \alias{numericHessian} \alias{numericNHessian} \title{Functions to Calculate Numeric Derivatives} \description{ Calculate (central) numeric gradient and Hessian, including of vector-valued functions. } \usage{ numericGradient(f, t0, eps=1e-06, fixed, \dots) numericHessian(f, grad=NULL, t0, eps=1e-06, fixed, \dots) numericNHessian(f, t0, eps=1e-6, fixed, \dots) } \arguments{ \item{f}{function to be differentiated. The first argument must be the parameter vector with respect to which it is differentiated. For the numeric gradient, \code{f} may return a (numeric) vector; for the Hessian it should return a numeric scalar.} \item{grad}{function, gradient of \code{f}} \item{t0}{vector, the parameter values} \item{eps}{numeric, the step for numeric differentiation} \item{fixed}{logical index vector, fixed parameters. The derivative is calculated only with respect to the parameters for which \code{fixed == FALSE}; \code{NA} is returned for the fixed parameters. If missing, all parameters are treated as active.} \item{\dots}{further arguments for \code{f}} } \details{ \code{numericGradient} numerically differentiates a (vector-valued) function with respect to its (vector-valued) argument. If the function's value is an \eqn{N_{val} \times 1}{\code{N_val * 1}} vector and the argument is an \eqn{N_{par} \times 1}{\code{N_par * 1}} vector, the resulting gradient is an \eqn{N_{val} \times N_{par}}{\code{NVal * NPar}} matrix. \code{numericHessian} checks whether a gradient function is present. If yes, it calculates the gradient of the gradient; if not, it calculates the full numeric Hessian (\code{numericNHessian}). } \value{ Matrix. For \code{numericGradient}, the number of rows is equal to the length of the function value vector, and the number of columns is equal to the length of the parameter vector. For \code{numericHessian}, both the number of rows and the number of columns are equal to the length of the parameter vector. } \section{Warning}{ Be careful when using numerical differentiation in optimization routines. Although quite precise in simple cases, numeric derivatives may work very poorly in more complicated conditions.
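As a rough, hypothetical illustration (a toy function of our own, not part of the package), the step size \code{eps} can visibly affect the accuracy:
\preformatted{
## numeric derivative of f(x) = x^4 at x = 10; the analytic value is 4000
f <- function(x) x^4
numericGradient(f, t0=10)         # default eps = 1e-6: close to 4000
numericGradient(f, t0=10, eps=1)  # large step: roughly 4040, about 1 percent off
}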
} \author{Ott Toomet} \seealso{\code{\link{compareDerivatives}}, \code{\link{deriv}}} \examples{ # A simple example with Gaussian bell surface f0 <- function(t0) exp(-t0[1]^2 - t0[2]^2) numericGradient(f0, c(1,2)) numericHessian(f0, t0=c(1,2)) # An example with the analytic gradient gradf0 <- function(t0) -2*t0*f0(t0) numericHessian(f0, gradf0, t0=c(1,2)) # The results should be similar as in the previous case # The central numeric derivatives are often quite precise compareDerivatives(f0, gradf0, t0=1:2) # The difference is around 1e-10 } \keyword{math} \keyword{utilities} maxLik/man/tidy.maxLik.Rd0000644000175100001440000000352514077525067014776 0ustar hornikusers\name{tidy.maxLik} \alias{tidy.maxLik} \alias{glance.maxLik} \title{tidy and glance methods for maxLik objects} \description{ These methods return summary information about the estimated model. Both require the \pkg{tibble} package to be installed. } \usage{ \method{tidy}{maxLik}(x, ...) \method{glance}{maxLik}(x, ...) } \arguments{ \item{x}{ object of class 'maxLik'. } \item{\ldots}{Not used.} } \value{ For \code{tidy()}, a tibble with columns: \describe{ \item{term}{The name of the estimated parameter (parameters are sequentially numbered if names missing).} \item{estimate}{The estimated parameter.} \item{std.error}{The standard error of the estimate.} \item{statistic}{The \eqn{z}{z}-statistic of the estimate.} \item{p.value}{The \eqn{p}{p}-value.} } This is essentially the same table as \code{summary}-method prints, just in form of a tibble (data frame). For \code{glance()}, a one-row tibble with columns: \describe{ \item{df}{The degrees of freedom of the model.} \item{logLik}{The log-likelihood of the model.} \item{AIC}{Akaike's Information Criterion for the model.} \item{nobs}{The number of observations, if this is available, otherwise \code{NA}.} } } \seealso{ The functions \code{\link[generics:tidy]{tidy}} and \code{\link[generics:glance]{glance}} in package \pkg{generics}, and \code{\link[=summary.maxLik]{summary}} to display the \dQuote{standard} summary information. } \author{David Hugh-Jones} \examples{ ## Example with a single parameter t <- rexp(100, 2) loglik <- function(theta) log(theta) - theta*t a <- maxLik(loglik, start=2) tidy(a) glance(a) ## Example with a parameter vector x <- rnorm(100) loglik <- function(theta) { dnorm(x, mean=theta[1], sd=theta[2], log=TRUE) } a <- maxLik(loglik, start=c(mu=0, sd=1)) tidy(a) glance(a) } maxLik/man/summary.maxim.Rd0000644000175100001440000000365114077525067015410 0ustar hornikusers\name{summary.maxim} \alias{summary.maxim} \alias{print.summary.maxim} \title{Summary method for maximization} \description{ Summarizes the general maximization results in a way that does not assume the function is log-likelihood. } \usage{ \method{summary}{maxim}( object, hessian=FALSE, unsucc.step=FALSE, ... ) \method{print}{summary.maxim}(x, max.rows=getOption("max.rows", 20), max.cols=getOption("max.cols", 7), ... ) } \arguments{ \item{object}{optimization result, object of class \code{maxim}. See \code{\link{maxNR}}.} \item{hessian}{logical, whether to display Hessian matrix.} \item{unsucc.step}{logical, whether to describe last unsuccesful step if \code{code} == 3} \item{x}{object of class \code{summary.maxim}, summary of maximization result. } \item{max.rows}{maximum number of rows to be printed. This applies to the resulting coefficients (as those are printed as a matrix where the other column is the gradient), and to the Hessian if requested. 
} \item{max.cols}{maximum number of columns to be printed. Only Hessian output, if requested, uses this argument. } \item{\ldots}{currently not used.} } \value{ Object of class \code{summary.maxim}, intended to be printed with corresponding print method. } \author{Ott Toomet} \seealso{\code{\link{maxNR}}, \code{\link{returnCode}}, \code{\link{returnMessage}}} \examples{ ## minimize a 2D quadratic function: f <- function(b) { x <- b[1]; y <- b[2]; val <- -(x - 2)^2 - (y - 3)^2 # concave parabola attr(val, "gradient") <- c(-2*x + 4, -2*y + 6) attr(val, "hessian") <- matrix(c(-2, 0, 0, -2), 2, 2) val } ## Note that NR finds the minimum of a quadratic function with a single ## iteration. Use c(0,0) as initial value. res <- maxNR( f, start = c(0,0) ) summary(res) summary(res, hessian=TRUE) } \keyword{methods} \keyword{print} maxLik/man/vcov.maxLik.Rd0000644000175100001440000000265214077525067015002 0ustar hornikusers\name{vcov.maxLik} \alias{vcov.maxLik} \title{Variance Covariance Matrix of maxLik objects} \description{ Extract variance-covariance matrices from \code{\link{maxLik}} objects. } \usage{ \method{vcov}{maxLik}( object, eigentol=1e-12, ... ) } \arguments{ \item{object}{a \sQuote{maxLik} object.} \item{eigentol}{ eigenvalue tolerance, controlling when the Hessian matrix is treated as numerically singular. } \item{\dots}{further arguments (currently ignored).} } \value{ the estimated variance covariance matrix of the coefficients. In case of the estimated Hessian is singular, it's values are \code{Inf}. The values corresponding to fixed parameters are zero. } \details{ The standard errors are only calculated if the ratio of the smallest and largest eigenvalue of the Hessian matrix is less than \dQuote{eigentol}. Otherwise the Hessian is treated as singular. } \author{ Arne Henningsen, Ott Toomet } \seealso{\code{\link[stats]{vcov}}, \code{\link{maxLik}}.} \examples{ ## ML estimation of exponential random variables t <- rexp(100, 2) loglik <- function(theta) log(theta) - theta*t gradlik <- function(theta) 1/theta - t hesslik <- function(theta) -100/theta^2 ## Estimate with numeric gradient and hessian a <- maxLik(loglik, start=1, control=list(printLevel=2)) vcov(a) ## Estimate with analytic gradient and hessian a <- maxLik(loglik, gradlik, hesslik, start=1) vcov(a) } \keyword{methods} maxLik/man/nIter.Rd0000644000175100001440000000206714077525067013662 0ustar hornikusers\name{nIter} \alias{nIter} \alias{nIter.default} \title{Return number of iterations for iterative models} \description{ Returns the number of iterations for iterative models. The default method assumes presence of a component \code{iterations} in \code{x}. } \usage{ nIter(x, \dots) \method{nIter}{default}(x, \dots) } \arguments{ \item{x}{a statistical model, or a result of maximisation, created by \code{\link{maxLik}}, \code{\link{maxNR}} or another optimizer.} \item{\dots}{further arguments for methods} } \details{ This is a generic function. The default method returns the component \code{x$iterations}. } \value{ numeric, number of iterations. Note that \sQuote{iteration} may mean different things for different optimizers. 
} \author{Ott Toomet} \seealso{\code{\link{maxLik}}, \code{\link{maxNR}} } \examples{ ## Estimate the exponential distribution parameter: t <- rexp(100, 2) loglik <- function(theta) sum(log(theta) - theta*t) ## Estimate with numeric gradient and numeric Hessian a <- maxNR(loglik, start=1) nIter(a) } \keyword{methods} maxLik/man/logLik.maxLik.Rd0000644000175100001440000000221214077525067015236 0ustar hornikusers\name{logLik.maxLik} \alias{logLik.maxLik} \alias{logLik.summary.maxLik} \title{Return the log likelihood value} \description{ Return the log likelihood value of objects of class \code{maxLik} and \code{summary.maxLik}. } \usage{ \method{logLik}{maxLik}( object, \dots ) \method{logLik}{summary.maxLik}( object, \dots ) } \arguments{ \item{object}{object of class \code{maxLik} or \code{summary.maxLik}, usually a model estimated with Maximum Likelihood} \item{...}{additional arguments to methods} } \value{ A scalar numeric, log likelihood of the estimated model. It has attribute \dQuote{df}, number of free parameters. } \author{ Arne Henningsen, Ott Toomet } \seealso{\code{\link{maxLik}}} \examples{ ## ML estimation of exponential duration model: t <- rexp(100, 2) loglik <- function(theta) log(theta) - theta*t gradlik <- function(theta) 1/theta - t hesslik <- function(theta) -100/theta^2 ## Estimate with analytic gradient and hessian a <- maxLik(loglik, gradlik, hesslik, start=1) ## print log likelihood value logLik( a ) ## print log likelihood value of summary object b <- summary( a ) logLik( b ) } \keyword{methods} maxLik/man/maxValue.Rd0000644000175100001440000000161114077525067014355 0ustar hornikusers\name{maxValue} \alias{maxValue} \alias{maxValue.maxim} \title{Function value at maximum} \description{ Returns the function value at (estimated) maximum. } \usage{ maxValue(x, ...) \method{maxValue}{maxim}(x, \dots) } \arguments{ \item{x}{a statistical model, or a result of maximisation, created by \code{\link{maxLik}}, \code{\link{maxNR}} or another optimizer.} \item{\dots}{further arguments for other methods} } \value{ numeric, the value of the objective function at maximum. In general, it is the last calculated value in case the process did not converge. } \author{Ott Toomet} \seealso{\code{\link{maxLik}}, \code{\link{maxNR}} } \examples{ ## Estimate the exponential distribution parameter: t <- rexp(100, 2) loglik <- function(theta) sum(log(theta) - theta*t) ## Estimate with numeric gradient and numeric Hessian a <- maxNR(loglik, start=1) maxValue(a) } \keyword{methods} maxLik/man/bread.maxLik.Rd0000644000175100001440000000277014077525067015103 0ustar hornikusers\name{bread.maxLik} \alias{bread} \alias{bread.maxLik} \title{Bread for Sandwich Estimator} \description{ Extracting an estimator for the \sQuote{bread} of the sandwich estimator, see \code{\link[sandwich]{bread}}. } \usage{ \method{bread}{maxLik}( x, ... ) } \arguments{ \item{x}{an object of class \code{maxLik}.} \item{\dots}{further arguments (currently ignored).} } \value{ Matrix, the inverse of the expectation of the second derivative (Hessian matrix) of the log-likelihood function with respect to the parameters. In case of the simple Maximum Likelihood, it is equal to the variance covariance matrix of the parameters, multiplied by the number of observations. } \section{Warnings}{ The \pkg{sandwich} package is required for this function. This method works only if the observaton-specific gradient information was available for the estimation. 
This is the case if the observation-specific gradient was supplied (see the \code{grad} argument for \code{\link{maxLik}}), or the log-likelihood function returns a vector of observation-specific values. } \author{ Arne Henningsen } \seealso{\code{\link[sandwich]{bread}}, \code{\link{maxLik}}.} \examples{ ## ML estimation of exponential duration model: t <- rexp(100, 2) loglik <- function(theta) log(theta) - theta*t ## Estimate with numeric gradient and hessian a <- maxLik(loglik, start=1 ) # Extract the "bread" library( sandwich ) bread( a ) all.equal( bread( a ), vcov( a ) * nObs( a ) ) } \keyword{methods} maxLik/man/gradient.Rd0000644000175100001440000000373414077525067014400 0ustar hornikusers\name{gradient} \alias{gradient} \alias{gradient.maxim} \alias{estfun} \alias{estfun.maxLik} \title{Extract Gradients Evaluated at each Observation} \description{ Extract the gradients of the log-likelihood function evaluated at each observation (\sQuote{Empirical Estimating Function}, see \code{\link[sandwich]{estfun}}). } \usage{ \method{estfun}{maxLik}(x, ...) \method{gradient}{maxim}(x, ...) } \arguments{ \item{x}{an object inheriting from class \code{maxim} (for \code{gradient}) or \code{maxLik}. (for \code{estfun}.)} \item{\dots}{further arguments (currently ignored).} } \value{ \item{\code{gradient}}{vector, objective function gradient at estimated maximum (or the last calculated value if the estimation did not converge.)} \item{\code{estfun}}{ matrix, observation-wise log-likelihood gradients at the estimated parameter value evaluated at each observation. Observations in rows, parameters in columns.} } \section{Warnings}{ The \pkg{sandwich} package must be loaded in order to use \code{estfun}. \code{estfun} only works if the observaton-specific gradient information was available for the estimation. This is the case of the observation-specific gradient was supplied (see the \code{grad} argument for \code{\link{maxLik}}), or the log-likelihood function returns a vector of observation-specific values. } \author{ Arne Henningsen, Ott Toomet } \seealso{\code{\link{hessian}}, \code{\link[sandwich]{estfun}}, \code{\link{maxLik}}.} \examples{ ## ML estimation of exponential duration model: t <- rexp(10, 2) loglik <- function(theta) log(theta) - theta*t ## Estimate with numeric gradient and hessian a <- maxLik(loglik, start=1 ) gradient(a) # Extract the gradients evaluated at each observation library( sandwich ) estfun( a ) ## Estimate with analytic gradient. ## Note: it returns a vector gradlik <- function(theta) 1/theta - t b <- maxLik(loglik, gradlik, start=1) gradient(a) estfun( b ) } \keyword{methods} maxLik/man/maxBFGS.Rd0000644000175100001440000002172514077525067014032 0ustar hornikusers\name{maxBFGS} \alias{maxBFGS} \alias{maxCG} \alias{maxSANN} \alias{maxNM} \title{BFGS, conjugate gradient, SANN and Nelder-Mead Maximization} \description{ These functions are wrappers for \code{\link{optim}}, adding constrained optimization and fixed parameters. } \usage{ maxBFGS(fn, grad=NULL, hess=NULL, start, fixed=NULL, control=NULL, constraints=NULL, finalHessian=TRUE, parscale=rep(1, length=length(start)), ... ) maxCG(fn, grad=NULL, hess=NULL, start, fixed=NULL, control=NULL, constraints=NULL, finalHessian=TRUE, parscale=rep(1, length=length(start)), ...) maxSANN(fn, grad=NULL, hess=NULL, start, fixed=NULL, control=NULL, constraints=NULL, finalHessian=TRUE, parscale=rep(1, length=length(start)), ... 
) maxNM(fn, grad=NULL, hess=NULL, start, fixed=NULL, control=NULL, constraints=NULL, finalHessian=TRUE, parscale=rep(1, length=length(start)), ...) } \arguments{ \item{fn}{function to be maximised. Must have the parameter vector as the first argument. In order to use numeric gradient and BHHH method, \code{fn} must return a vector of observation-specific likelihood values. Those are summed internally where necessary. If the parameters are out of range, \code{fn} should return \code{NA}. See details for constant parameters.} \item{grad}{gradient of \code{fn}. Must have the parameter vector as the first argument. If \code{NULL}, numeric gradient is used (\code{maxNM} and \code{maxSANN} do not use gradient). Gradient may return a matrix, where columns correspond to the parameters and rows to the observations (useful for maxBHHH). The columns are summed internally.} \item{hess}{Hessian of \code{fn}. Not used by any of these methods, included for compatibility with \code{\link{maxNR}}.} \item{start}{initial values for the parameters. If start values are named, those names are also carried over to the results.} \item{fixed}{parameters to be treated as constants at their \code{start} values. If present, it is treated as an index vector of \code{start} parameters.} \item{control}{list of control parameters or a \sQuote{MaxControl} object. If it is a list, the default values are used for the parameters that are left unspecified by the user. These functions accept the following parameters: \describe{ \item{reltol}{sqrt(.Machine$double.eps), stopping condition. Relative convergence tolerance: the algorithm stops if the relative improvement between iterations is less than \sQuote{reltol}. Note: for compatibility reason \sQuote{tol} is equivalent to \sQuote{reltol} for optim-based optimizers. } \item{iterlim}{integer, maximum number of iterations. Default values are 200 for \sQuote{BFGS}, 500 (\sQuote{CG} and \sQuote{NM}), and 10000 (\sQuote{SANN}). Note that \sQuote{iteration} may mean different things for different optimizers. } \item{printLevel}{integer, larger number prints more working information. Default 0, no information. } \item{nm_alpha}{1, Nelder-Mead simplex method reflection coefficient (see Nelder & Mead, 1965) } \item{nm_beta}{0.5, Nelder-Mead contraction coefficient} \item{nm_gamma}{2, Nelder-Mead expansion coefficient} % SANN \item{sann_cand}{\code{NULL} or a function for \code{"SANN"} algorithm to generate a new candidate point; if \code{NULL}, Gaussian Markov kernel is used (see argument \code{gr} of \code{\link{optim}}).} \item{sann_temp}{10, starting temperature for the \dQuote{SANN} cooling schedule. See \code{\link{optim}}.} \item{sann_tmax}{10, number of function evaluations at each temperature for the \dQuote{SANN} optimizer. See \code{\link{optim}}.} \item{sann_randomSeed}{123, integer to seed random numbers to ensure replicability of \dQuote{SANN} optimization and preserve \code{R} random numbers. Use options like \code{sann_randomSeed=Sys.time()} or \code{sann_randomSeed=sample(100,1)} if you want stochastic results. } } } \item{constraints}{either \code{NULL} for unconstrained optimization or a list with two components. The components may be either \code{eqA} and \code{eqB} for equality-constrained optimization \eqn{A \theta + B = 0}{A \%*\% theta + B = 0}; or \code{ineqA} and \code{ineqB} for inequality constraints \eqn{A \theta + B > 0}{A \%*\% theta + B > 0}. 
More than one row in \code{ineqA} and \code{ineqB} corresponds to more than one linear constraint, in that case all these must be zero (equality) or positive (inequality constraints). The equality-constrained problem is forwarded to \code{\link{sumt}}, the inequality-constrained case to \code{\link{constrOptim2}}. } \item{finalHessian}{how (and if) to calculate the final Hessian. Either \code{FALSE} (not calculate), \code{TRUE} (use analytic/numeric Hessian) or \code{"bhhh"}/\code{"BHHH"} for information equality approach. The latter approach is only suitable for maximizing log-likelihood function. It requires the gradient/log-likelihood to be supplied by individual observations, see \code{\link{maxBHHH}} for details. } \item{parscale}{A vector of scaling values for the parameters. Optimization is performed on 'par/parscale' and these should be comparable in the sense that a unit change in any element produces about a unit change in the scaled value. (see \code{\link{optim}})} \item{\dots}{further arguments for \code{fn} and \code{grad}.} } \details{ In order to provide a consistent interface, all these functions also accept arguments that other optimizers use. For instance, \code{maxNM} accepts the \sQuote{grad} argument despite being a gradient-less method. The \sQuote{state} (or \sQuote{seed}) of R's random number generator is saved at the beginning of the \code{maxSANN} function and restored at the end of this function so this function does \emph{not} affect the generation of random numbers although the random seed is set to argument \code{random.seed} and the \sQuote{SANN} algorithm uses random numbers. } \value{ object of class "maxim". Data can be extracted through the following functions: \item{maxValue}{\code{fn} value at maximum (the last calculated value if not converged.)} \item{coef}{estimated parameter value.} \item{gradient}{vector, last calculated gradient value. Should be close to 0 in case of normal convergence.} \item{estfun}{matrix of gradients at parameter value \code{estimate} evaluated at each observation (only if \code{grad} returns a matrix or \code{grad} is not specified and \code{fn} returns a vector).} \item{hessian}{Hessian at the maximum (the last calculated value if not converged).} \item{returnCode}{integer. Success code, 0 is success (see \code{\link{optim}}).} \item{returnMessage}{ a short message, describing the return code.} \item{activePar}{logical vector, which parameters are optimized over. Contains only \code{TRUE}-s if no parameters are fixed.} \item{nIter}{number of iterations. Two-element integer vector giving the number of calls to \code{fn} and \code{gr}, respectively. This excludes those calls needed to compute the Hessian, if requested, and any calls to \code{fn} to compute a finite-difference approximation to the gradient.} \item{maximType}{character string, type of maximization.} \item{maxControl}{the optimization control parameters in the form of a \code{\linkS4class{MaxControl}} object.} The following components can only be extracted directly (with \code{\$}): \item{constraints}{A list, describing the constrained optimization (\code{NULL} if unconstrained). 
Includes the following components: \describe{ \item{type}{type of constrained optimization} \item{outer.iterations}{number of iterations in the constraints step} \item{barrier.value}{value of the barrier function} } } } \author{Ott Toomet, Arne Henningsen} \seealso{\code{\link{optim}}, \code{\link{nlm}}, \code{\link{maxNR}}, \code{\link{maxBHHH}}, \code{\link{maxBFGSR}} for a \code{\link{maxNR}}-based BFGS implementation.} \references{ Nelder, J. A. & Mead, R. A, Simplex Method for Function Minimization, The Computer Journal, 1965, 7, 308-313 } \examples{ # Maximum Likelihood estimation of Poissonian distribution n <- rpois(100, 3) loglik <- function(l) n*log(l) - l - lfactorial(n) # we use numeric gradient summary(maxBFGS(loglik, start=1)) # you would probably prefer mean(n) instead of that ;-) # Note also that maxLik is better suited for Maximum Likelihood ### ### Now an example of constrained optimization ### f <- function(theta) { x <- theta[1] y <- theta[2] exp(-(x^2 + y^2)) ## you may want to use exp(- theta \%*\% theta) instead } ## use constraints: x + y >= 1 A <- matrix(c(1, 1), 1, 2) B <- -1 res <- maxNM(f, start=c(1,1), constraints=list(ineqA=A, ineqB=B), control=list(printLevel=1)) print(summary(res)) } \keyword{optimize} maxLik/man/nParam.Rd0000644000175100001440000000237415124512743014007 0ustar hornikusers\name{nParam.maxim} \alias{nParam.maxim} \title{Number of model parameters} \description{ This function returns the number of model parameters. } \usage{ \method{nParam}{maxim}(x, free=FALSE, \dots) } \arguments{ \item{x}{a model returned by a maximisation method from the \pkg{maxLik} package.} \item{free}{logical, whether to report only the free parameters or the total number of parameters (default)} \item{\dots}{other arguments for methods} } \details{ Free parameters are the parameters with no equality restrictions. Some parameters may be jointly restricted (e.g. sum of two probabilities equals unity). In this case the total number of parameters may depend on the normalization. } \value{ Number of parameters in the model } \author{Ott Toomet} \seealso{\code{\link[miscTools]{nObs}} for number of observations} \examples{ ## fit a normal distribution by ML # generate a variable from normally distributed random numbers x <- rnorm( 100, 1, 2 ) # log likelihood function (for individual observations) llf <- function( param ) { return( dnorm( x, mean = param[ 1 ], sd = param[ 2 ], log = TRUE ) ) } ## ML method ml <- maxLik( llf, start = c( mu = 0, sigma = 1 ) ) # return number of parameters nParam( ml ) } \keyword{methods} maxLik/man/returnCode.Rd0000644000175100001440000000552214600003172014666 0ustar hornikusers\name{returnCode} \alias{returnCode} \alias{returnCode.default} \alias{returnCode.maxLik} \alias{returnMessage} \alias{returnMessage.default} \alias{returnMessage.maxim} \alias{returnMessage.maxLik} \title{Success or failure of the optimization} \description{ These function extract success or failure information from optimization objects. The \code{returnCode} gives a numeric code, and \code{returnMessage} a brief description about the success or failure of the optimization, and point to the problems occured (see documentation for the corresponding functions). } \usage{ returnCode(x, ...) \method{returnCode}{default}(x, ...) \method{returnCode}{maxLik}(x, ...) returnMessage(x, ...) \method{returnMessage}{maxim}(x, ...) \method{returnMessage}{maxLik}(x, ...) 
} \arguments{ \item{x}{object, usually an optimization result} \item{...}{further arguments for other methods} } \details{ \code{returnMessage} and \code{returnCode} are a generic functions, with methods for various optimisation algorithms. The message should either describe the convergence (stopping condition), or the problem. The known codes and the related messages are: \describe{ \item{1}{ gradient close to zero (normal convergence).} \item{2}{ successive function values within tolerance limit (normal convergence).} \item{3}{ last step could not find higher value (probably not converged). This is related to line search step getting too small, usually because hitting the boundary of the parameter space. It may also be related to attempts to move to a wrong direction because of numerical errors. In some cases it can be helped by changing \code{steptol}.} \item{4}{ iteration limit exceeded.} \item{5}{ Infinite value.} \item{6}{ Infinite gradient.} \item{7}{ Infinite Hessian.} \item{8}{Successive function values withing relative tolerance limit (normal convergence).} \item{9}{ (BFGS) Hessian approximation cannot be improved because of gradient did not change. May be related to numerical approximation problems or wrong analytic gradient. } \item{10}{ Lost patience: the optimizer has hit an inferior value too many times (see \code{\link{maxSGA}} for more information) } \item{100}{ Initial value out of range.} } } \value{ Integer for \code{returnCode}, character for \code{returnMessage}. Different optimization routines may define it in a different way. } \author{Ott Toomet} \seealso{\code{\link{maxNR}}, \code{\link{maxBFGS}}} \examples{ ## maximise the exponential bell f1 <- function(x) exp(-x^2) a <- maxNR(f1, start=2) returnCode(a) # should be success (1 or 2) returnMessage(a) ## Now try to maximise log() function a <- maxNR(log, start=2) returnCode(a) # should give a failure (4) returnMessage(a) } \keyword{methods} \keyword{utilities} maxLik/man/reexports.Rd0000644000175100001440000000047314077525067014633 0ustar hornikusers\docType{import} \name{reexports} \alias{reexports} \alias{tidy} \alias{glance} \title{Objects exported from other packages} \keyword{internal} \description{ These objects are imported from the "generics" package. See \code{\link[generics:tidy]{tidy}} and \code{\link[generics:glance]{glance}} for details. } maxLik/man/maxControl.Rd0000644000175100001440000002452614600003077014713 0ustar hornikusers\name{MaxControl-class} \Rdversion{1.1} \docType{class} \alias{MaxControl-class} \alias{maxControl} \alias{maxControl,MaxControl-method} \alias{maxControl,missing-method} \alias{maxControl,maxim-method} \alias{show,MaxControl-method} \title{Class \code{"MaxControl"}} \description{ This is the structure that holds the optimization control options. The corresponding constructors take the parameters, perform consistency checks, and return the control structure. Alternatively, it overwrites the supplied parameters in an existing \code{MaxControl} structure. There is also a method to extract the control structure from the estimated \sQuote{maxim}-objects. } \section{Slots}{ The default values and definition of the slots: \describe{ \item{tol}{1e-8, stopping condition for \code{\link{maxNR}} and related optimizers. Stop if the absolute difference between successive iterations is less than \code{tol}, returns code 2.} \item{reltol}{sqrt(.Machine$double.eps), relative convergence tolerance (used by \code{\link{maxNR}} related optimizers, and \code{\link{optim}}-based optimizers. 
The algorithm stops if an iteration fails to increase the value by more than
\code{reltol*(abs(val) + reltol)}. Returns code 2.}
\item{gradtol}{1e-6, stopping condition for \code{\link{maxNR}} and related
optimizers. Stops if norm of the gradient is less than \code{gradtol}, returns
code 1.}
\item{steptol}{1e-10, stopping/error condition for \code{\link{maxNR}} and related
optimizers. If \code{qac == "stephalving"} and the quadratic approximation leads to a
worse, instead of a better value, or to \code{NA}, the step length is halved and a
new attempt is made. If necessary, this procedure is repeated until
\code{step < steptol}, thereafter code 3 is returned.}
%
\item{lambdatol}{1e-6, (for \code{\link{maxNR}} related optimizers) controls whether
Hessian is treated as negative definite. If the largest of the eigenvalues of the
Hessian is larger than \code{-lambdatol} (Hessian is not negative definite), a
suitable diagonal matrix is subtracted from the Hessian (quadratic hill-climbing) in
order to enforce negative definiteness.}
%
\item{qac}{"stephalving", character, Quadratic Approximation Correction for
\code{\link{maxNR}} related optimizers. When the new guess is worse than the initial
one, the program attempts to correct it: \code{"stephalving"} decreases the step but
keeps the direction. \code{"marquardt"} uses the \cite{Marquardt (1963)} method by
decreasing the step length while also moving closer to the pure gradient direction.
It may be a faster and more robust choice in areas where the quadratic approximation
behaves poorly.}
\item{qrtol}{1e-10, QR-decomposition tolerance for Hessian inversion in
\code{\link{maxNR}} related optimizers. }
\item{marquardt_lambda0}{0.01, a positive numeric, initial correction term for the
\cite{Marquardt (1963)} correction in \code{\link{maxNR}}-related optimizers}
\item{marquardt_lambdaStep}{2, how much the \cite{Marquardt (1963)} correction is
decreased/increased at a successful/unsuccessful step for \code{\link{maxNR}} related
optimizers}
\item{marquardt_maxLambda}{1e12, maximum allowed correction term for
\code{\link{maxNR}} related optimizers. If exceeded, the algorithm exits with return
code 3.}
%
\item{nm_alpha}{1, Nelder-Mead simplex method reflection factor (see Nelder & Mead,
1965)}
\item{nm_beta}{0.5, Nelder-Mead contraction factor}
\item{nm_gamma}{2, Nelder-Mead expansion factor}
% SANN
\item{sann_cand}{\code{NULL} or a function for \code{"SANN"} algorithm to generate a
new candidate point; if \code{NULL}, Gaussian Markov kernel is used (see argument
\code{gr} of \code{\link{optim}}).}
\item{sann_temp}{10, starting temperature for the \dQuote{SANN} cooling schedule.
See \code{\link{optim}}.}
\item{sann_tmax}{10, number of function evaluations at each temperature for the
\dQuote{SANN} optimizer. See \code{\link{optim}}.}
\item{sann_randomSeed}{123, integer to seed random numbers to ensure replicability of
\dQuote{SANN} optimization and preserve \code{R} random numbers. Use options like
\code{sann_randomSeed=Sys.time()} or \code{sann_randomSeed=sample(1000,1)} if you
want stochastic results. }
% SG general
General options for stochastic gradient methods:
\item{SG_learningRate}{0.1, learning rate, numeric}
\item{SG_batchSize}{\code{NULL}, batch size for Stochastic Gradient Ascent. A
positive integer, or \code{NULL} for full-batch gradient ascent.}
\item{SG_clip}{\code{NULL}, gradient clipping threshold. This is the max allowed
squared Euclidean norm of the gradient.
If the actual norm of the gradient exceeds (square root of) this threshold, the gradient will be scaled back accordingly while preserving its direction. \code{NULL} means no clipping. } \item{SG_patience}{\code{NULL}, or integer. Stopping condition: if the objective function is worse than its largest value so far this many times, the algorithm stops, and returns not the last parameter value but the one that gave the best results so far. This is mostly useful if gradient is computed on training data and the objective function on validation data. } \item{SG_patienceStep}{1L, integer. After how many epochs to check the patience value. 1 means to check (and hence to compute the objective function) at each epoch. } % Stochastic Gradient Ascent Options for SGA: \item{SGA_momentum}{0, numeric momentum parameter for SGA. Must lie in interval \eqn{[0,1]}{[0,1]}. } % Adam Options for Adam: \item{Adam_momentum1}{0.9, numeric in \eqn{[0,1]}{[0,1]}, the first moment momentum} \item{Adam_momentum2}{0.999, numeric in \eqn{[0,1]}{[0,1]}, the second moment momentum} % general General options: \item{iterlim}{150, stopping condition (the default differs for different methods). Stop if more than \code{iterlim} iterations performed. Note that \sQuote{iteration} may mean different things for different optimizers.} \item{max.rows}{20, maximum number of matrix rows to be printed when requesting verbosity in the optimizers. } \item{max.cols}{7, maximum number of columns to be printed. This also applies to vectors that are printed horizontally. } \item{printLevel}{0, the level of verbosity. Larger values print more information. Result depends on the optimizer. Form \code{print.level} is also accepted by the methods for compatibility.} \item{storeParameters}{\code{FALSE}, whether to store and return the parameter values at each epoch. If \code{TRUE}, the stored values can be retrieved with \code{\link{storedParameters}}-method. The parameters are stored as a matrix with rows corresponding to the epochs and columns to the parameter components. } \item{storeValues}{\code{FALSE}, whether to store and return the objective function values at each epoch. If \code{TRUE}, the stored values can be retrieved with \code{\link{storedValues}}-method.} } } \section{Methods}{ \describe{ \item{maxControl}{\code{(\dots)} creates a \dQuote{MaxControl} object. The arguments must be in the form \code{option1 = value1, option2 = value2, ...}. The options should be slot names, but the method also supports selected other parameter forms for compatibility reasons e.g. \dQuote{print.level} instead of \dQuote{printLevel}. In case there are more than one option with similar name, the last one overwrites the previous values. This allows the user to override default parameters in the control list. See example in \link{maxLik-package}. } \item{maxControl}{\code{(x = "MaxControl", \dots)} overwrites parameters of an existing \dQuote{MaxControl} object. The \sQuote{\dots} argument must be in the form \code{option1 = value1, option2 = value2, ...}. In case there are more than one option with similar name, only the last one is taken into account. This allows the user to override default parameters in the control list. See example in \link{maxLik-package}. 
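For instance, \code{maxControl(tol=1e-4, printLevel=2)} returns a \dQuote{MaxControl}
object where only these two options differ from their default values (see also the
examples below).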
} \item{maxControl}{\code{(x = "maxim")} extracts \dQuote{MaxControl} structure from an estimated model} \item{show}{shows the parameter values} } } \section{Details}{ Typically, the control options are supplied in the form of a list, in which case the corresponding default values are overwritten by the user-specified ones. However, one may also create the control structure by \code{maxControl(opt1=value1, opt2=value2, ...)} and supply such value directly to the optimizer. In this case the optimization routine takes all the values from the control object. } \references{ \itemize{ \item Nelder, J. A. & Mead, R. A (1965) Simplex Method for Function Minimization \emph{The Computer Journal} \bold{7}, 308--313 \item Marquardt, D. W. (1963) An Algorithm for Least-Squares Estimation of Nonlinear Parameters \emph{Journal of the Society for Industrial and Applied Mathematics} \bold{11}, 431--441 } } \author{ Ott Toomet } \note{ Several control parameters can also be supplied directly to the optimization routines. } \examples{ library(maxLik) ## Create a 'maxControl' object: maxControl(tol=1e-4, sann_tmax=7, printLevel=2) ## Optimize quadratic form t(D) %*% W %*% D with p.d. weight matrix, ## s.t. constraints sum(D) = 1 quadForm <- function(D) { return(-t(D) \%*\% W \%*\% D) } eps <- 0.1 W <- diag(3) + matrix(runif(9), 3, 3)*eps D <- rep(1/3, 3) # initial values ## create control object and use it for optimization co <- maxControl(printLevel=2, qac="marquardt", marquardt_lambda0=1) res <- maxNR(quadForm, start=D, control=co) print(summary(res)) ## Now perform the same with no trace information co <- maxControl(co, printLevel=0) res <- maxNR(quadForm, start=D, control=co) # no tracing information print(summary(res)) # should be the same as above maxControl(res) # shows the control structure } \keyword{utilities} maxLik/man/maximType.Rd0000644000175100001440000000144214077525067014552 0ustar hornikusers\name{maximType} \alias{maximType} \alias{maximType.default} \alias{maximType.maxim} \alias{maximType.MLEstimate} \title{Type of Minimization/Maximization} \description{ Returns the type of optimization as supplied by the optimisation routine. } \usage{ maximType(x) } \arguments{ \item{x}{object of class 'maxim' or another object which involves numerical optimisation. } } \value{ A text message, describing the involved optimisation algorithm } \author{Ott Toomet} \seealso{\code{\link{maxNR}}} \examples{ ## maximize two-dimensional exponential hat. True maximum c(2,1): f <- function(a) exp(-(a[1] - 2)^2 - (a[2] - 1)^2) m <- maxNR(f, start=c(0,0)) coef(m) maximType(m) ## Now use BFGS maximisation. m <- maxBFGS(f, start=c(0,0)) maximType(m) } \keyword{optimize} \keyword{methods} maxLik/man/maxLik-package.Rd0000644000175100001440000000727514077525067015425 0ustar hornikusers\name{maxLik-package} \alias{maxLik-package} \docType{package} \title{ Maximum Likelihood Estimation } \description{ This package contains a set of functions and tools for Maximum Likelihood (ML) estimation. The focus of the package is on non-linear optimization from the ML viewpoint, and it provides several convenience wrappers and tools, like BHHH algorithm, variance-covariance matrix and standard errors. } \details{ \pkg{maxLik} package is a set of convenience tools and wrappers focusing on Maximum Likelihood (ML) analysis, but it also contains tools for other optimization tasks. 
The package includes a) wrappers for several existing optimizers (implemented by \code{\link[stats:optim]{optim}}); b) original optimizers, including Newton-Raphson and Stochastic Gradient Ascent; and c) several convenience tools to use these optimizers from the ML perspective. Examples are BHHH optimization (\code{\link{maxBHHH}}) and utilities that extract standard errors from the estimates. Other highlights include a unified interface for all included optimizers, tools to test user-provided analytic derivatives, and constrained optimization. A good starting point to learn about the usage of \pkg{maxLik} are the included vignettes \dQuote{Introduction: what is maximum likelihood}, \dQuote{Maximum likelihood estimation with maxLik} and \dQuote{Stochastic Gradient Ascent in maxLik}. Another good source is Henningsen & Toomet (2011), an introductory paper to the package. Use \code{vignette(package="maxLik")} to see the available vignettes, and \code{vignette("using-maxlik")} to read the usage vignette. From the user's perspective, the central function in the package is \code{\link{maxLik}}. In its simplest form it takes two arguments: the log-likelihood function, and a vector of initial parameter values (see the example below). It returns an object of class \sQuote{maxLik} with convenient methods such as \code{\link[=summary.maxLik]{summary}}, \code{\link[=coef.maxLik]{coef}}, and \code{\link[=stdEr.maxLik]{stdEr}}. It also supports a plethora of other arguments, for instance one can supply analytic gradient and Hessian, select the desired optimizer, and control the optimization in different ways. A useful utility functions in the package is \code{\link{compareDerivatives}} that allows one to compare the analytic and numeric derivatives for debugging purposes. Another useful function is \code{\link{condiNumber}} for analyzing multicollinearity problems in the estimated models. In the interest of providing a unified user interface, all the optimizers are implemented as maximizers in this package. This includes the \code{\link{optim}}-based methods, such as \code{\link{maxBFGS}} and \code{\link{maxSGA}}, the maximizer version of popular Stochastic Gradient Descent. } \author{ Ott Toomet , Arne Henningsen , with contributions from Spencer Graves, Yves Croissant and David Hugh-Jones. Maintainer: Ott Toomet } \references{ Henningsen A, Toomet O (2011). \dQuote{maxLik: A package for maximum likelihood estimation in R.} Computational Statistics, 26(3), 443-458. doi: \doi{10.1007/s00180-010-0217-1}. } \keyword{Basics|package} \keyword{Mathematics|optimize} \examples{ ### estimate mean and variance of normal random vector ## create random numbers where mu=1, sd=2 set.seed(123) x <- rnorm(50, 1, 2 ) ## log likelihood function. ## Note: 'param' is a 2-vector c(mu, sd) llf <- function(param) { mu <- param[1] sd <- param[2] llValue <- dnorm(x, mean=mu, sd=sd, log=TRUE) sum(llValue) } ## Estimate it with mu=0, sd=1 as start values ml <- maxLik(llf, start = c(mu=0, sigma=1) ) print(summary(ml)) ## Estimates close to c(1,2) :-) } maxLik/man/fnSubset.Rd0000644000175100001440000000506114077525067014367 0ustar hornikusers\name{fnSubset} \alias{fnSubset} \title{ Call fnFull with variable and fixed parameters } \description{ Combine variable parameters with with fixed parameters and pass to \code{fnFull}. Useful for optimizing over a subset of parameters without writing a separate function. Values are combined by name if available. Otherwise, \code{xFull} is constructed by position (the default). 
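For instance, with \code{x = 1}, \code{xFixed = c(a = 4)}, and
\code{xFull = c(a = 1, b = 2)}, the call is made with \code{c(a = 4, b = 1)}:
\code{xFixed} is matched by name and \code{x} fills the remaining position (see the
examples below). Without names, the default \code{xFull = c(x, xFixed)} is used.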
} \usage{ fnSubset(x, fnFull, xFixed, xFull=c(x, xFixed), ...) } \arguments{ \item{x}{ Variable parameters to be passed to \code{fnFull}. } \item{fnFull}{ Function whose first argument has length = length(xFull). } \item{xFixed}{ Parameter values to be combined with \code{x} to construct the first argument for a call to \code{fnFull}. } \item{xFull}{ Prototype initial argument for \code{fnFull}. } \item{\dots}{ Optional arguments passed to \code{fnFull}. } } \details{ This function first confirms that \code{length(x) + length(xFixed) == length(xFull)}. Next, \itemize{ \item If \code{xFull} has names, match at least \code{xFixed} by name. \item Else \code{xFull = c(x, xFixes)}, the default. } Finally, call \code{fnFull(xFull, ...)}. } \value{ value returned by \code{fnFull} } %\references{ } \author{ Spencer Graves } \seealso{ \code{\link{optim}} \code{\link[dlm]{dlmMLE}} \code{\link{maxLik}} \code{\link{maxNR}} } \examples{ ## ## Example with 'optim' ## fn <- function(x) (x[2]-2*x[1])^2 # note: true minimum is 0 on line 2*x[1] == x[2] fullEst <- optim(par=c(1,1), method="BFGS", fn=fn) fullEst$par # par = c(0.6, 1.2) at minimum (not convex) # Fix the last component to 4 est4 <- optim(par=1, fn=fnSubset, method="BFGS", fnFull=fn, xFixed=4) est4$par # now there is a unique minimun x[1] = 2 # Fix the first component fnSubset(x=1, fnFull=fn, xFixed=c(a=4), xFull=c(a=1, b=2)) # After substitution: xFull = c(a=4, b=1), # so fn = (1 - 2*4)^2 = (-7)^2 = 49 est4. <- optim(par=1, fn=fnSubset, method="BFGS", fnFull=fn, xFixed=c(a=4), xFull=c(a=1, b=2)) est4.$par # At optimum: xFull=c(a=4, b=8), # so fn = (8 - 2*4)^2 = 0 ## ## Example with 'maxLik' ## fn2max <- function(x) -(x[2]-2*x[1])^2 # -> need to have a maximum max4 <- maxLik(fnSubset, start=1, fnFull=fn2max, xFixed=4) summary(max4) # Similar result using fixed parameters in maxNR, called by maxLik max4. <- maxLik(fn2max, start=c(1, 4), fixed=2) summary(max4.) } \keyword{optimize} \keyword{utilities} maxLik/man/confint.maxLik.Rd0000644000175100001440000000261314077525067015462 0ustar hornikusers\name{confint.maxLik} \alias{confint.maxLik} \alias{confint} \title{confint method for maxLik objects} \description{ Wald confidence intervals for Maximum Likelihood Estimates } \usage{ \method{confint}{maxLik}(object, parm, level=0.95, ...) } \arguments{ \item{object}{ object of class \dQuote{maxLik} returned by \code{\link{maxLik}} function } \item{parm}{the name of parameters to compute the confidence intervals. If omitted, confidence intervals for all parameters are computed.} \item{level}{the level of confidence interval } \item{\dots}{additional arguments to be passed to the other methods } } \value{ A matrix of lower and upper confidence interval limits (in the first and second column respectively). The matrix rows are labeled by the parameter names (if any) and columns by the corresponding distribution quantiles. } \seealso{ \code{\link[stats]{confint}} for the generic \code{confint} function, \code{\link[=stdEr.maxLik]{stdEr}} for computing standard errors and \code{\link[=summary.maxLik]{summary}} for summary output that includes statistical significance information. 
} \author{Luca Scrucca} \examples{ ## compute MLE parameters of normal random sample x <- rnorm(100) loglik <- function(theta) { dnorm(x, mean=theta[1], sd=theta[2], log=TRUE) } m <- maxLik(loglik, start=c(mu=0, sd=1)) summary(m) confint(m) confint(m, "mu", level=0.1) } maxLik/man/objectiveFn.Rd0000644000175100001440000000145414077525067015036 0ustar hornikusers\name{objectiveFn} \alias{objectiveFn} \alias{objectiveFn.maxim} \title{Optimization Objective Function} \description{ This function returns the optimization objective function from a \sQuote{maxim} object. } \usage{ objectiveFn(x, \dots) \method{objectiveFn}{maxim}(x, \dots) } \arguments{ \item{x}{an optimization result, inheriting from class \sQuote{maxim}} \item{\dots}{other arguments for methods} } \value{ function, the function that was optimized. It can be directly called, given that all necessary variables are accessible from the current environment. } \author{Ott Toomet} \examples{ hatf <- function(theta) exp(- theta \%*\% theta) res <- maxNR(hatf, start=c(0,0)) print(summary(res)) print(objectiveFn(res)) print(objectiveFn(res)(2)) # 0.01832 } \keyword{methods} \keyword{optimize} maxLik/man/maxSGA.Rd0000644000175100001440000003567614077525067013735 0ustar hornikusers\name{maxSGA} \alias{maxSGA} \alias{maxAdam} \title{Stochastic Gradient Ascent} \description{ Stochastic Gradient Ascent--based optimizers } \usage{ maxSGA(fn = NULL, grad = NULL, hess = NULL, start, nObs, constraints = NULL, finalHessian = FALSE, fixed = NULL, control=NULL, ... ) maxAdam(fn = NULL, grad = NULL, hess = NULL, start, nObs, constraints = NULL, finalHessian = FALSE, fixed = NULL, control=NULL, ... ) } \arguments{ \item{fn}{the function to be maximized. As the objective function values are not directly used for optimization, this argument is optional, given \code{grad} is provided. It must have the parameter vector as the first argument, and it must have an argument \code{index} to specify the integer index of the selected observations. It must return either a single number, or a numeric vector (this is is summed internally). If the parameters are out of range, \code{fn} should return \code{NA}. See details for constant parameters. \code{fn} may also return attributes "gradient" and/or "hessian". If these attributes are set, the algorithm uses the corresponding values as gradient and Hessian. } \item{grad}{gradient of the objective function. It must have the parameter vector as the first argument, and it must have an argument \code{index} to specify the integer index of selected observations. It must return either a gradient vector of the objective function, or a matrix, where columns correspond to individual parameters. The column sums are treated as gradient components. If \code{NULL}, finite-difference gradients are computed. If \code{fn} returns an object with attribute \code{gradient}, this argument is ignored. If \code{grad} is not supplied, it is computed by finite-difference method using \code{fn}. However, this is only adviseable for small-scale tests, not for any production run. Obviously, \code{fn} must be correctly defined in that case. } \item{hess}{Hessian matrix of the function. Mainly for compatibility reasons, only used for computing the final Hessian if asked to do so by setting \code{finalHessian} to \code{TRUE}. It must have the parameter vector as the first argument and it must return the Hessian matrix of the objective function. 
If missing, either finite-difference Hessian, based on \code{gradient} or BHHH approach is computed if asked to do so. } \item{start}{initial parameter values. If these have names, the names are also used for results.} \item{nObs}{number of observations. This is used to partition the data into individual batches. The resulting batch indices are forwarded to the \code{grad} function through the argument \code{index}.} \item{constraints}{either \code{NULL} for unconstrained optimization or a list with two components. The components may be either \code{eqA} and \code{eqB} for equality-constrained optimization \eqn{A \theta + B = 0}{A \%*\% theta + B = 0}; or \code{ineqA} and \code{ineqB} for inequality constraints \eqn{A \theta + B > 0}{A \%*\% theta + B > 0}. More than one row in \code{ineqA} and \code{ineqB} corresponds to more than one linear constraint, in that case all these must be zero (equality) or positive (inequality constraints). The equality-constrained problem is forwarded to \code{\link{sumt}}, the inequality-constrained case to \code{\link{constrOptim2}}. } \item{finalHessian}{how (and if) to calculate the final Hessian. Either \code{FALSE} (do not calculate), \code{TRUE} (use analytic/finite-difference Hessian) or \code{"bhhh"}/\code{"BHHH"} for the information equality approach. The latter approach is only suitable when working with a log-likelihood function, and it requires the gradient/log-likelihood to be supplied by individual observations. Hessian matrix is not often used for optimization problems where one applies SGA, but even if one is not interested in standard errors, it may provide useful information about the model performance. If computed by finite-difference method, the Hessian computation may be very slow. } \item{fixed}{parameters to be treated as constants at their \code{start} values. If present, it is treated as an index vector of \code{start} parameters.} \item{control}{list of control parameters. The ones used by these optimizers are \describe{ \item{SGA_momentum}{0, numeric momentum parameter for SGA. Must lie in interval \eqn{[0,1]}{[0,1]}. See details. } Adam-specific parameters \item{Adam_momentum1}{0.9, numeric in interval \eqn{(0,1)}{(0,1)}, the first moment momentum} \item{Adam_momentum2}{0.999, numeric in interval \eqn{(0,1)}{(0,1)}, the second moment momentum} General stochastic gradient parameters: \item{SG_learningRate}{step size the SGA algorithm takes in the gradient direction. If 1, the step equals to the gradient value. A good value is often 0.01--0.3} \item{SG_batchSize}{SGA batch size, an integer between 1 and \code{nObs}. If \code{NULL} (default), the full batch gradient is computed. } \item{SG_clip}{\code{NULL}, gradient clipping threshold. The algorithm ensures that \eqn{||g(\theta)||_2^2 \le \kappa}{norm(gradient)^2 <= kappa} where \eqn{\kappa}{kappa} is the \code{SG_clip} value. If the actual norm of the gradient exceeds (square root of) \eqn{\kappa}{kappa}, the gradient will be scaled back accordingly while preserving its direction. \code{NULL} means no clipping. } Stopping conditions: \item{gradtol}{stopping condition. Stop if norm of the gradient is less than \code{gradtol}. Default 0, i.e. do not use this condition. This condition is useful if the objective is to drive full batch gradient to zero on training data. It is not a good objective in case of the stochastic gradient, and if the objective is to optimize the objective on validation data. } \item{SG_patience}{\code{NULL}, or integer. 
Stopping condition: the algorithm counts how many times the objective function has
been worse than its best value so far, and if this exceeds \code{SG_patience}, the
algorithm stops.
}
\item{SG_patienceStep}{1L, integer. After how many epochs to check the patience
value. \code{1} means to check at each epoch, and hence to compute the objective
function. This may be undesirable if the objective function is costly to compute.
}
\item{iterlim}{stopping condition. Stop if more than \code{iterlim} epochs, return
\code{code=4}. Epoch is a set of iterations that cycles through all observations. In
case of full batch, iterations and epochs are equivalent. If \code{iterlim = 0}, the
algorithm does not do any learning and returns the initial values unchanged.
}
\item{printLevel}{this argument determines the level of printing which is done during
the optimization process. The default value 0 means that no printing occurs, 1 prints
the initial and final details, 2 prints all the main tracing information for every
epoch. Higher values will result in even more output.
}
\item{storeParameters}{logical, whether to store and return the parameter values at
each epoch. If \code{TRUE}, the stored values can be retrieved with
\code{\link{storedParameters}}-method. The parameters are stored as a matrix with
rows corresponding to the epochs and columns to the parameter components. There are
\code{iterlim} + 1 rows, where the first one corresponds to the initial parameters.
Default \code{FALSE}.
}
\item{storeValues}{logical, whether to store and return the objective function values
at each epoch. If \code{TRUE}, the stored values can be retrieved with
\code{\link{storedValues}}-method. There are \code{iterlim} + 1 values, where the
first one corresponds to the value at the initial parameters. Default \code{FALSE}.
}
}
See \code{\link{maxControl}} for more information.
}
\item{\dots}{further arguments to \code{fn}, \code{grad} and \code{hess}. To maintain
compatibility with the earlier versions, \dots also passes certain control options to
the optimizers.
}
}
\details{
Gradient Ascent (GA) is an optimization method where the algorithm repeatedly takes
small steps in the gradient's direction: the parameter vector \eqn{\theta}{theta} is
updated as
\eqn{\theta \leftarrow \theta + \mathrm{learning rate}\cdot \nabla f(\theta)}{theta <- theta + learning rate * gradient f(theta)}.
In case of Stochastic GA (SGA), the gradient is not computed on the full set of
observations but on a small subset, \emph{batch}, potentially a single observation
only. In certain circumstances this converges much faster than when using all
observations (see \cite{Bottou et al, 2018}).

If \code{SGA_momentum} is positive, the SGA algorithm updates the parameters
\eqn{\theta}{theta} in two steps. First, the momentum is used to update the
\dQuote{velocity} \eqn{v}{v} as
\eqn{v \leftarrow \mathrm{momentum}\cdot v + \mathrm{learning rate}\cdot \nabla f(\theta)}{v <- momentum*v + learning rate * gradient f(theta)},
and thereafter the parameter \eqn{\theta}{theta} is updated as
\eqn{\theta \leftarrow \theta + v}{theta <- theta + v}. Initial velocity is set to 0.

The Adam algorithm is more complex and uses first and second moments of stochastic
gradients to automatically adjust the learning rate. See \cite{Goodfellow et al,
2016, page 301}.

The function \code{fn} is not directly used for optimization, only for printing or as
a stopping condition. In this sense it is up to the user to decide what the function
returns, if anything.
For instance, it may be useful for \code{fn} to compute the objective function on either full training data, or on validation data, and just ignore the \code{index} argument. The latter is useful if using \emph{patience}-based stopping. However, one may also choose to select the observations determined by the index to compute the objective function on the current data batch. % Does it support contraints? } \value{ object of class "maxim". Data can be extracted through the following methods: \item{\code{\link{maxValue}}}{\code{fn} value at maximum (the last calculated value if not converged.)} \item{\code{\link{coef}}}{estimated parameter value.} \item{\code{\link{gradient}}}{vector, last calculated gradient value. Should be close to 0 in case of normal convergence.} \item{estfun}{matrix of gradients at parameter value \code{estimate} evaluated at each observation (only if \code{grad} returns a matrix or \code{grad} is not specified and \code{fn} returns a vector).} \item{\code{\link{hessian}}}{Hessian at the maximum (the last calculated value if not converged).} \item{\code{\link{storedValues}}}{return values stored at each epoch} \item{\code{\link{storedParameters}}}{return parameters stored at each epoch} \item{\code{\link{returnCode}}}{ a numeric code that describes the convergence or error. } \item{\code{\link{returnMessage}}}{a short message, describing the return code.} \item{\code{\link{activePar}}}{logical vector, which parameters are optimized over. Contains only \code{TRUE}-s if no parameters are fixed.} \item{\code{\link{nIter}}}{number of iterations.} \item{\code{\link{maximType}}}{character string, type of maximization.} \item{\code{\link{maxControl}}}{the optimization control parameters in the form of a \code{\linkS4class{MaxControl}} object.} } \references{ Bottou, L.; Curtis, F. & Nocedal, J.: Optimization Methods for Large-Scale Machine Learning \emph{SIAM Review}, 2018, \bold{60}, 223--311. Goodfellow, I.; Bengio, Y.; Courville, A. (2016): Deep Learning, \emph{MIT Press} Henningsen, A. and Toomet, O. (2011): maxLik: A package for maximum likelihood estimation in R \emph{Computational Statistics} \bold{26}, 443--458 } \author{Ott Toomet, Arne Henningsen} \seealso{ A good starting point to learn about the usage of stochastic gradient ascent in \pkg{maxLik} package is the vignette \dQuote{Stochastic Gradient Ascent in maxLik}. The other related functions are \code{\link{maxNR}} for Newton-Raphson, a popular Hessian-based maximization; \code{\link{maxBFGS}} for maximization using the BFGS, Nelder-Mead (NM), and Simulated Annealing (SANN) method (based on \code{\link[stats]{optim}}), also supporting inequality constraints; \code{\link{maxLik}} for a general framework for maximum likelihood estimation (MLE); \code{\link{optim}} for different gradient-based optimization methods. } \examples{ ## estimate the exponential distribution parameter by ML set.seed(1) t <- rexp(100, 2) loglik <- function(theta, index) sum(log(theta) - theta*t[index]) ## Note the log-likelihood and gradient are summed over observations gradlik <- function(theta, index) sum(1/theta - t[index]) ## Estimate with full-batch a <- maxSGA(loglik, gradlik, start=1, control=list(iterlim=1000, SG_batchSize=10), nObs=100) # note that loglik is not really needed, and is not used # here, unless more print verbosity is asked summary(a) ## ## demonstrate the usage of index, and using ## fn for computing the objective function on validation data. 
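## (the gradient, in contrast, is computed only on the training batch selected by 'index')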
## Create a linear model where variables are very unequally scaled ## ## OLS loglik function: compute the function value on validation data only loglik <- function(beta, index) { e <- yValid - XValid \%*\% beta -crossprod(e)/length(y) } ## OLS gradient: compute it on training data only ## Use 'index' to select the subset corresponding to the minibatch gradlik <- function(beta, index) { e <- yTrain[index] - XTrain[index,,drop=FALSE] \%*\% beta g <- t(-2*t(XTrain[index,,drop=FALSE]) \%*\% e) -g/length(index) } N <- 1000 ## two random variables: one with scale 1, the other with 100 X <- cbind(rnorm(N), rnorm(N, sd=100)) beta <- c(1, 1) # true parameter values y <- X \%*\% beta + rnorm(N, sd=0.2) ## training-validation split iTrain <- sample(N, 0.8*N) XTrain <- X[iTrain,,drop=FALSE] XValid <- X[-iTrain,,drop=FALSE] yTrain <- y[iTrain] yValid <- y[-iTrain] ## ## do this without momentum: learning rate must stay small for the gradient not to explode cat(" No momentum:\n") a <- maxSGA(loglik, gradlik, start=c(10,10), control=list(printLevel=1, iterlim=50, SG_batchSize=30, SG_learningRate=0.0001, SGA_momentum=0 ), nObs=length(yTrain)) print(summary(a)) # the first component is off, the second one is close to the true value ## do with momentum 0.99 cat(" Momentum 0.99:\n") a <- maxSGA(loglik, gradlik, start=c(10,10), control=list(printLevel=1, iterlim=50, SG_batchSize=30, SG_learningRate=0.0001, SGA_momentum=0.99 # no momentum ), nObs=length(yTrain)) print(summary(a)) # close to true value } \keyword{optimize} maxLik/man/summary.maxLik.Rd0000644000175100001440000000436514077525067015525 0ustar hornikusers\name{summary.maxLik} \alias{summary.maxLik} \alias{coef.summary.maxLik} \title{summary the Maximum-Likelihood estimation} \description{ Summary the Maximum-Likelihood estimation including standard errors and t-values. } \usage{ \method{summary}{maxLik}(object, eigentol=1e-12, ... ) \method{coef}{summary.maxLik}(object, \ldots) } \arguments{ \item{object}{ object of class 'maxLik', or 'summary.maxLik', usually a result from Maximum-Likelihood estimation. } \item{eigentol}{ The standard errors are only calculated if the ratio of the smallest and largest eigenvalue of the Hessian matrix is less than \dQuote{eigentol}. Otherwise the Hessian is treated as singular. } \item{\ldots}{currently not used.} } \value{ An object of class 'summary.maxLik' with following components: \describe{ \item{type}{type of maximization.} \item{iterations}{number of iterations.} \item{code}{code of success.} \item{message}{a short message describing the code.} \item{loglik}{the loglik value in the maximum.} \item{estimate}{numeric matrix, the first column contains the parameter estimates, the second the standard errors, third t-values and fourth corresponding probabilities.} \item{fixed}{logical vector, which parameters are treated as constants.} \item{NActivePar}{number of free parameters.} \item{constraints}{information about the constrained optimization. Passed directly further from \code{maxim}-object. \code{NULL} if unconstrained maximization. } } } \author{Ott Toomet, Arne Henningsen} \seealso{ \code{\link{maxLik}} for maximum likelihood estimation, \code{\link{confint}} for confidence intervals, and \code{\link{tidy}} and \code{\link{glance}} for alternative quick summaries of the ML results. 
} \examples{ ## ML estimation of exponential distribution: t <- rexp(100, 2) loglik <- function(theta) log(theta) - theta*t gradlik <- function(theta) 1/theta - t hesslik <- function(theta) -100/theta^2 ## Estimate with numeric gradient and hessian a <- maxLik(loglik, start=1, control=list(printLevel=2)) summary(a) ## Estimate with analytic gradient and hessian a <- maxLik(loglik, gradlik, hesslik, start=1, control=list(printLevel=2)) summary(a) } \keyword{models} maxLik/man/hessian.Rd0000644000175100001440000000336714077525067014237 0ustar hornikusers\name{hessian} \alias{hessian} \alias{hessian.default} \title{Hessian matrix} \description{ This function extracts the Hessian of the objective function at optimum. The Hessian information should be supplied by the underlying optimization algorithm, possibly by an approximation. } \usage{ hessian(x, \dots) \method{hessian}{default}(x, \dots) } \arguments{ \item{x}{an optimization result of class \sQuote{maxim} or \sQuote{maxLik}} \item{\dots}{other arguments for methods} } \value{ A numeric matrix, the Hessian of the model at the estimated parameter values. If the maximum is flat, the Hessian is singular. In that case you may want to invert only the non-singular part of the matrix. You may also want to fix certain parameters (see \code{\link{activePar}}). } \author{Ott Toomet} \seealso{\code{\link{maxLik}}, \code{\link{activePar}}, \code{\link{condiNumber}}} \examples{ # log-likelihood for normal density # a[1] - mean # a[2] - standard deviation ll <- function(a) sum(-log(a[2]) - (x - a[1])^2/(2*a[2]^2)) x <- rnorm(100) # sample from standard normal ml <- maxLik(ll, start=c(1,1)) # ignore eventual warnings "NaNs produced in: log(x)" summary(ml) # result should be close to c(0,1) hessian(ml) # How the Hessian looks like sqrt(-solve(hessian(ml))) # Note: standard deviations are on the diagonal # # Now run the same example while fixing a[2] = 1 mlf <- maxLik(ll, start=c(1,1), activePar=c(TRUE, FALSE)) summary(mlf) # first parameter close to 0, the second exactly 1.0 hessian(mlf) # Note that now NA-s are in place of passive # parameters. # now invert only the free parameter part of the Hessian sqrt(-solve(hessian(mlf)[activePar(mlf), activePar(mlf)])) # gives the standard deviation for the mean } \keyword{methods} \keyword{optimize} maxLik/man/activePar.Rd0000644000175100001440000000267215124512703014504 0ustar hornikusers\name{activePar} \alias{activePar} \alias{activePar.default} \title{free parameters under maximization} \description{ Return a logical vector, indicating which parameters were free under maximization, as opposed to the fixed parameters that are treated as constants. See argument \dQuote{fixed} for \code{\link{maxNR}}. } \usage{ activePar(x, \dots) \method{activePar}{default}(x, \dots) } \arguments{ \item{x}{object, created by a maximization routine, such as \code{\link{maxNR}} or \code{\link{maxLik}}, or derived from a maximization object. } \item{\dots}{further arguments for methods} } \details{ Several optimization routines allow the user to fix some parameter values (or do it automatically in some cases). For gradient or Hessian based inference one has to know which parameters carry optimization-related information. } \value{ A logical vector, indicating whether the parameters were free to change during optimization algorithm. } \author{Ott Toomet} \seealso{\code{\link{maxNR}}, \code{\link[miscTools]{nObs}}} \examples{ ## a two-dimensional exponential hat f <- function(a) exp(-a[1]^2 - a[2]^2) ## maximize wrt. 
both parameters free <- maxNR(f, start=1:2) summary(free) # results should be close to (0,0) activePar(free) ## keep the first parameter constant cons <- maxNR(f, start=1:2, fixed=c(TRUE,FALSE)) summary(cons) # result should be around (1,0) activePar(cons) } \keyword{methods} \keyword{optimize} maxLik/man/maxNR.Rd0000644000175100001440000004167114600005731013611 0ustar hornikusers\name{maxNR} \alias{maxNR} \alias{maxBFGSR} \alias{maxBHHH} \title{Newton- and Quasi-Newton Maximization} \description{ Unconstrained and equality-constrained maximization based on the quadratic approximation (Newton) method. The Newton-Raphson, BFGS (Broyden 1970, Fletcher 1970, Goldfarb 1970, Shanno 1970), and BHHH (Berndt, Hall, Hall, Hausman 1974) methods are available. } \usage{ maxNR(fn, grad = NULL, hess = NULL, start, constraints = NULL, finalHessian = TRUE, bhhhHessian=FALSE, fixed = NULL, activePar = NULL, control=NULL, ... ) maxBFGSR(fn, grad = NULL, hess = NULL, start, constraints = NULL, finalHessian = TRUE, fixed = NULL, activePar = NULL, control=NULL, ... ) maxBHHH(fn, grad = NULL, hess = NULL, start, finalHessian = "BHHH", ... ) } \arguments{ \item{fn}{the function to be maximized. It must have the parameter vector as the first argument and it must return either a single number, or a numeric vector (this is is summed internally). If the BHHH method is used and argument \code{gradient} is not given, \code{fn} must return a numeric vector of observation-specific log-likelihood values. If the parameters are out of range, \code{fn} should return \code{NA}. See details for constant parameters. \code{fn} may also return attributes "gradient" and/or "hessian". If these attributes are set, the algorithm uses the corresponding values as gradient and Hessian. } \item{grad}{gradient of the objective function. It must have the parameter vector as the first argument and it must return either a gradient vector of the objective function, or a matrix, where \emph{columns} correspond to individual parameters. The column sums are treated as gradient components. If \code{NULL}, finite-difference gradients are computed. If BHHH method is used, \code{grad} must return a matrix, where rows corresponds to the gradient vectors for individual observations and the columns to the individual parameters. If \code{fn} returns an object with attribute \code{gradient}, this argument is ignored. } \item{hess}{Hessian matrix of the function. It must have the parameter vector as the first argument and it must return the Hessian matrix of the objective function. If missing, finite-difference Hessian, based on \code{gradient}, is computed. Hessian is used by the Newton-Raphson method only, and eventually by the other methods if \code{finalHessian} is requested.} \item{start}{initial parameter values. If start values are named, those names are also carried over to the results.} \item{constraints}{either \code{NULL} for unconstrained optimization or a list with two components. The components may be either \code{eqA} and \code{eqB} for equality-constrained optimization \eqn{A \theta + B = 0}{A \%*\% theta + B = 0}; or \code{ineqA} and \code{ineqB} for inequality constraints \eqn{A \theta + B > 0}{A \%*\% theta + B > 0}. More than one row in \code{ineqA} and \code{ineqB} corresponds to more than one linear constraint, in that case all these must be zero (equality) or positive (inequality constraints). The equality-constrained problem is forwarded to \code{\link{sumt}}, the inequality-constrained case to \code{\link{constrOptim2}}. 
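For instance, the equality constraint \eqn{x + y = 1}{x + y = 1} used in the example
below can be written as
\preformatted{
A <- matrix(c(1, 1), 1, 2)
B <- -1
constraints <- list(eqA = A, eqB = B)  # i.e. A \%*\% theta + B = 0
}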
} \item{finalHessian}{how (and if) to calculate the final Hessian. Either \code{FALSE} (do not calculate), \code{TRUE} (use analytic/finite-difference Hessian) or \code{"bhhh"}/\code{"BHHH"} for the information equality approach. The latter approach is only suitable for maximizing log-likelihood functions. It requires the gradient/log-likelihood to be supplied by individual observations. Note that computing the (actual, not BHHH) final Hessian does not carry any extra penalty for the NR method, but does for the other methods.} \item{bhhhHessian}{logical. Indicating whether to use the information equality approximation (Bernd, Hall, Hall, and Hausman, 1974) for the Hessian. This effectively transforms \code{maxNR} into \code{maxBHHH} and is mainly designed for internal use.} \item{fixed}{parameters to be treated as constants at their \code{start} values. If present, it is treated as an index vector of \code{start} parameters.} \item{activePar}{this argument is retained for backward compatibility only; please use argument \code{fixed} instead.} \item{control}{list of control parameters. The control parameters used by these optimizers are \describe{ \item{tol}{\eqn{10^{-8}}{1e-8}, stopping condition. Stop if the absolute difference between successive iterations is less than \code{tol}. Return \code{code=2}. If set to a negative value, the criterion is never fulfilled, and hence disabled. } \item{reltol}{sqrt(.Machine$double.eps), stopping condition. Relative convergence tolerance: the algorithm stops if the relative improvement between iterations is less than \sQuote{reltol}. Return code 8. Negative value disables condition. } \item{gradtol}{stopping condition. Stop if norm of the gradient is less than \code{gradtol}. Return code 1. Negative value disables condition.} \item{steptol}{1e-10, stopping/error condition. If \code{qac == "stephalving"} and the quadratic approximation leads to a worse, instead of a better value, or to \code{NA}, the step length is halved and a new attempt is made. If necessary, this procedure is repeated until step < \code{steptol}, thereafter code 3 is returned.} \item{lambdatol}{\eqn{10^{-6}}{1e-6}, controls whether Hessian is treated as negative definite. If the largest of the eigenvalues of the Hessian is larger than \code{-lambdatol} (Hessian is not negative definite), a suitable diagonal matrix is subtracted from the Hessian (quadratic hill-climbing) in order to enforce negative definiteness. } \item{qrtol}{\eqn{10^{-10}}{1e-10}, QR-decomposition tolerance for the Hessian inversion. } \item{qac}{"stephalving", Quadratic Approximation Correction. When the new guess is worse than the initial one, the algorithm attemts to correct it: "stephalving" decreases the step but keeps the direction, "marquardt" uses \cite{Marquardt (1963)} method by decreasing the step length while also moving closer to the pure gradient direction. It may be faster and more robust choice in areas where quadratic approximation behaves poorly. \code{maxNR} and \code{maxBHHH} only. } \item{marquardt_lambda0}{\eqn{10^{-2}}{1e-2}, positive numeric, initial correction term for \cite{Marquardt (1963)} correction. } \item{marquardt_lambdaStep}{2, how much the \cite{Marquardt (1963)} correction term is decreased/increased at each successful/unsuccesful step. \code{maxNR} and \code{maxBHHH} only. } \item{marquardt_maxLambda}{\eqn{10^{12}}{1e12}, maximum allowed \cite{Marquardt (1963)} correction term. If exceeded, the algorithm exits with return code 3. \code{maxNR} and \code{maxBHHH} only. 
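For instance, the Marquardt correction can be selected with
\code{control=list(qac="marquardt", marquardt_lambda0=1)}; see also the example in
\code{\link{maxControl}}.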
} \item{iterlim}{stopping condition. Stop if more than \code{iterlim} iterations, return \code{code=4}.} \item{printLevel}{this argument determines the level of printing which is done during the optimization process. The default value 0 means that no printing occurs, 1 prints the initial and final details, 2 prints all the main tracing information for every iteration. Higher values will result in even more output. } } } \item{\dots}{further arguments to \code{fn}, \code{grad} and \code{hess}. Further arguments to \code{maxBHHH} are also passed to \code{maxNR}. To maintain compatibility with the earlier versions, \dots also passes a number of control options (\code{tol}, \code{reltol}, \code{gradtol}, \code{steptol}, \code{lambdatol}, \code{qrtol}, \code{iterlim}) to the optimizers. } } \details{ The idea of the Newton method is to approximate the function at a given location by a multidimensional quadratic function, and use the estimated maximum as the start value for the next iteration. Such an approximation requires knowledge of both gradient and Hessian, the latter of which can be quite costly to compute. Several methods for approximating Hessian exist, including BFGS and BHHH. The BHHH (information equality) approximation is only valid for log-likelihood functions. It requires the score (gradient) values by individual observations and hence those must be returned by individual observations by \code{grad} or \code{fn}. The Hessian is approximated as the negative of the sum of the outer products of the gradients of individual observations, or, in the matrix form, \deqn{ \mathsf{H}^{BHHH} = -\frac{1}{N} \sum_{i=1}^N \left[ \frac{\partial \ell(\boldsymbol{\vartheta})} {\boldsymbol{\vartheta}} \frac{\partial \ell(\boldsymbol{\vartheta})} {\boldsymbol{\vartheta}'} \right] }{ \code{H = -t(gradient) \%*\% gradient = - crossprod( gradient )}. } The functions \code{maxNR}, \code{maxBFGSR}, and \code{maxBHHH} can work with constant parameters, useful if a parameter value converges to the boundary of support, or for testing. One way is to put \code{fixed} to non-NULL, specifying which parameters should be treated as constants. The parameters can also be fixed in runtime (only for \code{maxNR} and \code{maxBHHH}) by signaling it with the \code{fn} return value. See Henningsen & Toomet (2011) for details. } \value{ object of class "maxim". Data can be extracted through the following methods: \item{\code{\link{maxValue}}}{\code{fn} value at maximum (the last calculated value if not converged.)} \item{\code{\link[=coef.maxim]{coef}}}{estimated parameter value.} \item{gradient}{vector, last calculated gradient value. Should be close to 0 in case of normal convergence.} \item{estfun}{matrix of gradients at parameter value \code{estimate} evaluated at each observation (only if \code{grad} returns a matrix or \code{grad} is not specified and \code{fn} returns a vector).} \item{hessian}{Hessian at the maximum (the last calculated value if not converged).} \item{returnCode}{return code: \describe{ \item{1}{ gradient close to zero (normal convergence).} \item{2}{ successive function values within tolerance limit (normal convergence).} \item{3}{ last step could not find higher value (probably not converged). This is related to line search step getting too small, usually because hitting the boundary of the parameter space. It may also be related to attempts to move to a wrong direction because of numerical errors. 
In some cases it can be helped by changing \code{steptol}.} \item{4}{ iteration limit exceeded.} \item{5}{infinite value.} \item{6}{infinite gradient.} \item{7}{infinite Hessian.} \item{8}{successive function values within relative tolerance limit (normal convergence).} \item{9}{(BFGS) Hessian approximation cannot be improved because of gradient did not change. May be related to numerical approximation problems or wrong analytic gradient.} \item{100}{ Initial value out of range.} } } \item{returnMessage}{ a short message, describing the return code.} \item{activePar}{logical vector, which parameters are optimized over. Contains only \code{TRUE}-s if no parameters are fixed.} \item{nIter}{number of iterations.} \item{maximType}{character string, type of maximization.} \item{maxControl}{the optimization control parameters in the form of a \code{\linkS4class{MaxControl}} object.} The following components can only be extracted directly (with \code{\$}): \item{last.step}{a list describing the last unsuccessful step if \code{code=3} with following components: \describe{ \item{theta0}{ previous parameter value} \item{f0}{ \code{fn} value at \code{theta0}} \item{climb}{ the movement vector to the maximum of the quadratic approximation} } } \item{constraints}{A list, describing the constrained optimization (\code{NULL} if unconstrained). Includes the following components: \describe{ \item{type}{ type of constrained optimization} \item{outer.iterations}{ number of iterations in the constraints step} \item{barrier.value}{ value of the barrier function} } } } \section{Warning}{ No attempt is made to ensure that user-provided analytic gradient/Hessian is correct. The users are encouraged to use \code{\link{compareDerivatives}} function, designed for this purpose. If analytic gradient/Hessian are wrong, the algorithm may not converge, or may converge to a wrong point. As the BHHH method uses the likelihood-specific information equality, it is only suitable for maximizing log-likelihood functions! Quasi-Newton methods, including those mentioned above, do not work well in non-concave regions. This is especially the case with the implementation in \code{maxBFGSR}. The user is advised to experiment with various tolerance options to achieve convergence. } \references{ Berndt, E., Hall, B., Hall, R. and Hausman, J. (1974): Estimation and Inference in Nonlinear Structural Models, \emph{Annals of Social Measurement} \bold{3}, 653--665. Broyden, C.G. (1970): The Convergence of a Class of Double-rank Minimization Algorithms, \emph{Journal of the Institute of Mathematics and Its Applications} \bold{6}, 76--90. Fletcher, R. (1970): A New Approach to Variable Metric Algorithms, \emph{Computer Journal} \bold{13}, 317--322. Goldfarb, D. (1970): A Family of Variable Metric Updates Derived by Variational Means, \emph{Mathematics of Computation} \bold{24}, 23--26. Henningsen, A. and Toomet, O. (2011): maxLik: A package for maximum likelihood estimation in R \emph{Computational Statistics} \bold{26}, 443--458 Marquardt, D.W., (1963) An Algorithm for Least-Squares Estimation of Nonlinear Parameters, \emph{Journal of the Society for Industrial & Applied Mathematics} \bold{11}, 2, 431--441 Shanno, D.F. (1970): Conditioning of Quasi-Newton Methods for Function Minimization, \emph{Mathematics of Computation} \bold{24}, 647--656. 
}
\author{Ott Toomet, Arne Henningsen; the function \code{maxBFGSR} was
  originally developed by Yves Croissant (and placed in the 'mlogit'
  package)}
\seealso{\code{\link{maxLik}} for a general framework for maximum
  likelihood estimation (MLE);
  \code{\link{maxBHHH}} for maximization using the Berndt, Hall, Hall,
  Hausman (1974) algorithm (a wrapper for \code{maxNR});
  \code{\link{maxBFGS}} for maximization using the BFGS, Nelder-Mead (NM),
  and Simulated Annealing (SANN) methods (based on \code{\link{optim}}),
  also supporting inequality constraints;
  \code{\link{nlm}} for Newton-Raphson optimization; and
  \code{\link{optim}} for different gradient-based optimization methods.}
\examples{
## Fit exponential distribution by ML
t <- rexp(100, 2)  # create data with parameter 2
loglik <- function(theta) sum(log(theta) - theta*t)
## Note the log-likelihood and gradient are summed over observations
gradlik <- function(theta) sum(1/theta - t)
hesslik <- function(theta) -100/theta^2
## Estimate with finite-difference gradient and Hessian
a <- maxNR(loglik, start=1, control=list(printLevel=2))
summary(a)
## You would probably prefer 1/mean(t) instead ;-)

## The same example with analytic gradient and Hessian
a <- maxNR(loglik, gradlik, hesslik, start=1)
summary(a)

## BFGS estimation with finite-difference gradient
a <- maxBFGSR( loglik, start=1 )
summary(a)

## For the BHHH method we need likelihood values and gradients
## of individual observations, not the sum of those
loglikInd <- function(theta) log(theta) - theta*t
gradlikInd <- function(theta) 1/theta - t
## Estimate with analytic gradient
a <- maxBHHH(loglikInd, gradlikInd, start=1)
summary(a)

## Example with a vector argument:  Estimate the mean and
## variance of a random normal sample by maximum likelihood
## Note: you might want to use maxLik instead
loglik <- function(param) {
  # param is a 2-vector of c(mean, sd)
  mu <- param[1]
  sigma <- param[2]
  ll <- -0.5*N*log(2*pi) - N*log(sigma) - sum(0.5*(x - mu)^2/sigma^2)
  ll
}
x <- rnorm(100, 1, 2) # use mean=1, sd=2
N <- length(x)
res <- maxNR(loglik, start=c(0,1)) # use 'wrong' start values
summary(res)

## The previous example with named parameters and a fixed value
resFix <- maxNR(loglik, start=c(mu=0, sigma=1), fixed="sigma")
summary(resFix)  # 'sigma' is exactly 1.000 now.

### Constrained optimization ###
## We maximize exp(-x^2 - y^2) where x+y = 1
hatf <- function(theta) {
  x <- theta[1]
  y <- theta[2]
  exp(-(x^2 + y^2))
  ## Note: you may prefer exp(- theta \%*\% theta) instead
}
## use constraints: x + y = 1
A <- matrix(c(1, 1), 1, 2)
B <- -1
res <- maxNR(hatf, start=c(0,0), constraints=list(eqA=A, eqB=B),
             control=list(printLevel=1))
print(summary(res))
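
## A further sketch: checking an analytic gradient against numerical
## derivatives with compareDerivatives(), as suggested in the Warning
## section.  'expLogLik', 'expGrad' and 'fit' are new names introduced
## only here; they re-use the exponential sample 't' from the first
## example.
expLogLik <- function(theta) sum(log(theta) - theta*t)
expGrad   <- function(theta) sum(1/theta - t)
compareDerivatives(expLogLik, expGrad, t0=1)

## Extracting results from a fitted 'maxim' object with the accessor
## methods listed under 'Value':
fit <- maxNR(expLogLik, expGrad, start=1)
coef(fit)            # parameter estimate
maxValue(fit)        # log-likelihood value at the maximum
returnCode(fit)      # numeric convergence code
returnMessage(fit)   # its verbal description
nIter(fit)           # number of iterations
activePar(fit)       # which parameters were optimized over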
}
\keyword{optimize}
maxLik/DESCRIPTION0000644000175100001440000000247415124517754013245 0ustar  hornikusersPackage: maxLik
Version: 1.5-2.2
Title: Maximum Likelihood Estimation and Related Tools
Authors@R: c(person("Ott", "Toomet", role=c("aut", "cre"),
                    email="otoomet@gmail.com"),
             person("Arne", "Henningsen", role=c("aut"),
                    email="arne.henningsen@gmail.com"),
             person("Spencer", "Graves", role=c("ctb")),
             person("Yves", "Croissant", role=c("ctb")),
             person("David", "Hugh-Jones", role=c("ctb")),
             person("Luca", "Scrucca", role=c("ctb")))
Depends: R (>= 2.4.0), miscTools (>= 0.6-8), methods
Imports: sandwich, generics
Suggests: MASS, clue, dlm, plot3D, tibble, tinytest
Description: Functions for Maximum Likelihood (ML) estimation, non-linear
        optimization, and related tools.  It includes a unified way to
        call different optimizers, and classes and methods to handle the
        results from the Maximum Likelihood viewpoint.  It also includes
        a number of convenience tools for testing and developing your own
        models.
License: GPL (>= 2)
ByteCompile: yes
NeedsCompilation: no
Packaged: 2025-12-29 15:06:20 UTC; hornik
Author: Ott Toomet [aut, cre],
  Arne Henningsen [aut],
  Spencer Graves [ctb],
  Yves Croissant [ctb],
  David Hugh-Jones [ctb],
  Luca Scrucca [ctb]
Maintainer: Ott Toomet <otoomet@gmail.com>
Repository: CRAN
Date/Publication: 2025-12-29 15:36:12 UTC