## File: e1071/tests/clustering.R

## cmeans clustering should also work on data frames
library(e1071)
data(iris)
set.seed(123)
cm1 <- cmeans(iris[,1:4], 10)
bc1 <- bclust(iris[,1:4], 3, base.centers = 20, iter.base = 50,
              base.method = "cmeans")
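Example (editor's sketch, not part of the package sources): inspecting a fuzzy c-means fit beyond the smoke test above. Only the `cmeans()` interface exercised by the test is assumed; `membership`, `centers`, and `cluster` are the components of the returned `fclust` object.

## Illustrative only; parameter values are arbitrary.
library(e1071)
data(iris)
set.seed(123)
cm <- cmeans(iris[, 1:4], centers = 3, m = 2)
head(round(cm$membership, 2))    # fuzzy membership degrees per observation
cm$centers                       # cluster prototypes
table(cm$cluster, iris$Species)  # crisp assignment vs. true species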
## File: e1071/R/sigmoid.R

sigmoid <- function(x) 1/(1 + exp(-x))
dsigmoid <- function(x) sigmoid(x) * (1 - sigmoid(x))
d2sigmoid <- function(x) dsigmoid(x) * (1 - 2 * sigmoid(x))

## File: e1071/R/hamming.window.R

hamming.window <- function (n)
{
    if (n == 1)
        c <- 1
    else {
        n <- n - 1
        c <- 0.54 - 0.46 * cos(2 * pi * (0:n)/n)
    }
    return(c)
}

## File: e1071/R/stft.R

stft <- function(X, win = min(80, floor(length(X)/10)),
                 inc = min(24, floor(length(X)/30)),
                 coef = 64, wtype = "hanning.window")
{
    numcoef <- 2 * coef
    if (win > numcoef) {
        win <- numcoef
        cat("stft: window size adjusted to", win, ".\n")
    }
    numwin <- trunc((length(X) - win) / inc)

    ## compute the window coefficients
    wincoef <- eval(parse(text = wtype))(win)

    ## create a matrix Z whose columns contain the windowed time-slices
    z <- matrix(0, numwin + 1, numcoef)
    y <- z
    st <- 1
    for (i in 0:numwin) {
        z[i+1, 1:win] <- X[st:(st+win-1)] * wincoef
        y[i+1,] <- fft(z[i+1,])
        st <- st + inc
    }

    Y <- list(values = Mod(y[, 1:coef]), windowsize = win,
              increment = inc, windowtype = wtype)
    class(Y) <- "stft"
    return(Y)
}
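Example (editor's sketch, not part of the package sources): a short-time Fourier transform of a toy chirp, displayed with `plot.stft()` (defined later in this listing). The signal and parameter values are arbitrary; `hanning.window()` is provided elsewhere in the package.

x <- sin(2 * pi * (1:2000) * seq(5, 50, length.out = 2000) / 1000)
s <- stft(x, win = 80, inc = 24, coef = 64)
plot(s)   # dispatches to plot.stft(), see R/plot.stft.R below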
## File: e1071/R/moment.R

moment <- function(x, order = 1, center = FALSE, absolute = FALSE,
                   na.rm = FALSE)
{
    if (na.rm)
        x <- x[!is.na(x)]
    if (center)
        x <- x - mean(x)
    if (absolute)
        x <- abs(x)
    sum(x ^ order) / length(x)
}

## File: e1071/R/lca.R

lca <- function(x, k, niter = 100, matchdata = FALSE, verbose = FALSE)
{
    ## if x is a data matrix -> create patterns
    if (is.matrix(x)) {
        if (matchdata) {
            x <- countpattern(x, matching = TRUE)
            xmat <- x$matching
            x <- x$pat
        }
        else
            x <- countpattern(x, matching = FALSE)
    }
    else
        ## if no data is given, matchdata must be FALSE
        matchdata <- FALSE

    n <- sum(x)
    npat <- length(x)
    nvar <- round(log(npat)/log(2))

    ## build matrix of all possible binary vectors
    b <- matrix(0, 2^nvar, nvar)
    for (i in 1:nvar)
        b[, nvar+1-i] <- rep(rep(c(0,1), c(2^(i-1), 2^(i-1))), 2^(nvar-i))

    ## initialize probabilities
    classprob <- runif(k)
    classprob <- classprob/sum(classprob)
    names(classprob) <- 1:k
    p <- matrix(runif(nvar*k), k)
    pas <- matrix(0, k, npat)
    classsize <- numeric(k)

    for (i in 1:niter) {
        for (j in 1:k) {
            ## P(pattern|class)
            mp <- t(b)*p[j,] + (1-t(b))*(1-p[j,])
            pas[j,] <- drop(exp(rep(1,nvar) %*% log(mp)))  # column product
        }
        ## P(pattern|class)*P(class)
        pas <- pas * classprob
        ## P(class|pattern)
        sump <- drop(rep(1,k) %*% pas)  # column sums
        pas <- t(t(pas)/sump)
        spas <- t(t(pas)*x)
        classsize <- drop(spas %*% rep(1,npat))  # row sums
        classprob <- classsize/n
        p <- pas %*% (x*b)/classsize
        if (verbose)
            cat("Iteration:", i, "\n")
    }

    for (j in 1:k) {
        mp <- t(b)*p[j,] + (1-t(b))*(1-p[j,])
        pas[j,] <- drop(exp(rep(1,nvar) %*% log(mp))) * classprob[j]  # column product
    }

    ## log-likelihood
    pmust <- drop(rep(1,k) %*% pas)  # column sums
    ll <- sum(x*log(pmust))

    ## likelihood quotient
    xg0 <- x[x > 0]
    ll0 <- sum(xg0*log(xg0/n))
    lq <- 2*(ll0-ll)

    ## BIC
    bic <- -2*ll + log(n)*(k*(nvar+1)-1)
    bicsat <- -2*ll0 + log(n)*(2^nvar-1)

    ## chi-square
    ch <- sum((x-n*pmust)^2/(n*pmust))

    ## P(class|pattern)
    sump <- drop(rep(1,k) %*% pas)  # column sums
    pas <- t(t(pas)/sump)
    mat <- max.col(t(pas))
    if (matchdata)
        mat <- mat[xmat]

    colnames(p) <- 1:nvar
    rownames(p) <- 1:k

    lcaresult <- list(classprob = classprob, p = p, matching = mat,
                      logl = ll, loglsat = ll0, chisq = ch, lhquot = lq,
                      bic = bic, bicsat = bicsat, n = n,
                      np = (k*(nvar+1)-1), matchdata = matchdata)
    class(lcaresult) <- "lca"
    return(lcaresult)
}

print.lca <- function(x, ...)
{
    cat("LCA-Result\n")
    cat("----------\n\n")
    cat("Datapoints:", x$n, "\n")
    cat("Classes:   ", length(x$classprob), "\n")
    cat("Probability of classes\n")
    print(round(x$classprob, 3))
    cat("Item probabilities\n")
    print(round(x$p, 2))
}

summary.lca <- function(object, ...)
{
    nvar <- ncol(object$p)
    object$npsat <- 2^nvar - 1
    object$df <- 2^nvar - 1 - object$np
    object$pvallhquot <- 1 - pchisq(object$lhquot, object$df)
    object$pvalchisq <- 1 - pchisq(object$chisq, object$df)
    object$k <- length(object$classprob)
    ## remove unnecessary list elements
    object$classprob <- NULL
    object$p <- NULL
    object$matching <- NULL
    class(object) <- "summary.lca"
    return(object)
}
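Example (editor's sketch, not part of the package sources): fitting a two-class latent class model to artificial binary data. Everything used here (`lca()`, `summary.lca()`, 0/1 matrix input handled via `countpattern()`) is defined in this listing; the data-generating parameters are arbitrary.

set.seed(42)
n <- 500
cls <- rbinom(n, 1, 0.4)                              # hidden class indicator
x <- matrix(rbinom(n * 4, 1, 0.2 + 0.6 * cls), nrow = n)
fit <- lca(x, k = 2, niter = 100)
fit            # print.lca(): class and item probabilities
summary(fit)   # goodness-of-fit statistics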
{ cat("LCA-Result\n") cat("----------\n\n") cat("Datapoints:", x$n, "\n") cat("Classes: ", x$k, "\n") cat("\nGoodness of fit statistics:\n\n") cat("Number of parameters, estimated model:", x$np, "\n") cat("Number of parameters, saturated model:", x$npsat, "\n") cat("Log-Likelihood, estimated model: ", x$logl, "\n") cat("Log-Likelihood, saturated model: ", x$loglsat, "\n") cat("\nInformation Criteria:\n\n") cat("BIC, estimated model:", x$bic, "\n") cat("BIC, saturated model:", x$bicsat, "\n") cat("\nTestStatistics:\n\n") cat("Likelihood ratio: ", x$lhquot, " p-val:", x$pvallhquot, "\n") cat("Pearson Chi^2: ", x$chisq, " p-val:", x$pvalchisq, "\n") cat("Degress of freedom:", x$df, "\n") } bootstrap.lca <- function(l, nsamples=10, lcaiter=30, verbose=FALSE) { n <- l$n classprob <- l$classprob nclass <- length(l$classprob) prob <- l$p nvar <- ncol(l$p) npat <- 2^nvar ## build matrix of all possible binary vectors b <- matrix(0, npat, nvar) for (i in 1:nvar) b[, nvar+1-i] <- rep(rep(c(0,1),c(2^(i-1),2^(i-1))),2^(nvar-i)) ll <- lq <- ll0 <- ch <- numeric(nsamples) for (i in 1:nsamples) { ## generate data cm <- sample(1:nclass, size=n, replace=TRUE, prob=classprob) x <- matrix(runif(n*nvar), nrow=n) x <- (xX): ", x$pvalzratio, "\n") cat("P-Val: ", x$pvalratio, "\n\n") cat("Pearson's Chisquare\n\n") cat("Mean:", x$chisqmean, "\n") cat("SDev:", x$chisqsd, "\n") cat("Value in Data Set:", x$chisqorg, "\n") cat("Z-Statistics: ", x$zchisq, "\n") cat("P(Z>X): ", x$pvalzchisq, "\n") cat("P-Val: ", x$pvalchisq, "\n\n") } predict.lca <- function(object, x, ...) { if (object$matchdata) stop("predict.lca: only possible, if lca has been called with matchdata=FALSE") else { x <- countpattern(x, matching=TRUE) return(object$matching[x$matching]) } } e1071/R/gknn.R0000644000175100001440000000772314533650064012325 0ustar hornikusersgknn <- function(x, ...) UseMethod("gknn") gknn.formula <- function (formula, data = NULL, ..., subset, na.action = na.pass, scale = TRUE) { call <- match.call() if (!inherits(formula, "formula")) stop("method is only for formula objects") m <- match.call(expand.dots = FALSE) if (inherits(eval.parent(m$data), "matrix")) m$data <- as.data.frame(eval.parent(m$data)) m$... <- NULL m$scale <- NULL m[[1L]] <- quote(stats::model.frame) m$na.action <- na.action m <- eval(m, parent.frame()) Terms <- attr(m, "terms") attr(Terms, "intercept") <- 0 x <- model.matrix(Terms, m) y <- model.extract(m, "response") attr(x, "na.action") <- attr(y, "na.action") <- attr(m, "na.action") attr(x, "xlevels") <- .getXlevels(Terms, m) if (length(scale) == 1) scale <- rep(scale, ncol(x)) if (any(scale)) { remove <- unique(c(which(labels(Terms) %in% names(attr(x, "contrasts"))), which(!scale)) ) scale <- !attr(x, "assign") %in% remove } ret <- gknn.default (x, y, scale = scale, ..., na.action = na.action) ret$call <- call ret$call[[1]] <- as.name("gknn") ret$terms <- Terms ret$na.action <- attr(x, "na.action") class(ret) <- c("gknn.formula", class(ret)) return (ret) } gknn.default <- function(x, y, k = 1, method = NULL, scale = TRUE, use_all = TRUE, FUN = mean, ...) 
gknn.default <- function(x, y, k = 1, method = NULL,
                         scale = TRUE, use_all = TRUE,
                         FUN = mean, ...)
{
    if (length(scale) == 1)
        scale <- rep(scale, ncol(x))
    if (is.numeric(x) && any(scale)) {
        tmp <- scale(x[, scale])
        x[, scale] <- tmp
        attr(x, "scaled:center") <- attr(tmp, "scaled:center")
        attr(x, "scaled:scale") <- attr(tmp, "scaled:scale")
    }

    structure(list(x = x,
                   y = y,
                   k = k,
                   FUN = FUN,
                   method = method,
                   use_all = use_all,
                   scaled = is.numeric(x) && any(scale),
                   scale = scale),
              class = "gknn")
}

predict.gknn <- function(object, newdata,
                         type = c("class", "votes", "prob"),
                         ...,
                         na.action = na.pass)
{
    if (missing(newdata))
        return(fitted(object))

    type <- match.arg(type)

    if (inherits(object, "gknn.formula")) {
        if (is.null(colnames(newdata)))
            colnames(newdata) <- colnames(object$x)
        newdata <- na.action(newdata)
        act <- attr(newdata, "na.action")
        newdata <- model.matrix(delete.response(terms(object)),
                                as.data.frame(newdata),
                                xlev = attr(object$x, "xlevels"))
    }
    else {
        newdata <- na.action(as.matrix(newdata))
        act <- attr(newdata, "na.action")
    }

    if (object$scaled)
        newdata[, object$scale] <-
            scale(newdata[, object$scale, drop = FALSE],
                  center = attr(object$x, "scaled:center"),
                  scale  = attr(object$x, "scaled:scale"))

    d <- dist(object$x, newdata, method = object$method)

    FUN <- function(x) {
        o <- order(x)
        ks <- which(x[o][object$k] == x) ## check for ties on kth place
        if (!object$use_all)
            ks <- sample(c(ks, ks), 1) ## handle ties
        lab <- object$y[c(head(o[1:object$k], -1), ks)]
        if (is.numeric(lab))
            object$FUN(lab)
        else {
            tab <- table(lab)
            switch(type,
                   class = levels(object$y)[sample(rep(which(tab == max(tab)), 2), 1)], ## break class ties at random
                   prob = prop.table(tab),
                   tab)
        }
    }
    ret <- apply(d, 2, FUN)
    if (is.matrix(ret))
        t(ret)
    else if (is.numeric(object$y))
        napredict(act, ret)
    else
        napredict(act, factor(ret, levels = levels(object$y)))
}

print.gknn <- function(x, ...)
{
    cat("Object of class 'gknn'.\n")
}

fitted.gknn <- function(object, ...)
    napredict(object$na.action, object$y)
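Example (editor's sketch, not part of the package sources): k-nearest-neighbour classification with the formula interface defined above. The train/test split is arbitrary; `predict()` dispatches to `predict.gknn()`.

library(e1071)
data(iris)
set.seed(1)
tr <- sample(nrow(iris), 100)
m <- gknn(Species ~ ., data = iris[tr, ], k = 5)
pred <- predict(m, iris[-tr, -5])
table(pred, iris$Species[-tr])   # confusion matrix on the held-out rows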
} } } e1071/R/svm.R0000644000175100001440000006570114533645520012176 0ustar hornikuserssvm <- function (x, ...) UseMethod ("svm") svm.formula <- function (formula, data = NULL, ..., subset, na.action = na.omit, scale = TRUE) { call <- match.call() if (!inherits(formula, "formula")) stop("method is only for formula objects") m <- match.call(expand.dots = FALSE) if (inherits(eval.parent(m$data), "matrix")) m$data <- as.data.frame(eval.parent(m$data)) m$... <- NULL m$scale <- NULL m[[1L]] <- quote(stats::model.frame) m$na.action <- na.action m <- eval(m, parent.frame()) Terms <- attr(m, "terms") attr(Terms, "intercept") <- 0 x <- model.matrix(Terms, m) y <- model.extract(m, "response") attr(x, "na.action") <- attr(y, "na.action") <- attr(m, "na.action") attr(x, "xlevels") <- .getXlevels(Terms, m) if (length(scale) == 1) scale <- rep(scale, ncol(x)) if (any(scale)) { remove <- unique(c(which(labels(Terms) %in% names(attr(x, "contrasts"))), which(!scale) ) ) scale <- !attr(x, "assign") %in% remove } class(x) <- c("svm.formula", class(x)) ret <- svm.default (x, y, scale = scale, ..., na.action = na.action) ret$call <- call ret$call[[1]] <- as.name("svm") ret$terms <- Terms if (!is.null(attr(m, "na.action"))) ret$na.action <- attr(m, "na.action") class(ret) <- c("svm.formula", class(ret)) return (ret) } svm.default <- function (x, y = NULL, scale = TRUE, type = NULL, kernel = "radial", degree = 3, gamma = if (is.vector(x)) 1 else 1 / ncol(x), coef0 = 0, cost = 1, nu = 0.5, class.weights = NULL, cachesize = 40, tolerance = 0.001, epsilon = 0.1, shrinking = TRUE, cross = 0, probability = FALSE, fitted = TRUE, ..., subset, na.action = na.omit) { yorig <- y if(inherits(x, "Matrix")) { loadNamespace("SparseM") loadNamespace("Matrix") x <- as(x, "matrix.csr") } if(inherits(x, "simple_triplet_matrix")) { loadNamespace("SparseM") ind <- order(x$i, x$j) x <- new("matrix.csr", ra = x$v[ind], ja = x$j[ind], ia = as.integer(cumsum(c(1, tabulate(x$i[ind])))), dimension = c(x$nrow, x$ncol)) } if (sparse <- inherits(x, "matrix.csr")) loadNamespace("SparseM") ## NULL parameters? 
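Example (editor's sketch, not part of the package sources): a normal probability plot of simulated data, then marking the 95% quantile on the fitted line via `lines.probplot()` with `bend = TRUE`.

set.seed(7)
y <- rnorm(200, mean = 10, sd = 2)
pp <- probplot(y)                  # defaults to qnorm
lines(pp, h = 0.95, bend = TRUE)   # drop lines at the 95% quantile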
## File: e1071/R/svm.R

svm <- function (x, ...)
    UseMethod ("svm")

svm.formula <- function (formula, data = NULL, ..., subset,
                         na.action = na.omit, scale = TRUE)
{
    call <- match.call()
    if (!inherits(formula, "formula"))
        stop("method is only for formula objects")
    m <- match.call(expand.dots = FALSE)
    if (inherits(eval.parent(m$data), "matrix"))
        m$data <- as.data.frame(eval.parent(m$data))
    m$... <- NULL
    m$scale <- NULL
    m[[1L]] <- quote(stats::model.frame)
    m$na.action <- na.action
    m <- eval(m, parent.frame())
    Terms <- attr(m, "terms")
    attr(Terms, "intercept") <- 0
    x <- model.matrix(Terms, m)
    y <- model.extract(m, "response")
    attr(x, "na.action") <- attr(y, "na.action") <- attr(m, "na.action")
    attr(x, "xlevels") <- .getXlevels(Terms, m)
    if (length(scale) == 1)
        scale <- rep(scale, ncol(x))
    if (any(scale)) {
        remove <- unique(c(which(labels(Terms) %in%
                                 names(attr(x, "contrasts"))),
                           which(!scale)))
        scale <- !attr(x, "assign") %in% remove
    }
    class(x) <- c("svm.formula", class(x))
    ret <- svm.default (x, y, scale = scale, ..., na.action = na.action)
    ret$call <- call
    ret$call[[1]] <- as.name("svm")
    ret$terms <- Terms
    if (!is.null(attr(m, "na.action")))
        ret$na.action <- attr(m, "na.action")
    class(ret) <- c("svm.formula", class(ret))
    return (ret)
}

svm.default <- function (x,
                         y             = NULL,
                         scale         = TRUE,
                         type          = NULL,
                         kernel        = "radial",
                         degree        = 3,
                         gamma         = if (is.vector(x)) 1 else 1 / ncol(x),
                         coef0         = 0,
                         cost          = 1,
                         nu            = 0.5,
                         class.weights = NULL,
                         cachesize     = 40,
                         tolerance     = 0.001,
                         epsilon       = 0.1,
                         shrinking     = TRUE,
                         cross         = 0,
                         probability   = FALSE,
                         fitted        = TRUE,
                         ...,
                         subset,
                         na.action = na.omit)
{
    yorig <- y
    if (inherits(x, "Matrix")) {
        loadNamespace("SparseM")
        loadNamespace("Matrix")
        x <- as(x, "matrix.csr")
    }
    if (inherits(x, "simple_triplet_matrix")) {
        loadNamespace("SparseM")
        ind <- order(x$i, x$j)
        x <- new("matrix.csr",
                 ra = x$v[ind],
                 ja = x$j[ind],
                 ia = as.integer(cumsum(c(1, tabulate(x$i[ind])))),
                 dimension = c(x$nrow, x$ncol))
    }
    if (sparse <- inherits(x, "matrix.csr"))
        loadNamespace("SparseM")

    ## NULL parameters?
    if (is.null(degree)) stop(sQuote("degree"), " must not be NULL!")
    if (is.null(gamma)) stop(sQuote("gamma"), " must not be NULL!")
    if (is.null(coef0)) stop(sQuote("coef0"), " must not be NULL!")
    if (is.null(cost)) stop(sQuote("cost"), " must not be NULL!")
    if (is.null(nu)) stop(sQuote("nu"), " must not be NULL!")
    if (is.null(epsilon)) stop(sQuote("epsilon"), " must not be NULL!")
    if (is.null(tolerance)) stop(sQuote("tolerance"), " must not be NULL!")

    xlevels <- attr(x, "xlevels")
    xhold <- if (fitted) x else NULL
    x.scale <- y.scale <- NULL
    formula <- inherits(x, "svm.formula")

    ## determine model type
    if (is.null(type))
        type <- if (is.null(y)) "one-classification"
                else if (is.factor(y)) "C-classification"
                else "eps-regression"

    type <- pmatch(type, c("C-classification",
                           "nu-classification",
                           "one-classification",
                           "eps-regression",
                           "nu-regression"), 99) - 1

    if (type > 10) stop("wrong type specification!")

    kernel <- pmatch(kernel, c("linear",
                               "polynomial",
                               "radial",
                               "sigmoid"), 99) - 1

    if (kernel > 10) stop("wrong kernel specification!")

    nac <- attr(x, "na.action")

    ## scaling, subsetting, and NA handling
    if (sparse) {
        scale <- rep(FALSE, ncol(x))
        if (!is.null(y)) na.fail(y)
        x <- SparseM::t(SparseM::t(x)) ## make sure that col-indices are sorted
    }
    else {
        x <- as.matrix(x)

        ## subsetting and na-handling for matrices
        if (!formula) {
            if (!missing(subset)) {
                x <- x[subset,]
                y <- y[subset]
                if (!is.null(xhold))
                    xhold <- as.matrix(xhold)[subset,]
            }
            if (is.null(y))
                x <- na.action(x)
            else {
                df <- na.action(data.frame(y, x, check.names = FALSE))
                y <- df[,1]
                x <- as.matrix(df[,-1], rownames.force = TRUE)
                nac <- attr(x, "na.action") <-
                    attr(y, "na.action") <- attr(df, "na.action")
            }
        }

        ## scaling
        if (length(scale) == 1)
            scale <- rep(scale, ncol(x))
        if (any(scale)) {
            co <- !apply(x[,scale, drop = FALSE], 2, var)
            if (any(co)) {
                warning(paste("Variable(s)",
                              paste(sQuote(colnames(x[,scale, drop = FALSE])[co]),
                                    sep = "", collapse = " and "),
                              "constant. Cannot scale data."))
Cannot scale data.") ) scale <- rep(FALSE, ncol(x)) } else { xtmp <- scale_data_frame(x[,scale]) x[,scale] <- xtmp x.scale <- attributes(xtmp)[c("scaled:center","scaled:scale")] if (is.numeric(y) && (type > 2)) { yorig <- y y <- scale(y) y.scale <- attributes(y)[c("scaled:center","scaled:scale")] y <- as.vector(y) } } } } ## further parameter checks nr <- nrow(x) if (cross > nr) stop(sQuote("cross"), " cannot exceed the number of observations!") ytmp <- y attributes(ytmp) <- NULL if (!is.vector(ytmp) && !is.factor(y) && type != 2) stop("y must be a vector or a factor.") if (type != 2 && length(y) != nr) stop("x and y don't match.") if (cachesize < 0.1) cachesize <- 0.1 if (type > 2 && !is.numeric(y)) stop("Need numeric dependent variable for regression.") lev <- NULL weightlabels <- NULL ## in case of classification: transform factors into integers if (type == 2) # one class classification --> set dummy y <- rep(1, nr) else if (is.factor(y)) { lev <- levels(y) y <- as.integer(y) } else { if (type < 3) { if(any(as.integer(y) != y)) stop("dependent variable has to be of factor or integer type for classification mode.") y <- as.factor(y) lev <- levels(y) y <- as.integer(y) } else lev <- unique(y) } if (type < 3 && !is.null(class.weights)) { if (is.character(class.weights) && class.weights == "inverse") { class.weights <- 1 / table(y) names(class.weights) = lev } if (is.null(names(class.weights))) stop("Weights have to be specified along with their according level names !") weightlabels <- match (names(class.weights), lev) if (any(is.na(weightlabels))) stop("At least one level name is missing or misspelled.") } nclass <- 2 if (type < 2) nclass <- length(lev) if (type > 1 && length(class.weights) > 0) { class.weights <- NULL warning(sQuote("class.weights"), " are set to NULL for regression mode. 
    err <- empty_string <- paste(rep(" ", 255), collapse = "")

    if (is.null(type)) stop("type argument must not be NULL!")
    if (is.null(kernel)) stop("kernel argument must not be NULL!")
    if (is.null(degree)) stop("degree argument must not be NULL!")
    if (is.null(gamma)) stop("gamma argument must not be NULL!")
    if (is.null(coef0)) stop("coef0 argument must not be NULL!")
    if (is.null(cost)) stop("cost argument must not be NULL!")
    if (is.null(nu)) stop("nu argument must not be NULL!")
    if (is.null(cachesize)) stop("cachesize argument must not be NULL!")
    if (is.null(tolerance)) stop("tolerance argument must not be NULL!")
    if (is.null(epsilon)) stop("epsilon argument must not be NULL!")
    if (is.null(shrinking)) stop("shrinking argument must not be NULL!")
    if (is.null(cross)) stop("cross argument must not be NULL!")
    if (is.null(sparse)) stop("sparse argument must not be NULL!")
    if (is.null(probability)) stop("probability argument must not be NULL!")

    cret <- .C (R_svmtrain,
                ## data
                as.double  (if (sparse) x@ra else t(x)),
                as.integer (nr), as.integer(ncol(x)),
                as.double  (y),
                ## sparse index info
                as.integer (if (sparse) x@ia else 0),
                as.integer (if (sparse) x@ja else 0),

                ## parameters
                as.integer (type),
                as.integer (kernel),
                as.integer (degree),
                as.double  (gamma),
                as.double  (coef0),
                as.double  (cost),
                as.double  (nu),
                as.integer (weightlabels),
                as.double  (class.weights),
                as.integer (length (class.weights)),
                as.double  (cachesize),
                as.double  (tolerance),
                as.double  (epsilon),
                as.integer (shrinking),
                as.integer (cross),
                as.integer (sparse),
                as.integer (probability),

                ## results
                nclasses = integer (1),
                nr       = integer (1), # nr of support vectors
                index    = integer (nr),
                labels   = integer (nclass),
                nSV      = integer (nclass),
                rho      = double  (nclass * (nclass - 1) / 2),
                coefs    = double  (nr * (nclass - 1)),
                sigma    = double  (1),
                probA    = double  (nclass * (nclass - 1) / 2),
                probB    = double  (nclass * (nclass - 1) / 2),

                cresults = double  (cross),
                ctotal1  = double  (1),
                ctotal2  = double  (1),
                error    = err)

    if (cret$error != empty_string)
        stop(paste(cret$error, "!", sep = ""))

    cret$index <- cret$index[1:cret$nr]

    ret <- list (
        call     = match.call(),
        type     = type,
        kernel   = kernel,
        cost     = cost,
        degree   = degree,
        gamma    = gamma,
        coef0    = coef0,
        nu       = nu,
        epsilon  = epsilon,
        sparse   = sparse,
        scaled   = scale,
        x.scale  = x.scale,
        y.scale  = y.scale,

        nclasses = cret$nclasses,                # number of classes
        levels   = lev,
        tot.nSV  = cret$nr,                      # total number of SVs
        nSV      = cret$nSV[1:cret$nclasses],    # number of SVs in diff. classes
        labels   = cret$labels[1:cret$nclasses], # labels of the SVs
        SV       = if (sparse) SparseM::t(SparseM::t(x[cret$index]))
                   else t(t(x[cret$index,, drop = FALSE])),  # copy of SVs
        index    = cret$index,                   # indices of SVs in x

        ## constants in decision functions
        rho      = cret$rho[1:(cret$nclasses * (cret$nclasses - 1) / 2)],

        ## probabilities
        compprob = probability,
        probA    = if (!probability) NULL else
                   cret$probA[1:(cret$nclasses * (cret$nclasses - 1) / 2)],
        probB    = if (!probability) NULL else
                   cret$probB[1:(cret$nclasses * (cret$nclasses - 1) / 2)],
        sigma    = if (probability) cret$sigma else NULL,

        ## coefficients of SVs
        coefs    = if (cret$nr == 0) NULL else
                   t(matrix(cret$coefs[1:((cret$nclasses - 1) * cret$nr)],
                            nrow = cret$nclasses - 1,
                            byrow = TRUE)),
        na.action = nac,
        xlevels   = xlevels
    )

    ## cross-validation results
    if (cross > 0)
        if (type > 2) {
            scale.factor   <- if (any(scale)) crossprod(y.scale$"scaled:scale") else 1
            ret$MSE        <- cret$cresults * scale.factor
            ret$tot.MSE    <- cret$ctotal1  * scale.factor
            ret$scorrcoeff <- cret$ctotal2
        }
        else {
            ret$accuracies   <- cret$cresults
            ret$tot.accuracy <- cret$ctotal1
        }

    class (ret) <- "svm"

    if (fitted) {
        ret$fitted <- na.action(predict(ret, xhold,
                                        decision.values = TRUE))
        ret$decision.values <- attr(ret$fitted, "decision.values")
        attr(ret$fitted, "decision.values") <- NULL
        if (type > 1)
            ret$residuals <- yorig - ret$fitted
    }

    ret
}

predict.svm <- function (object, newdata,
                         decision.values = FALSE,
                         probability = FALSE,
                         ...,
                         na.action = na.omit)
{
    if (missing(newdata))
        return(fitted(object))

    if (object$tot.nSV < 1)
        stop("Model is empty!")

    if (inherits(newdata, "Matrix")) {
        loadNamespace("SparseM")
        loadNamespace("Matrix")
        newdata <- as(newdata, "matrix.csr")
    }
    if (inherits(newdata, "simple_triplet_matrix")) {
        loadNamespace("SparseM")
        ind <- order(newdata$i, newdata$j)
        newdata <- new("matrix.csr",
                       ra = newdata$v[ind],
                       ja = newdata$j[ind],
                       ia = as.integer(cumsum(c(1, tabulate(newdata$i[ind])))),
                       dimension = c(newdata$nrow, newdata$ncol))
    }

    sparse <- inherits(newdata, "matrix.csr")
    if (object$sparse || sparse)
        loadNamespace("SparseM")

    act <- NULL
    if ((is.vector(newdata) && is.atomic(newdata)))
        newdata <- t(t(newdata))
    if (sparse)
        newdata <- SparseM::t(SparseM::t(newdata))
    preprocessed <- !is.null(attr(newdata, "na.action"))
    rowns <- if (!is.null(rownames(newdata)))
                 rownames(newdata)
             else
                 1:nrow(newdata)
    if (!object$sparse) {
        if (inherits(object, "svm.formula")) {
            if (is.null(colnames(newdata)))
                colnames(newdata) <- colnames(object$SV)
            newdata <- na.action(newdata)
            act <- attr(newdata, "na.action")
            newdata <- model.matrix(delete.response(terms(object)),
                                    as.data.frame(newdata),
                                    xlev = object$xlevels)
        }
        else {
            ## FIXME: would be safer, but users might provide new data with
            ## other colnames than the training data ...
            ## if (!is.null(colnames(newdata)))
            ##     newdata <- newdata[,colnames(object$SV)]
            newdata <- na.action(as.matrix(newdata))
            act <- attr(newdata, "na.action")
        }
    }

    if (!is.null(act) && !preprocessed)
        rowns <- rowns[-act]

    if (ncol(object$SV) != ncol(newdata))
        stop("test data does not match model !")

    if (any(object$scaled))
        newdata[, object$scaled] <-
            scale_data_frame(newdata[, object$scaled, drop = FALSE],
                             center = object$x.scale$"scaled:center",
                             scale  = object$x.scale$"scaled:scale")

    ret <- .C (R_svmpredict,
               as.integer (decision.values),
               as.integer (probability),

               ## model
               as.double  (if (object$sparse) object$SV@ra else t(object$SV)),
               as.integer (nrow(object$SV)), as.integer(ncol(object$SV)),
               as.integer (if (object$sparse) object$SV@ia else 0),
               as.integer (if (object$sparse) object$SV@ja else 0),
               as.double  (as.vector(object$coefs)),
               as.double  (object$rho),
               as.integer (object$compprob),
               as.double  (if (object$compprob) object$probA else 0),
               as.double  (if (object$compprob) object$probB else 0),
               as.integer (object$nclasses),
               as.integer (object$tot.nSV),
               as.integer (object$labels),
               as.integer (object$nSV),
               as.integer (object$sparse),

               ## parameter
               as.integer (object$type),
               as.integer (object$kernel),
               as.integer (object$degree),
               as.double  (object$gamma),
               as.double  (object$coef0),

               ## test matrix
               as.double  (if (sparse) newdata@ra else t(newdata)),
               as.integer (nrow(newdata)),
               as.integer (if (sparse) newdata@ia else 0),
               as.integer (if (sparse) newdata@ja else 0),
               as.integer (sparse),

               ## decision-values
               ret  = double(nrow(newdata)),
               dec  = double(nrow(newdata) * object$nclasses * (object$nclasses - 1) / 2),
               prob = double(nrow(newdata) * object$nclasses))

    ret2 <- if (is.character(object$levels)) # classification: return factors
                factor (object$levels[ret$ret], levels = object$levels)
            else if (object$type == 2) # one-class-classification: return TRUE/FALSE
                ret$ret == 1
            else if (any(object$scaled) && !is.null(object$y.scale)) # return raw values, possibly scaled back
                ret$ret * object$y.scale$"scaled:scale" + object$y.scale$"scaled:center"
            else
                ret$ret

    names(ret2) <- rowns
    ret2 <- napredict(act, ret2)

    if (decision.values) {
        colns <- c()
        for (i in 1:(object$nclasses - 1))
            for (j in (i + 1):object$nclasses)
                colns <- c(colns,
                           paste(object$levels[object$labels[i]], "/",
                                 object$levels[object$labels[j]], sep = ""))
        attr(ret2, "decision.values") <-
            napredict(act,
                      matrix(ret$dec, nrow = nrow(newdata), byrow = TRUE,
                             dimnames = list(rowns, colns)))
    }

    if (probability && object$type < 2) {
        if (!object$compprob)
            warning("SVM has not been trained using `probability = TRUE`, probabilities not available for predictions.")
        else
            attr(ret2, "probabilities") <-
                napredict(act,
                          matrix(ret$prob, nrow = nrow(newdata), byrow = TRUE,
                                 dimnames = list(rowns,
                                                 object$levels[object$labels])))
    }

    ret2
}
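Example (editor's sketch, not part of the package sources): training and evaluating an SVM classifier with the `svm()` and `predict.svm()` functions above. Split and hyperparameters are arbitrary.

library(e1071)
data(iris)
set.seed(1)
idx <- sample(nrow(iris), 100)
model <- svm(Species ~ ., data = iris[idx, ], kernel = "radial", cost = 1)
print(model)                          # print.svm(), defined below
pred <- predict(model, iris[-idx, -5])
table(pred, iris$Species[-idx])       # held-out confusion matrix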
{ cat("\nCall:", deparse(x$call, 0.8 * getOption("width")), "\n", sep="\n") cat("Parameters:\n") cat(" SVM-Type: ", c("C-classification", "nu-classification", "one-classification", "eps-regression", "nu-regression")[x$type+1], "\n") cat(" SVM-Kernel: ", c("linear", "polynomial", "radial", "sigmoid")[x$kernel+1], "\n") if (x$type==0 || x$type==3 || x$type==4) cat(" cost: ", x$cost, "\n") if (x$kernel==1) cat(" degree: ", x$degree, "\n") if (x$type==1 || x$type==2 || x$type==3) cat(" gamma: ", x$gamma, "\n") if (x$kernel==1 || x$kernel==3) cat(" coef.0: ", x$coef0, "\n") if (x$type==1 || x$type==2 || x$type==4) cat(" nu: ", x$nu, "\n") if (x$type==3) { cat(" epsilon: ", x$epsilon, "\n\n") if (x$compprob) cat("Sigma: ", x$sigma, "\n\n") } cat("\nNumber of Support Vectors: ", x$tot.nSV) cat("\n\n") } summary.svm <- function(object, ...) structure(object, class="summary.svm") print.summary.svm <- function (x, ...) { print.svm(x) if (x$type<2) { cat(" (", x$nSV, ")\n\n") cat("\nNumber of Classes: ", x$nclasses, "\n\n") cat("Levels:", if(is.numeric(x$levels)) "(as integer)", "\n", x$levels) } cat("\n\n") if (x$type==2) cat("\nNumber of Classes: 1\n\n\n") if ("MSE" %in% names(x)) { cat(length (x$MSE), "-fold cross-validation on training data:\n\n", sep="") cat("Total Mean Squared Error:", x$tot.MSE, "\n") cat("Squared Correlation Coefficient:", x$scorrcoef, "\n") cat("Mean Squared Errors:\n", x$MSE, "\n\n") } if ("accuracies" %in% names(x)) { cat(length (x$accuracies), "-fold cross-validation on training data:\n\n", sep="") cat("Total Accuracy:", x$tot.accuracy, "\n") cat("Single Accuracies:\n", x$accuracies, "\n\n") } cat("\n\n") } plot.svm <- function(x, data, formula = NULL, fill = TRUE, grid = 50, slice = list(), symbolPalette = palette(), svSymbol = "x", dataSymbol = "o", ...) { if (x$type < 3) { if (is.null(formula) && ncol(data) == 3) { formula <- formula(delete.response(terms(x))) formula[2:3] <- formula[[2]][2:3] } if (is.null(formula)) stop("missing formula.") if (fill) { sub <- model.frame(formula, data) xr <- seq(min(sub[, 2]), max(sub[, 2]), length.out = grid) yr <- seq(min(sub[, 1]), max(sub[, 1]), length.out = grid) l <- length(slice) if (l < ncol(data) - 3) { slnames <- names(slice) slice <- c(slice, rep(list(0), ncol(data) - 3 - l)) names <- labels(delete.response(terms(x))) names(slice) <- c(slnames, names[!names %in% c(colnames(sub), slnames)]) } for (i in names(which(vapply(data, is.factor, NA)))) if (!is.factor(slice[[i]])) { levs <- levels(data[[i]]) lev <- if (is.character(slice[[i]])) slice[[i]] else levs[1] fac <- factor(lev, levels = levs) if (is.na(fac)) stop(paste("Level", dQuote(lev), "could not be found in factor", sQuote(i))) slice[[i]] <- fac } lis <- c(list(yr), list(xr), slice) names(lis)[1:2] <- colnames(sub) new <- expand.grid(lis)[, labels(terms(x))] preds <- predict(x, new) filled.contour(xr, yr, matrix(as.numeric(preds), nrow = length(xr), byrow = TRUE), plot.axes = { axis(1) axis(2) colind <- as.numeric(model.response(model.frame(x, data))) dat1 <- data[-x$index,] dat2 <- data[x$index,] coltmp1 <- symbolPalette[colind[-x$index]] coltmp2 <- symbolPalette[colind[x$index]] points(formula, data = dat1, pch = dataSymbol, col = coltmp1) points(formula, data = dat2, pch = svSymbol, col = coltmp2) }, levels = 1:(length(levels(preds)) + 1), key.axes = axis(4, 1:(length(levels(preds))) + 0.5, labels = levels(preds), las = 3), plot.title = title(main = "SVM classification plot", xlab = names(lis)[2], ylab = names(lis)[1]), ...) 
        }
        else {
            plot(formula, data = data, type = "n", ...)
            colind <- as.numeric(model.response(model.frame(x, data)))
            dat1 <- data[-x$index,]
            dat2 <- data[x$index,]
            coltmp1 <- symbolPalette[colind[-x$index]]
            coltmp2 <- symbolPalette[colind[x$index]]
            points(formula, data = dat1, pch = dataSymbol, col = coltmp1)
            points(formula, data = dat2, pch = svSymbol, col = coltmp2)
            invisible()
        }
    }
}

write.svm <- function (object, svm.file = "Rdata.svm",
                       scale.file = "Rdata.scale",
                       yscale.file = "Rdata.yscale")
{
    ret <- .C (R_svmwrite,
               ## model
               as.double  (if (object$sparse) object$SV@ra else t(object$SV)),
               as.integer (nrow(object$SV)), as.integer(ncol(object$SV)),
               as.integer (if (object$sparse) object$SV@ia else 0),
               as.integer (if (object$sparse) object$SV@ja else 0),
               as.double  (as.vector(object$coefs)),
               as.double  (object$rho),
               as.integer (object$compprob),
               as.double  (if (object$compprob) object$probA else 0),
               as.double  (if (object$compprob) object$probB else 0),
               as.integer (object$nclasses),
               as.integer (object$tot.nSV),
               as.integer (object$labels),
               as.integer (object$nSV),
               as.integer (object$sparse),

               ## parameter
               as.integer (object$type),
               as.integer (object$kernel),
               as.integer (object$degree),
               as.double  (object$gamma),
               as.double  (object$coef0),

               ## filename
               as.character(svm.file))$ret

    write.table(data.frame(center = object$x.scale$"scaled:center",
                           scale  = object$x.scale$"scaled:scale"),
                file = scale.file, col.names = FALSE, row.names = FALSE)

    if (!is.null(object$y.scale))
        write.table(data.frame(center = object$y.scale$"scaled:center",
                               scale  = object$y.scale$"scaled:scale"),
                    file = yscale.file, col.names = FALSE, row.names = FALSE)
}

coef.svm <- function(object, ...)
{
    if (object$kernel != 0 || object$nclasses > 2)
        stop("Only implemented for regression or binary classification with linear kernel.")
    ret <- if (inherits(object$SV, "matrix.csr")) {
               loadNamespace("SparseM")
               drop(crossprod(object$coefs, SparseM::as.matrix(object$SV)))
           } else
               drop(crossprod(object$coefs, object$SV))
    trm <- object$terms
    if (!is.null(trm)) names(ret) <- labels(trm)
    c(`(Intercept)` = -object$rho, ret)
}

## File: e1071/R/bincombinations.R

## a copy of this also exists in mlbench
bincombinations <- function(p)
{
    retval <- matrix(0, nrow = 2^p, ncol = p)
    for (n in 1:p) {
        retval[,n] <- rep(c(rep(0, (2^p/2^n)),
                            rep(1, (2^p/2^n))),
                          length.out = 2^p)
    }
    retval
}
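Example (editor's sketch, not part of the package sources): `bincombinations()` enumerates all 2^p binary vectors of length p, one per row, in binary counting order (000, 001, 010, ...).

bincombinations(3)   # 8 x 3 matrix of all 0/1 triples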
## File: e1071/R/fclustIndex.R

fclustIndex <- function (y, x, index = "all")
{
    clres <- y
###########################################################################
################SESSION 1: MEASURES#########################################
###########################################################################
    gath.geva <- function (clres, x)  # for m=2
    {
        xrows <- dim(clres$membership)[1]
        xcols <- dim(clres$centers)[2]
        ncenters <- dim(clres$centers)[1]
        scatter <- array(0.0, c(xcols, xcols, ncenters))
        scatternew <- array(0.0, c(xcols, xcols, ncenters))
        fhv <- as.double(0)
        apd <- as.double(0)
        pd <- as.double(0)
        control <- as.double(0)

        for (i in 1:ncenters) {
            paronomastis <- as.double(0)
            paronomastis2 <- as.double(0)
            for (j in 1:xrows) {
                paronomastis <- paronomastis + clres$membership[j,i]
                diff <- x[j,] - clres$centers[i,]
                scatternew[,,i] <- clres$membership[j,i] * (t(t(diff)) %*% t(diff))
                scatter[,,i] <- scatter[,,i] + scatternew[,,i]
            } # xrows
            scatter[,,i] <- scatter[,,i]/paronomastis
            for (j in 1:xrows) {
                diff <- x[j,] - clres$centers[i,]
                control <- (t(diff) %*% solve(scatter[,,i])) %*% t(t(diff))
                if (control < 1.0)
                    paronomastis2 <- paronomastis2 + clres$membership[j,i]
                ## else
                ##     cat("...")
            } # xrows
            fhv <- fhv + sqrt(det(scatter[,,i]))
            apd <- apd + paronomastis2/sqrt(det(scatter[,,i]))
            pd <- pd + paronomastis2
        } # ncenters
        pd <- pd/fhv
        apd <- apd/ncenters
        retval <- list(fuzzy.hypervolume = fhv,
                       average.partition.density = apd,
                       partition.density = pd)
        return(retval)
    }

    xie.beni <- function(clres) {  # for all m
        xrows <- dim(clres$membership)[1]
        minimum <- -1
        error <- clres$withinerror  # sd
        ncenters <- dim(clres$centers)[1]
        for (i in 1:(ncenters-1)) {
            for (j in (i+1):ncenters) {
                diff <- clres$centers[i,] - clres$centers[j,]
                diffdist <- t(diff) %*% t(t(diff))
                if (minimum == -1)
                    minimum <- diffdist
                if (diffdist

## [gap: the rest of xie.beni(), the remaining fclustIndex() measures, and
## the beginning of e1071/R/matchControls.R are missing from the source
## dump; the text resumes inside matchControls(), at the tail of its
## internal helper which.is.min()]

                            1) sample(y, 1) else y
    }
    retval <- rep("", length(cases))
    for (k in 1 : length(cases)) {
        retval[k] <- controls[which.is.min(d[cases[k], controls])]
        if (!replace)
            controls <- controls[controls != retval[k]]
    }
    fac <- rep(NA, nrow(m2))
    names(fac) <- rownames(m2)
    fac[cases] <- "case"
    fac[retval] <- "cont"
    fac <- factor(fac)
    list(cases = cases, controls = retval, factor = fac)
}

## File: e1071/R/e1071-deprecated.R
## (file is empty)

## File: e1071/R/bclust.R

"bclust" <- function (x, centers = 2, iter.base = 10, minsize = 0,
                      dist.method = "euclidean",
                      hclust.method = "average",
                      base.method = "kmeans",
                      base.centers = 20,
                      verbose = TRUE,
                      final.kmeans = FALSE,
                      docmdscale = FALSE,
                      resample = TRUE,
                      weights = NULL,
                      maxcluster = base.centers, ...)
{
    x <- as.matrix(x)
    xr <- nrow(x)
    xc <- ncol(x)
    CLUSFUN <- get(base.method)

    object <- list(allcenters = matrix(0, ncol = xc,
                                       nrow = iter.base * base.centers),
                   allcluster = NULL,
                   hclust = NULL,
                   members = NULL,
                   cluster = NULL,
                   centers = NULL,
                   iter.base = iter.base,
                   base.centers = base.centers,
                   prcomp = NULL,
                   datamean = apply(x, 2, mean),
                   colnames = colnames(x),
                   dist.method = dist.method,
                   hclust.method = hclust.method,
                   maxcluster = maxcluster)
    class(object) <- "bclust"

    optSEM <- getOption("show.error.messages")
    if (is.null(optSEM)) optSEM <- TRUE
    on.exit(options(show.error.messages = optSEM))

    if (verbose)
        cat("Committee Member:")

    for (n in 1:iter.base) {
        if (verbose) {
            cat(" ", n, sep = "")
        }
        if (resample) {
            x1 <- x[sample(xr, replace = TRUE, prob = weights), ]
        }
        else {
            x1 <- x
        }
        for (m in 1:20) {
            if (verbose) cat("(", m, ")", sep = "")
            options(show.error.messages = FALSE)
            tryres <- try(CLUSFUN(x1, centers = base.centers, ...))
            if (!inherits(tryres, "try-error")) break
        }
        options(show.error.messages = optSEM)
        if (m == 20)
            stop("Could not find valid cluster solution in 20 replications\n")

        object$allcenters[((n - 1) * base.centers + 1):(n * base.centers), ] <-
            tryres$centers
    }
    object$allcenters <-
        object$allcenters[complete.cases(object$allcenters), , drop = FALSE]
    object$allcluster <- knn1(object$allcenters, x,
                              factor(1:nrow(object$allcenters)))

    if (minsize > 0) {
        object <- prune.bclust(object, x, minsize = minsize)
    }

    if (verbose)
        cat("\nComputing Hierarchical Clustering\n")

    object <- hclust.bclust(object, x = x, centers = centers,
                            final.kmeans = final.kmeans,
                            docmdscale = docmdscale)
    object
}

"centers.bclust" <- function (object, k)
{
    centers <- matrix(0, nrow = k, ncol = ncol(object$allcenters))
    for (m in 1:k) {
        centers[m, ] <- apply(object$allcenters[object$members[, k-1] == m,
                                                , drop = FALSE],
                              2, mean)
    }
    centers
}

"clusters.bclust" <- function (object, k, x = NULL)
{
    if (missing(x))
        allcluster <- object$allcluster
    else
        allcluster <- knn1(object$allcenters, x,
                           factor(1:nrow(object$allcenters)))
    return(object$members[allcluster, k - 1])
}
"hclust.bclust" <- function (object, x, centers,
                             dist.method = object$dist.method,
                             hclust.method = object$hclust.method,
                             final.kmeans = FALSE,
                             docmdscale = FALSE,
                             maxcluster = object$maxcluster)
{
    d <- dist(object$allcenters, method = dist.method)
    if (hclust.method == "diana") {
        if (system.file(package = "cluster") == "")
            stop("Could not load required package 'cluster'!")
        object$hclust <- stats::as.hclust(cluster::diana(d, diss = TRUE))
    }
    else
        object$hclust <- stats::hclust(d, method = hclust.method)

    if (docmdscale) {
        object$cmdscale <- cmdscale(d)
    }

    object$members <- cutree(object$hclust, 2:maxcluster)
    object$cluster <- clusters.bclust(object, centers)
    object$centers <- centers.bclust(object, centers)

    if (final.kmeans) {
        kmeansres <- kmeans(x, centers = object$centers)
        object$centers <- kmeansres$centers
        object$cluster <- kmeansres$cluster
    }
    object
}

"plot.bclust" <- function (x, maxcluster = x$maxcluster,
                           main = deparse(substitute(x)), ...)
{
    opar <- par(c("mar", "oma"))
    on.exit(par(opar))
    par(oma = c(0, 0, 3, 0))
    layout(matrix(c(1, 1, 2, 2), 2, 2, byrow = TRUE))
    par(mar = c(0, 4, 4, 1))
    plot(x$hclust, labels = FALSE, hang = -1)
    x1 <- 1:maxcluster
    x2 <- 2:maxcluster
    y <- rev(x$hclust$height)[x1]
    z <- abs(diff(y))
    par(mar = c(4, 4, 1, 2))
    plot(x1, ((y - min(y))/(max(y) - min(y))), type = "l",
         xlab = "", ylab = "", ylim = c(0, 1))
    lines(x2, z/sum(z), col = "grey")
    text(x2, z/sum(z), labels = as.character(x2))
#    lx2 <- length(x2)
#    abline(h=qexp(.95, rate = length(x2)), lty=3, col="grey")
#    abline(h=qexp(.95^(1/lx2), rate = length(x2)), lty=3, col="grey")
    mtext(main, outer = TRUE, cex = 1.5)
    layout(1)
}

"boxplot.bclust" <- function (x, n = nrow(x$centers), bycluster = TRUE,
                              main = deparse(substitute(x)),
                              oneplot = TRUE, which = 1:n, ...)
{
    N <- length(which)
    opar <- par(c("mfrow", "oma", "mgp", "xpd"))
    on.exit(par(opar))
    par(xpd = NA)
    memb <- x$members[, (n - 1)]
    tmemb <- table(memb)
    cendf <- as.data.frame(x$allcenters)
    ylim <- range(x$allcenters)
    if (bycluster) {
        if (oneplot) {
            if (N <= 3) {
                par(mfrow = c(N, 1))
            }
            else {
                par(mfrow = c(ceiling(N/2), 2))
            }
        }
        tcluster <- table(clusters.bclust(x, n))
        for (k in which) {
            boxplot(cendf[memb == k, ], col = "grey",
                    names = rep("", ncol(cendf)), ylim = ylim, ...)
            if (!is.null(x$datamean)) {
                lines(x$datamean, col = "red")
            }
            if (!is.null(x$colnames)) {
                text(1:length(x$colnames) + 0.2, par("usr")[3],
                     adj = 1, srt = 35, paste(x$colnames, " "))
            }
            title(main = paste("Cluster ", k, ": ", tmemb[k], " centers, ",
                               tcluster[k], " data points", sep = ""))
        }
    }
    else {
        a <- ceiling(sqrt(ncol(cendf)))
        if (oneplot) {
            par(mfrow = c(a, ceiling(ncol(cendf)/a)))
        }
        memb <- as.factor(memb)
        for (k in 1:ncol(cendf)) {
            boxplot(cendf[, k] ~ memb, col = "grey", ylim = ylim, ...)
            title(main = x$colnames[k])
            abline(h = x$datamean[k], col = "red")
        }
    }
}

### prune centers that do not contain at least minsize data points
prune.bclust <- function(object, x, minsize = 1, dohclust = FALSE, ...)
{
    ok <- FALSE
    while (!all(ok)) {
        object$allcluster <- knn1(object$allcenters, x,
                                  factor(1:nrow(object$allcenters)))
        ok <- table(object$allcluster) >= minsize
        object$allcenters <- object$allcenters[ok, ]
    }
    if (dohclust) {
        object <- hclust.bclust(object, x, nrow(object$centers), ...)
    }
    object
}
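Example (editor's sketch, not part of the package sources): bagged clustering of the iris measurements with the functions above, then extracting a 3-cluster partition. Parameter values are arbitrary.

library(e1071)
data(iris)
set.seed(10)
bc <- bclust(iris[, 1:4], centers = 3, base.centers = 20, iter.base = 10)
plot(bc)                                    # dendrogram of the base centers
table(clusters.bclust(bc, 3), iris$Species) # partition vs. true species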
## File: e1071/R/naiveBayes.R

naiveBayes <- function(x, ...)
    UseMethod("naiveBayes")

naiveBayes.default <- function(x, y, laplace = 0, ...)
{
    call <- match.call()
    Yname <- deparse(substitute(y))
    x <- as.data.frame(x)

    if (is.logical(y))
        y <- factor(y, levels = c("FALSE", "TRUE"))

    ## estimation function
    est <- function(var)
        if (is.numeric(var)) {
            cbind(tapply(var, y, mean, na.rm = TRUE),
                  tapply(var, y, sd, na.rm = TRUE))
        } else {
            if (is.logical(var))
                var <- factor(var, levels = c("FALSE", "TRUE"))
            tab <- table(y, var)
            (tab + laplace) / (rowSums(tab) + laplace * nlevels(var))
        }

    ## create tables
    apriori <- table(y)
    tables <- lapply(x, est)
    isnumeric <- vapply(x, is.numeric, NA)

    ## fix dimname names
    for (i in 1:length(tables))
        names(dimnames(tables[[i]])) <- c(Yname, colnames(x)[i])
    names(dimnames(apriori)) <- Yname

    structure(list(apriori = apriori,
                   tables = tables,
                   levels = names(apriori),
                   isnumeric = isnumeric,
                   call = call),
              class = "naiveBayes")
}

naiveBayes.formula <- function(formula, data, laplace = 0, ...,
                               subset, na.action = na.pass)
{
    call <- match.call()
    Yname <- as.character(formula[[2]])

    if (is.data.frame(data)) {
        ## handle formula
        m <- match.call(expand.dots = FALSE)
        m$... <- NULL
        m$laplace <- NULL
        m$na.action <- na.action
        m[[1L]] <- quote(stats::model.frame)
        m <- eval(m, parent.frame())
        Terms <- attr(m, "terms")
        if (any(attr(Terms, "order") > 1))
            stop("naiveBayes cannot handle interaction terms")
        Y <- model.extract(m, "response")
        X <- m[, gsub("`", "", labels(Terms)), drop = FALSE]

        return(naiveBayes(X, Y, laplace = laplace, ...))
    } else if (is.array(data)) {
        nam <- names(dimnames(data))

        ## find the class dimension
        Yind <- which(nam == Yname)

        ## create variable index
        ## deps <- strsplit(as.character(formula)[3], ".[+].")[[1]]
        deps <- labels(terms(formula, data = data))
        if (length(deps) == 1 && deps == ".")
            deps <- nam[-Yind]
        Vind <- which(nam %in% deps)

        ## create tables
        apriori <- margin.table(data, Yind)
        tables <- lapply(Vind,
                         function(i) (margin.table(data, c(Yind, i)) + laplace) /
                             (as.numeric(apriori) + laplace * dim(data)[i]))
        names(tables) <- nam[Vind]
        isnumeric <- rep(FALSE, length(Vind))
        names(isnumeric) <- nam[Vind]

        structure(list(apriori = apriori,
                       tables = tables,
                       levels = names(apriori),
                       isnumeric = isnumeric,
                       call = call),
                  class = "naiveBayes")
    } else
        stop("naiveBayes formula interface handles data frames or arrays only")
}

print.naiveBayes <- function(x, ...)
{
    cat("\nNaive Bayes Classifier for Discrete Predictors\n\n")
    cat("Call:\n")
    print(x$call)
    cat("\nA-priori probabilities:\n")
    print(x$apriori / sum(x$apriori))
    cat("\nConditional probabilities:\n")
    for (i in x$tables) { print(i); cat("\n") }
}

predict.naiveBayes <- function(object, newdata,
                               type = c("class", "raw"),
                               threshold = 0.001, eps = 0, ...)
{
    type <- match.arg(type)
    newdata <- as.data.frame(newdata)

    ## fix factor levels to be identical with training data
    for (i in names(object$tables)) {
        if (!is.null(newdata[[i]]) && !is.numeric(newdata[[i]]))
            newdata[[i]] <- factor(newdata[[i]],
                                   levels = colnames(object$tables[[i]]))
        if (object$isnumeric[i] != is.numeric(newdata[[i]]))
            warning(paste0("Type mismatch between training and new data for variable '",
                           i, "'. Did you use factors with numeric labels for training, and numeric values for new data?"))
    }
    attribs <- match(names(object$tables), names(newdata))
    isnumeric <- vapply(newdata, is.numeric, NA)
    islogical <- vapply(newdata, is.logical, NA)
    newdata <- data.matrix(newdata)
    len <- length(object$apriori)
    L <- vapply(seq_len(nrow(newdata)), function(i) {
        ndata <- newdata[i, ]
        L <- log(object$apriori) +
            apply(log(vapply(seq_along(attribs),
                             function(v) {
                                 nd <- ndata[attribs[v]]
                                 if (is.na(nd))
                                     rep.int(1, len)
                                 else {
                                     prob <-
                                         if (isnumeric[attribs[v]]) {
                                             msd <- object$tables[[v]]
                                             msd[, 2][msd[, 2] <= eps] <- threshold
                                             dnorm(nd, msd[, 1], msd[, 2])
                                         } else
                                             object$tables[[v]][, nd + islogical[attribs[v]]]
                                     prob[prob <= eps] <- threshold
                                     prob
                                 }
                             }, double(len))),
                  1, sum)
        if (type == "class")
            L
        else {
            ## numerically unstable:
            ## L <- exp(L)
            ## L / sum(L)
            ## instead, we use:
            vapply(L, function(lp) {
                1/sum(exp(L - lp))
            }, double(1))
        }
    }, double(len))

    if (type == "class") {
        if (is.logical(object$levels))
            L[2,] > L[1,]
        else
            factor(object$levels[apply(L, 2, which.max)],
                   levels = object$levels)
    } else
        t(L)
}
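Example (editor's sketch, not part of the package sources): fitting and applying the Naive Bayes classifier defined above. Standard e1071 usage; the chosen rows are arbitrary.

library(e1071)
data(iris)
nb <- naiveBayes(Species ~ ., data = iris)
predict(nb, iris[c(1, 51, 101), -5])                 # class labels
predict(nb, iris[c(1, 51, 101), -5], type = "raw")   # posterior probabilities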
## File: e1071/R/plot.stft.R

plot.stft <- function (x, col = gray(63:0/63), ...)
{
    x <- x$values
    image(x = 1:dim(x)[1], y = 1:dim(x)[2], z = x, col = col, ...)
}

## File: e1071/R/rwiener.R

rwiener <- function(end = 1, frequency = 1000)
{
    z <- cumsum(rnorm(end*frequency)/sqrt(frequency))
    ts(z, start = 1/frequency, frequency = frequency)
}

## File: e1071/R/discrete.R

rdiscrete <- function (n, probs, values = 1:length(probs), ...)
{
    sample(values, size = n, replace = TRUE, prob = probs)
}

ddiscrete <- function (x, probs, values = 1:length(probs))
{
    if (length(probs) != length(values))
        stop("ddiscrete: probs and values must have the same length.")
    if (sum(probs < 0) > 0)
        stop("ddiscrete: probs must not contain negative values.")
    if (!is.array(x) && !is.vector(x) && !is.factor(x))
        stop("ddiscrete: x must be an array or a vector or a factor.")

    p <- probs/sum(probs)
    y <- as.vector(x)
    l <- length(y)
    z <- rep(0, l)
    for (i in 1:l)
        if (any(values == y[i]))
            z[i] <- p[values == y[i]]
    z <- as.numeric(z)
    if (is.array(x))
        dim(z) <- dim(x)
    return(z)
}

pdiscrete <- function (q, probs, values = 1:length(probs))
{
    if (length(probs) != length(values))
        stop("pdiscrete: probs and values must have the same length.")
    if (sum(probs < 0) > 0)
        stop("pdiscrete: probs must not contain negative values.")
    if (!is.array(q) & !is.vector(q))
        stop("pdiscrete: q must be an array or a vector")

    p <- probs/sum(probs)
    y <- as.vector(q)
    l <- length(y)
    z <- rep(0, l)
    for (i in 1:l)
        z[i] <- sum(p[values <= y[i]])
    z <- as.numeric(z)
    if (is.array(q))
        dim(z) <- dim(q)
    return(z)
}

qdiscrete <- function (p, probs, values = 1:length(probs))
{
    if (length(probs) != length(values))
        stop("qdiscrete: probs and values must have the same length.")
    if (sum(probs < 0) > 0)
        stop("qdiscrete: probs must not contain negative values.")
    if (!is.array(p) & !is.vector(p))
        stop("qdiscrete: p must be an array or a vector")

    probs <- cumsum(probs)/sum(probs)
    y <- as.vector(p)
    l <- length(y)
    z <- rep(0, l)
    for (i in 1:l)
        z[i] <- length(values) - sum(y[i] <= probs) + 1
    z <- as.numeric(z)
    z <- values[z]
    if (is.array(p))
        dim(z) <- dim(p)
    return(z)
}

## File: e1071/R/hamming.distance.R

hamming.distance <- function(x, y)
{
    z <- NULL
    if (is.vector(x) && is.vector(y)) {
        z <- sum(x != y)
    }
    else {
        z <- matrix(0, nrow = nrow(x), ncol = nrow(x))
        for (k in 1:(nrow(x)-1)) {
            for (l in (k+1):nrow(x)) {
                z[k,l] <- hamming.distance(x[k,], x[l,])
                z[l,k] <- z[k,l]
            }
        }
        dimnames(z) <- list(dimnames(x)[[1]], dimnames(x)[[1]])
    }
    z
}

## File: e1071/R/kurtosis.R

kurtosis <- function(x, na.rm = FALSE, type = 3)
{
    if (any(ina <- is.na(x))) {
        if (na.rm)
            x <- x[!ina]
        else
            return(NA)
    }

    if (!(type %in% (1 : 3)))
        stop("Invalid 'type' argument.")

    n <- length(x)
    x <- x - mean(x)
    r <- n * sum(x ^ 4) / (sum(x ^ 2) ^ 2)
    y <- if (type == 1)
             r - 3
         else if (type == 2) {
             if (n < 4)
                 stop("Need at least 4 complete observations.")
             ((n + 1) * (r - 3) + 6) * (n - 1) / ((n - 2) * (n - 3))
         }
         else
             r * (1 - 1 / n) ^ 2 - 3
    y
}

## File: e1071/R/element.R

element <- function(x, i)
{
    if (!is.array(x))
        stop("x is not an array")
    ni <- length(i)
    dx <- dim(x)
    if (length(i) != length(dx))
        stop("Wrong number of subscripts")
    if (ni == 1) {
        return(x[i])
    }
    else {
        m1 <- c(i[1], i[2:ni] - 1)
        m2 <- c(1, cumprod(dx)[1:(ni-1)])
        return(x[sum(m1*m2)])
    }
}

## File: e1071/R/impute.R

impute <- function(x, what = c("median", "mean"))
{
    what <- match.arg(what)
    if (what == "median") {
        retval <- apply(x, 2, function(z) {
            z[is.na(z)] <- median(z, na.rm = TRUE); z
        })
    }
    else if (what == "mean") {
        retval <- apply(x, 2, function(z) {
            z[is.na(z)] <- mean(z, na.rm = TRUE); z
        })
    }
    retval
}

## File: e1071/R/countpattern.R

countpattern <- function(x, matching = FALSE)
{
    nvar <- dim(x)[2]
    n <- dim(x)[1]

    ## build matrix of all possible binary vectors
    b <- matrix(0, 2^nvar, nvar)
    for (i in 1:nvar)
        b[, nvar+1-i] <- rep(rep(c(0,1), c(2^(i-1), 2^(i-1))), 2^(nvar-i))

    namespat <- b[,1]
    for (i in 2:nvar)
        namespat <- paste(namespat, b[,i], sep = "")

    xpat <- x[,1]
    for (i in 2:nvar)
        xpat <- 2*xpat + x[,i]
    xpat <- xpat + 1

    pat <- tabulate(xpat, nbins = 2^nvar)
    names(pat) <- namespat

    if (matching)
        return(list(pat = pat, matching = xpat))
    else
        return(pat)
}
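Example (editor's sketch, not part of the package sources): counting binary patterns and computing pairwise Hamming distances with the functions above. The matrix is arbitrary toy data.

xb <- matrix(c(1,1,0,
               0,1,1,
               1,1,0,
               1,0,0), ncol = 3, byrow = TRUE)
countpattern(xb)                             # counts, named by pattern
countpattern(xb, matching = TRUE)$matching   # pattern index of each row
hamming.distance(xb)                         # pairwise Hamming distances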
## File: e1071/R/cshell.R

cshell <- function (x, centers, iter.max = 100, verbose = FALSE,
                    dist = "euclidean", method = "cshell", m = 2,
                    radius = NULL)
{
    x <- as.matrix(x)
    xrows <- dim(x)[1]
    xcols <- dim(x)[2]
    xold <- x
    perm <- sample(xrows)
    x <- x[perm, ]

    ## initial values are given
    if (is.matrix(centers))
        ncenters <- dim(centers)[1]
    else {
        ## take centers random vectors as initial values
        ncenters <- centers
        centers <- x[rank(runif(xrows))[1:ncenters], ] + 0.001
    }

    ## initialize radius
    if (missing(radius))
        radius <- rep(0.2, ncenters)
    else
        radius <- as.double(radius)

    dist <- pmatch(dist, c("euclidean", "manhattan"))
    if (is.na(dist))
        stop("invalid distance")
    if (dist == -1)
        stop("ambiguous distance")

    method <- pmatch(method, c("cshell"))
    if (is.na(method))
        stop("invalid clustering method")
    if (method == -1)
        stop("ambiguous clustering method")

    initcenters <- centers
    ## dist <- matrix(0, xrows, ncenters)

    ## necessary for empty clusters
    pos <- as.factor(1 : ncenters)
    rownames(centers) <- pos
    iter <- integer(1)
    flag <- integer(1)

    retval <- .C(R_cshell,
                 xrows = as.integer(xrows),
                 xcols = as.integer(xcols),
                 x = as.double(x),
                 ncenters = as.integer(ncenters),
                 centers = as.double(centers),
                 iter.max = as.integer(iter.max),
                 iter = as.integer(iter),
                 verbose = as.integer(verbose),
                 dist = as.integer(dist - 1),
                 U = double(xrows*ncenters),
                 UANT = double(xrows*ncenters),
                 m = as.double(m),
                 ermin = double(1),
                 radius = as.double(radius),
                 flag = as.integer(flag))

    centers <- matrix(retval$centers, ncol = xcols,
                      dimnames = dimnames(initcenters))
    radius <- as.double(retval$radius)
    U <- retval$U
    U <- matrix(U, ncol = ncenters)
    UANT <- retval$UANT
    UANT <- matrix(UANT, ncol = ncenters)
    iter <- retval$iter
    flag <- as.integer(retval$flag)

    ## optimization part
    while (((flag == 1) || (flag == 4)) && (iter <= iter.max)) {
        flag <- 3

        system <- function (spar = c(centers, radius), x, U, m, i) {
            k <- dim(x)[1]
            d <- dim(x)[2]
            nparam <- length(spar)
            v <- spar[1:(nparam-1)]
            r <- spar[nparam]

            ## distance matrix x_k - v_i
            distmat <- t(t(x) - v)
            ## norm of x_k - v_i
            normdist <- distmat[,1]^2
            for (j in 2:d)
                normdist <- normdist + distmat[,j]^2
            normdist <- sqrt(normdist)
            ## equation 5
            op <- sum((U[,i]^m) * (normdist - r))^2
            ## equation 4
            equationmatrix <- ((U[,i]^m) * (1 - r/normdist)) * distmat
            ##
            ## This had just apply(), but optim() really needs a scalar fn.
            ## What do we really want here?
            op <- op + sum(apply(equationmatrix, 2, sum)^2)
            ##
        }

        for (i in 1:ncenters) {
            spar <- c(centers[i,], radius[i])
            npar <- length(spar)
            optimres <- optim(spar, system, method = "CG",
                              x = x, U = U, m = m, i = i)
            centers[i,] <- optimres$par[1:(npar-1)]
            radius[i] <- optimres$par[npar]
        }

        retval <- .C(R_cshell,
                     xrows = as.integer(xrows),
                     xcols = as.integer(xcols),
                     x = as.double(x),
                     ncenters = as.integer(ncenters),
                     centers = as.double(centers),
                     iter.max = as.integer(iter.max),
                     iter = as.integer(iter - 1),
                     verbose = as.integer(verbose),
                     dist = as.integer(dist - 1),
                     U = as.double(U),
                     UANT = as.double(UANT),
                     m = as.double(m),
                     ermin = double(1),
                     radius = as.double(radius),
                     flag = as.integer(flag))

        flag <- retval$flag
        if (retval$flag != 2)
            flag <- 1

        centers <- matrix(retval$centers, ncol = xcols,
                          dimnames = dimnames(initcenters))
        radius <- as.double(retval$radius)
        U <- retval$U
        U <- matrix(U, ncol = ncenters)
        UANT <- retval$UANT
        UANT <- matrix(UANT, ncol = ncenters)
        iter <- retval$iter
    }

    centers <- matrix(retval$centers, ncol = xcols,
                      dimnames = list(pos, colnames(initcenters)))
    U <- matrix(retval$U, ncol = ncenters,
                dimnames = list(rownames(x), 1 : ncenters))
    U <- U[order(perm),]
    clusterU <- apply(U, 1, which.max)
    clustersize <- as.integer(table(clusterU))
    radius <- as.double(retval$radius)

    retval <- list(centers = centers, radius = radius,
                   size = clustersize, cluster = clusterU,
                   iter = retval$iter - 1, membership = U,
                   withinerror = retval$ermin, call = match.call())
    class(retval) <- c("cshell", "fclust")
    return(retval)
}

## unfinished!
## ## predict.cshell <- function(object, newdata, ...){ ## xrows<-dim(newdata)[1] ## xcols<-dim(newdata)[2] ## ncenters <- object$centers ## cluster <- integer(xrows) ## clustersize <- integer(ncenters) ## f <- object$m ## radius <- object$radius ## if(dim(object$centers)[2] != xcols){ ## stop("Number of variables in cluster object and x are not the same!") ## } ## retval <- .C("cshell_assign", ## xrows = as.integer(xrows), ## xcols = as.integer(xcols), ## x = as.double(newdata), ## ncenters = as.integer(ncenters), ## centers = as.double(object$centers), ## dist = as.integer(object$dist-1), ## U = double(xrows*ncenters), ## f = as.double(f), ## radius = as.double(radius)) ## U <- retval$U ## U <- matrix(U, ncol=ncenters) ## clusterU <- apply(U,1,which.max) ## clustersize <- as.integer(table(clusterU)) ## object$iter <- NULL ## object$cluster <- clusterU ## object$size <- retval$clustersize ## object$membership <- U ## return(object) ## } e1071/R/sparse.R0000644000175100001440000000377514534322136012665 0ustar hornikusersread.matrix.csr <- function(file, fac = TRUE, ncol = NULL) { l <- strsplit(readLines(file), "[ ]+") ## extract y-values, if any y <- if (is.na(l[[1]][1]) || length(grep(":",l[[1]][1]))) NULL else vapply(l, function(x) x[1], character(1)) ## x-values rja <- do.call("rbind", lapply(l, function(x) do.call("rbind", strsplit(if (is.null(y)) x else x[-1], ":") ) ) ) ja <- as.integer(rja[,1]) ia <- cumsum(c(1, vapply(l, length, integer(1)) - !is.null(y))) max.ja <- max(ja) dimension <- c(length(l), if (is.null(ncol)) max.ja else max(ncol, max.ja)) x = new(getClass("matrix.csr", where = asNamespace("SparseM")), ra = as.numeric(rja[,2]), ja = ja, ia = as.integer(ia), dimension = as.integer(dimension)) if (length(y)) list(x = x, y = if (fac) as.factor(y) else as.numeric(y)) else x } write.matrix.csr <- function (x, file = "out.dat", y = NULL, fac = TRUE) { on.exit(sink()) x <- SparseM::as.matrix.csr(x) if (!is.null(y) & (length(y) != nrow(x))) stop(paste("Length of y (=", length(y), ") does not match number of rows of x (=", nrow(x), ")!", sep="")) sink(file) l <- length(x@ra) zerocols <- all(x@ja < ncol(x)) if (!is.null(y) && is.factor(y) && fac) y <- as.character(y) for (i in 1:nrow(x)) { if (!is.null(y)) cat (y[i],"") if ((x@ia[i] <= l) && (x@ia[i] < x@ia[i + 1])) { for (j in x@ia[i] : (x@ia[i + 1] - 1)) cat(x@ja[j], ":", x@ra[j], " ", sep="") if (zerocols) { cat(ncol(x), ":", 0, " ", sep="") zerocols <- FALSE } } cat("\n") } } na.fail.matrix.csr <- function(object, ...) 
{ if (any(is.na(object@ra))) stop("missing values in object") else return(object) } e1071/R/shortestPaths.R0000755000175100001440000000146614173734133014244 0ustar hornikusersallShortestPaths <- function(x){ x <- as.matrix(x) x[is.na(x)] <- .Machine$double.xmax x[is.infinite(x) & x>0] <- .Machine$double.xmax if(ncol(x) != nrow(x)) stop("x is not a square matrix") n <- ncol(x) z <- .C(R_e1071_floyd, as.integer(n), double(n^2), as.double(x), integer(n^2) ) z <- list(length = matrix(z[[2]], n), middlePoints = matrix(z[[4]]+1, n)) z$length[z$length == .Machine$double.xmax] <- NA z } extractPath <- function(obj, start, end){ z <- integer(0) path <- function(i, j){ k <- obj$middlePoints[i, j] if (k != 0) { path(i,k); z <<- c(z, k) path(k,j); } } path(start,end) c(start, z, end) } e1071/R/scale_data_frame.R0000644000175100001440000000110314024400426014572 0ustar hornikusersscale_data_frame <- function(x, center = TRUE, scale = TRUE) { if (isFALSE(center) && isFALSE(scale)) return(x) if (!is.data.frame(x)) return (scale(x, center, scale)) i <- vapply(x, is.numeric, NA) | vapply(x, is.logical, NA) if (any(i)) { x[, i] <- tmp <- scale.default(x[, i, drop = FALSE], na.omit(center), na.omit(scale)) if(center || !is.logical(center)) attr(x, "scaled:center")[i] <- attr(tmp, "scaled:center") if(scale || !is.logical(scale)) attr(x, "scaled:scale")[i] <- attr(tmp, "scaled:scale") } x } e1071/R/interpolate.R0000755000175100001440000000364214173734133013715 0ustar hornikusersinterpolate <- function(x, a, adims=lapply(dimnames(a), as.numeric), method="linear"){ if(is.vector(x)) x<- matrix(x, ncol=length(x)) if(!is.array(a)) stop("a is not an array") ad <- length(dim(a)) method <- pmatch(method, c("linear", "constant")) if (is.na(method)) stop("invalid interpolation method") if(any(unlist(lapply(adims, diff))<0)) stop("dimensions of a not ordered") retval <- rep(0, nrow(x)) bincombi <- bincombinations(ad) convexcoeff <- function(x, y) { ok <- y>0 x[ok] <- y[ok]-x[ok] x } for(n in 1:nrow(x)){ ## the "leftmost" corner of the enclosing hypercube leftidx <- rep(0, ad) xabstand <- rep(0, ad) aabstand <- rep(0, ad) for(k in 1:ad){ if(x[n,k] < min(adims[[k]]) || x[n,k] > max(adims[[k]])) stop("No extrapolation allowed") else{ leftidx[k] <- max(seq(adims[[k]])[adims[[k]] <= x[n,k]]) ## if at the right border, go one step to the left if(leftidx[k] == length(adims[[k]])) leftidx[k] <- leftidx[k] - 1 xabstand[k] <- x[n,k] - adims[[k]][leftidx[k]] aabstand[k] <- adims[[k]][leftidx[k]+1] - adims[[k]][leftidx[k]] } } coefs <- list() if(method==1){ for(k in 1:(2^ad)){ retval[n] <- retval[n] + element(a, leftidx+bincombi[k,]) * prod((aabstand- convexcoeff(xabstand, aabstand*bincombi[k,]))/aabstand) } } else if(method==2){ retval[n] <- element(a, leftidx) } } names(retval) <- rownames(x) retval } e1071/R/matchClasses.R0000755000175100001440000001504014173734133013774 0ustar hornikusersclassAgreement <- function (tab, match.names=FALSE) { n <- sum(tab) ni <- apply(tab, 1, sum) nj <- apply(tab, 2, sum) ## patch for matching factors if (match.names && !is.null(dimnames(tab))) { lev <- intersect (colnames (tab), rownames(tab)) p0 <- sum(diag(tab[lev,lev]))/n pc <- sum(ni[lev] * nj[lev])/n^2 } else { # cutoff larger dimension m <- min(length(ni), length(nj)) p0 <- sum(diag(tab[1:m, 1:m]))/n pc <- sum((ni[1:m] / n) * (nj[1:m] / n)) } n2 <- choose(n, 2) rand <- 1 + (sum(tab^2) - (sum(ni^2) + sum(nj^2))/2)/n2 nis2 <- sum(choose(ni[ni > 1], 2)) njs2 <- sum(choose(nj[nj > 1], 2)) crand <- (sum(choose(tab[tab > 1], 2)) - (nis2 * 
njs2)/n2)/((nis2 + njs2)/2 - (nis2 * njs2)/n2) list(diag = p0, kappa = (p0 - pc)/(1 - pc), rand = rand, crand = crand) } matchClasses <- function(tab, method = "rowmax", iter=1, maxexact=9, verbose=TRUE){ methods <- c("rowmax", "greedy", "exact") method <- pmatch(method, methods) rmax <- apply(tab,1,which.max) myseq <- 1:ncol(tab) cn <- colnames(tab) rn <- rownames(tab) if(is.null(cn)){ cn <- myseq } if(is.null(rn)){ rn <- myseq } if(method==1){ retval <- rmax } if(method==2 | method==3){ if(ncol(tab)!=nrow(tab)){ stop("Unique matching only for square tables.") } dimnames(tab) <- list(myseq, myseq) cmax <- apply(tab,2,which.max) retval <- rep(NA, ncol(tab)) names(retval) <- colnames(tab) baseok <- cmax[rmax]==myseq for(k in myseq[baseok]){ therow <- (tab[k,])[-rmax[k]] thecol <- (tab[, rmax[k]])[-k] if(max(outer(therow, thecol, "+")) < tab[k, rmax[k]]){ retval[k] <- rmax[k] } else{ baseok[k] <- FALSE } } if(verbose){ cat("Direct agreement:", sum(baseok), "of", ncol(tab), "pairs\n") } if(!all(baseok)){ if(method==3){ if(sum(!baseok)>maxexact){ method <- 2 warning(paste("Would need permutation of", sum(!baseok), "numbers, resetting to greedy search\n")) } else{ iter <- gamma(ncol(tab)-sum(baseok)+1) if(verbose){ cat("Iterations for permutation matching:", iter, "\n") } perm <- permutations(ncol(tab)-sum(baseok)) } } ## rest for permute matching if(any(baseok)){ rest <- myseq[-retval[baseok]] } else{ rest <- myseq } for(l in 1:iter){ newretval <- retval if(method == 2){ ok <- baseok while(sum(!ok)>1){ rest <- myseq[!ok] k <- sample(rest, 1) if(any(ok)){ rmax <- tab[k, -newretval[ok]] } else{ rmax <- tab[k,] } newretval[k] <- as.numeric(names(rmax)[which.max(rmax)]) ok[k] <- TRUE } newretval[!ok] <- myseq[-newretval[ok]] } else{ newretval[!baseok] <- rest[perm[l,]] } if(l>1){ agree <- sum(diag(tab[,newretval]))/sum(tab) if(agree>oldagree){ retval <- newretval oldagree <- agree } } else{ retval <- newretval agree <- oldagree <- sum(diag(tab[,newretval]))/sum(tab) } } } } if(verbose){ cat("Cases in matched pairs:", round(100*sum(diag(tab[,retval]))/sum(tab), 2), "%\n") } if(any(as.character(myseq)!=cn)){ retval <- cn[retval] } names(retval) <- rn retval } compareMatchedClasses <- function(x, y, method="rowmax", iter=1, maxexact=9, verbose=FALSE) { if(missing(y)){ retval <- list(diag=matrix(NA, nrow=ncol(x), ncol=ncol(x)), kappa=matrix(NA, nrow=ncol(x), ncol=ncol(x)), rand=matrix(NA, nrow=ncol(x), ncol=ncol(x)), crand=matrix(NA, nrow=ncol(x), ncol=ncol(x))) for(k in 1:(ncol(x)-1)){ for(l in (k+1):ncol(x)){ tab <- table(x[,k], x[,l]) m <- matchClasses(tab, method=method, iter=iter, verbose=verbose, maxexact=maxexact) a <- classAgreement(tab[,m]) retval$diag[k,l] <- a$diag retval$kappa[k,l] <- a$kappa retval$rand[k,l] <- a$rand retval$crand[k,l] <- a$crand } } } else{ x <- as.matrix(x) y <- as.matrix(y) retval <- list(diag=matrix(NA, nrow=ncol(x), ncol=ncol(y)), kappa=matrix(NA, nrow=ncol(x), ncol=ncol(y)), rand=matrix(NA, nrow=ncol(x), ncol=ncol(y)), crand=matrix(NA, nrow=ncol(x), ncol=ncol(y))) for(k in 1:ncol(x)){ for(l in 1:ncol(y)){ tab <- table(x[,k], y[,l]) m <- matchClasses(tab, method=method, iter=iter, verbose=verbose, maxexact=maxexact) a <- classAgreement(tab[,m]) retval$diag[k,l] <- a$diag retval$kappa[k,l] <- a$kappa retval$rand[k,l] <- a$rand retval$crand[k,l] <- a$crand } } } retval } permutations <- function(n) { if(n ==1) return(matrix(1)) else if(n<2) stop("n must be a positive integer") z <- matrix(1) for (i in 2:n) { x <- cbind(z, i) a <- c(1:i, 1:(i - 1)) z <- matrix(0, 
ncol=ncol(x), nrow=i*nrow(x)) z[1:nrow(x),] <- x for (j in 2:i-1) { z[j*nrow(x)+1:nrow(x),] <- x[, a[1:i+j]] } } dimnames(z) <- NULL z } e1071/R/skewness.R0000755000175100001440000000105014173734133013220 0ustar hornikusersskewness <- function(x, na.rm = FALSE, type = 3) { if(any(ina <- is.na(x))) { if(na.rm) x <- x[!ina] else return(NA) } if(!(type %in% (1 : 3))) stop("Invalid 'type' argument.") n <- length(x) x <- x - mean(x) y <- sqrt(n) * sum(x ^ 3) / (sum(x ^ 2) ^ (3/2)) if(type == 2) { if(n < 3) stop("Need at least 3 complete observations.") y <- y * sqrt(n * (n - 1)) / (n - 2) } else if(type == 3) y <- y * ((1 - 1 / n)) ^ (3/2) y } e1071/R/tune.R0000644000175100001440000004453414325173177012350 0ustar hornikuserstune.control <- function(random = FALSE, nrepeat = 1, repeat.aggregate = mean, sampling = c("cross", "fix", "bootstrap"), sampling.aggregate = mean, sampling.dispersion = sd, cross = 10, fix = 2 / 3, nboot = 10, boot.size = 9 / 10, best.model = TRUE, performances = TRUE, error.fun = NULL) { structure(list(random = random, nrepeat = nrepeat, repeat.aggregate = repeat.aggregate, sampling = match.arg(sampling), sampling.aggregate = sampling.aggregate, sampling.dispersion = sampling.dispersion, cross = cross, fix = fix, nboot = nboot, boot.size = boot.size, best.model = best.model, performances = performances, error.fun = error.fun ), class = "tune.control" ) } tune <- function(METHOD, train.x, train.y = NULL, data = list(), validation.x = NULL, validation.y = NULL, ranges = NULL, predict.func = predict, tunecontrol = tune.control(), ... ) { call <- match.call() ## internal helper functions resp <- function(formula, data) { model.response(model.frame(formula, data)) } classAgreement <- function (tab) { n <- sum(tab) if (!is.null(dimnames(tab))) { lev <- intersect(colnames(tab), rownames(tab)) p0 <- sum(diag(tab[lev, lev])) / n } else { m <- min(dim(tab)) p0 <- sum(diag(tab[1:m, 1:m])) / n } p0 } ## parameter handling if (tunecontrol$sampling == "cross") validation.x <- validation.y <- NULL useFormula <- is.null(train.y) if (useFormula && (is.null(data) || length(data) == 0)) data <- model.frame(train.x) if (is.vector(train.x)) train.x <- t(t(train.x)) if (is.data.frame(train.y)) train.y <- as.matrix(train.y) ## prepare training indices if (!is.null(validation.x)) tunecontrol$fix <- 1 n <- nrow(if (useFormula) data else train.x) perm.ind <- sample(n) if (tunecontrol$sampling == "cross") { if (tunecontrol$cross > n) stop(sQuote("cross"), " must not exceed sampling size!") if (tunecontrol$cross == 1) stop(sQuote("cross"), " must be greater than 1!") } train.ind <- if (tunecontrol$sampling == "cross") tapply(1:n, cut(1:n, breaks = tunecontrol$cross), function(x) perm.ind[-x]) else if (tunecontrol$sampling == "fix") list(perm.ind[1:trunc(n * tunecontrol$fix)]) else ## bootstrap lapply(1:tunecontrol$nboot, function(x) sample(n, n * tunecontrol$boot.size, replace = TRUE)) ## find best model parameters <- if (is.null(ranges)) data.frame(dummyparameter = 0) else expand.grid(ranges) p <- nrow(parameters) if (!is.logical(tunecontrol$random)) { if (tunecontrol$random < 1) stop("random must be a strictly positive integer") if (tunecontrol$random > p) tunecontrol$random <- p parameters <- parameters[sample(1:p, tunecontrol$random),] p <- nrow(parameters) } model.variances <- model.errors <- c() ## - loop over all models for (para.set in 1:p) { sampling.errors <- c() ## - loop over all training samples for (sample in 1:length(train.ind)) { repeat.errors <- c() ## - repeat training `nrepeat' 
times for (reps in 1:tunecontrol$nrepeat) { ## train one model pars <- if (is.null(ranges)) NULL else lapply(parameters[para.set,,drop = FALSE], unlist) model <- if (useFormula) do.call(METHOD, c(list(train.x, data = data, subset = train.ind[[sample]]), pars, list(...) ) ) else do.call(METHOD, c(list(train.x[train.ind[[sample]],], y = train.y[train.ind[[sample]]]), pars, list(...) ) ) ## predict validation set pred <- predict.func(model, if (!is.null(validation.x)) validation.x else if (useFormula) data[-train.ind[[sample]],,drop = FALSE] else if (inherits(train.x, "matrix.csr")) train.x[-train.ind[[sample]],] else train.x[-train.ind[[sample]],,drop = FALSE] ) ## compute performance measure true.y <- if (!is.null(validation.y)) validation.y else if (useFormula) { if (!is.null(validation.x)) resp(train.x, validation.x) else resp(train.x, data[-train.ind[[sample]],]) } else train.y[-train.ind[[sample]]] if (is.null(true.y)) true.y <- rep(TRUE, length(pred)) repeat.errors[reps] <- if (!is.null(tunecontrol$error.fun)) tunecontrol$error.fun(true.y, pred) else if ((is.logical(true.y) || is.factor(true.y)) && (is.logical(pred) || is.factor(pred) || is.character(pred))) ## classification error 1 - classAgreement(table(pred, true.y)) else if (is.numeric(true.y) && is.numeric(pred)) ## mean squared error crossprod(pred - true.y) / length(pred) else stop("Dependent variable has wrong type!") } sampling.errors[sample] <- tunecontrol$repeat.aggregate(repeat.errors) } model.errors[para.set] <- tunecontrol$sampling.aggregate(sampling.errors) model.variances[para.set] <- tunecontrol$sampling.dispersion(sampling.errors) } ## return results best <- which.min(model.errors) pars <- if (is.null(ranges)) NULL else lapply(parameters[best,,drop = FALSE], unlist) structure(list(best.parameters = parameters[best,,drop = FALSE], best.performance = model.errors[best], method = if (!is.character(METHOD)) deparse(substitute(METHOD)) else METHOD, nparcomb = nrow(parameters), train.ind = train.ind, sampling = switch(tunecontrol$sampling, fix = "fixed training/validation set", bootstrap = "bootstrapping", cross = if (tunecontrol$cross == n) "leave-one-out" else paste(tunecontrol$cross,"-fold cross validation", sep="") ), performances = if (tunecontrol$performances) cbind(parameters, error = model.errors, dispersion = model.variances), best.model = if (tunecontrol$best.model) { modeltmp <- if (useFormula) do.call(METHOD, c(list(train.x, data = data), pars, list(...))) else do.call(METHOD, c(list(x = train.x, y = train.y), pars, list(...))) call[[1]] <- as.symbol("best.tune") modeltmp$call <- call modeltmp } ), class = "tune" ) } best.tune <- function(...) { call <- match.call() modeltmp <- tune(...)$best.model modeltmp$call <- call modeltmp } print.tune <- function(x, ...) { if (x$nparcomb > 1) { cat("\nParameter tuning of ", sQuote(x$method), ":\n\n", sep="") cat("- sampling method:", x$sampling,"\n\n") cat("- best parameters:\n") tmp <- x$best.parameters rownames(tmp) <- "" print(tmp) cat("\n- best performance:", x$best.performance, "\n") cat("\n") } else { cat("\nError estimation of ", sQuote(x$method), " using ", x$sampling, ": ", x$best.performance, "\n\n", sep="") } } summary.tune <- function(object, ...) structure(object, class = "summary.tune") print.summary.tune <- function(x, ...) 
{ print.tune(x) if (!is.null(x$performances) && (x$nparcomb > 1)) { cat("- Detailed performance results:\n") print(x$performances) cat("\n") } } hsv_palette <- function(h = 2/3, from = 0.7, to = 0.2, v = 1) function(n) hsv(h = h, s = seq(from, to, length.out = n), v = v) plot.tune <- function(x, type=c("contour","perspective"), theta=60, col="lightblue", main = NULL, xlab = NULL, ylab = NULL, swapxy = FALSE, transform.x = NULL, transform.y = NULL, transform.z = NULL, color.palette = hsv_palette(), nlevels = 20, ...) { if (is.null(x$performances)) stop("Object does not contain detailed performance measures!") k <- ncol(x$performances) if (k > 4) stop("Cannot visualize more than 2 parameters") type = match.arg(type) if (is.null(main)) main <- paste("Performance of `", x$method, "'", sep="") if (k == 3) plot(x$performances[,1:2], type = "b", main = main) else { if (!is.null(transform.x)) x$performances[,1] <- transform.x(x$performances[,1]) if (!is.null(transform.y)) x$performances[,2] <- transform.y(x$performances[,2]) if (!is.null(transform.z)) x$performances[,3] <- transform.z(x$performances[,3]) if (swapxy) x$performances[,1:2] <- x$performances[,2:1] x <- xtabs(error~., data = x$performances[,-k]) if (is.null(xlab)) xlab <- names(dimnames(x))[1 + swapxy] if (is.null(ylab)) ylab <- names(dimnames(x))[2 - swapxy] if (type == "perspective") persp(x=as.double(rownames(x)), y=as.double(colnames(x)), z=x, xlab=xlab, ylab=ylab, zlab="accuracy", theta=theta, col=col, ticktype="detailed", main = main, ... ) else filled.contour(x=as.double(rownames(x)), y=as.double(colnames(x)), xlab=xlab, ylab=ylab, nlevels=nlevels, color.palette = color.palette, main = main, x, ...) } } ############################################# ## convenience functions for some methods ############################################# tune.svm <- function(x, y = NULL, data = NULL, degree = NULL, gamma = NULL, coef0 = NULL, cost = NULL, nu = NULL, class.weights = NULL, epsilon = NULL, ...) { call <- match.call() call[[1]] <- as.symbol("best.svm") ranges <- list(degree = degree, gamma = gamma, coef0 = coef0, cost = cost, nu = nu, class.weights = class.weights, epsilon = epsilon) ranges[vapply(ranges, is.null, NA)] <- NULL if (length(ranges) < 1) ranges = NULL modeltmp <- if (inherits(x, "formula")) tune("svm", train.x = x, data = data, ranges = ranges, ...) else tune("svm", train.x = x, train.y = y, ranges = ranges, ...) if (!is.null(modeltmp$best.model)) modeltmp$best.model$call <- call modeltmp } best.svm <- function(x, tunecontrol = tune.control(), ...) { call <- match.call() tunecontrol$best.model = TRUE modeltmp <- tune.svm(x, ..., tunecontrol = tunecontrol)$best.model modeltmp$call <- call modeltmp } tune.nnet <- function(x, y = NULL, data = NULL, size = NULL, decay = NULL, trace = FALSE, tunecontrol = tune.control(nrepeat = 5), ...) { call <- match.call() call[[1]] <- as.symbol("best.nnet") loadNamespace("nnet") predict.func <- predict useFormula <- inherits(x, "formula") if (is.factor(y) || (useFormula && is.factor(model.response(model.frame(formula = x, data = data)))) ) predict.func = function(...) predict(..., type = "class") ranges <- list(size = size, decay = decay) ranges[vapply(ranges, is.null, NA)] <- NULL if (length(ranges) < 1) ranges = NULL modeltmp <- if (useFormula) tune("nnet", train.x = x, data = data, ranges = ranges, predict.func = predict.func, tunecontrol = tunecontrol, trace = trace, ...) 
else tune("nnet", train.x = x, train.y = y, ranges = ranges, predict.func = predict.func, tunecontrol = tunecontrol, trace = trace, ...) if (!is.null(modeltmp$best.model)) modeltmp$best.model$call <- call modeltmp } best.nnet <- function(x, tunecontrol = tune.control(nrepeat = 5), ...) { call <- match.call() tunecontrol$best.model = TRUE modeltmp <- tune.nnet(x, ..., tunecontrol = tunecontrol)$best.model modeltmp$call <- call modeltmp } tune.randomForest <- function(x, y = NULL, data = NULL, nodesize = NULL, mtry = NULL, ntree = NULL, ...) { call <- match.call() call[[1]] <- as.symbol("best.randomForest") loadNamespace("randomForest") ranges <- list(nodesize = nodesize, mtry = mtry, ntree = ntree) ranges[vapply(ranges, is.null, NA)] <- NULL if (length(ranges) < 1) ranges = NULL modeltmp <- if (inherits(x, "formula")) tune("randomForest", train.x = x, data = data, ranges = ranges, ...) else tune("randomForest", train.x = x, train.y = y, ranges = ranges, ...) if (!is.null(modeltmp$best.model)) modeltmp$best.model$call <- call modeltmp } best.randomForest <- function(x, tunecontrol = tune.control(), ...) { call <- match.call() tunecontrol$best.model = TRUE modeltmp <- tune.randomForest(x, ..., tunecontrol = tunecontrol)$best.model modeltmp$call <- call modeltmp } tune.gknn <- function(x, y = NULL, data = NULL, k = NULL, ...) { call <- match.call() call[[1]] <- as.symbol("best.gknn") ranges <- list(k = k) ranges[vapply(ranges, is.null, NA)] <- NULL if (length(ranges) < 1) ranges = NULL modeltmp <- if (inherits(x, "formula")) tune("gknn", train.x = x, data = data, ranges = ranges, ...) else tune("gknn", train.x = x, train.y = y, ranges = ranges, ...) if (!is.null(modeltmp$best.model)) modeltmp$best.model$call <- call modeltmp } best.gknn <- function(x, tunecontrol = tune.control(), ...) { call <- match.call() tunecontrol$best.model = TRUE modeltmp <- tune.gknn(x, ..., tunecontrol = tunecontrol)$best.model modeltmp$call <- call modeltmp } knn.wrapper <- function(x, y, k = 1, l = 0, ...) list(train = x, cl = y, k = k, l = l, ...) tune.knn <- function(x, y, k = NULL, l = NULL, ...) { loadNamespace("class") ranges <- list(k = k, l = l) ranges[vapply(ranges, is.null, NA)] <- NULL if (length(ranges) < 1) ranges = NULL tune("knn.wrapper", train.x = x, train.y = y, ranges = ranges, predict.func = function(x, ...) knn(train = x$train, cl = x$cl, k = x$k, l = x$l, ...), ...) } rpart.wrapper <- function(formula, minsplit=20, minbucket=round(minsplit/3), cp=0.01, maxcompete=4, maxsurrogate=5, usesurrogate=2, xval=10, surrogatestyle=0, maxdepth=30, ...) rpart::rpart(formula, control = rpart::rpart.control(minsplit=minsplit, minbucket=minbucket, cp=cp, maxcompete=maxcompete, maxsurrogate=maxsurrogate, usesurrogate=usesurrogate, xval=xval, surrogatestyle=surrogatestyle, maxdepth=maxdepth), ... ) tune.rpart <- function(formula, data, na.action = na.omit, minsplit=NULL, minbucket=NULL, cp=NULL, maxcompete=NULL, maxsurrogate=NULL, usesurrogate=NULL, xval=NULL, surrogatestyle=NULL, maxdepth=NULL, predict.func = NULL, ...) { call <- match.call() call[[1]] <- as.symbol("best.rpart") loadNamespace("rpart") ranges <- list(minsplit=minsplit, minbucket=minbucket, cp=cp, maxcompete=maxcompete, maxsurrogate=maxsurrogate, usesurrogate=usesurrogate, xval=xval, surrogatestyle=surrogatestyle, maxdepth=maxdepth) ranges[vapply(ranges, is.null, NA)] <- NULL if (length(ranges) < 1) ranges <- NULL predict.func <- if (is.factor(model.response(model.frame(formula, data)))) function(...) 
predict(..., type = "class") else predict modeltmp <- tune("rpart.wrapper", train.x = formula, data = data, ranges = ranges, predict.func = predict.func, na.action = na.action, ...) if (!is.null(modeltmp$best.model)) modeltmp$best.model$call <- call modeltmp } best.rpart <- function(formula, tunecontrol = tune.control(), ...) { call <- match.call() tunecontrol$best.model = TRUE modeltmp <- tune.rpart(formula, ..., tunecontrol = tunecontrol)$best.model modeltmp$call <- call modeltmp } e1071/R/cmeans.R0000644000175100001440000001131014246371157012625 0ustar hornikuserscmeans <- function(x, centers, iter.max = 100, verbose = FALSE, dist = "euclidean", method = "cmeans", m = 2, rate.par = NULL, weights = 1, control = list()) { x <- as.matrix(x) xrows <- nrow(x) xcols <- ncol(x) if(missing(centers)) stop("Argument 'centers' must be a number or a matrix.") dist <- pmatch(dist, c("euclidean", "manhattan")) if(is.na(dist)) stop("invalid distance") if(dist == -1) stop("ambiguous distance") method <- pmatch(method, c("cmeans", "ufcl")) if(is.na(method)) stop("invalid clustering method") if(method == -1) stop("ambiguous clustering method") if(length(centers) == 1) { ncenters <- centers centers <- x[sample(1 : xrows, ncenters), , drop = FALSE] if(any(duplicated(centers))) { cn <- unique(x) mm <- nrow(cn) if(mm < ncenters) stop("More cluster centers than distinct data points.") centers <- cn[sample(1 : mm, ncenters), , drop = FALSE] } } else { centers <- as.matrix(centers) if(any(duplicated(centers))) stop("Initial centers are not distinct.") cn <- NULL ncenters <- nrow(centers) if (xrows < ncenters) stop("More cluster centers than data points.") } if(xcols != ncol(centers)) stop("Must have same number of columns in 'x' and 'centers'.") if(iter.max < 1) stop("Argument 'iter.max' must be positive.") if(method == 2) { if(missing(rate.par)) { rate.par <- 0.3 } } reltol <- control$reltol if(is.null(reltol)) reltol <- sqrt(.Machine$double.eps) if(reltol <= 0) stop("Control parameter 'reltol' must be positive.") if(any(weights < 0)) stop("Argument 'weights' has negative elements.") if(!any(weights > 0)) stop("Argument 'weights' has no positive elements.") weights <- rep_len(weights, xrows) weights <- weights / sum(weights) ## ## Do we really want to do this? 
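    ## (The permutation below randomizes the order in which rows are
    ## presented to the C routine -- presumably so that the result does
    ## not depend on the input order, e.g. for the online "ufcl" updates;
    ## it is undone again below via u <- u[order(perm), ] before the
    ## memberships are returned.)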
perm <- sample(xrows) x <- x[perm, ] weights <- weights[perm] ## initcenters <- centers pos <- as.factor(1 : ncenters) rownames(centers) <- pos if(method == 1) { retval <- .C(R_cmeans, as.double(x), as.integer(xrows), as.integer(xcols), centers = as.double(centers), as.integer(ncenters), as.double(weights), as.double(m), as.integer(dist - 1), as.integer(iter.max), as.double(reltol), as.integer(verbose), u = double(xrows * ncenters), ermin = double(1), iter = integer(1)) } else if(method == 2) { retval <- .C(R_ufcl, x = as.double(x), as.integer(xrows), as.integer(xcols), centers = as.double(centers), as.integer(ncenters), as.double(weights), as.double(m), as.integer(dist - 1), as.integer(iter.max), as.double(reltol), as.integer(verbose), as.double(rate.par), u = double(xrows * ncenters), ermin = double(1), iter = integer(1) ) } centers <- matrix(retval$centers, ncol = xcols, dimnames = list(1 : ncenters, colnames(initcenters))) u <- matrix(retval$u, ncol = ncenters, dimnames = list(rownames(x), 1 : ncenters)) u <- u[order(perm), ] iter <- retval$iter - 1 withinerror <- retval$ermin cluster <- apply(u, 1, which.max) clustersize <- as.integer(table(cluster)) retval <- list(centers = centers, size = clustersize, cluster = cluster, membership = u, iter = iter, withinerror = withinerror, call = match.call()) class(retval) <- c("fclust") return(retval) } print.fclust <- function(x, ...) { cat("Fuzzy c-means clustering with", length(x$size), "clusters\n") cat("\nCluster centers:\n") print(x$centers, ...) cat("\nMemberships:\n") print(x$membership, ...) cat("\nClosest hard clustering:\n") print(x$cluster, ...) cat("\nAvailable components:\n") print(names(x), ...) invisible(x) } e1071/R/hanning.window.R0000755000175100001440000000022714173734133014313 0ustar hornikusershanning.window <- function (n) { if (n == 1) c <- 1 else { n <- n-1 c <- 0.5 - 0.5*cos(2*pi*(0:n)/n) } return(c) } e1071/R/rectangle.window.R0000755000175100001440000000005614173734133014635 0ustar hornikusersrectangle.window <- function (n) rep (1, n) e1071/R/rbridge.R0000755000175100001440000000026514173734133013003 0ustar hornikusersrbridge <- function(end=1, frequency=1000) { z <- rwiener(end=end, frequency=frequency) ts(z - time(z)*as.vector(z)[frequency], start=1/frequency, frequency=frequency) } e1071/R/ica.R0000755000175100001440000000306614173734133012123 0ustar hornikusersica <- function(X, lrate, epochs=100, ncomp=dim(X)[2], fun="negative") { if (!is.matrix(X)) { if (is.data.frame(X)) X <- as.matrix(X) else stop("ica: X must be a matrix or a data frame") } if (!is.numeric(X)) stop("ica: X contains non numeric elements") m <- dim(X)[1] n <- dim(X)[2] Winit <- matrix(rnorm(n*ncomp), ncomp, n) W <- Winit if (!is.function(fun)) { funlist <- c("negative kurtosis", "positive kurtosis", "4th moment") p <- pmatch(fun, funlist) if (is.na(p)) stop("ica: invalid fun") funname <- funlist[p] if (p == 1) fun <- tanh else if (p == 2) fun <- function(x) {x - tanh(x)} else if (p == 3) fun <- function(x) {sign(x)*x^2} } else funname <- as.character(substitute(fun)) for (i in 1:epochs) for (j in 1:m) { x <- X[j,, drop=FALSE] y <- W%*%t(x) gy <- fun(y) W <- W + lrate*gy%*%(x-t(gy)%*%W) } colnames(W) <- NULL pr <- X%*%t(W) retval <- list(weights = W, projection = pr, epochs = epochs, fun = funname, lrate = lrate, initweights = Winit) class(retval) <- "ica" return(retval) } print.ica <- function(x, ...) 
{
    cat(x$epochs, "training steps with a learning rate of", x$lrate, "\n")
    cat("Function used:", x$fun, "\n\n")
    cat("Weight matrix\n")
    print(x$weights, ...)
}

plot.ica <- function(x, ...)
    pairs(x$projection, ...)
e1071/vignettes/0000755000175100001440000000000015120610017013026 5ustar hornikuserse1071/vignettes/svmdoc.Rnw0000644000175100001440000004447115120607775015034 0ustar hornikusers\documentclass[a4paper]{article}
\usepackage{hyperref, graphicx, color, alltt, doi}
\usepackage{Sweave}
\usepackage[round]{natbib}
\definecolor{Red}{rgb}{0.7,0,0}
\definecolor{Blue}{rgb}{0,0,0.8}
\definecolor{hellgrau}{rgb}{0.55,0.55,0.55}
\newcommand{\pkg}[1]{\texttt{#1}}
\newenvironment{smallexample}{\begin{alltt}\small}{\end{alltt}}

\begin{document}
%\VignetteIndexEntry{Support Vector Machines---the Interface to libsvm in package e1071}
%\VignetteDepends{e1071,randomForest,xtable}
%\VignetteKeywords{classification, regression, machine learning, benchmarking, support vector machines}
%\VignettePackage{e1071}

\SweaveOpts{engine=R,eps=FALSE}
\setkeys{Gin}{width=0.8\textwidth}

\title{Support Vector Machines \footnote{A smaller version of this article
    appeared in R-News, Vol.1/3, 9.2001}\\
  \large The Interface to \texttt{libsvm} in package \pkg{e1071}}
\author{by David Meyer\\
  FH Technikum Wien, Austria\\
\url{mailto:David.Meyer@R-Project.org}
}
\maketitle
\sloppy

``Hype or Hallelujah?'' is the provocative title used by
\cite{svm:bennett+campbell:2000} in an overview of Support Vector Machines
(SVM). SVMs are currently a hot topic in the machine learning community,
generating the same kind of enthusiasm that Artificial Neural Networks did
before them. Far from being a panacea, SVMs nevertheless represent a
powerful technique for general (nonlinear) classification, regression and
outlier detection with an intuitive model representation.

The package \pkg{e1071} offers an interface to the
award-winning\footnote{The library won the IJCNN 2001 Challenge by solving
  two of three problems: the Generalization Ability Challenge (GAC) and the
  Text Decoding Challenge (TDC). For more information, see:
  \url{https://www.csie.ntu.edu.tw/~cjlin/papers/ijcnn.ps.gz}.}
C++-implementation by Chih-Chung Chang and Chih-Jen Lin, \texttt{libsvm}
(current version: 2.6), featuring:

\begin{itemize}
\item $C$- and $\nu$-classification
\item one-class-classification (novelty detection)
\item $\epsilon$- and $\nu$-regression
\end{itemize}

and includes:

\begin{itemize}
\item linear, polynomial, radial basis function, and sigmoidal kernels
\item formula interface
\item $k$-fold cross validation
\end{itemize}

For further implementation details on \texttt{libsvm}, see
\cite{svm:chang+lin:2001}.

\section*{Basic concept}

SVMs were developed by \cite{svm:cortes+vapnik:1995} for binary
classification.
Their approach may be roughly sketched as follows:

\begin{description}
\item[Class separation:] basically, we are looking for the optimal
  separating hyperplane between the two classes by maximizing the
  \textit{margin} between the classes' closest points (see Figure
  \ref{fig:svm1})---the points lying on the boundaries are called
  \textit{support vectors}, and the middle of the margin is our optimal
  separating hyperplane;
\item[Overlapping classes:] data points on the ``wrong'' side of the
  discriminant margin are weighted down to reduce their influence
  (\textit{``soft margin''});
\item[Nonlinearity:] when we cannot find a \textit{linear} separator, data
  points are projected into a (usually) higher-dimensional space where they
  effectively become linearly separable (this projection is realised via
  \textit{kernel techniques});
\item[Problem solution:] the whole task can be formulated as a quadratic
  optimization problem which can be solved by known techniques.
\end{description}

\noindent A program able to perform all these tasks is called a
\textit{Support Vector Machine}.

\begin{figure}[htbp]
  \begin{center}
    \includegraphics[width=8cm]{svm}
    \caption{Classification (linearly separable case)}
    \label{fig:svm1}
  \end{center}
\end{figure}

Several extensions have been developed; the ones currently included in
\texttt{libsvm} are:

\begin{description}
\item[$\nu$-classification:] this model allows for more control over the
  number of support vectors \cite[see][]{svm:scholkopf+smola+williamson:2000}
  by specifying an additional parameter $\nu$ which approximates the
  fraction of support vectors;
\item[One-class-classification:] this model tries to find the support of a
  distribution and thus allows for outlier/novelty detection;
\item[Multi-class classification:] basically, SVMs can only solve binary
  classification problems. To allow for multi-class classification,
  \texttt{libsvm} uses the \textit{one-against-one} technique by fitting
  all binary subclassifiers and finding the correct class by a voting
  mechanism;
\item[$\epsilon$-regression:] here, the data points lie \textit{in between}
  the two borders of the margin which is maximized under suitable
  conditions to avoid outlier inclusion;
\item[$\nu$-regression:] with modifications of the regression model
  analogous to the classification case.
\end{description}

\section*{Usage in R}

The R interface to \texttt{libsvm} in package \pkg{e1071}, \texttt{svm()},
was designed to be as intuitive as possible. Models are fitted and new data
are predicted as usual, and both the vector/matrix and the formula
interface are implemented. As expected for R's statistical functions, the
engine tries to be smart about the mode to be chosen, using the dependent
variable's type ($y$): if $y$ is a factor, the engine switches to
classification mode, otherwise, it behaves as a regression machine; if $y$
is omitted, the engine assumes a novelty detection task.

\section*{Examples}

In the following two examples, we demonstrate the practical use of
\texttt{svm()} along with a comparison to classification and regression
forests as implemented in \texttt{randomForest()}.

\subsection*{Classification}

In this example, we use the glass data from the UCI Repository of Machine
Learning Databases for classification \citep{svm:blake+merz:1998},
converted to R format by Friedrich Leisch in the late 1990s. The current
version of the UC Irvine Machine Learning Repository Glass Identification
data set is available from \doi{10.24432/C5WW2P}.
The task is to predict the type of a glass on the basis of its chemical
analysis. We start by splitting the data into a train and test set:

<<>>=
library(e1071)
library(randomForest)
data(Glass, package="mlbench")

## split data into a train and test set
index <- 1:nrow(Glass)
N <- trunc(length(index)/3)
testindex <- sample(index, N)
testset <- Glass[testindex,]
trainset <- Glass[-testindex,]
@

Both for SVM and randomForest (via \texttt{randomForest()}), we fit the
model and predict the test set values:

<<>>=
## svm
svm.model <- svm(Type ~ ., data = trainset, cost = 100, gamma = 1)
svm.pred <- predict(svm.model, testset[,-10])
@

(The dependent variable, \texttt{Type}, has column number 10.
\texttt{cost} is a general penalizing parameter for $C$-classification and
\texttt{gamma} is the radial basis function-specific kernel parameter.)

<<>>=
## randomForest
rf.model <- randomForest(Type ~ ., data = trainset)
rf.pred <- predict(rf.model, testset[,-10])
@

A cross-tabulation of the true versus the predicted values yields:

<<>>=
## compute svm confusion matrix
table(pred = svm.pred, true = testset[,10])

## compute randomForest confusion matrix
table(pred = rf.pred, true = testset[,10])
@

%% results table
<>=
library(xtable)
rf.acc <- c()
sv.acc <- c()
rf.kap <- c()
sv.kap <- c()
reps <- 10
for (i in 1:reps) {
    ## split data into a train and test set
    index <- 1:nrow(Glass)
    N <- trunc(length(index)/3)
    testindex <- sample(index, N)
    testset <- na.omit(Glass[testindex,])
    trainset <- na.omit(Glass[-testindex,])

    ## svm
    svm.model <- svm(Type ~ ., data = trainset, cost = 8, gamma = 0.0625)
    svm.pred <- predict(svm.model, testset[,-10])
    tab <- classAgreement(table(svm.pred, testset[,10]))
    sv.acc[i] <- tab$diag
    sv.kap[i] <- tab$kappa

    ## randomForest
    rf.model <- randomForest(Type ~ ., data = trainset)
    rf.pred <- predict(rf.model, testset[,-10])
    tab <- classAgreement(table(rf.pred, testset[,10]))
    rf.acc[i] <- tab$diag
    rf.kap[i] <- tab$kappa
}
x <- rbind(summary(sv.acc), summary(rf.acc), summary(sv.kap), summary(rf.kap))
rownames <- c()
tab <- cbind(rep(c("svm","randomForest"),2), round(x,2))
colnames(tab)[1] <- "method"
rownames(tab) <- c("Accuracy","","Kappa"," ")
xtable(tab, label = "tab:class", caption = "Performance of \\texttt{svm()} and\
\\texttt{randomForest()} for classification (10 replications)")
@

\noindent Finally, we compare the performance of the two methods by
computing the respective accuracy rates and the kappa indices (as computed
by \texttt{classAgreement()} also contained in package \pkg{e1071}). In
Table \ref{tab:class}, we summarize the results of \Sexpr{reps}
replications---Support Vector Machines show worse results.

\subsection*{Non-linear $\epsilon$-Regression}

The regression capabilities of SVMs are demonstrated on the ozone data.
Again, we split the data into a train and test set.
<<>>=
library(e1071)
library(randomForest)
data(Ozone, package="mlbench")

## split data into a train and test set
index <- 1:nrow(Ozone)
N <- trunc(length(index)/3)
testindex <- sample(index, N)
testset <- na.omit(Ozone[testindex,-3])
trainset <- na.omit(Ozone[-testindex,-3])

## svm
svm.model <- svm(V4 ~ ., data = trainset, cost = 1000, gamma = 0.0001)
svm.pred <- predict(svm.model, testset[,-3])
sqrt(crossprod(svm.pred - testset[,3]) / N)

## random Forest
rf.model <- randomForest(V4 ~ ., data = trainset)
rf.pred <- predict(rf.model, testset[,-3])
sqrt(crossprod(rf.pred - testset[,3]) / N)
@

<>=
rf.res <- c()
sv.res <- c()
reps <- 10
for (i in 1:reps) {
    ## split data into a train and test set
    index <- 1:nrow(Ozone)
    N <- trunc(length(index)/3)
    testindex <- sample(index, N)
    testset <- na.omit(Ozone[testindex,-3])
    trainset <- na.omit(Ozone[-testindex,-3])

    ## svm
    svm.model <- svm(V4 ~ ., data = trainset, cost = 1000, gamma = 0.0001)
    svm.pred <- predict(svm.model, testset[,-3])
    sv.res[i] <- sqrt(crossprod(svm.pred - testset[,3]) / N)

    ## randomForest
    rf.model <- randomForest(V4 ~ ., data = trainset)
    rf.pred <- predict(rf.model, testset[,-3])
    rf.res[i] <- sqrt(crossprod(rf.pred - testset[,3]) / N)
}
xtable(rbind(svm = summary(sv.res), randomForest = summary(rf.res)),
       label = "tab:reg", caption = "Performance of \\texttt{svm()} and\
\\texttt{randomForest()} for regression (Root Mean Squared Error, 10 replications)")
@

\noindent We compare the two methods by the root mean squared error
(RMSE)---see Table \ref{tab:reg} for a summary of \Sexpr{reps}
replications. In this case, \texttt{svm()} does a better job than
\texttt{randomForest()}.

\section*{Elements of the \texttt{svm} object}

The function \texttt{svm()} returns an object of class ``\texttt{svm}'',
which partly includes the following components:

\begin{description}
\item[\textbf{\texttt{SV}:}] matrix of support vectors found;
\item[\textbf{\texttt{labels}:}] their labels in classification mode;
\item[\textbf{\texttt{index}:}] index of the support vectors in the input
  data (could be used e.g., for their visualization as part of the data
  set).
\end{description}

If the cross-classification feature is enabled, the \texttt{svm} object
will contain some additional information described below.

\section*{Other main features}

\begin{description}
\item[Class Weighting:] if one wishes to weight the classes differently
  (e.g., in case of asymmetric class sizes to avoid possibly
  overproportional influence of bigger classes on the margin), weights may
  be specified in a vector with named components. In case of two classes A
  and B, we could use something like: \texttt{m <- svm(x, y, class.weights
    = c(A = 0.3, B = 0.7))}
\item[Cross-classification:] to assess the quality of the training result,
  we can perform a $k$-fold cross-classification on the training data by
  setting the parameter \texttt{cross} to $k$ (default: 0). The
  \texttt{svm} object will then contain some additional values, depending
  on whether classification or regression is performed.
  Values for classification:
  \begin{description}
  \item[\texttt{accuracies}:] vector of accuracy values for each of the
    $k$ predictions
  \item[\texttt{tot.accuracy}:] total accuracy
  \end{description}
  Values for regression:
  \begin{description}
  \item[\texttt{MSE}:] vector of mean squared errors for each of the $k$
    predictions
  \item[\texttt{tot.MSE}:] total mean squared error
  \item[\texttt{scorrcoef}:] Squared correlation coefficient (of the
    predicted and the true values of the dependent variable)
  \end{description}
\end{description}

\section*{Tips on practical use}

\begin{itemize}
\item Note that SVMs may be very sensitive to the proper choice of
  parameters, so always check a range of parameter combinations, at least
  on a reasonable subset of your data.
\item For classification tasks, you will most likely use
  $C$-classification with the RBF kernel (default), because of its good
  general performance and the small number of parameters (only two: $C$
  and $\gamma$). The authors of \pkg{libsvm} suggest trying small and
  large values for $C$---like 1 to 1000---first, then deciding which are
  better for the data by cross validation, and finally trying several
  $\gamma$'s for the better $C$'s.
\item However, better results are obtained by using a grid search over all
  parameters. For this, we recommend using the \texttt{tune.svm()}
  function in \pkg{e1071}.
\item Be careful with large datasets as training times may increase rather
  fast.
\item Scaling of the data usually drastically improves the results.
  Therefore, \texttt{svm()} scales the data by default.
\end{itemize}

\section*{Model Formulations and Kernels}

Dual representation of models implemented:

\begin{itemize}
\item $C$-classification:\\
  \begin{eqnarray}
    \min_\alpha&&\frac{1}{2}\alpha^\top \mathbf{Q}
    \alpha-\mathbf{e}^\top\alpha \nonumber\\
    \mbox{s.t.} &&0\le\alpha_i\le C,~i=1,\ldots,l,\\
    &&\mathbf{y}^\top\alpha=0~, \nonumber
  \end{eqnarray}
  where $\mathbf{e}$ is the unity vector, $C$ is the upper bound,
  $\mathbf{Q}$ is an $l$ by $l$ positive semidefinite matrix,
  $Q_{ij} \equiv y_i y_j K(x_i, x_j)$, and
  $K(x_i, x_j) \equiv \phi(x_i)^\top\phi(x_j)$ is the kernel.
\item $\nu$-classification:\\
  \begin{eqnarray}
    \min_\alpha&&\frac{1}{2}\alpha^\top \mathbf{Q} \alpha \nonumber\\
    \mbox{s.t.}&&0\le\alpha_i\le 1/l,~i=1,\ldots,l,\\
    &&\mathbf{e}^\top \alpha \ge \nu, \nonumber\\
    &&\mathbf{y}^\top\alpha=0~. \nonumber
  \end{eqnarray}
  where $\nu \in (0,1]$.
\item one-class classification:\\
  \begin{eqnarray}
    \min_\alpha&&\frac{1}{2}\alpha^\top \mathbf{Q} \alpha \nonumber\\
    \mbox{s.t.} &&0\le\alpha_i\le 1/(\nu l),~i=1,\ldots,l,\\
    &&\mathbf{e}^\top\alpha=1~,\nonumber
  \end{eqnarray}
\item $\epsilon$-regression:\\
  \begin{eqnarray}
    \min_{\alpha, \alpha^*}&&\frac{1}{2}(\alpha-\alpha^*)^\top \mathbf{Q}
    (\alpha-\alpha^*) + \nonumber\\
    &&\epsilon\sum_{i=1}^{l}(\alpha_i+\alpha_i^*) +
    \sum_{i=1}^{l}y_i(\alpha_i-\alpha_i^*) \nonumber\\
    \mbox{s.t.} &&0\le\alpha_i, \alpha_i^*\le C,~i=1,\ldots,l,\\
    &&\sum_{i=1}^{l}(\alpha_i-\alpha_i^*)=0~.\nonumber
  \end{eqnarray}
\item $\nu$-regression:\\
  \begin{eqnarray}
    \min_{\alpha, \alpha^*}&&\frac{1}{2}(\alpha-\alpha^*)^\top \mathbf{Q}
    (\alpha-\alpha^*) + \mathbf{z}^\top(\alpha-\alpha^*) \nonumber\\
    \mbox{s.t.} &&0\le\alpha_i, \alpha_i^*\le C,~i=1,\ldots,l,\\
    &&\mathbf{e}^\top(\alpha-\alpha^*)=0\nonumber\\
    &&\mathbf{e}^\top(\alpha+\alpha^*)=C\nu~.\nonumber
  \end{eqnarray}
\end{itemize}

\noindent Available kernels:\\
\\
\noindent
\begin{table}[h]
  \centering
  \begin{tabular}{|l|l|l|}
    \hline
    kernel & formula & parameters \\
    \hline \hline
    linear & $\bf u^\top v$& (none) \\
    polynomial & $(\gamma \mathbf{u^\top v}+c_0)^d$ & $\gamma, d, c_0$\\
    radial basis fct. & $\exp\{-\gamma|\mathbf{u-v}|^2\}$&$\gamma$\\
    sigmoid & $\tanh\{\gamma \mathbf{u^\top v}+c_0\}$ &$\gamma, c_0$\\
    \hline
  \end{tabular}
\end{table}

\section*{Conclusion}

We hope that \texttt{svm} provides an easy-to-use interface to the world
of SVMs, which nowadays have become a popular technique in flexible
modelling. There are some drawbacks, though: SVMs scale rather badly with
the data size due to the quadratic optimization algorithm and the kernel
transformation. Furthermore, the correct choice of kernel parameters is
crucial for obtaining good results, which practically means that an
extensive search must be conducted on the parameter space before results
can be trusted, and this often complicates the task (the authors of
\texttt{libsvm} currently conduct some work on methods of efficient
automatic parameter selection). Finally, the current implementation is
optimized for the radial basis function kernel only, which clearly might
be suboptimal for your data.

\begin{thebibliography}{6}

\bibitem[Bennett \& Campbell(2000)]{svm:bennett+campbell:2000}
Bennett, K.~P. \& Campbell, C. (2000).
\newblock Support vector machines: Hype or hallelujah?
\newblock \emph{SIGKDD Explorations}, \textbf{2}(2).
\newblock \url{http://www.acm.org/sigs/sigkdd/explorations/issue2-2/bennett.pdf}.

\bibitem[Blake \& Merz(1998)]{svm:blake+merz:1998}
Blake, C.L. \& Merz, C.J. (1998).
\newblock UCI Repository of Machine Learning Databases.
\newblock Irvine, CA: University of California, Irvine, Department of
  Information and Computer Science.
\newblock Formerly available from
  \texttt{http://www.ics.uci.edu/~mlearn/MLRepository.html}.

\bibitem[Chang \& Lin(2001)]{svm:chang+lin:2001}
Chang, C.-C. \& Lin, C.-J. (2001).
\newblock {LIBSVM}: a library for support vector machines.
\newblock Software available at
  \url{https://www.csie.ntu.edu.tw/~cjlin/libsvm/}, detailed documentation
  (algorithms, formulae, \dots) can be found in
  \url{https://www.csie.ntu.edu.tw/~cjlin/papers/libsvm.ps.gz}

\bibitem[Cortes \& Vapnik(1995)]{svm:cortes+vapnik:1995}
Cortes, C. \& Vapnik, V. (1995).
\newblock Support-vector network.
\newblock \emph{Machine Learning}, \textbf{20}, 1--25.
\bibitem[Sch\"olkopf et~al.(2000)Sch\"olkopf, Smola, Williamson, \& Bartlett]{svm:scholkopf+smola+williamson:2000} Sch\"olkopf, B., Smola, A., Williamson, R.~C., \& Bartlett, P. (2000). \newblock New support vector algorithms. \newblock \emph{Neural Computation}, \textbf{12}, 1207--1245. \bibitem[Vapnik(1998)]{svm:vapnik:1998} Vapnik, V. (1998). \newblock \emph{Statistical learning theory}. \newblock New York: Wiley. \end{thebibliography} \end{document} e1071/vignettes/svm.pdf0000755000175100001440000006265414173734135014365 0ustar hornikusers%PDF-1.2 %Çì¢ 4 0 obj <> stream xœ•½KvË’4ß¿¢†f@‘÷Ì5EBbÂÜ‚±elctvC7FñçÉç‰ëzÛ»ê¸Ôû«XoFÞâ–‘‘å»~üOÿûoÿüãŸþ(ßç«Þÿýóø£~ýÇ?ênó{Ÿ¯öÌú]Æ×ŸÒ¿{ýú›@&ó»Ûß­°¿çÈø~: Ï÷ééûBVý>Š¥íßå ¡ÑZßEÑ´ïÎßœï2R¿úÞå»(âò=hߥPŸï†žvÿÞ 9߈÷‰>]À¾m ÀTîßÏ·Œ­Þ±¡ãƒõ! } = S%¤|oBú÷$åÁ¸/dÚhËþî„,´&dÊŒÎþ–)–Ûœ€ó½Mý&Þç{>¬çùÞõBž¢ZÏþÞïS¿çÈÖåißu¤#ýIå®]xÀíë}!s d}?K G°œ»‘Ä2¾«ô}W@FóäîÉxAöùoÌ{}×çÕûݰ§¿F¸ë÷3òÖó½ökšë.äóZŠ5?—kuÙîXÒK…{äU_«€ÞccÖ<º›¶wk.ÝMÛß5Ç÷Z™Öl²‚N'ë2Ò‰”Ö,X” ¶57܈qýÝžD®kÜ¥š™¢×˜ öDõkÜ¥Þ™1Öè2#gž5š,¹3Øõ{µÌ„€”™ùt»™Oæe@^ì¾úóý´$X#ËŒ€P®ü Á‚œâ»ûÉ}ßéÎþÞ\[¯)à§û5Í~—¢¾–¢/_@]®>l8¶¤½ùré²÷»Èï­iâ-ï^»Ôòäý½p=/ ¸âíy^Dñö¼éJ³³_Äv¥Ù˜/‚¬Û†lD{åÙj/º®Ë”iÿʸ>_üq…Ü|±P­6+c³+õf{±â•zÏ|që…ôóâèr¾O{qý…èþ™d¸6_ÒãJÏg¿$Ì•T ¢Ò,aV‰½«PŒšØŒI˜h‰šøÅ0í fRÜ–Ô弯º«ßW¾{®R|‡]í8¸jrJqíåÔdúÍ Î5 ¥+I§[W¤NÛ¦kü];‹¸Êv6rµî¬fšß¹ÑçX7 œ«ÝÈpÎ73Äeƒ[*.?>¬™¿ýñﯕóÿüñO_튲;L˜Aøç0ƒhB?}Õr¡^ãg^mðàOÍóÊ¿Zîì1¢yå7~Ú'wx^:^÷Ï+jzú ià-/OWþ!XUZZŸ—›‰èÒÒŠ¯«ÜùFÛuidÞUžèӾ鈬¥WñÚ\òLÿöÇ¿þúÿ¨çnÑU­MJÐ?3Ä9 õG¶õLœÛ?Ls…$iƒRi¹t€é#ôr!KiënEEsýu@ewÇ›ë¸Ó+´As5x„4/ÄTå¹Ô Jj¦MÏ]€ÉFªo—àþ­úø€'.Ñ4ÓØ§Ñ ¹Sê§ÑÚ¸SüG¸ïBÌ88`(BÌ€8—é*«qÀú˜r(/.À,•ƒÑ¹5sDx]ˆY<§,˜PbVѹ[ߎ@¶Lª4n›ÛV§p Q£bm Dl´ýû4·ã6~]ƒÜM/äî3dóóêýÊïýᆌyšáä‰nh'/ƾæ½`{Y0_Ô}éê6Ö}_’:#m;TwVÞ½}U`]y‡÷U“ce*ØÐ¤+SÊ^wÝG"¦}òÓ¹íË&°F ÄŸD±{Š" ¢Þ—û΋ð÷µžycÏ.SŠÃÓl²èÎc€Œ–ùp_Öç¼UiOfçÍáÆaor¥C$0F©>–¿„„ô6ÄWŸè8€,”„Šä¼fÊÆV‚ë½R¡³l5C¯ÙЇî³] õh*Ôö6Ô¬m¨b£ÐÖFE¡ÑÒ*¥Laņõ`D†~X!Æn¨ÿ„)c<ææŽó¡›DΫ0›ž?»iå<6^rÁ-4—nŹ|ù¢r*AT–9f—wÞ»‰D KMŸ„KVŸ¨K__ —о`&ÄmI]Ìûª»&ðqmá»çÅwصŽSk&§W^NM¦Þœà\:QºŽtÂu=êÄmªÖéßµ±óˆklç#×êÎk¦øÝ6p–uûÁÙÚm g}³BL2¸âÂãÖé÷×fߺsx¾6Eòýë.)Iü¶Z v׆„Ç_wåïï®Ú Uaoí«æº™5ow®O«íbåðwìë\Š"ŽŽÃ¡èÇ[qvŽñN­Foüæ#a;%qú ÒÜÄΛ—ZÆ%Ä6.ù]‚ú3C–hh@*¨dTe‰à 8.i•G!ó US¤TþfB¨rµñ4Ѭ=H#UÇ€vÞU‰Ï;®ç’Ç…¨yH™„Laßy¹ë¾!2¹G.È»ÑѾó²fÇ!u JÝ ¹ ý «K[K—zä.¬@.¿@´Ž1Å:˜W. 
e1071/vignettes/svminternals.Rnw0000644000175100001440000001616015120304220016245 0ustar hornikusers\documentclass[a4paper]{article}
\usepackage{hyperref, graphicx, color, alltt, a4wide}
\usepackage{Sweave}
\newcommand{\pkg}[1]{\texttt{#1}}
\definecolor{Red}{rgb}{0.7,0,0}
\definecolor{Blue}{rgb}{0,0,0.8}
\definecolor{hellgrau}{rgb}{0.55,0.55,0.55}
\newenvironment{smallexample}{\begin{alltt}\small}{\end{alltt}}
\begin{document}
\SweaveOpts{concordance=TRUE}
%\VignetteIndexEntry{svm() internals}
%\VignetteDepends{xtable}
%\VignetteKeywords{classification, regression, machine learning, benchmarking, support vector machines}
%\VignettePackage{e1071}
\SweaveOpts{engine=R,eps=FALSE}
\setkeys{Gin}{width=0.8\textwidth}

\title{\texttt{svm()} internals\\
\large Some technical notes about the \texttt{svm()} in package \pkg{e1071}}
\author{by David Meyer\\
FH Technikum Wien, Austria\\
\url{mailto:David.Meyer@R-Project.org}
}
\maketitle
\sloppy

This document explains how to use the parameters in an object returned by
\texttt{svm()} in your own prediction functions.

\section{Binary Classifier}

For class prediction in the binary case, the class of a new data vector $n$
is usually given by \emph{the sign} of
\begin{equation}
\sum_i{a_i y_i K(x_i, n)} + \rho
\end{equation}
\noindent where $x_i$ is the $i$-th support vector, $y_i$ the corresponding
label, $a_i$ the corresponding coefficient, and $K$ is the kernel (for
example the linear one, i.e., $K(u,v) = u^{\top} v$).

Now, the \texttt{libsvm} library interfaced by the \texttt{svm()} function
actually returns $a_i y_i$ as the $i$-th coefficient and the \emph{negative}
$\rho$, so in fact it uses the formula
\[
\sum_i{\mathrm{coef}_i K(x_i, n)} - \rho
\]
\noindent where the training examples (= training data) are labeled
\{1,-1\} (!). A simplified \textsf{R} function for prediction with a linear
kernel would be:

\begin{smallexample}
svmpred <- function (m, newdata, K = crossprod) \{
    ## this does the actual computation for one new data vector:
    pred.one <- function (x)
        sign(sum(sapply(1:m$tot.nSV,
                        function (j) K(m$SV[j,], x) * m$coefs[j]
                        )
                 ) - m$rho
             )

    ## this is just for convenience:
    if (is.vector(newdata)) newdata <- t(as.matrix(newdata))
    sapply (1:nrow(newdata), function (i) pred.one(newdata[i,]))
\}
\end{smallexample}

\noindent where \texttt{pred.one()} does the actual prediction for one new
data vector; the remainder is just a convenience for predicting multiple new
examples at once.
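As a quick sanity check, one can compare \texttt{svmpred()} with the
built-in \texttt{predict()} method on a two-class problem. The following
snippet is purely illustrative (it is not part of the package) and assumes
a model fitted with \texttt{scale = FALSE}, so that \texttt{m\$SV} holds
the raw support vectors:

\begin{smallexample}
## illustrative sketch only; assumes svmpred() as defined above
library(e1071)
data(iris)
d <- subset(iris, Species != "setosa")
d$Species <- factor(d$Species)
m <- svm(Species ~ ., data = d, kernel = "linear", scale = FALSE)

p1 <- svmpred(m, as.matrix(d[, 1:4]))  ## signs in \{-1, +1\}
p2 <- predict(m, d[, 1:4])             ## factor levels
table(p1, p2)                          ## each sign should map to one level
\end{smallexample}

\noindent The cross-table should show a one-to-one correspondence between
the two signs and the two class levels.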
It is easy to extend this to other kernels: just replace \texttt{K()} with
the appropriate function (see the help page for the formulas used) and
supply the additional constants. As we will see in the next section,
multi-class prediction is more complicated, because the coefficients of the
various binary SVMs are stored in a compressed format.

\section{Multi-class Classifier}

To handle $k$ classes, $k>2$, \texttt{svm()} trains all binary
subclassifiers (one-against-one method) and then uses a voting mechanism to
determine the actual class. This means $k(k-1)/2$ classifiers, hence in
principle $k(k-1)/2$ sets of SVs, coefficients and rhos. These are stored
in a compressed format:

\begin{enumerate}
\item Each SV is stored only once, even if it is used by several
classifiers. The \texttt{model\$SV} matrix is ordered by classes, and you
find the starting index of each class block by using \texttt{nSV} (the
number of SVs per class):

\begin{smallexample}
start <- c(1, cumsum(model$nSV) + 1)
start <- start[-length(start)]
\end{smallexample}

\texttt{sum(nSV)} equals the total number of (distinct) SVs.

\item The coefficients of the SVs are stored in the \texttt{model\$coefs}
matrix, grouped by classes. Because the separating hyperplanes found by the
SVM algorithm have SVs on both sides, you will have two sets of coefficients
per binary classifier; e.g., for 3 classes, you could build a
\emph{block} matrix like this for the classifiers $(i, j)$ ($i$, $j$ = class
numbers):

\begin{table}[h]
\center
\begin{tabular}{|c|c|c|c|}
\hline
i $\backslash$ j & 0 & 1 & 2 \\\hline
0 & X & set (0, 1) & set (0, 2)\\\hline
1 & set (1, 0) & X & set (1, 2)\\\hline
2 & set (2, 0) & set (2, 1) & X\\\hline
\end{tabular}
\end{table}

\noindent where set($i$, $j$) are the coefficients for the classifier
$(i,j)$, lying on the side of class $j$. Because there are no entries for
$(i, i)$, we can drop the diagonal and shift up the lower triangular matrix
to get

\begin{table}[h]
\center
\begin{tabular}{|c|c|c|c|}
\hline
i $\backslash$ j & 0 & 1 & 2 \\\hline
0 & set (1,0) & set (0,1) & set (0,2) \\\hline
1 & set (2,0) & set (2,1) & set (1,2) \\\hline
\end{tabular}
\end{table}

\noindent Each set $(\cdot, j)$ has length \texttt{nSV[j]}, so of course
some sets will be padded with 0s. \texttt{model\$coefs} is the
\emph{transpose} of such a matrix, so for a data set with, say, 6 classes,
you get $6-1=5$ columns. The coefficients of (i, j) start at
\texttt{model\$coefs[start[i],j]} and those of (j, i) at
\texttt{model\$coefs[start[j],i-1]}.

\item The $k(k-1)/2$ rhos are simply stored linearly in the vector
\texttt{model\$rho}.
\end{enumerate}
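To make this bookkeeping concrete, here is a small illustrative sketch
(again not part of the package; the variable names are arbitrary) that
computes the class-block start indices for a fitted 3-class model and
extracts the two coefficient sets of the binary classifier for classes
1 and 2:

\begin{smallexample}
## illustrative sketch of the indexing described above
library(e1071)
data(iris)
m <- svm(Species ~ ., data = iris, kernel = "linear")  ## 3 classes

## start indices of the per-class SV blocks in m$SV
start <- c(1, cumsum(m$nSV) + 1)
start <- start[-length(start)]

## row ranges of the blocks for classes 1 and 2
r1 <- start[1] : (start[1] + m$nSV[1] - 1)
r2 <- start[2] : (start[2] + m$nSV[2] - 1)

## the two coefficient sets of the binary classifier (1, 2)
coef1 <- m$coefs[r1, 2 - 1]   ## set lying on the side of class 1
coef2 <- m$coefs[r2, 1]       ## set lying on the side of class 2
\end{smallexample}

\noindent Together with the kernel values of the SVs in these two blocks,
\texttt{coef1} and \texttt{coef2} are exactly what the prediction function
below combines with the corresponding \texttt{rho} for the (1, 2) vote.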
\newpage

\noindent The following code shows how to use this for prediction:

\begin{smallexample}
## Linear Kernel function
K <- function(i,j) crossprod(i,j)

predsvm <- function(object, newdata) \{
    ## compute start-index
    start <- c(1, cumsum(object$nSV)+1)
    start <- start[-length(start)]

    ## compute kernel values
    kernel <- sapply (1:object$tot.nSV,
                      function (x) K(object$SV[x,], newdata))

    ## compute raw prediction for classifier (i,j)
    predone <- function (i,j) \{
        ## ranges for class i and j:
        ri <- start[i] : (start[i] + object$nSV[i] - 1)
        rj <- start[j] : (start[j] + object$nSV[j] - 1)

        ## coefs for (i,j):
        coef1 <- object$coefs[ri, j-1]
        coef2 <- object$coefs[rj, i]

        ## return raw values:
        crossprod(coef1, kernel[ri]) + crossprod(coef2, kernel[rj])
    \}

    ## compute votes for all classifiers
    votes <- rep(0, object$nclasses)
    c <- 0 # rho counter
    for (i in 1 : (object$nclasses - 1))
        for (j in (i + 1) : object$nclasses)
            if (predone(i,j) > object$rho[c <- c + 1])
                votes[i] <- votes[i] + 1
            else
                votes[j] <- votes[j] + 1

    ## return winner (index with max. votes)
    object$levels[which(votes %in% max(votes))[1]]
\}
\end{smallexample}

In case the data were scaled prior to fitting the model (note that this is
the default for \texttt{svm()}), the new data need to be scaled as well
before applying the prediction function, for example using the following
code snippet (\texttt{object} is an object returned by \texttt{svm()},
\texttt{newdata} a data frame):

\begin{smallexample}
if (any(object$scaled))
    newdata[,object$scaled] <-
        scale(newdata[,object$scaled, drop = FALSE],
              center = object$x.scale$"scaled:center",
              scale  = object$x.scale$"scaled:scale"
              )
\end{smallexample}

\noindent For regression, the response needs to be scaled as well before
training, and the predictions need to be scaled back accordingly.

\end{document}
e1071/src/0000755000175100001440000000000015120610017011605 5ustar hornikuserse1071/src/Rsvm.c0000755000175100001440000002754614173734134012723 0ustar hornikusers #include #include #include #include #include #include "svm.h" #define Malloc(type,n) (type *)malloc((n)*sizeof(type)) /* * results from cross-validation */ struct crossresults { double* results; double total1; double total2; }; struct svm_node ** sparsify (double *x, int r, int c) { struct svm_node** sparse; int i, ii, count; sparse = (struct svm_node **) malloc (r * sizeof(struct svm_node *)); for (i = 0; i < r; i++) { /* determine nr.
of non-zero elements */ for (count = ii = 0; ii < c; ii++) if (x[i * c + ii] != 0) count++; /* allocate memory for column elements */ sparse[i] = (struct svm_node *) malloc ((count + 1) * sizeof(struct svm_node)); /* set column elements */ for (count = ii = 0; ii < c; ii++) if (x[i * c + ii] != 0) { sparse[i][count].index = ii + 1; sparse[i][count].value = x[i * c + ii]; count++; } /* set termination element */ sparse[i][count].index = -1; } return sparse; } struct svm_node ** transsparse (double *x, int r, int *rowindex, int *colindex) { struct svm_node** sparse; int i, ii, count = 0, nnz = 0; sparse = (struct svm_node **) malloc (r * sizeof(struct svm_node*)); for (i = 0; i < r; i++) { /* allocate memory for column elements */ nnz = rowindex[i+1] - rowindex[i]; sparse[i] = (struct svm_node *) malloc ((nnz + 1) * sizeof(struct svm_node)); /* set column elements */ for (ii = 0; ii < nnz; ii++) { sparse[i][ii].index = colindex[count]; sparse[i][ii].value = x[count]; count++; } /* set termination element */ sparse[i][ii].index = -1; } return sparse; } /* Cross-Validation-routine from svm-train */ void do_cross_validation(struct svm_problem *prob, struct svm_parameter *param, int nr_fold, double* cresults, double* ctotal1, double* ctotal2) { int i; int total_correct = 0; double total_error = 0; double sumv = 0, sumy = 0, sumvv = 0, sumyy = 0, sumvy = 0; /* random shuffle */ GetRNGstate(); for(i=0; il; i++) { int j = i+((int) (unif_rand() * (prob->l-i)))%(prob->l-i); struct svm_node *tx; double ty; tx = prob->x[i]; prob->x[i] = prob->x[j]; prob->x[j] = tx; ty = prob->y[i]; prob->y[i] = prob->y[j]; prob->y[j] = ty; } PutRNGstate(); for(i=0; il/nr_fold; int end = (i+1)*prob->l/nr_fold; int j,k; struct svm_problem subprob; subprob.l = prob->l-(end-begin); subprob.x = Malloc(struct svm_node*,subprob.l); subprob.y = Malloc(double,subprob.l); k=0; for(j = 0; j < begin; j++) { subprob.x[k] = prob->x[j]; subprob.y[k] = prob->y[j]; ++k; } for(j = end; jl; j++) { subprob.x[k] = prob->x[j]; subprob.y[k] = prob->y[j]; ++k; } if(param->svm_type == EPSILON_SVR || param->svm_type == NU_SVR) { struct svm_model *submodel = svm_train(&subprob,param); double error = 0; for(j=begin;jx[j]); double y = prob->y[j]; error += (v-y)*(v-y); sumv += v; sumy += y; sumvv += v*v; sumyy += y*y; sumvy += v*y; } svm_free_and_destroy_model(&submodel); /* printf("Mean squared error = %g\n", error/(end-begin)); */ cresults[i] = error/(end-begin); total_error += error; } else { struct svm_model *submodel = svm_train(&subprob,param); int correct = 0; for(j=begin;jx[j]); if(v == prob->y[j]) ++correct; } svm_free_and_destroy_model(&submodel); /* printf("Accuracy = %g%% (%d/%d)\n", */ /* 100.0*correct/(end-begin),correct,(end-begin)); */ cresults[i] = 100.0*correct/(end-begin); total_correct += correct; } free(subprob.x); free(subprob.y); } if(param->svm_type == EPSILON_SVR || param->svm_type == NU_SVR) { /* printf("Cross Validation Mean squared error = %g\n",total_error/prob.l); printf("Cross Validation Squared correlation coefficient = %g\n", ((prob.l*sumvy-sumv*sumy)*(prob.l*sumvy-sumv*sumy))/ ((prob.l*sumvv-sumv*sumv)*(prob.l*sumyy-sumy*sumy)) ); */ *ctotal1 = total_error/prob->l; *ctotal2 = ((prob->l * sumvy - sumv * sumy) * (prob->l * sumvy - sumv*sumy)) / ((prob->l * sumvv - sumv * sumv) * (prob->l * sumyy - sumy * sumy)); } else /* printf("Cross Validation Accuracy = %g%%\n",100.0*total_correct/prob.l); */ *ctotal1 = 100.0 * total_correct / prob->l; } void svmtrain (double *x, int *r, int *c, double *y, int *rowindex, int 
*colindex, int *svm_type, int *kernel_type, int *degree, double *gamma, double *coef0, double *cost, double *nu, int *weightlabels, double *weights, int *nweights, double *cache, double *tolerance, double *epsilon, int *shrinking, int *cross, int *sparse, int *probability, int *nclasses, int *nr, int *index, int *labels, int *nSV, double *rho, double *coefs, double *sigma, double *probA, double *probB, double *cresults, double *ctotal1, double *ctotal2, char **error) { struct svm_parameter par; struct svm_problem prob; struct svm_model *model = NULL; int i; const char* s; /* set parameters */ par.svm_type = *svm_type; par.kernel_type = *kernel_type; par.degree = *degree; par.gamma = *gamma; par.coef0 = *coef0; par.cache_size = *cache; par.eps = *tolerance; par.C = *cost; par.nu = *nu; par.nr_weight = *nweights; if (par.nr_weight > 0) { par.weight = (double *) malloc (sizeof(double) * par.nr_weight); memcpy(par.weight, weights, par.nr_weight * sizeof(double)); par.weight_label = (int *) malloc (sizeof(int) * par.nr_weight); memcpy(par.weight_label, weightlabels, par.nr_weight * sizeof(int)); } par.p = *epsilon; par.shrinking = *shrinking; par.probability = *probability; /* set problem */ prob.l = *r; prob.y = y; if (*sparse > 0) prob.x = transsparse(x, *r, rowindex, colindex); else prob.x = sparsify(x, *r, *c); /* check parameters & copy error message */ s = svm_check_parameter(&prob, &par); if (s) { strcpy(*error, s); } else { /* call svm_train */ model = svm_train(&prob, &par); /* set up return values */ /* for (ii = 0; ii < model->l; ii++) for (i = 0; i < *r; i++) if (prob.x[i] == model->SV[ii]) index[ii] = i+1; */ svm_get_sv_indices(model, index); *nr = model->l; *nclasses = model->nr_class; memcpy (rho, model->rho, *nclasses * (*nclasses - 1)/2 * sizeof(double)); if (*probability && par.svm_type != ONE_CLASS) { if (par.svm_type == EPSILON_SVR || par.svm_type == NU_SVR) *sigma = svm_get_svr_probability(model); else { memcpy(probA, model->probA, *nclasses * (*nclasses - 1)/2 * sizeof(double)); memcpy(probB, model->probB, *nclasses * (*nclasses - 1)/2 * sizeof(double)); } } for (i = 0; i < *nclasses-1; i++) memcpy (coefs + i * *nr, model->sv_coef[i], *nr * sizeof (double)); if (*svm_type < 2) { memcpy (labels, model->label, *nclasses * sizeof(int)); memcpy (nSV, model->nSV, *nclasses * sizeof(int)); } /* Perform cross-validation, if requested */ if (*cross > 0) do_cross_validation (&prob, &par, *cross, cresults, ctotal1, ctotal2); /* clean up memory */ svm_free_and_destroy_model(&model); } /* clean up memory */ if (par.nr_weight > 0) { free(par.weight); free(par.weight_label); } for (i = 0; i < *r; i++) free (prob.x[i]); free (prob.x); } void svmpredict (int *decisionvalues, int *probability, double *v, int *r, int *c, int *rowindex, int *colindex, double *coefs, double *rho, int *compprob, double *probA, double *probB, int *nclasses, int *totnSV, int *labels, int *nSV, int *sparsemodel, int *svm_type, int *kernel_type, int *degree, double *gamma, double *coef0, double *x, int *xr, int *xrowindex, int *xcolindex, int *sparsex, double *ret, double *dec, double *prob) { struct svm_model m; struct svm_node ** train; int i; /* set up model */ m.l = *totnSV; m.nr_class = *nclasses; m.sv_coef = (double **) malloc (m.nr_class * sizeof(double*)); for (i = 0; i < m.nr_class - 1; i++) { m.sv_coef[i] = (double *) malloc (m.l * sizeof (double)); memcpy (m.sv_coef[i], coefs + i*m.l, m.l * sizeof (double)); } if (*sparsemodel > 0) m.SV = transsparse(v, *r, rowindex, colindex); else m.SV = sparsify(v, 
*r, *c); m.rho = rho; m.probA = probA; m.probB = probB; m.label = labels; m.nSV = nSV; /* set up parameter */ m.param.svm_type = *svm_type; m.param.kernel_type = *kernel_type; m.param.degree = *degree; m.param.gamma = *gamma; m.param.coef0 = *coef0; m.param.probability = *compprob; m.free_sv = 1; /* create sparse training matrix */ if (*sparsex > 0) train = transsparse(x, *xr, xrowindex, xcolindex); else train = sparsify(x, *xr, *c); /* call svm-predict-function for each x-row, possibly using probability estimator, if requested */ if (*probability && svm_check_probability_model(&m)) { for (i = 0; i < *xr; i++) ret[i] = svm_predict_probability(&m, train[i], prob + i * *nclasses); } else { for (i = 0; i < *xr; i++) ret[i] = svm_predict(&m, train[i]); } /* optionally, compute decision values */ if (*decisionvalues) for (i = 0; i < *xr; i++) svm_predict_values(&m, train[i], dec + i * *nclasses * (*nclasses - 1) / 2); /* clean up memory */ for (i = 0; i < *xr; i++) free (train[i]); free (train); for (i = 0; i < *r; i++) free (m.SV[i]); free (m.SV); for (i = 0; i < m.nr_class - 1; i++) free(m.sv_coef[i]); free(m.sv_coef); } void svmwrite (double *v, int *r, int *c, int *rowindex, int *colindex, double *coefs, double *rho, int *compprob, double *probA, double *probB, int *nclasses, int *totnSV, int *labels, int *nSV, int *sparsemodel, int *svm_type, int *kernel_type, int *degree, double *gamma, double *coef0, char **filename) { struct svm_model m; int i; char *fname = *filename; /* set up model */ m.l = *totnSV; m.nr_class = *nclasses; m.sv_coef = (double **) malloc (m.nr_class * sizeof(double*)); for (i = 0; i < m.nr_class - 1; i++) { m.sv_coef[i] = (double *) malloc (m.l * sizeof (double)); memcpy (m.sv_coef[i], coefs + i*m.l, m.l * sizeof (double)); } if (*sparsemodel > 0) m.SV = transsparse(v, *r, rowindex, colindex); else m.SV = sparsify(v, *r, *c); m.rho = rho; m.label = labels; m.nSV = nSV; if (*compprob) { m.probA = probA; m.probB = probB; } else { m.probA = NULL; m.probB = NULL; } /* set up parameter */ m.param.svm_type = *svm_type; m.param.kernel_type = *kernel_type; m.param.degree = *degree; m.param.gamma = *gamma; m.param.coef0 = *coef0; m.free_sv = 1; /* write svm model */ svm_save_model(fname, &m); for (i = 0; i < m.nr_class - 1; i++) free(m.sv_coef[i]); free(m.sv_coef); for (i = 0; i < *r; i++) free (m.SV[i]); free (m.SV); } e1071/src/svm.cpp0000644000175100001440000020004114671763131013133 0ustar hornikusers#include #include #include #include #include #include #include #include #include #include #include #include "svm.h" int libsvm_version = LIBSVM_VERSION; typedef float Qfloat; typedef signed char schar; #ifndef min template static inline T min(T x,T y) { return (x static inline T max(T x,T y) { return (x>y)?x:y; } #endif template static inline void swap(T& x, T& y) { T t=x; x=y; y=t; } template static inline void clone(T*& dst, S* src, int n) { dst = new T[n]; memcpy((void *)dst,(void *)src,sizeof(T)*n); } static inline double powi(double base, int times) { double tmp = base, ret = 1.0; for(int t=times; t>0; t/=2) { if(t%2==1) ret*=tmp; tmp = tmp * tmp; } return ret; } #define INF HUGE_VAL #define TAU 1e-12 #define Malloc(type,n) (type *)malloc( (unsigned int) ((n)*sizeof(type)) ) static void print_string_stdout(const char *s) { /* fputs(s,stdout); fflush(stdout); */ Rprintf("%s", s); } static void (*svm_print_string) (const char *) = &print_string_stdout; #if 0 static void info(const char *fmt,...) 
{ char buf[BUFSIZ]; va_list ap; va_start(ap,fmt); vsprintf(buf,fmt,ap); va_end(ap); (*svm_print_string)(buf); } #else static void info(const char *fmt,...) {} #endif // // Kernel Cache // // l is the number of total data items // size is the cache size limit in bytes // class Cache { public: Cache(int l,long int size); ~Cache(); // request data [0,len) // return some position p where [p,len) need to be filled // (p >= len if nothing needs to be filled) int get_data(const int index, Qfloat **data, int len); void swap_index(int i, int j); private: int l; long int size; struct head_t { head_t *prev, *next; // a circular list Qfloat *data; int len; // data[0,len) is cached in this entry }; head_t *head; head_t lru_head; void lru_delete(head_t *h); void lru_insert(head_t *h); }; Cache::Cache(int l_,long int size_):l(l_),size(size_) { head = (head_t *)calloc(l,sizeof(head_t)); // initialized to 0 size /= sizeof(Qfloat); size -= l * sizeof(head_t) / sizeof(Qfloat); size = max(size, 2 * (long int) l); // cache must be large enough for two columns lru_head.next = lru_head.prev = &lru_head; } Cache::~Cache() { for(head_t *h = lru_head.next; h != &lru_head; h=h->next) free(h->data); free(head); } void Cache::lru_delete(head_t *h) { // delete from current location h->prev->next = h->next; h->next->prev = h->prev; } void Cache::lru_insert(head_t *h) { // insert to last position h->next = &lru_head; h->prev = lru_head.prev; h->prev->next = h; h->next->prev = h; } int Cache::get_data(const int index, Qfloat **data, int len) { head_t *h = &head[index]; if(h->len) lru_delete(h); int more = len - h->len; if(more > 0) { // free old space while(size < more) { head_t *old = lru_head.next; lru_delete(old); free(old->data); size += old->len; old->data = 0; old->len = 0; } // allocate new space h->data = (Qfloat *)realloc(h->data,sizeof(Qfloat)*len); size -= more; swap(h->len,len); } lru_insert(h); *data = h->data; return len; } void Cache::swap_index(int i, int j) { if(i==j) return; if(head[i].len) lru_delete(&head[i]); if(head[j].len) lru_delete(&head[j]); swap(head[i].data,head[j].data); swap(head[i].len,head[j].len); if(head[i].len) lru_insert(&head[i]); if(head[j].len) lru_insert(&head[j]); if(i>j) swap(i,j); for(head_t *h = lru_head.next; h!=&lru_head; h=h->next) { if(h->len > i) { if(h->len > j) swap(h->data[i],h->data[j]); else { // give up lru_delete(h); free(h->data); size += h->len; h->data = 0; h->len = 0; } } } } // // Kernel evaluation // // the static method k_function is for doing single kernel evaluation // the constructor of Kernel prepares to calculate the l*l kernel matrix // the member function get_Q is for getting one column from the Q Matrix // class QMatrix { public: virtual Qfloat *get_Q(int column, int len) const = 0; virtual double *get_QD() const = 0; virtual void swap_index(int i, int j) const = 0; virtual ~QMatrix() {} }; class Kernel: public QMatrix { public: Kernel(int l, svm_node * const * x, const svm_parameter& param); virtual ~Kernel(); static double k_function(const svm_node *x, const svm_node *y, const svm_parameter& param); virtual Qfloat *get_Q(int column, int len) const = 0; virtual double *get_QD() const = 0; virtual void swap_index(int i, int j) const // no so const... 
{ swap(x[i],x[j]); if(x_square) swap(x_square[i],x_square[j]); } protected: double (Kernel::*kernel_function)(int i, int j) const; private: const svm_node **x; double *x_square; // svm_parameter const int kernel_type; const int degree; const double gamma; const double coef0; static double dot(const svm_node *px, const svm_node *py); double kernel_linear(int i, int j) const { return dot(x[i],x[j]); } double kernel_poly(int i, int j) const { return powi(gamma*dot(x[i],x[j])+coef0,degree); } double kernel_rbf(int i, int j) const { return exp(-gamma*(x_square[i]+x_square[j]-2*dot(x[i],x[j]))); } double kernel_sigmoid(int i, int j) const { return tanh(gamma*dot(x[i],x[j])+coef0); } double kernel_precomputed(int i, int j) const { return x[i][(int)(x[j][0].value)].value; } }; Kernel::Kernel(int l, svm_node * const * x_, const svm_parameter& param) :kernel_type(param.kernel_type), degree(param.degree), gamma(param.gamma), coef0(param.coef0) { switch(kernel_type) { case LINEAR: kernel_function = &Kernel::kernel_linear; break; case POLY: kernel_function = &Kernel::kernel_poly; break; case RBF: kernel_function = &Kernel::kernel_rbf; break; case SIGMOID: kernel_function = &Kernel::kernel_sigmoid; break; case PRECOMPUTED: kernel_function = &Kernel::kernel_precomputed; break; } clone(x,x_,l); if(kernel_type == RBF) { x_square = new double[l]; for(int i=0;iindex != -1 && py->index != -1) { if(px->index == py->index) { sum += px->value * py->value; ++px; ++py; } else { if(px->index > py->index) ++py; else ++px; } } return sum; } double Kernel::k_function(const svm_node *x, const svm_node *y, const svm_parameter& param) { switch(param.kernel_type) { case LINEAR: return dot(x,y); case POLY: return powi(param.gamma*dot(x,y)+param.coef0,param.degree); case RBF: { double sum = 0; while(x->index != -1 && y->index !=-1) { if(x->index == y->index) { double d = x->value - y->value; sum += d*d; ++x; ++y; } else { if(x->index > y->index) { sum += y->value * y->value; ++y; } else { sum += x->value * x->value; ++x; } } } while(x->index != -1) { sum += x->value * x->value; ++x; } while(y->index != -1) { sum += y->value * y->value; ++y; } return exp(-param.gamma*sum); } case SIGMOID: return tanh(param.gamma*dot(x,y)+param.coef0); case PRECOMPUTED: //x: test (validation), y: SV return x[(int)(y->value)].value; default: return 0; // Unreachable } } // An SMO algorithm in Fan et al., JMLR 6(2005), p. 
1889--1918 // Solves: // // min 0.5(\alpha^T Q \alpha) + p^T \alpha // // y^T \alpha = \delta // y_i = +1 or -1 // 0 <= alpha_i <= Cp for y_i = 1 // 0 <= alpha_i <= Cn for y_i = -1 // // Given: // // Q, p, y, Cp, Cn, and an initial feasible point \alpha // l is the size of vectors and matrices // eps is the stopping tolerance // // solution will be put in \alpha, objective value will be put in obj // class Solver { public: Solver() {}; virtual ~Solver() {}; struct SolutionInfo { double obj; double rho; double upper_bound_p; double upper_bound_n; double r; // for Solver_NU }; void Solve(int l, const QMatrix& Q, const double *p_, const schar *y_, double *alpha_, double Cp, double Cn, double eps, SolutionInfo* si, int shrinking); protected: int active_size; schar *y; double *G; // gradient of objective function enum { LOWER_BOUND, UPPER_BOUND, FREE }; char *alpha_status; // LOWER_BOUND, UPPER_BOUND, FREE double *alpha; const QMatrix *Q; const double *QD; double eps; double Cp,Cn; double *p; int *active_set; double *G_bar; // gradient, if we treat free variables as 0 int l; bool unshrink; // XXX double get_C(int i) { return (y[i] > 0)? Cp : Cn; } void update_alpha_status(int i) { if(alpha[i] >= get_C(i)) alpha_status[i] = UPPER_BOUND; else if(alpha[i] <= 0) alpha_status[i] = LOWER_BOUND; else alpha_status[i] = FREE; } bool is_upper_bound(int i) { return alpha_status[i] == UPPER_BOUND; } bool is_lower_bound(int i) { return alpha_status[i] == LOWER_BOUND; } bool is_free(int i) { return alpha_status[i] == FREE; } void swap_index(int i, int j); void reconstruct_gradient(); virtual int select_working_set(int &i, int &j); virtual double calculate_rho(); virtual void do_shrinking(); private: bool be_shrunk(int i, double Gmax1, double Gmax2); }; void Solver::swap_index(int i, int j) { Q->swap_index(i,j); swap(y[i],y[j]); swap(G[i],G[j]); swap(alpha_status[i],alpha_status[j]); swap(alpha[i],alpha[j]); swap(p[i],p[j]); swap(active_set[i],active_set[j]); swap(G_bar[i],G_bar[j]); } void Solver::reconstruct_gradient() { // reconstruct inactive elements of G from G_bar and free variables if(active_size == l) return; int i,j; int nr_free = 0; for(j=active_size;j 2*active_size*(l-active_size)) { for(i=active_size;iget_Q(i,active_size); for(j=0;jget_Q(i,l); double alpha_i = alpha[i]; for(j=active_size;jl = l; this->Q = &Q; QD=Q.get_QD(); clone(p, p_,l); clone(y, y_,l); clone(alpha,alpha_,l); this->Cp = Cp; this->Cn = Cn; this->eps = eps; unshrink = false; // initialize alpha_status { alpha_status = new char[l]; for(int i=0;iINT_MAX/100 ? 
INT_MAX : 100*l); int counter = min(l,1000)+1; while(iter < max_iter) { // show progress and do shrinking if(--counter == 0) { counter = min(l,1000); if(shrinking) do_shrinking(); info("."); } int i,j; if(select_working_set(i,j)!=0) { // reconstruct the whole gradient reconstruct_gradient(); // reset active set size and check active_size = l; info("*"); if(select_working_set(i,j)!=0) break; else counter = 1; // do shrinking next iteration } ++iter; // update alpha[i] and alpha[j], handle bounds carefully const Qfloat *Q_i = Q.get_Q(i,active_size); const Qfloat *Q_j = Q.get_Q(j,active_size); double C_i = get_C(i); double C_j = get_C(j); double old_alpha_i = alpha[i]; double old_alpha_j = alpha[j]; if(y[i]!=y[j]) { double quad_coef = QD[i]+QD[j]+2*Q_i[j]; if (quad_coef <= 0) quad_coef = TAU; double delta = (-G[i]-G[j])/quad_coef; double diff = alpha[i] - alpha[j]; alpha[i] += delta; alpha[j] += delta; if(diff > 0) { if(alpha[j] < 0) { alpha[j] = 0; alpha[i] = diff; } } else { if(alpha[i] < 0) { alpha[i] = 0; alpha[j] = -diff; } } if(diff > C_i - C_j) { if(alpha[i] > C_i) { alpha[i] = C_i; alpha[j] = C_i - diff; } } else { if(alpha[j] > C_j) { alpha[j] = C_j; alpha[i] = C_j + diff; } } } else { double quad_coef = QD[i]+QD[j]-2*Q_i[j]; if (quad_coef <= 0) quad_coef = TAU; double delta = (G[i]-G[j])/quad_coef; double sum = alpha[i] + alpha[j]; alpha[i] -= delta; alpha[j] += delta; if(sum > C_i) { if(alpha[i] > C_i) { alpha[i] = C_i; alpha[j] = sum - C_i; } } else { if(alpha[j] < 0) { alpha[j] = 0; alpha[i] = sum; } } if(sum > C_j) { if(alpha[j] > C_j) { alpha[j] = C_j; alpha[i] = sum - C_j; } } else { if(alpha[i] < 0) { alpha[i] = 0; alpha[j] = sum; } } } // update G double delta_alpha_i = alpha[i] - old_alpha_i; double delta_alpha_j = alpha[j] - old_alpha_j; for(int k=0;k= max_iter) { if(active_size < l) { // reconstruct the whole gradient to calculate objective value reconstruct_gradient(); active_size = l; info("*"); } REprintf("\nWARNING: reaching max number of iterations\n"); } // calculate rho si->rho = calculate_rho(); // calculate objective value { double v = 0; int i; for(i=0;iobj = v/2; } // put back the solution { for(int i=0;iupper_bound_p = Cp; si->upper_bound_n = Cn; info("\noptimization finished, #iter = %d\n",iter); delete[] p; delete[] y; delete[] alpha; delete[] alpha_status; delete[] active_set; delete[] G; delete[] G_bar; } // return 1 if already optimal, return 0 otherwise int Solver::select_working_set(int &out_i, int &out_j) { // return i,j such that // i: maximizes -y_i * grad(f)_i, i in I_up(\alpha) // j: minimizes the decrease of obj value // (if quadratic coefficeint <= 0, replace it with tau) // -y_j*grad(f)_j < -y_i*grad(f)_i, j in I_low(\alpha) double Gmax = -INF; double Gmax2 = -INF; int Gmax_idx = -1; int Gmin_idx = -1; double obj_diff_min = INF; for(int t=0;t= Gmax) { Gmax = -G[t]; Gmax_idx = t; } } else { if(!is_lower_bound(t)) if(G[t] >= Gmax) { Gmax = G[t]; Gmax_idx = t; } } int i = Gmax_idx; const Qfloat *Q_i = NULL; if(i != -1) // NULL Q_i not accessed: Gmax=-INF if i=-1 Q_i = Q->get_Q(i,active_size); for(int j=0;j= Gmax2) Gmax2 = G[j]; if (grad_diff > 0) { double obj_diff; double quad_coef = QD[i]+QD[j]-2.0*y[i]*Q_i[j]; if (quad_coef > 0) obj_diff = -(grad_diff*grad_diff)/quad_coef; else obj_diff = -(grad_diff*grad_diff)/TAU; if (obj_diff <= obj_diff_min) { Gmin_idx=j; obj_diff_min = obj_diff; } } } } else { if (!is_upper_bound(j)) { double grad_diff= Gmax-G[j]; if (-G[j] >= Gmax2) Gmax2 = -G[j]; if (grad_diff > 0) { double obj_diff; double quad_coef = 
QD[i]+QD[j]+2.0*y[i]*Q_i[j]; if (quad_coef > 0) obj_diff = -(grad_diff*grad_diff)/quad_coef; else obj_diff = -(grad_diff*grad_diff)/TAU; if (obj_diff <= obj_diff_min) { Gmin_idx=j; obj_diff_min = obj_diff; } } } } } if(Gmax+Gmax2 < eps || Gmin_idx == -1) return 1; out_i = Gmax_idx; out_j = Gmin_idx; return 0; } bool Solver::be_shrunk(int i, double Gmax1, double Gmax2) { if(is_upper_bound(i)) { if(y[i]==+1) return(-G[i] > Gmax1); else return(-G[i] > Gmax2); } else if(is_lower_bound(i)) { if(y[i]==+1) return(G[i] > Gmax2); else return(G[i] > Gmax1); } else return(false); } void Solver::do_shrinking() { int i; double Gmax1 = -INF; // max { -y_i * grad(f)_i | i in I_up(\alpha) } double Gmax2 = -INF; // max { y_i * grad(f)_i | i in I_low(\alpha) } // find maximal violating pair first for(i=0;i= Gmax1) Gmax1 = -G[i]; } if(!is_lower_bound(i)) { if(G[i] >= Gmax2) Gmax2 = G[i]; } } else { if(!is_upper_bound(i)) { if(-G[i] >= Gmax2) Gmax2 = -G[i]; } if(!is_lower_bound(i)) { if(G[i] >= Gmax1) Gmax1 = G[i]; } } } if(unshrink == false && Gmax1 + Gmax2 <= eps*10) { unshrink = true; reconstruct_gradient(); active_size = l; info("*"); } for(i=0;i i) { if (!be_shrunk(active_size, Gmax1, Gmax2)) { swap_index(i,active_size); break; } active_size--; } } } double Solver::calculate_rho() { double r; int nr_free = 0; double ub = INF, lb = -INF, sum_free = 0; for(int i=0;i0) r = sum_free/nr_free; else r = (ub+lb)/2; return r; } // // Solver for nu-svm classification and regression // // additional constraint: e^T \alpha = constant // class Solver_NU: public Solver { public: Solver_NU() {} void Solve(int l, const QMatrix& Q, const double *p, const schar *y, double *alpha, double Cp, double Cn, double eps, SolutionInfo* si, int shrinking) { this->si = si; Solver::Solve(l,Q,p,y,alpha,Cp,Cn,eps,si,shrinking); } private: SolutionInfo *si; int select_working_set(int &i, int &j); double calculate_rho(); bool be_shrunk(int i, double Gmax1, double Gmax2, double Gmax3, double Gmax4); void do_shrinking(); }; // return 1 if already optimal, return 0 otherwise int Solver_NU::select_working_set(int &out_i, int &out_j) { // return i,j such that y_i = y_j and // i: maximizes -y_i * grad(f)_i, i in I_up(\alpha) // j: minimizes the decrease of obj value // (if quadratic coefficeint <= 0, replace it with tau) // -y_j*grad(f)_j < -y_i*grad(f)_i, j in I_low(\alpha) double Gmaxp = -INF; double Gmaxp2 = -INF; int Gmaxp_idx = -1; double Gmaxn = -INF; double Gmaxn2 = -INF; int Gmaxn_idx = -1; int Gmin_idx = -1; double obj_diff_min = INF; for(int t=0;t= Gmaxp) { Gmaxp = -G[t]; Gmaxp_idx = t; } } else { if(!is_lower_bound(t)) if(G[t] >= Gmaxn) { Gmaxn = G[t]; Gmaxn_idx = t; } } int ip = Gmaxp_idx; int in = Gmaxn_idx; const Qfloat *Q_ip = NULL; const Qfloat *Q_in = NULL; if(ip != -1) // NULL Q_ip not accessed: Gmaxp=-INF if ip=-1 Q_ip = Q->get_Q(ip,active_size); if(in != -1) Q_in = Q->get_Q(in,active_size); for(int j=0;j= Gmaxp2) Gmaxp2 = G[j]; if (grad_diff > 0) { double obj_diff; double quad_coef = QD[ip]+QD[j]-2*Q_ip[j]; if (quad_coef > 0) obj_diff = -(grad_diff*grad_diff)/quad_coef; else obj_diff = -(grad_diff*grad_diff)/TAU; if (obj_diff <= obj_diff_min) { Gmin_idx=j; obj_diff_min = obj_diff; } } } } else { if (!is_upper_bound(j)) { double grad_diff=Gmaxn-G[j]; if (-G[j] >= Gmaxn2) Gmaxn2 = -G[j]; if (grad_diff > 0) { double obj_diff; double quad_coef = QD[in]+QD[j]-2*Q_in[j]; if (quad_coef > 0) obj_diff = -(grad_diff*grad_diff)/quad_coef; else obj_diff = -(grad_diff*grad_diff)/TAU; if (obj_diff <= obj_diff_min) { Gmin_idx=j; 
obj_diff_min = obj_diff; } } } } } if(max(Gmaxp+Gmaxp2,Gmaxn+Gmaxn2) < eps || Gmin_idx == -1) return 1; if (y[Gmin_idx] == +1) out_i = Gmaxp_idx; else out_i = Gmaxn_idx; out_j = Gmin_idx; return 0; } bool Solver_NU::be_shrunk(int i, double Gmax1, double Gmax2, double Gmax3, double Gmax4) { if(is_upper_bound(i)) { if(y[i]==+1) return(-G[i] > Gmax1); else return(-G[i] > Gmax4); } else if(is_lower_bound(i)) { if(y[i]==+1) return(G[i] > Gmax2); else return(G[i] > Gmax3); } else return(false); } void Solver_NU::do_shrinking() { double Gmax1 = -INF; // max { -y_i * grad(f)_i | y_i = +1, i in I_up(\alpha) } double Gmax2 = -INF; // max { y_i * grad(f)_i | y_i = +1, i in I_low(\alpha) } double Gmax3 = -INF; // max { -y_i * grad(f)_i | y_i = -1, i in I_up(\alpha) } double Gmax4 = -INF; // max { y_i * grad(f)_i | y_i = -1, i in I_low(\alpha) } // find maximal violating pair first int i; for(i=0;i Gmax1) Gmax1 = -G[i]; } else if(-G[i] > Gmax4) Gmax4 = -G[i]; } if(!is_lower_bound(i)) { if(y[i]==+1) { if(G[i] > Gmax2) Gmax2 = G[i]; } else if(G[i] > Gmax3) Gmax3 = G[i]; } } if(unshrink == false && max(Gmax1+Gmax2,Gmax3+Gmax4) <= eps*10) { unshrink = true; reconstruct_gradient(); active_size = l; } for(i=0;i i) { if (!be_shrunk(active_size, Gmax1, Gmax2, Gmax3, Gmax4)) { swap_index(i,active_size); break; } active_size--; } } } double Solver_NU::calculate_rho() { int nr_free1 = 0,nr_free2 = 0; double ub1 = INF, ub2 = INF; double lb1 = -INF, lb2 = -INF; double sum_free1 = 0, sum_free2 = 0; for(int i=0;i 0) r1 = sum_free1/nr_free1; else r1 = (ub1+lb1)/2; if(nr_free2 > 0) r2 = sum_free2/nr_free2; else r2 = (ub2+lb2)/2; si->r = (r1+r2)/2; return (r1-r2)/2; } // // Q matrices for various formulations // class SVC_Q: public Kernel { public: SVC_Q(const svm_problem& prob, const svm_parameter& param, const schar *y_) :Kernel(prob.l, prob.x, param) { clone(y,y_,prob.l); cache = new Cache(prob.l,(long int)(param.cache_size*(1<<20))); QD = new double[prob.l]; for(int i=0;i*kernel_function)(i,i); } Qfloat *get_Q(int i, int len) const { Qfloat *data; int start, j; if((start = cache->get_data(i,&data,len)) < len) { for(j=start;j*kernel_function)(i,j)); } return data; } double *get_QD() const { return QD; } void swap_index(int i, int j) const { cache->swap_index(i,j); Kernel::swap_index(i,j); swap(y[i],y[j]); swap(QD[i],QD[j]); } ~SVC_Q() { delete[] y; delete cache; delete[] QD; } private: schar *y; Cache *cache; double *QD; }; class ONE_CLASS_Q: public Kernel { public: ONE_CLASS_Q(const svm_problem& prob, const svm_parameter& param) :Kernel(prob.l, prob.x, param) { cache = new Cache(prob.l,(long int)(param.cache_size*(1<<20))); QD = new double[prob.l]; for(int i=0;i*kernel_function)(i,i); } Qfloat *get_Q(int i, int len) const { Qfloat *data; int start, j; if((start = cache->get_data(i,&data,len)) < len) { for(j=start;j*kernel_function)(i,j); } return data; } double *get_QD() const { return QD; } void swap_index(int i, int j) const { cache->swap_index(i,j); Kernel::swap_index(i,j); swap(QD[i],QD[j]); } ~ONE_CLASS_Q() { delete cache; delete[] QD; } private: Cache *cache; double *QD; }; class SVR_Q: public Kernel { public: SVR_Q(const svm_problem& prob, const svm_parameter& param) :Kernel(prob.l, prob.x, param) { l = prob.l; cache = new Cache(l,(long int)(param.cache_size*(1<<20))); QD = new double[2*l]; sign = new schar[2*l]; index = new int[2*l]; for(int k=0;k*kernel_function)(k,k); QD[k+l] = QD[k]; } buffer[0] = new Qfloat[2*l]; buffer[1] = new Qfloat[2*l]; next_buffer = 0; } void swap_index(int i, int j) const { 
swap(sign[i],sign[j]); swap(index[i],index[j]); swap(QD[i],QD[j]); } Qfloat *get_Q(int i, int len) const { Qfloat *data; int j, real_i = index[i]; if(cache->get_data(real_i,&data,l) < l) { for(j=0;j*kernel_function)(real_i,j); } // reorder and copy Qfloat *buf = buffer[next_buffer]; next_buffer = 1 - next_buffer; schar si = sign[i]; for(j=0;jl; double *minus_ones = new double[l]; schar *y = new schar[l]; int i; for(i=0;iy[i] > 0) y[i] = +1; else y[i] = -1; } Solver s; s.Solve(l, SVC_Q(*prob,*param,y), minus_ones, y, alpha, Cp, Cn, param->eps, si, param->shrinking); double sum_alpha=0; for(i=0;il)); for(i=0;il; double nu = param->nu; schar *y = new schar[l]; for(i=0;iy[i]>0) y[i] = +1; else y[i] = -1; double sum_pos = nu*l/2; double sum_neg = nu*l/2; for(i=0;ieps, si, param->shrinking); double r = si->r; info("C = %f\n",1/r); for(i=0;irho /= r; si->obj /= (r*r); si->upper_bound_p = 1/r; si->upper_bound_n = 1/r; delete[] y; delete[] zeros; } static void solve_one_class( const svm_problem *prob, const svm_parameter *param, double *alpha, Solver::SolutionInfo* si) { int l = prob->l; double *zeros = new double[l]; schar *ones = new schar[l]; int i; int n = (int)(param->nu*prob->l); // # of alpha's at upper bound for(i=0;il) alpha[n] = param->nu * prob->l - n; for(i=n+1;ieps, si, param->shrinking); delete[] zeros; delete[] ones; } static void solve_epsilon_svr( const svm_problem *prob, const svm_parameter *param, double *alpha, Solver::SolutionInfo* si) { int l = prob->l; double *alpha2 = new double[2*l]; double *linear_term = new double[2*l]; schar *y = new schar[2*l]; int i; for(i=0;ip - prob->y[i]; y[i] = 1; alpha2[i+l] = 0; linear_term[i+l] = param->p + prob->y[i]; y[i+l] = -1; } Solver s; s.Solve(2*l, SVR_Q(*prob,*param), linear_term, y, alpha2, param->C, param->C, param->eps, si, param->shrinking); double sum_alpha = 0; for(i=0;iC*l)); delete[] alpha2; delete[] linear_term; delete[] y; } static void solve_nu_svr( const svm_problem *prob, const svm_parameter *param, double *alpha, Solver::SolutionInfo* si) { int l = prob->l; double C = param->C; double *alpha2 = new double[2*l]; double *linear_term = new double[2*l]; schar *y = new schar[2*l]; int i; double sum = C * param->nu * l / 2; for(i=0;iy[i]; y[i] = 1; linear_term[i+l] = prob->y[i]; y[i+l] = -1; } Solver_NU s; s.Solve(2*l, SVR_Q(*prob,*param), linear_term, y, alpha2, C, C, param->eps, si, param->shrinking); info("epsilon = %f\n",-si->r); for(i=0;il); Solver::SolutionInfo si; switch(param->svm_type) { case C_SVC: solve_c_svc(prob,param,alpha,&si,Cp,Cn); break; case NU_SVC: solve_nu_svc(prob,param,alpha,&si); break; case ONE_CLASS: solve_one_class(prob,param,alpha,&si); break; case EPSILON_SVR: solve_epsilon_svr(prob,param,alpha,&si); break; case NU_SVR: solve_nu_svr(prob,param,alpha,&si); break; } info("obj = %f, rho = %f\n",si.obj,si.rho); // output SVs int nSV = 0; int nBSV = 0; for(int i=0;il;i++) { if(fabs(alpha[i]) > 0) { ++nSV; if(prob->y[i] > 0) { if(fabs(alpha[i]) >= si.upper_bound_p) ++nBSV; } else { if(fabs(alpha[i]) >= si.upper_bound_n) ++nBSV; } } } info("nSV = %d, nBSV = %d\n",nSV,nBSV); decision_function f; f.alpha = alpha; f.rho = si.rho; return f; } // Platt's binary SVM Probablistic Output: an improvement from Lin et al. 
static void sigmoid_train( int l, const double *dec_values, const double *labels, double& A, double& B) { double prior1=0, prior0 = 0; int i; for (i=0;i 0) prior1+=1; else prior0+=1; int max_iter=100; // Maximal number of iterations double min_step=1e-10; // Minimal step taken in line search double sigma=1e-12; // For numerically strict PD of Hessian double eps=1e-5; double hiTarget=(prior1+1.0)/(prior1+2.0); double loTarget=1/(prior0+2.0); double *t=Malloc(double,l); double fApB,p,q,h11,h22,h21,g1,g2,det,dA,dB,gd,stepsize; double newA,newB,newf,d1,d2; int iter; // Initial Point and Initial Fun Value A=0.0; B=log((prior0+1.0)/(prior1+1.0)); double fval = 0.0; for (i=0;i0) t[i]=hiTarget; else t[i]=loTarget; fApB = dec_values[i]*A+B; if (fApB>=0) fval += t[i]*fApB + log(1+exp(-fApB)); else fval += (t[i] - 1)*fApB +log(1+exp(fApB)); } for (iter=0;iter= 0) { p=exp(-fApB)/(1.0+exp(-fApB)); q=1.0/(1.0+exp(-fApB)); } else { p=1.0/(1.0+exp(fApB)); q=exp(fApB)/(1.0+exp(fApB)); } d2=p*q; h11+=dec_values[i]*dec_values[i]*d2; h22+=d2; h21+=dec_values[i]*d2; d1=t[i]-p; g1+=dec_values[i]*d1; g2+=d1; } // Stopping Criteria if (fabs(g1)= min_step) { newA = A + stepsize * dA; newB = B + stepsize * dB; // New function value newf = 0.0; for (i=0;i= 0) newf += t[i]*fApB + log(1+exp(-fApB)); else newf += (t[i] - 1)*fApB +log(1+exp(fApB)); } // Check sufficient decrease if (newf=max_iter) info("Reaching maximal iterations in two-class probability estimates\n"); free(t); } static double sigmoid_predict(double decision_value, double A, double B) { double fApB = decision_value*A+B; // 1-p used later; avoid catastrophic cancellation if (fApB >= 0) return exp(-fApB)/(1.0+exp(-fApB)); else return 1.0/(1+exp(fApB)) ; } // Method 2 from the multiclass_prob paper by Wu, Lin, and Weng static void multiclass_probability(int k, double **r, double *p) { int t,j; int iter = 0, max_iter=max(100,k); double **Q=Malloc(double *,k); double *Qp=Malloc(double,k); double pQp, eps=0.005/k; for (t=0;tmax_error) max_error=error; } if (max_error=max_iter) info("Exceeds max_iter in multiclass_prob\n"); for(t=0;tl); double *dec_values = Malloc(double,prob->l); // random shuffle GetRNGstate(); for(i=0;il;i++) perm[i]=i; for(i=0;il;i++) { int j = i+((int) (unif_rand() * (prob->l-i))) % (prob->l-i); swap(perm[i],perm[j]); } PutRNGstate(); for(i=0;il/nr_fold; int end = (i+1)*prob->l/nr_fold; int j,k; struct svm_problem subprob; subprob.l = prob->l-(end-begin); subprob.x = Malloc(struct svm_node*,subprob.l); subprob.y = Malloc(double,subprob.l); k=0; for(j=0;jx[perm[j]]; subprob.y[k] = prob->y[perm[j]]; ++k; } for(j=end;jl;j++) { subprob.x[k] = prob->x[perm[j]]; subprob.y[k] = prob->y[perm[j]]; ++k; } int p_count=0,n_count=0; for(j=0;j0) p_count++; else n_count++; if(p_count==0 && n_count==0) for(j=begin;j 0 && n_count == 0) for(j=begin;j 0) for(j=begin;jx[perm[j]],&(dec_values[perm[j]])); // ensure +1 -1 order; reason not using CV subroutine dec_values[perm[j]] *= submodel->label[0]; } svm_free_and_destroy_model(&submodel); svm_destroy_param(&subparam); } free(subprob.x); free(subprob.y); } sigmoid_train(prob->l,dec_values,prob->y,probA,probB); free(dec_values); free(perm); } // Return parameter of a Laplace distribution static double svm_svr_probability( const svm_problem *prob, const svm_parameter *param) { int i; int nr_fold = 5; double *ymv = Malloc(double,prob->l); double mae = 0; svm_parameter newparam = *param; newparam.probability = 0; svm_cross_validation(prob,&newparam,nr_fold,ymv); for(i=0;il;i++) { ymv[i]=prob->y[i]-ymv[i]; mae += 
fabs(ymv[i]); } mae /= prob->l; double std=sqrt(2*mae*mae); int count=0; mae=0; for(i=0;il;i++) if (fabs(ymv[i]) > 5*std) count=count+1; else mae+=fabs(ymv[i]); mae /= (prob->l-count); info("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma= %g\n",mae); free(ymv); return mae; } // label: label name, start: begin of each class, count: #data of classes, perm: indices to the original data // perm, length l, must be allocated before calling this subroutine static void svm_group_classes(const svm_problem *prob, int *nr_class_ret, int **label_ret, int **start_ret, int **count_ret, int *perm) { int l = prob->l; int max_nr_class = 16; int nr_class = 0; int *label = Malloc(int,max_nr_class); int *count = Malloc(int,max_nr_class); int *data_label = Malloc(int,l); int i; for(i=0;iy[i]; int j; for(j=0;jparam = *param; model->free_sv = 0; // XXX if(param->svm_type == ONE_CLASS || param->svm_type == EPSILON_SVR || param->svm_type == NU_SVR) { // regression or one-class-svm model->nr_class = 2; model->label = NULL; model->nSV = NULL; model->probA = NULL; model->probB = NULL; model->sv_coef = Malloc(double *,1); if(param->probability && (param->svm_type == EPSILON_SVR || param->svm_type == NU_SVR)) { model->probA = Malloc(double,1); model->probA[0] = svm_svr_probability(prob,param); } decision_function f = svm_train_one(prob,param,0,0); model->rho = Malloc(double,1); model->rho[0] = f.rho; int nSV = 0; int i; for(i=0;il;i++) if(fabs(f.alpha[i]) > 0) ++nSV; model->l = nSV; model->SV = Malloc(svm_node *,nSV); model->sv_coef[0] = Malloc(double,nSV); model->sv_indices = Malloc(int,nSV); int j = 0; for(i=0;il;i++) if(fabs(f.alpha[i]) > 0) { model->SV[j] = prob->x[i]; model->sv_coef[0][j] = f.alpha[i]; model->sv_indices[j] = i+1; ++j; } free(f.alpha); } else { // classification int l = prob->l; int nr_class; int *label = NULL; int *start = NULL; int *count = NULL; int *perm = Malloc(int,l); // group training data of the same class svm_group_classes(prob,&nr_class,&label,&start,&count,perm); if(nr_class == 1) info("WARNING: training data in only one class. 
See README for details.\n"); svm_node **x = Malloc(svm_node *,l); int i; for(i=0;ix[perm[i]]; // calculate weighted C double *weighted_C = Malloc(double, nr_class); for(i=0;iC; for(i=0;inr_weight;i++) { int j; for(j=0;jweight_label[i] == label[j]) break; if(j == nr_class) REprintf("WARNING: class label %d specified in weight is not found\n", param->weight_label[i]); else weighted_C[j] *= param->weight[i]; } // train k*(k-1)/2 models bool *nonzero = Malloc(bool,l); for(i=0;iprobability) { probA=Malloc(double,nr_class*(nr_class-1)/2); probB=Malloc(double,nr_class*(nr_class-1)/2); } int p = 0; for(i=0;iprobability) svm_binary_svc_probability(&sub_prob,param,weighted_C[i],weighted_C[j],probA[p],probB[p]); f[p] = svm_train_one(&sub_prob,param,weighted_C[i],weighted_C[j]); for(k=0;k 0) nonzero[si+k] = true; for(k=0;k 0) nonzero[sj+k] = true; free(sub_prob.x); free(sub_prob.y); ++p; } // build output model->nr_class = nr_class; model->label = Malloc(int,nr_class); for(i=0;ilabel[i] = label[i]; model->rho = Malloc(double,nr_class*(nr_class-1)/2); for(i=0;irho[i] = f[i].rho; if(param->probability) { model->probA = Malloc(double,nr_class*(nr_class-1)/2); model->probB = Malloc(double,nr_class*(nr_class-1)/2); for(i=0;iprobA[i] = probA[i]; model->probB[i] = probB[i]; } } else { model->probA=NULL; model->probB=NULL; } int total_sv = 0; int *nz_count = Malloc(int,nr_class); model->nSV = Malloc(int,nr_class); for(i=0;inSV[i] = nSV; nz_count[i] = nSV; } info("Total nSV = %d\n",total_sv); model->l = total_sv; model->SV = Malloc(svm_node *,total_sv); model->sv_indices = Malloc(int,total_sv); p = 0; for(i=0;iSV[p] = x[i]; model->sv_indices[p++] = perm[i] + 1; } int *nz_start = Malloc(int,nr_class); nz_start[0] = 0; for(i=1;isv_coef = Malloc(double *,nr_class-1); for(i=0;isv_coef[i] = Malloc(double,total_sv); p = 0; for(i=0;isv_coef[j-1][q++] = f[p].alpha[k]; q = nz_start[j]; for(k=0;ksv_coef[i][q++] = f[p].alpha[ci+k]; ++p; } free(label); free(probA); free(probB); free(count); free(perm); free(start); free(x); free(weighted_C); free(nonzero); for(i=0;il; int *perm = Malloc(int,l); int nr_class; GetRNGstate(); if (nr_fold > l) { nr_fold = l; REprintf("WARNING: # folds > # data. 
Will use # folds = # data instead (i.e., leave-one-out cross validation)\n"); } fold_start = Malloc(int,nr_fold+1); // stratified cv may not give leave-one-out rate // Each class to l folds -> some folds may have zero elements if((param->svm_type == C_SVC || param->svm_type == NU_SVC) && nr_fold < l) { int *start = NULL; int *label = NULL; int *count = NULL; svm_group_classes(prob,&nr_class,&label,&start,&count,perm); // random shuffle and then data grouped by fold using the array perm int *fold_count = Malloc(int,nr_fold); int c; int *index = Malloc(int,l); for(i=0;ix[perm[j]]; subprob.y[k] = prob->y[perm[j]]; ++k; } for(j=end;jx[perm[j]]; subprob.y[k] = prob->y[perm[j]]; ++k; } struct svm_model *submodel = svm_train(&subprob,param); if(param->probability && (param->svm_type == C_SVC || param->svm_type == NU_SVC)) { double *prob_estimates=Malloc(double,svm_get_nr_class(submodel)); for(j=begin;jx[perm[j]],prob_estimates); free(prob_estimates); } else for(j=begin;jx[perm[j]]); svm_free_and_destroy_model(&submodel); free(subprob.x); free(subprob.y); } free(fold_start); free(perm); PutRNGstate(); } int svm_get_svm_type(const svm_model *model) { return model->param.svm_type; } int svm_get_nr_class(const svm_model *model) { return model->nr_class; } void svm_get_labels(const svm_model *model, int* label) { if (model->label != NULL) for(int i=0;inr_class;i++) label[i] = model->label[i]; } void svm_get_sv_indices(const svm_model *model, int* indices) { if (model->sv_indices != NULL) for(int i=0;il;i++) indices[i] = model->sv_indices[i]; } int svm_get_nr_sv(const svm_model *model) { return model->l; } double svm_get_svr_probability(const svm_model *model) { if ((model->param.svm_type == EPSILON_SVR || model->param.svm_type == NU_SVR) && model->probA!=NULL) return model->probA[0]; else { REprintf("Model doesn't contain information for SVR probability inference\n"); return 0; } } double svm_predict_values(const svm_model *model, const svm_node *x, double* dec_values) { int i; if(model->param.svm_type == ONE_CLASS || model->param.svm_type == EPSILON_SVR || model->param.svm_type == NU_SVR) { double *sv_coef = model->sv_coef[0]; double sum = 0; for(i=0;il;i++) sum += sv_coef[i] * Kernel::k_function(x,model->SV[i],model->param); sum -= model->rho[0]; *dec_values = sum; if(model->param.svm_type == ONE_CLASS) return (sum>0)?1:-1; else return sum; } else { int nr_class = model->nr_class; int l = model->l; double *kvalue = Malloc(double,l); for(i=0;iSV[i],model->param); int *start = Malloc(int,nr_class); start[0] = 0; for(i=1;inSV[i-1]; int *vote = Malloc(int,nr_class); for(i=0;inSV[i]; int cj = model->nSV[j]; int k; double *coef1 = model->sv_coef[j-1]; double *coef2 = model->sv_coef[i]; for(k=0;krho[p]; dec_values[p] = sum; if(dec_values[p] > 0) ++vote[i]; else ++vote[j]; p++; } int vote_max_idx = 0; for(i=1;i vote[vote_max_idx]) vote_max_idx = i; free(kvalue); free(start); free(vote); return model->label[vote_max_idx]; } } double svm_predict(const svm_model *model, const svm_node *x) { int nr_class = model->nr_class; double *dec_values; if(model->param.svm_type == ONE_CLASS || model->param.svm_type == EPSILON_SVR || model->param.svm_type == NU_SVR) dec_values = Malloc(double, 1); else dec_values = Malloc(double, nr_class*(nr_class-1)/2); double pred_result = svm_predict_values(model, x, dec_values); free(dec_values); return pred_result; } double svm_predict_probability( const svm_model *model, const svm_node *x, double *prob_estimates) { if ((model->param.svm_type == C_SVC || model->param.svm_type == 
NU_SVC) && model->probA!=NULL && model->probB!=NULL) { int i; int nr_class = model->nr_class; double *dec_values = Malloc(double, nr_class*(nr_class-1)/2); svm_predict_values(model, x, dec_values); double min_prob=1e-7; double **pairwise_prob=Malloc(double *,nr_class); for(i=0;iprobA[k],model->probB[k]),min_prob),1-min_prob); pairwise_prob[j][i]=1-pairwise_prob[i][j]; k++; } if (nr_class == 2) { prob_estimates[0] = pairwise_prob[0][1]; prob_estimates[1] = pairwise_prob[1][0]; } else multiclass_probability(nr_class,pairwise_prob,prob_estimates); int prob_max_idx = 0; for(i=1;i prob_estimates[prob_max_idx]) prob_max_idx = i; for(i=0;ilabel[prob_max_idx]; } else return svm_predict(model, x); } static const char *svm_type_table[] = { "c_svc","nu_svc","one_class","epsilon_svr","nu_svr",NULL }; static const char *kernel_type_table[]= { "linear","polynomial","rbf","sigmoid","precomputed",NULL }; int svm_save_model(const char *model_file_name, const svm_model *model) { FILE *fp = fopen(model_file_name,"w"); if(fp==NULL) return -1; char *old_locale = setlocale(LC_ALL, NULL); if (old_locale) { old_locale = strdup(old_locale); } setlocale(LC_ALL, "C"); const svm_parameter& param = model->param; (void) fprintf(fp,"svm_type %s\n", svm_type_table[param.svm_type]); (void) fprintf(fp,"kernel_type %s\n", kernel_type_table[param.kernel_type]); if(param.kernel_type == POLY) (void) fprintf(fp,"degree %d\n", param.degree); if(param.kernel_type == POLY || param.kernel_type == RBF || param.kernel_type == SIGMOID) (void) fprintf(fp,"gamma %.17g\n", param.gamma); if(param.kernel_type == POLY || param.kernel_type == SIGMOID) (void) fprintf(fp,"coef0 %.17g\n", param.coef0); int nr_class = model->nr_class; int l = model->l; (void) fprintf(fp, "nr_class %d\n", nr_class); (void) fprintf(fp, "total_sv %d\n",l); { (void) fprintf(fp, "rho"); for(int i=0;irho[i]); (void) fprintf(fp, "\n"); } if(model->label) { (void) fprintf(fp, "label"); for(int i=0;ilabel[i]); (void) fprintf(fp, "\n"); } if(model->probA) // regression has probA only { (void) fprintf(fp, "probA"); for(int i=0;iprobA[i]); (void) fprintf(fp, "\n"); } if(model->probB) { (void) fprintf(fp, "probB"); for(int i=0;iprobB[i]); (void) fprintf(fp, "\n"); } if(model->nSV) { (void) fprintf(fp, "nr_sv"); for(int i=0;inSV[i]); (void) fprintf(fp, "\n"); } (void) fprintf(fp, "SV\n"); const double * const *sv_coef = model->sv_coef; const svm_node * const *SV = model->SV; for(int i=0;ivalue)); else while(p->index != -1) { (void) fprintf(fp,"%d:%.8g ",p->index,p->value); p++; } (void) fprintf(fp, "\n"); } setlocale(LC_ALL, old_locale); free(old_locale); if (ferror(fp) != 0 || fclose(fp) != 0) return -1; else return 0; } static char *line = NULL; static int max_line_len; static char* readline(FILE *input) { int len; if(fgets(line,max_line_len,input) == NULL) return NULL; while(strrchr(line,'\n') == NULL) { max_line_len *= 2; line = (char *) realloc(line,max_line_len); len = (int) strlen(line); if(fgets(line+len,max_line_len-len,input) == NULL) break; } return line; } // // FSCANF helps to handle fscanf failures. // Its do-while block avoids the ambiguity when // if (...) 
// FSCANF(); // is used // #define FSCANF(_stream, _format, _var) do{ if (fscanf(_stream, _format, _var) != 1) return false; }while(0) bool read_model_header(FILE *fp, svm_model* model) { svm_parameter& param = model->param; // parameters for training only won't be assigned, but arrays are assigned as NULL for safety param.nr_weight = 0; param.weight_label = NULL; param.weight = NULL; char cmd[81]; while(1) { FSCANF(fp,"%80s",cmd); if(strcmp(cmd,"svm_type")==0) { FSCANF(fp,"%80s",cmd); int i; for(i=0;svm_type_table[i];i++) { if(strcmp(svm_type_table[i],cmd)==0) { param.svm_type=i; break; } } if(svm_type_table[i] == NULL) { REprintf("unknown svm type.\n"); return false; } } else if(strcmp(cmd,"kernel_type")==0) { FSCANF(fp,"%80s",cmd); int i; for(i=0;kernel_type_table[i];i++) { if(strcmp(kernel_type_table[i],cmd)==0) { param.kernel_type=i; break; } } if(kernel_type_table[i] == NULL) { REprintf("unknown kernel function.\n"); return false; } } else if(strcmp(cmd,"degree")==0) FSCANF(fp,"%d",¶m.degree); else if(strcmp(cmd,"gamma")==0) FSCANF(fp,"%lf",¶m.gamma); else if(strcmp(cmd,"coef0")==0) FSCANF(fp,"%lf",¶m.coef0); else if(strcmp(cmd,"nr_class")==0) FSCANF(fp,"%d",&model->nr_class); else if(strcmp(cmd,"total_sv")==0) FSCANF(fp,"%d",&model->l); else if(strcmp(cmd,"rho")==0) { int n = model->nr_class * (model->nr_class-1)/2; model->rho = Malloc(double,n); for(int i=0;irho[i]); } else if(strcmp(cmd,"label")==0) { int n = model->nr_class; model->label = Malloc(int,n); for(int i=0;ilabel[i]); } else if(strcmp(cmd,"probA")==0) { int n = model->nr_class * (model->nr_class-1)/2; model->probA = Malloc(double,n); for(int i=0;iprobA[i]); } else if(strcmp(cmd,"probB")==0) { int n = model->nr_class * (model->nr_class-1)/2; model->probB = Malloc(double,n); for(int i=0;iprobB[i]); } else if(strcmp(cmd,"nr_sv")==0) { int n = model->nr_class; model->nSV = Malloc(int,n); for(int i=0;inSV[i]); } else if(strcmp(cmd,"SV")==0) { while(1) { int c = getc(fp); if(c==EOF || c=='\n') break; } break; } else { REprintf("unknown text in model file: [%s]\n",cmd); return false; } } return true; } svm_model *svm_load_model(const char *model_file_name) { FILE *fp = fopen(model_file_name,"rb"); if(fp==NULL) return NULL; char *old_locale = setlocale(LC_ALL, NULL); if (old_locale) { old_locale = strdup(old_locale); } setlocale(LC_ALL, "C"); // read parameters svm_model *model = Malloc(svm_model,1); model->rho = NULL; model->probA = NULL; model->probB = NULL; model->sv_indices = NULL; model->label = NULL; model->nSV = NULL; // read header if (!read_model_header(fp, model)) { REprintf("ERROR: fscanf failed to read model\n"); setlocale(LC_ALL, old_locale); free(old_locale); free(model->rho); free(model->label); free(model->nSV); free(model); return NULL; } // read sv_coef and SV int elements = 0; long pos = ftell(fp); max_line_len = 1024; line = Malloc(char,max_line_len); char *p,*endptr,*idx,*val; while(readline(fp)!=NULL) { p = strtok(line,":"); while(1) { p = strtok(NULL,":"); if(p == NULL) break; ++elements; } } elements += model->l; fseek(fp,pos,SEEK_SET); int m = model->nr_class - 1; int l = model->l; model->sv_coef = Malloc(double *,m); int i; for(i=0;isv_coef[i] = Malloc(double,l); model->SV = Malloc(svm_node*,l); svm_node *x_space = NULL; if(l>0) x_space = Malloc(svm_node,elements); int j=0; for(i=0;iSV[i] = &x_space[j]; p = strtok(line, " \t"); model->sv_coef[0][i] = strtod(p,&endptr); for(int k=1;ksv_coef[k][i] = strtod(p,&endptr); } while(1) { idx = strtok(NULL, ":"); val = strtok(NULL, " \t"); if(val == NULL) break; 
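/* convert the index:value pair just parsed and append it as the next node of the sparse representation */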
x_space[j].index = (int) strtol(idx,&endptr,10); x_space[j].value = strtod(val,&endptr); ++j; } x_space[j++].index = -1; } free(line); setlocale(LC_ALL, old_locale); free(old_locale); if (ferror(fp) != 0 || fclose(fp) != 0) return NULL; model->free_sv = 1; // XXX return model; } void svm_free_model_content(svm_model* model_ptr) { if(model_ptr->free_sv && model_ptr->l > 0 && model_ptr->SV != NULL) free((void *)(model_ptr->SV[0])); if(model_ptr->sv_coef) { for(int i=0;inr_class-1;i++) free(model_ptr->sv_coef[i]); } free(model_ptr->SV); model_ptr->SV = NULL; free(model_ptr->sv_coef); model_ptr->sv_coef = NULL; free(model_ptr->rho); model_ptr->rho = NULL; free(model_ptr->label); model_ptr->label= NULL; free(model_ptr->probA); model_ptr->probA = NULL; free(model_ptr->probB); model_ptr->probB= NULL; free(model_ptr->sv_indices); model_ptr->sv_indices = NULL; free(model_ptr->nSV); model_ptr->nSV = NULL; } void svm_free_and_destroy_model(svm_model** model_ptr_ptr) { if(model_ptr_ptr != NULL && *model_ptr_ptr != NULL) { svm_free_model_content(*model_ptr_ptr); free(*model_ptr_ptr); *model_ptr_ptr = NULL; } } void svm_destroy_param(svm_parameter* param) { free(param->weight_label); free(param->weight); } const char *svm_check_parameter(const svm_problem *prob, const svm_parameter *param) { // svm_type int svm_type = param->svm_type; if(svm_type != C_SVC && svm_type != NU_SVC && svm_type != ONE_CLASS && svm_type != EPSILON_SVR && svm_type != NU_SVR) return "unknown svm type"; // kernel_type, degree int kernel_type = param->kernel_type; if(kernel_type != LINEAR && kernel_type != POLY && kernel_type != RBF && kernel_type != SIGMOID && kernel_type != PRECOMPUTED) return "unknown kernel type"; if(param->gamma < 0) return "gamma < 0"; if(param->degree < 0) return "degree of polynomial kernel < 0"; // cache_size,eps,C,nu,p,shrinking if(param->cache_size <= 0) return "cache_size <= 0"; if(param->eps <= 0) return "eps <= 0"; if(svm_type == C_SVC || svm_type == EPSILON_SVR || svm_type == NU_SVR) if(param->C <= 0) return "C <= 0"; if(svm_type == NU_SVC || svm_type == ONE_CLASS || svm_type == NU_SVR) if(param->nu <= 0 || param->nu > 1) return "nu <= 0 or nu > 1"; if(svm_type == EPSILON_SVR) if(param->p < 0) return "p < 0"; if(param->shrinking != 0 && param->shrinking != 1) return "shrinking != 0 and shrinking != 1"; if(param->probability != 0 && param->probability != 1) return "probability != 0 and probability != 1"; if(param->probability == 1 && svm_type == ONE_CLASS) return "one-class SVM probability output not supported yet"; // check whether nu-svc is feasible if(svm_type == NU_SVC) { int l = prob->l; int max_nr_class = 16; int nr_class = 0; int *label = Malloc(int,max_nr_class); int *count = Malloc(int,max_nr_class); int i; for(i=0;iy[i]; int j; for(j=0;jnu*(n1+n2)/2 > min(n1,n2)) { free(label); free(count); return "specified nu is infeasible"; } } } free(label); free(count); } return NULL; } int svm_check_probability_model(const svm_model *model) { return ((model->param.svm_type == C_SVC || model->param.svm_type == NU_SVC) && model->probA!=NULL && model->probB!=NULL) || ((model->param.svm_type == EPSILON_SVR || model->param.svm_type == NU_SVR) && model->probA!=NULL); } void svm_set_print_string_function(void (*print_func)(const char *)) { if(print_func == NULL) svm_print_string = &print_string_stdout; else svm_print_string = print_func; } e1071/src/cshell.c0000755000175100001440000001766714173734135013266 0ustar hornikusers/*****************************************************************/ /* * Copyright 
(C)2000 Evgenia Dimitriadou * * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. */ #include #include #include "R.h" int subcshell(int *xrows, int *xcols, double *x, int *ncenters, double *centers, int *itermax, int *iter, int *verbose, int *dist, double *U, double *UANT, double *f, double *ermin, double *radius, int *flag) { int k, col, i, m, n ; double serror; /*convergence parameters*/ double epsi1, epsi2, conv; double sum2; double temp,tempu, tempu1, tempu2, distance; int j; double suma; double exponente; /* *ermin=0.0;*/ serror=0.0; sum2=0; if ((*flag==0) || (*flag==5)){ /* UPDATE CENTERS*/ for(i=0;i<*ncenters;i++) { sum2=0; for(col=0;col<*xcols;col++) centers[i+(*ncenters)*col]=0.0; for(k=0;k<*xrows;k++) { temp=pow(U[k+(*xrows)*i],*f); sum2=sum2+temp; for(col=0;col<*xcols;col++) { centers[i+(*ncenters)*col]+= temp*x[k+(*xrows)*col]; } } for(col=0;col<*xcols;col++) centers[i+(*ncenters)*col]/=sum2; } /*UPDATE radius*/ for(i=0;i<*ncenters;i++) { sum2=0; radius[i]=0.0; for(k=0;k<*xrows;k++) { distance=0.0; temp=pow(U[k+(*xrows)*i],*f); sum2=sum2+temp; for(col=0;col<*xcols;col++) { if (*dist==0){ distance+= (x[k+(*xrows)*col]-centers[i+(*ncenters)*col])*(x[k+(*xrows)*col]-centers[i+(*ncenters)*col]); } else if(*dist ==1){ distance+=fabs(x[k+(*xrows)*col]-centers[i+(*ncenters)*col]); } } if (*dist==0){ radius[i]+= temp*sqrt(distance);} else if(*dist ==1){ radius[i]+= temp*distance;} } radius[i]/=sum2; } }/*flag=0*/ /*update UANT*/ for(i=0;i<*ncenters;i++){ for(k=0;k<*xrows;k++){ UANT[k+(*xrows)*i]=U[k+(*xrows)*i];}} /* UPDATE Membership Matrix */ exponente=2.0/(*f-1.0); for(i=0;i<*ncenters;i++) { for(k=0;k<*xrows;k++) { suma=0; for(j=0;j<*ncenters;j++) { tempu=0; tempu1=0; tempu2=0; for (col=0;col<*xcols;col++) { if (*dist==0){ tempu1+=(x[k+(*xrows)*col]-centers[i+(*ncenters)*col])*(x[k+(*xrows)*col]-centers[i+(*ncenters)*col]); tempu2+=(x[k+(*xrows)*col]-centers[j+(*ncenters)*col])*(x[k+(*xrows)*col]-centers[j+(*ncenters)*col]); } else if(*dist ==1){ tempu1+=fabs(x[k+(*xrows)*col]-centers[i+(*ncenters)*col]); tempu2+=fabs(x[k+(*xrows)*col]-centers[j+(*ncenters)*col]); } } if (*dist==0){ tempu=fabs(sqrt(tempu1)-radius[i])/fabs(sqrt(tempu2)-radius[j]); } else if(*dist ==1){ tempu=fabs((tempu1-radius[i])/(tempu2-radius[j])); } suma=suma+pow(tempu,exponente); } U[k+(*xrows)*i]=1.0/suma; } } /*ERROR MINIMIZATION*/ epsi1=0.002; epsi2=0.2; conv=0.0; for (m=0;m<*ncenters;m++){ for (k=0;k<*xrows;k++){ serror = 0.0; for(n=0;n<*xcols;n++){ if(*dist == 0){ serror += (x[k+(*xrows)*n] - centers[m+(*ncenters)*n])*(x[k+(*xrows)*n] - centers[m +(*ncenters)*n]); } else if(*dist ==1){ serror += fabs(x[k+(*xrows)*n] - centers[m + (*ncenters)*n]); } } if (*dist == 0){ serror=fabs(sqrt(serror)-radius[m]);} else if(*dist ==1){ serror=fabs(serror-radius[m]);} *ermin+=pow(U[k+(*xrows)*m],*f)*pow(serror,2); /* *ermin=*ermin/(*xrows));*/ /*Convergence check*/ conv += 
fabs(U[k+(*xrows)*m]-UANT[k+(*xrows)*m]); } } if (conv<= ((*xrows)*(*xcols)*epsi1)){ *flag=2; if (*verbose){ Rprintf("Iteration: %3d converged, Error: %13.10f\n",*iter,conv); }} else if (conv<= ((*xrows)*(*xcols)*epsi2)){ if (*verbose){ Rprintf("Iteration: %3d Epsi2: %13.10f\n",*iter,conv);} if (*flag==3) *flag=4; else *flag=1; } else if(*flag==3) *flag=5; if (*verbose){ Rprintf("Iteration: %3d Error: %13.10f\n",*iter,*ermin/(*xrows)); } return 0; } int cshell(int *xrows, int *xcols, double *x, int *ncenters, double *centers, int *itermax, int *iter, int *verbose, int *dist, double *U, double *UANT, double *f, double *ermin, double *radius, int *flag) { int k; int i,j,col; double suma,tempu,exponente,tempu1,tempu2; exponente=2.0/(*f-1.0); /* *flag=0;*/ if (*flag==0){ *iter=0; /*Initialize Membership Matrix */ for(i=0;i<*ncenters;i++) { for(k=0;k<*xrows;k++) { suma=0; for(j=0;j<*ncenters;j++) { tempu=0; tempu1=0; tempu2=0; for (col=0;col<*xcols;col++) { if (*dist==0){ tempu1+=(x[k+(*xrows)*col]-centers[i+(*ncenters)*col])*(x[k+(*xrows)*col]-centers[i+(*ncenters)*col]); tempu2+=(x[k+(*xrows)*col]-centers[j+(*ncenters)*col])*(x[k+(*xrows)*col]-centers[j+(*ncenters)*col]); } else if(*dist ==1){ tempu1+=fabs(x[k+(*xrows)*col]-centers[i+(*ncenters)*col]); tempu2+=fabs(x[k+(*xrows)*col]-centers[j+(*ncenters)*col]); } } if (*dist==0){ tempu=fabs(sqrt(tempu1)-radius[i])/fabs(sqrt(tempu2)-radius[j]); } else if(*dist ==1){ tempu=fabs((tempu1-radius[i])/(tempu2-radius[j])); } suma=suma+pow(tempu,exponente); } UANT[k+(*xrows)*i]=1.0/suma; } } for(i=0;i<*ncenters;i++) { for(j=0;j<*xrows;j++) U[j+(*xrows)*i]=UANT[j+(*xrows)*i]; } } while(((*iter)++ < *itermax) && ((*flag)!=1 && (*flag)!=2) && (*flag)!=4) { *ermin=0.0; subcshell(xrows, xcols, x, ncenters, centers, itermax, iter, verbose, dist, U, UANT, f, ermin, radius, flag); } return 0; } /*****************************************************************/ /*******only for prediction***************************************/ /*****************************************************************/ int cshell_assign(int *xrows, int *xcols, double *x, int *ncenters, double *centers, int *dist, double *U, double *f, double *radius) { int k, col, i; double tempu, tempu1, tempu2; int j; double suma; double exponente; exponente=2.0/(*f-1.0); for(i=0;i<*ncenters;i++) { for(k=0;k<*xrows;k++) { suma=0; for(j=0;j<*ncenters;j++) { tempu=0; tempu1=0; tempu2=0; for (col=0;col<*xcols;col++) { if (*dist==0){ tempu1+=(x[k+(*xrows)*col]-centers[i+(*ncenters)*col])*(x[k+(*xrows)*col]-centers[i+(*ncenters)*col]); tempu2+=(x[k+(*xrows)*col]-centers[j+(*ncenters)*col])*(x[k+(*xrows)*col]-centers[j+(*ncenters)*col]); } else if(*dist ==1){ tempu1+=fabs(x[k+(*xrows)*col]-centers[i+(*ncenters)*col]); tempu2+=fabs(x[k+(*xrows)*col]-centers[j+(*ncenters)*col]); } } if (*dist==0){ tempu=fabs(sqrt(tempu1)-radius[i])/fabs(sqrt(tempu2)-radius[j]); } else if(*dist ==1){ tempu=fabs((tempu1-radius[i])/(tempu2-radius[j])); } suma=suma+pow(tempu,exponente); } U[k+(*xrows)*i]=1.0/suma; } } return 0; } e1071/src/init.c0000755000175100001440000000627514173734135012750 0ustar hornikusers #include #include #include void cmeans(double *x, int *nr_x, int *nc, double *p, int *nr_p, double *w, double *f, int *dist, int *itermax, double *reltol, int *verbose, double *u, double *ermin, int *iter); int cshell(int *xrows, int *xcols, double *x, int *ncenters, double *centers, int *itermax, int *iter, int *verbose, int *dist, double *U, double *UANT, double *f, double *ermin, double *radius, int *flag); int 
e1071_floyd(int *n, double *A, double *C, int *P); void ufcl(double *x, int *nr_x, int *nc, double *p, int *nr_p, double *w, double *f, int *dist, int *itermax, double *reltol, int *verbose, double *rate_par, double *u, double *ermin, int *iter); void svmtrain (double *x, int *r, int *c, double *y, int *rowindex, int *colindex, int *svm_type, int *kernel_type, int *degree, double *gamma, double *coef0, double *cost, double *nu, int *weightlabels, double *weights, int *nweights, double *cache, double *tolerance, double *epsilon, int *shrinking, int *cross, int *sparse, int *probability, int *nclasses, int *nr, int *index, int *labels, int *nSV, double *rho, double *coefs, double *sigma, double *probA, double *probB, double *cresults, double *ctotal1, double *ctotal2, char **error); void svmpredict (int *decisionvalues, int *probability, double *v, int *r, int *c, int *rowindex, int *colindex, double *coefs, double *rho, int *compprob, double *probA, double *probB, int *nclasses, int *totnSV, int *labels, int *nSV, int *sparsemodel, int *svm_type, int *kernel_type, int *degree, double *gamma, double *coef0, double *x, int *xr, int *xrowindex, int *xcolindex, int *sparsex, double *ret, double *dec, double *prob); void svmwrite (double *v, int *r, int *c, int *rowindex, int *colindex, double *coefs, double *rho, int *compprob, double *probA, double *probB, int *nclasses, int *totnSV, int *labels, int *nSV, int *sparsemodel, int *svm_type, int *kernel_type, int *degree, double *gamma, double *coef0, char **filename); static const R_CMethodDef CEntries[] = { {"cmeans", (DL_FUNC) &cmeans, 14}, {"cshell", (DL_FUNC) &cshell, 15}, {"e1071_floyd", (DL_FUNC) &e1071_floyd, 4}, {"svmpredict", (DL_FUNC) &svmpredict, 30}, {"svmtrain", (DL_FUNC) &svmtrain, 37}, {"svmwrite", (DL_FUNC) &svmwrite, 21}, {"ufcl", (DL_FUNC) &ufcl, 15}, {NULL, NULL, 0} }; void R_init_e1071(DllInfo *dll) { R_registerRoutines(dll, CEntries, NULL, NULL, NULL); R_useDynamicSymbols(dll, FALSE); } e1071/src/floyd.c0000755000175100001440000000140314173734135013106 0ustar hornikusersint e1071_floyd(int *n, double *A, double *C, int *P) /* this function takes an nxn matrix C of edge costs and produces */ /* an nxn matrix A of lengths of shortest paths, and an nxn */ /* matrix P giving a point in the middle of each shortest path */ { int i,j,k; for (i=0; i<*n; i++) for (j=0; j<*n; j++) { A[i + *n * j] = C[i + *n * j]; P[i + *n * j] = -1; } for (i=0; i<*n; i++) A[i + *n * i] = 0; /* no self cycle */ for (k=0; k<*n; k++) for (i=0; i<*n; i++) for (j=0; j<*n; j++) if (A[i + *n * k]+A[k + *n * j] < A[i + *n * j]) { A[i + *n * j] = A[i + *n * k] + A[k + *n * j]; P[i + *n * j] = k; /* k is included in shortest path */ } return 0; } e1071/src/cmeans.c0000755000175100001440000001774014173734135013252 0ustar hornikusers/* C code for (weighted) fuzzy c-means, rewritten from scratch by KH. */ #include #include #include /* Enhance readability of matrix-subscripting for matrices stored in row-major order. */ #define MSUB(x, i, j, n) x[(i) + (n) * (j)] static double *d; static double *dwrk, *dwrk_x, *dwrk_w; static int *iwrk; static void cmeans_setup(int nr_x, int nr_p, int dist) { int len_u_d = nr_x * nr_p; d = (double *) R_alloc(len_u_d, sizeof(double)); if(dist == 1) { /* Needed for weighted medians. 
*/ dwrk_x = (double *) R_alloc(nr_x, sizeof(double)); dwrk_w = (double *) R_alloc(nr_x, sizeof(double)); dwrk = (double *) R_alloc(nr_x, sizeof(double)); iwrk = (int *) R_alloc(nr_x, sizeof(int)); } } /* static void cmeans_copy_vector(double *from, double *to, int len) { int i; for(i = 0; i < len; i++) to[i] = from[i]; } static double cmeans_delta_old_new(double *old, double *new, int len) { int i; double sum = 0; for(i = 0; i < len; i++) sum += fabs(new[i] - old[i]); return(sum / len); } */ static int cmeans_sign(double x) { if(x == 0) return(0); return((x > 0) ? 1 : -1); } static double cmeans_weighted_median(double *x, double *w, int len) { int i; double sum, val, marg, mval, cumsum_w, cumsum_w_x; /* Sort x. */ for(i = 0; i < len; i++) iwrk[i] = i; rsort_with_index(x, iwrk, len); /* Permute w using iwrk, and normalize. */ sum = 0; for(i = 0; i < len; i++) { dwrk[i] = w[iwrk[i]]; sum += dwrk[i]; } for(i = 0; i < len; i++) { w[i] = dwrk[i] / sum; } cumsum_w = cumsum_w_x = 0; mval = R_PosInf; marg = *x; /* -Wall */ for(i = 0; i < len; i++) { cumsum_w += w[i]; cumsum_w_x += w[i] * x[i]; val = x[i] * (cumsum_w - .5) - cumsum_w_x; if(val < mval) { marg = x[i]; mval = val; } } return(marg); } /* Update the dissimilarities (between objects and prototypes) for a * single object (i.e., a single row of the dissimilarity matrix. */ static void ufcl_dissimilarities(double *x, double *p, int nr_x, int nc, int nr_p, int dist, int ix, double *d) { int ip, j; double sum, v; for(ip = 0; ip < nr_p; ip++) { sum = 0; for(j = 0; j < nc; j++) { v = MSUB(x, ix, j, nr_x) - MSUB(p, ip, j, nr_p); if(dist == 0) sum += v * v; else if(dist == 1) sum += fabs(v); } MSUB(d, ix, ip, nr_x) = sum; } } static void cmeans_dissimilarities(double *x, double *p, int nr_x, int nc, int nr_p, int dist, double *d) { int ix; for(ix = 0; ix < nr_x; ix++) { /* Loop over all objects ... */ ufcl_dissimilarities(x, p, nr_x, nc, nr_p, dist, ix, d); } } /* Update the memberships for a single object (i.e., a single row of the * membership matrix.) */ static void ufcl_memberships(double *d, int nr_x, int nr_p, double exponent, int ix, double *u) { int ip, n_of_zeroes; double sum, v; n_of_zeroes = 0; for(ip = 0; ip < nr_p; ip++) { if(MSUB(d, ix, ip, nr_x) == 0) n_of_zeroes++; } if(n_of_zeroes > 0) { v = 1 / n_of_zeroes; for(ip = 0; ip < nr_p; ip++) MSUB(u, ix, ip, nr_x) = ((MSUB(d, ix, ip, nr_x) == 0) ? v : 0); } else { /* Use the assumption that in general, pow() is more * expensive than subscripting. */ sum = 0; for(ip = 0; ip < nr_p; ip++) { v = 1 / pow(MSUB(d, ix, ip, nr_x), exponent); sum += v; MSUB(u, ix, ip, nr_x) = v; } for(ip = 0; ip < nr_p; ip++) MSUB(u, ix, ip, nr_x) /= sum; } } static void cmeans_memberships(double *d, int nr_x, int nr_p, double exponent, double *u) { int ix; for(ix = 0; ix < nr_x; ix++) { /* Loop over all objects ... */ ufcl_memberships(d, nr_x, nr_p, exponent, ix, u); } } static void cmeans_prototypes(double *x, double *u, double *w, int nr_x, int nc, int nr_p, double f, int dist, double *p) { int ix, ip, j; double sum, v; if(dist == 0) { /* Euclidean: weighted means. */ for(ip = 0; ip < nr_p; ip++) { for(j = 0; j < nc; j++) MSUB(p, ip, j, nr_p) = 0; sum = 0; for(ix = 0; ix < nr_x; ix++) { v = w[ix] * pow(MSUB(u, ix, ip, nr_x), f); sum += v; for(j = 0; j < nc; j++) MSUB(p, ip, j, nr_p) += v * MSUB(x, ix, j, nr_x); } for(j = 0; j < nc; j++) MSUB(p, ip, j, nr_p) /= sum; } } else { /* Manhattan: weighted medians. 
*/ for(ip = 0; ip < nr_p; ip++) for(j = 0; j < nc; j++) { for(ix = 0; ix < nr_x; ix++) { dwrk_x[ix] = MSUB(x, ix, j, nr_x); dwrk_w[ix] = w[ix] * pow(MSUB(u, ix, ip, nr_x), f); } MSUB(p, ip, j, nr_p) = cmeans_weighted_median(dwrk_x, dwrk_w, nr_x); } } } static double cmeans_error_fn(double *u, double *d, double *w, int nr_x, int nr_p, double f) { int ix, ip; double sum; sum = 0; for(ix = 0; ix < nr_x; ix++) for(ip = 0; ip < nr_p; ip++) sum += w[ix] * pow(MSUB(u, ix, ip, nr_x), f) * MSUB(d, ix, ip, nr_x); return(sum); } void cmeans(double *x, int *nr_x, int *nc, double *p, int *nr_p, double *w, double *f, int *dist, int *itermax, double *reltol, int *verbose, double *u, double *ermin, int *iter) { double exponent = 1 / (*f - 1); double old_value, new_value; cmeans_setup(*nr_x, *nr_p, *dist); cmeans_dissimilarities(x, p, *nr_x, *nc, *nr_p, *dist, d); cmeans_memberships(d, *nr_x, *nr_p, exponent, u); old_value = new_value = cmeans_error_fn(u, d, w, *nr_x, *nr_p, *f); *iter = 0; while((*iter)++ < *itermax) { cmeans_prototypes(x, u, w, *nr_x, *nc, *nr_p, *f, *dist, p); cmeans_dissimilarities(x, p, *nr_x, *nc, *nr_p, *dist, d); cmeans_memberships(d, *nr_x, *nr_p, exponent, u); new_value = cmeans_error_fn(u, d, w, *nr_x, *nr_p, *f); if(fabs(old_value - new_value) < *reltol * (old_value + *reltol)) { if(*verbose) Rprintf("Iteration: %3d converged, Error: %13.10f\n", *iter, new_value); break; } else { if(*verbose) { *ermin = cmeans_error_fn(u, d, w, *nr_x, *nr_p, *f); Rprintf("Iteration: %3d, Error: %13.10f\n", *iter, new_value); } old_value = new_value; } } *ermin = new_value; } /* Update prototypes based on a single object. */ static void ufcl_prototypes(double *x, double *u, double *w, int nr_x, int nc, int nr_p, double f, int dist, double lrate, int ix, double *p) { int ip, j; double grad; for(ip = 0; ip < nr_p; ip++) { for(j = 0; j < nc; j++) { grad = MSUB(x, ix, j, nr_x) - MSUB(p, ip, j, nr_p); if(dist == 1) grad = cmeans_sign(grad); MSUB(p, ip, j, nr_p) += lrate * w[ix] * pow(MSUB(u, ix, ip, nr_x), f) * grad; } } } void ufcl(double *x, int *nr_x, int *nc, double *p, int *nr_p, double *w, double *f, int *dist, int *itermax, double *reltol, int *verbose, double *rate_par, double *u, double *ermin, int *iter) { double exponent = 1 / (*f - 1); double old_value, new_value; int ix; double lrate; cmeans_setup(*nr_x, *nr_p, 0); /* Need some starting values ... */ cmeans_dissimilarities(x, p, *nr_x, *nc, *nr_p, *dist, d); cmeans_memberships(d, *nr_x, *nr_p, exponent, u); old_value = new_value = cmeans_error_fn(u, d, w, *nr_x, *nr_p, *f); *iter = 0; while((*iter)++ < *itermax) { /* Turns out that sampling the objects is a bad idea ... 
*/ lrate = *rate_par * (1 - (double) *iter / *itermax); for(ix = 0; ix < *nr_x; ix++) { ufcl_dissimilarities(x, p, *nr_x, *nc, *nr_p, *dist, ix, d); ufcl_memberships(d, *nr_x, *nr_p, exponent, ix, u); ufcl_prototypes(x, u, w, *nr_x, *nc, *nr_p, *f, *dist, lrate, ix, p); } new_value = cmeans_error_fn(u, d, w, *nr_x, *nr_p, *f); if(fabs(old_value - new_value) < *reltol * (old_value + *reltol)) { if(*verbose) Rprintf("Iteration: %3d converged, Error: %13.10f\n", *iter, new_value); break; } else { if(*verbose) { *ermin = cmeans_error_fn(u, d, w, *nr_x, *nr_p, *f); Rprintf("Iteration: %3d, Error: %13.10f\n", *iter, new_value); } old_value = new_value; } } *ermin = new_value; } e1071/src/svm.h0000755000175100001440000000660514173734135012614 0ustar hornikusers#ifndef _LIBSVM_H #define _LIBSVM_H #define LIBSVM_VERSION 323 #ifdef __cplusplus extern "C" { #endif extern int libsvm_version; struct svm_node { int index; double value; }; struct svm_problem { int l; double *y; struct svm_node **x; }; enum { C_SVC, NU_SVC, ONE_CLASS, EPSILON_SVR, NU_SVR }; /* svm_type */ enum { LINEAR, POLY, RBF, SIGMOID, PRECOMPUTED }; /* kernel_type */ struct svm_parameter { int svm_type; int kernel_type; int degree; /* for poly */ double gamma; /* for poly/rbf/sigmoid */ double coef0; /* for poly/sigmoid */ /* these are for training only */ double cache_size; /* in MB */ double eps; /* stopping criteria */ double C; /* for C_SVC, EPSILON_SVR and NU_SVR */ int nr_weight; /* for C_SVC */ int *weight_label; /* for C_SVC */ double* weight; /* for C_SVC */ double nu; /* for NU_SVC, ONE_CLASS, and NU_SVR */ double p; /* for EPSILON_SVR */ int shrinking; /* use the shrinking heuristics */ int probability; /* do probability estimates */ }; /* // // svm_model // */ struct svm_model { struct svm_parameter param; /* parameter */ int nr_class; /* number of classes, = 2 in regression/one class svm */ int l; /* total #SV */ struct svm_node **SV; /* SVs (SV[l]) */ double **sv_coef; /* coefficients for SVs in decision functions (sv_coef[k-1][l]) */ double *rho; /* constants in decision functions (rho[k*(k-1)/2]) */ double *probA; /* pariwise probability information */ double *probB; int *sv_indices; /* sv_indices[0,...,nSV-1] are values in [1,...,num_traning_data] to indicate SVs in the training set */ /* for classification only */ int *label; /* label of each class (label[k]) */ int *nSV; /* number of SVs for each class (nSV[k]) */ /* nSV[0] + nSV[1] + ... 
+ nSV[k-1] = l */ /* XXX */ int free_sv; /* 1 if svm_model is created by svm_load_model*/ /* 0 if svm_model is created by svm_train */ }; struct svm_model *svm_train(const struct svm_problem *prob, const struct svm_parameter *param); void svm_cross_validation(const struct svm_problem *prob, const struct svm_parameter *param, int nr_fold, double *target); int svm_save_model(const char *model_file_name, const struct svm_model *model); struct svm_model *svm_load_model(const char *model_file_name); int svm_get_svm_type(const struct svm_model *model); int svm_get_nr_class(const struct svm_model *model); void svm_get_labels(const struct svm_model *model, int *label); void svm_get_sv_indices(const struct svm_model *model, int *sv_indices); int svm_get_nr_sv(const struct svm_model *model); double svm_get_svr_probability(const struct svm_model *model); double svm_predict_values(const struct svm_model *model, const struct svm_node *x, double* dec_values); double svm_predict(const struct svm_model *model, const struct svm_node *x); double svm_predict_probability(const struct svm_model *model, const struct svm_node *x, double* prob_estimates); void svm_free_model_content(struct svm_model *model_ptr); void svm_free_and_destroy_model(struct svm_model **model_ptr_ptr); void svm_destroy_param(struct svm_parameter *param); const char *svm_check_parameter(const struct svm_problem *prob, const struct svm_parameter *param); int svm_check_probability_model(const struct svm_model *model); //void svm_set_print_string_function(void (*print_func)(const char *)); #ifdef __cplusplus } #endif void svm_set_print_string_function(void (*print_func)(const char *)); #endif /* _LIBSVM_H */ e1071/NAMESPACE0000644000175100001440000000436214533656767012277 0ustar hornikusersuseDynLib("e1071", .registration = TRUE, .fixes = "R_") import(graphics) import(grDevices) importFrom("stats", "cmdscale", "complete.cases", "cutree", ".getXlevels", "delete.response", "dnorm", "fft", "fitted", "kmeans", "median", "model.extract", "model.frame", "model.matrix", "model.response", "na.fail", "na.omit", "na.pass", "napredict", "optim", "pchisq", "pnorm", "ppoints", "predict", "qnorm", "quantile", "rnorm", "runif", "sd", "terms", "time", "ts", "var", "xtabs") importFrom("proxy", "dist") importFrom("utils", "head") importFrom("methods", "as", "getClass", "new") importFrom("class", "knn", "knn1") if(getRversion() >= "2.5.0") importFrom("utils", "write.table") export(ddiscrete, pdiscrete, qdiscrete, rdiscrete, bclust, hclust.bclust, centers.bclust, clusters.bclust, bincombinations, bootstrap.lca, classAgreement, cmeans, countpattern, cshell, element, fclustIndex, gknn, hamming.distance, hamming.window, hanning.window, ica, impute, interpolate, kurtosis, lca, matchControls, matchClasses, compareMatchedClasses, moment, naiveBayes, permutations, rbridge, read.matrix.csr, write.matrix.csr, rectangle.window, rwiener, allShortestPaths, extractPath, sigmoid, dsigmoid, d2sigmoid, skewness, stft, svm, tune, tune.control, write.svm, probplot, hsv_palette, scale_data_frame) exportPattern("tune\\..+", "best\\..+") S3method(boxplot, bclust) S3method(coef, svm) S3method(gknn, default) S3method(gknn, formula) S3method(lines, probplot) S3method(naiveBayes, default) S3method(naiveBayes, formula) S3method(plot, bclust) S3method(plot, ica) S3method(plot, stft) S3method(plot, svm) S3method(plot, tune) S3method(predict, lca) S3method(predict, naiveBayes) S3method(predict, svm) S3method(predict, gknn) S3method(print, bootstrap.lca) S3method("print", "fclust") 
S3method(print, gknn) S3method(print, ica) S3method(print, lca) S3method(print, summary.lca) S3method(print, naiveBayes) S3method(print, svm) S3method(print, summary.svm) S3method(print, tune) S3method(print, summary.tune) S3method(summary, lca) S3method(summary, svm) S3method(summary, tune) S3method(svm, default) S3method(svm, formula) e1071/inst/0000755000175100001440000000000015120610017011773 5ustar hornikuserse1071/inst/doc/0000755000175100001440000000000015120610017012540 5ustar hornikuserse1071/inst/doc/svmdoc.Rnw0000644000175100001440000004447115120607775014556 0ustar hornikusers\documentclass[a4paper]{article} \usepackage{hyperref, graphicx, color, alltt, doi} \usepackage{Sweave} \usepackage[round]{natbib} \definecolor{Red}{rgb}{0.7,0,0} \definecolor{Blue}{rgb}{0,0,0.8} \definecolor{hellgrau}{rgb}{0.55,0.55,0.55} \newcommand{\pkg}[1]{\texttt{#1}} \newenvironment{smallexample}{\begin{alltt}\small}{\end{alltt}} \begin{document} %\VignetteIndexEntry{Support Vector Machines---the Interface to libsvm in package e1071} %\VignetteDepends{e1071,randomForest,xtable} %\VignetteKeywords{classification, regression, machine learning, benchmarking, support vector machines} %\VignettePackage{e1071} \SweaveOpts{engine=R,eps=FALSE} \setkeys{Gin}{width=0.8\textwidth} \title{Support Vector Machines \footnote{A smaller version of this article appeared in R-News, Vol.1/3, 9.2001}\\ \large The Interface to \texttt{libsvm} in package \pkg{e1071}} \author{by David Meyer\\ FH Technikum Wien, Austria\\ \url{mailto:David.Meyer@R-Project.org} } \maketitle \sloppy ``Hype or Hallelujah?'' is the provocative title used by \cite{svm:bennett+campbell:2000} in an overview of Support Vector Machines (SVM). SVMs are currently a hot topic in the machine learning community, creating a similar enthusiasm at the moment as Artificial Neural Networks used to do before. Far from being a panacea, SVMs yet represent a powerful technique for general (nonlinear) classification, regression and outlier detection with an intuitive model representation. The package \pkg{e1071} offers an interface to the award-winning\footnote{The library won the IJCNN 2001 Challenge by solving two of three problems: the Generalization Ability Challenge (GAC) and the Text Decoding Challenge (TDC). For more information, see: \url{https://www.csie.ntu.edu.tw/~cjlin/papers/ijcnn.ps.gz}.} C++-implementation by Chih-Chung Chang and Chih-Jen Lin, \texttt{libsvm} (current version: 2.6), featuring: \begin{itemize} \item $C$- and $\nu$-classification \item one-class-classification (novelty detection) \item $\epsilon$- and $\nu$-regression \end{itemize} and includes: \begin{itemize} \item linear, polynomial, radial basis function, and sigmoidal kernels \item formula interface \item $k$-fold cross validation \end{itemize} For further implementation details on \texttt{libsvm}, see \cite{svm:chang+lin:2001}. \section*{Basic concept} SVMs were developed by \cite{svm:cortes+vapnik:1995} for binary classification. 
Their approach may be roughly sketched as follows:
\begin{description}
\item[Class separation:] basically, we are looking for the optimal separating hyperplane between the two classes by maximizing the \textit{margin} between the classes' closest points (see Figure \ref{fig:svm1})---the points lying on the boundaries are called \textit{support vectors}, and the middle of the margin is our optimal separating hyperplane;
\item[Overlapping classes:] data points on the ``wrong'' side of the discriminant margin are weighted down to reduce their influence (\textit{``soft margin''});
\item[Nonlinearity:] when we cannot find a \textit{linear} separator, data points are projected into a (usually) higher-dimensional space where the data points effectively become linearly separable (this projection is realised via \textit{kernel techniques});
\item[Problem solution:] the whole task can be formulated as a quadratic optimization problem which can be solved by known techniques.
\end{description}
\noindent A program able to perform all these tasks is called a \textit{Support Vector Machine}.
\begin{figure}[htbp] \begin{center} \includegraphics[width=8cm]{svm} \caption{Classification (linearly separable case)} \label{fig:svm1} \end{center} \end{figure}
Several extensions have been developed; the ones currently included in \texttt{libsvm} are:
\begin{description}
\item[$\nu$-classification:] this model allows for more control over the number of support vectors \cite[see][]{svm:scholkopf+smola+williamson:2000} by specifying an additional parameter $\nu$ which approximates the fraction of support vectors;
\item[One-class-classification:] this model tries to find the support of a distribution and thus allows for outlier/novelty detection;
\item[Multi-class classification:] basically, SVMs can only solve binary classification problems. To allow for multi-class classification, \texttt{libsvm} uses the \textit{one-against-one} technique by fitting all binary subclassifiers and finding the correct class by a voting mechanism;
\item[$\epsilon$-regression:] here, the data points lie \textit{in between} the two borders of the margin which is maximized under suitable conditions to avoid outlier inclusion;
\item[$\nu$-regression:] with analogous modifications of the regression model as in the classification case.
\end{description}
\section*{Usage in R}
The R interface to \texttt{libsvm} in package \pkg{e1071}, \texttt{svm()}, was designed to be as intuitive as possible. Models are fitted and new data are predicted as usual, and both the vector/matrix and the formula interface are implemented. As expected for R's statistical functions, the engine tries to be smart about the mode to be chosen, using the dependent variable's type ($y$): if $y$ is a factor, the engine switches to classification mode, otherwise, it behaves as a regression machine; if $y$ is omitted, the engine assumes a novelty detection task.
\section*{Examples}
In the following two examples, we demonstrate the practical use of \texttt{svm()} along with a comparison to classification and regression forests as implemented in \texttt{randomForest()}.
\subsection*{Classification}
In this example, we use the glass data from the UCI Repository of Machine Learning Databases for classification \citep{svm:blake+merz:1998}, converted to R format by Friedrich Leisch in the late 1990s. The current version of the UC Irvine Machine Learning Repository Glass Identification data set is available from \doi{10.24432/C5WW2P}.
The task is to predict the type of a glass on the basis of its chemical analysis. We start by splitting the data into a train and test set:
<<>>=
library(e1071)
library(randomForest)
data(Glass, package="mlbench")

## split data into a train and test set
index <- 1:nrow(Glass)
N <- trunc(length(index)/3)
testindex <- sample(index, N)
testset <- Glass[testindex,]
trainset <- Glass[-testindex,]
@
Both for SVM and randomForest (via \texttt{randomForest()}), we fit the model and predict the test set values:
<<>>=
## svm
svm.model <- svm(Type ~ ., data = trainset, cost = 100, gamma = 1)
svm.pred <- predict(svm.model, testset[,-10])
@
(The dependent variable, \texttt{Type}, has column number 10. \texttt{cost} is a general penalizing parameter for $C$-classification and \texttt{gamma} is the radial basis function-specific kernel parameter.)
<<>>=
## randomForest
rf.model <- randomForest(Type ~ ., data = trainset)
rf.pred <- predict(rf.model, testset[,-10])
@
A cross-tabulation of the true versus the predicted values yields:
<<>>=
## compute svm confusion matrix
table(pred = svm.pred, true = testset[,10])

## compute randomForest confusion matrix
table(pred = rf.pred, true = testset[,10])
@
%% results table
<<echo=FALSE, results=tex>>=
library(xtable)
rf.acc <- c()
sv.acc <- c()
rf.kap <- c()
sv.kap <- c()
reps <- 10
for (i in 1:reps) {
## split data into a train and test set
index <- 1:nrow(Glass)
N <- trunc(length(index)/3)
testindex <- sample(index, N)
testset <- na.omit(Glass[testindex,])
trainset <- na.omit(Glass[-testindex,])

## svm
svm.model <- svm(Type ~ ., data = trainset, cost = 8, gamma = 0.0625)
svm.pred <- predict(svm.model, testset[,-10])
tab <- classAgreement(table(svm.pred, testset[,10]))
sv.acc[i] <- tab$diag
sv.kap[i] <- tab$kappa

## randomForest
rf.model <- randomForest(Type ~ ., data = trainset)
rf.pred <- predict(rf.model, testset[,-10])
tab <- classAgreement(table(rf.pred, testset[,10]))
rf.acc[i] <- tab$diag
rf.kap[i] <- tab$kappa
}
x <- rbind(summary(sv.acc), summary(rf.acc), summary(sv.kap), summary(rf.kap))
rownames <- c()
tab <- cbind(rep(c("svm","randomForest"),2), round(x,2))
colnames(tab)[1] <- "method"
rownames(tab) <- c("Accuracy","","Kappa"," ")
xtable(tab, label = "tab:class", caption = "Performance of \\texttt{svm()} and\
\\texttt{randomForest()} for classification (10 replications)")
@
\noindent Finally, we compare the performance of the two methods by computing the respective accuracy rates and the kappa indices (as computed by \texttt{classAgreement()} also contained in package \pkg{e1071}). In Table \ref{tab:class}, we summarize the results of \Sexpr{reps} replications---Support Vector Machines show worse results.
\subsection*{Non-linear $\epsilon$-Regression}
The regression capabilities of SVMs are demonstrated on the ozone data. Again, we split the data into a train and test set.
<<>>=
library(e1071)
library(randomForest)
data(Ozone, package="mlbench")

## split data into a train and test set
index <- 1:nrow(Ozone)
N <- trunc(length(index)/3)
testindex <- sample(index, N)
testset <- na.omit(Ozone[testindex,-3])
trainset <- na.omit(Ozone[-testindex,-3])

## svm
svm.model <- svm(V4 ~ ., data = trainset, cost = 1000, gamma = 0.0001)
svm.pred <- predict(svm.model, testset[,-3])
sqrt(crossprod(svm.pred - testset[,3]) / N)

## random Forest
rf.model <- randomForest(V4 ~ ., data = trainset)
rf.pred <- predict(rf.model, testset[,-3])
sqrt(crossprod(rf.pred - testset[,3]) / N)
@
<<echo=FALSE, results=tex>>=
rf.res <- c()
sv.res <- c()
reps <- 10
for (i in 1:reps) {
## split data into a train and test set
index <- 1:nrow(Ozone)
N <- trunc(length(index)/3)
testindex <- sample(index, N)
testset <- na.omit(Ozone[testindex,-3])
trainset <- na.omit(Ozone[-testindex,-3])

## svm
svm.model <- svm(V4 ~ ., data = trainset, cost = 1000, gamma = 0.0001)
svm.pred <- predict(svm.model, testset[,-3])
sv.res[i] <- sqrt(crossprod(svm.pred - testset[,3]) / N)

## randomForest
rf.model <- randomForest(V4 ~ ., data = trainset)
rf.pred <- predict(rf.model, testset[,-3])
rf.res[i] <- sqrt(crossprod(rf.pred - testset[,3]) / N)
}
xtable(rbind(svm = summary(sv.res), randomForest = summary(rf.res)),
       label = "tab:reg", caption = "Performance of \\texttt{svm()} and\
\\texttt{randomForest()} for regression (Root Mean Squared Error, 10 replications)")
@
\noindent We compare the two methods by the root mean squared error (RMSE)---see Table \ref{tab:reg} for a summary of \Sexpr{reps} replications. In this case, \texttt{svm()} does a better job than \texttt{randomForest()}.
\section*{Elements of the \texttt{svm} object}
The function \texttt{svm()} returns an object of class ``\texttt{svm}'', which partly includes the following components:
\begin{description}
\item[\textbf{\texttt{SV}:}] matrix of support vectors found;
\item[\textbf{\texttt{labels}:}] their labels in classification mode;
\item[\textbf{\texttt{index}:}] index of the support vectors in the input data (could be used e.g., for their visualization as part of the data set).
\end{description}
If the cross-classification feature is enabled, the \texttt{svm} object will contain some additional information described below.
\section*{Other main features}
\begin{description}
\item[Class Weighting:] if one wishes to weight the classes differently (e.g., in case of asymmetric class sizes to avoid a possibly disproportionate influence of bigger classes on the margin), weights may be specified in a vector with named components. In case of two classes A and B, we could use something like: \texttt{m <- svm(x, y, class.weights = c(A = 0.3, B = 0.7))}
\item[Cross-classification:] to assess the quality of the training result, we can perform a $k$-fold cross-classification on the training data by setting the parameter \texttt{cross} to $k$ (default: 0). The \texttt{svm} object will then contain some additional values, depending on whether classification or regression is performed.
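For example, a 10-fold cross validation could be requested like this (a small sketch, reusing the glass \texttt{trainset} from the classification example above; the reported accuracies vary with the random fold assignment):
\begin{smallexample}
m <- svm(Type ~ ., data = trainset, cross = 10)
summary(m)
\end{smallexample}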
Values for classification:
\begin{description}
\item[\texttt{accuracies}:] vector of accuracy values for each of the $k$ predictions
\item[\texttt{tot.accuracy}:] total accuracy
\end{description}
Values for regression:
\begin{description}
\item[\texttt{MSE}:] vector of mean squared errors for each of the $k$ predictions
\item[\texttt{tot.MSE}:] total mean squared error
\item[\texttt{scorrcoef}:] Squared correlation coefficient (of the predicted and the true values of the dependent variable)
\end{description}
\end{description}
\section*{Tips on practical use}
\begin{itemize}
\item Note that SVMs may be very sensitive to the proper choice of parameters, so always check a range of parameter combinations, at least on a reasonable subset of your data.
\item For classification tasks, you will most likely use $C$-classification with the RBF kernel (default), because of its good general performance and the small number of parameters (only two: $C$ and $\gamma$). The authors of \pkg{libsvm} suggest trying small and large values for $C$---like 1 to 1000---first, then deciding which are better for the data by cross validation, and finally trying several $\gamma$'s for the better $C$'s.
\item However, better results are obtained by using a grid search over all parameters. For this, we recommend using the \texttt{tune.svm()} function in \pkg{e1071}.
\item Be careful with large datasets as training times may increase rather fast.
\item Scaling of the data usually drastically improves the results. Therefore, \texttt{svm()} scales the data by default.
\end{itemize}
\section*{Model Formulations and Kernels}
Dual representation of models implemented:
\begin{itemize}
\item $C$-classification:\\
\begin{eqnarray}
\min_\alpha&&\frac{1}{2}\alpha^\top \mathbf{Q} \alpha-\mathbf{e}^\top\alpha \nonumber\\
\mbox{s.t.} &&0\le\alpha_i\le C,~i=1,\ldots,l,\\
&&\mathbf{y}^\top\alpha=0~, \nonumber
\end{eqnarray}
where $\mathbf{e}$ is the unity vector, $C$ is the upper bound, $\mathbf{Q}$ is an $l$ by $l$ positive semidefinite matrix, $Q_{ij} \equiv y_i y_j K(x_i, x_j)$, and $K(x_i, x_j) \equiv \phi(x_i)^\top\phi(x_j)$ is the kernel.
\item $\nu$-classification:\\
\begin{eqnarray}
\min_\alpha&&\frac{1}{2}\alpha^\top \mathbf{Q} \alpha \nonumber\\
\mbox{s.t.}&&0\le\alpha_i\le 1/l,~i=1,\ldots,l,\\
&&\mathbf{e}^\top \alpha \ge \nu, \nonumber\\
&&\mathbf{y}^\top\alpha=0~. \nonumber
\end{eqnarray}
where $\nu \in (0,1]$.
\item one-class classification:\\ \begin{eqnarray} \min_\alpha&&\frac{1}{2}\alpha^\top \mathbf{Q} \alpha \nonumber\\ \mbox{s.t.} &&0\le\alpha_i\le 1/(\nu l),~i=1,\ldots,l,\\ &&\mathbf{e}^\top\alpha=1~,\nonumber \end{eqnarray} \item $\epsilon$-regression:\\ \begin{eqnarray} \min_{\alpha, \alpha^*}&&\frac{1}{2}(\alpha-\alpha^*)^\top \mathbf{Q} (\alpha-\alpha^*) + \nonumber\\ &&\epsilon\sum_{i=1}^{l}(\alpha_i+\alpha_i^*) + \sum_{i=1}^{l}y_i(\alpha_i-\alpha_i^*) \nonumber\\ \mbox{s.t.} &&0\le\alpha_i, \alpha_i^*\le C,~i=1,\ldots,l,\\ &&\sum_{i=1}^{l}(\alpha_i-\alpha_i^*)=0~.\nonumber \end{eqnarray} \item $\nu$-regression:\\ \begin{eqnarray} \min_{\alpha, \alpha^*}&&\frac{1}{2}(\alpha-\alpha^*)^\top \mathbf{Q} (\alpha-\alpha^*) + \mathbf{z}^\top(\alpha_i-\alpha_i^*) \nonumber\\ \mbox{s.t.} &&0\le\alpha_i, \alpha_i^*\le C,~i=1,\ldots,l,\\ &&\mathbf{e}^\top(\alpha-\alpha^*)=0\nonumber\\ &&\mathbf{e}^\top(\alpha+\alpha^*)=C\nu~.\nonumber \end{eqnarray} \end{itemize} \noindent Available kernels:\\ \\ \noindent \begin{table}[h] \centering \begin{tabular}{|l|l|l|} \hline kernel & formula & parameters \\ \hline \hline linear & $\bf u^\top v$& (none) \\ polynomial & $(\gamma \mathbf{u^\top v}+c_0)^d$ & $\gamma, d, c_0$\\ radial basis fct. & $\exp\{-\gamma|\mathbf{u-v}|^2\}$&$\gamma$\\ sigmoid & $\tanh\{\gamma \mathbf{u^\top v}+c_0\}$ &$\gamma, c_0$\\ \hline \end{tabular} \end{table} \section*{Conclusion} We hope that \texttt{svm} provides an easy-to-use interface to the world of SVMs, which nowadays have become a popular technique in flexible modelling. There are some drawbacks, though: SVMs scale rather badly with the data size due to the quadratic optimization algorithm and the kernel transformation. Furthermore, the correct choice of kernel parameters is crucial for obtaining good results, which practically means that an extensive search must be conducted on the parameter space before results can be trusted, and this often complicates the task (the authors of \texttt{libsvm} currently conduct some work on methods of efficient automatic parameter selection). Finally, the current implementation is optimized for the radial basis function kernel only, which clearly might be suboptimal for your data. \begin{thebibliography}{6} \bibitem[Bennett \& Campbell(2000)]{svm:bennett+campbell:2000} Bennett, K.~P. \& Campbell, C. (2000). \newblock Support vector machines: Hype or hallelujah? \newblock \emph{SIGKDD Explorations}, \textbf{2}(2). \newblock \url{http://www.acm.org/sigs/sigkdd/explorations/issue2-2/bennett.pdf}. \bibitem[Blake \& Merz(1998)]{svm:blake+merz:1998} Blake, C.L. \& Merz, C.J. (1998). \newblock UCI Repository of Machine Learning Databases. \newblock Irvine, CA: University of California, Irvine, Department of Information and Computer Science. \newblock Formerly available from \texttt{http://www.ics.uci.edu/~mlearn/MLRepository.html}. \bibitem[Chang \& Lin(2001)]{svm:chang+lin:2001} Chang, C.-C. \& Lin, C.-J. (2001). \newblock {LIBSVM}: a library for support vector machines. \newblock Software available at \url{https://www.csie.ntu.edu.tw/~cjlin/libsvm/}, detailed documentation (algorithms, formulae, \dots) can be found in \url{https://www.csie.ntu.edu.tw/~cjlin/papers/libsvm.ps.gz} \bibitem[Cortes \& Vapnik(1995)]{svm:cortes+vapnik:1995} Cortes, C. \& Vapnik, V. (1995). \newblock Support-vector network. \newblock \emph{Machine Learning}, \textbf{20}, 1--25. 
\bibitem[Sch\"olkopf et~al.(2000)Sch\"olkopf, Smola, Williamson, \& Bartlett]{svm:scholkopf+smola+williamson:2000} Sch\"olkopf, B., Smola, A., Williamson, R.~C., \& Bartlett, P. (2000). \newblock New support vector algorithms. \newblock \emph{Neural Computation}, \textbf{12}, 1207--1245. \bibitem[Vapnik(1998)]{svm:vapnik:1998} Vapnik, V. (1998). \newblock \emph{Statistical learning theory}. \newblock New York: Wiley. \end{thebibliography} \end{document} e1071/inst/doc/svminternals.Rnw0000644000175100001440000001616015120304220015755 0ustar hornikusers\documentclass[a4paper]{article} \usepackage{hyperref, graphicx, color, alltt,a4wide} \usepackage{Sweave} \newcommand{\pkg}[1]{\texttt{#1}} \definecolor{Red}{rgb}{0.7,0,0} \definecolor{Blue}{rgb}{0,0,0.8} \definecolor{hellgrau}{rgb}{0.55,0.55,0.55} \newenvironment{smallexample}{\begin{alltt}\small}{\end{alltt}} \begin{document} \SweaveOpts{concordance=TRUE} %\VignetteIndexEntry{svm() internals} %\VignetteDepends{xtable} %\VignetteKeywords{classification, regression, machine learning, benchmarking, support vector machines} %\VignettePackage{e1071} \SweaveOpts{engine=R,eps=FALSE} \setkeys{Gin}{width=0.8\textwidth} \title{\texttt{svm()} internals\\ \large Some technical notes about the \texttt{svm()} in package \pkg{e1071}} \author{by David Meyer\\ FH Technikum Wien, Austria\\ \url{mailto:David.Meyer@R-Project.org} } \maketitle \sloppy This document explains how to use the parameters in an object returned by \texttt{svm()} for own prediction functions. \section{Binary Classifier} For class prediction in the binary case, the class of a new data vector $n$ is usually given by \emph{the sign} of \begin{equation} \sum_i{a_i y_i K(x_i, n)} + \rho \end{equation} \noindent where $x_i$ is the $i$-th support vector, $y_i$ the corresponding label, $a_i$ the corresponding coefficiant, and $K$ is the kernel (for example the linear one, i.e. $K(u,v) = u ^{\top} v$). Now, the \texttt{libsvm} library interfaced by the \texttt{svm()} function actually returns $a_i y_i$ as $i$-th coefficiant and the \emph{negative} $\rho$, so in fact uses the formula: \[ \sum_i{\mathrm{coef}_i K(x_i, n)} - \rho \] \noindent where the training examples (=training data) are labeled \{1,-1\} (!). A simplified \textsf{R} function for prediction with linear kernel would be: \begin{smallexample} svmpred <- function (m, newdata, K=crossprod) \{ ## this guy does the computation: pred.one <- function (x) sign(sum(sapply(1:m$tot.nSV, function (j) K(m$SV[j,], x) * m$coefs[j] ) ) - m$rho ) ## this is just for convenience: if (is.vector(newdata)) newdata <- t(as.matrix(x)) sapply (1:nrow(newdata), function (i) pred.one(newdata[i,])) \} \end{smallexample} \noindent where \texttt{pred.one()} does the actual prediction for one new data vector, the remainder is just a convenience for prediction of multiple new examples. It is easy to extend this to other kernels, just replace \texttt{K()} with the appropriate function (see the help page for the formulas used) and supply the additional constants. As we will see in the next section, the multi-class prediction is more complicated, because the coefficiants of the diverse binary SVMs are stored in a compressed format. \section{Multiclass-classifier} To handle $k$ classes, $k>2$, \texttt{svm()} trains all binary subclassifiers (one-against-one-method) and then uses a voting mechanism to determine the actual class. Now, this means $k(k-1)/2$ classifiers, hence in principle $k(k-1)/2$ sets of SVs, coefficiants and rhos. 
These are stored in a compressed format:
\begin{enumerate}
\item Only one SV is stored in case it is used by several classifiers. The \texttt{model\$SV}-matrix is ordered by classes, and you find the starting indices by using \texttt{nSV} (number of SVs):
\begin{smallexample}
start <- c(1, cumsum(model$nSV)+1)
start <- start[-length(start)]
\end{smallexample}
\texttt{sum(nSV)} equals the total number of (distinct) SVs.
\item The coefficients of the SVs are stored in the \texttt{model\$coefs}-matrix, grouped by classes. Because the separating hyperplanes found by the SVM algorithm have SVs on both sides, you will have two sets of coefficients per binary classifier, and e.g., for 3 classes, you could build a \emph{block}-matrix like this for the classifiers $(i, j)$ ($i$,$j$=class numbers):
\begin{table}[h]
\center
\begin{tabular}{|c|c|c|c|}
\hline
i $\backslash$ j & 0 & 1 & 2 \\\hline
0 & X & set (0, 1)& set (0, 2)\\\hline
1 & set (1, 0) & X & set (1, 2)\\\hline
2 & set (2, 0) & set (2, 1) & X\\\hline
\end{tabular}
\end{table}
\noindent where set(i, j) are the coefficients for the classifier (i,j), lying on the side of class j. Because there are no entries for (i, i), we can save the diagonal and shift up the lower triangular matrix to get
\begin{table}[h]
\center
\begin{tabular}{|c|c|c|c|}
\hline
i $\backslash$ j & 0 & 1 & 2 \\\hline
0 & set (1,0) & set (0,1) & set (0,2) \\\hline
1 & set (2,0) & set (2,1) & set (1,2) \\\hline
\end{tabular}
\end{table}
\noindent Each set (., j) has length \texttt{nSV[j]}, so of course, there will be some filling 0s in some sets. \texttt{model\$coefs} is the \emph{transpose} of such a matrix, therefore for a data set with, say, 6 classes, you get 6-1=5 columns. The coefficients of (i, j) start at \texttt{model\$coefs[start[i],j]} and those of (j, i) at \texttt{model\$coefs[start[j],i-1]}.
\item The $k(k-1)/2$ rhos are just linearly stored in the vector \texttt{model\$rho}.
\end{enumerate}
\newpage
\noindent The following code shows how to use this for prediction:
\begin{smallexample}
## Linear Kernel function
K <- function(i,j) crossprod(i,j)

predsvm <- function(object, newdata) \{
    ## compute start-index
    start <- c(1, cumsum(object$nSV)+1)
    start <- start[-length(start)]

    ## compute kernel values
    kernel <- sapply (1:object$tot.nSV,
                      function (x) K(object$SV[x,], newdata))

    ## compute raw prediction for classifier (i,j)
    predone <- function (i,j) \{
        ## ranges for class i and j:
        ri <- start[i] : (start[i] + object$nSV[i] - 1)
        rj <- start[j] : (start[j] + object$nSV[j] - 1)

        ## coefs for (i,j):
        coef1 <- object$coefs[ri, j-1]
        coef2 <- object$coefs[rj, i]

        ## return raw values:
        crossprod(coef1, kernel[ri]) + crossprod(coef2, kernel[rj])
    \}

    ## compute votes for all classifiers
    votes <- rep(0, object$nclasses)
    c <- 0 # rho counter
    for (i in 1 : (object$nclasses - 1))
        for (j in (i + 1) : object$nclasses)
            if (predone(i,j) > object$rho[c <- c + 1])
                votes[i] <- votes[i] + 1
            else
                votes[j] <- votes[j] + 1

    ## return winner (index with max. votes)
    object$levels[which(votes %in% max(votes))[1]]
\}
\end{smallexample}
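\noindent A quick way to check the bookkeeping above is to compare \texttt{predsvm()} against the built-in \texttt{predict()} method. The following small sketch assumes a model fitted with the linear kernel and with \texttt{scaled = FALSE} (for scaled data, see the remark below); the \texttt{iris} data are used purely for illustration:
\begin{smallexample}
m <- svm(Species ~ ., data = iris, kernel = "linear", scaled = FALSE)
p <- sapply(1:nrow(iris),
            function (i) predsvm(m, unlist(iris[i, -5])))
all(p == as.character(predict(m, iris[, -5])))   # should be TRUE
\end{smallexample}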
In case the data were scaled prior to fitting the model (note that this
is the default for \texttt{svm()}), new data needs to be scaled as well
before applying the prediction functions, for example using the
following code snippet (\texttt{object} is an object returned by
\texttt{svm()}, \texttt{newdata} a data frame):

\begin{smallexample}
if (any(object$scaled))
    newdata[,object$scaled] <-
        scale(newdata[,object$scaled, drop = FALSE],
              center = object$x.scale$"scaled:center",
              scale  = object$x.scale$"scaled:scale"
              )
\end{smallexample}

\noindent For regression, the response needs to be scaled as well
before training, and the predictions need to be scaled back
accordingly.
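\noindent A minimal sketch of that back-transformation (not part of the
original text; it assumes the fitted object stores the response scaling
in \texttt{object\$y.scale}, analogously to \texttt{x.scale} above, and
that \texttt{raw} holds predictions on the scaled response):

\begin{smallexample}
## undo the response scaling: multiply by the stored standard
## deviation, then add back the stored mean
pred <- raw * object$y.scale$"scaled:scale" +
        object$y.scale$"scaled:center"
\end{smallexample}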
\end{document}
e1071/inst/doc/svmdoc.R0000644000175100001440000001052315120610015014165 0ustar hornikusers
### R code from vignette source 'svmdoc.Rnw'

###################################################
### code chunk number 1: svmdoc.Rnw:146-156
###################################################
library(e1071)
library(randomForest)
data(Glass, package="mlbench")

## split data into a train and test set
index <- 1:nrow(Glass)
N <- trunc(length(index)/3)
testindex <- sample(index, N)
testset <- Glass[testindex,]
trainset <- Glass[-testindex,]


###################################################
### code chunk number 2: svmdoc.Rnw:161-164
###################################################
## svm
svm.model <- svm(Type ~ ., data = trainset, cost = 100, gamma = 1)
svm.pred <- predict(svm.model, testset[,-10])


###################################################
### code chunk number 3: svmdoc.Rnw:169-172
###################################################
## randomForest
rf.model <- randomForest(Type ~ ., data = trainset)
rf.pred <- predict(rf.model, testset[,-10])


###################################################
### code chunk number 4: svmdoc.Rnw:175-180
###################################################
## compute svm confusion matrix
table(pred = svm.pred, true = testset[,10])

## compute randomForest confusion matrix
table(pred = rf.pred, true = testset[,10])


###################################################
### code chunk number 5: svmdoc.Rnw:185-221
###################################################
library(xtable)
rf.acc <- c()
sv.acc <- c()
rf.kap <- c()
sv.kap <- c()
reps <- 10
for (i in 1:reps) {
    ## split data into a train and test set
    index <- 1:nrow(Glass)
    N <- trunc(length(index)/3)
    testindex <- sample(index, N)
    testset <- na.omit(Glass[testindex,])
    trainset <- na.omit(Glass[-testindex,])

    ## svm
    svm.model <- svm(Type ~ ., data = trainset, cost = 8, gamma = 0.0625)
    svm.pred <- predict(svm.model, testset[,-10])
    tab <- classAgreement(table(svm.pred, testset[,10]))
    sv.acc[i] <- tab$diag
    sv.kap[i] <- tab$kappa

    ## randomForest
    rf.model <- randomForest(Type ~ ., data = trainset)
    rf.pred <- predict(rf.model, testset[,-10])
    tab <- classAgreement(table(rf.pred, testset[,10]))
    rf.acc[i] <- tab$diag
    rf.kap[i] <- tab$kappa
}
x <- rbind(summary(sv.acc), summary(rf.acc), summary(sv.kap), summary(rf.kap))
rownames <- c()
tab <- cbind(rep(c("svm","randomForest"),2), round(x,2))
colnames(tab)[1] <- "method"
rownames(tab) <- c("Accuracy","","Kappa"," ")
xtable(tab, label = "tab:class",
       caption = "Performance of \\texttt{svm()} and\
\\texttt{randomForest()} for classification (10 replications)")


###################################################
### code chunk number 6: svmdoc.Rnw:234-254
###################################################
library(e1071)
library(randomForest)
data(Ozone, package="mlbench")

## split data into a train and test set
index <- 1:nrow(Ozone)
N <- trunc(length(index)/3)
testindex <- sample(index, N)
testset <- na.omit(Ozone[testindex,-3])
trainset <- na.omit(Ozone[-testindex,-3])

## svm
svm.model <- svm(V4 ~ ., data = trainset, cost = 1000, gamma = 0.0001)
svm.pred <- predict(svm.model, testset[,-3])
sqrt(crossprod(svm.pred - testset[,3]) / N)

## random Forest
rf.model <- randomForest(V4 ~ ., data = trainset)
rf.pred <- predict(rf.model, testset[,-3])
sqrt(crossprod(rf.pred - testset[,3]) / N)


###################################################
### code chunk number 7: svmdoc.Rnw:257-281
###################################################
rf.res <- c()
sv.res <- c()
reps <- 10
for (i in 1:reps) {
    ## split data into a train and test set
    index <- 1:nrow(Ozone)
    N <- trunc(length(index)/3)
    testindex <- sample(index, N)
    testset <- na.omit(Ozone[testindex,-3])
    trainset <- na.omit(Ozone[-testindex,-3])

    ## svm
    svm.model <- svm(V4 ~ ., data = trainset, cost = 1000, gamma = 0.0001)
    svm.pred <- predict(svm.model, testset[,-3])
    sv.res[i] <- sqrt(crossprod(svm.pred - testset[,3]) / N)

    ## randomForest
    rf.model <- randomForest(V4 ~ ., data = trainset)
    rf.pred <- predict(rf.model, testset[,-3])
    rf.res[i] <- sqrt(crossprod(rf.pred - testset[,3]) / N)
}
xtable(rbind(svm = summary(sv.res), randomForest = summary(rf.res)),
       label = "tab:reg",
       caption = "Performance of \\texttt{svm()} and\
\\texttt{randomForest()} for regression (Root Mean Squared Error, 10 replications)")
e1071/inst/doc/svmdoc.pdf0000644000175100001440000036016415120610017014540 0ustar hornikusers
–æ‹SD_®ã7zñÄŽb­\A%Hú$#$!ü–V½é™)À±Ë½Ëµëe¬àÛ<„H=Xé!^@­5Ë¡‘CÄ:ãõ‘u¦q)6òšÍb¡¥š9Í)Q¶#›êpÐ.÷B»(ÕLs¯›-VsY_=yOXÒÍPËH h ]\L¼ᓨ6ÍàÅ Å{£Úµ¼²l’¾ZrOÔz—3‘qmòvÙývŸÍ_êàD5D­n È 7‹5sùV½…¸šÞÚ½Ñ6;9ÁwÆíñ¿Ù²AåÅá.’oðúÞ‘.ëŽÓâP¡˜É 랉„YŸ޸ü ;¡DD2­P2Æ<´‰¡qpÒ-N-R¤êÔ‡_mxµù¸¥€n'!*›Óíp8Úü¶d6Ù–HÆø—ËéEp?y EdŠØJ’‘(ÚšföYÙ%ì‘ÏC_„?/ ]±‰‰–:9$ ™^y÷}Á7-R•RF’Fâö´óJ‚Í{„½¯gÌUÔÌàŸAª€N-A'‰µ=Ês%¿¹@0+t(hv P´Xëe~bŠ{.åÏt÷hê6’øzO%çæbì¬ ;3ÊβK¢=¦Àäy3öòiÌ;ƒ‡¿yöàëi Â~{¶K]RúaÀ=h0Ø eb©Z,yãæÜn›»Œ}òÖèÚI%_rªótIdÂê €é=…JÌp/ò}‡Ô :³²JÏæÇñv6#9½ H¥:‘8BÖ1ÜýÖ¡©[©$AMšù¬ ă–¥"¡;¹`ªÛöô"ü?x1‰ûß@0á’(Äf ÙT{¿©ÙÐdi.½#ØÊ‚$Û} RÄ#“*[9ø?¸BÚ•q¦S˜Ëw‘´‡vA®ë¸èÔòU- ´êPCx;ªF;’^øZ߯T.?†ˆÄŸð3<)K•Ì,¼î6¶™²GŽÚÑj2²Gê Y œž½®Nèu:¼ôrßîþ ™i€ݤ#ñ*NòíWE`RAp\¶"¨¥qÝ2~ùÒò¥kÉ7_"h£*Ø”µFéý9îÄ?®`o˜ô2KIOIQþ·¹h!?âÍ…Õv,[Æög—í`»S¾4(OZ¡ìC¶½îöýø%ÌÃÏã1¸8 Ifæ¨Â£@ýˆ0 ò´º 7åÔ둾l“iI aŒ’ÐÙ‰œ{[oðŸDø5Ò„ôÍ»x«®…¦ ÓãfÓk«¥ðNCêSdé]ÇËÎtøà™à»×îµyÉJf$1!gŽXct-'§l4U7’“ /„¤>eá$û!WÆÔÿŠï¨ïebÏÈyž¾ózîá©9<1‡§‘Tùå¼ @HX¬ðz‹dxäbr`;±di QLÛΚJe¥v‹Å@ØÉ`“FÕ”E„kSÞ¬§Í›õî" ßYˆÊ?ý½#“þý±ÏˆF~Jˆ‰ Q13Àq<±ä]m"O ³ ؃®3èN¹Óž”#à.Ø¥¸,¨ë"­©2H b£¤Ô¤6ª-*ÈrþwÇcÀÖóˆ,Üs@ Û¿¤¯!ùÙ¤a<¬_Î?Ë«5í¨GÕ°®]ÐI{@§«½ÝÑI†ÜÙ”­£—³¿åÕ™P ùÿ¦½´ìq¶·;÷ëll¯ëmñ5Q^C®¸¿Å‹y£þ¨'ñx~ÌßîCiUFe*¥V¯¡UÏ©œ³eNeéÌMj½Ro‘—´5feŸ¬¼öâ'+üú 6„`,‰Æe>¥Ÿ™{|þ‰çŽË’€<܇¤u„PðC„N*ðÐüžÜܪBJH¦™q»ˆ#¢ñ¿ƒd›ŽhÂ2;s‹ˆaCf+lfì9VeäX"3äpwf‰]ö%#šÈß}çcågãO¦—iäºWñ_ؾ|ûŠZ™®Y‡Z <¬ˆÑN´|~†ïãoãþï…/龑ÅVž¢ÂÄ]¦iL¬F0·Fäv† îL½ÎÖZf(¼zíƒ%!´¡äQÛð°`5—ˆn’¾ä‘IKÌZ¡N1ž}x,û0ù9ï^¾¤^\/©×*dR$#+$§}™h€Æ#ðj<—þ>vï Gã(F°–Ñwf³o>¯_,Æ¿lÆÎ–Lfˆ½N8ÌŽÅ\ÚðìØ°á±ð«­x;’þõðô,\Å>z߉3$ÍLiÆý>ÆCÏâù˜cœàóv¶ß#yVã¿»¸YÜ=’ßå[xón=QòÛÇp+šÆÝd‘ˆ-R†_ ãp¹¬®²T0#Ý.Øú±ÂoVÈçr~mß+Ù˜:$µJ:º~ceõ*lVÇbæØ!m´àƒö;öG:Ò°ï9±ý> stream xœÝX9oÜFîÙ¤K•‚€‹ÌZzîÃp8@äpÛ 7ZÔ.%3ÙË$ײ\ä·ç½5eYN¨Ðpgæß»æmJ3–Rükÿ¯¶ M¯’· ó¿¦í¿Õ6ýq‘<|¡iê2§¹N—I¸ÁRÆef)O Õã.]l“sòÃl®¸Ê(äÁ\ËÌ9Mª|·ÞoŸÎ(|qiÙWEÝÌ^/~MËœ’&]üž,¾¨.³í~]l"F#k”%óÁ±@ÛS\’ÅÍ¡è÷þé—ÙY C™&ë¼É[ª Ë÷ý¡¦ÊË]]4Ë™—ícU±žÍ•"罊¸S®@‚(öÙ€,ˆT{õÏÏ€Æë–ËÃL íë2-ˆy2› !ȪÚ×5°£U” mH“_7ySîwþ¨¢ÉþרXÒ¼)ÂjÜTÇ¢ßz7ã:sR+RTõ±ŽÇÄøN«*èãE¥@P8™oŽE¸Hn‚TÂ1®IYlÖõ£V¯nNŒ9„Çj¿=›":È’úÝ6úÇÂîîòX{=ÃO‚ló¦*ß÷øQn?` M±$ÁeŠƒÎ L¼?ÏzºÁLSÀe¬8Â+ÏÏFþS=9x™étÎY¦´ÁÖI »ø+™ÇÍ 5ÃÍsÒÊ!¨X/ŽKÆeDôKÕ/uÖk¨L[ÃN°^æúÛf’=Y†t`l=–oÌâSˆþ€èMÜ·û•}žŒHKÝEëd?Ö·—¾ÙÁ’ßaFóedgõTÀÍ;0  øù18Ì«þÊ-ØÅEŒÈ{†a›UÏú4ÿ©( 윖äÿ…|’Ó]Ÿ=>Ь[Ѱ‹º+7wFáX¤Se}: §#ZÞ¡úF¡œ¢õßCÏzZ?-’çAnÒ«:ÁZ™^CGõsM’Í IZ ü¿M¤ «øË&yykß5î bßeyfm*”ÎŒ¡íÚÍ›ýlÎ-†¢!X±óâ=+wË\@5†²¯ïT“çGÜáÔkô j}¾ ßðá—þ’%¢Z÷­C ‰çï±§šA¾ÈzCtZ+î2i‡Z·¿Ü_kA3ZK™ ”~2ƒ¤³Z«|u´ð.´#"òĆÔÁ©™FpKé•‚/À$×ÒoX\î]뿘ä…/ƒº3(˜P®ÏNƒ;v¸àÈ‹¦mpƒ9¤r6=gc†œqvèQ&žåžõ\·î„ýoùá÷.›6‚§#ÍPÅ?‚9ª‘4Jyiü±(J°£ô¢Œ%ÁX¿Ûjì}› ÌÈlh°ÞÇ@£Òd0ƒ €Öþr µc ·0ÖP ¶˜YŒË 3”FûòÇÍæ«‰•äO„cQ]î«m¾[ùCÆ7ÕДO(F ð‘ÀKô¢K• ªÓ‰\/Ó¢=ÆmÏŒhÀ©pí™ñ@4IÔèŒjC|2GYmòº.¿Yµ£…× ¿$Œâôÿ`†ª8lÊù ñ÷Øä‚¢Õ›$ªƒ0wpÍ»Îçi¹Ë7›) š+¡”1E®gÍg`F™ÍÄä¾IÉ«îÿÓÏä3xÎü`d˜gp8Â5*9º×Ìü5cS¿ÚG ót‚“6Ï¢I” ëO…véÝÓr“¢í§NMtÒù•»+ˆMëÉ™”e^}pÞ!$t«¦ C›œtÇ,É»üçQ¼³Jø„tˆ€iO‹ž²û»Ÿå|îðb€iÊL}‘v†K’¶}â:^àæ#EOPõäª*Šm± °üö•@À—‹¨Ì75ÚZ oh>g±¾50¥£%†å®];rÈWÞNœ5Ð𪘌Aè7„$›Š@HR³(Ör–aÐ3_CÙE OsD—£Â;‹¥ÔX–V·?²Lf#á –KŽm³ñÅžx[Ê÷¤&)ÏP“¹sŒ9ù‘~íSX´¥"õq»Í«òCÐŽ¨š§¥gðf`¶ÇMã0,F˜?ÂÚ„üÑf†¯_mèZ¥ `4ýWQ!0K… ­Gïç7€†ºÝ‚¡†õa zß.Ø0¶«:ŠLm9k¢lÔãÞ÷¤¹Šáú`› á(¾AmÊðÉ­•†y§07$¡µÏ‰-z¾Â8‘€be¦øI(©Úu§ç/ ™ÚÏ\ÈI:¨èoFsc-(šÐYn/Ð3›š z÷-º 7¨E-VŽ¡ù^8¦Lû¸©³GŒöë¾|Îô° ù™³‹À¡3›šµqÌ4TL¼BmÊ‹*¯n–!½´ÃêížÝñPÅý´+¡‡kËùô‹¡vŒ0ì:˜ ‚÷a܉iâyò/¨•ûÝendstream endobj 130 0 obj << /Filter /FlateDecode /Length 160 >> stream xœ]O1ƒ0 Üó ÿ Ж ±Ð…¡UÕöÁqPœ(„¡¿/I C‡³t¾;ù,ûá:° Áá‹"Ë:ÐâÖ€#M–E}m1î,Oœ•²¿)ÿþx‚Í@¦ð»šI>ëæ’Wu ¡Ó´x…O$ÚªêZc:A¬ÿ¤=0šÃiº‚³i²ÿPR4•8n®!ÇÜ47I,Óïï|JÁñ]SÂendstream endobj 131 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 184 >> stream xœcd`ab`dddwöõõ441%º0ÿpfíæaîæa™ôÝFè´à þ£ 9ÎÔ‚âÌœü> stream xœ­XÍsÛÆ¿óÔ^zꓲì0ö Øu“2ãvÒ‰ÛÚæ¤Éˆ„$º  çп=¿·»ø 9v§ã Xì¾Ïßû½·¼’˜G ý 
·‡EÝ,îÜ­FáÏö}¿Y¼x›&‘m*Òhs½ð'xÄ…ŠM"¢,Ic.l´9,.Ø_–k-tœ$’íò.¿dÿü¥®Š­ÊØZv—oÿ“ßß}u(¯Šj{ûÕårù~ó÷ç±Õ*‹6?.6:óõ×ô¬p8eí]¹ïzY^Cøh8ÛW]=¾å£„®É÷Uxå’åÕ2Áqe¥`»É®¢íÆãmÑ9»ÎŒÙW»âçåZ¤B±o×£aüeÕÔÁÛ©K©œqé˵Ê|<¦B:²ËdÚ°æXm/YYT7Ýí¥Wz¹|!3VäC0‘„ÒÒTA›îÊ"\õë°'ž‘FqXsº[åq}ØwÁß‹AíÊE6“F²µ|ÿŒTʆ›(‡?%y=ІÀ!>ó’Oòpc¥íL¬°#>Ô»¢|&V‡Kö“ê÷§ì¿ãÑx5î0Ⱦ{êc¿ñØÖSxMöò$Yr< aX2|“=ˆ¹O¤,Áÿ Ÿ‚Aóg¼kŠÝ| éË~‹@q˜èv™'«x ŒÝÅêÙt¶÷ „l›ºmïšzç\Q™d—ý} ÖO°u±rri™½…mÔÈo¹ž0Êâ$,ÖÃÇ!";ðÑ_½Ã.c(®U~Bk½ÌOP“W»úК²¿Ö ÑÆ§Ñ\Of-gßHž„ÄËö'ÐãfzÈÌýÍÀo>c0g ;‹ÞèÕS>  *ÿ'X êû`üŸQ¡> bÍYXhn3Ú,Þ@…È¢›vAŠ£GôÆ¿-¸Ânm"%3+J'fºR.Þ=ÛA¹œé B$±Jx¤•§o ¯÷UŒÔpk)5œHCJéêþÍ‘¾ˆõÏÙkä ̽³×ø‹E¥+Üb8.›x6gÇsôø)4"¦¦~†•/ö“[@&Oc÷…ül¼~‘À%ëe0ÖP‡¥7ÔHAoeIá7Òúšãƒá®Ê8Ã6ž*÷¦bás¹V"ˆÎ8!ÚÒ*@Ì×°3€´[Ò§H¦Nµ ³ OœhA»û–¦N;•+´ÃΧáLD£§á +_Î0xI8 tæã¹YB‰,¿* Ø–W4â%† žžÿµD)(îÀCš`ÎX×usÈ«­;IØAX®çª'Y ÐÀ*3×%Q–¨´3c Ò“Â>D<ì9‘ÿ…éeò߬PÔ™5–ûdspÔ²¦¸ÁÁv_WýÆö¶¦7Òð§ë£bP=(‡!y,Œ<ƒÈ»{N®RÁ޹ã+WGƲWMS7«P–5šÐ3Qr+0¡nóf´­‡§Ccù·ƒåœ¡$ŒÆpp¸ƒZ Ýœu·îE9Ý.@ºfô„˰º?¬Ø¡ènÉMCCDÆví(æjØÿ1ˆ“z”M!jB€¬°.@^¨€Pâ¿ ÃÐý”,B<ú­Å#èK!è¯ß½º\þ¡-й9ø ¨C¡øëˆI’Ìð¨yþ.2_¥@%!‰0Ü]„ Pý¥âÒô2•ÀÖçßÓh -‹7†Û@P*bIý÷·ëáqôƒMíÕ–Ð6!mc£Rþ9J‡}h›Ewlª6¨%ðôZ ÅZ8$—4MUÚ’K¾×¸«6 2Úµc÷ ê+ó¶ ç‘©ßÍö4 ®ÍylOKWǘ$cO­SŠØãí~»ìéñv´”Û•{Sp+Þ–Ç]Ñ;h=Sz¼Äò ïÄeY;†5©aûꦯf醪´žªìèEg¬k_Îv?½þ¸`ï~Z~Cà9q³¢6}½œ ‚”íô‚ò®Ùÿ8 mÌs“Ÿ[à™³»»±ôë¦éÁù–"“ÀfÝ´#\×Çj÷ç9/0!aÆã4oc£[æWEÙ†‚˜ZŒÞ+‡šù —ýÀÂÉ ícMÙŽ-z_Äæ¶ÿ£'ß>˜8ëá讘w£ž–™=qÅÿºòÔ$—5õ®„Ÿid–º¦ì’CÏ}ó—~ÖA¦\~ˆº1“úü„}3ù¡Cè×Î÷|&m_Ý»þU…¤“ç:÷¶>–»ñóÕ8ô2¬bÇÖMCΈ”ñ ÝI¥Éè†çº¤+K7«ÏJæ,“)kßórÿË&ÿ“LÞö÷ãêµO`zgï£Kx-îóì®Áq(cS’M>¸`?\Ó|L£K?qÝNçäÚ]ØÝ«õ•“¡@ÚõÄþ3œ)îX]9h³ 6cÔåý3zJ5!È‘O¯F/‹ÝjœÖzBzB ?+?Í }¡Ä´õ=&ðȵžà·•åø¶…/°¨s¿ âÁÚúГ">ͳ³É9ßíö¼ìË s—‹!±(¸vÛ쯅ˆI,S JÓUR¹ÆÏô0è½Yü ɘbÚendstream endobj 133 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 1849 >> stream xœ”{PSWÇoä\-UKšjkç^V;³­íZÛ]«®ÝÕ>¤¾‚ÚUDå¡#D$!$Ìë&÷w“0$¼”Ü‚D©ŠhqÇgÑU*ÝÒ]g·»u¶ÝºÚé s™qoª;³ûßž?ÎÌyÌœß÷{>¿¯„HL $ɤ¬´Ìô·¼½0¾x åø‰ñµI,…äDßËèVÊ?ŸûlêÙiD|LUí.-+¯ÐlÝž^°cgVAdk‰uD‘M(‰â}âCb.‘F¬ V¿"ÓÄD”c’Éï¶&tK_‘Ž$.K¼Ÿ´)i@ö¼ì"ZõX²è9bJ¬•ãcót’£x{TÚ[ªPQ¤Õ¼ºÑN“5›Rã ^ÊËÜiœ…>ë|øýÕ`Æ:J°£»%”9~n©Ë܇hlDÐÍ/ëRÈ÷çÃ:X³¹&‹ý׌)%w.MÚXŽN¹Š?¹&ÅêX†BH6~ô3H%gÝ&á™ø™/pNøñ}œ,¼@ 3P€ŸõƒþÐø·?zCž ŒÁÉ%þ¥\§=d3« sËKŠ* a1äݵ “ΜRT%(ùØCä‡(\“öÄeÐ&—žÈE`å,`! K*Ê/¿JVZáŽÐVîÛÖ僧ÖZ  ªöZd8lí. 
íÊØ”©ÜC³Xš$¾Pf{êìÁ£ØtT:rJá’µªêrøÏŸ<úT{RÙIg䕦¯É:48Èž½®zöWç£ð ÈãôÚÍÆ “•Ru”6¹r¾æõœ“§tdÏaGÄ|¬¶g§+dÐ*å®Õ ‹oÞ¨¢œ®öj -,c£…EˆqÙêüí ^ªÎWçsûÉ'ÚÅÊæñ8]'y•b*6¡X[%¼<›2ÜôªY#T“¿9’þåÝæ±+Ô§â–‘3‚ÌÛ³hmVõµ(…»p*/¤ât$~i@›{ЫKù Õéò»Oaí Ön4°0ÒÂÛЏíáݼª¯ö ÂÅÞ¦ Ü/fàÉþ$ÞˆvéüÇiùIü ɵIò»ê:Ü\›O «Ð{Eóç«U¾ýª¦• Ã~R¼v=ÁÖÚûµ.åÔeœÍÁoL—?ø¯ž>éòæ‘e+è"4‰îãŵz[9hI ò®ü\=Q¸ÜÙ }2ù7=uáÞnò¶~#*¶o)¤ä a¯CÐÅvûû³:³EÖ”¥š,»É.ª"Ë‚†}í"톎±³,Cá]¡x ÁÊméivÒ&ËÍ;ræ!F}¥ÅŠ÷1Oqè»,†)ÎÎ1@j««tÚfc[[s0<ònÿr!åáÙTaÒØ,<íú¥+-´ËÍÕ‡t#Ã9jÀhvPšõ[ò³@Ú:ýÞ•=ú¯à&9Êãçq™Un‹È·tót‘ïñgx ¦ÛbŽËÒKãâ§ð¨‹g ,¬¬ô„­)0mA_Σ ªýQš‹ );²Û”/O‰Ç…¤àöNAÚîÁÁ‡-ðœóøõ‹xޝ¥¾ÕÓÊM|=#¿Ä™OÕª±š/“—yŽú8?x阉=â?y¬Ë}ˆjã…—dÕN}ž7q^(Ž}emªv%ý·%‘ë¸fHú®Uàˆ¬Þãjøný·B’0y©8IfÝS(ÙFûeÃpãdÛ-Î͹E;ên¦†)5Ù(M^îΠ‚]š¶Ì~û]&¹Â5zì‚;Ðõ[o†ËÄÖ¸LäO¡:Õ³¸@ Õ¸%õ(v|LOA:Ǧÿ+Tõ² и˜0‰-hL}õçéÙÛÖé)㸦Ä;qß'ëÚÅ~8;]>Üo†&t¨Å¼•Žþ'[ÒøjTQ!&sìY´«ÆT UdeHÓnnk§äÝ-†ýå”|Øá`3ź<£Oë`u)çúñþéòȃ«Š奲Û@bbô›G#ùw–¢WXÌdöñÚÐGŽ^ùáê/Sgf @z=nË‘¼¹ŽõØLU9êjJ9§ZÞ¹væ+ ß=çJÆ}5=¦ù»ãÐlwµÛ(BnEö÷Ö§½+࣮ü šÈžnÇ1Öï™â c³Ò‚ É‹»ÇIºe Ðm ‹“-$¼Úñù¼$79„ŽC°6Px[Ø0*䈳·Æ£n† xÞÀ5¼ä^r /m;ßtã yá5 À^Gƒ¥Þ<ÃkqYÁµUØfƼãÍsw¼©ÍÖ(uÙý†ó•¿Ã)ðÔ\œâðÛ ä{¸}¦P“¥K’'Ä¿˜ž¹`endstream endobj 134 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 4311 >> stream xœ¥W pSW–ýBXÿ†5‚~èî !Ân0cÀÆû&/Ú,K²­]úOÖjK–%[’åE^±Í„%B ÁI!IO:KCözr?Oežl’Ꙛꚩ©¯*•ê½ï¹çž{.‹;†`±Xãv¬ßü̳‹EÌ(ù…Cb@,ÄŽu<Ê=:®žòÓ#×&GoOLŠÒ‹³…¹ù)¢,‚ þ´RPP¸¦x­P´^¼A’rP–º9-=#3+{gnþî¥Ï?µè™gçĉ­Ä6b;ñ8±ƒx‚ØI$s‰]Äæ Îίd95‘’[4®g<=Þ4Á;&v_ì…‰[&~3é‡G:'/™¬™2~ÊSîNíû•µv+1é×1w>&Ìa˜Žì‘²ÚZy˜& S=n»ÛZCÛëøªj´v× *85À\aQ¶Æ9EN¡SdÑZÕ@C ö2…|´‡E¤ˆÌe5å5z¿Þ·&¼w­‡ ú:cƒ”ÖT¡ÇmF[Ö£-ÐÅ>éÁWð¹;¡’)xRè^ މޯ™égAâ*L½Ên¬â êÉp‹aÚÉŒ}<~]>~@hí¦áFr ëþýwj¶mã£r2K`Ìç—ŽÜç™{h(%A;Ó&oÞëI‰ E–¤.×–”Ìq“~e}-!ÔÁÈ¢ #¾[§qOÀÒH<ï1ÕÓ âRˆó)bÃ8ûõ/pŸ;Ç.ƒÑt>÷„AÏf¢ÉÈÎcœ z 8¾2-_—_^n¸$7Ïïg3o‚kõWÎÞºÖó|GÁh,\…2ø\ŸDÂHgÂÃ<žQYDÜÔ|M>ÈÔs©´²ÝivÑ’ Š©Õ·£ Ôõñ?ã\×o¯zkc X*€Š©À˜±=Q’,”²~º2 –—lm5&ýAÊÂd(gÊ•[`Ã4y”ƒn8æNÈYé0; X R¡S+€œÕËýM¿¿;+½+-ý@ ß Ö«Ù˜$cÊFåTôÂކ Aö‘¡Wx‚6²³Í©c´£ˆ¤ærøyøf˜SŒP{ mÇhhù”<ó¶á)Z„^±Œ\Q(œÇ_ =œ·³™)ÎÛà}¦go[ªu‡=‘ rŠ wK=›Pû#ë¤S;N@Y@rr÷+ØYÌ+hÈqfjõ²‚ù´šìF/AÕ=AmÞQ°ë‰âÛ¥|“ÝhÓJmÒji´ÔWj>Ï1àâ›9“Mc×7KÛ5€ºy§û»‹Igvúèš`·©ó—ü}®ä=§6k]|î÷ÎJ»ƒ†SH›Ñ¡+“ µQŽDÁÇ3Ý/ ÃÍÒ©?^á¬À4îGpNd˜÷zâ=Η’ÜOî! 
(%PRÏœÜuw°î³Ëücb(Ì   ’5Ë·ÄW ôóaÎ&¹}a4n&jR |/ »G¡>}C½‚÷¯È “åk]hï¥ažF2¥È´väf.™“gíq˜íÀNG|do«½Õíf0anà6»ÕѬO£‘†:òHGåq~]˜3Úœ*œk§wÈMãŽ(R y4`ØJ£$ä™s½…MΕõ³àbWí9ó³q®Ž —‚ëq ʯ$š]øÉÎVÓ^%’ú×ÒÖ¼ 2@f•ꌩøA=…¼Œ!`¥M¾“ìÂÉÓÃÝdzž.·¬l4ïÎßóÎηõÒ‘n²§ÓÚÉo‰|l á–D†Yu‘ÕQíµå5¹þ=q=ózæžo­°*<À¬µVWïƒÞ‡ïφºÎ*Œ ÈÓ E@©-Ó—Kw‰vI¤»ã *ÊT¡s˜ªÎ’“q……ïFpRVÒ ã$¨09ª©ª€"yÜ?õØ0åê­à4nÿÏ‘±¼PÏqÿq@?¹}'@ÎAk Ð,ZEV­»[tœWÛkÏQfW‘déÌ><ó(è8d=B}(N$úäœ\í±F®•‹i¸Ìnࢾ^ræ…ËÓ_ÔËC!_}Cc‰Wȇ©ˆË{Šäö¯Ù¿qMúžÖ·¾øË‰»n¾§ÒÃx,%wÖÓèˆ*ôaÅ¢üPD†¿'•n_èdgroîE,®ãÿþ#|R‹!¨76 “E4oÆ@ï0Ã3ìÕ' 2ŠrÒ$©à H »´n x)¸<*²5Xd/]?yŽ ¡IË´@Ãhù£µAü‘;¿ç°g4Äw/ì~ºw¡Uc/Ãjo Å¥¹*iÉ~Á¶x U²*ÊŒ#óz@ (|bq©BôF_á‘ÝÿnÐ×x¹öÖmóô‘60A¥¹H+dÚ’ÜU…+óV¥~ž÷UÚ&‡>Z±¦Ns€?R­Ãÿíç…AV× þG„âÉl”HV"*®/ 6ø½Íwži߈&.E1ˆBägx N¾vñÐ@˜®´[«FÊÏX • @aT•ìÚ“¾P³³à„»wš Ž=Ü'ËÑ“þÁ3‡‡b0¯éPÄûûxô²£Õñ°¯ÕäÆ]ÚLíœ0¹/±êm޵•:Kí¥Ó/ ¹çГçÑÜh\œµÌ*­5bªÔkƒÕ_‹Œ«éªéòt¹;â,‹£Òi>4æ¨EIÏÓçEµB…öý+9XB–¥ÉèàpOŒ‰ƒFz¤ÇJ-1ÿ0]ƒÐw… ÏDƒw“‡C&¬¹dnorh Ð¤³Qìì[h<üÃÙž†Gèu$œµ@Næç[úè"Ñ_†¯–VÝ8éwíA@º‘º©Œ)e4zŒiâʤë¨ù&ÌS õ{žñ³ÂCÕö/QÓÔË©¶Y«Üù-bŸC,D-î{öóLºAÕ¨m”…ó!¸q×úkîQ.t–‡da£ÅhÁ`™IGðmo0j€QëµjPA)j*<Þj·—o# RŸd‚ýra…LÃJžêh¯ñؼæZ@½Ïyü»Þõ*¥%_-غ~Aó*>÷—³p˜WC¾é¸ûÉ»¡M›h^zy›¨ÿ܈óúÍ÷F)‰+0[ß(ÅêÈö&l}‡ÛÈŒ"C_<êmÿÞ·½{ߤš` Øœ!ÛÆÀÛûÈߣ¯› §+ àÙÆº‚ÝE÷¨“jé÷ÿ““J +IAqe'™@&@!¯Ð ãâI¼mmõ¡¶ÄyKŸû•ÑÈgNÚgט|ï6ûÌЋ<Æ9ÖS•7Âím=¬'u‚ã˜r¢Óì`†.«w¦šŸ/€?„·rÒ3­GèH€v÷'®³uðÛ9ÀRgve–%û#Ùˆ?Šªè(K}£>ðF+ £‘/²öÒ˜¡MÇêüX5:v÷îíØmW;ÊpYeRƒ§¹=LîbïÁË9ýý¹ýz‡Á¡³c"\>qîö_£cq€¬õT( 2„â:+/3?;—ŠH°€²^ø/+Ëÿokù_W¶md«9’äÙ+»)]¶•b8Ú-á `3زO?²Þ°ïãØ$CχY¾(<µd„·ÒUf)±É[Ľ…–žE‹O¡¥vYœEeU¹¢ºá··Àñþ‹ÝpÌ!Èî€ì¨ š7‹¸¦À!ìGsO¡'O ù­Eç4á?¸€µÚRm¯‹»·½7_„Û<=u=W£¨%Èö>EºB%ÖÉé7МD4'=‰CU)=Rv8NHƒÒáxƒKëþ?|ÊÜŒ…Ø€±«>Æ­ –ÛSí%wMi*ו+ÒÐÄt4)MÔ+uJ“R[mª5¸áœ}pî^ø„.g²™lÀFµžÍü߉´92Ë»Ù&e}× å·Ùƒp2ÏÎØ0Ѩ¦úú¦Fi½P$–3´Žœkñ0öo?ÃØï6@â1_ÅÁýS*Ê``Z\Xݪƒ«àrcÿ«Ýnª’dl£óO¬”ë 4ùØÒ]É}¥Þ¢7S&Ž ŒžB7HM¥¦ºÆ^ïtòÃíGêÊιg¨E1`%X«]òjV©¦ÔX 6€×ÏÈß}¨‘fIäËíXVʾ7ô/%ᦲµÚúk]v[mTqs<øËƒ7êíIA‰@*ås$Y¯#â šÝd+H‹¤NT%§ÓÞÚ@Ÿ$ã„S.}ëið¼ÁÏà”8§Ëaµ”Í&‹ÀAc*Ø âí¦05ªèU <"À†«êxfÀ—q‡x[jªÁ ÃW¨Ro5Yp¡.W}Coí› êQ½»'_V\\,iòÖ8ª«ù+âšá7K§¾«nac åC x(^Æ ˜O¡Y-(®á»90¾®*Àý¨A~T UXtôÍ™ˆ3<7º}c.ƒwm—-‡mÇ:í]ÁnŒÄ£j”ú4½’+ /î–efä§JÇA±ÕN¸Dýåoà¶ MÁI Ø_Fðä Ðã8^O É´4]š\ae¸wß‹šm9£Ô¼ÓEnvS•JY^¢-ã' ö¨ñðxÜÈelq"wÙw"~žÿrÿé@ýµïù—_õ¦Õôêå‘D(Né4(›ôf=0P‚Ó_Šs4I–½É LØP,çœ>}ê­³ýׯ|s üLÁ©ó>@ÄÜÅ/Î)¨–Òñ;| ÊúwP 5= â¼$²J:õ*ŒÏ<•ÃøiÜw †x™¡ì–¬¦ §Ò£÷ž\£(—ú ^5]§öiêÔ Ae(ÚgBX)ªÈWl+…T¹Sáõx‰L«Tâ;=ÛfLâg»^Gÿ s¿endstream endobj 135 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 3299 >> stream xœ¥WipSGº½Þî½ìÄw4&ï^ ËÔ$ã&–„%‹Y `TÀbŒ7¬}µµÚ’úÊZ-Y²dI–m0Ál8,Æ ;CâÈ/„L&LÈLM†GZ©¦j¦e0ëWõþ±]–<™È¾>s˜£×“'÷Œ"J®1Ê8F¡öi£,sÔåâ]Bø: _ú¾0g \†^áP>©Àódc”ûîžQ4ÂL fj¢'A.ýÌf4 ‰¿ÿÛòÄúþìá²å8––Á;ÛGYqƒ_Ä¢¬‘µ¤ASŒcÜÃèâ\’y_V¥W¨ýÚÇ´8¼SWá?ƒA[PØÖäj Uù¥Ã6“pK‚ÏCŸ|ÔÚÔY¿ÐLKkÕ¶ƒB†65ÆO¬ô–¾¹¤x] 7šÌ·áR˜·º?{(‘Ã4ÀÇ÷ ŽT¨êô0Lo¹s¼ø@a„[pÍS‚!Ð3äí´{—÷‚.p\ïÝB3í"'PïvðM!îÑû”½®ª–-múÀù _GcU¿Ydç15wJy†ïÕtŽÈA‘E9E¹é5ó ¼Û¢è£™-z[¨\Xj“©¸90ÙLÀHË|ÕáGsØÅ>„þRN’§ÁçáìÁ ø\òUÁLR\­SȃÆ0÷TM¾|ŽºÑu#\l\åV)µF k¢ e[2§OÃ*Ö8h¦a-œG…ÃŽ–c@cçPÞwänK©Ôš* É’¼<›éMf$ÇPºŠÒáÕ¬²©ÑÕÑz¥Ü³$5›x“P®ÖÉ9¦W2†Ø8VÓÐ9ãÀS>é–yäˆy¸{Ýíßî½Ùö­Ók 'C!†5Qûä,Ók2ÙÌÂgI©Ö —4š›´>ÿ˜Èî„ÊTq—’{Ôô±¯” ¸jê *Ð4…~ñææK=Ç|ñC¬¯³×hP7ÔÐŒBßmkɸÏáê([£Ö£´””Šø(®)ààà" |Ÿe¸é"ò5+*#êh,ŽÕ:[W¢Ù‚9sté+Õ›Šò‚gXžg¿u»÷º:í€~ˆÒpްɳoBû…æÇÛ©f PesSÔç²TçÎýÒ ©ÿúÒ?åÁ1h»ïÉJŠ9¸‘jÛåØÅ2×´Z›^h$e"kÇü¨òUGØ$îL‹ßY8Ðolþª¿÷?ü6üEz®w`øÄï_5³Íü@$O$ÞGÒhÀî¶»›nVeR¥Rºî¸hßOûàä«ml×Ý.˜¶ÿnøä”:/H!%›Buí1¹CÃ=sc³P^¥S`ŒeAc„í ŸFUÝOÃPV x¢aOÈ¿ø&°+oùw“9ò´Ëç3’Dr‚ÀfIÒ¢¨6×·8ØËsø y«µ«7°E[” €FéÃ(N?ˆÛúýþh¤#*—ºŒW(u¬²t§¦¼–Y{öRàâá¡ Ñ3̇A Ĭ1˃¨|ÒHê…M§3†FتfÞ h¥Z/WtaîOd$PßÕxdœ†ä“GqC±¯äñP~xm8ß_4Å%¯—7ƒƒÀyƉ_Š]Ž_rA{#O3åÿZ+i ’Ñ 'Ñx•\¹XM;#ÉSäI—Zuµz›Î«‰™c’+•_Šþ¨íÑðe€. åUz¥¬Ñy´I0?ŒIdw‡Ü'Çáãs yÔö½%¡M€F! 
Ó.£LHs^²§n¼³—‹ ˜hF#ñëbìU2ôF¢Z’Û˜ “Qƒ¯’}™T©Œr,mBÿ•’/Ï~su’ ë‰ð6¨¶U]D1Ç—oZ¸xþÖLÿ¦­é}ðñDœ× `én›÷¾ßûiÌ@ÿ&ÎA"‘àç\8ö“ÏZûör®zwø­á*·” Õ©$eÅšíàe°ðœa˜æûž¡O?ýOÐ:ôQEG¥w›s;ýˆÄØ$R hI%b¤%”lDÍ4Ê@uŒeŽ»Ý)5K'Ã!|â%p OÂôákç¼”ùë•—gûµv-ÐâÜ‘rLJ¸ôcÜmÀñÔùt>X¶”mzOQQ]ÊF+)úàÒÓý}p[_þ¹ìö>Õ Ì<Ô—ÃÜÑÁ+Ég6RܼÕSèyóV>[á) йºZG ‹Ä ³j«,Tæ5(y+oÅ PS*×ihæ¯:¥XWVö" ûþÍî\¡CìÇ‘“ghÇ£Éñ¹ìsäu[;ç¤xO~ïVî1ìô+¾ïÔµ©Û¹°+T‰ÓI:­v³Æm¬¯õº¹!Ü´{¨xûµTÊ!#–q[Š=ëèÖìrðŽyî\”«—¬~&vÎÈá£ô@$À uêð‰ÓüIp¨ÐU@(°aë;ëlëÁ¶#æÞÙÐÁ©hrV$`y"û›A¸|p9ÌC¹Í7#ÀT`›¡W(‚º(†¸¸k¸D\67 #ËŒœ«€WÉÞã+¸w)¦gm]îwÇu÷G«dÛÂå;Ô¬¥Æb0b±?.²iá.Pà„æÄæ8Í;ÜùQçå'nÏþâI½FJõ:Ü2X*o¡r,§AˆúÌOšÜ—_.Y̼˜€ËäQy6Ôá]Þ‚º”ú]Dã(±N¯Ôz´A+Ï’Ñz¨IW_É= ¿'Û•‹¦’*±­B24{¼Âý@ÆüõMm½[`æàBêLàÖçíù%,ÒRæûlªÐèå²ì:o‘‘ÿ×t-5 »±àý-âR«8 }v‚rµDò ‚ëeÉs‰F Í| ÍçÀéý MŸÞèZGñª —’ŒÙ=M Jû)\õ\Ÿ€Oøý~_J“4œ¯~²ÔÚjði»TÖD¸Ë¬’q(¹_ßntîõÉØ•nYØDÀwaVÓíÉ%‚R‹²ˆhæóÊhÙ¾ý±ö¶VYG1ËtY,6‹ e>tËC&îKÀ}ø‘ÖÝûçS0¯7#9=)( ÄN›È܈˜5ûc”ñç"ÝçBg½{ù½ “þ‹n@øí‚¼?²âÕkÁZ=õg4.8{¶ùàÖív¹—ŽËìQ•¢ZÁе»Œ»0mþj!.¾±ï…YÿØËÁº8 ºD¬Ú³ÐêQ)éï¼–øªek6H^X¶RüuwíƒÃU#îE÷3´´S5¨¢•~]”…ídSC}8pß¹úHf‘ÉÄ…)ïSc£¦§Yí0ëaætÿ3s‹F2w~u²1íÄ ì:ŸÃ\Ö_2h†v] â `Uýª–ü޼%WÁ%Ðw¶³ß¹†KøR0 ÿD„üÑÏÊV~1_È\©Å¾Fˆ~® ØN¾Ùá­óâk ³:Л¯/­Zºs §(.ßQXC›Éi .6|ùˆvý’‰A7 ¦07$µ‚{Kr)1æ-Ü1n ™]#ŸÖˆ³•cºÂUõRöù¤“.Þ“²vú…¸F¢6±Êå;¶é¥¦J'(¡k©:E¨¶1Åúû{B­qz";6÷pü‚ø_(óendstream endobj 136 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 176 >> stream xœcd`ab`ddä v ò5070q$º»äýÔeíæaîæa™ôÝXèà~þ= ÀhÈÀ ÄÀÄÈÈžüŸ±á2ß&K?†ž?ÙÊ¿OºËüéÇ[Ñtö½Ý;'ížÏñg*{wzAgIg^gnw^7Gú÷@öE}¬¯Ùöv°®g¯ì.ïªèæHH¸AþÇiöé­“êªkªšëå¢r›ƒº9øä¸˜§Ùóp20Àã8ýendstream endobj 137 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 406 >> stream xœ‹tþ SFRM0600‹‹ø›ù.‹ ‹ ’øÔÏË asteriskmath1*‡m/ÿ ÿbö²vvù-w÷¥á°÷ øîcÀ‹½’¼ ü‘‹gn‹t‰l‰k‹l‹pcä°»Ž÷Sp½ˆ³w‹sŠv‹_f¯øÇš„’}y~{‚|XkN„P‹ÿböÄvøBw÷´÷«a£d§b¥~“}—{‹x~x{m³…¡±|±}°{Rrû o‹e{˜xž›™—“˜´¥²§µ£‡`†_†`ŠƒŠƒ‹„}“w£›œ–œ‹ˆžˆœ‡®ˆ®‡®µs²o´q˜ƒ™›‹ž˜ž›©c‘u•eše™f›Ä¤÷ §‹±›~žx{}ƒ~bqdoas¹‘ººŒ‘Œ’‹‘‰œz–{‹sƒw}„ŒƒŒƒ`_` –? 
E]¦iendstream endobj 138 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 5391 >> stream xœ•X X“Wºþ“ók]‰Z;ÿßE­µjíX[µ¶u«u_Pµ¶ ʾ‰@ÈÙ““„l@š„%‚ PpA¥U¬Xl]jkuºÛv¬¶ÚÚ™æ0·÷l{§3ϽÏåOxàÉÉY¾ï}¿ï}O5r2jËÊÈõsÌüg ÄÆ;  ÇŒ´>q/ ENüaüÉ TðgöÒôŒå™+²Vx5{µ`pOnl^œhï†}ñ ‰‘I[S·¥-\4ãÉY³ç<=÷™?Ï£¨Ç¨Ô&j*µ™šFm¡ž ¶R3¨(j&M=EÅP˨íÔrjµ‚ÚI½B=M­¤^¥VQ«©yÔj>µžzžÚ@=DM¡FQ£©±Ô8j<5šH…Q0ãâ1#ƨÇÎÛ2îññÏŽo‚rÂÏ·Lt‡-ãMæùy¿LŠŸt™?ƒÿ?aòë“Ãg‡_ù%$v5î—Ð)5”ÁbüaHK3?ô˜8·¹ïj9~çh)5;ŠJÙƒßÙaq|™¸¥ÐPP$÷lˆ° lB›À¨4+ ‚NÖg0ɨޟ ²a–Aì:ÔÕªòå(j Ú²E)+5žl˜5Ù*Ѽx~q-~17:âÀê¸ü(=ö¡<>®¡ñøgÜ/#6ÒÑæ#,R}ÂvmUºS`H‡ûi¹U§ÊÜ"Û³^‘«Â\H/GVns•ÑàqÜ>øl‹ôî5G[vÑ>n:rÆž—{Ókþ… |©½á¼¿Þlæ7¥·I:!}íVÉZúìŽÎ¨ÊÍ•x’ ^¥{?r¼yöxÚ +£08E%% ï®Eo±<ô5°©­ò<ù²[Wl–ç‰Mb#ÙðŠ©‚GY%0%x²¼‚£ÑLétú•õ‚-K¶½uAÊh‹a%ÖÊY<@^ Õt¡©°Än/±˜‹ÅbµXÌÖ ®–®'„C¢ÆÀãCœÛþMèÀ}|#×q %Þ i›Éf &ZôVh¥ùᛌ¬Õr°,[ + „B­ª=$õPÉîpŸ"cÏþxÁù8>}bÇWÉg /ÂóðvÝw]èjï¯9 ÑDxcª‡IV+Ö?0pãñ]‡fCïÀ/âçðÎ9­sû_e/.»³÷¼ïTûú†P¬Icüh½0 …\Mõ¡?5†ón£§ƒüIøéLàý|—paŠ1¥"£&öxþHæºqžmIPb@ ýFÁÒMQÒÞã r¡'ýøI´¹¥À)ðKSK¡mC,sB§Áé–U;X ö¦¨’%b” ö)”è¥NH³´³ÐÞl9èpèa ‰Aç\U*<Õh†ñ\îÀr>¯'¹ ¼ Íû_}sVû¬7gY$fI%¬€‡ÅÙv«ë‡Î[žµ‡Þ†4Ï弞8-g)—çŠ1qÈ7K_-5åгó÷ËÙÛ19Û•yÊ<¹(þÆÞ¯ö~¥µjlÐF›©ºÑPõë$뵜'¹d7î „Þ ´9 n¨… ù7Ây~´•&­´¶hv³ƒáæDPD¯Í™5?vW•ÿã«s7±¼ÔƜڔ´,aÂ37w_ùüë“_;£’‡Ä€V¯¡ƒ±pyd=ÿº¡ÖA zðìþ9w²¼M)u«Îi™Õ2§¨ HâÔÑ&®šÝ– ×[îSէܧHßuZ¾Õî¥u¯º¢","sŽ’a#ù¤¤õǶ;mwmî¶HûñâdP§¯3`–(M~ gKN¤0R«Ð(trZÏU×Hkž¬óäé×YÕÁ°Ô‚•$/›´œg¸»LœdðA§¦<Á¤,"Ö ¤Yrd¯˜<Ê\y®:—Ì¢rº®ìsÙïîïÓZµÁYŠ€Åíƒý~@:‡{¿Kz3:Q\zÊÖõ J¿zÙΓ¢‡â@S™×N:àû‡c^b³ž€WÄá‰ÏßÍì?ÕW{ªŽ5p÷Lê8ñæ¡æLê°_Ÿš¢iu3Aò•è•Òw¦{6jzþ 9ŽÌƦ*W]µ¬:ËÌ x<‹ÿ$àxe[ÔÊ×_óÕ0E’¬N©ûÔL£o¨?:„?óþ\É¥ VëË••r‡¨T|îåˆ²ÜæX SÓòób"w¯Žàf7©Ð®.ç.o÷Û¤@‰Š”%$Ý®ÊJ‡ÓQVJÞV»Ýf)¶+¬…69™Ç¢°+`!”ŠäRýýHs¼Ef8>4CÈ…BƒÈ–k—VIËãŽ$žÜÝ©7êŒÐD´5xå'Ïvv_%Ë› Œ\oh9 ¸<óV‚}W2(×ÙdPÓâÓE…\–§‚¨5ëL›/D(ì’ 6‡áþw ägs‡û9©ËŒY…!hÄÔ|#´9Èêûið6h4´¾shwWÆÒ¬Ç!î-ÄG£žBæoÙ.Ø»—Eï‘n=¨åkw+wg$¦'íì†ô‹ÚNßïÙÁôô^í<ƒFú0½PIš¶’=~øøæ7ÃJey‚'&¢}nÇÜö¹öÜ£Ö …Z ô*R«'FdǤ­\ÅPb‘ZÊ­eŰޮVdgå ³£z2:¯u}ÐïeêûŽÞëú[÷OV´»ŽõÿJôX-gÙïÕ¢Á)æÊr•"vž§)E уòYMAÍB´>BkÓX‡K…kÍÃ3ú>‹Ä-äêRcÑ!ĦúÂ’"q!xŒ/TäC!QÛäuÖ»>˜ïÜPÄȸxò£Óñd<þçéˆwñBË/k´Zìdr;!V®I'c²Þx=#ÒSc¾¿ËòZ¯7!.¢½m:±—^Ìà pCŽ×¢+îÐS$Z2üÇÇ-z·äüWV·ÑIø”;ž…'ÌÆcÔJ¥Êé‚bu1ƒþ‹[¢8©\õ+ÊÅS ²uÙ0ÒóI"Ç×M4ÆlµÚˆ‚qHræˆ8x5_ EŸQÚêt›X Zv¹¢I9Oa>ÿÓ44®§ÇÝÑ®hv1ÇŸÒÒLGÙ êøN/:>¿z°ºØmvÁÃðØÚX“Ú¤1¨hœ b¾½tÛ™K2FW¦/Ó•ÑÃö#HÌ&4­9°º9œWÑE¶Áó'ƒZXku•ګΨ#.cÐVlQ¿ÎðR ¥üçMœ¿sy©Ãš¼±Ùú¾Í@bÍ„ #¦eë¡m(_‰¨ßW_¿æ¥Z­Rçb`Š ÁtNó½–Næ·Bý©Ù« ^Ⱥ½ªÕìà0HœR'‡W‘kä g§ÐbëÑsÞ4:èXz¹‹µô¸¸7¡•MhÙ^A‹GaêÏøaÌýBS?¼~ôŠ‹5˜ &Âm›ÒªÈÕ¦ÈäLÖžíûwÀm0Æ»¿[Ю= »hC=¿øtqß™† {¥¹ öî¨âå”eÁV¿Âz¿‚ùÑ\4:ƒÈÊQDTv Q" “?Á×¹‰iºFrßßÜ—”Ù—†‚“ b³‹:Xtñãñß‚óU_}h/ÖiíŒX¡Á|š×q JX×Pá©éÞæŽ³tÕ4f¸˜ãÝhåù«HÚX* k8sàÚøq8ï{4ÖÍC¾3î¤C ³/}Šf×þŒ&â«f0ó¹qY¾Ö ¥æ*»±+-ÚZi£Ú éëßöüµ¡°1ÏÃj‹Tfh¦g\MŒJ© ¢XT&©¨t8*ýÕi qy¹9ŒV§Q«!é>(·*‹hÞ=Gz†-eÊ¢§£g¥”¦x²X“Ƭ!ÂNO´w¾P-ÖÊt2(…ûœ¹u)tÊã|ÞßLz£é¡·{=G˜«Üá—ª’Wi€q;S3äÉ’M"\ §×)Γ©ßB˜_NWžÿðœoÛªMé[×ìß_âÊcDÕZ¶–þWJ8CŒé”Iliw{5• ©ŽØÒA²¥AIÀº'UǨ¸ÿÁýïþthü°?ŸŸ 2ÒMY” Oï“x³n+Û7ÁåéY›ô軣±‰DÐ(›P·rG¨ï§pžøÿtG]AÝ& 궤tK‹Š zà ‰ïõÊL˜Sè²™Õ³ÑÌw:OöÚõ¥b‘Q(Ó1êĬ„x åç•G¤×̼xTlç™.y~ê„·! 
ÃóM·Ðx&¾ÎGÅ ®¤¤Žùû9™‡yøqÌWÍc ¨Ÿ˜8ÏWº„àövJM5ªÝ,®½ï”†(OÂx—PþÔm—€ô,c3€8ôl$á§iòöÃ,:ÓÑÔì©o`xþúo ÃëÐjõÚ)ÃBÌà ¢i¿þ6´w`!_ˆ…¤¸šO˜.ù[üͽ¦Ã>Ä­õê“ÙÁ§ÙǬM&úz-7náO ø<¦f¦øž®çíI¦DSñ= ïùí8k#~ð7êcØAݯÀˆKÑ%2…Üß¼ß=òg9Q+1ÍY¦v`¸3#˜çqgq7¬Õ;¤©9ךoþG~Œ#­yæœ`O0{¬WÑÚKhã5´º¼£®írP•mö°µDßt©Êó-ò“Ú ‚j(VˆUAê£4M–*KSŠ Eš¼ ÑÊ”e*g—‰_$|žøE~‡àMÉ›º"}᪠˜kÛ õ¶øIg Í™÷‡;šôà+œ×ù¯iä´Ã4ipð7¬WŽþåûÓå;70Xü †ÆÓ@¤Açÿ{¼¤¥¯u’lÒûź¢Ë‚×:KÒ¬''•_XáÙ×Ãe‰9þç-°ík<\ÒuYo†¦ ÷ø.|¬ÿ Œ)ó´y0&Ø3ké:—«®&ד‘-ÈÏÒ²r°àÓ¬‹¹tòÝ.æØ…ÊOá§ð›‚{.ï;¼Ål Ófà‰8ô3<ëíoí­duè!>,4Èa¡vyâ b÷fî}ƶÓeï·?@ú§J‚ZfØ:â\Á?ÄÂ{çCÑr¾L“›WP€Ã1g ^ñKo~¯EOá±f…($Új6[™•…¥,HD/Cô,D ÛÑËh cµ–ZMÅ#6e‘R§‡zÝ¿.ƒÔçCß'˨uZR°¡¬&Ϧ@#ñ¸CøˆçCüB^F„Æx¥JF´3­4«mŒ‘[f¬¬.-!J”>ƒVBôm!º~í*‰ŸB´ºHU¤‚:¨‡:==&ü²FiBzC8ïs´±œ×ÖíØ¡Ö¨TPIÔ¥AiÑXˆÍ¬m-­¬k¯8»é#Òs˜u^L§Sê²I$/”A§¥Äœ4v…Öõ‡”"# :Ø~MÇpÁâõ GPè¿§Â5œŠž_+O°• ÿ '°}(ÞÊ U>‹ObŸÂàšBy””f•5hõœF›ßZq™o#+ù]Ý]u¬ªÛd·–Àb¢Q­z•E9…×£×éõ÷WzF7„t|…j‚÷“sø8r.^ñ ³qмy¡÷s?S{Þx^„ßÈPè†÷ã;¶Ôn…ªà£§ñ˜Á|=ñväéƒ}æ³E§l§NµV•º-Dw¢IVÝÿèôY+×0JêÊñù\oKfyókY$¡M™õa](dñhýwKïõ׸ÀÇüÆŽ·+»!}¶+våš´^Íg÷«#mZ-±ëŠ™.qdÁkÞ¥¿5>³„UýªÔÅ t!w\^›zœn©ñø›²\iñ¢´tÌAá{¾@#øbí@ó墔ҼþKÞ÷ÞþëCßbêÈs‹0µgÙ¤1³:—Hª»±1«:ÇŒ-yyÌ(ŠúoiتJendstream endobj 139 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 2421 >> stream xœ­–{PSWÇ/"ç^¬}q›j÷ÞéN»ÛÝ–¶«Ön}la­ïÖ¢•¢Ö H!’Ü›7 äÜ$@ äE^ˆ€ˆ…EV©¶ÖUÂø¬X[»;ÎÚjý£³u{Ò½Îî^Tl·3;»ìÉÉœsÏ÷s~÷{~ß›‚MŸ†¥¤¤¤ç.]·.ë嬬É?bȳÉöïW¦Á™©pæôÆ9÷ÍÏ@3þäÁÃa“—HRQ)“+ØmU…E;‹w•”¾òÜó: {[‹åbë°õØÛXö–ƒmÄ~‡-Å–a˱ØJl5&ÆÄ¶` ¢ŽyRæ§ìŸ¶lÚþÔgR¯NWO¿’–—väâ;ñïˆRâÛôéöôfdÍhøç4’¸}çèèñz6£Í]4Þœ˜EžC‹“³DMx·ÌÎR‹£4UJƒæMŽ ð#ù€<'×k• ¡F @4Ð ê½r:½ \'F‡OÚ‰þˆjÍKŠñJµQ.óWÇhôˆúïMÜà‰ºñWÚ ²ß©Õصâ·`!»]BÜ⥠”™@ǯ¿ e ® PsD½Œ]Aå34s&H0J£¬2h ÓäÕ·Ù§ÿ,t–þ^= ”þÕ·ˆ¸¹<¹©H)-¤ǯ™²7,] ‰×ß=¬…knºmnè&>;xôâÈorja­–šª×f& &áö ¼EY1èï¢No9½/ô÷ŠfC‚cO fbÊžEö£ì$¹=‰CgÙšŸÎçÊù‡^¸’åÐ7ÖOJYz!¾6{G'[¯¢µ€œ)`FoãðÚâ‘…-*» ª ±>«.»"$·EÂmè´9h´ŒV´_¾X»±heptôKÿþFª«¾ËÑïBÕŽ#çm¨3‚]N¡8oyOG(Ò~áinó¯_•¼²‰bw aÁ§ü‰ˆ>v24ÜC¹ZšýBacŠU ÇTé)Uy±¾ rÎ^¦ÉÞ/Nœý2´ÚB÷Î*Ož@ñ‰TD –A:›Á\m0V?ö«yϼôâ‹:] *‰ÒÝ–…nPÔÞCŸÆÛÕÍ £¬)§¶ðxg¬#ï88=vzÌï÷5ÁÑÁ¸XŠÿ°2K ½—6UuÄ|{ïôñ8š6é˜î È!ìðâ“">/ißîÝ >ý¹§øûø´/ž@3[÷î¥xqÔ¦&@8芵)ôÖ¤ Ÿ…ô>EÞ¨1ÛjÄ«3i_·&Jóùë"¡ÖõÐIœÿè⥡#Å뛨84rFh$^ß³pÛÅ“æ)¢1ôȺ/‘’šŠf£µ"4:›ü‘ïÞ¼ÉÿŒôùŸó?Ý?Í8>í¡]Í.?ôvÜ©jd` äXƒš‘ìÐÃ%0ûÍy‚NŽ9Çá`^ýëS­›C/ŸC¹§3â'*. 
·>é:1‹ä¿mcÊ6ÀÂÛDO|}=Ù‹°ñ­Ö†¨|̃D)0BƒÀO˜‰*µQÚ‰s ö®!®é¨î†ÄWO_›öTµÑþz_c€s(B:Á¿±ÖàîÁ-Þ’Ò÷ŒšrºÎR[W vc“Ñ¡r)JŽ0€¹pájsaÆ¡¤±Ü/³×9ë¸Z(L´Ö™ª,j¨„RW¥OA˜øGD$o³C‡ýñ>l¢.‚?~S³d Ÿ¦ÈÍ]ÑÜW@›ÍM67qÝ‘Èas:‡‡Ù 9°9/ëÖ|+mù°°¯æ ñC¾&;ü(`o\ìpK–“7ÿ_Qªu 9 ïæ(Z…÷¨Ôs ÊjÐA¡ZöGb$Ïdž…n]ˆ°Éû+2M¶Óþ~¼B¯gÕÍêV+NÝO(¤óÊL&4ÓóÑç`S/8_èeŠ:‰€ÝA#AyCjaXÉã’ÝÒxG{¤3.k“PäM‹Õfß5“HFîɆ§d ²·Dø:ã¼,•ÊΩ(èàœÐAtÈ#Ò F^±ô’ñCªü¥ùÔˆ†A[ „ð^…¡øù JQ#Uz4!eþO\")K¤ VèìíWÀ?‚Wè u“frÇß‚ˆÇŒjÝ4O&í`?c—Sü\P%·” çn*ÂL2ûö>ò‘x²}5 iWf–°°€Ø¹§dÿþxwEöî+ë.¢ÈS·Õù¡é?™09* A 5µ&šóok¢9ÿeÍ;Ï­a¾L>GŽ&s¾_!b-fƤÞòL^ææÌµüÌ\þ5|ºY£Q@5¡òªƒ”D9WÄÕìõ‡û÷Ý{®çlÿåþË}—½ñ–Î@ÜÙèl‚MnÕøTBvY­6ë]©¤úLz8ƒ4 Õɤh{mö&XHTùª‚Þaçû”D`HÝjlÓ´ëöÀ?À}]Ý .á3Š\Òªñ³i(ƒÚw ê+`¡Â§nüÒrO©øjFª{ñÌt û«mBÄendstream endobj 140 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 570 >> stream xœ­ŽÍkÓpÇÓµM2©s6”Mù]˜º‹'_‚ ^VvðâKÙÆt"mÓ—Ø6m:Ú'u}K×5YÚ2µ™ÛÙdSG©ñ²« ¢àŽáARÉA·îâà×¾‡ß ³t`&“©sÈérØ+½h¥[—¬`3ƒÍ’û}оqøí¡õnlïÌ÷½ãæÂ®a—Û,F`ÏL§;þ˜ŒÖÕþ Í¥_Ôoiö÷š.k=ÔšîXq<y~äæÎüÏïGç.L£ÑìÍìùiX=ÒÜ̯®4"Ã:Í%£™X.R„"uA’ÐG2B²¤Ä•!.š¤ÝÒñg ‹÷øÕµÑW!ÔhÐ`ŸrË^¸M:%Ƈ®«M†¦˜GÑ´2Ö7‘ …ÑEøD®\Ë=®dè}MÙÛêÖŸj¶,-›Ã Œ-×·¬óx¥TPfÃêÇ©*Ï ñ>/ñ#ªé“â2]ïˆê)õäÂ`žÉ,à }’«–_öªÛ‹_ëÛ™b²,YÃe)WATu–-ziªçS|_?î s~¦ÄUоŠðNÿ¬Ù…áuݾ—êǯr¤ Ö9ÿT OO³¸Ÿ1láA9…¨/ªñçaW H†2A1¢ o¸2“WVdvW€Gº“U¨‹õiµ¶´äC|yÊ5‚Œ(ñÉÌÄÚ¤X”þ é"@dÉÀ§scwƒà'ã<}áVÆ`ÜAÿ=²‹>`.ž±ubØ_çj!endstream endobj 141 0 obj << /Filter /FlateDecode /Length 3206 >> stream xœµYI“äF¾÷ð…¨›³ˆ)¡\•š€cì`±!ðT˜ƒ›ƒZ•Ý-¦–¶¤rÓü~6oÉÔR¥13D}h•2óå[¿·è‡UžÉUŽñ}¸ÉW7?ÜHz»ŠÿêÃêíöæWß*¹’&ÓÆ©Õöþ†À+e2Ÿ«U‘»Lªrµ=Ü|/þÒ?®7y–kã½³"¬Ççv½ÑÎe¥Ó ·y^ˆª9òB®Å}¨ú3Ÿ·…. œÁF8ºõß·Dvôª2Ê!7Jf^z¿Ú~}³ýå÷â‹}Õu@Ïë,—Nüm]Ú¬,}!Bóð¸Ö2+½“¢oŽo"59¥æ}æmâmw Ls”¬…8­e–çJyìJ$^– ¬kZnºÇ€7[ ûŒèOiÍŠçµ*p“Ç9>R‰¢ç‹r !R‘JÔ(“E6”Ø5? m8òÁ‰~ÿ’Žq+Bö½Æ ’/©–HQW] ‘7¸²«eX+YÔˆjr•¡ªªî…l`%X\¡o›š—sY2OøSÓÏ®ùWèÒaEr›\2¡µr(m)~$ 8[ŠS³‹¤@IOë iFÁû®kîP¢k,(e”àãáÐ>µ§xÈ{qjûæt¬ö‰ÿüŽuˆ< ÝîIèAXv“»æá! Cz´ªèÙçLúe4môâPµä¦Ðâ¡9Þ®QÛ%\Þûfî"%Pk5¼}‰/w$XÞ‰t)xA‡"£2J ]7¯Â.1ªÙÄÌh5è:x÷‡]§ÌÍÿÁu oò…‰®#ÏA_;vp«þ_8ͨM,›þ»6—üpðÕ1ǺäB¨«Om[ŸÂýÓX99õ L¿[äK*B;¼(ìSŠ—œ.êÓ5ðu\Ç>¦O›°…#Cƒ;pÎF‚>Ù‡¯`@¥*™NA²›ì… ­=Ç,ZÒ—iýû^ÊR q§"•…ô”„¸ˆ@,#h&˜åž´@”56¹°Þ5GŠPfš;m6&R6bª®OÔx"@·@”VãLÁi¦ÐÂ~¨ÝÀãSì@ç~¾ƒb<»ŸÅÕË89·ãk¬ï3ŽßQÏ“<9qï8eÆâM;žT]vøA¦êÞÏôý:V²U§¬Œ!ë[GÕõá„Êàvh¤¡FP¸à 5à ©IA_­ÌPÕ|A© ”,õBø™Ÿ´àW“ã>˜fñ‚ÔÔI{ '¥‹^ê4“ûöíW`GÒµ—Bç2E¡r û¸àÇî‚æ;ÆzÖép¦®ÎÝlh489 f$n°ÿ7ÑôøÇGG:Þ¥ F<„chi˜EX;›Ž…+êŠ'[TuHÎ&–#©$ â;<£iqA‡þð=‚d CmÚûô‚c‡Ð¡Àú†“„)5XÜ÷¡ç@¾:’ýÑ“¬å¹Ý÷Ý© ¯†ï8úqs7’ íh®Ž‰“8‰v•vCâh›]Z€´ª6ÖGPÁ?Æóùð½¤ô³z2ÍíAGbàz¸\ÒÅú ÇÿÞæ”æ5‡qEì¬'l®®?xzÜÓ¨¾²¾ò’ËXZ¶<ºç=ípgÓyèwä0½ëÏÇÑ(ZŒk¨µø­qÌ÷çcÍ„qôel©mTÔ FÏ¡AB\p:ØkÒ”š_¨b{;„ ~츎˜Ï@âÕÆ`Ÿ˜3F8t/·7…¿ÿ1|öxendstream endobj 142 0 obj << /Filter /FlateDecode /Length 2600 >> stream xœí[[GFÊÛð€@¼D¨{D¦¨{Õ±e$@ ž û€dça½œ5{Iv7Nàð³ùNõµzº§§wf'B–µ½íºœ:·ï|§Ú_R¨BòŸúçÅõJïV_­Tz[Ô?.®‹?œ­~û7e äµ/Îþ¹ªf¨BÁ†"H/”¦âìzUþw}öžÇSo|zR<ëìíêUùÇõFŠ eôæó³¿ "£›±+ÔÆ‡òâêüþþòÓ‹ó‡ËÛ›gëjrìïHX‰6F ¥bµÆ5¯¡)R(/ÓÖcôå ?{) U ‘*¼ `"/äDP,òÆá•'W­ôÓZråÝÈæF ÉjP“ƒæ)¥buüélõƒu(ÞݯŒ X|eÿy¥¡<¡uáCpBêâo(iÚ7W«¿Oš$Û»1 –—ôQ¨’IôˆI`°t,ÏÚ¦Ø;ž§hUm2=•xaN^XhÖJ_MùO‰DÐV=Åe»íŠM6ãe=ÃøÚø}±" â¢o¬¿+PÈk«[WùIgÛQÒTVÞò`%¥ cGuB;¥ÂÌQ Ý=êŽø™£oH 2>¦¢°,;̽x•½rß2B{Y‹_ÊjD¦€PŒÒ5üÅP™´J §ö¨Yev7x2SX²ÊVsR$)"DÛˆi¼‘Ý*›²D$äå9Ô|ù|½q’'ªjSKD*ŒÄŸV¦Ð¬ðb½Ñ!Š-‚pl?‡•‚i†?ç ¡ÃËgüˆÀõcïmõø¼p…] ’ŠšßŽ—åLi‘ƒjéÊ×¥z½®Ø÷R¸‚†ï”mY«*áþÍK“TQÆ¥N1³ÐY9rfÜ5æI\…¨ôˆzå¨z£ˆÊÇžz÷(êDÚ²ìc8û9+é›/¶wÛ%éÈ®³P–DÚ6êÉÖŽp+‚ Ö£/ï×£’0•_lë_ _ß\>¬Ó?Å 7ØF:S~XkÈd½+··wŸ6:á¡ à&ã^šd§Èi¾ü’uŒˆåö®dË7Õ[ÒTÞ~}ó¶(SQŒ•{ȉœ;';*ÏùÍØiµþm{Ú«”pN²#«5ãZì³Ö±rgè–”ÍX?ê×wÔZ·ÖI[ÞÞ_>\~HÕ¨¶é$ Sé”s• çêøºß^_¾Ý~ `ý[•t~}þpwùm%ª²^—£¦'”RgŠžª)PÙT©)éDZ¾¯ê ã½Iªx‚Sª"›þó¹¤ê½kÅKŠ7RY="ž1Ú°G¼‘õ0*àM6evF†|—÷ÃÂ*ÛYŠÚ²)m«º‰dŘUŸûu9H ƒC°ÿ·Júv2ƒÂдÝñZêÎþaC”¢4§±8«1 Î@ f …žÊ>ã@@­JœÞަNz®ìY{ôTV^NõÕÕGÁj,œãR"$^Ϲ%´oÁëƒ%-”7üqñúœ x 
ʉŠ4c#牱23Ä8øvïÓãÄ¢NNŒã”H•“í”GÈr_4œÔ˜ÜMm$Ä}Ëžž;+Á’†;ë)îìþÖÜÙ„C;=2ÄäüH; ;=íŒý*ªƒ²V×/x 2'¨0Ãïê(?=Ò ÷1|ƒÔ zâÎAh‡?›ë(>Ù9ÈKV†ŒN’–}E£u—0tu8ò&ŽÎÊØENU+¤48~:Hà¼v½ÐÙúVXº•ú|¯.8ÑQS‰”)YnBŽ9c°_±SV­-oo¶»°› ‚¡$!¥og,^ó/ä-c*SŽÂs ®“>}@ó  Y‡´1@Ç!>{‡ük ‹L,œOøŒ«4o–ã³GîIK"|Í©ñYâÜ)Jë.ñ|ngÌâsÏ+<øl¤[ŒÏ¼Á“á3Òß®’¾€’!…yžmín£,ǺåÛl±Ãö0ðƒ«ed>Ö*‚,RuU„™¨"€b†8zø}ÑtˆNUíŒyíU³ÚÍaô€|ʱŠZrx³ûYåF(#ÜȲVHk»Þ{†4wÛww[Ì»óQhíâñôŽíf5ûÙ$Á«”c¤’`¾;¸BTp„á`¬U즸ÿ®ªÄ÷ÉH@kxDtèHÃðã26lU7÷8nlß,‡G"ò’–˜íàgo54æï`{™bçvÇwŒŽÓ+OÁ^[h~Òj´\rQÊ8(NçÛI|€³ß¸]¤÷+¿¸è Ì*?¤(ÛQ¾¹$EåoöÊ·!#,ªsD<sèå—ž^óV,(–©È³Î¨Þ}‘¦àšÎ/–ìí‚Ú€Í7\˜ÿØÛµƒÔV«Çâ+fƉ&³ìëP#ÿöLùBñpà¨Wc<™"jØg msî(O–BLifºäƒ.¨3MÙ”ý6|t€ó¤qær ìH¦ðð7¾M;à$Vxî÷ÃK|‹µ{3Û6'¨n©“Y¤á¥N¦â»r²v²Þíö®“å5ÎÀd59™ ½Á²ëºýê½îZt峤ï6)S–*áÌ©Ȧ<Z}?Î<}ªfîVo´åŽÏ >4¢®>x*¾v’«¼ýFãœkåeû2£¹0§™F™Õ>Âï¡âwEÆÅ8µd̶d,Ó6“±êî[ÃÐ6œ}ùR1eߨãŸÊ—¥Ù7¿âC•} š÷ÖâÇö;ÈÇA¿ã€B²û’ò…änˆ%ÉÖy±alóy‘Qgåáõ$© €7è;ƒ³Y×k®íLÍw : ³€/?â&t†(ƒ1zÎé§&ÊcÖGe 3DYOe8qZy1QvPFí›åDÙ"gñ’2}gódðBžüQµi›&jÓ%ÄÈG ;Ðí¦“=è–ä‚o¶öŒK>?Mz;ZýÓt¤¯Y”ÒuwXÿ™iLJRqÆpyßm¤/ùŨ1´{Árª2ðèÚ2RjÌΔGßµÄ.lž¨vç/œ–ÕîíŒeµ{Ý·[B¸ú…Å$áúí¾·vÇvÑ›¶vwã)Z’ÐÖ.þcþ"å ë‹]ƒ{ºÈÍ?®È­bS`žàz뉭2¬úV™ïJ~ïVÙ—-PÒ2€té"h¨ ³Ãÿ!P†Y>­0¬ñÞýÝ«ò÷k¥ø{ W~X;Ç£]y~yuþæ*}ôiùSŽÁGŸW÷ËPž„ñXÐÞ6…zh+ó—«ÿ·ÞL˜endstream endobj 143 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 1639 >> stream xœµTkPSg>!$çXJ²g»=Ç™ñ²ØŽbK\­´RDÄÖ+µˆ¢ ©È5@’7 7A›ËBF$Æ""–j«£²K],¶:X©v+u]Åy“~ÎÎ`étì¿î9ΙïýÞç½<Ï#¢üý(‘H$]»N±`ìk³¯ÏW,1øWýžadÿ þKÐÙ©±'ø£Œ}ÛsRRw+#çÌýãÖŠ…GÅS‰T4µ‘ZEÅPoSïR5…z  Þ¤‚)%§dåOU‰ÂD{ü¦ø]¿&>ëÏúKfJò%7¤3¥oJ»i çÕzσÓ+ñdÙeè7€)Óå]è‡#,±ÑÛ4°šK§]Æ^Ó1pÀyýqù€YNzµºx\C?½~áê•ÚÄuQÿÏØ÷@†Vh¸ Îôê£{¬ñ [>ØËÈoŽÒäI:-ïúªºd/TC681Üî õ¨šdÏðÈðtù.÷°ý4  ½^¥Mâå·Ÿ§Nb7ž79àôê]¿®ï ú]‘ïI§>ý Y”Ï{DüyùH$lOÊÎb̤Åé–Þž–^`//"2måâ¨äë© ^m#X™@oˆ¦ g[q¯—XEOpF“£¼a,Îr’Y¸F%5D(gÎ-c²¾#-4 ú¡ðÄõæëW¹‹ªõt”2eçj¸” ôê4ö<'¦Ùß+œ±Ë<77?ÀŃMÓåÊÏðî[ž|l{70¶Ëõ·ù ª:^¥Õì)ëªàp èÜÕ±£sã‘ÍÀD¬Ú£´æ9š?±:êJÛ·ùc®žÚ6`z.îXÈï¤å§?{_·R·F¹|·ê=Hb–ލ®pH?œq[º™\ÂΦÃWìÞ¶éC×¹Ï=ÃQ)”×8F§¨Îk£ ÙÖ“'{ΧºŒ“ÎÔ+ ™ „ ¦FaÄé´M_}ò!¿è@©662$òqq ˜ 2YÇobôù°¶3Lã7¾£¡;îaCæ“©›ã×ê÷Ch:í09h¾qbwYæ’ÃÐö–öËšR­9œZ(..È00ét«ñ‚É.¬ºÓЪO„\Ó¾aKk…-)mm—5cq¿J¬ý±‚­Ä¹wšŽ󤆼´ƒ'ÅûYl“ÚÍ`{”8B‚ ój æå£dÆè<¤1° Å–šƒ¦Òb½ö žK S@l„äÖ}ʳÐ'˜ Á„Úq§G4¡1ú ±ÿ?­,†¤¸¼Ÿü‡ÿBÁÖqÙâ´g²¯Ÿ­ìŸ.ÿWzßeÉ̱k‡´›xbŸj3ö™Á®I ˜²ŠN^þ¸Ü{’mSÙö¦gedfØ3ÍM¶6.Ðwã¿ aÜz~KOø­f¢¾U»í/¹ý‹—ôLŒÈ÷ †ºñ·è‚o¾Ø·o± °Zs@Í?(Uç@v¶xïd[?qRÀ?ÿ»´`â¤^p£b¶ýœ`=úBŒwH f0å%Á(KÃxÀù€Ñ'1ƒ0øpÀ3SUfÖ)Þ"òµÜûįˆ…R#kɴωä ºûv 0•åæª_¥Æ|!·Í,|£ÁiëQ‡A¯]+F[V¦ÕÞ’Ëÿ4ç"Y$ȲäOäE";PzÐ1Úò²ÊšïÿŠò/¹^ô«C1Üa„¶—ذËg¬"¯Ä-öÎǼ1Ê×OöNÚ°]Z–ìlÈQs¤´ÓêÉŽQø“ª!ÇjK='Œ˜Ý?‡»E8Õ-ÆE¾b–¼œ<ò¯ðyÉø²Tð´,»/Ä.êÆêa±÷dáVé­Ô[FÙqùá+i«J#áuX^®8µìôÒÁìèƒ!Û§?¶ T|ß2ddwÀڦ܇šK𾄫Ð_ÝgÁ©_j‚vèϵ̩I€e°–Ã[šw Â×)· B@Òç§Âü2nˆ±Û÷;¶Lû¸ØwANaRw0Ïè]ºB—–.u™ï[xïK7F­4Z¡«íàÇRÝÄÑÑÇNÙ¹QLº?áò›ì³'¸¹K}ðP”*n‘@"‰zuaÂ90uÕrä:RêÁ ãþ|­¯ã `îŸ^ñúÆ7ÖÇòd=Ù£Ñ ú8T>ꥥÍ-QIK•%IE%c K¨Ÿ{Á±5` Eýçã5endstream endobj 144 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 844 >> stream xœm’{LSwÇï¥U(ÖGv—4q÷j²D]LöŸhÈ>¦Á¦êG²¨YQnQ Vû¢·½½=åò*}²J[úÊ@E­ñÁ` · s™[Âbf–‰Ñdûó×î§É*Æíÿ;œó=ŸœÏ¡)e EÓôâZmeùËJ DWØRسÊP¦ì)Õ¿¬º³bbåB[µž?QMQÔâcuõúŠeõ µ‡ÒPZj9-RKŠQ”’ºHï gJÊKš¥y·*ÿØžÎsi|$G÷›"·°‰ñvÈAð¢.ÑgmÛâšY‡ÇìVdA ‚CœR;û1¶^¨K5y‹¼MÖ‘wßûa×£Û¹¡Ñ1.ºoÊž„, ƾŠ'¡-à6{D^`­û«„X*Úᓃܶ~Fhjik1¶;Ž6è¯MÞ¿•Á%ñ1îêÜpÐÑl¢$xÜœ*¿á/ó„ÆÊûC?)ð¯…ELÄkâ ¦`k*“J§X¢{Þň—LèÐXÍÄß㘠¿bpÚDöTÅ—GŽjscñž¨/Æõ¥ü)Ò—È>î È}rúе£_Øp˜¬1YB ‡¼]~™M>ÏŽŠøü)çÙfmGOtéî?òŽ"x†MyM‘¨ß2À7òMü™@K:•IfXrâÅfÆ´«êÓý`k‡­3èM@%ŒÁ6½o¨mžžÆ¥Y¬bÿÄÿÈþ„Õ‡F÷…†pæ—¯îa§nÎLô™l’ÙãâTø…=](ÍÑw(ðð› ÿôÿo¬|Þî¶{l`Wk&õ¹ÄùÞ+Iv`$ôík]’ó´ÈZ¾0jŠRxa0á—£²¿¸mžT àmx kÓ \aà€]³[#Šn7¸Ð+t‡~ŸÃËfN^;\Ò oˆ†‘xYUÁ´`žyˆ.j¯c柎`5øQÐ .3¸[öteýÞêÏ=aiVòýëãï×"ë‹öYûþìVŒæ^ºü ß~ÃzÐTöú@ 8j‘D§Ä™kôN+˜Á.¼}PÇl¬ØÜ^¨òøøl&„é›÷¸«Ó“ýÃ0ñ{µÝ:igWR±K•åçÊJ)ê_à‚¦Nendstream endobj 145 0 obj << /Filter /FlateDecode /Length 171 >> stream xœ]1Â0 
E÷œÂ7H“‚Xª,ea!àiâTšDi:p{—20|K¶ÿ·žy9_‚/Àï9š'p>ØŒs\²Apô Ö›òí¨šI'Æû«N¯wBX è¶þ¦'äq4[ÈD‹sÒ³#²®iTçœbìßJnÁíÎV‘¤'źIJ} øn¬—*ÓŽfÉC!p«<>àï·SMÁ*ö§€VÉendstream endobj 146 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 326 >> stream xœ;ÄþCMSY7‹‹ùÄùJ‹ ‹ ‹÷ÍÜ×Ó asteriskmathlatticetop>‡ˆªâøˆøÝ­ øŸ÷™É÷×÷ݯ°™¤—ž“”‹žœ~šy„‰‹‚û0š÷(‰¤m‹xƒu‚mŒz’Ctûæ‘ˆŽ‚‹y~|zw—†”‡÷+L9gf}rxƒ‚‡‹xz˜|’‹”—÷æ{û5užƒ——ž“¡”‡©Š•‰œ„Ó‰¢÷ ;¥w‹”‹˜šœŸ‚ú‹ ù¼ø:¼økù÷Å›£‹£¤t‹zý'|r‹sr¢‹œ÷Åüò{‹t¤£‹£šë0Ѝendstream endobj 147 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 753 >> stream xœuÍ[HSqðÿqÛ9ÇÓ´Q‹ØénW¨ÐnTÚ=×=³¶"Ä,+5V3µ,S§îü¶‰NS£é´4ËåªCa“.vƒ¢zHè¡   ºñû¿Ízê¡ßïå¿/ßGÔ„ã8!É”˜:wÎðif Ý ™4 UV]Rb^Œ|ÕM†‡·åæìÝDH ÙNv3YIæ!ÜAÔ$ƒ<ãLœ/"2â)­Ð…bì}÷ðûýäûùѱytí+ýRWTi‚‚©®K±]”l]‰Æ"ôVCºÄ¶‹RS—U\è1âç>…Âø·¶¯ÆØ¼Vx·úE,÷Ñè ×J﨨›jõU •Õ} «#[¶‚v»r·µ“,Bc{E‰,;Ê‹%&27‹ÅVMO?à˜+ËŽ¥ý‰õ ˜4¤­³ÖC=ÎA­»Ö7¬0K€> rÍ(üeªÜ™#ެWœ’ËO˲Ä&‡™–ÿ0†&5ÛÌW-C!8œ%îšÊ–èýGÎgæd祧Óž¼yÔû°Í¨£P¨:ÀuÐDÝF§è=g\.¨}…oRñ§X’&ÈW ^ ‚øŽgC¤¸Pl°ÕC£‘Fó^Ü­±ðÅS¯gc@Làuô¼]Ád?.·â(,PÑ4ÌÖ7×BÛeûë·¥Œ@Šw%ˆ,>~2ÉÆ~› 8ïæ¯sÞ<(+p”·K¹æÍ׆d4Ü‘üj×uç Ÿ¿UQêÚ#ÁÅF;÷‹ºÐO–‚³/)TV¸>ŒÃ¨vbœŠ~ðêqaßMœȉxˆMÁ‰ÌÊò™Ä¦± ¶DZ©x¢Ùïô(Æ‹LÓ´äˆ?:qÄsxóO8N–Ê’}¯I.€£éEæñëYÔã œý ÀÀÕ—w¿¼?Û=Ь{þµÕîØ[€MŸ¹Ææßwo5ˆ©»2Ívg‘«LÂv½µ(«ì0ˆ[K;êÚÚý¯%7I:ãõ‚&m$!¿§={ñendstream endobj 148 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 771 >> stream xœu’_HSQÇÏqs»–™V •ÚÅ'ë¡0“þ€Q=„¦)ȺNÝ2JÛÚ²¼mêûétÿ®“\w&•µpè ­—襨·¨—zŠRê­‡sÖYÑÕkbApàøø}¾ßï#}Ÿ¨9½«bñY ,iÎxr!_ùúÐÝȺ¢ïë ‘ãüü6áÂ¥VG“]h·‚]8'´wZ/ MÂEUh²Û…ŽŽ¥BˆCÍmÖ¢JÓ&„î 3¨5b ûp?ö£> stream xœñÿCMSY5‹‹øéøe‹ ‹ ‹÷ƒÎËÈ asteriskmath‡§øˆùt­ øŸ÷Û×ø÷¨Ÿ›÷‹”|™wv}{|šûŽvû/è‘‡Ž‚‹{y|u{•–‡ÂuÄwÃxSv/jz„„ˆz…‹uu|›–ªžÞ÷ˆv|û‹|™{ Ÿš™”{÷‰Ÿ÷2,’‡ˆ”‹›š¡›•€T¡RŸSžÃ ç¬œ’’Žœ‘‹¡¡yš{‚‡ˆ…ZµgKendstream endobj 150 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 234 >> stream xœcd`ab`dddwöu041¥ºe~uȰfw³vó0wó°Lúž+tUðÿy¬`qinnbIf~^JfqANb%cc;##K×>Ö%@ɰ|ÜÍ(Ãò ÈýÎûý k[в¢ûoöܲ6ÍÂß!ÝÞ³ý˜øãå¤É½}Ý$»{»ûZ¦þIû)(ñcÛw©îgµS:6?ªº¹¤ú|ÜZ+Ž%l¿y¿¯e•ùÄVÒÝÕÕÑÚÕÑ]ÑÝðcéß ‰?³þ|n¬ïnëî’l˜Ò=WŽOŽ‹¥2Ÿ‡“\„YÌendstream endobj 151 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 456 >> stream xœ½BþCMR7‹wùºù,‹ ‹ ’øHǾ¸=012F±þjøˆú÷¼÷?¼Ñùtù”÷ä𢋤£s‹~ý*~s‹sr¢‹šù)ûp˜£‹£¤t‹|ý(|t‹rs£‹˜øÍw§ù§¿æ÷®æø˜÷Ó‹÷~Ú]Ñl¹M³;‹û|‹û¥CC‹ûŸ÷|÷|‹÷ŸÓû|ûË]‹N¦wÝ}Ƌ݋Ջԋך ÚË ´‹Á‹¿jQ›UŒC‹4‹A‹A~Lw0Guc‹øÍ‹¯ø¤¯·Ÿ÷’Ü÷ãù¦‰ŒoKL0Šb‹g£‹Í‹Â§ü’j‹~'egŒ÷ް‹ª‹÷ˆ¡Š½ #¯e'‹˜¬øÍ‹à÷ìõè¯Êõ÷ƒìø÷JiˆuP~ƒ…>‹}‹ûLôè®§ÇºÕÆÐÉ‹ê÷ !Õûû74/X¶†•£¨œ¯„®WªÒÏ¡º‹ï¿=:4MFkgû…û‚‚‹‰‹oø0 7Ÿ D/ÎÖendstream endobj 152 0 obj << /Type /XRef /Length 171 /Filter /FlateDecode /DecodeParms << /Columns 5 /Predictor 12 >> /W [ 1 3 1 ] /Info 3 0 R /Root 2 0 R /Size 153 /ID [<613080e416e825509457ce5ab62cbf72><7a4b9cd8cbf5be476731b8eee726da9d>] >> stream xœcb&F~0ù‰ $À8JRüÏÀ;ã$Ͷ¶oŸŒ†-5ÃVþ (ly¯BU5DJ1‚HÆ(ɵDòYƒIE)ùDòIÆìõ 6Ï|ÉÑ–‘ìÓÁ&¼“`Îm`ñ) RÈDòÚe¹@$Ó)š¶ñˆd«ä³¹‹ÀzÅ—u}ëZñ“±`‘0°¬ ˜­ÄWŸÉ endstream endobj startxref 122550 %%EOF e1071/inst/doc/svminternals.pdf0000644000175100001440000014116615120610017015771 0ustar hornikusers%PDF-1.5 %¿÷¢þ 1 0 obj << /Type /ObjStm /Length 3056 /Filter /FlateDecode /N 53 /First 419 >> stream xœÍZ[sÓÈ~?¿bÞ–­-2w´µE\d“@pBlñ l9ÑâXYIp~ýéidI#; ð°å(šû¥ûëË´FF$áÜE$çD„$ Zbˆ6! 
[binary PDF content omitted; recoverable embedded metadata: PDF-1.5, produced by GPL Ghostscript 10.02.1 on 2025-12-17 from LaTeX with hyperref]
e1071/inst/NEWS.Rd0000644000175100001440000004356115120304402013045 0ustar hornikusers\name{NEWS} \title{News for Package \pkg{e1071}} \newcommand{\cpkg}{\href{https://CRAN.R-project.org/package=#1}{\pkg{#1}}} \section{Changes in version 1.7-17}{ \itemize{ \item fix author url in vignettes } } \section{Changes in version 1.7-16}{ \itemize{ \item fix bug in svmdoc vignette \item fix possible glitch in svm.cpp regarding memory allocation } } \section{Changes in version 1.7-15}{ \itemize{\item fix bug in \code{read.matrix.csr()}} } \section{Changes in version 1.7-14}{ \itemize{ \item fix incomplete arguments to avoid partial matches \item fix small bug in svm.cpp (\code{Rprintf()} wrongly called) \item fix \code{predict.gknn()} and \code{predict.svm()} to allow for \code{newdata} argument with character variables instead of factors (levels are taken from original data). } } \section{Changes in version 1.7-13}{ \itemize{\item add ORCID ids} } \section{Changes in version 1.7-12}{ \itemize{\item add \code{tune.gknn()} wrapper} } \section{Changes in version 1.7-11}{ \itemize{\item cosmetic changes} } \section{Changes in version 1.7-10}{ \itemize{ \item small fix in \code{coef.svm} for sparse data } } \section{Changes in version 1.7-9}{ \itemize{ \item Remove configure code testing for gcc 2.96. } } \section{Changes in version 1.7-8}{ \itemize{ \item Bugfixes in \code{gknn()}: wrong behavior in case of tied k-nearest neighbors (for \code{use_all=TRUE}), and also in case of an overall class tie. } } \section{Changes in version 1.7-7}{ \itemize{ \item Bugfix in examples of \code{cshell()} } } \section{Changes in version 1.7-6}{ \itemize{ \item Bugfix in \code{scale_data_frame()} - now calls \code{scale()} if x is not a data frame. } } \section{Changes in version 1.7-5}{ \itemize{ \item NaiveBayes: better handling of character and logical features \item Added: \code{gknn()} for generalized k-Nearest Neighbours (using arbitrary proximity measures) \item Added: \code{scale_data_frame()} for scaling the numeric columns of a data frame. } } \section{Changes in version 1.7-4}{ \itemize{ \item Bug fix: the \code{"inverse"} option for the \code{class.weights} argument in \code{svm.default()} did not work } } \section{Changes in version 1.7-2}{ \itemize{ \item Change license to GPL-2 OR GPL-3 \item add coef() method for SVMs with linear kernel } } \section{Changes in version 1.7-1}{ \itemize{ \item add warning in \code{predict.naiveBayes()} if the variable type (numeric/factor) does not match for training and new data.
\item Fix bug in \code{tune()} when parameter space is sampled \item Fix formula interface for NaiveBayes to account for variable removal } } \section{Changes in version 1.7-0}{ \itemize{ \item Bug fix in \code{lca()} \item The \code{class.weights} argument of \code{svm()} now accepts \code{"inverse"}, setting the weights inversely proportional to the class distribution \item \code{predict.naiveBayes} now fixes the factor levels of \code{newdata} to be identical with the training data. \item libsvm updated to version 3.23 } } \section{Changes in version 1.6-8}{ \itemize{ \item add and use native symbols for C-code \item \code{naiveBayes()} now supports logical variables } } \section{Changes in version 1.6-7}{ \itemize{ \item fix a bug in handling weights in \code{svm.default()} } } \section{Changes in version 1.6-6}{ \itemize{ \item fix numeric issue in \code{classAgreement()} \item add functions from recommended packages to NAMESPACE \item fix bug in svm.default (incorrect handling of subset= argument) \item fix bug in predict.svm (new data with NA in response got removed) \item residuals are now correctly computed for regression in case of scaled data } } \section{Changes in version 1.6-5}{ \itemize{ \item \code{hamming.distance()} no longer converts input to binary \item \code{tune()} now uses \code{mean()} to aggregate error measures from cross-fold replications } } \section{Changes in version 1.6-4}{ \itemize{ \item remove library("SparseM") statements in code, use namespace semantics instead \item Fix memory leak and uninitialized read error in \code{write.svm()} \item add warning in \code{predict.svm()} if probabilities should be predicted, but the model was not trained with \code{probability = TRUE} \item add \code{eps} to laplace smoothing in \code{predict.naiveBayes()} to account for close-zero probabilities also. \item use R's random number generator for cross-validation and probability computation instead of the system one. } } \section{Changes in version 1.6-3}{ \itemize{ \item remove require() statements and dependency on stats } } \section{Changes in version 1.6-2}{ \itemize{ \item vignettes moved to \code{vignettes} folder. \item libsvm upgrade to version 3.17, getting rid of stdout and stderr } } \section{Changes in version 1.6-1}{ \itemize{ \item \code{write.matrix.csr()} now accepts a \code{fac} argument similar to \code{read.matrix.csr()}, writing factor levels instead of the numeric codes. \item \code{naiveBayes()} uses a numerically more stable formula for calculating the a-posterior probabilities. \item \code{predict.naiveBayes()} now accepts data with predictors in an order different from the training data, and also ignores variables not in the model (especially the response variable). \item \code{svm()} checks whether parameters which are passed to the C-code are set to NULL to avoid segfaults. } } \section{Changes in version 1.6}{ \itemize{ \item bug fix in tune with sparse matrices \item version bump of libsvm to 3.1 \item Fixed partial argument matching in several places \item NEWS file changed to .Rd format and moved to \file{inst/} } } \section{Changes in version 1.5-28}{ \itemize{ \item bug fix in svm cross validation } } \section{Changes in version 1.5-27}{ \itemize{ \item \code{svm()} now allows setting the random seed for libsvm. } } \section{Changes in version 1.5-26}{ \itemize{ \item \code{tune()} now allows user-specified error functionals.
} } \section{Changes in version 1.5-25}{ \itemize{ \item add auto-coercion from Matrix and simple_triplet_matrix objects to \code{predict.svm()} \item Bug fix in \code{tune.svm()}: when a data frame was provided as validation sample, the response variable was not correctly extracted } } \section{Changes in version 1.5-24}{ \itemize{ \item Cosmetics: use \code{sQuote()} instead of hard-coded quotes in warnings and error messages in several places \item Bug fix in labeling of decision values \item add \code{decision.values} of fitted values to an svm object } } \section{Changes in version 1.5-23}{ \itemize{ \item Bug fix in \code{svm()}: Error messages returned by the C function have not been correctly handled, causing segfaults. } } \section{Changes in version 1.5-22}{ \itemize{ \item minor fix } } \section{Changes in version 1.5-21}{ \itemize{ \item Allow simple_triplet_matrix objects for \code{svm()} } } \section{Changes in version 1.5-20}{ \itemize{ \item More flexible interface to \code{naiveBayes()} \item Fix bugs in docs for \code{kurtosis()} } } \section{Changes in version 1.5-19}{ \itemize{ \item Fix bugs in \code{read.matrix.csr()} and \code{write.matrix.csr()} \item Allow Matrix objects for \code{svm()} \item Version bump of libsvm to 2.88 } } \section{Changes in version 1.5-18}{ \itemize{ \item Improve \file{DESCRIPTION} install metadata } } \section{Changes in version 1.5-17}{ \itemize{ \item \code{tune()} now also returns a dispersion measure of all training samples. \item Bootstrap is done \emph{with} replacement. \item \code{tune.svm()} now also accepts the \code{epsilon} parameter. } } \section{Changes in version 1.5-16}{ \itemize{ \item \code{write.svm()} now also stores the scaling information for the dependent variable. \item data sets Glass, HouseVotes84, and Ozone removed (are in package \cpkg{mlbench}) \item merged help pages for \code{naiveBayes()} and \code{predict.naiveBayes()} } } \section{Changes in version 1.5-15}{ \itemize{ \item Bug in \file{NAMESPACE} file fixed (conditional import from \pkg{utils} failed in R 2.3.1) } } \section{Changes in version 1.5-14}{ \itemize{ \item \code{predict.naiveBayes()} sped up \item Bug fix in \code{plot.svm()} (error in case of training categories without predictions) \item \pkg{methods} now added to \samp{Suggests}, and \pkg{grDevices} to \samp{Imports} } } \section{Changes in version 1.5-13}{ \itemize{ \item Bug fix: sparse handling was broken since 1.5-9 } } \section{Changes in version 1.5-12}{ \itemize{ \item update to libsvm 2.81 \item laplace smoothing added to \code{naiveBayes()} } } \section{Changes in version 1.5-11}{ \itemize{ \item \code{tune()}: allow list of vectors as tune parameter range so that class.weights in svm-models can be tuned \item better default color palette for \code{plot.tune()} \item New function \code{probplot()} for probability plots } } \section{Changes in version 1.5-10}{ \itemize{ \item Bug fix: class probability prediction was broken since 1.5-9 } } \section{Changes in version 1.5-9}{ \itemize{ \item \code{tune()} now returns the split indices into training/validation set. Information about cross validation added \item \code{plot.svm()}: wrong labeling order in levels fixed \item \code{predict.svm()} now adds row numbers to predictions, and correctly handles the \code{na.action} argument using \code{napredict()}.
} } \section{Changes in version 1.5-8}{ \itemize{ \item Update to libsvm 2.8 (uses a faster optimization algorithm) } } \section{Changes in version 1.5-7}{ \itemize{ \item \code{read.matrix.csr()} did not work correctly with matrix-only objects. \item \code{svm()}: Fixed wrong labeling for predicted decision values and probabilities in case of a Class factor created from a non-ordered character vector } } \section{Changes in version 1.5-6}{ \itemize{ \item \code{cmeans()} is substantially enhanced, with a complete rewrite of the underlying C code. It is now possible to specify case weights and the relative convergence tolerance. For Manhattan distances, centers are correctly computed as suitably weighted medians (rather than means) of the observations. The print method for fclust objects is now more in parallel with related methods, and registered in the name space. } } \section{Changes in version 1.5-5}{ \itemize{ \item \code{read.octave()} is now deprecated in favor of a substantially enhanced version in package \pkg{foreign} for reading in files in Octave text data format. } } \section{Changes in version 1.5-4}{ \itemize{ \item Use lazy loading } } \section{Changes in version 1.5-3}{ \itemize{ \item New arguments in \code{plot.svm()} for customizing plot symbols and colors \item Fix of broken code in \code{plot.svm()} for the \code{fill = FALSE} (non-default) case } } \section{Changes in version 1.5-2}{ \itemize{ \item Fixed memory leak in \code{svm()} } } \section{Changes in version 1.5-1}{ \itemize{ \item Fixed C++ style comments } } \section{Changes in version 1.5-0}{ \itemize{ \item Example for weighting added in \code{svm()} help page \item upgrade to libsvm 2.6: support for probabilities added } } \section{Changes in version 1.4-1}{ \itemize{ \item \code{naiveBayes()} is more accurate for small probabilities \item the \code{call} component is more sensible in \code{tune()}, \code{tune.foo()}, and \code{best.foo()} objects. \item \code{control} parameter of \code{tune()} changed to \code{tunecontrol} to solve name space conflict with training methods using \code{control} themselves \item new function \code{matchControls()} \item fixed a bug in \code{bclust()} triggered when a cluster had only one center } } \section{Changes in version 1.4-0}{ \itemize{ \item adjusted to restructuring of R base packages \item added a \file{NAMESPACE} file \item Function \code{write.svm()} now also creates a file with scaling information } } \section{Changes in version 1.3.16}{ \itemize{ \item Small bug fixes in \code{predict.svm()} and \code{plot.svm()} \item Function \code{write.svm()} added which saves models created with \code{svm()} in the format libsvm can read. } } \section{Changes in version 1.3.15}{ \itemize{ \item Bug fix in \code{plot.svm()}: non-SVs had wrong colors \item data sets Ozone and Glass added } } \section{Changes in version 1.3.14}{ \itemize{ \item Several documentation bug fixes (for functions \code{plot.bclust()}, \code{impute()}, \code{stft()}, \code{svm.formula()}, \code{svm.default()}) \item upgrade to libsvm 2.5. New feature: \code{predict.svm()} optionally returns decision values for multi-class classification \item svm-vignette gave warnings due to rank deficiency in Ozone data \item \code{naiveBayes()} now also supports metric predictors, and the standard interface.
} } \section{Changes in version 1.3.13}{ \itemize{ \item Bug fixes in svm: \itemize{ \item Prediction of a single observation gave an error \item Only \eqn{k} instead of \eqn{k*(k-1)/2} \eqn{\rho} coefficients have been returned by svm (\eqn{k} number of classes), causing nonsensical results for \eqn{k > 3}. } \item The \file{svmdoc} file in \file{inst/doc} now is a vignette. } } \section{Changes in version 1.3-12}{ \itemize{ \item The \code{x} argument of \code{cmeans()} and \code{bclust()} is now automatically coerced to a matrix. \item Started \file{tests} directory \item New method: \code{naiveBayes()} classifier for categorical predictors \item optimization of \code{read.matrix.csr()} which used to be rather slow \item Bug fixes for the \code{svm()} interface: when the data included categorical predictors, the scaling procedure did not only affect the metric variables, but also the binary variables in the model matrix. \item Function \code{scaclust()} removed. Bug has to be fixed. } } \section{Changes in version 1.3-10}{ \itemize{ \item Now supports libsvm 2.4 } } \section{Changes in version 1.3-9}{ \itemize{ \item \code{rdiscrete()} is now simply a wrapper for \code{sample()} and provided for backwards compatibility only. \item Minor bug fixes in \code{svm()} and \code{tune()} (mostly interface issues). New plot function for objects of class \code{svm} working for the 2d-classification case. } } \section{Changes in version 1.3-7}{ \itemize{ \item \code{svm()} now supports the matrix.csr format, as handled by the \cpkg{SparseM} package. Predictors and response variable (if numeric) are scaled per default. \item A new \code{plot()} function for \code{svm()} objects visualizes classification models by plotting data and support vectors in the data input space, along with the class borders. \item A new generic \code{tune()} function allows parameter tuning of arbitrary functions using, e.g., bootstrapping, or cross validation. Several convenience wrappers (e.g., for \code{svm()}, \code{nnet()}, and \code{rpart()}) exist. } } \section{Changes in version 1.3-3}{ \itemize{ \item Bug fixes in various bclust routines: \code{stop()} if required packages are not found \item \code{svm()} now interfaces LIBSVM 2.35 which is a bug fix release. A call with invalid parameters now no longer causes R to be terminated, and the C(++) code became completely silent. \item Bugs fixed in \code{fclustIndex()} function and \code{print.fclust()}. } } \section{Changes in version 1.3-1}{ \itemize{ \item Functions \code{rmvnorm()} and \code{dmvnorm()} for multivariate normal distributions have been moved to package \cpkg{mvtnorm}. \item Bug fixes in \code{print.fclust()} and \code{fclustIndex()}. \item fixed \file{floyd.c} (ANSI C pedantic warnings) } } \section{Changes in version 1.2-1}{ \itemize{ \item Bug fixes in \file{cmeans.c}, \file{cshell.c} and \file{scaclust.c} (R header files included and unused variables removed) \item Bug fixes in \file{Rsvm.c} and \file{svm.R} (incomplete list of returned Support Vectors). \item Encapsulate kmeans call in \code{bclust()} in a \code{try()} construct, because kmeans gives an error when a cluster becomes empty (which can happen for almost every data set from time to time). } } \section{Changes in version 1.2-0}{ \itemize{ \item Added functions for bagged clustering, see help(bclust). \item \code{read.pnm()} and \code{write.pgm()} have been removed from \cpkg{e1071}, much improved versions can now be found in the new package \cpkg{pixmap}.
\item Lots of documentation updates and bugfixes. \item Support Vector Machine interface now upgraded to libsvm V. 2.31 featuring: \itemize{ \item Multi-Class Classification \item weighting of classes for C-classification (for asymmetric sample sizes) \item \eqn{\nu}-regression \item Formula Interface \item \eqn{k}-fold cross-validation } In addition, an introductory article is provided in directory \file{docs/} (\file{svmdoc.pdf}). \item \code{classAgreement()} now features an option to match factor levels \item updated API design for the fuzzy clustering functions (\code{cmeans()}, \code{cshell()}, \code{scaclust()}). Documentation updates and function name changes (\code{cmeanscl()} to \code{cmeans()}, \code{validity.measures()} to \code{fclustIndex()}) } } e1071/build/0000755000175100001440000000000015120610017012115 5ustar hornikuserse1071/build/vignette.rds0000644000175100001440000000053015120610017014452 0ustar hornikuserse1071/build/partial.rdb0000644000175100001440000000007515120610005014241 0ustar hornikuserse1071/man/0000755000175100001440000000000015120307527011602 5ustar hornikuserse1071/man/matchControls.Rd0000644000175100001440000000547214671763131014710 0ustar hornikusers\name{matchControls} \alias{matchControls} \title{Find Matched Control Group} \usage{ matchControls(formula, data = list(), subset, contlabel = "con", caselabel = NULL, dogrep = TRUE, replace = FALSE) } \arguments{ \item{formula}{A formula indicating cases, controls and the variables to be matched. Details are described below.} \item{data}{an optional data frame containing the variables in the model. By default the variables are taken from the environment which \code{matchControls} is called from.} \item{subset}{an optional vector specifying a subset of observations to be used in the matching process.} \item{contlabel}{A string giving the label of the control group.} \item{caselabel}{A string giving the labels of the cases.} \item{dogrep}{If \code{TRUE}, then \code{contlabel} and \code{caselabel} are matched using \code{\link{grep}}, else string comparison (exact equality) is used.} \item{replace}{If \code{FALSE}, then every control is used only once.} } \description{ Finds controls matching the cases as well as possible. } \details{ The left hand side of the \code{formula} must be a factor determining whether an observation belongs to the case or the control group. By default, all observations where a grep of \code{contlabel} matches are used as possible controls, the rest is taken as cases. If \code{caselabel} is given, then only those observations are taken as cases. If \code{dogrep = TRUE}, then both \code{contlabel} and \code{caselabel} can be regular expressions. The right hand side of the \code{formula} gives the variables that should be matched. The matching is done using the \code{\link[cluster]{daisy}} distance from the \code{cluster} package, i.e., a model frame is built from the formula and used as input for \code{\link[cluster]{daisy}}. For each case, the nearest control is selected. If \code{replace = FALSE}, each control is used only once.
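For illustration, a minimal sketch of this greedy nearest-control selection (the helper \code{match_greedy} is hypothetical and not part of the package; it assumes a dissimilarity matrix with at least as many controls as cases): \preformatted{
## sketch only: greedy nearest-control matching, assuming a
## dissimilarity matrix d (cases in rows, controls in columns,
## both with dimnames), e.g. as computed by cluster::daisy()
match_greedy <- function(d) {
    controls <- character(nrow(d))
    free <- colnames(d)
    for (i in seq_len(nrow(d))) {
        j <- free[which.min(d[i, free])]  # nearest still unused control
        controls[i] <- j
        free <- setdiff(free, j)          # replace = FALSE: use each once
    }
    controls
}
}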
} \value{ Returns a list with components \item{cases}{Row names of cases.} \item{controls}{Row names of matched controls.} \item{factor}{A factor with 2 levels indicating cases and controls (the rest is set to \code{NA}).} } \author{Friedrich Leisch} \examples{ Age.case <- 40 + 5 * rnorm(50) Age.cont <- 45 + 10 * rnorm(150) Age <- c(Age.case, Age.cont) Sex.case <- sample(c("M", "F"), 50, prob = c(.4, .6), replace = TRUE) Sex.cont <- sample(c("M", "F"), 150, prob = c(.6, .4), replace = TRUE) Sex <- as.factor(c(Sex.case, Sex.cont)) casecont <- as.factor(c(rep("case", 50), rep("cont", 150))) ## now look at the group properties: boxplot(Age ~ casecont) barplot(table(Sex, casecont), beside = TRUE) m <- matchControls(casecont ~ Sex + Age) ## properties of the new groups: boxplot(Age ~ m$factor) barplot(table(Sex, m$factor)) } \keyword{manip} e1071/man/svm.Rd0000644000175100001440000002546714173734133012710 0ustar hornikusers\name{svm} \alias{svm} \alias{svm.default} \alias{svm.formula} \alias{summary.svm} \alias{print.summary.svm} \alias{coef.svm} \alias{print.svm} \title{Support Vector Machines} \description{ \code{svm} is used to train a support vector machine. It can be used to carry out general regression and classification (of nu and epsilon-type), as well as density-estimation. A formula interface is provided. } \usage{ \method{svm}{formula}(formula, data = NULL, ..., subset, na.action = na.omit, scale = TRUE) \method{svm}{default}(x, y = NULL, scale = TRUE, type = NULL, kernel = "radial", degree = 3, gamma = if (is.vector(x)) 1 else 1 / ncol(x), coef0 = 0, cost = 1, nu = 0.5, class.weights = NULL, cachesize = 40, tolerance = 0.001, epsilon = 0.1, shrinking = TRUE, cross = 0, probability = FALSE, fitted = TRUE, ..., subset, na.action = na.omit) } \arguments{ \item{formula}{a symbolic description of the model to be fit.} \item{data}{an optional data frame containing the variables in the model. By default the variables are taken from the environment which \sQuote{svm} is called from.} \item{x}{a data matrix, a vector, or a sparse matrix (object of class \code{\link[Matrix]{Matrix}} provided by the \pkg{Matrix} package, or of class \code{\link[SparseM]{matrix.csr}} provided by the \pkg{SparseM} package, or of class \code{\link[slam]{simple_triplet_matrix}} provided by the \pkg{slam} package).} \item{y}{a response vector with one label for each row/component of \code{x}. Can be either a factor (for classification tasks) or a numeric vector (for regression).} \item{scale}{A logical vector indicating the variables to be scaled. If \code{scale} is of length 1, the value is recycled as many times as needed. Per default, data are scaled internally (both \code{x} and \code{y} variables) to zero mean and unit variance. The center and scale values are returned and used for later predictions.} \item{type}{\code{svm} can be used as a classification machine, as a regression machine, or for novelty detection. Depending on whether \code{y} is a factor or not, the default setting for \code{type} is \code{C-classification} or \code{eps-regression}, respectively, but may be overwritten by setting an explicit value.\cr Valid options are: \itemize{ \item \code{C-classification} \item \code{nu-classification} \item \code{one-classification} (for novelty detection) \item \code{eps-regression} \item \code{nu-regression} } } \item{kernel}{the kernel used in training and predicting.
You might consider changing some of the following parameters, depending on the kernel type.\cr \describe{ \item{linear:}{\eqn{u'v}{u'*v}} \item{polynomial:}{\eqn{(\gamma u'v + coef0)^{degree}}{(gamma*u'*v + coef0)^degree}} \item{radial basis:}{\eqn{e^{-\gamma |u-v|^2}}{exp(-gamma*|u-v|^2)}} \item{sigmoid:}{\eqn{tanh(\gamma u'v + coef0)}{tanh(gamma*u'*v + coef0)}} } } \item{degree}{parameter needed for kernel of type \code{polynomial} (default: 3)} \item{gamma}{parameter needed for all kernels except \code{linear} (default: 1/(data dimension))} \item{coef0}{parameter needed for kernels of type \code{polynomial} and \code{sigmoid} (default: 0)} \item{cost}{cost of constraints violation (default: 1)---it is the \sQuote{C}-constant of the regularization term in the Lagrange formulation.} \item{nu}{parameter needed for \code{nu-classification}, \code{nu-regression}, and \code{one-classification}} \item{class.weights}{a named vector of weights for the different classes, used for asymmetric class sizes. Not all factor levels have to be supplied (default weight: 1). All components have to be named. Specifying \code{"inverse"} will choose the weights \emph{inversely} proportional to the class distribution.} \item{cachesize}{cache memory in MB (default 40)} \item{tolerance}{tolerance of termination criterion (default: 0.001)} \item{epsilon}{epsilon in the insensitive-loss function (default: 0.1)} \item{shrinking}{option whether to use the shrinking-heuristics (default: \code{TRUE})} \item{cross}{if an integer value k>0 is specified, a k-fold cross validation on the training data is performed to assess the quality of the model: the accuracy rate for classification and the Mean Squared Error for regression} \item{fitted}{logical indicating whether the fitted values should be computed and included in the model or not (default: \code{TRUE})} \item{probability}{logical indicating whether the model should allow for probability predictions.} \item{\dots}{additional parameters for the low level fitting function \code{svm.default}} \item{subset}{An index vector specifying the cases to be used in the training sample. (NOTE: If given, this argument must be named.)} \item{na.action}{A function to specify the action to be taken if \code{NA}s are found. The default action is \code{na.omit}, which leads to rejection of cases with missing values on any required variable. An alternative is \code{na.fail}, which causes an error if \code{NA} cases are found. (NOTE: If given, this argument must be named.)} } \value{ An object of class \code{"svm"} containing the fitted model, including: \item{SV}{The resulting support vectors (possibly scaled).} \item{index}{The index of the resulting support vectors in the data matrix.
Note that this index refers to the preprocessed data (after the possible effect of \code{na.omit} and \code{subset})} \item{coefs}{The corresponding coefficients times the training labels.} \item{rho}{The negative intercept.} \item{sigma}{In case of a probabilistic regression model, the scale parameter of the hypothesized (zero-mean) laplace distribution estimated by maximum likelihood.} \item{probA, probB}{numeric vectors of length k(k-1)/2, k number of classes, containing the parameters of the logistic distributions fitted to the decision values of the binary classifiers (1 / (1 + exp(a x + b))).} } \details{ For multiclass-classification with k levels, k>2, \code{libsvm} uses the \sQuote{one-against-one}-approach, in which k(k-1)/2 binary classifiers are trained; the appropriate class is found by a voting scheme. \code{libsvm} internally uses a sparse data representation, for which the package \pkg{SparseM} also provides high-level support. If the predictor variables include factors, the formula interface must be used to get a correct model matrix. \code{plot.svm} allows a simple graphical visualization of classification models. The probability model for classification fits a logistic distribution using maximum likelihood to the decision values of all binary classifiers, and computes the a-posteriori class probabilities for the multi-class problem using quadratic optimization. The probabilistic regression model assumes (zero-mean) laplace-distributed errors for the predictions, and estimates the scale parameter using maximum likelihood. For the linear kernel, the coefficients of the regression/decision hyperplane can be extracted using the \code{coef} method (see examples). } \note{ Data are scaled internally, usually yielding better results. Parameters of SVM-models usually \emph{must} be tuned to yield sensible results! } \references{ \itemize{ \item Chang, Chih-Chung and Lin, Chih-Jen:\cr \emph{LIBSVM: a library for Support Vector Machines}\cr \url{https://www.csie.ntu.edu.tw/~cjlin/libsvm/} \item Exact formulations of models, algorithms, etc.
can be found in the document:\cr Chang, Chih-Chung and Lin, Chih-Jen:\cr \emph{LIBSVM: a library for Support Vector Machines}\cr \url{https://www.csie.ntu.edu.tw/~cjlin/papers/libsvm.ps.gz} \item More implementation details and speed benchmarks can be found at: Rong-En Fan and Pai-Hsuen Chen and Chih-Jen Lin:\cr \emph{Working Set Selection Using the Second Order Information for Training SVM}\cr \url{https://www.csie.ntu.edu.tw/~cjlin/papers/quadworkset.pdf} } } \author{ David Meyer (based on C/C++-code by Chih-Chung Chang and Chih-Jen Lin)\cr \email{David.Meyer@R-project.org} } \seealso{ \code{\link{predict.svm}} \code{\link{plot.svm}} \code{\link{tune.svm}} \code{\link[SparseM]{matrix.csr}} (in package \pkg{SparseM}) } \examples{ data(iris) attach(iris) ## classification mode # default with factor response: model <- svm(Species ~ ., data = iris) # alternatively the traditional interface: x <- subset(iris, select = -Species) y <- Species model <- svm(x, y) print(model) summary(model) # test with train data pred <- predict(model, x) # (same as:) pred <- fitted(model) # Check accuracy: table(pred, y) # compute decision values and probabilities: pred <- predict(model, x, decision.values = TRUE) attr(pred, "decision.values")[1:4,] # visualize (classes by color, SV by crosses): plot(cmdscale(dist(iris[,-5])), col = as.integer(iris[,5]), pch = c("o","+")[1:150 \%in\% model$index + 1]) ## try regression mode on two dimensions # create data x <- seq(0.1, 5, by = 0.05) y <- log(x) + rnorm(x, sd = 0.2) # estimate model and predict input values m <- svm(x, y) new <- predict(m, x) # visualize plot(x, y) points(x, log(x), col = 2) points(x, new, col = 4) ## density-estimation # create 2-dim. normal with rho=0: X <- data.frame(a = rnorm(1000), b = rnorm(1000)) attach(X) # traditional way: m <- svm(X, gamma = 0.1) # formula interface: m <- svm(~., data = X, gamma = 0.1) # or: m <- svm(~ a + b, gamma = 0.1) # test: newdata <- data.frame(a = c(0, 4), b = c(0, 4)) predict (m, newdata) # visualize: plot(X, col = 1:1000 \%in\% m$index + 1, xlim = c(-5,5), ylim=c(-5,5)) points(newdata, pch = "+", col = 2, cex = 5) ## weights: (example not particularly sensible) i2 <- iris levels(i2$Species)[3] <- "versicolor" summary(i2$Species) wts <- 100 / table(i2$Species) wts m <- svm(Species ~ ., data = i2, class.weights = wts) ## extract coefficients for linear kernel # a. regression x <- 1:100 y <- x + rnorm(100) m <- svm(y ~ x, scale = FALSE, kernel = "linear") coef(m) plot(y ~ x) abline(m, col = "red") # b. classification # transform iris data to binary problem, and scale data setosa <- as.factor(iris$Species == "setosa") iris2 <- scale(iris[,-5]) # fit binary C-classification model m <- svm(setosa ~ Petal.Width + Petal.Length, data = iris2, kernel = "linear") # plot data and separating hyperplane plot(Petal.Length ~ Petal.Width, data = iris2, col = setosa) (cf <- coef(m)) abline(-cf[1]/cf[3], -cf[2]/cf[3], col = "red") # plot margin and mark support vectors abline(-(cf[1] + 1)/cf[3], -cf[2]/cf[3], col = "blue") abline(-(cf[1] - 1)/cf[3], -cf[2]/cf[3], col = "blue") points(m$SV, pch = 5, cex = 2) } \keyword{neural} \keyword{nonlinear} \keyword{classif} e1071/man/e1071-deprecated.Rd0000755000175100001440000000051214173734134014732 0ustar hornikusers\name{e1071-deprecated} \alias{e1071-deprecated} \title{Deprecated Functions in Package e1071} \description{ These functions are provided for compatibility with older versions of package \pkg{e1071} only, and may be defunct as soon as of the next release.
} %\usage{ %} \seealso{ \code{\link{Deprecated}} } \keyword{misc} e1071/man/plot.svm.Rd0000755000175100001440000000437314173734134013662 0ustar hornikusers\name{plot.svm} \alias{plot.svm} %- Also NEED an `\alias' for EACH other topic documented here. \title{Plot SVM Objects} \description{ Generates a scatter plot of the input data of an \code{svm} fit for classification models by highlighting the classes and support vectors. Optionally, draws a filled contour plot of the class regions. } \usage{ \method{plot}{svm}(x, data, formula, fill = TRUE, grid = 50, slice = list(), symbolPalette = palette(), svSymbol = "x", dataSymbol = "o", ...) } %- maybe also `usage' for other objects documented here. \arguments{ \item{x}{An object of class \code{svm}} \item{data}{data to visualize. Should be the same data used for fitting.} \item{formula}{formula selecting the visualized two dimensions. Only needed if more than two input variables are used.} \item{fill}{switch indicating whether a contour plot for the class regions should be added.} \item{grid}{granularity for the contour plot.} \item{slice}{a list of named values for the dimensions held constant (only needed if more than two variables are used). The defaults for unspecified dimensions are 0 (for numeric variables) and the first level (for factors). Factor levels can either be specified as factors or character vectors of length 1.} \item{symbolPalette}{Color palette used for the class the data points and support vectors belong to.} \item{svSymbol}{Symbol used for support vectors.} \item{dataSymbol}{Symbol used for data points (other than support vectors).} \item{\dots}{additional graphics parameters passed to \code{filled.contour} and \code{plot}.} } \author{David Meyer\cr \email{David.Meyer@R-project.org}} \seealso{\code{\link{svm}}} \examples{ ## a simple example data(cats, package = "MASS") m <- svm(Sex~., data = cats) plot(m, cats) ## more than two variables: fix 2 dimensions data(iris) m2 <- svm(Species~., data = iris) plot(m2, iris, Petal.Width ~ Petal.Length, slice = list(Sepal.Width = 3, Sepal.Length = 4)) ## plot with custom symbols and colors plot(m, cats, svSymbol = 1, dataSymbol = 2, symbolPalette = rainbow(4), color.palette = terrain.colors) } \keyword{neural}% at least one, from doc/KEYWORDS \keyword{classif}% __ONLY ONE__ keyword per line \keyword{nonlinear}% __ONLY ONE__ keyword per line e1071/man/naiveBayes.Rd0000755000175100001440000000764314173734134014165 0ustar hornikusers\name{naiveBayes} \alias{naiveBayes} \alias{naiveBayes.default} \alias{naiveBayes.formula} \alias{print.naiveBayes} \alias{predict.naiveBayes} \title{Naive Bayes Classifier} \description{ Computes the conditional a-posterior probabilities of a categorical class variable given independent predictor variables using the Bayes rule. } \usage{ \S3method{naiveBayes}{formula}(formula, data, laplace = 0, ..., subset, na.action = na.pass) \S3method{naiveBayes}{default}(x, y, laplace = 0, ...) \S3method{predict}{naiveBayes}(object, newdata, type = c("class", "raw"), threshold = 0.001, eps = 0, ...) } \arguments{ \item{x}{A numeric matrix, or a data frame of categorical and/or numeric variables.} \item{y}{Class vector.} \item{formula}{A formula of the form \code{class ~ x1 + x2 + \dots}. Interactions are not allowed.} \item{data}{Either a data frame of predictors (categorical and/or numeric) or a contingency table.} \item{laplace}{positive double controlling Laplace smoothing.
The default (0) disables Laplace smoothing.} \item{\dots}{Currently not used.} \item{subset}{For data given in a data frame, an index vector specifying the cases to be used in the training sample. (NOTE: If given, this argument must be named.)} \item{na.action}{A function to specify the action to be taken if \code{NA}s are found. The default action is not to count them for the computation of the probability factors. An alternative is \code{na.omit}, which leads to rejection of cases with missing values on any required variable. (NOTE: If given, this argument must be named.)} \item{object}{An object of class \code{"naiveBayes"}.} \item{newdata}{A data frame with new predictors (with possibly fewer columns than the training data). Note that the column names of \code{newdata} are matched against the training data ones.} \item{type}{If \code{"raw"}, the conditional a-posterior probabilities for each class are returned; otherwise, the class with maximal probability.} \item{threshold}{Value replacing cells with probabilities within \code{eps} range.} \item{eps}{double for specifying an epsilon-range to apply Laplace smoothing (to replace zero or close-zero probabilities by \code{threshold}).} } \value{ An object of class \code{"naiveBayes"} including components: \item{apriori}{Class distribution for the dependent variable.} \item{tables}{A list of tables, one for each predictor variable. For each categorical variable a table giving, for each attribute level, the conditional probabilities given the target class. For each numeric variable, a table giving, for each target class, mean and standard deviation of the (sub-)variable.} } \details{ The standard naive Bayes classifier (at least this implementation) assumes independence of the predictor variables, and Gaussian distribution (given the target class) of metric predictors. For attributes with missing values, the corresponding table entries are omitted for prediction.
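As a toy illustration of the underlying computation (a hand-rolled sketch of the Gaussian case for a single numeric predictor; this is not the code used by the package): \preformatted{
## sketch: Gaussian naive Bayes posterior for one numeric predictor
x <- iris$Petal.Length; y <- iris$Species; new <- 4.5
prior <- table(y) / length(y)               # class priors
lik   <- sapply(levels(y), function(cl)     # Gaussian likelihoods
    dnorm(new, mean(x[y == cl]), sd(x[y == cl])))
post  <- prior * lik
post / sum(post)                            # a-posterior probabilities
} } \author{David Meyer \email{David.Meyer@R-project.org}.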
Laplace smoothing enhancement by Jinghao Xue.} \examples{ ## Categorical data only: data(HouseVotes84, package = "mlbench") model <- naiveBayes(Class ~ ., data = HouseVotes84) predict(model, HouseVotes84[1:10,]) predict(model, HouseVotes84[1:10,], type = "raw") pred <- predict(model, HouseVotes84) table(pred, HouseVotes84$Class) ## using laplace smoothing: model <- naiveBayes(Class ~ ., data = HouseVotes84, laplace = 3) pred <- predict(model, HouseVotes84[,-1]) table(pred, HouseVotes84$Class) ## Example of using a contingency table: data(Titanic) m <- naiveBayes(Survived ~ ., data = Titanic) m predict(m, as.data.frame(Titanic)) ## Example with metric predictors: data(iris) m <- naiveBayes(Species ~ ., data = iris) ## alternatively: m <- naiveBayes(iris[,-5], iris[,5]) m table(predict(m, iris), iris[,5]) } \keyword{classif} \keyword{category} e1071/man/lca.Rd0000644000175100001440000000412514246371156012641 0ustar hornikusers\name{lca} \alias{lca} \alias{print.lca} \alias{summary.lca} \alias{print.summary.lca} \alias{predict.lca} \title{Latent Class Analysis (LCA)} \usage{ lca(x, k, niter=100, matchdata=FALSE, verbose=FALSE) } \arguments{ \item{x}{Either a data matrix of binary observations or a list of patterns as created by \code{\link{countpattern}}} \item{k}{Number of classes used for LCA} \item{niter}{Number of Iterations} \item{matchdata}{If \code{TRUE} and \code{x} is a data matrix, the class membership of every data point is returned, otherwise the class membership of every pattern is returned.} \item{verbose}{If \code{TRUE} some output is printed during the computations.} } \description{ A latent class analysis with \code{k} classes is performed on the data given by \code{x}. } \value{ An object of class \code{"lca"} is returned, containing \item{w}{Probabilities to belong to each class} \item{p}{Probabilities of a `1' for each variable in each class} \item{matching}{Depending on \code{matchdata} either the class membership of each pattern or of each data point} \item{logl, loglsat}{The LogLikelihood of the model and of the saturated model} \item{bic, bicsat}{The BIC of the model and of the saturated model} \item{chisq}{Pearson's Chisq} \item{lhquot}{Likelihood quotient of the model and the saturated model} \item{n}{Number of data points.} \item{np}{Number of free parameters.} } \references{Anton K. Formann: ``Die Latent-Class-Analysis'', Beltz Verlag 1984} \author{Andreas Weingessel} \seealso{ \code{\link{countpattern}}, \code{\link{bootstrap.lca}} } \examples{ ## Generate a 4-dim. sample with 2 latent classes of 500 data points each. ## The probabilities for the 2 classes are given by type1 and type2. type1 <- c(0.8, 0.8, 0.2, 0.2) type2 <- c(0.2, 0.2, 0.8, 0.8) x <- matrix(runif(4000), nrow = 1000) x[1:500,] <- t(t(x[1:500,]) < type1) * 1 x[501:1000,] <- t(t(x[501:1000,]) < type2) * 1 l <- lca(x, 2, niter=5) print(l) summary(l) p <- predict(l, x) table(p, c(rep(1,500),rep(2,500))) } \keyword{multivariate} \keyword{cluster} e1071/man/predict.svm.Rd0000755000175100001440000000755214173734134014350 0ustar hornikusers\name{predict.svm} \alias{predict.svm} \title{Predict Method for Support Vector Machines} \description{ This function predicts values based upon a model trained by \code{svm}. 
} \usage{ \method{predict}{svm}(object, newdata, decision.values = FALSE, probability = FALSE, ..., na.action = na.omit) } \arguments{ \item{object}{Object of class \code{"svm"}, created by \code{svm}.} \item{newdata}{An object containing the new input data: either a matrix or a sparse matrix (object of class \code{\link[Matrix]{Matrix}} provided by the \pkg{Matrix} package, or of class \code{\link[SparseM]{matrix.csr}} provided by the \pkg{SparseM} package, or of class \code{\link[slam]{simple_triplet_matrix}} provided by the \pkg{slam} package). A vector will be transformed to an n x 1 matrix.} \item{decision.values}{Logical controlling whether the decision values of all binary classifiers computed in multiclass classification shall be computed and returned.} \item{probability}{Logical indicating whether class probabilities should be computed and returned. Only possible if the model was fitted with the \code{probability} option enabled.} \item{na.action}{A function to specify the action to be taken if \sQuote{NA}s are found. The default action is \code{na.omit}, which leads to rejection of cases with missing values on any required variable. An alternative is \code{na.fail}, which causes an error if \code{NA} cases are found. (NOTE: If given, this argument must be named.)} \item{\dots}{Currently not used.} } \value{ A vector of predicted values (for classification: a vector of labels, for density estimation: a logical vector). If \code{decision.values} is \code{TRUE}, the vector gets a \code{"decision.values"} attribute containing an n x c matrix (n number of predicted values, c number of classifiers) of all c binary classifiers' decision values. There are k * (k - 1) / 2 classifiers (k number of classes). The colnames of the matrix indicate the labels of the two classes. If \code{probability} is \code{TRUE}, the vector gets a \code{"probabilities"} attribute containing an n x k matrix (n number of predicted values, k number of classes) of the class probabilities. } \note{ If the training set was scaled by \code{svm} (done by default), the new data is scaled accordingly using scale and center of the training data. } \author{ David Meyer (based on C++-code by Chih-Chung Chang and Chih-Jen Lin)\cr \email{David.Meyer@R-project.org} } \seealso{ \code{\link{svm}} } \examples{ data(iris) attach(iris) ## classification mode # default with factor response: model <- svm(Species ~ ., data = iris) # alternatively the traditional interface: x <- subset(iris, select = -Species) y <- Species model <- svm(x, y, probability = TRUE) print(model) summary(model) # test with train data pred <- predict(model, x) # (same as:) pred <- fitted(model) # compute decision values and probabilities pred <- predict(model, x, decision.values = TRUE, probability = TRUE) attr(pred, "decision.values")[1:4,] attr(pred, "probabilities")[1:4,] ## try regression mode on two dimensions # create data x <- seq(0.1, 5, by = 0.05) y <- log(x) + rnorm(x, sd = 0.2) # estimate model and predict input values m <- svm(x, y) new <- predict(m, x) # visualize plot (x, y) points (x, log(x), col = 2) points (x, new, col = 4) ## density-estimation # create 2-dim.
normal with rho=0: X <- data.frame(a = rnorm(1000), b = rnorm(1000)) attach(X) # traditional way: m <- svm(X, gamma = 0.1) # formula interface: m <- svm(~., data = X, gamma = 0.1) # or: m <- svm(~ a + b, gamma = 0.1) # test: newdata <- data.frame(a = c(0, 4), b = c(0, 4)) predict (m, newdata) # visualize: plot(X, col = 1:1000 \%in\% m$index + 1, xlim = c(-5,5), ylim=c(-5,5)) points(newdata, pch = "+", col = 2, cex = 5) } \keyword{neural} \keyword{nonlinear} \keyword{classif} e1071/man/Discrete.Rd0000755000175100001440000000302514173734133013642 0ustar hornikusers\name{Discrete} \alias{ddiscrete} \alias{pdiscrete} \alias{qdiscrete} \alias{rdiscrete} \title{Discrete Distribution} \description{ These functions provide information about the discrete distribution where the probability of the elements of \code{values} is proportional to the values given in \code{probs}, which are normalized to sum up to 1. \code{ddiscrete} gives the density, \code{pdiscrete} gives the distribution function, \code{qdiscrete} gives the quantile function and \code{rdiscrete} generates random deviates. } \usage{ ddiscrete(x, probs, values = 1:length(probs)) pdiscrete(q, probs, values = 1:length(probs)) qdiscrete(p, probs, values = 1:length(probs)) rdiscrete(n, probs, values = 1:length(probs), ...) } \arguments{ \item{x,q}{vector or array of quantiles.} \item{p}{vector or array of probabilities.} \item{n}{number of observations.} \item{probs}{probabilities of the distribution.} \item{values}{values of the distribution.} \item{...}{ignored (only there for backwards compatibility)} } \details{ The random number generator is simply a wrapper for \code{\link{sample}} and provided for backwards compatibility only. } \author{Andreas Weingessel and Friedrich Leisch} \examples{ ## a vector of length 30 whose elements are 1 with probability 0.2 ## and 2 with probability 0.8. rdiscrete (30, c(0.2, 0.8)) ## a vector of length 100 whose elements are A, B, C, D. ## The probabilities of the four values have the relation 1:2:3:3 rdiscrete (100, c(1,2,3,3), c("A","B","C","D")) } \keyword{distribution} e1071/man/stft.Rd0000755000175100001440000000317214173734134013064 0ustar hornikusers\name{stft} \title{Computes the Short Time Fourier Transform of a Vector} \usage{stft(X, win=min(80,floor(length(X)/10)), inc=min(24, floor(length(X)/30)), coef=64, wtype="hanning.window")} \alias{stft} \arguments{ \item{X}{The vector from which the stft is computed.} \item{win}{Length of the window. For long vectors the default window size is 80, for short vectors the window size is chosen so that 10 windows fit in the vector.} \item{inc}{Increment by which the window is shifted. For long vectors the default increment is 24, for short vectors the increment is chosen so that 30 increments fit in the vector.} \item{coef}{Number of Fourier coefficients} \item{wtype}{Type of window used} } \description{This function computes the Short Time Fourier Transform of a given vector \code{X}. First, time-slices of length \code{win} are extracted from the vector. The shift of one time-slice to the next one is given by \code{inc}. The values of these time-slices are smoothed by multiplying them with a window function specified in \code{wtype}. For the thus obtained windows, the Fast Fourier Transform is computed.
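As an illustrative sketch of this procedure (an assumed reimplementation of the idea, not the function's actual code; it reuses the package's \code{hanning.window()}): \preformatted{
## sketch of the windowed-FFT idea for a numeric vector x
win <- 80; inc <- 24; coef <- 64
starts <- seq(1, length(x) - win + 1, by = inc)
vals <- t(sapply(starts, function(s) {
    slice <- x[s:(s + win - 1)] * hanning.window(win)  # smooth slice
    Mod(fft(slice))[1:coef]    # moduli of the first coef coefficients
}))
} } \value{Object of type stft. Contains the values of the stft and information about the parameters. \item{values}{A matrix containing the results of the stft.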
Each row of the matrix contains the \code{coef} Fourier coefficients of one window.} \item{windowsize}{The value of the parameter \code{win}} \item{increment}{The value of the parameter \code{inc}} \item{windowtype}{The value of the parameter \code{wtype}} } \author{Andreas Weingessel} \seealso{plot.stft} \examples{x<-rnorm(500) y<-stft(x) plot(y) } \keyword{ts} e1071/man/probplot.Rd0000755000175100001440000000433414173734134013746 0ustar hornikusers\name{probplot} \alias{probplot} \alias{lines.probplot} \title{Probability Plot} \description{ Generates a probability plot for a specified theoretical distribution, i.e., basically a \code{\link[stats]{qqplot}} where the y-axis is labeled with probabilities instead of quantiles. The function is mainly intended for teaching the concept of quantile plots. } \usage{ probplot(x, qdist=qnorm, probs=NULL, line=TRUE, xlab=NULL, ylab="Probability in \%", ...) \S3method{lines}{probplot}(x, h=NULL, v=NULL, bend=FALSE, ...) } \arguments{ \item{x}{A data vector for \code{probplot}, an object of class \code{probplot} for the \code{lines} method.} \item{qdist}{A character string or a function for the quantiles of the target distribution.} \item{probs}{Vector of probabilities at which horizontal lines should be drawn.} \item{line}{Add a line passing through the quartiles to the plot?} \item{xlab, ylab}{Graphical parameters.} \item{h}{The y-value for a horizontal line.} \item{v}{The x-value for a vertical line.} \item{bend}{If \code{TRUE}, lines are ``bent'' at the quartile line, else regular \code{abline}s are added. See examples.} \item{\dots}{Further arguments for \code{qdist} and graphical parameters for lines.} } \author{Friedrich Leisch} \seealso{\code{\link[stats]{qqplot}}} \examples{ ## a simple example x <- rnorm(100, mean=5) probplot(x) ## the same with horizontal tickmarks at the y-axis opar <- par("las") par(las=1) probplot(x) ## this should show the lack of fit at the tails probplot(x, "qunif") ## for increasing degrees of freedom the t-distribution converges to ## normal probplot(x, qt, df=1) probplot(x, qt, df=3) probplot(x, qt, df=10) probplot(x, qt, df=100) ## manually add the line through the quartiles p <- probplot(x, line=FALSE) lines(p, col="green", lty=2, lwd=2) ## Make the line at prob=0.5 red lines(p, h=0.5, col="red") ### The following use the estimated distribution given by the green ### line: ## What is the probability that x is smaller than 7? lines(p, v=7, bend=TRUE, col="blue") ## Median and 90\% confidence interval lines(p, h=.5, col="red", lwd=3, bend=TRUE) lines(p, h=c(.05, .95), col="red", lwd=2, lty=3, bend=TRUE) par(opar) } \keyword{hplot} e1071/man/ica.Rd0000755000175100001440000000342314173734134012637 0ustar hornikusers\name{ica} \alias{ica} \alias{plot.ica} \alias{print.ica} \title{Independent Component Analysis} \usage{ ica(X, lrate, epochs=100, ncomp=dim(X)[2], fun="negative") } \arguments{ \item{X}{The matrix for which the ICA is to be computed} \item{lrate}{learning rate} \item{epochs}{number of iterations} \item{ncomp}{number of independent components} \item{fun}{function used for the nonlinear computation part} } \description{ This is an R-implementation of the Matlab-Function of Petteri.Pajunen@hut.fi. For a data matrix X independent components are extracted by applying a nonlinear PCA algorithm. The parameter \code{fun} determines which nonlinearity is used. \code{fun} can either be a function or one of the following strings: "negative kurtosis", "positive kurtosis", "4th moment", which can be abbreviated (as long as the abbreviation is unique). If \code{fun} equals "negative (positive) kurtosis", the function \code{tanh(x)} (respectively \code{x - tanh(x)}) is used, which provides ICA for sources with negative (positive) kurtosis. For \code{fun == "4th moment"}, the signed square function is used.
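Expressed as plain R functions, these nonlinearities are (a sketch for illustration only; the names are made up, but the formulas follow the description above): \preformatted{
g_negkurt <- function(x) tanh(x)        # "negative kurtosis"
g_poskurt <- function(x) x - tanh(x)    # "positive kurtosis"
g_moment4 <- function(x) sign(x) * x^2  # "4th moment": signed square
} }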
\code{fun} can either be a function or one of the following strings "negative kurtosis", "positive kurtosis", "4th moment", which may be abbreviated as long as the abbreviation remains unique. If \code{fun} equals "negative (positive) kurtosis", the function tanh (respectively x-tanh(x)) is used, which provides ICA for sources with negative (positive) kurtosis. For \code{fun == "4th moment"} the signed square function is used. } \value{ An object of class \code{"ica"} which is a list with components \item{weights}{ICA weight matrix} \item{projection}{Projected data} \item{epochs}{Number of iterations} \item{fun}{Name of the used function} \item{lrate}{Learning rate used} \item{initweights}{Initial weight matrix} } \references{ Oja et al., ``Learning in Nonlinear Constrained Hebbian Networks'', in Proc. ICANN-91, pp. 385--390. Karhunen and Joutsensalo, ``Generalizations of Principal Component Analysis, Optimization Problems, and Neural Networks'', Neural Networks, v. 8, no. 4, pp. 549--562, 1995. } \note{Currently, there is no reconstruction from the ICA subspace to the original input space.} \author{Andreas Weingessel} \keyword{multivariate} e1071/man/rbridge.Rd0000755000175100001440000000113214173734134013514 0ustar hornikusers\name{rbridge} \alias{rbridge} \title{Simulation of Brownian Bridge} \usage{ rbridge(end = 1, frequency = 1000) } \arguments{ \item{end}{the time of the last observation.} \item{frequency}{the number of observations per unit of time.} } \description{ \code{rbridge} returns a time series containing a simulated realization of the Brownian bridge on the interval [0,\code{end}]. If W(t) is a Wiener process, then the Brownian bridge is defined as W(t) - t W(1). } \seealso{ rwiener } \examples{ # simulate a Brownian bridge on [0,1] and plot it x <- rbridge() plot(x,type="l") } \keyword{distribution} e1071/man/gknn.Rd0000644000175100001440000000752514077325003013036 0ustar hornikusers\name{gknn} \alias{gknn} \alias{gknn.default} \alias{gknn.formula} \alias{print.gknn} \alias{predict.gknn} \title{Generalized k-Nearest Neighbors Classification or Regression} \description{ \code{gknn} is an implementation of the k-nearest neighbours algorithm making use of general distance measures. A formula interface is provided. } \usage{ \method{gknn}{formula}(formula, data = NULL, ..., subset, na.action = na.pass, scale = TRUE) \method{gknn}{default}(x, y, k = 1, method = NULL, scale = TRUE, use_all = TRUE, FUN = mean, ...) \method{predict}{gknn}(object, newdata, type = c("class", "votes", "prob"), ..., na.action = na.pass) } \arguments{ \item{formula}{a symbolic description of the model to be fit.} \item{data}{an optional data frame containing the variables in the model. By default the variables are taken from the environment which \sQuote{gknn} is called from.} \item{x}{a data matrix.} \item{y}{a response vector with one label for each row/component of \code{x}. Can be either a factor (for classification tasks) or a numeric vector (for regression).} \item{k}{number of neighbours considered.} \item{scale}{a logical vector indicating the variables to be scaled. If \code{scale} is of length 1, the value is recycled as many times as needed. By default, numeric \emph{matrices} are scaled to zero mean and unit variance. The center and scale values are returned and used for later predictions.
Note that the default metric for data frames is the Gower metric which \emph{standardizes} the values to the unit interval.} \item{method}{Argument passed to \code{dist()} from the \code{proxy} package to select the distance metric used: a function, or a mnemonic string referencing the distance measure. Defaults to \code{"Euclidean"} for metric matrices, to \code{"Jaccard"} for logical matrices and to \code{"Gower"} for data frames. } \item{use_all}{controls handling of ties. If true, all distances equal to the kth largest are included. If false, a random selection of distances equal to the kth is chosen to use exactly k neighbours.} \item{FUN}{function used to aggregate the k nearest target values in case of regression.} \item{object}{object of class \code{gknn}.} \item{newdata}{matrix or data frame with new instances.} \item{type}{character specifying the return type in case of class predictions: for \code{"class"}, the class labels; for \code{"prob"}, the class distribution for all k neighbours considered; for \code{"votes"}, the raw counts.} \item{\dots}{additional parameters passed to \code{dist()}} \item{subset}{An index vector specifying the cases to be used in the training sample. (NOTE: If given, this argument must be named.)} \item{na.action}{A function to specify the action to be taken if \code{NA}s are found. The default action is \code{na.pass}. (NOTE: If given, this argument must be named.)} } \value{ For \code{gknn()}, an object of class \code{"gknn"} containing the data and the specified parameters. For \code{predict.gknn()}, a vector of predictions, or a matrix with votes for all classes. In case of an overall class tie, the predicted class is chosen at random. } \author{ David Meyer (\email{David.Meyer@R-project.org}) } \seealso{ \code{\link[proxy]{dist}} (in package \pkg{proxy}) } \examples{ data(iris) model <- gknn(Species ~ ., data = iris) predict(model, iris[c(1, 51, 101),]) test = c(45:50, 95:100, 145:150) model <- gknn(Species ~ ., data = iris[-test,], k = 3, method = "Manhattan") predict(model, iris[test,], type = "votes") model <- gknn(Species ~ ., data = iris[-test,], k = 3, method = "Manhattan") predict(model, iris[test,], type = "prob") } \keyword{nonlinear} \keyword{classif} e1071/man/fclustIndex.Rd0000755000175100001440000001753314173734134014372 0ustar hornikusers\name{fclustIndex} \title{Fuzzy Cluster Indexes (Validity/Performance Measures)} \usage{fclustIndex(y, x, index = "all")} \alias{fclustIndex} \arguments{ \item{y}{An object of a fuzzy clustering result of class \code{"fclust"}} \item{x}{Data matrix} \item{index}{The validity measures used: \code{"gath.geva"}, \code{"xie.beni"}, \code{"fukuyama.sugeno"}, \code{"partition.coefficient"}, \code{"partition.entropy"}, \code{"proportion.exponent"}, \code{"separation.index"} and \code{"all"} for all the indexes.}} \description{ Calculates the values of several fuzzy validity measures. The values of the indexes can be independently used in order to evaluate and compare clustering partitions or even to determine the number of clusters existing in a data set.} \details{ The validity measures and a short description of them follow, where \eqn{N} is the number of data points, \eqn{u_{ij}} the values of the membership matrix, \eqn{v_j} the centers of the clusters and \eqn{k} the number of clusters.
\describe{ \item{\bold{gath.geva}:}{ Gath and Geva introduced 2 main criteria for comparing and finding optimal partitions based on the heuristics that a better clustering assumes clear separation between the clusters, minimal volume of the clusters and maximal number of data points concentrated in the vicinity of the cluster centroids. These indexes are valid only for the cmeans clustering algorithm. For the first, the ``fuzzy hypervolume'' we have: \eqn{F_{HV}=\sum_{j=1}^{c}{[\det(F_j)]}^{1/2}}, where \eqn{F_j=\frac{\sum_{i=1}^N u_{ij}(x_i-v_j)(x_i-v_j)^T}{\sum_{i=1}^{N}u_{ij}}}, for the case when the defuzzification parameter is 2. For the second, the ``average partition density'': \eqn{D_{PA}=\frac{1}{k}\sum_{j=1}^k\frac{S_j}{{[\det(F_j)]}^{1/2}}}, where \eqn{S_j=\sum_{i=1}^N u_{ij}}. Moreover, the ``partition density'' which expresses the general partition density according to the physical definition of density is calculated by: \eqn{P_D=\frac{S}{F_{HV}}}, where \eqn{S=\sum_{j=1}^k\sum_{i=1}^N u_{ij}}. } \item{\bold{xie.beni}:}{ This index is a function of the data set and the centroids of the clusters. Xie and Beni explained this index by writing it as a ratio of the total variation of the partition and the centroids $(U,V)$ and the separation of the centroid vectors. The minimum values of this index under comparison support the best partitions. \eqn{u_{XB}(U,V;X)=\frac{\sum_{j=1}^k\sum_{i=1}^Nu_{ij}^2{||x_i-v_j||}^2}{N(\min_{j\neq l}\{{||v_j-v_l||}^2\})}} } \item{\bold{fukuyama.sugeno}:}{ This index consists of the difference of two terms, the first combining the fuzziness in the membership matrix with the geometrical compactness of the representation of the data set via the prototypes, and the second the fuzziness in its row of the partition matrix with the distance from the $i$th prototype to the grand mean of the data. The minimum values of this index also propose a good partition. \eqn{u_{FS}(U,V;X)=\sum_{i=1}^{N}\sum_{j=1}^k (u_{ij}^2)^q(||x_i-v_j||^2-||v_j-\bar v||^2)} } \item{\bold{partition.coefficient}:}{ An index which measures the fuzziness of the partition but without considering the data set itself. It is a heuristic measure since it has no connection to any property of the data. The maximum values of it imply a good partition in the meaning of a least fuzzy clustering. \eqn{F(U;k)=\frac{tr (UU^T)}{N}=\frac{\langle U,U\rangle}{N}=\frac{||U||^2}{N}} \itemize{ \item \eqn{F(U;k)} shows the fuzziness or the overlap of the partition and depends on \eqn{kN} elements. \item \eqn{1/k\leq F(U;k)\leq 1}, where if \eqn{F(U;k)=1} then \eqn{U} is a hard partition and if \eqn{F(U;k)=1/k} then \eqn{U=[1/k]} is the centroid of the fuzzy partition space \eqn{P_{fk}}. The converse is also valid. } } \item{\bold{partition.entropy}:}{ It is a measure that provides information about the membership matrix without also considering the data itself. The minimum values imply a good partition in the meaning of a more crisp partition. \eqn{H(U;k)=\sum_{i=1}^{N} h(u_i)/N}, where \eqn{h(u)=-\sum_{j=1}^{k} u_j\,\log _a (u_j)} is Shannon's entropy. \itemize{ \item \eqn{H(U;k)} shows the uncertainty of a fuzzy partition and depends also on \eqn{kN} elements. Specifically, \eqn{h(u_i)} is interpreted as the amount of fuzzy information about the membership of \eqn{x_i} in \eqn{k} classes that is retained by column \eqn{u_j}. Thus, at \eqn{U=[1/k]} the most information is withheld since the membership is the fuzziest possible.
\item \eqn{0\leq H(U;k)\leq \log_a(k)}, where for \eqn{H(U;k)=0} \eqn{U} is a hard partition and for \eqn{H(U;k)=\log_a(k)} \eqn{U=[1/k]}. } } \item{\bold{proportion.exponent}:}{ It is a measure \eqn{P(U;k)} of fuzziness adept at detecting structural variations in the partition matrix as it becomes fuzzier. A crisp cluster in the partition matrix can drive it to infinity, whereas the partition coefficient and the partition entropy are more sensitive to small changes when approaching a hard partition. Its evaluation does not involve the data or the algorithm used to partition them either, and its maximum implies the optimal partition, though without knowing whether that maximum is statistically significant. \itemize{ \item \eqn{0\leq P(U;k)<\infty}, since the \eqn{[0,1]} values explode to \eqn{[0,\infty)} due to the natural logarithm. Specifically, \eqn{P=0} when and only when \eqn{U=[1/k]}, while \eqn{P\rightarrow\infty} when any column of \eqn{U} is crisp. \item \eqn{P(U;k)} can easily explode and it is good for partitions with large column maximums and at detecting structural variations. } } \item{\bold{separation.index (known as CS Index)}:}{ This index identifies unique cluster structure with well-defined properties that depend on the data and a measure of distance. It answers the question whether the clusters are compact and separated, but it rather seems computationally infeasible for big data sets since a distance matrix between all the data membership values has to be calculated. It also presupposes that a hard partition is derived from the fuzzy one.\cr \eqn{D_1(U;k;X,d)=\min_{i+1\,\leq\,l\,\leq\,k-1}\left\{\min_{1\,\leq\,j\,\leq\,k}\left\{\frac{dis(u_j,u_l)}{\max_{1\leq m\leq k}\{dia(u_m)\}}\right\}\right\}}, where \eqn{dia} is the diameter of the subset, \eqn{dis} the distance of two subsets, and \eqn{d} a metric. \eqn{U} is a CS partition of \eqn{X} \eqn{\Leftrightarrow D_1>1}. When this holds then \eqn{U} is unique. } } } \value{ Returns a vector with the validity measures values. } \references{ James C. Bezdek, \emph{Pattern Recognition with Fuzzy Objective Function Algorithms}, Plenum Press, 1981, NY.\cr X. L. Xie and G. Beni, \emph{A Validity Measure for Fuzzy Clustering}, IEEE Transactions on Pattern Analysis and Machine Intelligence, vol. \bold{13}, n. 8, p. 841-847, 1991.\cr I. Gath and A. B. Geva, \emph{Unsupervised Optimal Fuzzy Clustering}, IEEE Transactions on Pattern Analysis and Machine Intelligence, vol. \bold{11}, n. 7, p. 773-781, 1989.\cr Y. Fukuyama and M. Sugeno, \emph{A new method of choosing the number of clusters for the fuzzy $c$-means method}, Proc. 5th Fuzzy Syst. Symp., p. 247-250, 1989 (in Japanese).} \author{Evgenia Dimitriadou} \seealso{\code{\link{cmeans}}} \examples{ # a 2-dimensional example x<-rbind(matrix(rnorm(100,sd=0.3),ncol=2), matrix(rnorm(100,mean=1,sd=0.3),ncol=2)) cl<-cmeans(x,2,20,verbose=TRUE,method="cmeans") resultindexes <- fclustIndex(cl,x, index="all") resultindexes } \keyword{cluster} e1071/man/skewness.Rd0000755000175100001440000000325714173734134013752 0ustar hornikusers\name{skewness} \alias{skewness} \title{Skewness} \description{ Computes the skewness.
} \usage{ skewness(x, na.rm = FALSE, type = 3) } \arguments{ \item{x}{a numeric vector containing the values whose skewness is to be computed.} \item{na.rm}{a logical value indicating whether \code{NA} values should be stripped before the computation proceeds.} \item{type}{an integer between 1 and 3 selecting one of the algorithms for computing skewness detailed below.} } \details{ If \code{x} contains missings and these are not removed, the skewness is \code{NA}. Otherwise, write \eqn{x_i} for the non-missing elements of \code{x}, \eqn{n} for their number, \eqn{\mu}{mu} for their mean, \eqn{s} for their standard deviation, and \eqn{m_r = \sum_i (x_i - \mu)^r / n}{m_r = \sum_i (x_i - mu)^r / n} for the sample moments of order \eqn{r}. Joanes and Gill (1998) discuss three methods for estimating skewness: \describe{ \item{Type 1:}{ \eqn{g_1 = m_3 / m_2^{3/2}}{g_1 = m_3 / m_2^(3/2)}. This is the typical definition used in many older textbooks.} \item{Type 2:}{ \eqn{G_1 = g_1 \sqrt{n(n-1)} / (n-2)}{ G_1 = g_1 * sqrt(n(n-1)) / (n-2)}. Used in SAS and SPSS. } \item{Type 3:}{ \eqn{b_1 = m_3 / s^3 = g_1 ((n-1)/n)^{3/2}}{ b_1 = m_3 / s^3 = g_1 ((n-1)/n)^(3/2)}. Used in MINITAB and BMDP.} } All three skewness measures are unbiased under normality. } \value{ The estimated skewness of \code{x}. } \references{ D. N. Joanes and C. A. Gill (1998), Comparing measures of sample skewness and kurtosis. \emph{The Statistician}, \bold{47}, 183--189. } \examples{ x <- rnorm(100) skewness(x) } \keyword{univar} e1071/man/tune.Rd0000644000175100001440000001115614325173527013057 0ustar hornikusers\name{tune} \alias{tune} \alias{best.tune} \alias{print.tune} \alias{summary.tune} \alias{print.summary.tune} \title{Parameter Tuning of Functions Using Grid Search} \description{ This generic function tunes hyperparameters of statistical methods using a grid search over supplied parameter ranges. } \usage{ tune(METHOD, train.x, train.y = NULL, data = list(), validation.x = NULL, validation.y = NULL, ranges = NULL, predict.func = predict, tunecontrol = tune.control(), ...) best.tune(...) } \arguments{ \item{METHOD}{either the function to be tuned, or a character string naming such a function.} \item{train.x}{either a formula or a matrix of predictors.} \item{train.y}{the response variable if \code{train.x} is a predictor matrix. Ignored if \code{train.x} is a formula.} \item{data}{data, if a formula interface is used. Ignored, if predictor matrix and response are supplied directly.} \item{validation.x}{an optional validation set. Depending on whether a formula interface is used or not, the response can be included in \code{validation.x} or separately specified using \code{validation.y}. Only used for bootstrap and fixed validation set (see \code{\link{tune.control}})} \item{validation.y}{if no formula interface is used, the response of the (optional) validation set. Only used for bootstrap and fixed validation set (see \code{\link{tune.control}})} \item{ranges}{a named list of parameter vectors spanning the sampling space. The vectors will usually be created by \code{seq}.} \item{predict.func}{optional predict function, if the standard \code{predict} behavior is inadequate.} \item{tunecontrol}{object of class \code{"tune.control"}, as created by the function \code{tune.control()}. 
If omitted, \code{tune.control()} gives the defaults.} \item{\dots}{Further parameters passed to the training functions.} } \value{ For \code{tune}, an object of class \code{tune}, including the components: \item{best.parameters}{a 1 x k data frame, where k is the number of parameters.} \item{best.performance}{best achieved performance.} \item{performances}{if requested, a data frame of all parameter combinations along with the corresponding performance results.} \item{train.ind}{list of index vectors used for splits into training and validation sets.} \item{best.model}{if requested, the model trained on the complete training data using the best parameter combination.} \code{best.tune()} returns the best model detected by \code{tune}. } \details{ As performance measure, the classification error is used for classification, and the mean squared error for regression. It is possible to specify only one parameter combination (i.e., vectors of length 1) to obtain an error estimation of the specified type (bootstrap, cross-validation, etc.) on the given data set. For convenience, there are several \code{tune.foo()} wrappers defined, e.g., for \code{nnet()}, \code{randomForest()}, \code{rpart()}, \code{svm()}, and \code{knn()}. Cross-validation randomizes the data set before building the splits which---once created---remain constant during the training process. The splits can be recovered through the \code{train.ind} component of the returned object. } \author{ David Meyer\cr \email{David.Meyer@R-project.org} } \seealso{\code{\link{tune.control}}, \code{\link{plot.tune}}, \code{\link{tune.svm}}, \link{tune.wrapper}} \examples{ data(iris) ## tune `svm' for classification with RBF-kernel (default in svm), ## using one split for training/validation set obj <- tune(svm, Species~., data = iris, ranges = list(gamma = 2^(-1:1), cost = 2^(2:4)), tunecontrol = tune.control(sampling = "fix") ) ## alternatively: ## obj <- tune.svm(Species~., data = iris, gamma = 2^(-1:1), cost = 2^(2:4)) summary(obj) plot(obj) ## tune `knn' using a convenience function; this time with the ## conventional interface and bootstrap sampling: x <- iris[,-5] y <- iris[,5] obj2 <- tune.knn(x, y, k = 1:5, tunecontrol = tune.control(sampling = "boot")) summary(obj2) plot(obj2) ## tune `gknn' using the formula interface. ## (Use Euclidean distances instead of Gower metric) obj3 <- tune.gknn(Species ~ ., data = iris, k = 1:5, method = "Euclidean") summary(obj3) plot(obj3) ## tune `rpart' for regression, using 10-fold cross validation (default) data(mtcars) obj4 <- tune.rpart(mpg~., data = mtcars, minsplit = c(5,10,15)) summary(obj4) plot(obj4) ## simple error estimation for lm using 10-fold cross validation tune(lm, mpg~., data = mtcars) } \keyword{models} e1071/man/write.svm.Rd0000644000175100001440000000410714246406243014034 0ustar hornikusers\name{write.svm} \alias{write.svm} \title{Write SVM Object to File} \description{ This function exports an SVM object (trained by \code{svm}) to two specified files. One is in the format that the function 'svm_load_model()' of libsvm can read. The other contains the scaling data, i.e., the centers and scales for all variables.
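As a rough illustration (not part of the package itself), the exported scaling file could be used to reproduce the preprocessing in another R session; in the following sketch, \code{sc} is the two-column table read back from \code{scale.file}, and \code{newx} is a hypothetical matrix of new observations: \preformatted{
sc <- read.table("Rdata.scale")
## column 1 holds the centers, column 2 the scales (one row per variable)
newx_scaled <- scale(newx, center = sc[[1]], scale = sc[[2]])
}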
} \usage{ write.svm(object, svm.file = "Rdata.svm", scale.file = "Rdata.scale", yscale.file = "Rdata.yscale") } \arguments{ \item{object}{Object of class \code{"svm"}, created by \code{svm}.} \item{svm.file}{filename to export the svm object to.} \item{scale.file}{filename to export the scaling data of the explanatory variables to.} \item{yscale.file}{filename to export the scaling data of the dependent variable to, if any.} } \details{ This function is useful when SVM models trained in R are to be used in other environments. The SVM model is saved in the standard format of libsvm. The scaling data are written to separate files because scaling data are not included in the standard format of libsvm. The format of the scaling data file is an n times 2 matrix: the n-th row corresponds to the n-th dimension of the data, the columns being formed of the corresponding mean and scale. If scaling information for the dependent variable exists (in case of regression models), it is stored in yet another file (1 times 2 matrix). } \author{ Tomomi TAKASHINA (based on 'predict.svm' by David Meyer) \email{t.takashina@computer.org} } \seealso{ \code{\link{svm}} } \examples{ data(iris) attach(iris) ## classification mode # default with factor response: model <- svm (Species~., data=iris) # export SVM object to (temporary) files svm_file <- tempfile() scale_file <- tempfile() write.svm(model, svm.file = svm_file, scale.file = scale_file) # read scale file # the n-th row corresponds to the n-th dimension. The 1st column contains the # center value, the 2nd column the scale value. read.table(scale_file) # clean up unlink(svm_file) unlink(scale_file) } \keyword{neural} \keyword{nonlinear} \keyword{classif} e1071/man/tune.wrapper.Rd0000644000175100001440000000512114324555763014536 0ustar hornikusers\name{tune.wrapper} \alias{tune.wrapper} \alias{tune.rpart} \alias{best.rpart} \alias{tune.svm} \alias{best.svm} \alias{tune.nnet} \alias{best.nnet} \alias{tune.randomForest} \alias{best.randomForest} \alias{tune.gknn} \alias{best.gknn} \alias{tune.knn} \title{Convenience Tuning Wrapper Functions} \description{ Convenience tuning wrapper functions, using \code{tune}. } \usage{ tune.svm(x, y = NULL, data = NULL, degree = NULL, gamma = NULL, coef0 = NULL, cost = NULL, nu = NULL, class.weights = NULL, epsilon = NULL, ...) best.svm(x, tunecontrol = tune.control(), ...) tune.nnet(x, y = NULL, data = NULL, size = NULL, decay = NULL, trace = FALSE, tunecontrol = tune.control(nrepeat = 5), ...) best.nnet(x, tunecontrol = tune.control(nrepeat = 5), ...) tune.rpart(formula, data, na.action = na.omit, minsplit = NULL, minbucket = NULL, cp = NULL, maxcompete = NULL, maxsurrogate = NULL, usesurrogate = NULL, xval = NULL, surrogatestyle = NULL, maxdepth = NULL, predict.func = NULL, ...) best.rpart(formula, tunecontrol = tune.control(), ...) tune.randomForest(x, y = NULL, data = NULL, nodesize = NULL, mtry = NULL, ntree = NULL, ...) best.randomForest(x, tunecontrol = tune.control(), ...) tune.gknn(x, y = NULL, data = NULL, k = NULL, ...) best.gknn(x, tunecontrol = tune.control(), ...) tune.knn(x, y, k = NULL, l = NULL, ...)
} \arguments{ \item{formula, x, y, data}{formula and data arguments of function to be tuned.} \item{predict.func}{predicting function.} \item{na.action}{function handling missingness.} \item{minsplit, minbucket, cp, maxcompete, maxsurrogate, usesurrogate, xval, surrogatestyle, maxdepth}{\code{rpart} parameters.} \item{degree, gamma, coef0, cost, nu, class.weights, epsilon}{\code{svm} parameters.} \item{k, l}{\code{(g)knn} parameters.} \item{mtry, nodesize, ntree}{\code{randomForest} parameters.} \item{size, decay, trace}{parameters passed to \code{nnet}.} \item{tunecontrol}{object of class \code{"tune.control"} containing tuning parameters.} \item{\dots}{Further parameters passed to \code{tune}.} } \value{ \code{tune.foo()} returns a tuning object including the best parameter set obtained by optimizing over the specified parameter vectors. \code{best.foo()} directly returns the best model, i.e., the fit of a new model using the optimal parameters found by \code{tune.foo}. } \details{For examples, see the help page of \code{tune()}.} \author{ David Meyer\cr \email{David.Meyer@R-project.org} } \seealso{\code{\link{tune}}} \keyword{models} e1071/man/element.Rd0000755000175100001440000000064514173734134013537 0ustar hornikusers\name{element} \title{Extract Elements of an Array} \usage{ element(x, i) } \alias{element} \arguments{ \item{x}{Array of arbitrary dimensionality.} \item{i}{Vector of indices, one for each dimension of \code{x}.} } \description{ Returns the element of \code{x} specified by \code{i}. } \author{Friedrich Leisch} \seealso{\link{Extract}} \examples{ x <- array(1:20, dim=c(2,5,2)) element(x, c(1,4,2)) } \keyword{array} e1071/man/sigmoid.Rd0000755000175100001440000000073714173734134013541 0ustar hornikusers\name{sigmoid} \alias{sigmoid} \alias{dsigmoid} \alias{d2sigmoid} \title{The Logistic Function and Derivatives} \description{ Sigmoid \eqn{1/(1 + \exp(-x))}{1/(1 + exp(-x))}, first and second derivative.} \usage{ sigmoid(x) dsigmoid(x) d2sigmoid(x) } \arguments{ \item{x}{a numeric vector} } \author{Friedrich Leisch} \examples{ plot(sigmoid, -5, 5, ylim = c(-.2, 1)) plot(dsigmoid, -5, 5, add = TRUE, col = 2) plot(d2sigmoid, -5, 5, add = TRUE, col = 3) } \keyword{math} e1071/man/impute.Rd0000755000175100001440000000101014173734134013376 0ustar hornikusers\name{impute} \alias{impute} \title{Replace Missing Values} \usage{ impute(x, what = c("median", "mean")) } \arguments{ \item{x}{A matrix or dataframe.} \item{what}{What to impute.} } \description{ Replaces missing values of a matrix or dataframe with the medians (\code{what="median"}) or means (\code{what="mean"}) of the respective columns.} \value{ A matrix or dataframe. } \author{Friedrich Leisch} \examples{ x<- matrix(1:10, ncol=2) x[c(1,3,7)] <- NA print(x) print(impute(x)) } \keyword{manip} e1071/man/hamming.distance.Rd0000755000175100001440000000122714173734134015314 0ustar hornikusers\name{hamming.distance} \alias{hamming.distance} \title{Hamming Distances of Vectors} \usage{ hamming.distance(x, y) } \arguments{ \item{x}{a vector or matrix.} \item{y}{an optional vector.} } \description{ If both \code{x} and \code{y} are vectors, \code{hamming.distance} returns the Hamming distance (number of different elements) between these two vectors. If \code{x} is a matrix, the Hamming distances between the rows of \code{x} are computed and \code{y} is ignored.
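For two vectors, the returned value is thus simply the number of positions at which they differ, essentially the following one-line computation: \preformatted{
sum(x != y)   ## number of positions where x and y differ
}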
} \examples{ x <- c(1, 0, 0) y <- c(1, 0, 1) hamming.distance(x, y) z <- rbind(x,y) rownames(z) <- c("Fred", "Tom") hamming.distance(z) hamming.distance(1:3, 3:1) } \keyword{multivariate} e1071/man/plot.stft.Rd0000755000175100001440000000160414173734134014037 0ustar hornikusers\name{plot.stft} \alias{plot.stft} \title{Plot Short Time Fourier Transforms} \description{ An object of class \code{"stft"} is plotted as a gray scale image. The x-axis corresponds to time, the y-axis to frequency. If the default colormap is used, dark regions in the plot correspond to high values at the particular time/frequency location. } \usage{ \method{plot}{stft}(x, col = gray(63:0/63), \dots) } \arguments{ \item{x}{An object of class \code{"stft"} as obtained by the function \code{stft}.} \item{col}{An optional colormap. By default 64 gray values are used, where white corresponds to the minimum value and black to the maximum.} \item{\dots}{further arguments to be passed to or from methods.} } \value{No return value. This function is only for plotting.} \author{Andreas Weingessel} \seealso{stft} \examples{x<-rnorm(500) y<-stft(x) plot(y) } \keyword{ts} e1071/man/read.matrix.csr.Rd0000755000175100001440000000302214173734134015102 0ustar hornikusers\name{read.matrix.csr} \alias{read.matrix.csr} \alias{write.matrix.csr} %- Also NEED an `\alias' for EACH other topic documented here. \title{Read/Write Sparse Data} \description{ reads and writes a file in sparse data format. } \usage{ read.matrix.csr(file, fac = TRUE, ncol = NULL) write.matrix.csr(x, file = "out.dat", y = NULL, fac = TRUE) } %- maybe also `usage' for other objects documented here. \arguments{ \item{x}{An object of class \code{matrix.csr}} \item{y}{A vector (either numeric or a factor)} \item{file}{The filename.} \item{fac}{If \code{TRUE}, the y-values (if any) are interpreted as factor levels.} \item{ncol}{Number of columns, detected automatically. 
Can be used to add empty columns (possibly not stored in the sparse format).} } \value{ If the data file includes no y variable, \code{read.matrix.csr} returns an object of class \code{matrix.csr}, else a list with components: \item{x}{object of class \code{matrix.csr}} \item{y}{vector of numeric values or factor levels, depending on \code{fac}.} } \author{ David Meyer\cr \email{David.Meyer@R-project.org} } \seealso{\code{\link[SparseM]{matrix.csr}}} \examples{ \dontrun{ library(methods) if (require(SparseM)) { data(iris) x <- as.matrix(iris[,1:4]) y <- iris[,5] xs <- as.matrix.csr(x) write.matrix.csr(xs, y = y, file = "iris.dat") xs2 <- read.matrix.csr("iris.dat")$x if (!all(as.matrix(xs) == as.matrix(xs2))) stop("Error: objects are not equal!") } } } \keyword{IO}% at least one, from doc/KEYWORDS e1071/man/rwiener.Rd0000755000175100001440000000075714173734134013565 0ustar hornikusers\name{rwiener} \alias{rwiener} \title{Simulation of Wiener Process} \usage{ rwiener(end = 1, frequency = 1000) } \arguments{ \item{end}{the time of the last observation.} \item{frequency}{the number of observations per unit of time.} } \description{ \code{rwiener} returns a time series containing a simulated realization of the Wiener process on the interval [0,\code{end}] } \examples{ # simulate a Wiener process on [0,1] and plot it x <- rwiener() plot(x,type="l") } \keyword{distribution} e1071/man/tune.control.Rd0000755000175100001440000000523014173734134014533 0ustar hornikusers\name{tune.control} \alias{tune.control} \title{Control Parameters for the Tune Function} \description{ Creates an object of class \code{tune.control} to be used with the \code{tune} function, containing various control parameters. } \usage{ tune.control(random = FALSE, nrepeat = 1, repeat.aggregate = mean, sampling = c("cross", "fix", "bootstrap"), sampling.aggregate = mean, sampling.dispersion = sd, cross = 10, fix = 2/3, nboot = 10, boot.size = 9/10, best.model = TRUE, performances = TRUE, error.fun = NULL) } \arguments{ \item{random}{if an integer value is specified, \code{random} parameter vectors are drawn from the parameter space.} \item{nrepeat}{specifies how often training shall be repeated.} \item{repeat.aggregate}{function for aggregating the repeated training results.} \item{sampling}{sampling scheme. If \code{sampling = "cross"}, a \code{cross}-times cross validation is performed. If \code{sampling = "boot"}, \code{nboot} training sets of size \code{boot.size} (part) are sampled (with replacement) from the supplied data. If \code{sampling = "fix"}, a single split into training/validation set is used, the training set containing a \code{fix} part of the supplied data. Note that a separate validation set can be supplied via \code{validation.x} and \code{validation.y}. 
It is only used for \code{sampling = "boot"} and \code{sampling = "fix"}; in the latter case, \code{fix} is set to 1.} \item{sampling.aggregate,sampling.dispersion}{functions for aggregating the training results on the generated training samples (default: mean and standard deviation).} \item{cross}{number of partitions for cross-validation.} \item{fix}{part of the data used for training in fixed sampling.} \item{nboot}{number of bootstrap replications.} \item{boot.size}{size of the bootstrap samples.} \item{best.model}{if \code{TRUE}, the best model is trained and returned (the best parameter set is used for training on the complete training set).} \item{performances}{if \code{TRUE}, the performance results for all parameter combinations are returned.} \item{error.fun}{function returning the error measure to be minimized. It takes two arguments: a vector of true values and a vector of predicted values. If \code{NULL}, the misclassification error is used for categorical predictions and the mean squared error for numeric predictions.} } \value{ An object of class \code{"tune.control"} containing all the above parameters (either the defaults or the user specified values). } \author{ David Meyer\cr \email{David.Meyer@R-project.org} } \seealso{\code{\link{tune}}} \keyword{models} e1071/man/kurtosis.Rd0000755000175100001440000000317014173734133013764 0ustar hornikusers\name{kurtosis} \alias{kurtosis} \title{Kurtosis} \description{ Computes the kurtosis. } \usage{ kurtosis(x, na.rm = FALSE, type = 3) } \arguments{ \item{x}{a numeric vector containing the values whose kurtosis is to be computed.} \item{na.rm}{a logical value indicating whether \code{NA} values should be stripped before the computation proceeds.} \item{type}{an integer between 1 and 3 selecting one of the algorithms for computing kurtosis detailed below.} } \details{ If \code{x} contains missings and these are not removed, the kurtosis is \code{NA}. Otherwise, write \eqn{x_i} for the non-missing elements of \code{x}, \eqn{n} for their number, \eqn{\mu}{mu} for their mean, \eqn{s} for their standard deviation, and \eqn{m_r = \sum_i (x_i - \mu)^r / n}{m_r = \sum_i (x_i - mu)^r / n} for the sample moments of order \eqn{r}. Joanes and Gill (1998) discuss three methods for estimating kurtosis: \describe{ \item{Type 1:}{ \eqn{g_2 = m_4 / m_2^2 - 3}. This is the typical definition used in many older textbooks.} \item{Type 2:}{ \eqn{G_2 = ((n+1) g_2 + 6) * (n-1) / ((n-2)(n-3))}. Used in SAS and SPSS. } \item{Type 3:}{ \eqn{b_2 = m_4 / s^4 - 3 = (g_2 + 3) (1 - 1/n)^2 - 3}. Used in MINITAB and BMDP.} } Only \eqn{G_2} (corresponding to \code{type = 2}) is unbiased under normality. } \value{ The estimated kurtosis of \code{x}. } \references{ D. N. Joanes and C. A. Gill (1998), Comparing measures of sample skewness and kurtosis. \emph{The Statistician}, \bold{47}, 183--189. 
} \examples{ x <- rnorm(100) kurtosis(x) } \keyword{univar} e1071/man/matchClasses.Rd0000755000175100001440000000667014173734134014524 0ustar hornikusers\name{matchClasses} \alias{matchClasses} \alias{compareMatchedClasses} \title{Find Similar Classes in Two-way Contingency Tables} \usage{ matchClasses(tab, method="rowmax", iter=1, maxexact=9, verbose=TRUE) compareMatchedClasses(x, y, method="rowmax", iter=1, maxexact=9, verbose=FALSE) } \arguments{ \item{tab}{Two-way contingency table of class memberships} \item{method}{One of \code{"rowmax"}, \code{"greedy"} or \code{"exact"}.} \item{iter}{Number of iterations used in greedy search.} \item{verbose}{If \code{TRUE}, display some status messages during computation.} \item{maxexact}{Maximum number of variables for which all possible permutations are computed.} \item{x, y}{Vectors or matrices with class memberships.} } \description{ Try to find a mapping between the two groupings, such that as many cases as possible are in one of the matched pairs. } \details{ If \code{method="rowmax"}, then each class defining a row in the contingency table is mapped to the column of the corresponding row maximum. Hence, some columns may be mapped to more than one row (while each row is mapped to a single column). If \code{method="greedy"} or \code{method="exact"}, then the contingency table must be a square matrix and a unique mapping is computed. This corresponds to a permutation of columns and rows, such that the sum of the main diagonal, i.e., the trace of the matrix, gets as large as possible. For both methods, first all pairs where row and column maxima correspond and are bigger than the sum of all other elements in the corresponding columns and rows together are located and fixed (this is a necessary condition for maximal trace). If \code{method="exact"}, then for the remaining rows and columns, all possible permutations are computed and the optimum is returned. This can get computationally infeasible very fast. If more than \code{maxexact} rows and columns remain after applying the necessary condition, then \code{method} is reset to \code{"greedy"}. If \code{method="greedy"}, then a greedy heuristic is tried \code{iter} times. Repeatedly a row is picked at random and matched to the free column with the maximum value. \code{compareMatchedClasses()} computes the contingency table for each combination of columns from \code{x} and \code{y} and applies \code{matchClasses} to that table. The columns of the table are permuted accordingly and then the table is passed to \code{\link{classAgreement}}. The resulting agreement coefficients (diag, kappa, \ldots) are returned. The return value of \code{compareMatchedClasses()} is a list containing a matrix for each coefficient, with element (k,l) corresponding to the k-th column of \code{x} and l-th column of \code{y}. If \code{y} is missing, then the columns of \code{x} are compared with each other.
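The \code{"rowmax"} mapping itself is conceptually nothing more than a row-wise maximum search over the contingency table, roughly as in this sketch: \preformatted{
map <- apply(tab, 1, which.max)   ## row i is matched to column map[i]
}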
} \author{Friedrich Leisch} \seealso{\code{\link{classAgreement}}} \examples{ ## a stupid example with no class correlations: g1 <- sample(1:5, size=1000, replace=TRUE) g2 <- sample(1:5, size=1000, replace=TRUE) tab <- table(g1, g2) matchClasses(tab, "exact") ## let pairs (g1=1,g2=4) and (g1=3,g2=1) agree better k <- sample(1:1000, size=200) g1[k] <- 1 g2[k] <- 4 k <- sample(1:1000, size=200) g1[k] <- 3 g2[k] <- 1 tab <- table(g1, g2) matchClasses(tab, "exact") ## get agreement coefficients: compareMatchedClasses(g1, g2, method="exact") } \keyword{category} e1071/man/hanning.window.Rd0000755000175100001440000000142314173734134015031 0ustar hornikusers\name{hanning.window} \title{Computes the Coefficients of a Hanning Window.} \usage{hanning.window(n)} \alias{hanning.window} \arguments{ \item{n}{The length of the window.} } \description{The filter coefficients \eqn{w_i}{w(i)} of a Hanning window of length \code{n} are computed according to the formula \deqn{w_i = 0.5 - 0.5 \cos\frac{2\pi i}{n-1}}{ w(i) = 0.5 - 0.5*cos(2*pi*i/(n-1))} } \value{A vector containing the filter coefficients.} \references{For a definition of the Hanning window, see for example\cr Alan V. Oppenheim and Roland W. Schafer: "Discrete-Time Signal Processing", Prentice-Hall, 1989.} \author{Andreas Weingessel} \seealso{stft, hamming.window} \examples{hanning.window(10) x<-rnorm(500) y<-stft(x, wtype="hanning.window") plot(y) } \keyword{ts} e1071/man/permutations.Rd0000755000175100001440000000054414173734134014636 0ustar hornikusers\name{permutations} \alias{permutations} \title{All Permutations of Integers 1:n} \description{ Returns a matrix containing all permutations of the integers \code{1:n} (one permutation per row). } \usage{ permutations(n) } \arguments{ \item{n}{Number of elements to permute.} } \author{Friedrich Leisch} \examples{ permutations(3) } \keyword{datagen} e1071/man/cshell.Rd0000644000175100001440000000660514246371156013361 0ustar hornikusers\name{cshell} \alias{cshell} \title{Fuzzy C-Shell Clustering} \usage{ cshell(x, centers, iter.max=100, verbose=FALSE, dist="euclidean", method="cshell", m=2, radius = NULL) } \arguments{ \item{x}{The data matrix, were columns correspond to the variables and rows to observations.} \item{centers}{Number of clusters or initial values for cluster centers} \item{iter.max}{Maximum number of iterations} \item{verbose}{If \code{TRUE}, make some output during learning} \item{dist}{Must be one of the following: If \code{"euclidean"}, the mean square error, if \code{"manhattan"}, the mean absolute error is computed. Abbreviations are also accepted.} \item{method}{Currently, only the \code{"cshell"} method; the c-shell fuzzy clustering method} \item{m}{The degree of fuzzification. It is defined for values greater than \emph{1}} \item{radius}{The radius of resulting clusters} } \description{ The \emph{c}-shell clustering algorithm, the shell prototype-based version (ring prototypes) of the fuzzy \emph{k}means clustering method. } \details{ The data given by \code{x} is clustered by the fuzzy \emph{c}-shell algorithm. If \code{centers} is a matrix, its rows are taken as the initial cluster centers. If \code{centers} is an integer, \code{centers} rows of \code{x} are randomly chosen as initial values. The algorithm stops when the maximum number of iterations (given by \code{iter.max}) is reached. If \code{verbose} is \code{TRUE}, it displays for each iteration the number and the value of the objective function.
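If reasonable starting centers are known, they can be passed as a matrix instead of a cluster count; a minimal sketch with hand-picked (purely illustrative) starting values, assuming \code{x} is the data matrix: \preformatted{
init <- rbind(c(0, 0), c(1, 1))   ## one row per initial cluster center
cl <- cshell(x, centers = init, iter.max = 20)
}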
If \code{dist} is \code{"euclidean"}, the distance between the cluster center and the data points is the Euclidean distance (ordinary kmeans algorithm). If \code{"manhattan"}, the distance between the cluster center and the data points is the sum of the absolute values of the distances of the coordinates. If \code{method} is \code{"cshell"}, then we have the \emph{c}-shell fuzzy clustering method. The parameters \code{m} defines the degree of fuzzification. It is defined for real values greater than 1 and the bigger it is the more fuzzy the membership values of the clustered data points are. The parameter \code{radius} is by default set to \emph{0.2} for every cluster. } \value{ \code{cshell} returns an object of class \code{"cshell"}. \item{centers}{The final cluster centers.} \item{size}{The number of data points in each cluster.} \item{cluster}{Vector containing the indices of the clusters where the data points are assigned to. The maximum membership value of a point is considered for partitioning it to a cluster.} \item{iter}{The number of iterations performed.} \item{membership}{a matrix with the membership values of the data points to the clusters.} \item{withinerror}{Returns the sum of square distances within the clusters.} \item{call}{Returns a call in which all of the arguments are specified by their names.} } \author{Evgenia Dimitriadou} \references{ Rajesh N. Dave. \emph{Fuzzy Shell-Clustering and Applications to Circle Detection in Digital Images.} Int. J. of General Systems, Vol. \bold{16}, pp. 343-355, 1996. } \examples{ ## a 2-dimensional example x <- rbind(matrix(rnorm(50, sd = 0.3), ncol = 2), matrix(rnorm(50, mean = 1, sd=0.3), ncol = 2)) cl <- cshell(x, 2, 20, verbose = TRUE, method = "cshell", m = 2) print(cl) } \keyword{cluster} e1071/man/scale_data_frame.Rd0000644000175100001440000000571614024346715015340 0ustar hornikusers\name{scale_data_frame} \alias{scale_data_frame} \title{Scaling and Centering of Data Frames} \description{ \code{scale_data_frame} centers and/or scales the columns of a data frame (or matrix). } \usage{ scale_data_frame(x, center = TRUE, scale = TRUE) } \arguments{ \item{x}{a data frame or a numeric matrix (or vector). For matrices or vectors, \code{scale()} is used.} \item{center}{either a logical value or numeric-alike vector of length equal to the number of columns of \code{x}, where \sQuote{numeric-alike} means that \code{\link{as.numeric}(.)} will be applied successfully if \code{\link{is.numeric}(.)} is not true.} \item{scale}{either a logical value or a numeric-alike vector of length equal to the number of columns of \code{x}.} } \value{ For \code{scale.default}, the centered, scaled data frame. Non-numeric columns are ignored. Note that logicals are treated as 0/1-numerics to be consistent with \code{scale()}. The numeric centering and scalings used (if any) are returned as attributes \code{"scaled:center"} and \code{"scaled:scale"} - but only for the numeric/logical columns. } \details{ The value of \code{center} determines how column centering is performed. If \code{center} is a numeric-alike vector with length equal to the number of numeric/logical columns of \code{x}, then each column of \code{x} has the corresponding value from \code{center} subtracted from it. If \code{center} is \code{TRUE} then centering is done by subtracting the column means (omitting \code{NA}s) of \code{x} from their corresponding columns, and if \code{center} is \code{FALSE}, no centering is done. 
The value of \code{scale} determines how column scaling is performed (after centering). If \code{scale} is a numeric-alike vector with length equal to the number of numeric/logical columns of \code{x}, then each column of \code{x} is divided by the corresponding value from \code{scale}. If \code{scale} is \code{TRUE} then scaling is done by dividing the (centered) columns of \code{x} by their standard deviations if \code{center} is \code{TRUE}, and the root mean square otherwise. If \code{scale} is \code{FALSE}, no scaling is done. The root-mean-square for a (possibly centered) column is defined as \eqn{\sqrt{\sum(x^2)/(n-1)}}{sqrt(sum(x^2)/(n-1))}, where \eqn{x} is a vector of the non-missing values and \eqn{n} is the number of non-missing values. In the case \code{center = TRUE}, this is the same as the standard deviation, but in general it is not. (To scale by the standard deviations without centering, use \code{scale(x, center = FALSE, scale = apply(x, 2, sd, na.rm = TRUE))}.) } \references{ Becker, R. A., Chambers, J. M. and Wilks, A. R. (1988) \emph{The New S Language}. Wadsworth & Brooks/Cole. } \seealso{ \code{\link{sweep}} which allows centering (and scaling) with arbitrary statistics. } \examples{ require(stats) data(iris) summary(scale_data_frame(iris)) } \keyword{array} e1071/man/shortestPaths.Rd0000755000175100001440000000422014173734134014752 0ustar hornikusers\name{allShortestPaths} \alias{allShortestPaths} \alias{extractPath} \title{Find Shortest Paths Between All Nodes in a Directed Graph} \description{ \code{allShortestPaths} finds all shortest paths in a directed (or undirected) graph using Floyd's algorithm. \code{extractPath} can be used to actually extract the path between a given pair of nodes. } \usage{ allShortestPaths(x) extractPath(obj, start, end) } \arguments{ \item{x}{matrix or distance object} \item{obj}{return value of \code{allShortestPaths}} \item{start}{integer, starting point of path} \item{end}{integer, end point of path} } \details{ If \code{x} is a matrix, then \code{x[i,j]} has to be the length of the direct path from point \code{i} to point \code{j}. If no direct connection from point \code{i} to point \code{j} exist, then \code{x[i,j]} should be either \code{NA} or \code{Inf}. Note that the graph can be directed, hence \code{x[i,j]} need not be the same as \code{x[j,i]}. The main diagonal of \code{x} is ignored. Alternatively, \code{x} can be a distance object as returned by \code{\link{dist}} (corresponding to an undirected graph). } \value{ \code{allShortestPaths} returns a list with components \item{length}{A matrix with the total lengths of the shortest path between each pair of points.} \item{middlePoints}{A matrix giving a point in the middle of each shortest path (or 0 if the direct connection is the shortest path); this is mainly used as input for \code{extractPath}.} \code{extractPath} returns a vector of node numbers giving the shortest path between two points. } \references{Kumar, V., Grama, A., Gupta, A. and Karypis, G.
Introduction to Parallel Computing: Design and Analysis of Algorithms, Benjamin Cummings Publishing, 1994, ISBN 0-8053-3170-0} \author{Friedrich Leisch} \examples{ ## build a graph with 5 nodes x <- matrix(NA, 5, 5) diag(x) <- 0 x[1,2] <- 30; x[1,3] <- 10 x[2,4] <- 70; x[2,5] <- 40 x[3,4] <- 50; x[3,5] <- 20 x[4,5] <- 60 x[5,4] <- 10 print(x) ## compute all path lengths z <- allShortestPaths(x) print(z) ## the following should give 1 -> 3 -> 5 -> 4 extractPath(z, 1, 4) } \keyword{optimize} e1071/man/bclust.Rd0000644000175100001440000001160615120307527013371 0ustar hornikusers\name{bclust} \alias{bclust} \alias{hclust.bclust} \alias{plot.bclust} \alias{centers.bclust} \alias{clusters.bclust} \title{Bagged Clustering} \usage{ bclust(x, centers=2, iter.base=10, minsize=0, dist.method="euclidean", hclust.method="average", base.method="kmeans", base.centers=20, verbose=TRUE, final.kmeans=FALSE, docmdscale=FALSE, resample=TRUE, weights=NULL, maxcluster=base.centers, ...) hclust.bclust(object, x, centers, dist.method=object$dist.method, hclust.method=object$hclust.method, final.kmeans=FALSE, docmdscale = FALSE, maxcluster=object$maxcluster) \method{plot}{bclust}(x, maxcluster=x$maxcluster, main, ...) centers.bclust(object, k) clusters.bclust(object, k, x=NULL) } \arguments{ \item{x}{Matrix of inputs (or object of class \code{"bclust"} for plot).} \item{centers, k}{Number of clusters.} \item{iter.base}{Number of runs of the base cluster algorithm.} \item{minsize}{Minimum number of points in a base cluster.} \item{dist.method}{Distance method used for the hierarchical clustering, see \code{\link{dist}} for available distances.} \item{hclust.method}{Linkage method used for the hierarchical clustering, see \code{\link{hclust}} for available methods.} \item{base.method}{Partitioning cluster method used as base algorithm.} \item{base.centers}{Number of centers used in each repetition of the base method.} \item{verbose}{Output status messages.} \item{final.kmeans}{If \code{TRUE}, a final kmeans step is performed using the output of the bagged clustering as initialization.} \item{docmdscale}{Logical, if \code{TRUE} a \code{\link{cmdscale}} result is included in the return value.} \item{resample}{Logical, if \code{TRUE} the base method is run on bootstrap samples of \code{x}, else directly on \code{x}.} \item{weights}{Vector of length \code{nrow(x)}, weights for the resampling. By default all observations have equal weight.} \item{maxcluster}{Maximum number of clusters memberships are to be computed for.} \item{object}{Object of class \code{"bclust"}.} \item{main}{Main title of the plot.} \item{\dots}{Optional arguments to be passed to the base method in \code{bclust}, ignored in \code{plot}.} } \description{ Cluster the data in \code{x} using the bagged clustering algorithm. A partitioning cluster algorithm such as \code{\link{kmeans}} is run repeatedly on bootstrap samples from the original data. The resulting cluster centers are then combined using the hierarchical cluster algorithm \code{\link{hclust}}. } \details{ First, \code{iter.base} bootstrap samples of the original data in \code{x} are created by drawing with replacement. The base cluster method is run on each of these samples with \code{base.centers} centers. The \code{base.method} must be the name of a partitioning cluster function returning a list with the same components as the return value of \code{\link{kmeans}}, e.g., along the lines of the sketch below.
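For instance, a user-defined base method can be a thin wrapper around \code{kmeans}; the following sketch (with a hypothetical wrapper \code{kmeans_jitter} that perturbs the data before each base run) illustrates the required interface: \preformatted{
data(iris)
kmeans_jitter <- function(x, centers, ...)
    kmeans(jitter(as.matrix(x)), centers, ...)
bc <- bclust(iris[, 1:4], 3, base.method = "kmeans_jitter")
}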
This results in a collection of \code{iter.base * base.centers} centers, which are subsequently clustered using the hierarchical method \code{\link{hclust}}. Base centers with fewer than \code{minsize} points in their respective partitions are removed before the hierarchical clustering. The resulting dendrogram is then cut to produce \code{centers} clusters. Hence, the name of the argument \code{centers} is a little bit misleading as the resulting clusters need not be convex, e.g., when single linkage is used. The name was chosen for compatibility with standard partitioning cluster methods such as \code{\link{kmeans}}. A new hierarchical clustering (e.g., using another \code{hclust.method}) re-using previous base runs can be performed by running \code{hclust.bclust} on the return value of \code{bclust}. } \value{ \code{bclust} and \code{hclust.bclust} return objects of class \code{"bclust"} including the components \item{hclust}{Return value of the hierarchical clustering of the collection of base centers (Object of class \code{"hclust"}).} \item{cluster}{Vector with indices of the clusters the inputs are assigned to.} \item{centers}{Matrix of centers of the final clusters. Only useful if the hierarchical clustering method produces convex clusters.} \item{allcenters}{Matrix of all \code{iter.base * base.centers} centers found in the base runs.} } \author{Friedrich Leisch} \references{ Friedrich Leisch. Bagged clustering. Working Paper 51, SFB ``Adaptive Information Systems and Modeling in Economics and Management Science'', August 1999. \doi{10.57938/9b129f95-b53b-44ce-a129-5b7a1168d832} } \seealso{\code{\link{hclust}}, \code{\link{kmeans}}, \code{\link{boxplot.bclust}}} \keyword{multivariate} \keyword{cluster} \examples{ data(iris) bc1 <- bclust(iris[,1:4], 3, base.centers=5) plot(bc1) table(clusters.bclust(bc1, 3)) centers.bclust(bc1, 3) } e1071/man/plot.tune.Rd0000755000175100001440000000375214173734134014036 0ustar hornikusers\name{plot.tune} \alias{plot.tune} %- Also NEED an `\alias' for EACH other topic documented here. \title{Plot Tuning Object} \description{ Visualizes the results of parameter tuning. } \usage{ \method{plot}{tune}(x, type = c("contour", "perspective"), theta = 60, col = "lightblue", main = NULL, xlab = NULL, ylab = NULL, swapxy = FALSE, transform.x = NULL, transform.y = NULL, transform.z = NULL, color.palette = hsv_palette(), nlevels = 20, ...) } %- maybe also `usage' for other objects documented here. \arguments{ \item{x}{an object of class \code{tune}} \item{type}{choose whether a contour plot or a perspective plot is used if two parameters are to be visualized. Ignored if only one parameter has been tuned.} \item{theta}{angle of azimuthal direction.} \item{col}{the color(s) of the surface facets. Transparent colors are ignored.} \item{main}{main title} \item{xlab, ylab}{titles for the axes. N.B. These must be character strings; expressions are not accepted. Numbers will be coerced to character strings.} \item{swapxy}{if \code{TRUE}, the parameter axes are swapped (only used in case of two parameters).} \item{transform.x, transform.y, transform.z}{functions to transform the parameters (\code{x} and \code{y}) and the error measures (\code{z}).
Ignored if \code{NULL}.} \item{color.palette}{color palette used in contour plot.} \item{nlevels}{number of levels used in contour plot.} \item{\dots}{Further graphics parameters.} } \author{ David Meyer (based on C/C++-code by Chih-Chung Chang and Chih-Jen Lin)\cr \email{David.Meyer@R-project.org} } \seealso{\code{\link{tune}}} \examples{ data(iris) obj <- tune.svm(Species~., data = iris, sampling = "fix", gamma = 2^c(-8,-4,0,4), cost = 2^c(-8,-4,-2,0)) plot(obj, transform.x = log2, transform.y = log2) plot(obj, type = "perspective", theta = 120, phi = 45) } \keyword{models}% __ONLY ONE__ keyword per line e1071/man/bincombinations.Rd0000755000175100001440000000052314173734133015256 0ustar hornikusers\name{bincombinations} \title{Binary Combinations} \usage{ bincombinations(p) } \alias{bincombinations} \arguments{ \item{p}{Length of binary vectors} } \description{ Returns a matrix containing the \eqn{2^p} binary vectors of length \code{p}. } \author{Friedrich Leisch} \examples{ bincombinations(2) bincombinations(3) } \keyword{utilities} e1071/man/hsv_palette.Rd0000755000175100001440000000143514173734134014422 0ustar hornikusers\name{hsv_palette} \alias{hsv_palette} \title{Sequential color palette based on HSV colors} \description{ Computes a sequential color palette based on HSV colors by varying the saturation, given hue and value. } \usage{ hsv_palette(h = 2/3, from = 0.7, to = 0.2, v = 1) } %- maybe also 'usage' for other objects documented here. \arguments{ \item{h}{hue} \item{from}{saturation value at the start of the palette} \item{to}{saturation value at the end of the palette} \item{v}{value} } \value{ A function with one argument: the size of the palette, i.e., the number of colors. } \author{David Meyer \email{David.Meyer@R-project.org}} \seealso{\code{\link[grDevices]{hsv}}} \examples{ pie(rep(1, 10), col = hsv_palette()(10)) pie(rep(1, 10), col = hsv_palette(h = 0)(10)) } \keyword{hplot}% __ONLY ONE__ keyword per line e1071/man/countpattern.Rd0000755000175100001440000000202414173734134014625 0ustar hornikusers\name{countpattern} \alias{countpattern} \title{Count Binary Patterns} \usage{ countpattern(x, matching=FALSE) } \arguments{ \item{x}{A matrix of binary observations} \item{matching}{If \code{TRUE}, an additional vector is returned storing which row belongs to which pattern} } \description{ Every row of the binary matrix \code{x} is transformed into a binary pattern and these patterns are counted. } \value{ A vector of length \code{2\^ncol(x)} giving the number of times each pattern occurs in the rows of \code{x}. The names of this vector are the binary patterns. They are sorted according to their numeric value. If \code{matching} is \code{TRUE}, a list of the following two vectors is returned. \item{pat}{Numbers of patterns as described above.} \item{matching}{Vector giving the position of the pattern of each row of \code{x} in \code{pat}.} } \author{Andreas Weingessel} \examples{ xx <- rbind(c(1,0,0),c(1,0,0),c(1,0,1),c(0,1,1),c(0,1,1)) countpattern(xx) countpattern(xx, matching=TRUE) } \keyword{multivariate} e1071/man/bootstrap.lca.Rd0000644000175100001440000000523314246371156014656 0ustar hornikusers\name{bootstrap.lca} \alias{bootstrap.lca} \alias{print.bootstrap.lca} \title{Bootstrap Samples of LCA Results} \description{ This function draws bootstrap samples from a given LCA model and refits a new LCA model for each sample. The quality of fit of these models is compared to the original model.
} \usage{ bootstrap.lca(l, nsamples=10, lcaiter=30, verbose=FALSE) } \arguments{ \item{l}{An LCA model as created by \code{\link{lca}}} \item{nsamples}{Number of bootstrap samples} \item{lcaiter}{Number of LCA iterations} \item{verbose}{If \code{TRUE} some output is printed during the computations.} } \details{ From a given LCA model \code{l}, \code{nsamples} bootstrap samples are drawn. For each sample a new LCA model is fitted. The goodness of fit for each model is computed via Likelihood Ratio and Pearson's Chisquare. The values for the fitted models are compared with the values of the original model \code{l}. By this method it can be tested whether the data to which \code{l} was originally fitted come from an LCA model. } \value{ An object of class \code{bootstrap.lca} is returned, containing \item{logl, loglsat}{The LogLikelihood of the models and of the corresponding saturated models} \item{lratio}{Likelihood quotient of the models and the corresponding saturated models} \item{lratiomean, lratiosd}{Mean and Standard deviation of \code{lratio}} \item{lratioorg}{Likelihood quotient of the original model and the corresponding saturated model} \item{zratio}{Z-Statistics of \code{lratioorg}} \item{pvalzratio, pvalratio}{P-Values for \code{zratio}, computed via normal distribution and empirical distribution} \item{chisq}{Pearson's Chisq of the models} \item{chisqmean, chisqsd}{Mean and Standard deviation of \code{chisq}} \item{chisqorg}{Pearson's Chisq of the original model} \item{zchisq}{Z-Statistics of \code{chisqorg}} \item{pvalzchisq, pvalchisq}{P-Values for \code{zchisq}, computed via normal distribution and empirical distribution} \item{nsamples}{Number of bootstrap samples} \item{lcaiter}{Number of LCA Iterations} } \references{Anton K. Formann: ``Die Latent-Class-Analysis'', Beltz Verlag 1984} \author{Andreas Weingessel} \seealso{\code{\link{lca}}} \examples{ ## Generate a 4-dim. sample with 2 latent classes of 500 data points each. ## The probabilities for the 2 classes are given by type1 and type2. type1 <- c(0.8, 0.8, 0.2, 0.2) type2 <- c(0.2, 0.2, 0.8, 0.8) x <- matrix(runif(4000), nrow = 1000) x[1:500,] <- t(t(x[1:500,]) < type1) * 1 x[501:1000,] <- t(t(x[501:1000,]) < type2) * 1 l <- lca(x, 2, niter=5) bl <- bootstrap.lca(l,nsamples=3,lcaiter=5) bl } \keyword{multivariate} e1071/man/interpolate.Rd0000755000175100001440000000214214173734134014426 0ustar hornikusers\name{interpolate} \title{Interpolate Values of Array} \usage{ interpolate(x, a, adims=lapply(dimnames(a), as.numeric), method="linear") } \alias{interpolate} \arguments{ \item{x}{Matrix of values at which interpolation shall take place.} \item{a}{Array of arbitrary dimension.} \item{adims}{List of the same structure as \code{dimnames(a)}.} \item{method}{Interpolation method, one of \code{"linear"} or \code{"constant"}.} } \description{ For each row in matrix \code{x}, the hypercube of \code{a} containing this point is searched. The corners of the hypercube are linearly interpolated. By default, \code{dimnames(a)} is taken to contain the coordinate values for each point in \code{a}. This can be overridden using \code{adims}. If \code{method=="constant"}, the value of the ``lower left'' corner of the hypercube is returned. 
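For example, with \code{method = "constant"} the returned value is simply the array entry at that corner; a small sketch (the expected result is stated as a comment, not a guarantee): \preformatted{
x <- seq(0, 3, 0.2)
z <- outer(x, x, function(x, y) sin(x * y))
dimnames(z) <- list(x, x)
interpolate(c(1.1, 2.1), z, method = "constant")
## the lower left corner of the enclosing cell is (1, 2),
## so this should equal z["1", "2"], i.e., sin(1 * 2)
}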
}
\author{Friedrich Leisch}
\seealso{\code{\link{approx}}, \code{\link{spline}}}
\examples{
x <- seq(0,3,0.2)
z <- outer(x,x, function(x,y) sin(x*y))
dimnames(z) <- list(x,x)
sin(1.1*2.1)
interpolate(c(1.1, 2.1), z)
}
\keyword{arith}
\keyword{multivariate}
e1071/man/hamming.window.Rd0000755000175100001440000000142714173734134015033 0ustar hornikusers\name{hamming.window}
\title{Computes the Coefficients of a Hamming Window.}
\usage{hamming.window(n)}
\alias{hamming.window}
\arguments{
\item{n}{The length of the window.}
}
\description{The filter coefficients \eqn{w_i}{w(i)} of a Hamming window of length \code{n} are computed according to the formula
\deqn{w_i = 0.54 - 0.46 \cos\frac{2\pi i}{n-1}}{
w(i) = 0.54 - 0.46*cos(2*pi*i/(n-1))}
}
\value{A vector containing the filter coefficients.}
\references{For a definition of the Hamming window, see for example\cr
Alan V. Oppenheim and Roland W. Schafer: "Discrete-Time Signal Processing", Prentice-Hall, 1989.}
\author{Andreas Weingessel}
\seealso{\code{\link{stft}}, \code{\link{hanning.window}}}
\examples{hamming.window(10)
x <- rnorm(500)
y <- stft(x, wtype="hamming.window")
plot(y)
}
\keyword{ts}
e1071/man/moment.Rd0000755000175100001440000000231214173734134013376 0ustar hornikusers\name{moment}
\alias{moment}
\title{Statistical Moment}
\description{
Computes the (optionally centered and/or absolute) sample moment of a certain order.
}
\usage{
moment(x, order=1, center=FALSE, absolute=FALSE, na.rm=FALSE)
}
\arguments{
\item{x}{a numeric vector containing the values whose moment is to be computed.}
\item{order}{order of the moment to be computed, the default is to compute the first moment, i.e., the mean.}
\item{center}{a logical value indicating whether centered moments are to be computed.}
\item{absolute}{a logical value indicating whether absolute moments are to be computed.}
\item{na.rm}{a logical value indicating whether \code{NA} values should be stripped before the computation proceeds.}
}
\details{
When \code{center} and \code{absolute} are both \code{FALSE}, the moment is simply \code{sum(x ^ order) / length(x)}.
}
\author{Kurt Hornik and Friedrich Leisch}
\seealso{
\code{\link{mean}},
\code{\link{var}}}
\examples{
x <- rnorm(100)
## Compute the mean
moment(x)
## Compute the 2nd centered moment (!= var)
moment(x, order=2, center=TRUE)
## Compute the 3rd absolute centered moment
moment(x, order=3, center=TRUE, absolute=TRUE)
}
\keyword{univar}
e1071/man/cmeans.Rd0000755000175100001440000001206714173734134013355 0ustar hornikusers\name{cmeans}
\alias{cmeans}
\alias{print.fclust}
\title{Fuzzy C-Means Clustering}
\description{
The fuzzy version of the well-known \emph{k}-means clustering algorithm as well as an on-line variant (Unsupervised Fuzzy Competitive Learning).
}
\usage{
cmeans(x, centers, iter.max = 100, verbose = FALSE,
       dist = "euclidean", method = "cmeans", m = 2,
       rate.par = NULL, weights = 1, control = list())
}
\arguments{
\item{x}{The data matrix where columns correspond to variables and rows to observations.}
\item{centers}{Number of clusters or initial values for cluster centers.}
\item{iter.max}{Maximum number of iterations.}
\item{verbose}{If \code{TRUE}, print some output during learning.}
\item{dist}{Must be one of the following: If \code{"euclidean"}, the mean square error, if \code{"manhattan"}, the mean absolute error is computed. Abbreviations are also accepted.}
\item{method}{If \code{"cmeans"}, then we have the \eqn{c}-means fuzzy clustering method, if \code{"ufcl"} we have the on-line update.
Abbreviations are also accepted.}
\item{m}{A number greater than 1 giving the degree of fuzzification.}
\item{rate.par}{A number between 0 and 1 giving the parameter of the learning rate for the on-line variant. The default corresponds to \eqn{0.3}.}
\item{weights}{a numeric vector with non-negative case weights. Recycled to the number of observations in \code{x} if necessary.}
\item{control}{a list of control parameters. See \bold{Details}.}
}
\details{
The data given by \code{x} is clustered by generalized versions of the fuzzy \emph{c}-means algorithm, which use either a fixed-point or an on-line heuristic for minimizing the objective function
\deqn{\sum_i \sum_j w_i u_{ij}^m d_{ij},}
where \eqn{w_i} is the weight of observation \eqn{i}, \eqn{u_{ij}} is the membership of observation \eqn{i} in cluster \eqn{j}, and \eqn{d_{ij}} is the distance (dissimilarity) between observation \eqn{i} and center \eqn{j}. The dissimilarities used are the sums of squares (\code{"euclidean"}) or absolute values (\code{"manhattan"}) of the element-wise differences.
If \code{centers} is a matrix, its rows are taken as the initial cluster centers. If \code{centers} is an integer, \code{centers} rows of \code{x} are randomly chosen as initial values.
The algorithm stops when the maximum number of iterations (given by \code{iter.max}) is reached, or when the algorithm is unable to reduce the current value \code{val} of the objective function by \code{reltol * (abs(val) + reltol)} at a step. The relative convergence tolerance \code{reltol} can be specified as the \code{reltol} component of the list of control parameters, and defaults to \code{sqrt(.Machine$double.eps)}.
If \code{verbose} is \code{TRUE}, each iteration displays its number and the value of the objective function.
If \code{method} is \code{"cmeans"}, then we have the \eqn{c}-means fuzzy clustering method, see for example Bezdek (1981). If \code{"ufcl"}, we have the on-line update (Unsupervised Fuzzy Competitive Learning) method due to Chung and Lee (1992), see also Pal et al. (1996). This method works by performing an update directly after each input signal (i.e., for each single observation).
The parameter \code{m} defines the degree of fuzzification. It is defined for real values greater than 1, and the larger it is, the fuzzier the membership values of the clustered data points become.
}
\value{
An object of class \code{"fclust"} which is a list with components:
\item{centers}{the final cluster centers.}
\item{size}{the number of data points in each cluster of the closest hard clustering.}
\item{cluster}{a vector of integers containing the indices of the clusters where the data points are assigned to for the closest hard clustering, as obtained by assigning points to the (first) class with maximal membership.}
\item{iter}{the number of iterations performed.}
\item{membership}{a matrix with the membership values of the data points to the clusters.}
\item{withinerror}{the value of the objective function (see the sketch in the \bold{Note} below).}
\item{call}{the call used to create the object.}
}
\author{
Evgenia Dimitriadou and Kurt Hornik
}
\references{
J. C. Bezdek (1981). \emph{Pattern recognition with fuzzy objective function algorithms}. New York: Plenum.
Fu Lai Chung and Tong Lee (1992). Fuzzy competitive learning. \emph{Neural Networks}, \bold{7}(3), 539--551.
Nikhil R. Pal, James C. Bezdek, and Richard J. Hathaway (1996). Sequential competitive learning and the fuzzy c-means clustering algorithms. \emph{Neural Networks}, \bold{9}(5), 787--796.
}
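\note{
As a quick plausibility check, \code{withinerror} can be recomputed from the returned components. The following is a minimal sketch, assuming the default Euclidean distance, unit weights, and \code{m = 2} (the variable names are ours):
\preformatted{
x <- rbind(matrix(rnorm(100, sd = 0.3), ncol = 2),
           matrix(rnorm(100, mean = 1, sd = 0.3), ncol = 2))
cl <- cmeans(x, 2)
## squared Euclidean distances between observations and centers
d2 <- as.matrix(dist(rbind(cl$centers, x)))[-(1:2), 1:2]^2
## objective function: sum over i, j of u_ij^m * d_ij
all.equal(sum(cl$membership^2 * d2), cl$withinerror)
}
}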
\examples{
# a 2-dimensional example
x <- rbind(matrix(rnorm(100, sd = 0.3), ncol = 2),
           matrix(rnorm(100, mean = 1, sd = 0.3), ncol = 2))
cl <- cmeans(x, 2, 20, verbose = TRUE, method = "cmeans", m = 2)
print(cl)
# a 3-dimensional example
x <- rbind(matrix(rnorm(150, sd = 0.3), ncol = 3),
           matrix(rnorm(150, mean = 1, sd = 0.3), ncol = 3),
           matrix(rnorm(150, mean = 2, sd = 0.3), ncol = 3))
cl <- cmeans(x, 6, 20, verbose = TRUE, method = "cmeans")
print(cl)
}
\keyword{cluster}
e1071/man/boxplot.bclust.Rd0000755000175100001440000000214514173734133015064 0ustar hornikusers\name{boxplot.bclust}
\alias{boxplot.bclust}
\title{Boxplot of Cluster Profiles}
\usage{
\method{boxplot}{bclust}(x, n=nrow(x$centers), bycluster=TRUE,
        main=deparse(substitute(x)), oneplot=TRUE, which=1:n, ...)
}
\arguments{
\item{x}{Clustering result, object of class \code{"bclust"}.}%
\item{n}{Number of clusters to plot, by default the number of clusters used in the call of \code{\link{bclust}}.}
\item{bycluster}{If \code{TRUE} (default), a boxplot for each cluster is plotted. If \code{FALSE}, a boxplot for each variable is plotted.}
\item{main}{Main title of the plot, by default the name of the cluster object.}
\item{oneplot}{If \code{TRUE}, all boxplots appear on one screen (using an appropriate rectangular layout).}
\item{which}{Numbers of the clusters which should be plotted, default is all clusters.}
\item{...}{Additional arguments for \code{\link{boxplot}}.}
}
\description{
Makes boxplots of the results of a bagged clustering run.
}
\author{Friedrich Leisch}
\keyword{hplot}
\examples{
data(iris)
bc1 <- bclust(iris[,1:4], 3, base.centers=5)
boxplot(bc1)
}
e1071/man/classAgreement.Rd0000755000175100001440000000610414173734133015036 0ustar hornikusers\name{classAgreement}
\alias{classAgreement}
\title{Coefficients Comparing Classification Agreement}
\description{
\code{classAgreement()} computes several coefficients of agreement between the columns and rows of a 2-way contingency table.
}
\usage{
classAgreement(tab, match.names=FALSE)
}
\arguments{
\item{tab}{A 2-dimensional contingency table.}
\item{match.names}{Flag whether row and columns should be matched by name.}
}
\details{
Suppose we want to compare two classifications summarized by the contingency table \eqn{T=[t_{ij}]} where \eqn{i,j=1,\ldots,K} and \eqn{t_{ij}} denotes the number of data points which are in class \eqn{i} in the first partition and in class \eqn{j} in the second partition. If both classifications use the same labels, then obviously the two classifications agree completely if only elements in the main diagonal of the table are non-zero. On the other hand, large off-diagonal elements correspond to smaller agreement between the two classifications. If \code{match.names} is \code{TRUE}, the class labels as given by the row and column names are matched, i.e., only columns and rows with the same dimnames are used for the computation.
If the two classifications do not use the same set of labels, or if identical labels can have different meaning (e.g., two outcomes of cluster analysis on the same data set), then the situation is a little bit more complicated. Let \eqn{A} denote the number of all pairs of data points which are either put into the same cluster by both partitions or put into different clusters by both partitions. Conversely, let \eqn{D} denote the number of all pairs of data points that are put into one cluster in one partition, but into different clusters by the other partition. Hence, the partitions disagree for all pairs \eqn{D} and agree for all pairs \eqn{A}.
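Writing \eqn{r_i} and \eqn{c_j} for the row and column sums of \eqn{T} and \eqn{n} for the total number of data points, these pair counts can be obtained directly from the table; the following standard identity is added here for reference:
\deqn{D = \sum_i {r_i \choose 2} + \sum_j {c_j \choose 2} - 2 \sum_{i,j} {t_{ij} \choose 2}, \qquad A = {n \choose 2} - D.}{D = sum_i choose(r_i, 2) + sum_j choose(c_j, 2) - 2 sum_ij choose(t_ij, 2),  A = choose(n, 2) - D.}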
We can measure the agreement by the Rand index \eqn{A/(A+D)}, which is invariant with respect to permutations of the columns or rows of \eqn{T}. Both indices (the diagonal proportion and the Rand index) have to be corrected for agreement by chance if the sizes of the classes are not uniform.
}
\value{
A list with components
\item{diag}{Percentage of data points in the main diagonal of \code{tab}.}
\item{kappa}{\code{diag} corrected for agreement by chance.}
\item{rand}{Rand index.}
\item{crand}{Rand index corrected for agreement by chance.}
}
\references{
J. Cohen. A coefficient of agreement for nominal scales. \emph{Educational and Psychological Measurement}, 20, 37--46, 1960.
Lawrence Hubert and Phipps Arabie. Comparing partitions. \emph{Journal of Classification}, 2, 193--218, 1985.
}
\author{Friedrich Leisch}
\seealso{\code{\link{matchClasses}}}
\examples{
## no class correlations: both kappa and crand almost zero
g1 <- sample(1:5, size=1000, replace=TRUE)
g2 <- sample(1:5, size=1000, replace=TRUE)
tab <- table(g1, g2)
classAgreement(tab)
## let pairs (g1=1,g2=1) and (g1=3,g2=3) agree better
k <- sample(1:1000, size=200)
g1[k] <- 1
g2[k] <- 1
k <- sample(1:1000, size=200)
g1[k] <- 3
g2[k] <- 3
tab <- table(g1, g2)
## both kappa and crand should be significantly larger than before
classAgreement(tab)
}
\keyword{category}
e1071/man/rectangle.window.Rd0000755000175100001440000000106514173734134015355 0ustar hornikusers\name{rectangle.window}
\title{Computes the Coefficients of a Rectangle Window.}
\usage{rectangle.window(n)}
\alias{rectangle.window}
\arguments{
\item{n}{The length of the window.}
}
\description{Returns the filter coefficients of a rectangle window, that is, a vector of \code{n} ones. The purpose of this function is just to have a name for the R command \code{rep(1, n)}.
}
\value{A vector of length \code{n} filled with 1.}
\author{Andreas Weingessel}
\seealso{\code{\link{stft}}}
\examples{x <- rnorm(500)
y <- stft(x, wtype="rectangle.window")
plot(y)
}
\keyword{ts}
e1071/DESCRIPTION0000644000175100001440000000362315120776073012547 0ustar hornikusersPackage: e1071
Version: 1.7-17
Title: Misc Functions of the Department of Statistics, Probability Theory Group (Formerly: E1071), TU Wien
Imports: graphics, grDevices, class, stats, methods, utils, proxy
Suggests: cluster, mlbench, nnet, randomForest, rpart, SparseM, xtable, Matrix, MASS, slam
Authors@R: c(person(given = "David", family = "Meyer", role = c("aut", "cre"), email = "David.Meyer@R-project.org", comment = c(ORCID = "0000-0002-5196-3048")),
  person(given = "Evgenia", family = "Dimitriadou", role = c("aut","cph")),
  person(given = "Kurt", family = "Hornik", role = "aut", email = "Kurt.Hornik@R-project.org", comment = c(ORCID = "0000-0003-4198-9911")),
  person(given = "Andreas", family = "Weingessel", role = "aut"),
  person(given = "Friedrich", family = "Leisch", role = "aut"),
  person(given = "Chih-Chung", family = "Chang", role = c("ctb","cph"), comment = "libsvm C++-code"),
  person(given = "Chih-Chen", family = "Lin", role = c("ctb","cph"), comment = "libsvm C++-code"))
Description: Functions for latent class analysis, short time Fourier transform, fuzzy clustering, support vector machines, shortest path computation, bagged clustering, naive Bayes classifier, generalized k-nearest neighbour ...
License: GPL-2 | GPL-3
LazyLoad: yes
NeedsCompilation: yes
Packaged: 2025-12-17 20:18:24 UTC; meyer
Author: David Meyer [aut, cre] (ORCID: <https://orcid.org/0000-0002-5196-3048>), Evgenia Dimitriadou [aut, cph], Kurt Hornik [aut] (ORCID: <https://orcid.org/0000-0003-4198-9911>), Andreas Weingessel [aut], Friedrich Leisch [aut], Chih-Chung Chang [ctb, cph] (libsvm C++-code), Chih-Chen Lin [ctb, cph] (libsvm C++-code)
Maintainer: David Meyer <David.Meyer@R-project.org>
Repository: CRAN
Date/Publication: 2025-12-18 13:06:03 UTC