nFactors/0000755000176200001440000000000015025060212012022 5ustar liggesusersnFactors/MD50000644000176200001440000000675115025060212012343 0ustar liggesusers0ccd5b4b9db5c32612a12e710677f9f9 *DESCRIPTION df390c53434517b304ac5db487184641 *NAMESPACE fed27c7691a76a0cf3a28331ea3551e6 *NEWS 81051bcc2cf1bedf378224b0a93e2877 *R/aphabetical.R d2aaf1a5fd14d40683b7ffeb0042af5d *R/bentlerParameters.r 427cb2a2c19a41c5f7fca47d4990486e *R/componentAxis.r 125250b81d305d8cf3855948cab17d6c *R/corFA.r 2bcac2e439df3611570c4966caffe5d9 *R/data.R 7c2bb0d5e1f60256472b6c4d59074aba *R/diagReplace.r df265ec038bd100ba534535461247726 *R/eigenBootParallel.r 384a228b025ad0aa851c04c839206377 *R/eigenComputes.r c0eb4fb420a079aa5dd20290e7e4d541 *R/eigenFrom.r 4cd9d9f0779edcc96ad2b6b652bfd219 *R/generateStructure.r 1a5cc2f0a8c3da31b1baf020762ff92f *R/iterativePrincipalAxis.r 9728b13ca84016815d6cdcf5b2a477f7 *R/makeCor.r b2ad039caa5f18f3e8a550986a4cd22e *R/moreStats.r c384cd4e3559701c4db9c52603ab94f1 *R/nBartlett.r e4580cc0784c3b89511d10547ab83070 *R/nBentler.r 589967de44474fe06542d5e9f0a9ad16 *R/nCng.r d243316a5291bf5e7a2423a774f602ab *R/nFactors.R fb6711495bbc168b0fe62e30439e78c3 *R/nFactorsObjectMethods.r 6e3bfdc86d9cf740bac03481db8fd05f *R/nMreg.r e331bd84b7b0e24b31f942d670892caf *R/nScree.R 9cd2eec04101fb369d692f1f8094dc17 *R/nScreeObjectMethods.r e6421b7b3bfcb3f93d53bbe127427dbd *R/nSeScree.r fa887a4267e3091ea872e5814a1e0729 *R/parallel.R a4a0f92488b301966097080fc8147ae7 *R/plotParallel.R 3c31fd3b10ac60aa9c1a89db8bf4947b *R/plotnScree.R b4b53e39d42a041829b4555f4f37e046 *R/plotuScree.R 925871c449473aaaef9d18bdf8246860 *R/principalAxis.r 5207e1ce18528b03beaf06816e0e8612 *R/principalComponents.r b306e56b7c26dd939c9b32371ee60b7f *R/rRecovery.r fa05068505bd15012b9d2008ae12d0f5 *R/structureSim.r e50156d1e249acb9f893240af8cc63af *R/structureSimObjectMethods.r ad9e7ab938dd927b8afbf8c328e0ea83 *R/studySim.r 566eaca37f817829ed73d0675955df39 *TODO bdddb5db1e58c743da382e585c965159 *data/dFactors.rda fa83c13c2d21bdc388e481c351a74a84 *man/bentlerParameters.Rd 2b439edebdb70087f96d6f5a1dccf53a *man/componentAxis.Rd d8ef6e48e7ba8c153229187495d66ebd *man/corFA.Rd 25a46aa3a7638a552ed9b38976ace5cf *man/dFactors.Rd 981aad4022be472a177b18cf92ddbb0d *man/diagReplace.Rd 11b1807191ca1334dd7a52903e08bbd1 *man/eigenBootParallel.Rd 462274b57923e211ace9fa23f2392671 *man/eigenComputes.Rd bab4fbfea972bba9f9a1b44034f2ad1d *man/eigenFrom.Rd 79e52de283e924ef5f6e628e60c550b3 *man/figures/essai.png.png e9c09b4f72123a73e77d00269aaaa6cd *man/generateStructure.Rd 7355adde306d79179088316548942204 *man/iterativePrincipalAxis.Rd a4f8d0f5d0b1ea5f240df94b2557b13d *man/makeCor.Rd bbf760295a470097b322b5ebd6cd0b10 *man/moreStats.Rd 05d28ccfdaea8d118ecd71df485627e8 *man/nBartlett.Rd 6823c278627b29c4a9d9d248f0f63ec7 *man/nBentler.Rd 097cb31d105263ac86b2c8769819c0c8 *man/nCng.Rd 1b05c647a03dc92beb5270683f08aaec *man/nFactors.Rd 15211bdde920d9b201ee5901a7f1ab5f *man/nFactorsObjectMethods.Rd 621e10d7c219d0fbd12efc07c7a112a9 *man/nMreg.Rd 20971662f528ca542ccfa4aab2c7b5a7 *man/nScree.Rd e797a8a163429dd4702caa2de170f84a *man/nScreeObjectMethods.Rd 1adfa8e96756cbc6de3e895f8673894f *man/nSeScree.Rd 8f26780ddb25af4d89e44ebfd7182251 *man/parallel.Rd ab4180eb6fd6b31112a86e11cee472ae *man/plotParallel.Rd 2d96e84c439437ba71cc15e5c2c4550d *man/plotnScree.Rd 91db474e2a98ec74b5323a16e632c4da *man/plotuScree.Rd 9e495f63fbd5c739e4406ef7919892ae *man/principalAxis.Rd 182f37265b1e7da07b57959bf32d87e1 *man/principalComponents.Rd 0d451bc3008ed8f47549803e26e95b9d *man/rRecovery.Rd 
fedb3e06d470340993c488fa78d16e17 *man/structureSim.Rd 394a314d331f900d7dcc5b863a1e34a1 *man/structureSimObjectMethods.Rd e53b5f46c2aa6f13a6eab1e26a00935c *man/studySim.Rd nFactors/R/0000755000176200001440000000000015016141120012221 5ustar liggesusersnFactors/R/diagReplace.r0000644000176200001440000000405315017053654014625 0ustar liggesusers#' Replacing Upper or Lower Diagonal of a Correlation or Covariance Matrix #' #' The \code{diagReplace} function returns a modified correlation or covariance #' matrix by replacing upper diagonal with lower diagonal, or lower diagonal #' with upper diagonal. #' #' @param R numeric: correlation or covariance matrix #' @param upper logical: if \code{TRUE} upper diagonal is replaced with lower #' diagonal. If \code{FALSE}, lower diagonal is replaced with upper diagonal. #' @return \item{R }{ numeric: correlation or covariance matrix } #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @keywords manip #' @export #' @examples #' \dontrun{ #' if(interactive()){ #' # ....................................................... #' # Example from Kim and Mueller (1978, p. 10) #' # Population: upper diagonal #' # Simulated sample: lower diagonal #' R <- matrix(c( 1.000, .6008, .4984, .1920, .1959, .3466, #' .5600, 1.000, .4749, .2196, .1912, .2979, #' .4800, .4200, 1.000, .2079, .2010, .2445, #' .2240, .1960, .1680, 1.000, .4334, .3197, #' .1920, .1680, .1440, .4200, 1.000, .4207, #' .1600, .1400, .1200, .3500, .3000, 1.000), #' nrow=6, byrow=TRUE) #' #' # Replace upper diagonal with lower diagonal #' RU <- diagReplace(R, upper=TRUE) #' #' # Replace lower diagonal with upper diagonal #' RL <- diagReplace(R, upper=FALSE) #' # ....................................................... #' } #' } diagReplace <- function(R, upper=TRUE) { RT <- R if (upper == TRUE) { Rtranspose <- t(RT) # Replacing upper diagonal with lower diagonal RT[upper.tri(RT)] <- Rtranspose[upper.tri(Rtranspose)] return(RT) } if (upper == FALSE) { Rtranspose <- t(RT) # Replacing lower diagonal with upper diagonal RT[lower.tri(RT)] <- Rtranspose[lower.tri(Rtranspose)] return(RT) } } nFactors/R/nBentler.r0000644000176200001440000001675115017050100014165 0ustar liggesusers#' Bentler and Yuan's Procedure to Determine the Number of Components/Factors #' #' This function computes Bentler and Yuan's indices for determining the #' number of components/factors to retain. #' #' The implemented Bentler and Yuan's procedure must be used with care because #' the minimized function is not always stable, as Bentler and Yuan (1996, 1998) #' already noted. In many cases, constraints must be applied to obtain a solution, #' as the actual implementation does, but the user can modify these constraints.
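#'
#' For example, the default search constraints can be relaxed or tightened
#' through the \code{minPar} and \code{maxPar} arguments, e.g.
#' \code{nBentler(x=eig, N=n, minPar=c(0.001, 0.001), maxPar=c(max(eig), 2))},
#' where \code{eig} would be a vector of eigenvalues and \code{n} the sample
#' size (an illustrative call only: suitable bounds depend on the data).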
#' #' The hypothesis tested (Bentler and Yuan, 1996, equation 10) is: \cr \cr #' #' (1) \eqn{\qquad \qquad H_k: \lambda_{k+i} = \alpha + \beta x_i, (i = 1, #' \ldots, q)} \cr #' #' The solution of the following simultaneous equations is needed to find #' \eqn{(\alpha, \beta) \in} \cr #' #' (2) \eqn{\qquad \qquad f(x) = \sum_{i=1}^q \frac{ [ \lambda_{k+j} - N \alpha #' + \beta x_j ] x_j}{(\alpha + \beta x_j)^2} = 0} \cr \cr and \eqn{\qquad #' \qquad g(x) = \sum_{i=1}^q \frac{ \lambda_{k+j} - N \alpha + \beta x_j #' x_j}{(\alpha + \beta x_j)^2} = 0} \cr #' #' The solution to this system of equations was implemented by minimizing the #' following equation: \cr #' #' (3) \eqn{\qquad \qquad (\alpha, \beta) \in \inf{[h(x)]} = \inf{\log{[f(x)^2 #' + g(x)^2]}}} \cr #' #' The likelihood ratio test \eqn{LRT} proposed by Bentler and Yuan (1996, #' equation 7) follows a \eqn{\chi^2} probability distribution with \eqn{q-2} #' degrees of freedom and is equal to: \cr #' #' (4) \eqn{\qquad \qquad LRT = N(k - p)\left\{ {\ln \left( {{n \over N}} #' \right) + 1} \right\} - N\sum\limits_{j = k + 1}^p {\ln \left\{ {{{\lambda #' _j } \over {\alpha + \beta x_j }}} \right\}} + n\sum\limits_{j = k + 1}^p #' {\left\{ {{{\lambda _j } \over {\alpha + \beta x_j }}} \right\}} } \cr #' #' With \eqn{p} being the number of eigenvalues, \eqn{k} the number of #' eigenvalues to test, \eqn{q} the \eqn{p-k} remaining eigenvalues, \eqn{N} #' the sample size, and \eqn{n = N-1}. Note that there is an error in the #' Bentler and Yuan equation, the variables \eqn{N} and \eqn{n} being inverted #' in the preceding equation 4. #' #' A better strategy proposed by Bentler and Yuan (1998) is to use a minimized #' \eqn{\chi^2} solution. This strategy will be implemented in a future version #' of the \pkg{nFactors} package. #' #' @param x numeric: a \code{vector} of eigenvalues, a \code{matrix} of #' correlations or of covariances or a \code{data.frame} of data #' @param N numeric: number of subjects. #' @param log logical: if \code{TRUE} does the maximization on the log values. #' @param alpha numeric: statistical significance level. #' @param cor logical: if \code{TRUE} computes eigenvalues from a correlation #' matrix, else from a covariance matrix #' @param details logical: if \code{TRUE} also returns details about the #' computation for each eigenvalue. #' @param minPar numeric: minimums for the coefficient of the linear trend to #' maximize. #' @param maxPar numeric: maximums for the coefficient of the linear trend to #' maximize. #' @param ... variable: additional parameters to give to the \code{cor} or #' \code{cov} functions #' @return \item{nFactors}{ numeric: vector of the number of factors retained #' by the Bentler and Yuan's procedure. } \item{details}{ numeric: matrix of #' the details of the computation.} #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' \cr \cr David Magis \cr Departement de mathematiques \cr Universite de Liege #' \cr \email{David.Magis@@ulg.ac.be} #' @seealso \code{\link{nBartlett}}, \code{\link{bentlerParameters}} #' @references Bentler, P. M. and Yuan, K.-H. (1996). Test of linear trend in #' eigenvalues of a covariance matrix with application to data analysis. #' \emph{British Journal of Mathematical and Statistical Psychology, 49}, #' 299-312. #' #' Bentler, P. M. and Yuan, K.-H. (1998). Test of linear trend in the smallest #' eigenvalues of the correlation matrix.
\emph{Psychometrika, 63}(2), 131-144. #' @export #' @importFrom stats lm #' @keywords multivariate #' @examples #' \dontrun{ #' if(interactive()){ #' ## ................................................ #' ## SIMPLE EXAMPLE OF THE BENTLER AND YUAN PROCEDURE #' #' # Bentler (1996, p. 309) Table 2 - Example 2 ............. #' n=649 #' bentler2<-c(5.785, 3.088, 1.505, 0.582, 0.424, 0.386, 0.360, 0.337, 0.303, #' 0.281, 0.246, 0.238, 0.200, 0.160, 0.130) #' #' results <- nBentler(x=bentler2, N=n) #' results #' #' plotuScree(x=bentler2, model="components", #' main=paste(results$nFactors, #' " factors retained by the Bentler and Yuan's procedure (1996, p. 309)", #' sep="")) #' # ........................................................ #' #' # Bentler (1998, p. 140) Table 3 - Example 1 ............. #' n <- 145 #' example1 <- c(8.135, 2.096, 1.693, 1.502, 1.025, 0.943, 0.901, 0.816, 0.790, #' 0.707, 0.639, 0.543, #' 0.533, 0.509, 0.478, 0.390, 0.382, 0.340, 0.334, 0.316, 0.297, #' 0.268, 0.190, 0.173) #' #' results <- nBentler(x=example1, N=n) #' results #' #' plotuScree(x=example1, model="components", #' main=paste(results$nFactors, #' " factors retained by the Bentler and Yuan's procedure (1998, p. 140)", #' sep="")) #' # ........................................................ #' } #' } nBentler <- function(x, N, log=TRUE, alpha=0.05, cor=TRUE, details=TRUE, minPar=c(min(lambda) - abs(min(lambda)) +.001, 0.001), maxPar=c(max(lambda), stats::lm(lambda ~ I(length(lambda):1))$coef[2]), ...) { stopMessage <- paste("\n These indices are only valid with a principal component solution.\n", " ...................... So, only positive eugenvalues are permitted.\n", sep="") lambda <- eigenComputes(x, cor=cor, ...) if (length(which(lambda <0 )) > 0) {cat(stopMessage);stop()} n <- N significance <- alpha min.k <- 3 LRT <- data.frame(q=numeric(length(lambda)-min.k), k=numeric(length(lambda)-min.k), LRT=numeric(length(lambda)-min.k), a=numeric(length(lambda)-min.k), b=numeric(length(lambda)-min.k), p=numeric(length(lambda)-min.k), convergence=numeric(length(lambda)-min.k)) bentler.n <- 0 for (i in 1:(length(lambda)-min.k)) { temp <- bentlerParameters(x=lambda, N=n, nFactors=i, log=log, cor=cor, minPar=minPar, maxPar=maxPar) LRT[i,3] <- temp$lrt LRT[i,4] <- ifelse(is.null(temp$coef[1]), NA, temp$coef[1]) LRT[i,5] <- ifelse(is.null(temp$coef[2]), NA, temp$coef[2]) LRT[i,6] <- ifelse(is.null(temp$p.value), NA, temp$p.value) LRT[i,7] <- ifelse(is.null(temp$convergence), NA, temp$convergence) LRT[i,2] <- i LRT[i,1] <- length(lambda) - i } #LRT <- LRT[order(LRT[,1],decreasing = TRUE),] for (i in 1:(length(lambda)-min.k)) { if (i == 1) bentler.n <- bentler.n + as.numeric(LRT$p[i] <= significance) if (i > 1) {if(LRT$p[i-1] <= 0.05) bentler.n <- bentler.n + as.numeric(LRT$p[i] <= significance)} } if (bentler.n == 0) bentler.n <- length(lambda) if (details == TRUE) details <- LRT else details <- NULL res <- list(detail=details, nFactors=bentler.n) class(res) <- c("nFactors","list") return(res) } nFactors/R/rRecovery.r0000644000176200001440000000667715017054512014415 0ustar liggesusers#' Test of Recovery of a Correlation or a Covariance matrix from a Factor #' Analysis Solution #' #' The \code{rRecovery} function returns a verification of the quality of the #' recovery of the initial correlation or covariance matrix by the factor #' solution. 
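#'
#' Internally, the recovered matrix is simply \code{loadings \%*\% t(loadings)},
#' and the quality of the recovery is summarized by the Pearson correlation
#' between the elements of this reproduced matrix and those of \code{R}
#' (with or without the diagonal, depending on \code{diagCommunalities}).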
#' #' #' @param R numeric: initial correlation or covariance matrix #' @param loadings numeric: loadings from a factor analysis solution #' @param diagCommunalities logical: if \code{TRUE}, the correlation between #' the initial solution and the estimated one will use a correlation of one in #' the diagonal. If \code{FALSE} (default) the diagonal is not used in the #' computation of this correlation. #' @return \item{R}{ numeric: initial correlation or covariance matrix } #' \item{recoveredR}{ numeric: recovered estimated correlation or covariance #' matrix } \item{difference}{ numeric: difference between initial and #' recovered estimated correlation or covariance matrix} \item{cor}{ numeric: #' Pearson correlation between initial and recovered estimated correlation or #' covariance matrix. Computations depend on the logical value of the #' \code{communalities} argument. } #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{componentAxis}}, \code{\link{iterativePrincipalAxis}}, #' \code{\link{principalAxis}} #' @export #' @importFrom MASS ginv #' @importFrom stats cor #' @keywords utilities #' @examples #' \dontrun{ #' if(interactive()){ #' # ....................................................... #' # Example from Kim and Mueller (1978, p. 10) #' # Population: upper diagonal #' # Simulated sample: lower diagnonal #' R <- matrix(c( 1.000, .6008, .4984, .1920, .1959, .3466, #' .5600, 1.000, .4749, .2196, .1912, .2979, #' .4800, .4200, 1.000, .2079, .2010, .2445, #' .2240, .1960, .1680, 1.000, .4334, .3197, #' .1920, .1680, .1440, .4200, 1.000, .4207, #' .1600, .1400, .1200, .3500, .3000, 1.000), #' nrow=6, byrow=TRUE) #' #' #' # Replace upper diagonal with lower diagonal #' RU <- diagReplace(R, upper=TRUE) #' nFactors <- 2 #' loadings <- principalAxis(RU, nFactors=nFactors, #' communalities="component")$loadings #' rComponent <- rRecovery(RU,loadings, diagCommunalities=FALSE)$cor #' #' loadings <- principalAxis(RU, nFactors=nFactors, #' communalities="maxr")$loadings #' rMaxr <- rRecovery(RU,loadings, diagCommunalities=FALSE)$cor #' #' loadings <- principalAxis(RU, nFactors=nFactors, #' communalities="multiple")$loadings #' rMultiple <- rRecovery(RU,loadings, diagCommunalities=FALSE)$cor #' #' round(c(rComponent = rComponent, #' rmaxr = rMaxr, #' rMultiple = rMultiple), 3) #' # ....................................................... #' #' } #' } "rRecovery" <- function(R, loadings, diagCommunalities=FALSE) { recoveredR <- loadings %*% t(loadings) recovery <- list(R = R, recoveredR = recoveredR, difference = R - recoveredR) if (diagCommunalities == FALSE) {diag(R) <- NA; diag(recoveredR) <- NA } corr <- stats::cor(c(R),c(recoveredR), use="pairwise.complete.obs") recovery <- list(recovery, cor = corr) return(recovery) } nFactors/R/nBartlett.r0000644000176200001440000002126015020640452014353 0ustar liggesusers#' #' Bartlett, Anderson and Lawley Procedures to Determine the Number of Components/Factors #' #' This function computes the Bartlett, Anderson and Lawley indices for determining the #' number of components/factors to retain. #' @details Note: the latex formulas are available only in the pdf version of this help file. 
#' #' The hypothesis tested is: \cr #' #' (1) \eqn{\qquad \qquad H_k: \lambda_{k+1} = \ldots = \lambda_p} \cr #' #' This hypothesis is verified by the application of different version of a #' \eqn{\chi^2} test with different values for the degrees of freedom. #' Each of these tests shares the computation of a \eqn{V_k} value: \cr #' #' (2) \eqn{\qquad \qquad V_k = #' \prod\limits_{i = k + 1}^p #' \left[ #' {{\lambda _i} \over {1 \over q } #' \sum\limits_{i = k + 1}^p {\lambda _i }} #' \right] } #' #' \eqn{p} is the number of eigenvalues, \eqn{k} the number of eigenvalues to test, #' and \eqn{q} the \eqn{p-k} remaining eigenvalues. \eqn{n} is equal to the sample size #' minus 1 (\eqn{n = N-1}). \cr #' #' The Anderson statistic is distributed as a \eqn{\chi^2} with \eqn{(q + 2)(q - 1)/2} degrees #' of freedom and is equal to: \cr #' #' (3) \eqn{\qquad \qquad - n\log (V_k ) \sim \chi _{(q + 2)(q - 1)/2}^2 } \cr #' #' An improvement of this statistic from Bartlett (Bentler, and Yuan, 1996, p. 300; #' Horn and Engstrom, 1979, equation 8) is distributed as a \eqn{\chi^2} #' with \eqn{(q)(q - 1)/2} degrees of freedom and is equal to: \cr #' #' (4) \eqn{\qquad \qquad - \left[ {n - k - {{2q^2 q + 2} \over {6q}}} #' \right]\log (V_k ) \sim \chi _{(q + 2)(q - 1)/2}^2 } \cr #' #' Finally, Anderson (1956) and James (1969) proposed another statistic. \cr #' #' (5) \eqn{\qquad \qquad - \left[ {n - k - {{2q^2 q + 2} \over {6q}} #' + \sum\limits_{i = 1}^k {{{\bar \lambda _q^2 } \over {\left( {\lambda _i #' - \bar \lambda _q } \right)^2 }}} } \right]\log (V_k ) \sim \chi _{(q + 2)(q - 1)/2}^2 } \cr #' #' Bartlett (1950, 1951) proposed a correction to the degrees of freedom of these \eqn{\chi^2} after the #' first significant test: \eqn{(q+2)(q - 1)/2}. \cr #' #' @param x numeric: a \code{vector} of eigenvalues, a \code{matrix} of correlations or of covariances or a \code{data.frame} of data (eigenFrom) #' @param N numeric: number of subjects #' @param alpha numeric: statistical significance level #' @param cor logical: if \code{TRUE} computes eigenvalues from a correlation matrix, else from a covariance matrix #' @param details logical: if \code{TRUE} also returns detains about the computation for each eigenvalue #' @param correction logical: if \code{TRUE} uses a correction for the degree of freedom after the first eigenvalue #' @param ... variable: additionnal parameters to give to the \code{cor} or \code{cov} functions #' @return \item{nFactors}{numeric: vector of the number of factors retained by the Bartlett, Anderson and Lawley procedures.} #' @return \item{details}{numeric: matrix of the details for each index.} #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{plotuScree}}, \code{\link{nScree}}, \code{\link{plotnScree}}, \code{\link{plotParallel}} #' #' @references #' Anderson, T. W. (1963). Asymptotic theory for principal component analysis. \emph{Annals of Mathematical Statistics, 34}, 122-148. #' #' Bartlett, M. S. (1950). Tests of significance in factor analysis. \emph{British Journal of Psychology, 3}, 77-85. #' #' Bartlett, M. S. (1951). A further note on tests of significance. \emph{British Journal of Psychology, 4}, 1-2. #' #' Bentler, P. M. and Yuan, K.-H. (1996). Test of linear trend in eigenvalues of a covariance matrix with application to data analysis. #' \emph{British Journal of Mathematical and Statistical Psychology, 49}, 299-312. 
#' #' Bentler, P. M. and Yuan, K.-H. (1998). Test of linear trend in the smallest #' eigenvalues of the correlation matrix. \emph{Psychometrika, 63}(2), 131-144. #' #' Horn, J. L. and Engstrom, R. (1979). Cattell's scree test in relation to #' Bartlett's chi-square test and other observations on the number of factors #' problem. \emph{Multivariate Behavioral Reasearch, 14}(3), 283-300. #' #' James, A. T. (1969). Test of equality of the latent roots of the covariance #' matrix. \emph{In} P. K. Krishna (Eds): \emph{Multivariate analysis, volume 2}.New-York, NJ: Academic Press. #' #' Lawley, D. N. (1956). Tests of significance for the latent roots of covarianceand correlation matrix. \emph{Biometrika, 43}(1/2), 128-136. #' #' @export #' @importFrom stats pchisq #' @keywords multivariate #' @examples #' \dontrun{ #' if(interactive()){ #' ## ................................................ #' ## SIMPLE EXAMPLE OF A BARTLETT PROCEDURE #' #' data(dFactors) #' eig <- dFactors$Raiche$eigenvalues #' #' results <- nBartlett(x=eig, N= 100, alpha=0.05, details=TRUE) #' results #' #' plotuScree(eig, main=paste(results$nFactors[1], ", ", #' results$nFactors[2], " or ", #' results$nFactors[3], #' " factors retained by the LRT procedures", #' sep="")) #' } #' } nBartlett <- function(x, N, alpha=0.05, cor=TRUE, details=TRUE, correction=TRUE, ...) { stopMessage <- paste("\n These indices are only valid with a principal component solution.\n", " ...................... So, only positive eugenvalues are permitted.\n", sep="") x <- eigenComputes(x, cor=cor, ...) if (length(which(x<0)) > 0) {cat(stopMessage);stop()} n <- length(x) detail <- NULL bartlett.n <- anderson.n <- lawley.n <- 0 bartlett <- bartlett.chi <- bartlett.df <- bartlett.p <- numeric(n) anderson.chi <- anderson.df <- anderson.p <- numeric(n) lawley.chi <- lawley.df <- lawley.p <- numeric(n) for (k in 0:(n-1)) { i <- k+1 bartlett[i] <- prod(x[(k+1):n]) / (sum(x[(k+1):n])/(n-k))^(n-k) # From Horn et Engstrom (1979) bartlett.chi[i] <- -(N - 1 - ((2*n+5)/6) - ((2*k)/3)) * log(bartlett[i]) bartlett.df[i] <- .5 * (n-k) * (n-k-1) # Bartlett without correction, from Horn and Engstrom (1979. p. 291, equation 8) if (correction==TRUE & bartlett.n > 0) bartlett.df[i] <- .5 * (n-k+2) * (n-k-1) # From Bentler and Yuan (1996, p. 300) bartlett.p[i] <- stats::pchisq(bartlett.chi[i] , bartlett.df[i], lower.tail = FALSE) # Conditions to stop when non significant test are obtained anderson.chi[i] <- -N * log(bartlett[i]) # From Bentler and Yuan (1996, p. 300, equations 3-4) anderson.df[i] <- .5 * (n-k+2) * (n-k-1) # From Bentler and Yuan (1996, p. 300) anderson.p[i] <- stats::pchisq(anderson.chi[i] , anderson.df[i], lower.tail = FALSE) lMean <- mean(x[(k+1):n]) lawley.chi[i] <- -(N - 1 - ((2*n+5)/6) - ((2*k)/3) + sum((lMean^2)/((x[k]+lMean)^2))) * log(bartlett[i]) # From Bentler and Yuan (1996, p. 300, equation 6) lawley.df[i] <- .5 * (n-k) * (n-k-1) # From Horn and Engstrom (1979. p. 
291, equation 8) lawley.p[i] <- stats::pchisq(lawley.chi[i] , lawley.df[i], lower.tail = FALSE) # print(c(bartlett[i], bartlett.chi[i], bartlett.df[i], bartlett.p[i]),2) ############ TEST ############# if (i == 1) { bartlett.n <- bartlett.n + as.numeric(bartlett.p[i] <= alpha) anderson.n <- anderson.n + as.numeric(anderson.p[i] <= alpha) lawley.n <- lawley.n + as.numeric(lawley.p[i] <= alpha) } if (i > 1) { if(bartlett.p[i-1] <= 0.05) bartlett.n <- bartlett.n + as.numeric(bartlett.p[i] <= alpha) if(anderson.p[i-1] <= 0.05) anderson.n <- anderson.n + as.numeric(anderson.p[i] <= alpha) if(lawley.p[i-1] <= 0.05) lawley.n <- lawley.n + as.numeric(lawley.p[i] <= alpha) } } if (bartlett.n == 0) bartlett.n <- n # If no test is significant, retain all components if (anderson.n == 0) anderson.n <- n if (lawley.n == 0) lawley.n <- n if (details == TRUE) detail <- data.frame(v=(1:(n)),values=x[1:(n)], bartlett, bartlett.chi, bartlett.df, bartlett.p, anderson.chi, anderson.df, anderson.p, lawley.chi, lawley.df, lawley.p) res <- list(detail=detail, nFactors=c(bartlett=bartlett.n, anderson=anderson.n, lawley=lawley.n)) class(res) <- c("nFactors","list") return(res) } nFactors/R/nScreeObjectMethods.r0000644000176200001440000001111615017122012016302 0ustar liggesusers#' Utility Functions for nScree Class Objects #' #' Utility functions for \code{nScree} class objects. Some of these functions #' are already implemented in the \code{nFactors} package, but are easier to #' use with generic functions like these. #' #' @rdname nScreeObjectMethods #' @param object nScree: an object of the class \code{nScree} #' @param ... variable: additional parameters to give to the \code{print} #' function with \code{print.nScree}, the \code{plotnScree} with #' \code{plot.nScree} or to the \code{summary} function with #' \code{summary.nScree} #' #' @return Generic functions for the nScree class: #' \item{is.nScree}{ logical: is the object of the class \code{nScree}? } #' \item{plot.nScree }{ graphic: plots a figure according to the #' \code{plotnScree} function} #' \item{print.nScree }{ numeric: vector of the #' number of components/factors to retain: same as the \code{Components} vector #' from the \code{nScree} object} #' \item{summary.nScree }{ data.frame: details #' of the results from a nScree analysis: same as the \code{Analysis} #' data.frame from the \code{nScree} object, but with easier control of the #' number of decimals with the \code{digits} parameter} #' #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} # #' @family nScree # #' @seealso \code{\link{plotuScree}}, \code{\link{plotnScree}}, # #' \code{\link{parallel}}, \code{\link{plotParallel}}, #' @references #' Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions #' for Cattell's scree test. Methodology, 9(1), 23-29.
#' @export #' @importFrom stats coef #' @keywords multivariate #' @examples #' \dontrun{ #' if(interactive()){ #' ## INITIALISATION #' data(dFactors) # Load the nFactors dataset #' attach(dFactors) #' vect <- Raiche # Use the example from Raiche #' eigenvalues <- vect$eigenvalues # Extract the observed eigenvalues #' nsubjects <- vect$nsubjects # Extract the number of subjects #' variables <- length(eigenvalues) # Compute the number of variables #' rep <- 100 # Number of replications for the parallel analysis #' cent <- 0.95 # Centile value of the parallel analysis #' #' ## PARALLEL ANALYSIS (qevpea for the centile criterion, mevpea for the mean criterion) #' aparallel <- parallel(var = variables, #' subject = nsubjects, #' rep = rep, #' cent = cent #' )$eigen$qevpea # The 95 centile #' #' ## NOMBER OF FACTORS RETAINED ACCORDING TO DIFFERENT RULES #' results <- nScree(x=eigenvalues, aparallel=aparallel) #' #' is.nScree(results) #' results #' summary(results) #' #' ## PLOT ACCORDING TO THE nScree CLASS #' plot(results) #' } #' } summary.nScree <- function(object, ...) { if (!is.nScree(object)) stop("Not a nScree object") cat("Report For a nScree Class \n\n") #digits <- 2 NextMethod() cat(paste("Details:",object$Model,"\n\n")) object$Analysis[,c(1:5,7)] <- round(object$Analysis[,c(1:5,7)], ...) print(object[[2]]) cat(paste("\n\n Number of factors retained by index","\n\n")) print(object[[1]]) } ## ................................................................. #' @rdname nScreeObjectMethods #' @param x Results of a previous \code{nScree} analysis # #' @family nScree #' @export # #' @method print print.nScree # #' @S3method print print.nScree ## ................................................................. print.nScree <- function(x, ...) { res <- x[[1]] print(res, ...) } ## ................................................................. #' @rdname nScreeObjectMethods # #' @param x Results of a previous \code{nScree} analysis # #' @family nScree #' @export # #' @method plot plot.nScree # #' @S3method plot plot.nScree ## ................................................................. plot.nScree <- function(x, ...) { plotnScree(x, ...) } ## ................................................................. #' @rdname nScreeObjectMethods # #' @family nScree #' @export # #' @method is is.nScree # #' @S3method is is.nScree ## ................................................................. is.nScree <- function(object) { if (inherits(object, "nScree")) return(TRUE) else return(FALSE) } ## ................................................................. nFactors/R/plotnScree.R0000644000176200001440000001156115017052364014502 0ustar liggesusers#' Scree Plot According to a nScree Object Class #' #' Plot a scree plot adding information about a non graphical \code{nScree} #' analysis. #' #' #' @param nScree Results of a previous \code{nScree} analysis #' @param legend Logical indicator of the presence or not of a legend #' @param xlab Label of the x axis (default to \code{"Component"}) #' @param ylab Label of the y axis (default to \code{"Eigenvalue"}) #' @param main Main title (default to \code{"Non Graphical Solutions to the #' Scree Test"}) #' @return Nothing returned. 
#' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{plotuScree}}, \code{\link{nScree}}, #' \code{\link{plotParallel}}, \code{\link{parallel}} #' @references #' Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions #' for Cattell's scree test. Methodology, 9(1), 23-29. #' @export #' @importFrom graphics lines par text plot.default #' @importFrom stats lm coef #' @keywords Graphics #' @examples #' \dontrun{ #' if(interactive()){ #' ## INITIALISATION #' data(dFactors) # Load the nFactors dataset #' attach(dFactors) #' vect <- Raiche # Use the second example from Buja and Eyuboglu #' # (1992, p. 519, nsubjects not specified by them) #' eigenvalues <- vect$eigenvalues # Extract the observed eigenvalues #' nsubjects <- vect$nsubjects # Extract the number of subjects #' variables <- length(eigenvalues) # Compute the number of variables #' rep <- 100 # Number of replications for the parallel analysis #' cent <- 0.95 # Centile value of the parallel analysis #' #' ## PARALLEL ANALYSIS (qevpea for the centile criterion, mevpea for the mean criterion) #' aparallel <- parallel(var = variables, #' subject = nsubjects, #' rep = rep, #' cent = cent)$eigen$qevpea # The 95 centile #' #' ## NOMBER OF FACTORS RETAINED ACCORDING TO DIFFERENT RULES #' results <- nScree(eig = eigenvalues, #' aparallel = aparallel #' ) #' #' results #' #' ## PLOT ACCORDING TO THE nScree CLASS #' plotnScree(results) #' } #' } "plotnScree" <- function (nScree, legend = TRUE, ylab = "Eigenvalues", xlab = "Components", main = "Non Graphical Solutions to Scree Test") { if (!inherits(nScree, "nScree")) stop("Method is only for nScree objects") #if (!exists("legend", mode="logical") ) legend <- TRUE # To develop #if (!exists("ylab")) ylab <- "Eigenvalues" # To develop #if (!exists("xlab")) xlab <- "Components" # To develop #if (!exists("main")) main <- "Non Graphical Solutions to Scree Test" # To develop if (nScree$Model == "components") nkaiser = "Eigenvalues (>mean = " else nkaiser = "Eigenvalues (>0 = " if (nScree$Model == "factors") xlab = "Factors" graphics::par(col = 1, pch = 1) # Color and symbol for usual scree graphics::par(mfrow = c(1,1)) eig <- nScree$Analysis$Eigenvalues k <- 1:length(eig) #plotuScree(x=eig, ...) 
# To develop plotuScree(x=eig, main=main, xlab=xlab, ylab=ylab) nk <- length(eig) noc <- nScree$Components$noc vp.p <- stats::lm(eig[c(noc+1,nk)] ~ k[c(noc+1,nk)]) x <- sum(c(1,1) * stats::coef(vp.p)) y <- sum(c(1,nk)* stats::coef(vp.p)) graphics::par(col = 10) # Color for optimal coordinates graphics::lines(k[c(1,nk)],c(x,y)) graphics::par(col = 11,pch=2) # Color and symbol for parallel analysis graphics::lines(1:nk, nScree$Analysis$Par.Analysis, type = "b") if (legend == TRUE) { leg.txt <- c(paste(nkaiser,nScree$Components$nkaiser,")"), c(paste("Parallel Analysis (n = ",nScree$Components$nparallel,")")), c(paste("Optimal Coordinates (n = ",nScree$Components$noc,")")), c(paste("Acceleration Factor (n = ",nScree$Components$naf,")")) ) legend("topright", legend = leg.txt, pch = c(1,2,NA,NA), text.col = c(1,3,2,4), col = c(1,3,2,4) ) } naf <- nScree$Components$naf graphics::text(x = noc , y = eig[noc], label = " (OC)", cex = .70, adj = c(0,0), col = 2) graphics::text(x = naf + 1, y = eig[naf + 1], label = " (AF)", cex = .70, adj = c(0,0), col = 4) } nFactors/R/structureSim.r0000644000176200001440000001566215017051750015141 0ustar liggesusers#' Population or Simulated Sample Correlation Matrix from a Given Factor #' Structure Matrix #' #' The \code{structureSim} function returns a population and a sample #' correlation matrices from a predefined congeneric factor structure. #' #' #' @param fload matrix: loadings of the factor structure #' @param reppar numeric: number of replications for the parallel analysis #' @param repsim numeric: number of replications of the matrix correlation #' simulation #' @param N numeric: number of subjects #' @param quantile numeric: quantile for the parallel analysis #' @param model character: \code{"components"} or \code{"factors"} #' @param adequacy logical: if \code{TRUE} prints the recovered population #' matrix from the factor structure #' @param details logical: if \code{TRUE} outputs details of the \code{repsim} #' simulations #' @param r2limen numeric: R2 limen value for the R2 Nelson index #' @param all logical: if \code{TRUE} computes the Bentler and Yuan index (very #' long computing time to consider) #' @return \item{values}{ the output depends of the logical value of details. #' If \code{FALSE}, returns only statistics about the eigenvalues: mean, #' median, quantile, standard deviation, minimum and maximum. If \code{TRUE}, #' returns also details about the \code{repsim} simulations. If #' \code{adequacy} = \code{TRUE} returns the recovered factor structure} #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{principalComponents}}, #' \code{\link{iterativePrincipalAxis}}, \code{\link{rRecovery}} #' @references #' Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions #' for Cattell's scree test. Methodology, 9(1), 23-29. #' #' Zwick, W. R. and Velicer, W. F. (1986). Comparison of five rules #' for determining the number of components to retain. \emph{Psychological #' Bulletin, 99}, 432-442. #' @export #' @importFrom stats median factanal #' @importFrom graphics boxplot plot abline lines #' @importFrom psych sim.structure #' @keywords multivariate #' @examples #' \dontrun{ #' if(interactive()){ #' # ....................................................... #' # Example inspired from Zwick and Velicer (1986, table 2, p. 
437) #' ## ................................................................... #' nFactors <- 3 #' unique <- 0.2 #' loadings <- 0.5 #' nsubjects <- 180 #' repsim <- 30 #' zwick <- generateStructure(var=36, mjc=nFactors, pmjc=12, #' loadings=loadings, #' unique=unique) #' ## ................................................................... #' #' # Produce statistics about a replication of a parallel analysis on #' # 30 sampled correlation matrices #' #' mzwick.fa <- structureSim(fload=as.matrix(zwick), reppar=30, #' repsim=repsim, N=nsubjects, quantile=0.5, #' model="factors") #' #' mzwick <- structureSim(fload=as.matrix(zwick), reppar=30, #' repsim=repsim, N=nsubjects, quantile=0.5, all=TRUE) #' #' # Very long execution time that could be used only with model="components" #' # mzwick <- structureSim(fload=as.matrix(zwick), reppar=30, #' # repsim=repsim, N=nsubjects, quantile=0.5, all=TRUE) #' #' par(mfrow=c(2,1)) #' graphics::plot(x=mzwick, nFactors=nFactors, index=c(1:14), cex.axis=0.7, col="red") #' graphics::plot(x=mzwick.fa, nFactors=nFactors, index=c(1:11), cex.axis=0.7, col="red") #' par(mfrow=c(1,1)) #' #' par(mfrow=c(2,1)) #' graphics::boxplot(x=mzwick, nFactors=3, cex.axis=0.8, vLine="blue", col="red") #' graphics::boxplot(x=mzwick.fa, nFactors=3, cex.axis=0.8, vLine="blue", col="red", #' xlab="Components") #' par(mfrow=c(1,1)) #' # ...................................................... #' } #' } structureSim <- function(fload, reppar=30, repsim=100, N, quantile=0.95, model="components", adequacy=FALSE, details=TRUE, r2limen=0.75, all=FALSE) { simulation <- psych::sim.structure(fx=fload, n=N, raw=TRUE) if (adequacy == TRUE) print(stats::factanal(covmat=simulation$model, factors=dim(fload)[2])) # Verification of the adequacy of the model eigenvalues <- eigenComputes(simulation$r, cor=TRUE, model=model) variables <- length(eigenvalues) # Compute the number of variables aparallel <- parallel(var=dim(fload)[1],subject=N,rep=reppar,cent=quantile,model=model)$eigen$qevpea # The percentile components <- matrix(NA, ncol=15,nrow=repsim) analysis <- NA values <- matrix(NA, ncol=length(eigenvalues),nrow=repsim) for (i in 1:repsim) { simulation <- psych::sim.structure(fx=fload, n=N, raw=TRUE) aparallel <- parallel(var=dim(fload)[1],subject=N,rep=reppar,cent=quantile,model=model)$eigen$qevpea eigenvalues <- eigenComputes(simulation$r, cor=TRUE, model=model) values[i,] <- eigenvalues results <- nScree(x=eigenvalues,aparallel = aparallel, cor=TRUE, model=model) components[i,(1:4)] <- t(results$Components) ### PERMUTATIONS if (eigenFrom(data.frame(simulation$observed)) == "data") { permutation <- eigenBootParallel(x=data.frame(simulation$observed), quantile=quantile, model=model)$quantile } results <- nScree(x=eigenvalues,aparallel = permutation, cor=TRUE, model=model) components[i, 5] <- results$Components$nparallel ### ... 
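  ### Remaining columns: CNG, multiple regression (b, t and p), standard error
  ### scree and its R2, then the Bartlett, Anderson and Lawley tests and,
  ### when all=TRUE, the Bentler and Yuan index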
components[i, 6] <- nCng(x=eigenvalues, model=model)$nFactors components[i, (7:9)] <- nMreg(x=eigenvalues, model=model)$nFactors components[i, (10:11)] <- nSeScree(x=eigenvalues, model=model, r2limen=r2limen)$nFactors if (model == "components") { components[i, (12:14)] <- nBartlett(x=eigenvalues, N=N, alpha=1-quantile, cor=TRUE, correction=TRUE)$nFactors if (all == TRUE) { cat(paste("-- repsim = ", i, "/",repsim,"\n", sep="")) components[i, (15)] <- nBentler(x=eigenvalues, N=N, alpha=1-quantile, cor=TRUE)$nFactors } } # analysis <- rbind(analysis, results$Analysis) #components[2,] <- t(results$Components);components } names <- colnames(results$Components) names <- c("oc", "af", "par", "mean.eig", "per") components <- data.frame(components) colnames(components) <- c(names,"cng","b","t.b","p.b","sescree","R2","Bartlett","Anderson","Lawley","Bentler") if (details == TRUE) analysis <- list(components=components, eigenvalues=values) if (repsim > 1) components <- moreStats(components, quantile=quantile) else components <- NA res <- list(details=analysis, nFactors=components) class(res) <- 'structureSim' return(res) } ## LIGNE 21 MODIFIEE: ETAIT quantile=0.95 ## LIGNE 42 MODIFIEE: EAIT c("oc", "af", "par", "per", "mean.eig") nFactors/R/eigenComputes.r0000644000176200001440000000553615017047106015236 0ustar liggesusers#' Computes Eigenvalues According to the Data Type #' #' The \code{eigenComputes} function computes eigenvalues from the identified data #' type. It is used internally in many #' fonctions of the \pkg{nFactors} package in order to apply these to a vector of #' eigenvalues, a matrix of correlations or covariance or a data frame. #' @param x numeric: a \code{vector} of eigenvalues, a \code{matrix} of #' correlations or of covariances or a \code{data.frame} of data #' @param cor logical: if \code{TRUE} computes eigenvalues from a correlation #' matrix, else from a covariance matrix #' @param model character: \code{"components"} or \code{"factors"} #' @param ... variable: additionnal parameters to give to the \code{cor} or #' \code{cov} functions #' @return numeric: return a vector of eigenvalues #' #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' \cr \cr David Magis \cr Departement de mathematiques \cr Universite de Liege #' \cr \email{David.Magis@@ulg.ac.be} #' @export #' @importFrom stats cor cov cov2cor #' @keywords multivariate #' @examples #' \dontrun{ #' if(interactive()){ #' # ....................................................... #' # Different data types #' # Vector of eigenvalues #' data(dFactors) #' x1 <- dFactors$Cliff1$eigenvalues #' eigenComputes(x1) #' #' # Data from a data.frame #' x2 <- data.frame(matrix(20*rnorm(100), ncol=5)) #' eigenComputes(x2, cor=TRUE, use="everything") #' eigenComputes(x2, cor=FALSE, use="everything") #' eigenComputes(x2, cor=TRUE, use="everything", method="spearman") #' eigenComputes(x2, cor=TRUE, use="everything", method="kendall") #' # From a covariance matrix #' x3 <- cov(x2) #' eigenComputes(x3, cor=TRUE, use="everything") #' eigenComputes(x3, cor=FALSE, use="everything") #' # From a correlation matrix #' x4 <- cor(x2) #' eigenComputes(x4, use="everything") #' # ....................................................... #' } #' } eigenComputes <- function(x, cor=TRUE, model="components", ...) 
{ dataType <- eigenFrom(x) if (model == "components") { res <- switch(dataType, eigenvalues = as.vector(x), correlation = {if (cor == FALSE) eigen(x)$values else eigen(stats::cov2cor(x))$values}, data = {if (cor == TRUE) eigen(stats::cor(x, ...))$values else eigen(stats::cov(x, ...))$values} ) } if (model == "factors") { res <- switch(dataType, eigenvalues = as.vector(x), correlation = {if (cor == FALSE) eigen(corFA(x, method="ginv"))$values else eigen(stats::cov2cor(corFA(x, method="ginv")))$values}, data = {if (cor == TRUE) eigen(corFA(stats::cor(x, ...), method="ginv"))$values else eigen(corFA(stats::cov(x, ...), method="ginv"))$values} ) } return(res) } nFactors/R/nFactorsObjectMethods.r0000644000176200001440000000645115017054234016656 0ustar liggesusers#' Utility Functions for nFactors Class Objects #' #' Utility functions for \code{nFactors} class objects. #' #' # #' @aliases is.nFactors print.nFactors summary.nFactors #' @rdname nFactorsObjectMethods #' #' @param x nFactors: an object of the class nFactors #' @param ... variable: additionnal parameters to give to the \code{print} #' function with \code{print.nFactors} or to the \code{summary} function with #' \code{summary.nFactors} #' @return Generic functions for the nFactors class: #' #' \item{is.nFactors}{ logical: is the object of the class nFactors? } #' \item{print.nFactors }{ numeric: vector of the number of components/factors #' to retain: same as the \code{nFactors} vector from the \code{nFactors} #' object} \item{summary.nFactors }{ data.frame: details of the results from a #' nFactors object: same as the \code{details} data.frame from the #' \code{nFactors} object, but with easier control of the number of decimals #' with the \code{digits} parameter} #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{nBentler}}, \code{\link{nBartlett}}, #' \code{\link{nCng}}, \code{\link{nMreg}}, \code{\link{nSeScree}} #' @references #' Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions #' for Cattell's scree test. Methodology, 9(1), 23-29. #' #' @export #' @keywords multivariate #' @examples #' \dontrun{ #' if(interactive()){ #' ## SIMPLE EXAMPLE #' data(dFactors) #' eig <- dFactors$Raiche$eigenvalues #' N <- dFactors$Raiche$nsubjects #' #' res <- nBartlett(eig,N); res; is.nFactors(res); summary(res, digits=2) #' res <- nBentler(eig,N); res; is.nFactors(res); summary(res, digits=2) #' res <- nCng(eig); res; is.nFactors(res); summary(res, digits=2) #' res <- nMreg(eig); res; is.nFactors(res); summary(res, digits=2) #' res <- nSeScree(eig); res; is.nFactors(res); summary(res, digits=2) #' #' ## SIMILAR RESULTS, BUT NOT A nFactors OBJECT #' res <- nScree(eig); res; is.nFactors(res); summary(res, digits=2) #' ## ................................................................. #' } #' } is.nFactors <- function(x) { if (any(class(x) == "nFactors")) return(TRUE) else return(FALSE) } ## ................................................................. ## ................................................................. #' @rdname nFactorsObjectMethods #' @export print.nFactors <- function(x, ...) { if (!is.nFactors(x)) stop("Not a nFactors object") res <- x$nFactors print(res, ...) } ## ................................................................. ## ................................................................. 
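## Minimal illustration of the S3 methods defined here (hypothetical objects;
## assumes the package is attached):
## res <- nCng(dFactors$Raiche$eigenvalues)  # any index returning an nFactors object
## is.nFactors(res)                          # TRUE
## print(res); summary(res, digits=2)        # dispatched to print.nFactors and summary.nFactors
## .................................................................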
#' @rdname nFactorsObjectMethods #' @param object nFactors: an object of the class nFactors #' @export summary.nFactors <- function(object, ...) { if (!is.nFactors(object)) stop("Not a nFactors object") cat("Report For a nFactors Class \n\n") NextMethod() cat(paste("Details:","\n\n")) print(object$detail, ...) cat(paste("\n\n Number of factors retained by index","\n\n")) print(object$nFactors) } ## ................................................................. nFactors/R/principalComponents.r0000644000176200001440000000621715017054400016446 0ustar liggesusers#' Principal Component Analysis #' #' The \code{principalComponents} function returns a principal component #' analysis. Other R functions give the same results, but #' \code{principalComponents} is customized mainly for the other factor #' analysis functions available in the \pkg{nFactors} package. In order to #' retain only a small number of components the \code{componentAxis} function #' has to be used. #' #' #' @param R numeric: correlation or covariance matrix #' @return \item{values}{ numeric: variance of each component } #' \item{varExplained}{ numeric: variance explained by each component } #' \item{cumVarExplained}{ numeric: cumulative variance explained by each #' component } \item{loadings}{ numeric: loadings of each variable on each #' component } #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{componentAxis}}, \code{\link{iterativePrincipalAxis}}, #' \code{\link{rRecovery}} #' @references Jolliffe, I. T. (2002). \emph{Principal components analysis} (2nd #' Edition). New York, NY: Springer-Verlag. #' #' Kim, J.-O. and Mueller, C. W. (1978). \emph{Introduction to factor analysis. #' What it is and how to do it}. Beverly Hills, CA: Sage. #' #' Kim, J.-O. and Mueller, C. W. (1987). \emph{Factor analysis. Statistical #' methods and practical issues}. Beverly Hills, CA: Sage. #' @export #' @keywords multivariate #' @examples #' \dontrun{ #' if(interactive()){ #' # ....................................................... #' # Example from Kim and Mueller (1978, p. 10) #' # Population: upper diagonal #' # Simulated sample: lower diagonal #' R <- matrix(c( 1.000, .6008, .4984, .1920, .1959, .3466, #' .5600, 1.000, .4749, .2196, .1912, .2979, #' .4800, .4200, 1.000, .2079, .2010, .2445, #' .2240, .1960, .1680, 1.000, .4334, .3197, #' .1920, .1680, .1440, .4200, 1.000, .4207, #' .1600, .1400, .1200, .3500, .3000, 1.000), #' nrow=6, byrow=TRUE) #' #' # Factor analysis: Principal component - #' # Kim and Mueller (1978, p. 21) #' # Replace upper diagonal with lower diagonal #' RU <- diagReplace(R, upper=TRUE) #' principalComponents(RU) #' #' # Replace lower diagonal with upper diagonal #' RL <- diagReplace(R, upper=FALSE) #' principalComponents(RL) #' # .......................................................
#' } #' } "principalComponents" <- function(R) { nVar <- dim(R)[2] acp <- eigen(R) values <- acp$values vectors <- acp$vectors # Normed vecteurs to 1 varExplained <- round((values/nVar)*100, 2) cumVarExplained <- round(cumsum(varExplained), 2) loadings <- vectors %*% diag(values^0.5) # F1 * diag(E) acp <- list(values = values, varExplained = varExplained, cumVarExplained = cumVarExplained, vectors = vectors, loadings = loadings) return(acp) } nFactors/R/bentlerParameters.r0000644000176200001440000002374215016664734016120 0ustar liggesusers#' Bentler and Yuan's Computation of the LRT Index and the Linear Trend #' Coefficients #' #' This function computes the Bentler and Yuan's (1996, 1998) \emph{LRT} index #' for the linear trend in eigenvalues of a covariance matrix. The related #' \eqn{\chi^2} and \emph{p}-value are also computed. This function is #' generally called from the \code{nBentler} function. But it could be of use #' for graphing the linear trend function and to study it's behavior. #' #' The implemented Bentler and Yuan's procedure must be used with care because #' the minimized function is not always stable. In many cases, constraints must #' applied to obtain a solution. The actual implementation did, but the user #' can modify these constraints. #' #' The hypothesis tested (Bentler and Yuan, 1996, equation 10) is: \cr \cr #' #' (1) \eqn{\qquad \qquad H_k: \lambda_{k+i} = \alpha + \beta x_i, (i = 1, #' \ldots, q)} \cr #' #' The solution of the following simultaneous equations is needed to find #' \eqn{(\alpha, \beta) \in} \cr #' #' (2) \eqn{\qquad \qquad f(x) = \sum_{i=1}^q \frac{ [ \lambda_{k+j} - N \alpha #' + \beta x_j ] x_j}{(\alpha + \beta x_j)^2} = 0} \cr \cr #' #' and \eqn{\qquad \qquad g(x) = \sum_{i=1}^q \frac{ \lambda_{k+j} - N \alpha + #' \beta x_j x_j}{(\alpha + \beta x_j)^2} = 0} \cr #' #' The solution to this system of equations was implemented by minimizing the #' following equation: \cr #' #' (3) \eqn{\qquad \qquad (\alpha, \beta) \in \inf{[h(x)]} = \inf{\log{[f(x)^2 #' + g(x)^2}}]} \cr #' #' The likelihood ratio test \eqn{LRT} proposed by Bentler and Yuan (1996, #' equation 7) follows a \eqn{\chi^2} probability distribution with \eqn{q-2} #' degrees of freedom and is equal to: \cr #' #' (4) \eqn{\qquad \qquad LRT = N(k - p)\left\{ {\ln \left( {{n \over N}} #' \right) + 1} \right\} - N\sum\limits_{j = k + 1}^p {\ln \left\{ {{{\lambda #' _j } \over {\alpha + \beta x_j }}} \right\}} + n\sum\limits_{j = k + 1}^p #' {\left\{ {{{\lambda _j } \over {\alpha + \beta x_j }}} \right\}} } \cr #' #' With \eqn{p} beeing the number of eigenvalues, \eqn{k} the number of #' eigenvalues to test, \eqn{q} the \eqn{p-k} remaining eigenvalues, \eqn{N} #' the sample size, and \eqn{n = N-1}. Note that there is an error in the #' Bentler and Yuan equation, the variables \eqn{N} and \eqn{n} beeing inverted #' in the preceeding equation 4. #' #' A better strategy proposed by Bentler an Yuan (1998) is to use a minimized #' \eqn{\chi^2} solution. This strategy will be implemented in a future version #' of the \pkg{nFactors} package. #' #' @param x numeric: a \code{vector} of eigenvalues, a \code{matrix} of #' correlations or of covariances or a \code{data.frame} of data #' @param N numeric: number of subjects. #' @param nFactors numeric: number of components to test. #' @param log logical: if \code{TRUE} the minimization is applied on the log #' values. 
#' @param cor logical: if \code{TRUE} computes eigenvalues from a correlation #' matrix, else from a covariance matrix #' @param minPar numeric: minimums for the coefficient of the linear trend. #' @param maxPar numeric: maximums for the coefficient of the linear trend. #' @param resParx numeric: restriction on the \eqn{\alpha} coefficient (x) to #' graph the function to minimize. #' @param resPary numeric: restriction on the \eqn{\beta} coefficient (y) to #' graph the function to minimize. #' @param graphic logical: if \code{TRUE} plots the minimized function #' \code{"wireframe"}, \code{"contourplot"} or \code{"levelplot"}. #' @param resolution numeric: resolution of the 3D graph (number of points from #' \eqn{\alpha} and from \eqn{\beta}). #' @param typePlot character: plots the minimized function according to a 3D #' plot: \code{"wireframe"}, \code{"contourplot"} or \code{"levelplot"}. #' @param ... variable: additionnal parameters from the \code{"wireframe"}, #' \code{"contourplot"} or \code{"levelplot"} \code{lattice} functions. Also #' additionnal parameters for the \code{eigenFrom} function. #' #' @return \item{nFactors}{ numeric: vector of the number of factors retained #' by the Bentler and Yuan's procedure. } \item{details}{ numeric: matrix of #' the details of the computation.} #' #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' \cr \cr David Magis \cr Departement de mathematiques \cr Universite de Liege #' \cr \email{David.Magis@@ulg.ac.be} #' @seealso \code{\link{nBartlett}}, \code{\link{nBentler}} #' @references #' Bentler, P. M. and Yuan, K.-H. (1996). Test of linear trend in #' eigenvalues of a covariance matrix with application to data analysis. #' \emph{British Journal of Mathematical and Statistical Psychology, 49}, #' 299-312. #' #' Bentler, P. M. and Yuan, K.-H. (1998). Test of linear trend in the smallest #' eigenvalues of the correlation matrix. \emph{Psychometrika, 63}(2), 131-144. #' @importFrom lattice wireframe contourplot levelplot #' @importFrom stats nlminb lm pchisq #' @export #' @keywords multivariate #' @examples #' \dontrun{ #' if(interactive()){ #' ## ................................................ #' ## SIMPLE EXAMPLE OF THE BENTLER AND YUAN PROCEDURE #' ## #' @importFrom graphics abline #' #' # Bentler (1996, p. 309) Table 2 - Example 2 ............. #' n=649 #' bentler2<-c(5.785, 3.088, 1.505, 0.582, 0.424, 0.386, 0.360, 0.337, 0.303, #' 0.281, 0.246, 0.238, 0.200, 0.160, 0.130) #' #' results <- nBentler(x=bentler2, N=n, details=TRUE) #' results #' #' # Two different figures to verify the convergence problem identified with #' # the 2th component #' bentlerParameters(x=bentler2, N=n, nFactors= 2, graphic=TRUE, #' typePlot="contourplot", #' resParx=c(0,9), resPary=c(0,9), cor=FALSE) #' #' bentlerParameters(x=bentler2, N=n, nFactors= 4, graphic=TRUE, drape=TRUE, #' resParx=c(0,9), resPary=c(0,9), #' scales = list(arrows = FALSE) ) #' #' plotuScree(x=bentler2, model="components", #' main=paste(results$nFactors, #' " factors retained by the Bentler and Yuan's procedure (1996, p. 309)", #' sep="")) #' # ........................................................ #' #' # Bentler (1998, p. 140) Table 3 - Example 1 ............. 
#' n <- 145 #' example1 <- c(8.135, 2.096, 1.693, 1.502, 1.025, 0.943, 0.901, 0.816, #' 0.790,0.707, 0.639, 0.543,0.533, 0.509, 0.478, 0.390, #' 0.382, 0.340, 0.334, 0.316, 0.297,0.268, 0.190, 0.173) #' #' results <- nBentler(x=example1, N=n, details=TRUE) #' results #' #' # Two different figures to verify the convergence problem identified with #' # the 10th component #' bentlerParameters(x=example1, N=n, nFactors= 10, graphic=TRUE, #' typePlot="contourplot", #' resParx=c(0,0.4), resPary=c(0,0.4)) #' #' bentlerParameters(x=example1, N=n, nFactors= 10, graphic=TRUE, drape=TRUE, #' resParx=c(0,0.4), resPary=c(0,0.4), #' scales = list(arrows = FALSE) ) #' #' plotuScree(x=example1, model="components", #' main=paste(results$nFactors, #' " factors retained by the Bentler and Yuan's procedure (1998, p. 140)", #' sep="")) #' # ........................................................ #' } #' } #' bentlerParameters <- function(x, N, nFactors, log=TRUE, cor=TRUE, minPar=c(min(lambda) - abs(min(lambda)) +.001, 0.001), maxPar=c(max(lambda), stats::lm(lambda ~ I(length(lambda):1))$coef[2]), resParx=c(0.01, 2), resPary=c(0.01, 2), graphic=TRUE, resolution=30, typePlot="wireframe", ...){ stopMessage <- paste("\n These indices are only valid with a principal component solution.\n", " ...................... So, only positive eigenvalues are permitted.\n", sep="") lambda <- eigenComputes(x, cor=cor, ...) if (length(which(lambda <0 )) > 0) {cat(stopMessage);stop()} k <- nFactors p <- length(lambda) q <- p-k i <- 1:q x <- q-i l <- lambda[k+i] n <- N - 1 # Bentler (1996, p. 133) maximization of equations 8 and 9 f1 <- function(n,l,x,alpha,beta) sum((n*l-(n+1)*(alpha+beta*x))/((alpha+beta*x)^2)) f2 <- function(n,l,x,alpha,beta) sum((n*l-(n+1)*(alpha+beta*x))*x/((alpha+beta*x)^2)) f <- function(alpha,beta) f1(n,l,x,alpha,beta)^2+f2(n,l,x,alpha,beta)^2 if (log == FALSE) F <- function(y) f(y[1],y[2]) else F <- function(y) log(f(y[1],y[2])) figure <- NULL if (graphic == TRUE) { p1 <- seq(resParx[1], resParx[2], length=resolution) p2 <- seq(resPary[1], resPary[2], length=resolution) data <- expand.grid(Alpha = p1, Beta = p2) data <- data.frame(data, y=numeric(length(data$Alpha))) for( i in 1:length(data$Alpha)) data$y[i] <- F(c(data$Alpha[i],data$Beta[i])) if (log == FALSE) zlab <- "y" else zlab <- "log(y)" if (typePlot == "wireframe") figure <- lattice::wireframe( y ~ Alpha * Beta, data=data, zlab=zlab, ...) if (typePlot == "contourplot") figure <- lattice::contourplot(y ~ Alpha * Beta, data=data, region=TRUE, ...) if (typePlot == "levelplot") figure <- lattice::levelplot( y ~ Alpha * Beta, data=data, region=TRUE, ...) } res <- stats::nlminb(objective=F,start=stats::lm(l~x)$coefficients,lower=c(minPar[1],minPar[2]),upper=c(maxPar[1],maxPar[2])) para <- res$par[1] parb <- res$par[2] # Bentler (1996, p. 133) equation 7 # !!! Warning: Bentler and Yuan (1998) were in error for the definition of LRT !!! # !!! So N and n must be inversed in the first logarithm !!! 
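  # Equation 4 of the documentation above: LRT = N(k-p){ln(n/N)+1}
  #   - N*sum(ln(lambda_j/(alpha+beta*x_j))) + n*sum(lambda_j/(alpha+beta*x_j)), j = k+1, ..., p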
lrt <- N*(k-p)*(log(n/N)+1)-N*sum(log(lambda[(k+1):p]/(para+parb*x))) + n*sum(lambda[(k+1):p]/(para+parb*x)) df <- q-2 resp <- list(convergence=res$convergence, figure=figure, coefficients=res$par, lrt=lrt, df=df,k=k,p.value=1-stats::pchisq(lrt,df)) names(resp$coefficients)<-c("alpha","beta") return(resp) } nFactors/R/aphabetical.R0000644000176200001440000000000213622033402014575 0ustar liggesusers nFactors/R/plotParallel.R0000644000176200001440000000720515017045714015020 0ustar liggesusers#' Plot a Parallel Analysis Class Object #' #' Plot a scree plot adding information about a parallel analysis. #' #' If \code{eig} is \code{FALSE} the plot shows only the parallel analysis #' without eigenvalues. #' #' @param parallel numeric: vector of the results of a previous parallel #' analysis #' @param eig depreciated parameter: eigenvalues to analyse (not used if x is #' used, recommended) #' @param x numeric: a \code{vector} of eigenvalues, a \code{matrix} of #' correlations or of covariances or a \code{data.frame} of data #' @param model character: \code{"components"} or \code{"factors"} #' @param main character: title of the plot #' @param xlab character: label of the x axis #' @param ylab character: label of the y axis #' @param legend logical: indicator of the presence or not of a legend #' @param ... variable: additionnal parameters to give to the \code{cor} or #' \code{cov} functions #' @return Nothing returned. #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{plotuScree}}, \code{\link{nScree}}, #' \code{\link{plotnScree}}, \code{\link{parallel}} #' @references #' Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions #' for Cattell's scree test. Methodology, 9(1), 23-29. #' #' @export #' @importFrom graphics plot.default lines #' @keywords Graphics #' @examples #' \dontrun{ #' if(interactive()){ #' ## SIMPLE EXAMPLE OF A PARALLEL ANALYSIS #' ## OF A CORRELATION MATRIX WITH ITS PLOT #' data(dFactors) #' eig <- dFactors$Raiche$eigenvalues #' subject <- dFactors$Raiche$nsubjects #' var <- length(eig) #' rep <- 100 #' cent <- 0.95 #' results <- parallel(subject,var,rep,cent) #' #' results #' #' #' ## PARALLEL ANALYSIS SCREE PLOT #' plotParallel(results, x=eig) #' plotParallel(results) #' } #' } #' "plotParallel" <- function(parallel, eig = NA, x = eig, model = "components", legend = TRUE, ylab = "Eigenvalues", xlab = "Components", main = "Parallel Analysis", ... ) { if (any(!is.na(x))) eig <- eigenComputes(x, ...) 
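  # When x is supplied, the eigenvalues are (re)computed from the data or the
  # correlation/covariance matrix so that the empirical scree can be overlaid
  # on the parallel analysis results drawn below.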
if (!inherits(parallel, "parallel")) stop("Method is only for parallel objects") if (model == "factors") xlab <- "Factors" var <- length(parallel$eigen$qevpea) if (length(eig) == 1) { Component <- var:1 Location <- seq(from = 0, to = max(parallel$eigen$qevpea)*3, length.out = var) graphics::plot.default(as.numeric(Component), as.numeric(Location), type = "n", main = main, xlab = xlab, ylab = ylab) } if (length(eig) > 1) {plotuScree(eig, main = main, xlab = xlab, ylab = ylab) } graphics::lines(1:var, parallel$eigen$qevpea , col = "green", type = "p", pch = 2) graphics::lines(1:var, parallel$eigen$mevpea, col = "red") if (legend == TRUE) { if (length(eig) == 1) { leg <- c("Mean Eigenvalues", "Centiles of the Eigenvalues") tco <- c("red", "green") co <- c("red", "green") pc <- c(NA, 2) } if (length(eig) > 1) { leg <- c("Eigenvalues", "Mean Eigenvalues", "Centiles of the Eigenvalues") tco <- c("black", "red", "green") co <- c("black", "red", "green") pc <- c(1, NA, 2) } legend("topright", legend = leg, text.col = tco, col = co, pch = pc ) } } nFactors/R/principalAxis.r0000644000176200001440000000763415017052504015234 0ustar liggesusers#' Principal Axis Analysis #' #' The \code{PrincipalAxis} function returns a principal axis analysis without #' iterated communalities estimates. Three different choices of communalities #' estimates are given: maximum corelation, multiple correlation or estimates #' based on the sum of the squared principal component analysis loadings. #' Generally statistical packages initialize the the communalities at the #' multiple correlation value (usual inverse or generalized inverse). #' Unfortunately, this strategy cannot deal with singular correlation or #' covariance matrices. If a generalized inverse, the maximum correlation or #' the estimated communalities based on the sum of loading are used instead, #' then a solution can be computed. #' #' #' @param R numeric: correlation or covariance matrix #' @param nFactors numeric: number of factors to retain #' @param communalities character: initial values for communalities #' (\code{"component", "maxr", "ginv" or "multiple"}) #' @return \item{values}{ numeric: variance of each component/factor } #' \item{varExplained}{ numeric: variance explained by each component/factor } #' \item{varExplained}{ numeric: cumulative variance explained by each #' component/factor } \item{loadings}{ numeric: loadings of each variable on #' each component/factor } #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{componentAxis}}, \code{\link{iterativePrincipalAxis}}, #' \code{\link{rRecovery}} #' @references Kim, J.-O. and Mueller, C. W. (1978). \emph{Introduction to #' factor analysis. What it is and how to do it}. Beverly Hills, CA: Sage. #' #' Kim, J.-O. and Mueller, C. W. (1987). \emph{Factor analysis. Statistical #' methods and practical issues}. Beverly Hills, CA: Sage. #' @export #' @importFrom MASS ginv #' @keywords multivariate #' @examples #' \dontrun{ #' if(interactive()){ #' # ....................................................... #' # Example from Kim and Mueller (1978, p. 
10) #' # Population: upper diagonal #' # Simulated sample: lower diagnonal #' R <- matrix(c( 1.000, .6008, .4984, .1920, .1959, .3466, #' .5600, 1.000, .4749, .2196, .1912, .2979, #' .4800, .4200, 1.000, .2079, .2010, .2445, #' .2240, .1960, .1680, 1.000, .4334, .3197, #' .1920, .1680, .1440, .4200, 1.000, .4207, #' .1600, .1400, .1200, .3500, .3000, 1.000), #' nrow=6, byrow=TRUE) #' #' # Factor analysis: Principal axis factoring #' # without iterated communalities - #' # Kim and Mueller (1978, p. 21) #' # Replace upper diagonal with lower diagonal #' RU <- diagReplace(R, upper=TRUE) #' principalAxis(RU, nFactors=2, communalities="component") #' principalAxis(RU, nFactors=2, communalities="maxr") #' principalAxis(RU, nFactors=2, communalities="multiple") #' # Replace lower diagonal with upper diagonal #' RL <- diagReplace(R, upper=FALSE) #' principalAxis(RL, nFactors=2, communalities="component") #' principalAxis(RL, nFactors=2, communalities="maxr") #' principalAxis(RL, nFactors=2, communalities="multiple") #' # ....................................................... #' } #' } "principalAxis" <- function(R, nFactors=2, communalities="component") { if (communalities == "component") diag(R) <- componentAxis(R)$communalities if (communalities == "maxr") { RT <- R; diag(RT) <- 0; diag(R) <- apply(RT, 1, max)} if (communalities == "ginv") diag(R) <- sqrt(1-1/diag(MASS::ginv(R))) if (communalities == "multiple") { if (all(eigen(R)$values > 0)) diag(R) <- sqrt(1-1/diag(solve(R))) # Gorsuch (1983, p. 106) else return("Not all eigenvalues are greater than 0") # Verication of positive definiteness } apa <- componentAxis(R, nFactors) return(apa) } nFactors/R/nCng.r0000644000176200001440000000737515017050174013316 0ustar liggesusers#' Cattell-Nelson-Gorsuch CNG Indices #' #' This function computes the \emph{CNG} indices for the eigenvalues of a #' correlation/covariance matrix (Gorsuch and Nelson, 1981; Nasser, 2002, p. #' 400; Zoski and Jurs, 1993, p. 6). #' #' Note that the \code{nCng} function is only valid when more than six #' eigenvalues are used and that these are obtained in the context of a #' principal component analysis. For a factor analysis, some eigenvalues could #' be negative and the function will stop and give an error message. #' #' The slope of all possible sets of three adjacent eigenvalues are compared, #' so \emph{CNG} indices can be applied only when more than six eigenvalues are #' used. The eigenvalue at which the greatest difference between two successive #' slopes occurs is the indicator of the number of components/factors to #' retain. #' #' @param x numeric: a \code{vector} of eigenvalues, a \code{matrix} of #' correlations or of covariances or a \code{data.frame} of data #' @param cor logical: if \code{TRUE} computes eigenvalues from a correlation #' matrix, else from a covariance matrix #' @param model character: \code{"components"} or \code{"factors"} #' @param details logical: if \code{TRUE} also returns detains about the #' computation for each eigenvalue. #' @param ... variable: additionnal parameters to give to the #' \code{eigenComputes} function #' @return \item{nFactors}{ numeric: number of factors retained by the CNG #' procedure. 
} \item{details}{ numeric: matrix of the details for each index.} #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{plotuScree}}, \code{\link{nScree}}, #' \code{\link{plotnScree}}, \code{\link{plotParallel}} #' @references Gorsuch, R. L. and Nelson, J. (1981). \emph{CNG scree test: an #' objective procedure for determining the number of factors}. Presented at the #' annual meeting of the Society for multivariate experimental psychology. #' #' Nasser, F. (2002). The performance of regression-based variations of the #' visual scree for determining the number of common factors. \emph{Educational #' and Psychological Measurement, 62(3)}, 397-419. #' #' Zoski, K. and Jurs, S. (1993). Using multiple regression to determine the #' number of factors to retain in factor analysis. \emph{Multiple Linear #' Regression Viewpoints, 20}(1), 5-9. #' @export #' @importFrom stats lm #' @keywords multivariate #' @examples #' \dontrun{ #' if(interactive()){ #' ## SIMPLE EXAMPLE OF A CNG ANALYSIS #' #' data(dFactors) #' eig <- dFactors$Raiche$eigenvalues #' #' results <- nCng(eig, details=TRUE) #' results #' #' plotuScree(eig, main=paste(results$nFactors, #' " factors retained by the CNG procedure", #' sep="")) #' } #' } nCng <- function(x, cor=TRUE, model="components", details=TRUE, ...) { x <- eigenComputes(x, cor=cor, model=model, ...) detail <- NULL nlength <- 2 n <- length(x) if (n < 6) stop("The number of variables must be at least 6.") i <- 1 cng <- numeric(n-5) while ((i+2*nlength+1) <= n) { xa <- c(i:(i+nlength)) ya <- x[i:(i+nlength)] compa <- stats::lm(ya ~ xa)$coef[2] xb <- c((i+1+nlength):(i+2*nlength+1)) yb <- x[(i+1+nlength):(i+1+2*nlength)] compb <- stats::lm(yb ~ xb)$coef[2] cng[i] <- compb - compa i <- i + 1 } if (details == TRUE) detail <- data.frame(v=(1:(n-5)),values=x[1:(n-5)], cng) cng <- as.numeric(which(cng==max(cng, na.rm=TRUE))+nlength) res <- list(detail=detail, nFactors=c(cng)) class(res) <- c("nFactors","list") return(res) } nFactors/R/componentAxis.r0000644000176200001440000000546115017053524015254 0ustar liggesusers#' Principal Component Analysis With Only n First Components Retained #' #' The \code{componentAxis} function returns a principal component analysis #' with the first \emph{n} components retained. #' #' #' @param R numeric: correlation or covariance matrix #' @param nFactors numeric: number of components/factors to retain #' @return \item{values}{ numeric: variance of each component/factor retained } #' \item{varExplained}{ numeric: variance explained by each component/factor #' retained } \item{varExplained}{ numeric: cumulative variance explained by #' each component/factor retained } \item{loadings}{ numeric: loadings of each #' variable on each component/factor retained } #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{principalComponents}}, #' \code{\link{iterativePrincipalAxis}}, \code{\link{rRecovery}} #' @references Kim, J.-O. and Mueller, C. W. (1978). \emph{Introduction to #' factor analysis. What it is and how to do it}. Beverly Hills, CA: Sage. #' #' Kim, J.-O. and Mueller, C. W. (1987). \emph{Factor analysis. Statistical #' methods and practical issues}. Beverly Hills, CA: Sage. 
#' @keywords multivariate #' @export #' @examples #' \dontrun{ #' if(interactive()){ #' # ....................................................... #' # Example from Kim and Mueller (1978, p. 10) #' # Simulated sample: lower diagnonal #' R <- matrix(c( 1.000, 0.560, 0.480, 0.224, 0.192, 0.16, #' 0.560, 1.000, 0.420, 0.196, 0.168, 0.14, #' 0.480, 0.420, 1.000, 0.168, 0.144, 0.12, #' 0.224, 0.196, 0.168, 1.000, 0.420, 0.35, #' 0.192, 0.168, 0.144, 0.420, 1.000, 0.30, #' 0.160, 0.140, 0.120, 0.350, 0.300, 1.00), #' nrow=6, byrow=TRUE) #' #' # Factor analysis: Selected principal components - Kim and Mueller #' # (1978, p. 20) #' componentAxis(R, nFactors=2) #' #' # ....................................................... #' } #' } "componentAxis" <- function(R, nFactors=2) { nVar <- dim(R)[2] acp <- principalComponents(R) values <- acp$values[(1:nFactors)] varExplained <- round((values/nVar)*100, 2) cumVarExplained <- round(cumsum(varExplained), 2) loadings <- acp$vectors[,(1:nFactors)] %*% diag(values^0.5) # F1 * diag(E) communalities <- apply(loadings*loadings,1,sum) apa <- list(values = values, varExplained = varExplained, cumVarExplained = cumVarExplained, loadings = loadings, communalities = communalities) return(apa) } nFactors/R/nFactors.R0000644000176200001440000000132215017117770014140 0ustar liggesusers#' nFactors: Number of factor or components to retain in a factor analysis #' #' A package for determining the number of factor or components to retain in a factor analysis. #' The methods are all based on eigenvalues. #' # #' @section Foo functions: # #' BentlerParameters # #' componentAxis # #' corFA #' #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' #' @references #' Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions #' for Cattell's scree test. Methodology, 9(1), 23-29. #' #' @docType package #' @name nFactors NULL nFactors/R/eigenFrom.r0000644000176200001440000000553115017053744014342 0ustar liggesusers#' Identify the Data Type to Obtain the Eigenvalues #' #' The \code{eigenFrom} function identifies the data type from which to obtain the #' eigenvalues. The function is used internally in many functions of #' the \pkg{nFactors} package to be able to apply these to a vector of eigenvalues, #' a matrix of correlations or covariance or a \code{data.frame}. #' @param x numeric: a \code{vector} of eigenvalues, a \code{matrix} of correlations or of covariances or a \code{data.frame} of data #' @return character: return the data type to obtain the eigenvalues: \code{"eigenvalues"}, \code{"correlation"} or \code{"data"} #' #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' \cr \cr David Magis \cr Departement de mathematiques \cr Universite de Liege #' \cr \email{David.Magis@@ulg.ac.be} #' @export # #' @import methods #' @keywords multivariate #' @examples #' \dontrun{ #' if(interactive()){ #' # ....................................................... 
#' # Different data types #' # Examples of adequate data sources #' # Vector of eigenvalues #' data(dFactors) #' x1 <- dFactors$Cliff1$eigenvalues #' eigenFrom(x1) #' #' # Data from a data.frame #' x2 <- data.frame(matrix(20*rnorm(100), ncol=5)) #' eigenFrom(x2) #' #' # From a covariance matrix #' x3 <- cov(x2) #' eigenFrom(x3) #' #' # From a correlation matrix #' x4 <- cor(x2) #' eigenFrom(x4) #' #' # Examples of inadequate data sources: not run because of errors generated #' # x0 <- c(2,1) # Error: not enough eigenvalues #' # eigenFrom(x0) #' # x2 <- matrix(x1, ncol=5) # Error: non a symetric covariance matrix #' # eigenFrom(x2) #' # eigenFrom(x3[,(1:2)]) # Error: not enough variables #' # x6 <- table(x5) # Error: not a valid data class #' # eigenFrom(x6) #' # ....................................................... #' } #' } eigenFrom <- function(x) { #classType <- methods::class1(x) classType <- data.class(x) res <- switch (classType, data.frame = "data", matrix = "correlation", numeric = "eigenvalues", stop("Not a data.frame, a matrix, or a numeric vector") ) switch (res, data = if (dim(x)[2] <= 2) stop("At least 3 variables must be supplied"), correlation = if (dim(x)[2] <= 2) stop("At least 3 variables must be supplied"), eigenvalues = if (length(x) <= 2) stop("A vector of 3 eigenvalues or more must be supplied") ) if (res == "correlation") if (any(x[lower.tri(x)] != t(x)[lower.tri(t(x))])) { stop("A correlation/covariance matrix must be symetric, empirical data must come from a data.frame, or eigenvalues must directly come from a vector. Verify the documentation about the eigenFrom function.") } invisible(res) } nFactors/R/nSeScree.r0000644000176200001440000001145615017050724014134 0ustar liggesusers#' Standard Error Scree and Coefficient of Determination Procedures to #' Determine the Number of Components/Factors #' #' This function computes the \emph{seScree} (\eqn{S_{Y \bullet X}}) indices #' (Zoski and Jurs, 1996) and the coefficient of determination indices of #' Nelson (2005) \eqn{R^2} for determining the number of components/factors to #' retain. #' #' The Zoski and Jurs \eqn{S_{Y \bullet X}} index is the standard error of the #' estimate (predicted) eigenvalues by the regression from the \eqn{(k+1, #' \ldots, p)} subsequent ranks of the eigenvalues. The standard error is #' computed as: #' #' (1) \eqn{\qquad \qquad S_{Y \bullet X} = \sqrt{ \frac{(\lambda_k - #' \hat{\lambda}_k)^2} {p-2} } } \cr #' #' A value of \eqn{1/p} is choosen as the criteria to determine the number of #' components or factors to retain, \emph{p} corresponding to the number of #' variables. #' #' The Nelson \eqn{R^2} index is simply the multiple regresion coefficient of #' determination for the \eqn{k+1, \ldots, p} eigenvalues. Note that Nelson #' didn't give formal prescriptions for the criteria for this index. He only #' suggested that a value of 0.75 or more must be considered. More is to be #' done to explore adequate values. #' #' @param x numeric: eigenvalues. #' @param cor logical: if \code{TRUE} computes eigenvalues from a correlation #' matrix, else from a covariance matrix #' @param model character: \code{"components"} or \code{"factors"} #' @param details logical: if \code{TRUE} also returns details about the #' computation for each eigenvalue. #' @param r2limen numeric: criterion value retained for the coefficient of #' determination indices. #' @param ... 
variable: additionnal parameters to give to the #' \code{eigenComputes} and \code{cor} or \code{cov} functions #' @return \item{nFactors}{ numeric: number of components/factors retained by #' the seScree procedure. } \item{details}{ numeric: matrix of the details for #' each index.} #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{plotuScree}}, \code{\link{nScree}}, #' \code{\link{plotnScree}}, \code{\link{plotParallel}} #' #' @references #' Nasser, F. (2002). The performance of regression-based #' variations of the visual scree for determining the number of common factors. #' \emph{Educational and Psychological Measurement, 62(3)}, 397-419. #' #' Nelson, L. R. (2005). Some observations on the scree test, and on #' coefficient alpha. \emph{Thai Journal of Educational Research and #' Measurement, 3(1)}, 1-17. #' #' Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions #' for Cattell's scree test. Methodology, 9(1), 23-29. #' #' Zoski, K. and Jurs, S. (1993). Using multiple regression to determine the #' number of factors to retain in factor analysis. \emph{Multiple Linear #' Regression Viewpoints, 20}(1), 5-9. #' #' Zoski, K. and Jurs, S. (1996). An objective counterpart to the visuel scree #' test for factor analysis: the standard error scree. \emph{Educational and #' Psychological Measurement, 56}(3), 443-451. #' @export #' @importFrom stats sd lm #' @keywords multivariate #' @examples #' \dontrun{ #' if(interactive()){ #' ## SIMPLE EXAMPLE OF SESCREE AND R2 ANALYSIS #' #' data(dFactors) #' eig <- dFactors$Raiche$eigenvalues #' #' results <- nSeScree(eig) #' results #' #' plotuScree(eig, main=paste(results$nFactors[1], " or ", results$nFactors[2], #' " factors retained by the sescree and R2 procedures", #' sep="")) #' } #' } nSeScree <- function(x, cor=TRUE, model="components", details=TRUE, r2limen=0.75, ...) { x <- eigenComputes(x, cor=cor, model=model, ...) detail <- NULL n <- length(x) criteria <- 1/n seScreeCriteria <- R2Criteria <- 0 if (n < 3) stop("The number of variables must be at least 3.") i <- 1 seScree <- R2 <- numeric(n-3) while ((i) <= (n-2)) { xa <- c(i:n) ya <- x[i:n] ma <- stats::lm(ya ~ xa) seScree[i] <- stats::sd(ya)*sqrt((1-summary(ma)$r.squared) * ((length(ya)-1)/(length(ya)-2))) # Howell(2008, p. 253) seScreeCriteria <- seScreeCriteria + as.numeric(seScree[i] > criteria) R2[i] <- summary(ma)$r.squared R2Criteria <- R2Criteria + as.numeric(R2[i] < r2limen) i <- i + 1 } if (details == TRUE) detail <- data.frame(v=(1:(n-2)),values=x[1:(n-2)], seScree, R2) seScree <- seScreeCriteria R2 <- R2Criteria res <- list(detail=detail, nFactors=c(se=seScree, R2=R2)) class(res) <- c("nFactors","list") return(res) } nFactors/R/nScree.R0000644000176200001440000002535715017050546013613 0ustar liggesusers#' Non Graphical Cattel's Scree Test #' #' The \code{nScree} function returns an analysis of the number of component or #' factors to retain in an exploratory principal component or factor analysis. #' The function also returns information about the number of components/factors #' to retain with the Kaiser rule and the parallel analysis. #' #' The \code{nScree} function returns an analysis of the number of #' components/factors to retain in an exploratory principal component or factor #' analysis. Different solutions are given. 
The classical ones are the Kaiser #' rule, the parallel analysis, and the usual scree test #' (\code{\link{plotuScree}}). Non graphical solutions to the Cattell #' subjective scree test are also proposed: an acceleration factor (\emph{af}) #' and the optimal coordinates index \emph{oc}. The acceleration factor #' indicates where the elbow of the scree plot appears. It corresponds to the #' acceleration of the curve, i.e. the second derivative. The optimal #' coordinates are the extrapolated coordinates of the previous eigenvalue that #' allow the observed eigenvalue to go beyond this extrapolation. The #' extrapolation is made by a linear regression using the last eigenvalue #' coordinates and the \eqn{k+1} eigenvalue coordinates. There are \eqn{k-2} #' regression lines like this. The Kaiser rule or a parallel analysis #' criterion (\code{\link{parallel}}) must also be simultaneously satisfied to #' retain the components/factors, whether for the acceleration factor, or for #' the optimal coordinates. #' #' If \eqn{\lambda_i} is the \eqn{i^{th}} eigenvalue, and \eqn{LS_i} is a #' location statistics like the mean or a centile (generally the followings: #' \eqn{1^{st}, \ 5^{th}, \ 95^{th}, \ or \ 99^{th}}). #' #' The Kaiser rule is computed as: \deqn{ n_{Kaiser} = \sum_{i} (\lambda_{i} #' \ge \bar{\lambda}).} Note that \eqn{\bar{\lambda}} is equal to 1 when a #' correlation matrix is used. #' #' The parallel analysis is computed as: \deqn{n_{parallel} = \sum_{i} #' (\lambda_{i} \ge LS_i).} #' #' The acceleration factor (\eqn{AF}) corresponds to a numerical solution to #' the elbow of the scree plot: \deqn{n_{AF} \equiv \ If \ \left[ (\lambda_{i} #' \ge LS_i) \ and \ max(AF_i) \right].} #' #' The optimal coordinates (\eqn{OC}) corresponds to an extrapolation of the #' preceeding eigenvalue by a regression line between the eigenvalue #' coordinates and the last eigenvalue coordinates: \deqn{n_{OC} = \sum_i #' \left[(\lambda_i \ge LS_i) \cap (\lambda_i \ge (\lambda_{i \ predicted}) #' \right].} #' #' #' @param eig depreciated parameter (use x instead): eigenvalues to analyse #' @param x numeric: a \code{vector} of eigenvalues, a \code{matrix} of #' correlations or of covariances or a \code{data.frame} of data #' @param aparallel numeric: results of a parallel analysis. Defaults #' eigenvalues fixed at \eqn{\lambda >= \bar{\lambda}} (Kaiser and related #' rule) or \eqn{\lambda >= 0} (CFA analysis) #' @param cor logical: if \code{TRUE} computes eigenvalues from a correlation #' matrix, else from a covariance matrix #' @param model character: \code{"components"} or \code{"factors"} #' @param criteria numeric: by default fixed at \eqn{\bar{\lambda}}. When the #' \eqn{\lambda}s are computed from a principal component analysis on a #' correlation matrix, it corresponds to the usual Kaiser \eqn{\lambda >= 1} #' rule. On a covariance matrix or from a factor analysis, it is simply the #' mean. To apply \eqn{\lambda >= 0}, sometimes used with factor analysis, fix #' the criteria to \eqn{0}. #' @param ... 
variabe: additionnal parameters to give to the \code{cor} or #' \code{cov} functions #' #' #' @return #' \item{Components }{ Data frame for the number of components/factors #' according to different rules } \item{Components$noc }{ Number of #' components/factors to retain according to optimal coordinates \emph{oc}} #' \item{Components$naf }{ Number of components/factors to retain according to #' the acceleration factor \emph{af}} \item{Components$npar.analysis }{Number #' of components/factors to retain according to parallel analysis } #' \item{Components$nkaiser }{ Number of components/factors to retain according #' to the Kaiser rule } \item{Analysis }{ Data frame of vectors linked to the #' different rules } \item{Analysis$Eigenvalues }{ Eigenvalues } #' \item{Analysis$Prop }{ Proportion of variance accounted by eigenvalues } #' \item{Analysis$Cumu }{ Cumulative proportion of variance accounted by #' eigenvalues } \item{Analysis$Par.Analysis }{ Centiles of the random #' eigenvalues generated by the parallel analysis. } \item{Analysis$Pred.eig }{ #' Predicted eigenvalues by each optimal coordinate regression line } #' \item{Analysis$OC}{ Critical optimal coordinates \emph{oc}} #' \item{Analysis$Acc.factor }{ Acceleration factor \emph{af}} #' \item{Analysis$AF}{ Critical acceleration factor \emph{af}} Otherwise, #' returns a summary of the analysis. #' #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' #' @seealso \code{\link{plotuScree}}, \code{\link{plotnScree}}, #' \code{\link{parallel}}, \code{\link{plotParallel}}, #' #' @references #' Cattell, R. B. (1966). The scree test for the number of factors. #' \emph{Multivariate Behavioral Research, 1}, 245-276. #' #' Dinno, A. (2009). \emph{Gently clarifying the application of Horn's parallel #' analysis to principal component analysis versus factor analysis}. Portland, #' Oregon: Portland Sate University. #' #' Guttman, L. (1954). Some necessary conditions for common factor analysis. #' \emph{Psychometrika, 19, 149-162}. #' #' Horn, J. L. (1965). A rationale for the number of factors in factor #' analysis. \emph{Psychometrika, 30}, 179-185. #' #' Kaiser, H. F. (1960). The application of electronic computer to factor #' analysis. \emph{Educational and Psychological Measurement, 20}, 141-151. #' #' Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions #' for Cattell's scree test. Methodology, 9(1), 23-29. 
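#'
#' @note The following is only a rough sketch, with hypothetical eigenvalues,
#' of the two non-graphical criteria described above (not the exact internal
#' code): the acceleration factor is essentially a second-order difference of
#' the eigenvalues, and each optimal coordinate is a linear extrapolation
#' built from the next eigenvalue and the last one.
#' \preformatted{
#' ev    <- c(3.2, 1.5, 0.9, 0.7, 0.4, 0.3)  # hypothetical eigenvalues
#' af    <- ev[3:6] - 2*ev[2:5] + ev[1:4]    # acceleration (second differences)
#' fit   <- lm(ev[c(2, 6)] ~ c(2, 6))        # line through ranks 2 and 6
#' pred1 <- sum(coef(fit) * c(1, 1))         # extrapolated eigenvalue at rank 1
#' }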
#' # #' @family nScree #' @export #' @importFrom stats lm coef #' @keywords multivariate #' @examples #' \dontrun{ #' if(interactive()){ #' ## INITIALISATION #' data(dFactors) # Load the nFactors dataset #' attach(dFactors) #' vect <- Raiche # Uses the example from Raiche #' eigenvalues <- vect$eigenvalues # Extracts the observed eigenvalues #' nsubjects <- vect$nsubjects # Extracts the number of subjects #' variables <- length(eigenvalues) # Computes the number of variables #' rep <- 100 # Number of replications for PA analysis #' cent <- 0.95 # Centile value of PA analysis #' #' ## PARALLEL ANALYSIS (qevpea for the centile criterion, mevpea for the #' ## mean criterion) #' aparallel <- parallel(var = variables, #' subject = nsubjects, #' rep = rep, #' cent = cent #' )$eigen$qevpea # The 95 centile #' #' ## NUMBER OF FACTORS RETAINED ACCORDING TO DIFFERENT RULES #' results <- nScree(x=eigenvalues, aparallel=aparallel) #' results #' summary(results) #' #' ## PLOT ACCORDING TO THE nScree CLASS #' plotnScree(results) #' } #' } "nScree" <- function(eig=NULL, x=eig, aparallel = NULL, cor=TRUE, model="components", criteria=NULL, ...) { # Initialisation eig <- eigenComputes(x, cor=cor, model=model, ...) if (is.null(aparallel)) aparallel <- rep(1,length(eig)) # default to 1 in the diagonal nk <- length(eig) k <- 1:nk proportion <- eig/sum(eig) cumulative <- proportion if (is.null(criteria)) criteria <- mean(eig) for (i in 2:nk) cumulative[i] = cumulative[i-1] + proportion[i] proportion[proportion < 0] <- 0# To constraint negative proportions to be zero cond1 <- TRUE; cond2 <- TRUE; i <- 0; pred.eig <- af <- rep(NA,nk) while ((cond1 == TRUE) && (cond2 == TRUE) && (i < nk)) { i <- i + 1 ind <- k[c(i+1,nk)] #### Optimal coordinate based on the next eigenvalue regression (scree) vp.p <- stats::lm(eig[c(i+1,nk)] ~ ind) vp.prec <- pred.eig[i] <- sum(c(1,i)* stats::coef(vp.p)) cond1 <- (eig[i] >= vp.prec) cond2 <- (eig[i] >= aparallel[i]) nc <- i-1 } # Second derivative at the i eigenvalue (acceleration factor, elbow) # See Yakowitz and Szidarovszky (1986, p. 
84) tag <- 1 for (j in 2:(nk-1)) { if (eig[j-1] >= aparallel[j-1]) { af[j] <- (eig[j+1] -2* eig[j]) + eig[j-1] } } if (model == "components") p.vec <- which(eig >= aparallel,TRUE) else p.vec <- which((eig-aparallel)>=0 & eig >= criteria) ###if (model == "components") p.vec <- which(eig >= aparallel,TRUE) else p.vec <- which((eig-aparallel)>=0 & eig > 0) npar <- sum(p.vec == (1:length(p.vec))) nkaiser <- sum(eig >= rep(criteria,nk)) #### if (model == "components") nkaiser <- sum(eig >= rep(criteria,nk)) else nkaiser <- sum(eig >= rep(0,nk)) #### if (model == "components") nkaiser <- sum(eig >= rep(1,nk)) else nkaiser <- sum(eig >= rep(mean(eig),nk)) naf <- which(af == max(af,na.rm=TRUE),TRUE) - 1 # Assure that all the optimal coordinates will be computed for (i in (nc+1):(nk-2)) { ind <- k[c(i+1,nk)] vp.p <- stats::lm(eig[c(i+1,nk)] ~ ind) vp.prec <- pred.eig[i] <- sum(c(1,i)* stats::coef(vp.p)) } # Assure that all the acceleration factors will be computed for (j in 2:(nk-1)) af[j] <- (eig[j+1] - 2 * eig[j]) + eig[j-1] # Return values by the function coc <- rep("",nk); coc[nc] = "(< OC)" caf <- rep("",nk); caf[naf] = "(< AF)" result <- (list(Components = data.frame(noc = nc, naf = naf, nparallel = npar, nkaiser = nkaiser), Analysis = data.frame(Eigenvalues = eig, Prop = proportion, Cumu = cumulative, Par.Analysis = aparallel, Pred.eig = pred.eig, OC = coc, Acc.factor = af, AF = caf), Model = model)) class(result) <- 'nScree' return(result) } nFactors/R/parallel.R0000644000176200001440000001404015017051156014151 0ustar liggesusers#' Parallel Analysis of a Correlation or Covariance Matrix #' #' This function gives the distribution of the eigenvalues of correlation or a #' covariance matrices of random uncorrelated standardized normal variables. #' The mean and a selected quantile of this distribution are returned. #' #' Note that if the decision is based on a quantile value rather than on the #' mean, care must be taken with the number of replications (\code{rep}). In #' fact, the smaller the quantile (\code{cent}), the bigger the number of #' necessary replications. #' #' @param subject numeric: nmber of subjects (default is 100) #' @param var numeric: number of variables (default is 10) #' @param rep numeric: number of replications of the correlation matrix #' (default is 100) #' @param cent depreciated numeric (use quantile instead): quantile of the #' distribution on which the decision is made (default is 0.05) #' @param quantile numeric: quantile of the distribution on which the decision #' is made (default is 0.05) #' @param model character: \code{"components"} or \code{"factors"} #' @param sd numeric: vector of standard deviations of the simulated variables #' (for a parallel analysis on a covariance matrix) #' @param ... variable: other parameters for the \code{"mvrnorm"}, \code{corr} #' or \code{cov} functions #' @return \item{eigen}{ Data frame consisting of the mean and the quantile of #' the eigenvalues distribution } \item{eigen$mevpea}{ Mean of the eigenvalues #' distribution} \item{eigen$sevpea}{ Standard deviation of the eigenvalues #' distribution} \item{eigen$qevpea}{ quantile of the eigenvalues distribution} #' \item{eigen$sqevpea}{ Standard error of the quantile of the eigenvalues #' distribution} \item{subject}{ Number of subjects} \item{variables}{ Number #' of variables} \item{centile}{ Selected quantile} Otherwise, returns a #' summary of the parallel analysis. 
#' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{plotuScree}}, \code{\link{nScree}}, #' \code{\link{plotnScree}}, \code{\link{plotParallel}} #' @references Drasgow, F. and Lissak, R. (1983) Modified parallel analysis: a #' procedure for examining the latent dimensionality of dichotomously scored #' item responses. \emph{Journal of Applied Psychology, 68}(3), 363-373. #' #' Hoyle, R. H. and Duvall, J. L. (2004). Determining the number of factors in #' exploratory and confirmatory factor analysis. In D. Kaplan (Ed.): \emph{The #' Sage handbook of quantitative methodology for the social sciences}. Thousand #' Oaks, CA: Sage. #' #' Horn, J. L. (1965). A rationale and test of the number of factors in factor #' analysis. \emph{Psychometrika, 30}, 179-185. #' @export #' @importFrom MASS ginv mvrnorm #' @importFrom stats cov dnorm qnorm #' @keywords multivariate #' @examples #' \dontrun{ #' if(interactive()){ #' ## SIMPLE EXAMPLE OF A PARALLEL ANALYSIS #' ## OF A CORRELATION MATRIX WITH ITS PLOT #' data(dFactors) #' eig <- dFactors$Raiche$eigenvalues #' subject <- dFactors$Raiche$nsubjects #' var <- length(eig) #' rep <- 100 #' quantile <- 0.95 #' results <- parallel(subject, var, rep, quantile) #' #' results #' #' ## IF THE DECISION IS BASED ON THE CENTILE USE qevpea INSTEAD #' ## OF mevpea ON THE FIRST LINE OF THE FOLLOWING CALL #' plotuScree(x = eig, #' main = "Parallel Analysis" #' ) #' #' lines(1:var, #' results$eigen$qevpea, #' type="b", #' col="green" #' ) #' #' #' ## ANOTHER SOLUTION IS SIMPLY TO #' plotParallel(results) #' } #' } "parallel" <- function(subject=100, var=10, rep=100, cent=0.05, quantile=cent, model="components", sd=diag(1,var), ...) { r <- subject c <- var y <- matrix(c(1:r*c), nrow=r, ncol=c) ycor <- matrix(c(1:c*c), nrow=c, ncol=c) evpea <- NULL leg.txt <- "Pearson" # Simulation of k samples to obtain k random eigenvalues vectors # for Pearson correlation coefficients for (k in c(1:rep)) { # y <- rnorm(y, sd=sqrt(mean(diag(sd)))) # Old version without covariance # y <- matrix(y, nrow=r, ncol=c) # Old version without covariance y <- MASS::mvrnorm(n = r, mu=rep(0,var), Sigma=sd, empirical=FALSE) corY <- stats::cov(y, ...) 
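    # corY holds the simulated covariance matrix for this replication; its
    # diagonal is adjusted below (the variances supplied in sd for a components
    # model, communality estimates for a factors model) before the eigenvalues
    # are stored in evpea.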
# The previous version was only cor(y) if (model == "components") diag(corY) <- diag(sd) # To constraint the diagonal to sd for PCA if (model == "factors") corY <- corY - MASS::ginv(diag(diag(MASS::ginv(corY)))) # To constraint the diagonal to communalities for FCA evpea <- rbind(evpea, eigen(corY)[[1]]) } # Temporay function to compute the standard error of a quantile SEcentile <- function(sd, n = 100, p = 0.95) {return(sd/sqrt(n) * sqrt(p*(1-p))/stats::dnorm(stats::qnorm(p))) } # Summary statistics sprob <- c(cent) mevpea <- sapply(as.data.frame(evpea), mean) # Eigenvalues means sevpea <- sapply(as.data.frame(evpea), sd ) # Eigenvalues Standard deviations qevpea <- moreStats(evpea, quantile=quantile)[3,] # Would be more in line with version 2.3 #quant <- function(x, sprobs = sprobs) {return(as.vector(quantile(x, probs = sprob))) } #qevpea <- sapply(as.data.frame(evpea), quant) # Eigenvalues centiles sqevpea <- sevpea sqevpea <- sapply(as.data.frame(sqevpea), SEcentile, n = rep, p = cent) # Standard error of the centiles # List of results return result <- list(eigen = data.frame(mevpea, sevpea, qevpea, sqevpea), subject = r, variables = c, centile = cent ) class(result) <- 'parallel' # For future use return(result) } nFactors/R/iterativePrincipalAxis.r0000644000176200001440000001243015017047362017105 0ustar liggesusers#' Iterative Principal Axis Analysis #' #' The \code{iterativePrincipalAxis} function returns a principal axis analysis with #' iterated communality estimates. Four different choices of initial communality #' estimates are given: maximum correlation, multiple correlation (usual and #' generalized inverse) or estimates based #' on the sum of the squared principal component analysis loadings. Generally, statistical #' packages initialize the communalities at the multiple correlation value. #' Unfortunately, this strategy cannot always deal with singular correlation or #' covariance matrices. #' If a generalized inverse, the maximum correlation or the estimated communalities #' based on the sum of loadings #' are used instead, then a solution can be computed. #' #' #' @param R numeric: correlation or covariance matrix #' @param nFactors numeric: number of factors to retain #' @param communalities character: initial values for communalities (\code{"component", "maxr", "ginv" or "multiple"}) #' @param iterations numeric: maximum number of iterations to obtain a solution #' @param tolerance numeric: minimal difference in the estimated communalities after a given iteration #' #' @return values numeric: variance of each component #' @return varExplained numeric: variance explained by each component #' @return varExplained numeric: cumulative variance explained by each component #' @return loadings numeric: loadings of each variable on each component #' @return iterations numeric: maximum number of iterations to obtain a solution #' @return tolerance numeric: minimal difference in the estimated communalities after a given iteration #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' \cr \cr David Magis \cr Departement de mathematiques \cr Universite de Liege #' \cr \email{David.Magis@@ulg.ac.be} #' #' @references #' Kim, J.-O. and Mueller, C. W. (1978). \emph{Introduction to factor analysis. What it #' is and how to do it}. Beverly Hills, CA: Sage. #' #' Kim, J.-O. and Mueller, C. W. (1987). \emph{Factor analysis. Statistical methods and #' practical issues}. 
Beverly Hills, CA: Sage. #' #' @export #' @importFrom MASS ginv #' @keywords multivariate #' @seealso \code{\link{componentAxis}}, \code{\link{principalAxis}}, \code{\link{rRecovery}} #' #' @examples #' \dontrun{ #' if(interactive()){ #' ## ................................................ #' # Example from Kim and Mueller (1978, p. 10) #' # Population: upper diagonal #' # Simulated sample: lower diagnonal #' R <- matrix(c( 1.000, .6008, .4984, .1920, .1959, .3466, #' .5600, 1.000, .4749, .2196, .1912, .2979, #' .4800, .4200, 1.000, .2079, .2010, .2445, #' .2240, .1960, .1680, 1.000, .4334, .3197, #' .1920, .1680, .1440, .4200, 1.000, .4207, #' .1600, .1400, .1200, .3500, .3000, 1.000), #' nrow=6, byrow=TRUE) #' #' # Factor analysis: Principal axis factoring with iterated communalities #' # Kim and Mueller (1978, p. 23) #' # Replace upper diagonal with lower diagonal #' RU <- diagReplace(R, upper=TRUE) #' nFactors <- 2 #' fComponent <- iterativePrincipalAxis(RU, nFactors=nFactors, #' communalities="component") #' fComponent #' rRecovery(RU,fComponent$loadings, diagCommunalities=FALSE) #' #' fMaxr <- iterativePrincipalAxis(RU, nFactors=nFactors, #' communalities="maxr") #' fMaxr #' rRecovery(RU,fMaxr$loadings, diagCommunalities=FALSE) #' #' fMultiple <- iterativePrincipalAxis(RU, nFactors=nFactors, #' communalities="multiple") #' fMultiple #' rRecovery(RU,fMultiple$loadings, diagCommunalities=FALSE) #' # ....................................................... #' } #' } #' iterativePrincipalAxis <- function(R, nFactors=2, communalities="component", iterations=20, tolerance=0.001) { if (communalities == "component") diag(R) <- componentAxis(R)$communalities if (communalities == "maxr") { RT <- R; diag(RT) <- 0; diag(R) <- apply(RT, 1, max)} if (communalities == "ginv") diag(R) <- sqrt(1-1/diag(MASS::ginv(R))) if (communalities == "multiple") { if (all(eigen(R)$values > 0)) diag(R) <- sqrt(1-1/diag(solve(R))) # Gorsuch (1983, p. 106) else return("Not all eigenvalues are grater than 0") # Verication of positive definiteness } iter <- 1; tol <- 1 while ((iter < iterations) && (tol > tolerance)) { # for (i in (1:iterations)) oldR <- diag(R) diag(R) <- componentAxis(R, nFactors)$communalities tol <- max(abs(diag(R) - oldR)) iter <- iter + 1 } if (tol > tolerance) warning("Maximum number of iterations needed before the desired tolerance: cautious solution.") iapa <- componentAxis(R, nFactors) iapa <- list(values = iapa$values, varExplained = iapa$varExplained, cumVarExplained = iapa$cumVarExplained, loadings = iapa$loadings, iterations = iter, tolerance = tol) return(iapa) } nFactors/R/nMreg.r0000644000176200001440000001101415017050376013466 0ustar liggesusers#' Multiple Regression Procedure to Determine the Number of Components/Factors #' #' This function computes the \eqn{\beta} indices, like their associated #' Student \emph{t} and probability (Zoski and Jurs, 1993, 1996, p. 445). These #' three values can be used as three different indices for determining the #' number of components/factors to retain. 
#' #
#' When the associated Student \emph{t} test is applied, the following
#' hypothesis is considered: \cr
#'
#' (1) \eqn{\qquad \qquad H_k: \beta (\lambda_1 \ldots \lambda_k) - \beta
#' (\lambda_{k+1} \ldots \lambda_p), (k = 3, \ldots, p-3) = 0} \cr
#'
#'
#' @param x numeric: a \code{vector} of eigenvalues, a \code{matrix} of
#' correlations or of covariances or a \code{data.frame} of data (eigenFrom)
#' @param cor logical: if \code{TRUE} computes eigenvalues from a correlation
#' matrix, else from a covariance matrix
#' @param model character: \code{"components"} or \code{"factors"}
#' @param details logical: if \code{TRUE} also returns details about the
#' computation for each eigenvalue.
#' @param ... variable: additional parameters to give to the
#' \code{eigenComputes} and \code{cor} or \code{cov} functions
#' @return \item{nFactors}{ numeric: number of components/factors retained by
#' the \emph{MREG} procedures. } \item{details}{ numeric: matrix of the details
#' for each index.}
#' @author Gilles Raiche \cr Centre sur les Applications des Modeles de
#' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr
#' \email{raiche.gilles@@uqam.ca}
#' @seealso \code{\link{plotuScree}}, \code{\link{nScree}},
#' \code{\link{plotnScree}}, \code{\link{plotParallel}}
#' @references Zoski, K. and Jurs, S. (1993). Using multiple regression to
#' determine the number of factors to retain in factor analysis. \emph{Multiple
#' Linear Regression Viewpoints, 20}(1), 5-9.
#'
#' Zoski, K. and Jurs, S. (1996). An objective counterpart to the visual scree
#' test for factor analysis: the standard error scree test. \emph{Educational
#' and Psychological Measurement, 56}(3), 443-451.
#' @export
#' @importFrom stats sd lm pt
#' @keywords multivariate
#' @examples
#' \dontrun{
#' if(interactive()){
#' ## SIMPLE EXAMPLE OF A MREG ANALYSIS
#'
#' data(dFactors)
#' eig <- dFactors$Raiche$eigenvalues
#'
#' results <- nMreg(eig)
#' results
#'
#' plotuScree(eig, main=paste(results$nFactors[1], ", ",
#'                            results$nFactors[2], " or ",
#'                            results$nFactors[3],
#'                            " factors retained by the MREG procedures",
#'                            sep=""))
#' }
#' }
nMreg <- function(x, cor=TRUE, model="components", details=TRUE, ...) {
 x <- eigenComputes(x, cor=cor, model=model, ...)
 nlength <- 3
 detail <- NULL
 n <- length(x)
 if (n < 6) stop("The number of variables must be at least 6.")
 i <- 1
 mreg <- tmreg <- tmreg2 <- pmreg <- numeric(n-5)
 while (i <= (length(x)-5)) {
  xa <- c(1:(i+2))
  ya <- x[1:(i+2)]
  ma <- stats::lm(ya ~ xa)
  Syx.a <- stats::sd(ya)*sqrt((1-summary(ma)$r.squared) * ((length(ya)-1)/(length(ya)-2))) # Howell(2008, p. 253)
  compa <- ma$coef[2]
  seCompa <- summary(ma)$coef[2,2]
  xb <- c((i+1+nlength):length(x))
  yb <- x[(i+1+nlength):length(x)]
  mb <- stats::lm(yb ~ xb)
  Syx.b <- stats::sd(yb)*sqrt((1-summary(mb)$r.squared) * ((length(yb)-1)/(length(yb)-2))) # Howell(2008, p. 253)
  compb <- mb$coef[2]
  seCompb <- summary(mb)$coef[2,2]
  mreg[i] <- compb - compa
  semreg <- sqrt((Syx.a^2)/((length(xa)-1)*stats::sd(xa)^2) + (Syx.b^2)/((length(xb)-1)*stats::sd(xb)^2)) # Se_dif_b -> Howell(2008, p. 259, 266)
  tmreg[i] <- (compb - compa)/(semreg)
  tmreg2[i] <- (mreg[i])/sqrt(seCompa^2 + seCompb^2) # In my opinion, there seems to be an error in Zoski and Jurs' formula. This would be the correct formula, like the one above, but faster to compute.
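  # One-sided Student-t probability of the slope difference, returned on the
  # log scale (log.p=TRUE); the smallest value across i is used below to
  # select the number of factors for the p.b index.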
pmreg[i] <- stats::pt(tmreg[i],(length(xa)-1) + (length(xb)-1) - 4, lower.tail=FALSE, log.p=TRUE) i <- i + 1 } if (details == TRUE) detail <- data.frame(v=(1:(n-5)),values=x[1:(n-5)], mreg=mreg, tmreg=tmreg, pmreg=pmreg) mreg <- as.numeric(which(mreg ==max( mreg, na.rm=TRUE)) + nlength) tmreg <- as.numeric(which(tmreg==max(tmreg, na.rm=TRUE))) pmreg <- as.numeric(which(pmreg==min(pmreg, na.rm=TRUE))) res <- list(detail=detail, nFactors=c(b=mreg,t.p=tmreg,p.b=pmreg)) class(res) <- c("nFactors","list") return(res) } nFactors/R/plotuScree.R0000644000176200001440000000422415017056046014510 0ustar liggesusers#' Plot of the Usual Cattell's Scree Test #' #' \code{uScree} plot a usual scree test of the eigenvalues of a correlation #' matrix. #' #' #' @param Eigenvalue depreciated parameter: eigenvalues to analyse (not used if #' x is used, recommended) #' @param x numeric: a \code{vector} of eigenvalues, a \code{matrix} of #' correlations or of covariances or a \code{data.frame} of data #' @param model character: \code{"components"} or \code{"factors"} #' @param main character: title of the plot (default is \code{Scree Plot}) #' @param xlab character: label of the x axis (default is \code{Component}) #' @param ylab character: label of the y axis (default is \code{Eigenvalue}) #' @param ... variable: additionnal parameters to give to the #' \code{eigenComputes} function #' @return Nothing returned with this function. #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{nScree}}, \code{\link{parallel}} #' @references Cattell, R. B. (1966). The scree test for the number of factors. #' \emph{Multivariate Behavioral Research, 1}, 245-276. #' @export #' @importFrom graphics lines par text plot.default #' @importFrom stats cor #' @keywords Graphics #' @examples #' \dontrun{ #' if(interactive()){ #' ## SCREE PLOT #' data(dFactors) #' attach(dFactors) #' eig = Cliff1$eigenvalues #' plotuScree(x=eig) #' } #' } "plotuScree" <- function(Eigenvalue, x=Eigenvalue, model = "components", ylab = "Eigenvalues", xlab = "Components", main = "Scree Plot" , ...) { Eigenvalue <- eigenComputes(x, ...) if (!inherits(Eigenvalue, "numeric")) stop("use only with \"numeric\" objects") if (model == "factors") xlab <- "Factors" graphics::par(mfrow = c(1,1)) nk <- length(Eigenvalue) Component <- 1:nk graphics::plot.default(as.numeric(Component), as.numeric(Eigenvalue), type = 'b',col = "black", pch = 1, ylab = ylab, xlab = xlab, main = main ) } nFactors/R/eigenBootParallel.r0000644000176200001440000001100715017046256016012 0ustar liggesusers# PERMETTRE LES CALCULS AVEC DES DONNEES DISCRETES AUSSI #' Bootstrapping of the Eigenvalues From a Data Frame #' #' The \code{eigenBootParallel} function samples observations from a #' \code{data.frame} to produce correlation or covariance matrices from which #' eigenvalues are computed. The function returns statistics about these #' bootstrapped eigenvalues. Their means or their quantile could be used later #' to replace the eigenvalues inputted to a parallel analysis. The #' \code{eigenBootParallel} can also compute random eigenvalues from empirical #' data by column permutation (Buja and Eyuboglu, 1992). 
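#'
#' Note that, as implemented below, \code{option="permutation"} resamples each
#' column of \code{x} independently (with replacement), which breaks the
#' associations between variables and therefore yields null-model eigenvalues,
#' whereas \code{option="bootstrap"} resamples complete rows, so the
#' bootstrapped eigenvalues keep the observed correlation structure.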
#' #' #' @param x data.frame: data from which a correlation matrix will be obtained #' @param quantile numeric: eigenvalues quantile to be reported #' @param nboot numeric: number of bootstrap samples #' @param option character: \code{"permutation"} or \code{"bootstrap"} #' @param cor logical: if \code{TRUE} computes eigenvalues from a correlation #' matrix, else from a covariance matrix (\code{eigenComputes}) #' @param model character: bootstraps from a principal component analysis #' (\code{"components"}) or from a factor analysis (\code{"factors"}) #' @param ... variable: additionnal parameters to give to the \code{cor} or #' \code{cov} functions #' @return \item{values}{ data.frame: mean, median, quantile, standard #' deviation, minimum and maximum of bootstrapped eigenvalues } #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{principalComponents}}, #' \code{\link{iterativePrincipalAxis}}, \code{\link{rRecovery}} #' @references Buja, A. and Eyuboglu, N. (1992). Remarks on parallel analysis. #' \emph{Multivariate Behavioral Research, 27}(4), 509-540. #' #' Zwick, W. R. and Velicer, W. F. (1986). Comparison of five rules for #' determining the number of components to retain. \emph{Psychological #' bulletin, 99}, 432-442. #' @keywords multivariate #' @export #' @importFrom stats cov cor #' @examples #' \dontrun{ #' if(interactive()){ #' # ....................................................... #' # Example from the iris data #' eigenvalues <- eigenComputes(x=iris[,-5]) #' #' # Permutation parallel analysis distribution #' aparallel <- eigenBootParallel(x=iris[,-5], quantile=0.95)$quantile #' #' # Number of components to retain #' results <- nScree(x = eigenvalues, aparallel = aparallel) #' results$Components #' plotnScree(results) #' # ...................................................... #' #' # ...................................................... #' # Bootstrap distributions study of the eigenvalues from iris data #' # with different correlation methods #' eigenBootParallel(x=iris[,-5],quantile=0.05, #' option="bootstrap",method="pearson") #' eigenBootParallel(x=iris[,-5],quantile=0.05, #' option="bootstrap",method="spearman") #' eigenBootParallel(x=iris[,-5],quantile=0.05, #' option="bootstrap",method="kendall") #' } #' } #' "eigenBootParallel" <- function(x, quantile=0.95, nboot=30, option="permutation", cor=TRUE, model="components", ...) { if (eigenFrom(x) != "data") stop("Only data from a data.frame must be used as input") x <- data.frame(x) res <- data.frame(matrix(NA, ncol=dim(x)[2], nrow=nboot)) if (model == "components") { names(res) <- paste("C", 1:dim(x)[2], sep="") } else names(res) <- paste("F", 1:dim(x)[2], sep="") if (option == "permutation") { for (i in 1:nboot) { rPerm <- apply(x,2,sample, replace=TRUE) if (cor == TRUE) corY <- stats::cor(rPerm, ...) if (cor == FALSE) corY <- stats::cov(rPerm, ...) if (model == "factors") corY <- corFA(corY, method="ginv") res[i,] <- eigen(corY, only.values=TRUE)$values } } if (option == "bootstrap") { for (i in 1:nboot) { rBoot <- sample(1:dim(x)[1], dim(x)[1], replace=TRUE) if (cor == TRUE) corY <- stats::cor(x[rBoot,], ...) if (cor == FALSE) corY <- stats::cov(x[rBoot,], ...) 
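      # For a factor-analysis model, the resampled matrix is first passed
      # through corFA() to put communality estimates on the diagonal, as in
      # the permutation branch above, before the eigenvalues are extracted.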
if (model == "factors") corY <- corFA(corY, method="ginv") res[i,] <- eigen(corY, only.values=TRUE)$values #if (cor == TRUE) res[i,] <- eigen(stats::cor(x[rBoot,], ...), only.values=TRUE)$values #if (cor == FALSE) res[i,] <- eigen(stats::cov(x[rBoot,], ...), only.values=TRUE)$values } } res <- data.frame(t(moreStats(res, quantile=quantile))) return(res) } nFactors/R/structureSimObjectMethods.r0000644000176200001440000001567515017120534017615 0ustar liggesusers#' Utility Functions for nScree Class Objects #' #' Utility functions for \code{structureSim} class objects. Note that with the #' \code{plot.structureSim} a dotted black vertical line shows the median #' number of factors retained by all the different indices. #' @rdname structureSimObjectMethods #' #' @aliases boxplot.structureSim is.structureSim plot.structureSim #' print.structureSim summary.structureSim #' @param eigenSelect numeric: vector of the index of the selected eigenvalues #' @param index numeric: vector of the index of the selected indices #' @param main character: main title #' @param nFactors numeric: if known, number of factors #' @param object structureSim: an object of the class \code{structureSim} #' @param vLine character: color of the vertical indicator line of the initial #' number of factors in the eigen boxplot #' @param x structureSim: an object of the class \code{structureSim} #' @param xlab character: x axis label #' @param ylab character: y axis label #' @param ... variable: additionnal parameters to give to the \code{boxplot}, #' \code{plot}, \code{print} and \code{summary functions.} #' @return Generic functions for the \code{structureSim} class: #' \item{boxplot.structureSim }{ graphic: plots an eigen boxplot } #' \item{is.structureSim}{ logical: is the object of the class #' \code{structureSim}? } \item{plot.structureSim }{ graphic: plots an index #' acuracy plot} \item{print.structureSim }{ numeric: data.frame of statistics #' about the number of components/factors to retain according to different #' indices following a \code{structureSim} simulation} #' \item{summary.structureSim }{ list: two data.frame, the first with the #' details of the simulated eigenvalues, the second with the details of the #' simulated indices} #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{nFactors-package}} #' @references #' #' Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions #' for Cattell's scree test. Methodology, 9(1), 23-29. 
#' #' @export #' @importFrom graphics boxplot abline lines #' @importFrom stats median #' @keywords multivariate #' @examples #' #' \dontrun{ #' if(interactive()){ #' ## INITIALISATION #' library(xtable) #' library(nFactors) #' nFactors <- 3 #' unique <- 0.2 #' loadings <- 0.5 #' nsubjects <- 180 #' repsim <- 10 #' var <- 36 #' pmjc <- 12 #' reppar <- 10 #' zwick <- generateStructure(var=var, mjc=nFactors, pmjc=pmjc, #' loadings=loadings, #' unique=unique) #' #' ## SIMULATIONS #' mzwick <- structureSim(fload=as.matrix(zwick), reppar=reppar, #' repsim=repsim, details=TRUE, #' N=nsubjects, quantile=0.5) #' #' ## TEST OF structureSim METHODS #' is(mzwick) #' summary(mzwick, index=1:5, eigenSelect=1:10, digits=3) #' print(mzwick, index=1:10) #' plot(x=mzwick, index=c(1:10), cex.axis=0.7, col="red") #' graphics::boxplot(x=mzwick, nFactors=3, vLine="blue", col="red") #' } #' } #' ## ................................................................. summary.structureSim <- function(object, index=c(1:15), eigenSelect=NULL, ...) { if (!is.structureSim(object)) stop("Not a structureSim object") if (is.null(eigenSelect)) eigenSelect <- c(1:dim(object$details$eigenvalues)[2]) cat("Report For a structureSim Class \n\n") NextMethod() cat(paste("Simulated eigenvalues","\n\n")) object$details$eigenvalues <- round(object$details$eigenvalues[,eigenSelect], ...) colnames(object$details$eigenvalues) <- paste("E",eigenSelect,sep="") print(object$details$eigenvalues) cat(paste("\n\n Number of factors retained by each index for each simulation","\n\n")) object$details$components <- round(object$details$components[,index], ...) print(object$details$components) } # summary(mzwick, index=1:5, eigenSelect=1:10, digits=2) # summary.structureSim(x) # summary(x) ## ................................................................. #' @rdname structureSimObjectMethods #' @export ## ................................................................. print.structureSim <- function(x, index=NULL, ...) { if (!is.structureSim(x)) stop("Not a structureSim object") if (is.null(index)) index <- c(1:dim(x$nFactors)[2]) res <- x$nFactors[,index] print(res, ...) } # print(mzwick, index=c(1:13), 2) # print.structureSim(x) # print(x) ## ................................................................. #' @rdname structureSimObjectMethods #' @export ## ................................................................. boxplot.structureSim <- function(x, nFactors=NULL, eigenSelect=NULL, vLine="green", xlab="Factors", ylab="Eigenvalues", main="Eigen Box Plot", ...) { if (!is.structureSim(x)) stop("Not a structureSim object") if (is.null(eigenSelect)) eigenSelect <- c(1:dim(x$details$eigenvalues)[2]) graphics::boxplot(x$details$eigenvalues[,eigenSelect], xlab=xlab, ylab=ylab, main=main, ...) graphics::abline(v=nFactors, lty=2, col=vLine) } # boxplot(mzwick, nFactors=3, eigenSelect=1:5, vLine="blue", col="red") # boxplot.structureSim(x) # boxplot(x) ## ................................................................. #' @rdname structureSimObjectMethods #' @export ## ................................................................. plot.structureSim <- function(x, nFactors=NULL, index=NULL, main="Index Acuracy Plot", ...) 
{ if (!is.structureSim(x)) stop("Not a structureSim object") if (is.null(index)) index <- c(1:dim(x$details$components)[2]) if (!exists("col") == TRUE) col <- "black" ylab <- "Average Number of Factors Retained" tx <- t(x[[2]][,index]) tx <- data.frame(Index=rownames(tx),tx) colnames(tx)[2] <- "Mean" tx <- tx[order(tx[,1]),] plot(Mean ~ Index, type="n", data=tx, main=main, ...) #plot(Mean ~ Index, data=tx, cex.lab=1, cex.axis=0.7, type="n", ylab=ylab) graphics::abline(h=nFactors, ...) graphics::abline(h=stats::median(tx[2,], na.rm=TRUE), lty=2, col="black") for (i in 1:length(tx[,2])) graphics::lines(y=c(0,tx[i,2]), x=c(i,i), lty=2) } # plot.structureSim(x=mzwick, nFactors=3, index=c(1:10), cex.axis=0.7, col="red") # plot.structureSim(x) # plot(x) ## ................................................................. #' @rdname structureSimObjectMethods #' @export ## ................................................................. is.structureSim <- function(object) { if (inherits(object, "structureSim")) return(TRUE) else return(FALSE) } # is.structureSim(mzwick) # is.structureSim(x) ## ................................................................. nFactors/R/data.R0000644000176200001440000000763215017053612013276 0ustar liggesusers #' Eigenvalues from classical studies #' #' Classical examples of eigenvalues vectors used to study the number of factors #' to retain in the litterature. These examples generally give the number of #' subjects use to obtain these eigenvalues. #' The number of subjects is used with the parallel analysis. #' #' Other datasets will be added in future versions of the package. #' #' @name dFactors #' @docType data #' #' @format A list of examples. For each example, a list is also used to give the eigenvalues #' vector and the number of subjects. #' \describe{ #' \item{Bentler}{$eigenvalues and $nsubjects} #' \item{Buja}{$eigenvalues and $nsubjects} #' \item{Cliff1}{$eigenvalues and $nsubjects} #' \item{Cliff2}{$eigenvalues and $nsubjects} #' \item{Cliff3}{$eigenvalues and $nsubjects} #' \item{Hand}{$eigenvalues and $nsubjects} #' \item{Harman}{$eigenvalues and $nsubjects} #' \item{Lawley}{$eigenvalues and $nsubjects} #' \item{Raiche}{$eigenvalues and $nsubjects} #' \item{Tucker1}{$eigenvalues and $nsubjects} #' \item{Tucker2}{$eigenvalues and $nsubjects} #' } #' #' @source #' Lawley and Hand dataset: Bartholomew \emph{et al}. (2002, p. 123, 126) #' #' Bentler dataset: Bentler and Yuan (1998, p. 139-140) #' #' Buja datasets: Buja and Eyuboglu (1992, p. 516, 519) < Number of subjects not specified by Buja and Eyuboglu > #' #' Cliff datasets: Cliff (1970, p. 165) #' #' Raiche dataset: Raiche, Langevin, Riopel and Mauffette (2006) #' #' Raiche dataset: Raiche, Riopel and Blais (2006, p. 9) #' #' Tucker datasets: Tucker \emph{et al}. (1969, p. 442) #' #' @references Bartholomew, D. J., Steele, F., Moustaki, I. and Galbraith, J. #' I. (2002). \emph{The analysis and interpretation of multivariate data for #' social scientists}. Boca Raton, FL: Chapman and Hall. #' #' Bentler, P. M. and Yuan, K.-H. (1998). Tests for linear trend in the #' smallest eigenvalues of the correlation matrix. \emph{Psychometrika, 63}(2), #' 131-144. #' #' Buja, A. and Eyuboglu, N. (1992). Remarks on parallel analysis. #' \emph{Multivariate Behavioral Research, 27}(4), 509-540. #' #' Cliff, N. (1970). The relation between sample and population characteristic #' vectors. \emph{Psychometrika, 35}(2), 163-178. #' #' Hand, D. J., Daly, F., Lunn, A. D., McConway, K. J. and Ostrowski, E. #' (1994). 
\emph{A handbook of small data sets}. Boca Raton, FL: Chapman and #' Hall. #' #' Lawley, D. N. and Maxwell, A. E. (1971). \emph{Factor analysis as a #' statistical method} (2nd edition). London: Butterworth. #' #' Raiche, G., Langevin, L., Riopel, M. and Mauffette, Y. (2006). Etude #' exploratoire de la dimensionnalite et des facteurs expliques par une #' traduction francaise de l'Inventaire des approches d'enseignement de #' Trigwell et Prosser dans trois universite quebecoises. \emph{Mesure et #' Evaluation en Education, 29}(2), 41-61. #' #' Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). #' Non-graphical solutions for Cattell's scree test. Methodology, 9(1), 23-29. #' #' Tucker, L. D., Koopman, R. F. and Linn, R. L. (1969). Evaluation of factor #' analytic research procedures by mean of simulated correlation matrices. #' \emph{Psychometrika, 34}(4), 421-459. #' #' Zoski, K. and Jurs, S. (1993). Using multiple regression to determine the #' number of factors to retain in factor analysis. \emph{Multiple Linear #' Regression Viewpoint, 20}(1), 5-9. #' #' @keywords datasets #' #' @examples #' \dontrun{ #' if(interactive()){ #' # EXAMPLES FROM DATASET #' data(dFactors) #' #' # COMMAND TO VISUALIZE THE CONTENT AND ATTRIBUTES OF THE DATASETS #' names(dFactors) #' attributes(dFactors) #' dFactors$Cliff1$eigenvalues #' dFactors$Cliff1$nsubjects #' #' # SCREE PLOT OF THE Cliff1 DATASET #' plotuScree(dFactors$Cliff1$eigenvalues) #' } #' } "dFactors" nFactors/R/studySim.r0000644000176200001440000001360313620574534014252 0ustar liggesusers#' Simulation Study from Given Factor Structure Matrices and Conditions #' #' The \code{structureSim} function returns statistical results from #' simulations from predefined congeneric factor structures. The main ideas #' come from the methodology applied by Zwick and Velicer (1986). #' #' #' @param var numeric: vector of the number of variables #' @param nFactors numeric: vector of the number of components/factors #' @param pmjc numeric: vector of the number of major loadings on each #' component/factor #' @param loadings numeric: vector of the major loadings on each #' component/factor #' @param unique numeric: vector of the unique loadings on each #' component/factor #' @param N numeric: vector of the number of subjects/observations #' @param repsim numeric: number of replications of the matrix correlation #' simulation #' @param reppar numeric: number of replications for the parallel and #' permutation analysis #' @param stats numeric: vector of the statistics to return: mean(1), #' median(2), sd(3), quantile(4), min(5), max(6) #' @param quantile numeric: quantile for the parallel and permutation analysis #' @param model character: \code{"components"} or \code{"factors"} #' @param r2limen numeric: R2 limen value for the R2 Nelson index #' @param all logical: if \code{TRUE} computes the Bentler and Yuan index (very #' long computing time to consider) #' @param dir character: directory where to save output. 
Default to NA #' @param trace logical: if \code{TRUE} outputs details of the status of the #' simulations #' @return \item{values}{ Returns selected statistics about the number of #' components/factors to retain: mean, median, quantile, standard deviation, #' minimum and maximum.} #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{generateStructure}}, \code{\link{structureSim}} #' @references #' Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions #' for Cattell's scree test. Methodology, 9(1), 23-29. #' #' Zwick, W. R. and Velicer, W. F. (1986). Comparison of five rules #' for determining the number of components to retain. \emph{Psychological #' Bulletin, 99}, 432-442. #' @export #' @keywords multivariate #' @examples #' #' \dontrun{ #' # .................................................................... #' # Example inspired from Zwick and Velicer (1986) #' # Very long computimg time #' # ................................................................... #' #' # 1. Initialisation #' # reppar <- 30 #' # repsim <- 5 #' # quantile <- 0.50 #' #' # 2. Simulations #' # X <- studySim(var=36,nFactors=3, pmjc=c(6,12), loadings=c(0.5,0.8), #' # unique=c(0,0.2), quantile=quantile, #' # N=c(72,180), repsim=repsim, reppar=reppar, #' # stats=c(1:6)) #' #' # 3. Results (first 10 results) #' # print(X[1:10,1:14],2) #' # names(X) #' #' # 4. Study of the error done in the determination of the number #' # of components/factors. A positive value is associated to over #' # determination. #' # results <- X[X$stats=="mean",] #' # residuals <- results[,c(11:25)] - X$nfactors #' # BY <- c("nsubjects","var","loadings") #' # round(aggregate(residuals, by=results[BY], mean),0) #' } #' studySim <- function(var, nFactors, pmjc, loadings, unique, N, repsim, reppar, stats=1, quantile=0.5, model="components", r2limen=0.75, all=FALSE, dir=NA, trace=TRUE) { nsubjects <- N result <- NULL id <- 0 nid <- length(nFactors) * length(loadings) * length(pmjc) * length(var) * length(unique) * length(nsubjects) for (i in 1:length(nFactors)) { for (j in 1:length(loadings)) { for (l in 1:length(pmjc)) { for (n in 1:length(var)) { for (k in 1:length(unique)) { for (m in 1:length(nsubjects)) { id <- id + 1 kid <- paste(id,"/",nid,sep="") ident <- c(nFactors=nFactors[i], loadings=loadings[j], unique=unique[k], quantile=quantile, pmjc=pmjc[l], nsubjects=nsubjects[m], var=var[n], reppar=reppar, repsim=repsim, id=kid, model=model) if (trace == TRUE) print(ident) fStruct <- generateStructure(var=var[n], mjc=nFactors[i], pmjc=pmjc[l], loadings=loadings[j], unique=unique[k]) fSim <- structureSim(fload=as.matrix(fStruct), reppar=reppar, repsim=repsim, details=FALSE, all=all, N=nsubjects[m], quantile=quantile, model=model, r2limen=r2limen)[[2]][stats,] if (length(stats) == 1) { fSim <- data.frame(var=var[n], nsubjects=nsubjects[m], nfactors=nFactors[i], pmjc=pmjc[l], loadings=loadings[j], unique=unique[k], t(fSim), repsim=repsim, reppar=reppar) } if (length(stats) > 1) { ls <- length(stats) info <- data.frame(stats =rownames(fSim), id =rep(id, ls), var =rep(var[n], ls), nsubjects=rep(nsubjects[m], ls), nfactors=rep(nFactors[i], ls), pmjc =rep(pmjc[l], ls), loadings=rep(loadings[j], ls), unique =rep(unique[k], ls), repsim =rep(repsim, ls), reppar =rep(reppar, ls)) fSim <- data.frame(info, fSim) } result <- rbind(result, fSim) rownames(result) <- 
1:dim(result)[1] fString <- paste("RES_", paste(ident,"_", sep="", collapse=""), sep="") if (!is.na(dir)) save("fSim", file=paste(dir, fString,".Rdata", sep="")) }}}}}} return(result) } nFactors/R/moreStats.r0000644000176200001440000000410115017047576014406 0ustar liggesusers#' Statistical Summary of a Data Frame #' #' This function produces an alternative summary of a \code{data.frame}. It #' was proposed in order to apply some functions globally on a \code{data.frame}: #' \code{quantile}, \code{median}, \code{min} and \code{max}. The usual \emph{R} #' version cannot do so. #' #' @param x numeric: matrix or \code{data.frame} #' @param quantile numeric: quantile of the distribution #' @param show logical: if \code{TRUE} prints the chosen quantile #' @return numeric: \code{data.frame} of statistics: mean, median, quantile, standard deviation, minimum and maximum #' #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' #' @seealso \code{\link{plotuScree}}, \code{\link{nScree}}, \code{\link{plotnScree}}, \code{\link{plotParallel}} #' @export #' @importFrom stats sd median #' @keywords multivariate #' @examples #' \dontrun{ #' if(interactive()){ #' ## ................................................ #' ## GENERATION OF A MATRIX OF 100 OBSERVATIONS AND 10 VARIABLES #' x <- matrix(rnorm(1000),ncol=10) #' #' ## STATISTICS #' res <- moreStats(x, quantile=0.05, show=TRUE) #' res #' } #' } moreStats <- function(x, quantile=0.95, show=FALSE) { cent <- quantile # The old parameter was labeled cent x <- data.frame(x) xMean <- sapply(x, mean) # mean(x) xSd <- sapply(x, stats::sd) # sd(x) xMin <- xMax <- xMedian <- xQuantile <- numeric(ncol(x)) for (i in 1:ncol(x)) { xMin[i] <- min(x[,i]) xMax[i] <- max(x[,i]) xMedian[i] <- stats::median(x[,i]) xQuantile[i] <- quantile(x[,i],probs=cent,names=FALSE, na.rm=TRUE) # quantile(rnorm(1000),probs=cent) } names <- colnames(x) results <- rbind(mean=xMean, median=xMedian, quantile=xQuantile, sd=xSd, min=xMin, max=xMax) if (show==TRUE) { cat("------------------------ \n") cat("Quantile specified:", cent, "\n") cat("------------------------ \n") } return(results) } nFactors/R/generateStructure.r0000644000176200001440000001234415017054012016127 0ustar liggesusers#' Generate a Factor Structure Matrix #' #' The \code{generateStructure} function returns a \emph{mjc} factor structure matrix. #' The number of variables per major factor \emph{pmjc} is equal for each factor. #' The argument \emph{var} must be divisible by \emph{mjc}. #' The arguments are strongly inspired by Zwick and Velicer's (1986, p. 435-436) methodology.
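#'
#' As a rough illustration (a hypothetical call, not taken from the package
#' examples), a structure with 6 variables and 2 major factors, each with 3
#' salient loadings, could be generated as:
#' \preformatted{
#' fload <- generateStructure(var=6, mjc=2, pmjc=3, loadings=0.7, unique=0.1)
#' fload  # 6 x 2 matrix: rows 1-3 load 0.7 on factor 1, rows 4-6 load 0.7 on
#'        # factor 2, and every remaining entry equals the unique value 0.1
#' }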
#' #' @param var numeric: number of variables #' @param mjc numeric: number of major factors (factors with practical significance) #' @param pmjc numeric: number of variables that load significantly on each major factor #' @param loadings numeric: loadings on the significant variables on each major factor #' @param unique numeric: loadings on the non significant variables on each major factor #' @return values numeric matrix: factor structure #' #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' \cr \cr David Magis \cr Departement de mathematiques \cr Universite de Liege #' \cr \email{David.Magis@@ulg.ac.be} #' @export #' @importFrom psych sim.structure #' @seealso \code{\link{principalComponents}}, \code{\link{iterativePrincipalAxis}}, \code{\link{rRecovery}} #' @references #' Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions #' for Cattell's scree test. Methodology, 9(1), 23-29. #' #' Zwick, W. R. and Velicer, W. F. (1986). Comparison of five rules for #' determining the number of components to retain. \emph{Psychological Bulletin, 99}, 432-442. #' @keywords multivariate #' @examples #' \dontrun{ #' if(interactive()){ #' # ....................................................... #'# Example inspired from Zwick and Velicer (1986, table 2, p. 437) #'## ................................................................... #'unique=0.2; loadings=0.5 #' zwick1 <- generateStructure(var=36, mjc=6, pmjc= 6, loadings=loadings, #' unique=unique) #'zwick2 <- generateStructure(var=36, mjc=3, pmjc=12, loadings=loadings, #' unique=unique) #'zwick3 <- generateStructure(var=72, mjc=9, pmjc= 8, loadings=loadings, #' unique=unique) #'zwick4 <- generateStructure(var=72, mjc=6, pmjc=12, loadings=loadings, #' unique=unique) #'sat=0.8 #'## ................................................................... #'zwick5 <- generateStructure(var=36, mjc=6, pmjc= 6, loadings=loadings, #' unique=unique) #'zwick6 <- generateStructure(var=36, mjc=3, pmjc=12, loadings=loadings, #' unique=unique) #'zwick7 <- generateStructure(var=72, mjc=9, pmjc= 8, loadings=loadings, #' unique=unique) #'zwick8 <- generateStructure(var=72, mjc=6, pmjc=12, loadings=loadings, #' unique=unique) #'## ................................................................... 
#' #'# nsubjects <- c(72, 144, 180, 360) #'# require(psych) #'# Produce an usual correlation matrix from a congeneric model #'nsubjects <- 72 #'mzwick5 <- psych::sim.structure(fx=as.matrix(zwick5), n=nsubjects) #'mzwick5$r #' #'# Factor analysis: recovery of the factor structure #'iterativePrincipalAxis(mzwick5$model, nFactors=6, #' communalities="ginv")$loadings #'iterativePrincipalAxis(mzwick5$r , nFactors=6, #' communalities="ginv")$loadings #'factanal(covmat=mzwick5$model, factors=6) #'factanal(covmat=mzwick5$r , factors=6) #' #'# Number of components to retain #'eigenvalues <- eigen(mzwick5$r)$values #'aparallel <- parallel(var = length(eigenvalues), #' subject = nsubjects, #' rep = 30, #' quantile = 0.95, #' model="components")$eigen$qevpea #'results <- nScree(x = eigenvalues, #' aparallel = aparallel) #'results$Components #'plotnScree(results) #' #'# Number of factors to retain #'eigenvalues.fa <- eigen(corFA(mzwick5$r))$values #'aparallel.fa <- parallel(var = length(eigenvalues.fa), #' subject = nsubjects, #' rep = 30, #' quantile = 0.95, #' model="factors")$eigen$qevpea #'results.fa <- nScree(x = eigenvalues.fa, #' aparallel = aparallel.fa, #' model ="factors") #'results.fa$Components #'plotnScree(results.fa) #'# ...................................................... #' #' } #' } generateStructure <- function(var, mjc, pmjc, loadings, unique) { if (var/mjc != ceiling(var/mjc)) stop("Bad pmjc value") fload <- matrix(unique, ncol=mjc, nrow=var) for (i in 1:mjc) { if (i == 1) fload[i:(pmjc),i] <- loadings min <- ((i-1)*pmjc+1) max <- ((i-1)*pmjc+pmjc) if (min > dim(fload)[1]) min <- dim(fload)[1] if (max > dim(fload)[1]) max <- dim(fload)[1] if (i > 1) fload[min:max,i] <- loadings if (min > dim(fload)[1]) fload[dim(fload)[1],i] <- unique } return(data.frame(fload)) } nFactors/R/corFA.r0000644000176200001440000000320715016662776013431 0ustar liggesusers#' Insert Communalities in the Diagonal of a Correlation or a Covariance Matrix #' #' This function inserts communalities in the diagonal of a correlation/covariance matrix. #' #' @param R An integer matrix or a data.frame of correlations #' @param method A character vector: inversion method #' @return A correlation matrix with coerced variables with communalities in the diagonal. 
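#' @details With the default \code{"ginv"} method, the unit diagonal of \code{R} is
#' replaced by squared multiple correlations (SMC) computed from the generalized
#' inverse of \code{R}: the communality of variable \emph{i} is \eqn{1 - 1/r^{ii}},
#' where \eqn{r^{ii}} is the \emph{i}th diagonal element of the (generalized) inverse.
#' A minimal sketch of the same computation is given below (illustrative only; the
#' helper name \code{smcDiag} is ad hoc and the exported \code{corFA} should be preferred):
#' \preformatted{
#' smcDiag <- function(R) {
#'   Rinv    <- MASS::ginv(as.matrix(R))  # generalized inverse of R
#'   diag(R) <- 1 - 1/diag(Rinv)          # replace the 1's by the SMCs
#'   R
#' }
#' }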
#' @author Gilles Raiche, Universite du Quebec a Montreal (\email{raiche.gilles@@uqam.ca}) #' @export #' @importFrom MASS ginv #' @seealso \code{\link{plotuScree}}, \code{\link{nScree}}, #' \code{\link{plotnScree}}, \code{\link{plotParallel}} #' #' @examples #' \dontrun{ #' if(interactive()){ #' ## LOWER CORRELATION MATRIX WITH ZEROS ON UPPER PART #' ## From Gorsuch (table 1.3.1) #' gorsuch <- c( #' 1,0,0,0,0,0,0,0,0,0, #' .6283, 1,0,0,0,0,0,0,0,0, #' .5631, .7353, 1,0,0,0,0,0,0,0, #' .8689, .7055, .8444, 1,0,0,0,0,0,0, #' .9030, .8626, .6890, .8874, 1,0,0,0,0,0, #' .6908, .9028, .9155, .8841, .8816, 1,0,0,0,0, #' .8633, .7495, .7378, .9164, .9109, .8572, 1,0,0,0, #' .7694, .7902, .7872, .8857, .8835, .8884, .7872, 1,0,0, #' .8945, .7929, .7656, .9494, .9546, .8942, .9434, .9000, 1,0, #' .5615, .6850, .8153, .7004, .6583, .7720, .6201, .6141, .6378, 1) #' #' ## UPPER CORRELATION MATRIX FILLED WITH UPPER CORRELATION MATRIX #' gorsuch <- makeCor(gorsuch) #' #' ## REPLACE DIAGONAL WITH COMMUNALITIES #' gorsuchCfa <- corFA(gorsuch) #' gorsuchCfa #' } #' } #' #' @keywords manip "corFA" <- function(R, method="ginv") { R <- as.matrix(R) if (method == "ginv") return(R - MASS::ginv(diag(diag(MASS::ginv(R))))) } nFactors/R/makeCor.r0000644000176200001440000000324715017054140014001 0ustar liggesusers#' Create a Full Correlation/Covariance Matrix from a Matrix With Lower Part Filled and Upper Part With Zeros #' #' This function creates a full correlation/covariance matrix from a matrix with #' lower part filled and upper part with zeros. #' @param x numeric: matrix #' @return numeric: full correlation matrix #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{plotuScree}}, \code{\link{nScree}}, \code{\link{plotnScree}}, \code{\link{plotParallel}} #' @export #' @keywords multivariate #' @examples #' \dontrun{ #' if(interactive()){ #' ## ................................................ #'## LOWER CORRELATION MATRIX WITH ZEROS ON UPPER PART #'## From Gorsuch (table 1.3.1) #'gorsuch <- c( #' 1,0,0,0,0,0,0,0,0,0, #' .6283, 1,0,0,0,0,0,0,0,0, #' .5631, .7353, 1,0,0,0,0,0,0,0, #' .8689, .7055, .8444, 1,0,0,0,0,0,0, #' .9030, .8626, .6890, .8874, 1,0,0,0,0,0, #' .6908, .9028, .9155, .8841, .8816, 1,0,0,0,0, #' .8633, .7495, .7378, .9164, .9109, .8572, 1,0,0,0, #' .7694, .7902, .7872, .8857, .8835, .8884, .7872, 1,0,0, #' .8945, .7929, .7656, .9494, .9546, .8942, .9434, .9000, 1,0, #' .5615, .6850, .8153, .7004, .6583, .7720, .6201, .6141, .6378, 1) #' #'## UPPER CORRELATION MATRIX FILLED WITH UPPER CORRELATION MATRIX #'gorsuch <- makeCor(gorsuch) #'gorsuch #' } #' } makeCor <- function(x) { if (is.matrix(x)) stop("x is not a vector.") upper <- matrix(x,ncol=10, byrow=FALSE) diag(upper) <- 0 lower <- matrix(x,ncol=10, byrow=TRUE) res <- lower + upper return(res) } nFactors/NEWS0000644000176200001440000001300215017133236012525 0ustar liggesusers ****************************************************** * * * Changes and Developments in the nFactors Package * * * ****************************************************** ------------------------------------------ - Changes in nFactors 2.4.1.2 (2025-06-01) ------------------------------------------ Use of GPL-3 license. Clean Roxygen documentation. 
------------------------------------------ - Changes in nFactors 2.4.0 (2020-03-20) - ------------------------------------------ Use of the function methodss::class1() instead of the function class(). Transition to Roxygen documentation. ------------------------------------------ - Changes in nFactors 2.3.3 (2011-12-16) - ------------------------------------------ Changes done because the functions mean() and sd() in the moreStats() function were depreciated with data.frames. xMean <- sapply(x, mean) # mean(x) before change xSd <- sapply(x, sd) # sd(x) before change ------------------------------------------ - Changes in nFactors 2.3.2 (2010-10-04) - ------------------------------------------ Changes in this version are stricly limited to orthographical correction of the documentation. ------------------------------------------ - Changes in nFactors 2.3.1 (2009-14-10) - ------------------------------------------ o Two bugs were fixed inside the function structureSim. The first one, at line 21, is related to permutation analysis where the quantile paramater of the function call stayed fixed at 0.95 even if the value is different in the call. The variable quantile is now formally use in the call to eigenBootParallel. The second bug was realted to an error in the column names of the data.frame return from structureSim: the per and mean.eig columns were unfortunalely inrerchanged. Version 2.3.1 fixed it. o Bug fixing was the occasion to introduce a new function used for a paper proposed to Behaviormetrika, studySim. But this function is useful for many other simulation settings. ---------------------------------------- - Changes in nFactors 2.3 (2009-15-09) - ---------------------------------------- This version of nFactors is a major upgrade and so presents important additions and modifications. Care was taken to not modified parameters from version 2.2 calls so that functions and packages already requiring nFactors will yet operate correctly in the future. But like with all mojor upgrades, care must be taken and it is recommanded that developpers verify their results. All the future upgrades won't have this potentiel problems. o Many new procedures to determine the number of components or factors to retain are added: permutation and bootstrap parallel analysis, CNG, Bentler and Yuan, Bartlett, Anderson, Lawley, Zosky and Jurs, etc. o Care is taken to uniformise the labelling of new functions and new variables. According to the Java coding practice, with this labelling, the names begin with a small character, and capitals are used inside for added concepts. o It is now possible to do most of the nfactors package analysis on a covariance matrix. o It is now possible to do most of the nfactors package analysis in the CFA context. o The permutation parallel analysis of Buja and Eyuboglu (1992) is added. o It is now possible to bootstrap the eigenvalues from an empirical data matrix. o New heuristic numerical indices are added to determine the number of components/factores to retain: CNG, Zoski and Jurs multiple regression, Joski and Jurs standard error of the scree, and Nelson R. o Likelihood ratio tests are added: Bartlet, Anderson, Lawley, and Bentler and Yuan chi-squared. o The eigenComputes function computes eigenvalues conditional of the class of the object from which data come from: eigenvalues from vector, correlation/covariance matrix, or data from a data.frame. o The eigenFrom function determine the class of the object. 
o The corFA function is added to insert commulalities in the diagonal of a correlation or a covariance matrix. o The makeCor function creates a full correlation/covariance matrix from a matrix with lower part filled and upper part with zeros. o Functions are added to generate a factor structure (generateStructure) and to simulate data and correlation matrices from a predefined factor structure (structureSim). o A function, moreStats, is added to be computes additionnal statistics on a numeric data.frame. o Utility functions for \code{nScree} class objects werw implemented: is.nScree, plot.nScree, plot.nScree and summary.nScree. ---------------------------------------- - Changes in nFactors 2.2 (2009-02-06) - ---------------------------------------- o Considering the instabillity of the function factanal with ill conditionned correlation matrices, new functions for computing factor analysis are added: componentAxis, iteratePrincipalAxix, principalAxis and principalComponents. o The diagReplace function replace the upper or the lower diagonal of a correlation matrix with the respective lower or lower diagonal. o The rRecovery function is added for a verification of the quality of the recovery of an initial correlation matrix. nFactors/data/0000755000176200001440000000000013636677340012761 5ustar liggesusersnFactors/data/dFactors.rda0000644000176200001440000000203714321003144015171 0ustar liggesusersV]LTG] QRJMh1Qk͙ZQV mH {vݵj@R6MMcHk"Eb JRjW̜uu2{f39|+!$X&$mZ+a"MJs.B̋t;g泀Ѥq[~Zsz#Z#0vEG`M7*Au0+?FC)ŲæBO{UӼжiKjx$bt(sb1V-ݮ:v+n}K,8v " ZHSf/(xF4y77#f 8~q5| G9Z`ǸB)Z7bo{x8^R˘1Q Vvϓ%eZwHË; \$S!+VN[aCM&9x=fUpB0ɢq鏡{嘳QWeazFQw#޿CXφ_֫<Ԕ}fQ{:핋k&3.Q4_(6= ;G<źqB$֡ 8N'QunG }4l4AKEPg0Βm}?^G`ۃvv{BG|ꯍڐw臁awD]dHޡ\(^ԉ/gt.^ؠ:\S]>mnyiqqj%SqNPR,WJ+G)-*Q%= \bar{\lambda}} (Kaiser and related rule) or \eqn{\lambda >= 0} (CFA analysis)} \item{cor}{logical: if \code{TRUE} computes eigenvalues from a correlation matrix, else from a covariance matrix} \item{model}{character: \code{"components"} or \code{"factors"}} \item{criteria}{numeric: by default fixed at \eqn{\bar{\lambda}}. When the \eqn{\lambda}s are computed from a principal component analysis on a correlation matrix, it corresponds to the usual Kaiser \eqn{\lambda >= 1} rule. On a covariance matrix or from a factor analysis, it is simply the mean. To apply \eqn{\lambda >= 0}, sometimes used with factor analysis, fix the criteria to \eqn{0}.} \item{...}{variabe: additionnal parameters to give to the \code{cor} or \code{cov} functions} } \value{ \item{Components }{ Data frame for the number of components/factors according to different rules } \item{Components$noc }{ Number of components/factors to retain according to optimal coordinates \emph{oc}} \item{Components$naf }{ Number of components/factors to retain according to the acceleration factor \emph{af}} \item{Components$npar.analysis }{Number of components/factors to retain according to parallel analysis } \item{Components$nkaiser }{ Number of components/factors to retain according to the Kaiser rule } \item{Analysis }{ Data frame of vectors linked to the different rules } \item{Analysis$Eigenvalues }{ Eigenvalues } \item{Analysis$Prop }{ Proportion of variance accounted by eigenvalues } \item{Analysis$Cumu }{ Cumulative proportion of variance accounted by eigenvalues } \item{Analysis$Par.Analysis }{ Centiles of the random eigenvalues generated by the parallel analysis. 
} \item{Analysis$Pred.eig }{ Predicted eigenvalues by each optimal coordinate regression line } \item{Analysis$OC}{ Critical optimal coordinates \emph{oc}} \item{Analysis$Acc.factor }{ Acceleration factor \emph{af}} \item{Analysis$AF}{ Critical acceleration factor \emph{af}} Otherwise, returns a summary of the analysis. } \description{ The \code{nScree} function returns an analysis of the number of component or factors to retain in an exploratory principal component or factor analysis. The function also returns information about the number of components/factors to retain with the Kaiser rule and the parallel analysis. } \details{ The \code{nScree} function returns an analysis of the number of components/factors to retain in an exploratory principal component or factor analysis. Different solutions are given. The classical ones are the Kaiser rule, the parallel analysis, and the usual scree test (\code{\link{plotuScree}}). Non graphical solutions to the Cattell subjective scree test are also proposed: an acceleration factor (\emph{af}) and the optimal coordinates index \emph{oc}. The acceleration factor indicates where the elbow of the scree plot appears. It corresponds to the acceleration of the curve, i.e. the second derivative. The optimal coordinates are the extrapolated coordinates of the previous eigenvalue that allow the observed eigenvalue to go beyond this extrapolation. The extrapolation is made by a linear regression using the last eigenvalue coordinates and the \eqn{k+1} eigenvalue coordinates. There are \eqn{k-2} regression lines like this. The Kaiser rule or a parallel analysis criterion (\code{\link{parallel}}) must also be simultaneously satisfied to retain the components/factors, whether for the acceleration factor, or for the optimal coordinates. If \eqn{\lambda_i} is the \eqn{i^{th}} eigenvalue, and \eqn{LS_i} is a location statistics like the mean or a centile (generally the followings: \eqn{1^{st}, \ 5^{th}, \ 95^{th}, \ or \ 99^{th}}). The Kaiser rule is computed as: \deqn{ n_{Kaiser} = \sum_{i} (\lambda_{i} \ge \bar{\lambda}).} Note that \eqn{\bar{\lambda}} is equal to 1 when a correlation matrix is used. 
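% A minimal sketch of the Kaiser count just described (illustrative R code kept in
% an Rd comment; the object name X is ad hoc and not part of the package API):
%   eig     <- eigen(cor(X))$values   # eigenvalues of a correlation matrix
%   nKaiser <- sum(eig >= mean(eig))  # Kaiser rule; mean(eig) equals 1 here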
The parallel analysis is computed as: \deqn{n_{parallel} = \sum_{i} (\lambda_{i} \ge LS_i).} The acceleration factor (\eqn{AF}) corresponds to a numerical solution to the elbow of the scree plot: \deqn{n_{AF} \equiv \ If \ \left[ (\lambda_{i} \ge LS_i) \ and \ max(AF_i) \right].} The optimal coordinates (\eqn{OC}) corresponds to an extrapolation of the preceeding eigenvalue by a regression line between the eigenvalue coordinates and the last eigenvalue coordinates: \deqn{n_{OC} = \sum_i \left[(\lambda_i \ge LS_i) \cap (\lambda_i \ge (\lambda_{i \ predicted}) \right].} } \examples{ \dontrun{ if(interactive()){ ## INITIALISATION data(dFactors) # Load the nFactors dataset attach(dFactors) vect <- Raiche # Uses the example from Raiche eigenvalues <- vect$eigenvalues # Extracts the observed eigenvalues nsubjects <- vect$nsubjects # Extracts the number of subjects variables <- length(eigenvalues) # Computes the number of variables rep <- 100 # Number of replications for PA analysis cent <- 0.95 # Centile value of PA analysis ## PARALLEL ANALYSIS (qevpea for the centile criterion, mevpea for the ## mean criterion) aparallel <- parallel(var = variables, subject = nsubjects, rep = rep, cent = cent )$eigen$qevpea # The 95 centile ## NUMBER OF FACTORS RETAINED ACCORDING TO DIFFERENT RULES results <- nScree(x=eigenvalues, aparallel=aparallel) results summary(results) ## PLOT ACCORDING TO THE nScree CLASS plotnScree(results) } } } \references{ Cattell, R. B. (1966). The scree test for the number of factors. \emph{Multivariate Behavioral Research, 1}, 245-276. Dinno, A. (2009). \emph{Gently clarifying the application of Horn's parallel analysis to principal component analysis versus factor analysis}. Portland, Oregon: Portland Sate University. Guttman, L. (1954). Some necessary conditions for common factor analysis. \emph{Psychometrika, 19, 149-162}. Horn, J. L. (1965). A rationale for the number of factors in factor analysis. \emph{Psychometrika, 30}, 179-185. Kaiser, H. F. (1960). The application of electronic computer to factor analysis. \emph{Educational and Psychological Measurement, 20}, 141-151. Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions for Cattell's scree test. Methodology, 9(1), 23-29. } \seealso{ \code{\link{plotuScree}}, \code{\link{plotnScree}}, \code{\link{parallel}}, \code{\link{plotParallel}}, } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} } \keyword{multivariate} nFactors/man/nScreeObjectMethods.Rd0000644000176200001440000000572615017122040016767 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/nScreeObjectMethods.r \name{summary.nScree} \alias{summary.nScree} \alias{print.nScree} \alias{plot.nScree} \alias{is.nScree} \title{Utility Functions for nScree Class Objects} \usage{ \method{summary}{nScree}(object, ...) \method{print}{nScree}(x, ...) \method{plot}{nScree}(x, ...) is.nScree(object) } \arguments{ \item{object}{nScree: an object of the class \code{nScree}} \item{...}{variable: additionnal parameters to give to the \code{print} function with \code{print.nScree}, the \code{plotnScree} with \code{plot.nScree} or to the \code{summary} function with \code{summary.nScree}} \item{x}{Results of a previous \code{nScree} analysis} } \value{ Generic functions for the nScree class: \item{is.nScree}{ logical: is the object of the class \code{nScree}? 
} \item{plot.nScree }{ graphic: plots a figure according to the \code{plotnScree} function} \item{print.nScree }{ numeric: vector of the number of components/factors to retain: same as the \code{Components} vector from the \code{nScree} object} \item{summary.nScree }{ data.frame: details of the results from a nScree analysis: same as the \code{Analysis} data.frame from the \code{nScree} object, but with easier control of the number of decimals with the \code{digits} parameter} } \description{ Utility functions for \code{nScree} class objects. Some of these functions are already implemented in the \code{nFactors} package, but are easier to use with generic functions like these. } \examples{ \dontrun{ if(interactive()){ ## INITIALISATION data(dFactors) # Load the nFactors dataset attach(dFactors) vect <- Raiche # Use the example from Raiche eigenvalues <- vect$eigenvalues # Extract the observed eigenvalues nsubjects <- vect$nsubjects # Extract the number of subjects variables <- length(eigenvalues) # Compute the number of variables rep <- 100 # Number of replications for the parallel analysis cent <- 0.95 # Centile value of the parallel analysis ## PARALLEL ANALYSIS (qevpea for the centile criterion, mevpea for the mean criterion) aparallel <- parallel(var = variables, subject = nsubjects, rep = rep, cent = cent )$eigen$qevpea # The 95 centile ## NOMBER OF FACTORS RETAINED ACCORDING TO DIFFERENT RULES results <- nScree(x=eigenvalues, aparallel=aparallel) is.nScree(results) results summary(results) ## PLOT ACCORDING TO THE nScree CLASS plot(results) } } } \references{ Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions for Cattell's scree test. Methodology, 9(1), 23-29. } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} } \keyword{multivariate} nFactors/man/figures/0000755000176200001440000000000013636677340014267 5ustar liggesusersnFactors/man/figures/essai.png.png0000644000176200001440000000347513617350210016654 0ustar liggesusersPNG  IHDRs0"E$iTXtXML:com.adobe.xmp ,sRGB, pHYs&?9IDATx{UU4JZڨ1X6"{X%dQP 23i$Gj=YdLYsctgpp^{[H$D"H$D"H$Ey]ukoʎ<.a39Bl:{ܴ ?%}/8k*5\X"m2GE*QɏKxxp<+KX'Nok8s1 !X"G`l3 KOkb 7$<s_Dj_K"wHB{ȞM#J[цs53qqRavAəccs'm[Fnu)v7)ȗs g;t[4\Z* =x&6F9'J7T;s=5X <˨6\9&rNZOV&%x-p_΍-9Ό*Mv!ݙɺ>;ZYf}y5{ZܳjF>7~fn.D@3tH7m}i6Ƹ3E>FfX1&2~6-יssSwo0@076T(fCw,V{Ã%3s儵 Qd˔X>9,1@x<`6Ze9'ΟM-їA/TL{Q7{zQX2F-Jvܐ\hsp.Pa4R*/慨\qv5FobDj)QsPDUMG8tDc_g +=)D"H$D"H$Db(/Xt"$1IENDB`nFactors/man/eigenComputes.Rd0000644000176200001440000000400215017117254015701 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/eigenComputes.r \name{eigenComputes} \alias{eigenComputes} \title{Computes Eigenvalues According to the Data Type} \usage{ eigenComputes(x, cor = TRUE, model = "components", ...) } \arguments{ \item{x}{numeric: a \code{vector} of eigenvalues, a \code{matrix} of correlations or of covariances or a \code{data.frame} of data} \item{cor}{logical: if \code{TRUE} computes eigenvalues from a correlation matrix, else from a covariance matrix} \item{model}{character: \code{"components"} or \code{"factors"}} \item{...}{variable: additionnal parameters to give to the \code{cor} or \code{cov} functions} } \value{ numeric: return a vector of eigenvalues } \description{ The \code{eigenComputes} function computes eigenvalues from the identified data type. 
It is used internally in many fonctions of the \pkg{nFactors} package in order to apply these to a vector of eigenvalues, a matrix of correlations or covariance or a data frame. } \examples{ \dontrun{ if(interactive()){ # ....................................................... # Different data types # Vector of eigenvalues data(dFactors) x1 <- dFactors$Cliff1$eigenvalues eigenComputes(x1) # Data from a data.frame x2 <- data.frame(matrix(20*rnorm(100), ncol=5)) eigenComputes(x2, cor=TRUE, use="everything") eigenComputes(x2, cor=FALSE, use="everything") eigenComputes(x2, cor=TRUE, use="everything", method="spearman") eigenComputes(x2, cor=TRUE, use="everything", method="kendall") x3 <- cov(x2) eigenComputes(x3, cor=TRUE, use="everything") eigenComputes(x3, cor=FALSE, use="everything") x4 <- cor(x2) eigenComputes(x4, use="everything") # ....................................................... } } } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} \cr \cr David Magis \cr Departement de mathematiques \cr Universite de Liege \cr \email{David.Magis@ulg.ac.be} } \keyword{multivariate} nFactors/man/nSeScree.Rd0000644000176200001440000000722315017117254014611 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/nSeScree.r \name{nSeScree} \alias{nSeScree} \title{Standard Error Scree and Coefficient of Determination Procedures to Determine the Number of Components/Factors} \usage{ nSeScree( x, cor = TRUE, model = "components", details = TRUE, r2limen = 0.75, ... ) } \arguments{ \item{x}{numeric: eigenvalues.} \item{cor}{logical: if \code{TRUE} computes eigenvalues from a correlation matrix, else from a covariance matrix} \item{model}{character: \code{"components"} or \code{"factors"}} \item{details}{logical: if \code{TRUE} also returns details about the computation for each eigenvalue.} \item{r2limen}{numeric: criterion value retained for the coefficient of determination indices.} \item{...}{variable: additionnal parameters to give to the \code{eigenComputes} and \code{cor} or \code{cov} functions} } \value{ \item{nFactors}{ numeric: number of components/factors retained by the seScree procedure. } \item{details}{ numeric: matrix of the details for each index.} } \description{ This function computes the \emph{seScree} (\eqn{S_{Y \bullet X}}) indices (Zoski and Jurs, 1996) and the coefficient of determination indices of Nelson (2005) \eqn{R^2} for determining the number of components/factors to retain. } \details{ The Zoski and Jurs \eqn{S_{Y \bullet X}} index is the standard error of the estimate (predicted) eigenvalues by the regression from the \eqn{(k+1, \ldots, p)} subsequent ranks of the eigenvalues. The standard error is computed as: (1) \eqn{\qquad \qquad S_{Y \bullet X} = \sqrt{ \frac{(\lambda_k - \hat{\lambda}_k)^2} {p-2} } } \cr A value of \eqn{1/p} is choosen as the criteria to determine the number of components or factors to retain, \emph{p} corresponding to the number of variables. The Nelson \eqn{R^2} index is simply the multiple regresion coefficient of determination for the \eqn{k+1, \ldots, p} eigenvalues. Note that Nelson didn't give formal prescriptions for the criteria for this index. He only suggested that a value of 0.75 or more must be considered. More is to be done to explore adequate values. 
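% A rough sketch of the quantity in equation (1) for a single k (illustrative R code
% kept in an Rd comment; the helper name seScreeK is ad hoc, not the package routine):
%   seScreeK <- function(eig, k) {
%     p <- length(eig)
%     y <- eig[(k+1):p]; x <- (k+1):p
%     fit <- lm(y ~ x)                        # linear scree line on the remaining eigenvalues
%     sqrt(sum(residuals(fit)^2)/(p - 2))     # standard error of estimate, as in equation (1)
%   }
%   # the smallest k for which seScreeK(eig, k) < 1/p suggests the number of factors to retain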
} \examples{ \dontrun{ if(interactive()){ ## SIMPLE EXAMPLE OF SESCREE AND R2 ANALYSIS data(dFactors) eig <- dFactors$Raiche$eigenvalues results <- nSeScree(eig) results plotuScree(eig, main=paste(results$nFactors[1], " or ", results$nFactors[2], " factors retained by the sescree and R2 procedures", sep="")) } } } \references{ Nasser, F. (2002). The performance of regression-based variations of the visual scree for determining the number of common factors. \emph{Educational and Psychological Measurement, 62(3)}, 397-419. Nelson, L. R. (2005). Some observations on the scree test, and on coefficient alpha. \emph{Thai Journal of Educational Research and Measurement, 3(1)}, 1-17. Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions for Cattell's scree test. Methodology, 9(1), 23-29. Zoski, K. and Jurs, S. (1993). Using multiple regression to determine the number of factors to retain in factor analysis. \emph{Multiple Linear Regression Viewpoints, 20}(1), 5-9. Zoski, K. and Jurs, S. (1996). An objective counterpart to the visuel scree test for factor analysis: the standard error scree. \emph{Educational and Psychological Measurement, 56}(3), 443-451. } \seealso{ \code{\link{plotuScree}}, \code{\link{nScree}}, \code{\link{plotnScree}}, \code{\link{plotParallel}} } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} } \keyword{multivariate} nFactors/man/iterativePrincipalAxis.Rd0000644000176200001440000000761315017117254017570 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/iterativePrincipalAxis.r \name{iterativePrincipalAxis} \alias{iterativePrincipalAxis} \title{Iterative Principal Axis Analysis} \usage{ iterativePrincipalAxis( R, nFactors = 2, communalities = "component", iterations = 20, tolerance = 0.001 ) } \arguments{ \item{R}{numeric: correlation or covariance matrix} \item{nFactors}{numeric: number of factors to retain} \item{communalities}{character: initial values for communalities (\code{"component", "maxr", "ginv" or "multiple"})} \item{iterations}{numeric: maximum number of iterations to obtain a solution} \item{tolerance}{numeric: minimal difference in the estimated communalities after a given iteration} } \value{ values numeric: variance of each component varExplained numeric: variance explained by each component varExplained numeric: cumulative variance explained by each component loadings numeric: loadings of each variable on each component iterations numeric: maximum number of iterations to obtain a solution tolerance numeric: minimal difference in the estimated communalities after a given iteration } \description{ The \code{iterativePrincipalAxis} function returns a principal axis analysis with iterated communality estimates. Four different choices of initial communality estimates are given: maximum correlation, multiple correlation (usual and generalized inverse) or estimates based on the sum of the squared principal component analysis loadings. Generally, statistical packages initialize the communalities at the multiple correlation value. Unfortunately, this strategy cannot always deal with singular correlation or covariance matrices. If a generalized inverse, the maximum correlation or the estimated communalities based on the sum of loadings are used instead, then a solution can be computed. 
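% A compact sketch of the iteration described above (illustrative only; the helper
% iterComm and its defaults are ad hoc, not the exported implementation):
%   iterComm <- function(R, nFactors, h2 = 1 - 1/diag(MASS::ginv(R)),
%                        iterations = 20, tolerance = 0.001) {
%     for (i in 1:iterations) {
%       diag(R) <- h2                                   # current communality estimates
%       e  <- eigen(R)
%       L  <- e$vectors[, 1:nFactors, drop = FALSE] %*%
%             diag(sqrt(pmax(e$values[1:nFactors], 0)), nFactors)
%       h2new <- rowSums(L^2)                           # updated communalities
%       if (max(abs(h2new - h2)) < tolerance) break
%       h2 <- h2new
%     }
%     list(loadings = L, communalities = h2new)
%   }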
} \examples{ \dontrun{ if(interactive()){ ## ................................................ # Example from Kim and Mueller (1978, p. 10) # Population: upper diagonal # Simulated sample: lower diagnonal R <- matrix(c( 1.000, .6008, .4984, .1920, .1959, .3466, .5600, 1.000, .4749, .2196, .1912, .2979, .4800, .4200, 1.000, .2079, .2010, .2445, .2240, .1960, .1680, 1.000, .4334, .3197, .1920, .1680, .1440, .4200, 1.000, .4207, .1600, .1400, .1200, .3500, .3000, 1.000), nrow=6, byrow=TRUE) # Factor analysis: Principal axis factoring with iterated communalities # Kim and Mueller (1978, p. 23) # Replace upper diagonal with lower diagonal RU <- diagReplace(R, upper=TRUE) nFactors <- 2 fComponent <- iterativePrincipalAxis(RU, nFactors=nFactors, communalities="component") fComponent rRecovery(RU,fComponent$loadings, diagCommunalities=FALSE) fMaxr <- iterativePrincipalAxis(RU, nFactors=nFactors, communalities="maxr") fMaxr rRecovery(RU,fMaxr$loadings, diagCommunalities=FALSE) fMultiple <- iterativePrincipalAxis(RU, nFactors=nFactors, communalities="multiple") fMultiple rRecovery(RU,fMultiple$loadings, diagCommunalities=FALSE) # ....................................................... } } } \references{ Kim, J.-O. and Mueller, C. W. (1978). \emph{Introduction to factor analysis. What it is and how to do it}. Beverly Hills, CA: Sage. Kim, J.-O. and Mueller, C. W. (1987). \emph{Factor analysis. Statistical methods and practical issues}. Beverly Hills, CA: Sage. } \seealso{ \code{\link{componentAxis}}, \code{\link{principalAxis}}, \code{\link{rRecovery}} } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} \cr \cr David Magis \cr Departement de mathematiques \cr Universite de Liege \cr \email{David.Magis@ulg.ac.be} } \keyword{multivariate} nFactors/man/structureSim.Rd0000644000176200001440000000767115017120720015612 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/structureSim.r \name{structureSim} \alias{structureSim} \title{Population or Simulated Sample Correlation Matrix from a Given Factor Structure Matrix} \usage{ structureSim( fload, reppar = 30, repsim = 100, N, quantile = 0.95, model = "components", adequacy = FALSE, details = TRUE, r2limen = 0.75, all = FALSE ) } \arguments{ \item{fload}{matrix: loadings of the factor structure} \item{reppar}{numeric: number of replications for the parallel analysis} \item{repsim}{numeric: number of replications of the matrix correlation simulation} \item{N}{numeric: number of subjects} \item{quantile}{numeric: quantile for the parallel analysis} \item{model}{character: \code{"components"} or \code{"factors"}} \item{adequacy}{logical: if \code{TRUE} prints the recovered population matrix from the factor structure} \item{details}{logical: if \code{TRUE} outputs details of the \code{repsim} simulations} \item{r2limen}{numeric: R2 limen value for the R2 Nelson index} \item{all}{logical: if \code{TRUE} computes the Bentler and Yuan index (very long computing time to consider)} } \value{ \item{values}{ the output depends of the logical value of details. If \code{FALSE}, returns only statistics about the eigenvalues: mean, median, quantile, standard deviation, minimum and maximum. If \code{TRUE}, returns also details about the \code{repsim} simulations. 
If \code{adequacy} = \code{TRUE} returns the recovered factor structure} } \description{ The \code{structureSim} function returns a population and a sample correlation matrices from a predefined congeneric factor structure. } \examples{ \dontrun{ if(interactive()){ # ....................................................... # Example inspired from Zwick and Velicer (1986, table 2, p. 437) ## ................................................................... nFactors <- 3 unique <- 0.2 loadings <- 0.5 nsubjects <- 180 repsim <- 30 zwick <- generateStructure(var=36, mjc=nFactors, pmjc=12, loadings=loadings, unique=unique) ## ................................................................... # Produce statistics about a replication of a parallel analysis on # 30 sampled correlation matrices mzwick.fa <- structureSim(fload=as.matrix(zwick), reppar=30, repsim=repsim, N=nsubjects, quantile=0.5, model="factors") mzwick <- structureSim(fload=as.matrix(zwick), reppar=30, repsim=repsim, N=nsubjects, quantile=0.5, all=TRUE) # Very long execution time that could be used only with model="components" # mzwick <- structureSim(fload=as.matrix(zwick), reppar=30, # repsim=repsim, N=nsubjects, quantile=0.5, all=TRUE) par(mfrow=c(2,1)) graphics::plot(x=mzwick, nFactors=nFactors, index=c(1:14), cex.axis=0.7, col="red") graphics::plot(x=mzwick.fa, nFactors=nFactors, index=c(1:11), cex.axis=0.7, col="red") par(mfrow=c(1,1)) par(mfrow=c(2,1)) graphics::boxplot(x=mzwick, nFactors=3, cex.axis=0.8, vLine="blue", col="red") graphics::boxplot(x=mzwick.fa, nFactors=3, cex.axis=0.8, vLine="blue", col="red", xlab="Components") par(mfrow=c(1,1)) # ...................................................... } } } \references{ Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions for Cattell's scree test. Methodology, 9(1), 23-29. Zwick, W. R. and Velicer, W. F. (1986). Comparison of five rules for determining the number of components to retain. \emph{Psychological Bulletin, 99}, 432-442. } \seealso{ \code{\link{principalComponents}}, \code{\link{iterativePrincipalAxis}}, \code{\link{rRecovery}} } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} } \keyword{multivariate} nFactors/man/plotnScree.Rd0000644000176200001440000000472215017117254015221 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/plotnScree.R \name{plotnScree} \alias{plotnScree} \title{Scree Plot According to a nScree Object Class} \usage{ plotnScree( nScree, legend = TRUE, ylab = "Eigenvalues", xlab = "Components", main = "Non Graphical Solutions to Scree Test" ) } \arguments{ \item{nScree}{Results of a previous \code{nScree} analysis} \item{legend}{Logical indicator of the presence or not of a legend} \item{ylab}{Label of the y axis (default to \code{"Eigenvalue"})} \item{xlab}{Label of the x axis (default to \code{"Component"})} \item{main}{Main title (default to \code{"Non Graphical Solutions to the Scree Test"})} } \value{ Nothing returned. } \description{ Plot a scree plot adding information about a non graphical \code{nScree} analysis. } \examples{ \dontrun{ if(interactive()){ ## INITIALISATION data(dFactors) # Load the nFactors dataset attach(dFactors) vect <- Raiche # Use the second example from Buja and Eyuboglu # (1992, p. 
519, nsubjects not specified by them) eigenvalues <- vect$eigenvalues # Extract the observed eigenvalues nsubjects <- vect$nsubjects # Extract the number of subjects variables <- length(eigenvalues) # Compute the number of variables rep <- 100 # Number of replications for the parallel analysis cent <- 0.95 # Centile value of the parallel analysis ## PARALLEL ANALYSIS (qevpea for the centile criterion, mevpea for the mean criterion) aparallel <- parallel(var = variables, subject = nsubjects, rep = rep, cent = cent)$eigen$qevpea # The 95 centile ## NOMBER OF FACTORS RETAINED ACCORDING TO DIFFERENT RULES results <- nScree(eig = eigenvalues, aparallel = aparallel ) results ## PLOT ACCORDING TO THE nScree CLASS plotnScree(results) } } } \references{ Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions for Cattell's scree test. Methodology, 9(1), 23-29. } \seealso{ \code{\link{plotuScree}}, \code{\link{nScree}}, \code{\link{plotParallel}}, \code{\link{parallel}} } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} } \keyword{Graphics} nFactors/man/moreStats.Rd0000644000176200001440000000252015017117254015056 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/moreStats.r \name{moreStats} \alias{moreStats} \title{Statistical Summary of a Data Frame} \usage{ moreStats(x, quantile = 0.95, show = FALSE) } \arguments{ \item{x}{numeric: matrix or \code{data.frame}} \item{quantile}{numeric: quantile of the distribution} \item{show}{logical: if \code{TRUE} prints the quantile choosen} } \value{ numeric: \code{data.frame} of statistics: mean, median, quantile, standard deviation, minimum and maximum } \description{ This function produces another summary of a \code{data.frame}. This function was proposed in order to apply some functions globally on a \code{data.frame}: \code{quantile}, \code{median}, \code{min} and \code{max}. The usual \emph{R} version cannot do so. } \examples{ \dontrun{ if(interactive()){ ## ................................................ ## GENERATION OF A MATRIX OF 100 OBSERVATIONS AND 10 VARIABLES x <- matrix(rnorm(1000),ncol=10) ## STATISTICS res <- moreStats(x, quantile=0.05, show=TRUE) res } } } \seealso{ \code{\link{plotuScree}}, \code{\link{nScree}}, \code{\link{plotnScree}}, \code{\link{plotParallel}} } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} } \keyword{multivariate} nFactors/man/principalComponents.Rd0000644000176200001440000000501115017117254017122 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/principalComponents.r \name{principalComponents} \alias{principalComponents} \title{Principal Component Analysis} \usage{ principalComponents(R) } \arguments{ \item{R}{numeric: correlation or covariance matrix} } \value{ \item{values}{ numeric: variance of each component } \item{varExplained}{ numeric: variance explained by each component } \item{varExplained}{ numeric: cumulative variance explained by each component } \item{loadings}{ numeric: loadings of each variable on each component } } \description{ The \code{principalComponents} function returns a principal component analysis. 
Other R functions give the same results, but \code{principalComponents} is customized mainly for the other factor analysis functions available in the \pkg{nfactors} package. In order to retain only a small number of components the \code{componentAxis} function has to be used. } \examples{ \dontrun{ if(interactive()){ # ....................................................... # Example from Kim and Mueller (1978, p. 10) # Population: upper diagonal # Simulated sample: lower diagnonal R <- matrix(c( 1.000, .6008, .4984, .1920, .1959, .3466, .5600, 1.000, .4749, .2196, .1912, .2979, .4800, .4200, 1.000, .2079, .2010, .2445, .2240, .1960, .1680, 1.000, .4334, .3197, .1920, .1680, .1440, .4200, 1.000, .4207, .1600, .1400, .1200, .3500, .3000, 1.000), nrow=6, byrow=TRUE) # Factor analysis: Principal component - # Kim et Mueller (1978, p. 21) # Replace upper diagonal with lower diagonal RU <- diagReplace(R, upper=TRUE) principalComponents(RU) # Replace lower diagonal with upper diagonal RL <- diagReplace(R, upper=FALSE) principalComponents(RL) # ....................................................... } } } \references{ Joliffe, I. T. (2002). \emph{Principal components analysis} (2th Edition). New York, NJ: Springer-Verlag. Kim, J.-O. and Mueller, C. W. (1978). \emph{Introduction to factor analysis. What it is and how to do it}. Beverly Hills, CA: Sage. Kim, J.-O. and Mueller, C. W. (1987). \emph{Factor analysis. Statistical methods and practical issues}. Beverly Hills, CA: Sage. } \seealso{ \code{\link{componentAxis}}, \code{\link{iterativePrincipalAxis}}, \code{\link{rRecovery}} } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} } \keyword{multivariate} nFactors/man/studySim.Rd0000644000176200001440000000701115017117256014720 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/studySim.r \name{studySim} \alias{studySim} \title{Simulation Study from Given Factor Structure Matrices and Conditions} \usage{ studySim( var, nFactors, pmjc, loadings, unique, N, repsim, reppar, stats = 1, quantile = 0.5, model = "components", r2limen = 0.75, all = FALSE, dir = NA, trace = TRUE ) } \arguments{ \item{var}{numeric: vector of the number of variables} \item{nFactors}{numeric: vector of the number of components/factors} \item{pmjc}{numeric: vector of the number of major loadings on each component/factor} \item{loadings}{numeric: vector of the major loadings on each component/factor} \item{unique}{numeric: vector of the unique loadings on each component/factor} \item{N}{numeric: vector of the number of subjects/observations} \item{repsim}{numeric: number of replications of the matrix correlation simulation} \item{reppar}{numeric: number of replications for the parallel and permutation analysis} \item{stats}{numeric: vector of the statistics to return: mean(1), median(2), sd(3), quantile(4), min(5), max(6)} \item{quantile}{numeric: quantile for the parallel and permutation analysis} \item{model}{character: \code{"components"} or \code{"factors"}} \item{r2limen}{numeric: R2 limen value for the R2 Nelson index} \item{all}{logical: if \code{TRUE} computes the Bentler and Yuan index (very long computing time to consider)} \item{dir}{character: directory where to save output. 
Defaults to \code{NA}.} \item{trace}{logical: if \code{TRUE} outputs details of the status of the simulations} } \value{ \item{values}{ Returns selected statistics about the number of components/factors to retain: mean, median, quantile, standard deviation, minimum and maximum.} } \description{ The \code{studySim} function returns statistical results from simulations of predefined congeneric factor structures. The main ideas come from the methodology applied by Zwick and Velicer (1986). } \examples{ \dontrun{ # .................................................................... # Example inspired by Zwick and Velicer (1986) # Very long computing time # ................................................................... # 1. Initialisation # reppar <- 30 # repsim <- 5 # quantile <- 0.50 # 2. Simulations # X <- studySim(var=36,nFactors=3, pmjc=c(6,12), loadings=c(0.5,0.8), # unique=c(0,0.2), quantile=quantile, # N=c(72,180), repsim=repsim, reppar=reppar, # stats=c(1:6)) # 3. Results (first 10 results) # print(X[1:10,1:14],2) # names(X) # 4. Study of the error made in the determination of the number # of components/factors. A positive value indicates overdetermination. # results <- X[X$stats=="mean",] # residuals <- results[,c(11:25)] - X$nfactors # BY <- c("nsubjects","var","loadings") # round(aggregate(residuals, by=results[BY], mean),0) } } \references{ Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions for Cattell's scree test. \emph{Methodology, 9}(1), 23-29. Zwick, W. R. and Velicer, W. F. (1986). Comparison of five rules for determining the number of components to retain. \emph{Psychological Bulletin, 99}, 432-442. } \seealso{ \code{\link{generateStructure}}, \code{\link{structureSim}} } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} } \keyword{multivariate} nFactors/man/dFactors.Rd0000644000176200001440000000723215017117254014647 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/data.R \docType{data} \name{dFactors} \alias{dFactors} \title{Eigenvalues from classical studies} \format{ A list of examples. For each example, a list is also used to give the eigenvalue vector and the number of subjects. \describe{ \item{Bentler}{$eigenvalues and $nsubjects} \item{Buja}{$eigenvalues and $nsubjects} \item{Cliff1}{$eigenvalues and $nsubjects} \item{Cliff2}{$eigenvalues and $nsubjects} \item{Cliff3}{$eigenvalues and $nsubjects} \item{Hand}{$eigenvalues and $nsubjects} \item{Harman}{$eigenvalues and $nsubjects} \item{Lawley}{$eigenvalues and $nsubjects} \item{Raiche}{$eigenvalues and $nsubjects} \item{Tucker1}{$eigenvalues and $nsubjects} \item{Tucker2}{$eigenvalues and $nsubjects} } } \source{ Lawley and Hand dataset: Bartholomew \emph{et al}. (2002, p. 123, 126) Bentler dataset: Bentler and Yuan (1998, p. 139-140) Buja datasets: Buja and Eyuboglu (1992, p. 516, 519) < Number of subjects not specified by Buja and Eyuboglu > Cliff datasets: Cliff (1970, p. 165) Raiche dataset: Raiche, Langevin, Riopel and Mauffette (2006) Raiche dataset: Raiche, Riopel and Blais (2006, p. 9) Tucker datasets: Tucker \emph{et al}. (1969, p. 442) } \usage{ dFactors } \description{ Classical examples of eigenvalue vectors used in the literature to study the number of factors to retain. These examples generally give the number of subjects used to obtain these eigenvalues. 
The number of subjects is used with the parallel analysis. } \details{ Other datasets will be added in future versions of the package. } \examples{ \dontrun{ if(interactive()){ # EXAMPLES FROM DATASET data(dFactors) # COMMAND TO VISUALIZE THE CONTENT AND ATTRIBUTES OF THE DATASETS names(dFactors) attributes(dFactors) dFactors$Cliff1$eigenvalues dFactors$Cliff1$nsubjects # SCREE PLOT OF THE Cliff1 DATASET plotuScree(dFactors$Cliff1$eigenvalues) } } } \references{ Bartholomew, D. J., Steele, F., Moustaki, I. and Galbraith, J. I. (2002). \emph{The analysis and interpretation of multivariate data for social scientists}. Boca Raton, FL: Chapman and Hall. Bentler, P. M. and Yuan, K.-H. (1998). Tests for linear trend in the smallest eigenvalues of the correlation matrix. \emph{Psychometrika, 63}(2), 131-144. Buja, A. and Eyuboglu, N. (1992). Remarks on parallel analysis. \emph{Multivariate Behavioral Research, 27}(4), 509-540. Cliff, N. (1970). The relation between sample and population characteristic vectors. \emph{Psychometrika, 35}(2), 163-178. Hand, D. J., Daly, F., Lunn, A. D., McConway, K. J. and Ostrowski, E. (1994). \emph{A handbook of small data sets}. Boca Raton, FL: Chapman and Hall. Lawley, D. N. and Maxwell, A. E. (1971). \emph{Factor analysis as a statistical method} (2nd edition). London: Butterworth. Raiche, G., Langevin, L., Riopel, M. and Mauffette, Y. (2006). Etude exploratoire de la dimensionnalite et des facteurs expliques par une traduction francaise de l'Inventaire des approches d'enseignement de Trigwell et Prosser dans trois universites quebecoises. \emph{Mesure et Evaluation en Education, 29}(2), 41-61. Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions for Cattell's scree test. \emph{Methodology, 9}(1), 23-29. Tucker, L. R., Koopman, R. F. and Linn, R. L. (1969). Evaluation of factor analytic research procedures by means of simulated correlation matrices. \emph{Psychometrika, 34}(4), 421-459. Zoski, K. and Jurs, S. (1993). Using multiple regression to determine the number of factors to retain in factor analysis. \emph{Multiple Linear Regression Viewpoint, 20}(1), 5-9. } \keyword{datasets} nFactors/man/rRecovery.Rd0000644000176200001440000000572215017117256015066 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/rRecovery.r \name{rRecovery} \alias{rRecovery} \title{Test of Recovery of a Correlation or a Covariance Matrix from a Factor Analysis Solution} \usage{ rRecovery(R, loadings, diagCommunalities = FALSE) } \arguments{ \item{R}{numeric: initial correlation or covariance matrix} \item{loadings}{numeric: loadings from a factor analysis solution} \item{diagCommunalities}{logical: if \code{TRUE}, the correlation between the initial solution and the estimated one will use a correlation of one in the diagonal. If \code{FALSE} (default) the diagonal is not used in the computation of this correlation.} } \value{ \item{R}{ numeric: initial correlation or covariance matrix } \item{recoveredR}{ numeric: recovered estimated correlation or covariance matrix } \item{difference}{ numeric: difference between initial and recovered estimated correlation or covariance matrix} \item{cor}{ numeric: Pearson correlation between initial and recovered estimated correlation or covariance matrix. Computations depend on the logical value of the \code{diagCommunalities} argument. 
} } \description{ The \code{rRecovery} function returns a verification of the quality of the recovery of the initial correlation or covariance matrix by the factor solution. } \examples{ \dontrun{ if(interactive()){ # ....................................................... # Example from Kim and Mueller (1978, p. 10) # Population: upper diagonal # Simulated sample: lower diagonal R <- matrix(c( 1.000, .6008, .4984, .1920, .1959, .3466, .5600, 1.000, .4749, .2196, .1912, .2979, .4800, .4200, 1.000, .2079, .2010, .2445, .2240, .1960, .1680, 1.000, .4334, .3197, .1920, .1680, .1440, .4200, 1.000, .4207, .1600, .1400, .1200, .3500, .3000, 1.000), nrow=6, byrow=TRUE) # Replace upper diagonal with lower diagonal RU <- diagReplace(R, upper=TRUE) nFactors <- 2 loadings <- principalAxis(RU, nFactors=nFactors, communalities="component")$loadings rComponent <- rRecovery(RU, loadings, diagCommunalities=FALSE)$cor loadings <- principalAxis(RU, nFactors=nFactors, communalities="maxr")$loadings rMaxr <- rRecovery(RU, loadings, diagCommunalities=FALSE)$cor loadings <- principalAxis(RU, nFactors=nFactors, communalities="multiple")$loadings rMultiple <- rRecovery(RU, loadings, diagCommunalities=FALSE)$cor round(c(rComponent = rComponent, rMaxr = rMaxr, rMultiple = rMultiple), 3) # ....................................................... } } } \seealso{ \code{\link{componentAxis}}, \code{\link{iterativePrincipalAxis}}, \code{\link{principalAxis}} } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} } \keyword{utilities} nFactors/DESCRIPTION0000644000176200001440000000201415025060212013525 0ustar liggesusersPackage: nFactors Type: Package Title: Parallel Analysis and Other Non Graphical Solutions to the Cattell Scree Test Version: 2.4.1.2 Date: 2025-05-29 Authors@R: c( person( given="Gilles", family="Raiche", comment="Universite du Quebec a Montreal", email = "raiche.gilles@uqam.ca", role = c("aut", "cre", "cph") ), person(given="David", family="Magis", role = c("aut")) ) Description: Indices, heuristics, simulations and strategies to help determine the number of factors/components to retain in exploratory factor analysis and principal component analysis. License: GPL-3 Encoding: UTF-8 Language: en-US LazyData: true Depends: R (>= 3.5.0) Imports: stats, MASS, psych, lattice RoxygenNote: 7.3.2 Suggests: testthat NeedsCompilation: no Packaged: 2025-06-07 20:09:23 UTC; raich Author: Gilles Raiche [aut, cre, cph] (Universite du Quebec a Montreal), David Magis [aut] Maintainer: Gilles Raiche <raiche.gilles@uqam.ca> Repository: CRAN Date/Publication: 2025-06-19 19:10:02 UTC
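A minimal standalone usage sketch (not part of the package sources above): it strings together the workflow documented in the nScree and dFactors help pages, assuming the nFactors package is installed and loaded. The choice of the Cliff1 entry, 100 replications and the 95th centile are illustrative assumptions, not package defaults.

library(nFactors)                               # load the package described above
data(dFactors)                                  # bundled eigenvalue examples
eigenvalues <- dFactors$Cliff1$eigenvalues      # observed eigenvalues (illustrative choice)
nsubjects   <- dFactors$Cliff1$nsubjects        # number of subjects for that study
# Parallel analysis: 95th centile of eigenvalues obtained from random data
aparallel <- parallel(var = length(eigenvalues), subject = nsubjects,
                      rep = 100, cent = 0.95)$eigen$qevpea
# Number of components/factors retained according to the non-graphical scree rules
results <- nScree(eig = eigenvalues, aparallel = aparallel)
results
# Scree plot annotated with the retention criteria
plotnScree(results)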