From noreply at r-forge.r-project.org Sun Aug 3 09:46:55 2014 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Sun, 3 Aug 2014 09:46:55 +0200 (CEST) Subject: [Returnanalytics-commits] r3494 - in pkg/FactorAnalytics: . R man Message-ID: <20140803074655.B17451875B9@r-forge.r-project.org> Author: pragnya Date: 2014-08-03 09:46:55 +0200 (Sun, 03 Aug 2014) New Revision: 3494 Added: pkg/FactorAnalytics/R/fmCov.R pkg/FactorAnalytics/R/fmEsDecomp.R pkg/FactorAnalytics/R/fmSdDecomp.R pkg/FactorAnalytics/R/fmVaRDecomp.R pkg/FactorAnalytics/man/fmCov.Rd pkg/FactorAnalytics/man/fmEsDecomp.Rd pkg/FactorAnalytics/man/fmSdDecomp.Rd pkg/FactorAnalytics/man/fmVaRDecomp.Rd Removed: pkg/FactorAnalytics/R/covFm.R pkg/FactorAnalytics/man/covFm.Rd Modified: pkg/FactorAnalytics/NAMESPACE pkg/FactorAnalytics/R/fitTsfm.R pkg/FactorAnalytics/R/fitTsfm.control.R pkg/FactorAnalytics/R/plot.tsfm.r pkg/FactorAnalytics/man/fitTsfm.Rd pkg/FactorAnalytics/man/plot.tsfm.Rd Log: Corrected, updated factor model Risk Decomposition functions Modified: pkg/FactorAnalytics/NAMESPACE =================================================================== --- pkg/FactorAnalytics/NAMESPACE 2014-07-30 17:52:41 UTC (rev 3493) +++ pkg/FactorAnalytics/NAMESPACE 2014-08-03 07:46:55 UTC (rev 3494) @@ -1,8 +1,11 @@ # Generated by roxygen2 (4.0.1): do not edit by hand S3method(coef,tsfm) -S3method(covFm,tsfm) S3method(fitted,tsfm) +S3method(fmCov,tsfm) +S3method(fmEsDecomp,tsfm) +S3method(fmSdDecomp,tsfm) +S3method(fmVaRDecomp,tsfm) S3method(plot,FundamentalFactorModel) S3method(plot,StatFactorModel) S3method(plot,pafm) @@ -20,7 +23,6 @@ S3method(summary,StatFactorModel) S3method(summary,pafm) S3method(summary,tsfm) -export(covFm) export(dCornishFisher) export(factorModelEsDecomposition) export(factorModelMonteCarlo) @@ -30,6 +32,10 @@ export(fitStatisticalFactorModel) export(fitTsfm) export(fitTsfm.control) +export(fmCov) +export(fmEsDecomp) +export(fmSdDecomp) +export(fmVaRDecomp) 
export(pCornishFisher) export(paFm) export(qCornishFisher) Deleted: pkg/FactorAnalytics/R/covFm.R =================================================================== --- pkg/FactorAnalytics/R/covFm.R 2014-07-30 17:52:41 UTC (rev 3493) +++ pkg/FactorAnalytics/R/covFm.R 2014-08-03 07:46:55 UTC (rev 3494) @@ -1,126 +0,0 @@ -#' @title Covariance Matrix for assets' returns from fitted factor model. -#' -#' @description Computes the covariance matrix for assets' returns based on a -#' fitted factor model. This is a generic function with methods for classes -#' \code{tsfm}, \code{sfm} and \code{ffm}. -#' -#' @details \code{R(i, t)}, the return on asset \code{i} at time \code{t}, -#' is assumed to follow a factor model of the form, \cr \cr -#' \code{R(i,t) = alpha(i) + beta*F(t) + e(i,t)}, \cr \cr -#' where, \code{alpha(i)} is the intercept, \code{F(t)} is a {K x 1} vector of -#' the \code{K} factor values at time \code{t}, \code{beta} is a \code{1 x K} -#' vector of factor exposures and the error terms \code{e(i,t)} are serially -#' uncorrelated across time and contemporaneously uncorrelated across assets -#' so that \code{e(i,t) ~ iid(0,sig(i)^2)}. Thus, the variance of asset -#' \code{i}'s return is given by \cr \cr -#' \code{var(R(i,t)) = beta*var(F(t))*tr(beta) + sig(i)^2}. \cr \cr -#' And, the \code{N x N} covariance matrix of N asset returns is \cr \cr -#' \code{var(R) = B*var(F(t))*tr(B) + D}, \cr \cr -#' where, B is the \code{N x K} matrix of factor betas and \code{D} is a -#' diagonal matrix with \code{sig(i)^2} along the diagonal. -#' -#' The method for computing covariance can be specified via the \dots -#' argument. Note that the default of \code{use="pairwise.complete.obs"} for -#' handling NAs restricts the method to "pearson". -#' -#' @param object fit object of class \code{tsfm}, \code{sfm} or \code{ffm}. -#' @param use an optional character string giving a method for computing -#' covariances in the presence of missing values. 
This must be (an -#' abbreviation of) one of the strings "everything", "all.obs", -#' "complete.obs", "na.or.complete", or "pairwise.complete.obs". Default is -#' "pairwise.complete.obs". -#' @param ... optional arguments passed to \code{\link[stats]{cov}}. -#' -#' @return The computed \code{N x N} covariance matrix for asset returns based -#' on the fitted factor model. -#' -#' @author Eric Zivot, Yi-An Chen and Sangeetha Srinivasan. -#' -#' @references -#' \enumerate{ -#' \item Zivot, Eric, and W. A. N. G. Jia-hui. "Modeling Financial Time Series -#' with S-Plus Springer-Verlag." (2006). -#' } -#' -#' @seealso \code{\link{fitTsfm}}, \code{\link{fitSfm}}, \code{\link{fitFfm}} -#' -#' \code{\link[stats]{cov}} for more details on arguments \code{use} and -#' \code{method}. -#' -#' @examples -#' \dontrun{ -#' # Time Series Factor model -#' data(managers) -#' factors = managers[, (7:9)] -#' fit <- fitTsfm(asset.names=colnames(managers[, (1:6)]), -#' factor.names=c("EDHEC LS EQ","SP500 TR"), data=managers) -#' covFm(fit) -#' -#' # Statistical Factor Model -#' data(stat.fm.data) -#' sfm.pca.fit <- fitSfm(sfm.dat, k=2) -#' #' covFm(t(sfm.pca.fit$loadings), var(sfm.pca.fit$factors), -#' sfm.pca.fit$resid.sd) -#' -#' sfm.apca.fit <- fitSfm(sfm.apca.dat, k=2) -#' -#' covFm(t(sfm.apca.fit$loadings), var(sfm.apca.fit$factors), -#' sfm.apca.fit$resid.sd) -#' -#' # Fundamental Factor Model -#' data(stock) -#' # there are 447 assets -#' exposure.names <- c("BOOK2MARKET", "LOG.MARKETCAP") -#' beta.mat <- subset(stock, DATE=="2003-12-31")[, exposure.names] -#' beta.mat1 <- cbind(rep(1, 447), beta.mat1) -#' # FM return covariance -#' fit.fund <- fitFfm(exposure.names=c("BOOK2MARKET", "LOG.MARKETCAP"), -#' data=stock, returnsvar="RETURN", datevar="DATE", -#' assetvar="TICKER", wls=TRUE, regression="classic", -#' covariance="classic", full.resid.cov=FALSE) -#' ret.cov.fundm <- covFm(beta.mat1,fit.fund$factor.cov$cov,fit.fund$resid.sd) -#' fit.fund$returns.cov$cov == ret.cov.fundm -#' 
} -#' -#' @rdname covFm -#' @export - -covFm <- function(object, ...){ - UseMethod("covFm") -} - -#' @rdname covFm -#' @method covFm tsfm -#' @export - -covFm.tsfm <- function(object, use="pairwise.complete.obs", ...) { - - # check input object validity - if (!inherits(object, c("tsfm", "sfm", "ffm"))) { - stop("Invalid argument: Object should be of class 'tsfm', 'sfm' or 'ffm'.") - } - - # get parameters and factors from factor model - beta <- as.matrix(object$beta) - beta[is.na(beta)] <- 0 - sig2.e = object$resid.sd^2 - factor <- as.matrix(object$data[, object$factor.names]) - - # factor covariance matrix - factor.cov = cov(factor, use=use, ...) - - # residual covariance matrix D - if (length(sig2.e) > 1) { - D.e = diag(sig2.e) - } else { - D.e = as.vector(sig2.e) - } - - cov.fm = beta %*% factor.cov %*% t(beta) + D.e - - if (any(diag(chol(cov.fm))==0)) { - warning("Covariance matrix is not positive definite!") - } - - return(cov.fm) -} Modified: pkg/FactorAnalytics/R/fitTsfm.R =================================================================== --- pkg/FactorAnalytics/R/fitTsfm.R 2014-07-30 17:52:41 UTC (rev 3493) +++ pkg/FactorAnalytics/R/fitTsfm.R 2014-08-03 07:46:55 UTC (rev 3494) @@ -1,7 +1,7 @@ #' @title Fit a time series factor model using time series regression #' -#' @description Fits a time series (or, macroeconomic) factor model for one -#' or more asset returns (or, excess returns) using time series regression. +#' @description Fits a time series (a.k.a. macroeconomic) factor model for one +#' or more asset returns or excess returns using time series regression. #' Users can choose between ordinary least squares-OLS, discounted least #' squares-DLS (or) robust regression. Several variable selection options #' including Stepwise, Subsets, Lars are available as well. An object of class @@ -10,7 +10,7 @@ #' @details #' Typically, factor models are fit using excess returns. 
\code{rf.name} gives #' the option to supply a risk free rate variable to subtract from each asset -#' return and factor to create excess returns. +#' return and factor to compute excess returns. #' #' Estimation method "OLS" corresponds to ordinary least squares using #' \code{\link[stats]{lm}}, "DLS" is discounted least squares (weighted least @@ -33,12 +33,12 @@ #' #' \code{mkt.timing} allows for market-timing factors to be added to any of the #' above methods. Market timing accounts for the price movement of the general -#' stock market relative to fixed income securities). "HM" follows -#' Henriksson & Merton (1981) and \code{up-market = max(0, Rm-Rf)}, is added -#' as a factor in the regression. The coefficient of this up-market factor can -#' be interpreted as the number of free put options. Similarly, "TM" follows +#' stock market relative to fixed income securities. "HM" follows +#' Henriksson & Merton (1981) and \code{up-market=max(0,Rm-Rf)}, is added to +#' the regression. The coefficient of this up-market factor can be +#' interpreted as the number of free put options. Similarly, "TM" follows #' Treynor-Mazuy (1966), to account for market timing with respect to -#' volatility, and \code{market.sqd = (Rm-Rf)^2} is added as a factor in the +#' volatility, and \code{market.sqd=(Rm-Rf)^2} is added as a factor in the #' regression. Option "both" adds both of these factors. #' #' \subsection{Data Processing}{ @@ -74,22 +74,15 @@ #' @param control list of control parameters. The default is constructed by #' the function \code{\link{fitTsfm.control}}. See the documentation for #' \code{\link{fitTsfm.control}} for details. -#' @param ... For \code{fitTsfm}: arguments passed to -#' \code{\link{fitTsfm.control}}. \cr -#' For S3 methods: further arguments passed to or from other methods +#' @param ... arguments passed to \code{\link{fitTsfm.control}} #' -#' @return fitTsfm returns an object of class \code{tsfm}. 
+#' @return fitTsfm returns an object of class \code{tsfm} for which +#' \code{print}, \code{plot}, \code{predict} and \code{summary} methods exist. #' -#' The generic functions \code{summary}, \code{predict} and \code{plot} are -#' used to obtain and print a summary, predicted asset returns for new factor -#' data and plot selected characteristics for one or more assets. The generic -#' accessor functions \code{coef}, \code{fitted} and \code{residuals} -#' extract various useful features of the fit object. \code{coef.tsfm} extracts -#' coefficients from the fitted factor model and returns an N x (K+1) matrix of -#' all coefficients, \code{fitted.tsfm} gives an N x T data object of fitted -#' values and \code{residuals.tsfm} gives an N x T data object of residuals. -#' Additionally, \code{covFm} computes the \code{N x N} covariance matrix for -#' asset returns based on the fitted factor model +#' The generic accessor functions \code{coef}, \code{fitted} and +#' \code{residuals} extract various useful features of the fit object. 
+#' Additionally, \code{fmCov} computes the covariance matrix for asset returns +#' based on the fitted factor model #' #' An object of class \code{tsfm} is a list containing the following #' components: @@ -97,10 +90,10 @@ #' class \code{lm} if \code{fit.method="OLS" or "DLS"}, class \code{lmRob} if #' the \code{fit.method="Robust"}, or class \code{lars} if #' \code{variable.selection="lars"}.} -#' \item{alpha}{N x 1 vector of estimated alphas.} +#' \item{alpha}{length-N vector of estimated alphas.} #' \item{beta}{N x K matrix of estimated betas.} -#' \item{r2}{N x 1 vector of R-squared values.} -#' \item{resid.sd}{N x 1 vector of residual standard deviations.} +#' \item{r2}{length-N vector of R-squared values.} +#' \item{resid.sd}{length-N vector of residual standard deviations.} #' \item{fitted}{xts data object of fitted values; iff #' \code{variable.selection="lars"}} #' \item{call}{the matched function call.} @@ -115,29 +108,31 @@ #' @author Eric Zivot, Yi-An Chen and Sangeetha Srinivasan. #' #' @references -#' \enumerate{ -#' \item Christopherson, Jon A., David R. Carino, and Wayne E. Ferson. -#' Portfolio performance measurement and benchmarking. McGraw Hill -#' Professional, 2009. -#' \item Efron, Bradley, Trevor Hastie, Iain Johnstone, and Robert Tibshirani. -#' "Least angle regression." The Annals of statistics 32, no.2 (2004): 407-499. -#' \item Hastie, Trevor, Robert Tibshirani, Jerome Friedman, T. Hastie, J. -#' Friedman, and R. Tibshirani. The elements of statistical learning. Vol. 2, -#' no. 1. New York: Springer, 2009. -#' \item Henriksson, Roy D., and Robert C. Merton. "On market timing and -#' investment performance. II. Statistical procedures for evaluating -#' forecasting skills." Journal of business (1981): 513-533. -#' \item Treynor, Jack, and Kay Mazuy. "Can mutual funds outguess the market." -#' Harvard business review 44, no. 4 (1966): 131-136. -#' } +#' Christopherson, J. A., Carino, D. R., & Ferson, W. E. (2009). 
Portfolio +#' performance measurement and benchmarking. McGraw Hill Professional. #' +#' Efron, B., Hastie, T., Johnstone, I., & Tibshirani, R. (2004). Least angle +#' regression. The Annals of statistics, 32(2), 407-499. +#' +#' Hastie, T., Tibshirani, R., Friedman, J., Hastie, T., Friedman, J., & +#' Tibshirani, R. (2009). The elements of statistical learning (Vol. 2, No. 1). +#' New York: Springer. +#' +#' Henriksson, R. D., & Merton, R. C. (1981). On market timing and investment +#' performance. II. Statistical procedures for evaluating forecasting skills. +#' Journal of business, 513-533. +#' +#' Treynor, J., & Mazuy, K. (1966). Can mutual funds outguess the market. +#' Harvard business review, 44(4), 131-136. +#' #' @seealso The \code{tsfm} methods for generic functions: #' \code{\link{plot.tsfm}}, \code{\link{predict.tsfm}}, #' \code{\link{print.tsfm}} and \code{\link{summary.tsfm}}. #' #' And, the following extractor functions: \code{\link[stats]{coef}}, -#' \code{\link{covFm}}, \code{\link[stats]{fitted}} and -#' \code{\link[stats]{residuals}}. +#' \code{\link[stats]{fitted}}, \code{\link[stats]{residuals}}, +#' \code{\link{fmCov}}, \code{\link{fmSdDecomp}}, \code{\link{fmVaRDecomp}} +#' and \code{\link{fmEsDecomp}}. #' #' \code{\link{paFm}} for Performance Attribution. #' @@ -171,8 +166,7 @@ fitTsfm <- function(asset.names, factor.names, mkt.name=NULL, rf.name=NULL, data=data, fit.method=c("OLS","DLS","Robust"), - variable.selection=c("none","stepwise","subsets", - "lars"), + variable.selection=c("none","stepwise","subsets","lars"), mkt.timing=NULL, control=fitTsfm.control(...), ...) { # record the call as an element to be returned @@ -493,6 +487,7 @@ #' @param object a fit object of class \code{tsfm} which is returned by #' \code{fitTsfm} +#' @param ... 
further arguments passed to or from other methods #' @rdname fitTsfm #' @method coef tsfm Modified: pkg/FactorAnalytics/R/fitTsfm.control.R =================================================================== --- pkg/FactorAnalytics/R/fitTsfm.control.R 2014-07-30 17:52:41 UTC (rev 3493) +++ pkg/FactorAnalytics/R/fitTsfm.control.R 2014-08-03 07:46:55 UTC (rev 3494) @@ -142,6 +142,7 @@ lars.criterion="Cp", K = 10) { # get the user-specified arguments (that have no defaults) + # this part of the code was adapted from stats::lm call <- match.call() m <- match(c("weights","scope","scale","direction","method","type", "max.steps"), names(call), 0L) Added: pkg/FactorAnalytics/R/fmCov.R =================================================================== --- pkg/FactorAnalytics/R/fmCov.R (rev 0) +++ pkg/FactorAnalytics/R/fmCov.R 2014-08-03 07:46:55 UTC (rev 3494) @@ -0,0 +1,122 @@ +#' @title Covariance Matrix for assets' returns from fitted factor model. +#' +#' @description Computes the covariance matrix for assets' returns based on a +#' fitted factor model. This is a generic function with methods for classes +#' \code{tsfm}, \code{sfm} and \code{ffm}. +#' +#' @details \code{R(i, t)}, the return on asset \code{i} at time \code{t}, +#' is assumed to follow a factor model of the form, \cr \cr +#' \code{R(i,t) = alpha(i) + beta*F(t) + e(i,t)}, \cr \cr +#' where, \code{alpha(i)} is the intercept, \code{F(t)} is a {K x 1} vector of +#' the \code{K} factor values at time \code{t}, \code{beta} is a \code{1 x K} +#' vector of factor exposures and the error terms \code{e(i,t)} are serially +#' uncorrelated across time and contemporaneously uncorrelated across assets +#' so that \code{e(i,t) ~ iid(0,sig(i)^2)}. Thus, the variance of asset +#' \code{i}'s return is given by \cr \cr +#' \code{var(R(i,t)) = beta*var(F(t))*tr(beta) + sig(i)^2}. 
\cr \cr +#' And, the \code{N x N} covariance matrix of N asset returns is \cr \cr +#' \code{var(R) = B*var(F(t))*tr(B) + D}, \cr \cr +#' where, B is the \code{N x K} matrix of factor betas and \code{D} is a +#' diagonal matrix with \code{sig(i)^2} along the diagonal. +#' +#' The method for computing covariance can be specified via the \dots +#' argument. Note that the default of \code{use="pairwise.complete.obs"} for +#' handling NAs restricts the method to "pearson". +#' +#' @param object fit object of class \code{tsfm}, \code{sfm} or \code{ffm}. +#' @param use an optional character string giving a method for computing +#' covariances in the presence of missing values. This must be (an +#' abbreviation of) one of the strings "everything", "all.obs", +#' "complete.obs", "na.or.complete", or "pairwise.complete.obs". Default is +#' "pairwise.complete.obs". +#' @param ... optional arguments passed to \code{\link[stats]{cov}}. +#' +#' @return The computed \code{N x N} covariance matrix for asset returns based +#' on the fitted factor model. +#' +#' @author Eric Zivot, Yi-An Chen and Sangeetha Srinivasan. +#' +#' @references +#' Zivot, E., & Jia-hui, W. A. N. G. (2006). Modeling Financial Time +#' Series with S-Plus Springer-Verlag. +#' +#' @seealso \code{\link{fitTsfm}}, \code{\link{fitSfm}}, \code{\link{fitFfm}} +#' +#' \code{\link[stats]{cov}} for more details on arguments \code{use} and +#' \code{method}. 
+#' +#' @examples +#' # Time Series Factor model +#' data(managers) +#' fit <- fitTsfm(asset.names=colnames(managers[, (1:6)]), +#' factor.names=c("EDHEC LS EQ","SP500 TR"), data=managers) +#' fmCov(fit) +#' +#' \dontrun{ +#' # Statistical Factor Model +#' data(stat.fm.data) +#' sfm.pca.fit <- fitSfm(sfm.dat, k=2) +#' #' fmCov(t(sfm.pca.fit$loadings), var(sfm.pca.fit$factors), +#' sfm.pca.fit$resid.sd) +#' +#' sfm.apca.fit <- fitSfm(sfm.apca.dat, k=2) +#' +#' fmCov(t(sfm.apca.fit$loadings), var(sfm.apca.fit$factors), +#' sfm.apca.fit$resid.sd) +#' +#' # Fundamental Factor Model +#' data(stock) +#' # there are 447 assets +#' exposure.names <- c("BOOK2MARKET", "LOG.MARKETCAP") +#' beta.mat <- subset(stock, DATE=="2003-12-31")[, exposure.names] +#' beta.mat1 <- cbind(rep(1, 447), beta.mat1) +#' # FM return covariance +#' fit.fund <- fitFfm(exposure.names=c("BOOK2MARKET", "LOG.MARKETCAP"), +#' data=stock, returnsvar="RETURN", datevar="DATE", +#' assetvar="TICKER", wls=TRUE, regression="classic", +#' covariance="classic", full.resid.cov=FALSE) +#' ret.cov.fundm <- fmCov(beta.mat1,fit.fund$factor.cov$cov,fit.fund$resid.sd) +#' fit.fund$returns.cov$cov == ret.cov.fundm +#' } +#' +#' @rdname fmCov +#' @export + +fmCov <- function(object, ...){ + # check input object validity + if (!inherits(object, c("tsfm", "sfm", "ffm"))) { + stop("Invalid argument: Object should be of class 'tsfm', 'sfm' or 'ffm'.") + } + UseMethod("fmCov") +} + +#' @rdname fmCov +#' @method fmCov tsfm +#' @export + +fmCov.tsfm <- function(object, use="pairwise.complete.obs", ...) { + + # get parameters and factors from factor model + beta <- as.matrix(object$beta) + beta[is.na(beta)] <- 0 + sig2.e = object$resid.sd^2 + factor <- as.matrix(object$data[, object$factor.names]) + + # factor covariance matrix + factor.cov = cov(factor, use=use, ...) 
+ + # residual covariance matrix D + if (length(sig2.e) > 1) { + D.e = diag(sig2.e) + } else { + D.e = as.vector(sig2.e) + } + + cov.fm = beta %*% factor.cov %*% t(beta) + D.e + + if (any(diag(chol(cov.fm))==0)) { + warning("Covariance matrix is not positive definite!") + } + + return(cov.fm) +} Added: pkg/FactorAnalytics/R/fmEsDecomp.R =================================================================== --- pkg/FactorAnalytics/R/fmEsDecomp.R (rev 0) +++ pkg/FactorAnalytics/R/fmEsDecomp.R 2014-08-03 07:46:55 UTC (rev 3494) @@ -0,0 +1,225 @@ +#' @title Decompose ES into individual factor contributions +#' +#' @description Compute the factor contributions to Expected Tail Loss or +#' Expected Shortfall (ES) of assets' returns based on Euler's theorem, given +#' the fitted factor model. The partial derivative of ES with respect to factor +#' beta is computed as the expected factor return given fund return is less +#' than or equal to its value-at-risk (VaR). VaR is computed as the sample quantile of the historic or +#' simulated data. +#' +#' @details The factor model for an asset's return at time \code{t} has the +#' form \cr \cr \code{R(t) = beta'F(t) + e(t) = beta.star'F.star(t)} \cr \cr +#' where, \code{beta.star=(beta,sig.e)} and \code{F.star(t)=[F(t)',z(t)]'}. By +#' Euler's theorem, the ES of the asset's return is given by: +#' \cr \cr \code{ES.fm = sum(cES_k) = sum(beta.star_k*mES_k)} \cr \cr +#' where, summation is across the \code{K} factors and the residual, +#' \code{cES} and \code{mES} are the component and marginal +#' contributions to \code{ES} respectively. The marginal contribution to ES is +#' defined as the expected value of \code{F.star}, conditional on the loss +#' being less than or equal to \code{VaR.fm}. This is estimated as a sample +#' average of the observations in that data window. +#' +#' Computation of the VaR measure is done using +#' \code{\link[PerformanceAnalytics]{VaR}}. 
Arguments \code{p}, \code{method} +#' and \code{invert} are passed to this function. Refer to their help file for +#' details and other options. \code{invert} consistently affects the sign for +#' all VaR and ES measures. +#' +#' @param object fit object of class \code{tsfm}, \code{sfm} or \code{ffm}. +#' @param p confidence level for calculation. Default is 0.95. +#' @param method method for computing VaR, one of "modified","gaussian", +#' "historical", "kernel". Default is "modified". See details. +#' @param invert logical; whether to invert the VaR measure. Default is +#' \code{FALSE}. +#' @param ... other optional arguments passed to +#' \code{\link[PerformanceAnalytics]{VaR}}. +#' +#' @return A list containing +#' \item{VaR.fm}{length-N vector of factor model VaRs of N-asset returns.} +#' \item{n.exceed}{length-N vector of number of observations beyond VaR for +#' each asset.} +#' \item{idx.exceed}{list of numeric vector of index values of exceedances.} +#' \item{ES.fm}{length-N vector of factor model ES of N-asset returns.} +#' \item{mES}{N x (K+1) matrix of marginal contributions to VaR.} +#' \item{cES}{N x (K+1) matrix of component contributions to VaR.} +#' \item{pcES}{N x (K+1) matrix of percentage component contributions to VaR.} +#' Where, \code{K} is the number of factors and N is the number of assets. +#' +#' @author Eric Zviot, Yi-An Chen and Sangeetha Srinivasan +#' +#' @references +#' Epperlein, E., & Smillie, A. (2006). Portfolio risk analysis Cracking VAR +#' with kernels. RISK-LONDON-RISK MAGAZINE LIMITED-, 19(8), 70. +#' +#' Hallerback (2003). Decomposing Portfolio Value-at-Risk: A General Analysis. +#' The Journal of Risk, 5(2), 1-18. +#' +#' Meucci, A. (2007). Risk contributions from generic user-defined factors. +#' RISK-LONDON-RISK MAGAZINE LIMITED-, 20(6), 84. +#' +#' Yamai, Y., & Yoshiba, T. (2002). Comparative analyses of expected shortfall +#' and value-at-risk: their estimation error, decomposition, and optimization. 
+#' Monetary and economic studies, 20(1), 87-121. +#' +#' @seealso \code{\link{fitTsfm}}, \code{\link{fitSfm}}, \code{\link{fitFfm}} +#' for the different factor model fitting functions. +#' +#' \code{\link[PerformanceAnalytics]{VaR}} for VaR computation. +#' \code{\link{fmSdDecomp}} for factor model SD decomposition. +#' \code{\link{fmVaRDecomp}} for factor model VaR decomposition. +#' +#' @examples +#' # Time Series Factor Model +#' data(managers) +#' fit.macro <- fitTsfm(asset.names=colnames(managers[,(1:6)]), +#' factor.names=colnames(managers[,(7:8)]), data=managers) +#' +#' ES.decomp <- fmEsDecomp(fit.macro) +#' # get the component contributions +#' ES.decomp$cES +#' +#' @export + +fmEsDecomp <- function(object, ...){ + # check input object validity + if (!inherits(object, c("tsfm", "sfm", "ffm"))) { + stop("Invalid argument: Object should be of class 'tsfm', 'sfm' or 'ffm'.") + } + UseMethod("fmEsDecomp") +} + +#' @rdname fmEsDecomp +#' @method fmEsDecomp tsfm +#' @export + +fmEsDecomp.tsfm <- function(object, p=0.95, + method=c("modified","gaussian","historical", + "kernel"), invert=FALSE, ...) 
{ + + # set defaults and check input vailidity + method = method[1] + + if (!(method %in% c("modified", "gaussian", "historical", "kernel"))) { + stop("Invalid argument: method must be 'modified', 'gaussian', + 'historical' or 'kernel'") + } + + # get beta.star + beta.star <- as.matrix(cbind(object$beta, object$resid.sd)) + colnames(beta.star)[dim(beta.star)[2]] <- "residual" + + # factor returns and residuals data + factors.xts <- object$data[,object$factor.names] + resid.xts <- t(t(residuals(object))/object$resid.sd) + + # initialize lists and matrices + N <- length(object$asset.names) + K <- length(object$factor.names) + VaR.fm <- rep(NA, N) + ES.fm <- rep(NA, N) + idx.exceed <- list() + n.exceed <- rep(NA, N) + names(VaR.fm) = names(ES.fm) = names(n.exceed) = object$asset.names + mES <- matrix(NA, N, K+1) + cES <- matrix(NA, N, K+1) + pcES <- matrix(NA, N, K+1) + rownames(mES)=rownames(cES)=rownames(pcES)=object$asset.names + colnames(mES)=colnames(cES)=colnames(pcES)=c(object$factor.names,"residuals") + + for (i in object$asset.names) { + # return data for asset i + R.xts <- object$data[,i] + # get VaR for asset i + VaR.fm[i] <- VaR(R.xts, p=p, method=method, invert=invert, ...) + # index of VaR exceedances + idx.exceed[[i]] <- which(R.xts <= VaR.fm[i]) + # number of VaR exceedances + n.exceed[i] <- length(idx.exceed[[i]]) + + # get F.star data object + factor.star <- merge(factors.xts, resid.xts[,i]) + colnames(factor.star)[dim(factor.star)[2]] <- "residual" + + if (!invert) {inv=-1} else {inv=1} + + # compute ES as expected value of asset return, such that the given asset + # return is less than or equal to its value-at-risk (VaR) and approximated + # by a kernel estimator. + idx <- which(R.xts <= inv*VaR.fm[i]) + ES.fm[i] <- inv * mean(R.xts[idx], na.rm =TRUE) + + # compute marginal ES as expected value of factor returns, such that the + # given asset return is less than or equal to its value-at-risk (VaR) and + # approximated by a kernel estimator. 
+ mES[i,] <- inv * colMeans(factor.star[idx,], na.rm =TRUE) + + # correction factor to ensure that sum(cES) = portfolio ES + cf <- as.numeric( ES.fm[i] / sum(mES[i,]*beta.star[i,]) ) + + # compute marginal, component and percentage contributions to ES + # each of these have dimensions: N x (K+1) + mES[i,] <- cf * mES[i,] + cES[i,] <- mES[i,] * beta.star[i,] + pcES[i,] <- cES[i,] / ES.fm[i] + } + + fm.ES.decomp <- list(VaR.fm=VaR.fm, n.exceed=n.exceed, idx.exceed=idx.exceed, + ES.fm=ES.fm, mES=mES, cES=cES, pcES=pcES) + + return(fm.ES.decomp) + } + + + + + +factorModelEsDecomposition <- + function(Data, beta.vec, sig.e, tail.prob = 0.05, + VaR.method=c("modified", "gaussian", "historical", "kernel")) { + + Data = as.matrix(Data) + ncol.Data = ncol(Data) + if(is.matrix(beta.vec)) { + beta.names = c(rownames(beta.vec), "residual") + } else if(is.vector(beta.vec)) { + beta.names = c(names(beta.vec), "residual") + } else { + stop("beta.vec is not an n x 1 matrix or a vector") + } + beta.names = c(names(beta.vec), "residual") + beta.star.vec = c(beta.vec, sig.e) + names(beta.star.vec) = beta.names + + ## epsilon is calculated in the sense of minimizing mean square error by Silverman 1986 + epi <- 2.575*sd(Data[,1]) * (nrow(Data)^(-1/5)) + VaR.fm = as.numeric(VaR(Data[, 1], p=(1-tail.prob), method=VaR.method)) + idx = which(Data[, 1] <= VaR.fm + epi & Data[,1] >= VaR.fm - epi) + + + + ES.fm = -mean(Data[idx, 1]) + + ## + ## compute marginal contribution to ES + ## + ## compute marginal ES as expected value of factor return given fund + ## return is less than or equal to VaR + mcES.fm = -as.matrix(colMeans(Data[idx, -1])) + + ## compute correction factor so that sum of weighted marginal ES adds to portfolio ES + cf = as.numeric( ES.fm / sum(mcES.fm*beta.star.vec) ) + mcES.fm = cf*mcES.fm + cES.fm = mcES.fm*beta.star.vec + pcES.fm = cES.fm/ES.fm + colnames(mcES.fm) = "MCES" + colnames(cES.fm) = "CES" + colnames(pcES.fm) = "PCES" + ans = list(VaR.fm = -VaR.fm, + n.exceed = 
length(idx), + idx.exceed = idx, + ES.fm = ES.fm, + mES.fm = t(mcES.fm), + cES.fm = t(cES.fm), + pcES.fm = t(pcES.fm)) + return(ans) + } Added: pkg/FactorAnalytics/R/fmSdDecomp.R =================================================================== --- pkg/FactorAnalytics/R/fmSdDecomp.R (rev 0) +++ pkg/FactorAnalytics/R/fmSdDecomp.R 2014-08-03 07:46:55 UTC (rev 3494) @@ -0,0 +1,122 @@ +#' @title Decompose standard deviation into individual factor contributions +#' +#' @description Compute the factor contributions to standard deviation (SD) of +#' assets' returns based on Euler's theorem, given the fitted factor model. +#' +#' @details The factor model for an asset's return at time \code{t} has the +#' form \cr \cr \code{R(t) = beta'F(t) + e(t) = beta.star'F.star(t)} \cr \cr +#' where, \code{beta.star=(beta,sig.e)} and \code{F.star(t)=[F(t)',z(t)]'}. +#' \cr \cr By Euler's theorem, the standard deviation of the asset's return +#' is given as: \cr \cr +#' \code{Sd.fm = sum(cSd_k) = sum(beta.star_k*mSd_k)} \cr \cr +#' where, summation is across the \code{K} factors and the residual, +#' \code{cSd} and \code{mSd} are the component and marginal +#' contributions to \code{SD} respectively. Computing \code{Sd.fm} and +#' \code{mSd} is very straight forward. The formulas are given below and +#' details are in the references. The covariance term is approximated by the +#' sample covariance. \cr \cr +#' \code{Sd.fm = sqrt(beta.star''cov(F.star)beta.star)} \cr +#' \code{mSd = cov(F.star)beta.star / Sd.fm} +#' +#' @param object fit object of class \code{tsfm}, \code{sfm} or \code{ffm}. +#' @param use an optional character string giving a method for computing +#' covariances in the presence of missing values. This must be (an +#' abbreviation of) one of the strings "everything", "all.obs", +#' "complete.obs", "na.or.complete", or "pairwise.complete.obs". Default is +#' "pairwise.complete.obs". +#' @param ... optional arguments passed to \code{\link[stats]{cov}}. 
+#' +#' @return A list containing +#' \item{Sd.fm}{length-N vector of factor model SDs of N-asset returns.} +#' \item{mSd}{N x (K+1) matrix of marginal contributions to SD.} +#' \item{cSd}{N x (K+1) matrix of component contributions to SD.} +#' \item{pcSd}{N x (K+1) matrix of percentage component contributions to SD.} +#' Where, \code{K} is the number of factors and N is the number of assets. +#' +#' @author Eric Zivot, Yi-An Chen and Sangeetha Srinivasan +#' +#' @references +#' Hallerback (2003). Decomposing Portfolio Value-at-Risk: A General Analysis. +#' The Journal of Risk, 5(2), 1-18. +#' +#' Meucci, A. (2007). Risk contributions from generic user-defined factors. +#' RISK-LONDON-RISK MAGAZINE LIMITED-, 20(6), 84. +#' +#' Yamai, Y., & Yoshiba, T. (2002). Comparative analyses of expected shortfall +#' and value-at-risk: their estimation error, decomposition, and optimization. +#' Monetary and economic studies, 20(1), 87-121. +#' +#' @seealso \code{\link{fitTsfm}}, \code{\link{fitSfm}}, \code{\link{fitFfm}} +#' for the different factor model fitting functions. +#' +#' \code{\link{fmCov}} for factor model covariance. +#' \code{\link{fmVaRDecomp}} for factor model VaR decomposition. +#' \code{\link{fmEsDecomp}} for factor model ES decomposition. 
+#' +#' @examples +#' # Time Series Factor Model +#' data(managers) +#' fit.macro <- fitTsfm(asset.names=colnames(managers[,(1:6)]), +#' factor.names=colnames(managers[,(7:9)]), +#' rf.name="US 3m TR", data=managers) +#' +#' decomp <- fmSdDecomp(fit.macro) +#' # get the percentage component contributions +#' decomp$pcSd +#' +#' @export + +fmSdDecomp <- function(object, ...){ + # check input object validity + if (!inherits(object, c("tsfm", "sfm", "ffm"))) { + stop("Invalid argument: Object should be of class 'tsfm', 'sfm' or 'ffm'.") + } + UseMethod("fmSdDecomp") +} + +## Remarks: +## The factor model for asset i's return has the form +## R(i,t) = beta_i'F(t) + e(i,t) = beta.star_i'F.star(t) +## where beta.star_i = (beta_i, sig.e_i)' and F.star(t) = (F(t)', z(t))' + +## Standard deviation of the asset i's return +## sd.fm_i = sqrt(beta.star_i'Cov(F.star)beta.star_i) + +## By Euler's theorem +## sd.fm_i = sum(cSd_i(k)) = sum(beta.star_i(k)*mSd_i(k)) [TRUNCATED] To get the complete diff run: svnlook diff /svnroot/returnanalytics -r 3494 From noreply at r-forge.r-project.org Mon Aug 4 04:01:27 2014 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Mon, 4 Aug 2014 04:01:27 +0200 (CEST) Subject: [Returnanalytics-commits] r3495 - pkg/PortfolioAnalytics/vignettes Message-ID: <20140804020127.7F96E1876C5@r-forge.r-project.org> Author: rossbennett34 Date: 2014-08-04 04:01:26 +0200 (Mon, 04 Aug 2014) New Revision: 3495 Added: pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.Rnw pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.pdf Log: adding first draft of custom moment and objective functions Added: pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.Rnw =================================================================== --- pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.Rnw (rev 0) +++ pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.Rnw 2014-08-04 02:01:26 UTC (rev 3495) @@ -0,0 +1,140 @@ 
+\documentclass[a4paper]{article} +\usepackage[OT1]{fontenc} +\usepackage{Rd} +\usepackage{amsmath} +\usepackage{hyperref} + +\usepackage[round]{natbib} +\usepackage{bm} +\usepackage{verbatim} +\usepackage[latin1]{inputenc} +\bibliographystyle{abbrvnat} + +\usepackage{url} + +\let\proglang=\textsf +%\newcommand{\pkg}[1]{{\fontseries{b}\selectfont #1}} +%\newcommand{\R}[1]{{\fontseries{b}\selectfont #1}} +%\newcommand{\email}[1]{\href{mailto:#1}{\normalfont\texttt{#1}}} +%\newcommand{\E}{\mathsf{E}} +%\newcommand{\VAR}{\mathsf{VAR}} +%\newcommand{\COV}{\mathsf{COV}} +%\newcommand{\Prob}{\mathsf{P}} + +\renewcommand{\topfraction}{0.85} +\renewcommand{\textfraction}{0.1} +\renewcommand{\baselinestretch}{1.5} +\setlength{\textwidth}{15cm} \setlength{\textheight}{22cm} \topmargin-1cm \evensidemargin0.5cm \oddsidemargin0.5cm + +\usepackage[latin1]{inputenc} +% or whatever + +\usepackage{lmodern} +\usepackage[T1]{fontenc} +% Or whatever. Note that the encoding and the font should match. If T1 +% does not look nice, try deleting the line with the fontenc. + +% \VignetteIndexEntry{Custom Moment and Objective Functions} + +\begin{document} + +\title{Custom Moment and Objective Functions} +\author{Ross Bennett} + +\maketitle + +\begin{abstract} +The purpose of this vignette is to demonstrate how to write and use custom moment functions and custom objective functions. +\end{abstract} + +\tableofcontents + +\section{Getting Started} +\subsection{Load Packages} +Load the necessary packages. + +<<>>= +library(PortfolioAnalytics) +@ + +\subsection{Data} +The edhec data set from the PerformanceAnalytics package will be used as example data. 
+<<>>= +data(edhec) + +# Use the first 4 columns in edhec for a returns object +R <- edhec[, 1:4] +colnames(R) <- c("CA", "CTAG", "DS", "EM") +head(R, 5) + +# Get a character vector of the fund names +funds <- colnames(R) +@ + +\section{Setting the Portfolio Moments} +The PortfolioAnalytics framework to estimate solutions to constrained optimization problems is implemented in such a way that the moments of the returns are calculated only once. The \code{set.portfolio.moments} function computes the first, second, third, and fourth moments depending on the objective function(s) in the \code{portfolio} object. The moments are then used by lower level optimization functions. \code{set.portfolio.moments} implements methods to compute moments based on sample estimates, higher moments from fitting a statistical factor model based on the work of Kris Boudt (NEED REFERENCE HERE), the Black Litterman model, and the Fully Flexible Framework based on the work of Attilio Meucci. + +The moments of the returns are computed based on the objective(s) in the \code{portfolio} object and return a list where each element is the respective moment estimate. +<<>>= +args(set.portfolio.moments) +@ + + +<>= +# Construct initial portfolio with basic constraints. +init.portf <- portfolio.spec(assets=funds) +init.portf <- add.constraint(portfolio=init.portf, type="full_investment") +init.portf <- add.constraint(portfolio=init.portf, type="long_only") + +# Portfolio with standard deviation as an objective +SD.portf <- add.objective(portfolio=init.portf, type="risk", name="StdDev") + +# Portfolio with expected shortfall as an objective +ES.portf <- add.objective(portfolio=init.portf, type="risk", name="ES") +@ + +Here we see the names of the object that is returned. 
+<<>>= +sd.moments <- set.portfolio.moments(R, SD.portf) +names(sd.moments) + +es.moments <- set.portfolio.moments(R, ES.portf) +names(es.moments) +@ + +\section{Custom Moment Functions} +In many cases for constrained optimization problems, one may want to estimate moments for a specific use case or further extend the idea of \code{set.portfolio.moments}. A user defined custom moment function can have any arbitrary named arguments, however the argument names \verb"R" and \verb"portfolio" will be detected and matched in an efficient manner. + +Here we define a function to compute the covariance matrix using a robust estimate. +<<>>= +sigma.robust <- function(R, ...){ + out <- list() + set.seed(1234) + out$sigma <- MASS::cov.rob(R, method="mcd", ...)$cov + return(out) +} +@ + +Now we can use the custom moment function in \code{optimize.portfolio} to estimate the solution to the minimum standard deviation portfolio. +<>= +opt.sd <- optimize.portfolio(R, SD.portf, + optimize_method="ROI", + momentFUN="sigma.robust") +opt.sd +@ + +Here we extract the weights and compute the portfolio standard deviation to verify. +<>= +weights <- extractWeights(opt.sd) +sigma <- sigma.robust(R)$sigma + +sqrt(t(weights) %*% sigma %*% weights) +extractObjectiveMeasures(opt.sd)$StdDev +@ + +\section{Custom Objective Functions} +A key feature of \verb"PortfolioAnalytics" is that the name for an objective can be any valid \R function. \verb"PortfolioAnalytics" was designed to be flexible and modular, and custom objective functions are a key example of this. 
+ +TODO: add content and example code + +\end{document} \ No newline at end of file Added: pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.pdf =================================================================== (Binary files differ) Property changes on: pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.pdf ___________________________________________________________________ Added: svn:mime-type + application/octet-stream From noreply at r-forge.r-project.org Tue Aug 5 00:15:36 2014 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Tue, 5 Aug 2014 00:15:36 +0200 (CEST) Subject: [Returnanalytics-commits] r3496 - pkg/PortfolioAnalytics/vignettes Message-ID: <20140804221536.1CA55186727@r-forge.r-project.org> Author: rossbennett34 Date: 2014-08-05 00:15:35 +0200 (Tue, 05 Aug 2014) New Revision: 3496 Modified: pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.Rnw Log: adding content to custom objectives section Modified: pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.Rnw =================================================================== --- pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.Rnw 2014-08-04 02:01:26 UTC (rev 3495) +++ pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.Rnw 2014-08-04 22:15:35 UTC (rev 3496) @@ -135,6 +135,105 @@ \section{Custom Objective Functions} A key feature of \verb"PortfolioAnalytics" is that the name for an objective can be any valid \R function. \verb"PortfolioAnalytics" was designed to be flexible and modular, and custom objective functions are a key example of this. -TODO: add content and example code +Here we define a very simple function to compute annualized standard deviation for monthly data that we will use as an objective function. 
+<<>>= +pasd <- function(R, weights, sigma, N=36){ + R <- tail(R, N) + tmp.sd <- sqrt(as.numeric(t(weights) %*% sigma %*% weights)) + sqrt(12) * tmp.sd +} +@ -\end{document} \ No newline at end of file +The objective function must return a single value for the optimizer to minimize. It is strongly encouraged to use the following argument names in the objective function: +\begin{description} + \item[\code{R}] {for the asset returns} + \item[\code{weights}] {for the portfolio weights} +\end{description} + +These argument names are detected automatically and handled in an efficient manner. Any other arguments for the objective function can be for the moments or passed in through the \code{arguments} list in the objective. + +For our \code{pasd} function, we need custom moments function to return a named list with \code{sigma} as an element. We can use the \code{sigma.robust} function we defined in the previous section. Here we construct a portfolio with our \code{pasd} function as an objective to minimize. + +<>= +# Construct initial portfolio with basic constraints. +pasd.portf <- portfolio.spec(assets=funds) +pasd.portf <- add.constraint(portfolio=pasd.portf, type="full_investment") +pasd.portf <- add.constraint(portfolio=pasd.portf, type="long_only") + +# Portfolio with pasd as an objective +# Note how we can specify N as an argument +pasd.portf <- add.objective(portfolio=pasd.portf, type="risk", name="pasd", + arguments=list(N=48)) +@ + + +Now we can run the optimization to estimate a solution to our optimization problem. +<<>>= +opt.pasd <- optimize.portfolio(R, pa.portf, + optimize_method="ROI", + momentFUN="sigma.robust") +opt.pasd +@ + +We now consider an example with a more complicated objective function. Our objective to maximize the fourth order expansion of the Constant Relative Risk Aversion (CRRA) expected utility function as in the Boudt paper and Martellini paper (NEED REFERENCE). 
+ +\begin{equation*} +EU_{\lambda}(w) = - \frac{\lambda}{2} m_{(2)}(w) + +\frac{\lambda (\lambda + 1)}{6} m_{(3)}(w) - +\frac{\lambda (\lambda + 1) (\lambda + 2)}{24} m_{(4)}(w) +\end{equation*} + +Define a function to compute CRRA estimate. Note how we define the function to use \code{sigma}, \code{m3}, and \code{m4} as arguments that will use the output from a custom moment function. We could compute the moments inside this function, but re-computing the moments thousands of times (i.e. at each iteration) can be very compute intensive. + +<<>>= +CRRA <- function(R, weights, lambda, sigma, m3, m4){ + weights <- matrix(weights, ncol=1) + M2.w <- t(weights) %*% sigma %*% weights + M3.w <- t(weights) %*% m3 %*% (weights %x% weights) + M4.w <- t(weights) %*% m4 %*% (weights %x% weights %x% weights) + term1 <- (1 / 2) * lambda * M2.w + term2 <- (1 / 6) * lambda * (lambda + 1) * M3.w + term3 <- (1 / 24) * lambda * (lambda + 1) * (lambda + 2) * M4.w + out <- -term1 + term2 - term3 + out +} +@ + +We now define the custom moment function to compute the moments for the objective function. +<<>>= +crra.moments <- function(R, ...){ + out <- list() + out$sigma <- cov(R) + out$m3 <- PerformanceAnalytics:::M3.MM(R) + out$m4 <- PerformanceAnalytics:::M4.MM(R) + out +} +@ + +We now set up the portfolio and run the optimization using our custom moments and objective function to maximize CRRA. Note that \code{type="return"} is used to maximize an objective function. +<>= +# Construct initial portfolio with basic constraints. 
+crra.portf <- portfolio.spec(assets=funds) +crra.portf <- add.constraint(portfolio=crra.portf, type="weight_sum", + min_sum=0.99, max_sum=1.01) +crra.portf <- add.constraint(portfolio=crra.portf, type="box", + min=0.05, max=0.4) + +# Portfolio with crra as an objective +# Note how we can specify lambda as an argument +crra.portf <- add.objective(portfolio=crra.portf, type="return", name="CRRA", + arguments=list(lambda=10)) +@ + +<<>>= +opt.crra <- optimize.portfolio(R, portf.crra, optimize_method="DEoptim", + search_size=5000, trace=TRUE, traceDE=0, + momentFUN="crra.moments") +opt.crra +@ + +The modular framework of \verb"PortfolioAnalytics" allows one to easily define custom moment functions and objective functions as valid \R functions. + +TODO: add content to concluding paragraph + +\end{document} From noreply at r-forge.r-project.org Tue Aug 5 03:32:34 2014 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Tue, 5 Aug 2014 03:32:34 +0200 (CEST) Subject: [Returnanalytics-commits] r3497 - in pkg/PortfolioAnalytics: R vignettes Message-ID: <20140805013234.268781876D3@r-forge.r-project.org> Author: rossbennett34 Date: 2014-08-05 03:32:33 +0200 (Tue, 05 Aug 2014) New Revision: 3497 Modified: pkg/PortfolioAnalytics/R/constraint_fn_map.R pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.Rnw pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.pdf Log: Updates to custom moments and objective functions vignette. Fixing print bug in rp_transform. 
Modified: pkg/PortfolioAnalytics/R/constraint_fn_map.R =================================================================== --- pkg/PortfolioAnalytics/R/constraint_fn_map.R 2014-08-04 22:15:35 UTC (rev 3496) +++ pkg/PortfolioAnalytics/R/constraint_fn_map.R 2014-08-05 01:32:33 UTC (rev 3497) @@ -93,7 +93,6 @@ # check leverage constraints if(!is.null(min_sum) & !is.null(max_sum)){ if(!(sum(tmp_weights) >= min_sum & sum(tmp_weights) <= max_sum)){ - print("foo") # Try to transform only considering leverage and box constraints tmp_weights <- try(rp_transform(w=tmp_weights, min_sum=min_sum, Modified: pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.Rnw =================================================================== --- pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.Rnw 2014-08-04 22:15:35 UTC (rev 3496) +++ pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.Rnw 2014-08-05 01:32:33 UTC (rev 3497) @@ -72,7 +72,7 @@ @ \section{Setting the Portfolio Moments} -The PortfolioAnalytics framework to estimate solutions to constrained optimization problems is implemented in such a way that the moments of the returns are calculated only once. The \code{set.portfolio.moments} function computes the first, second, third, and fourth moments depending on the objective function(s) in the \code{portfolio} object. The moments are then used by lower level optimization functions. \code{set.portfolio.moments} implements methods to compute moments based on sample estimates, higher moments from fitting a statistical factor model based on the work of Kris Boudt (NEED REFERENCE HERE), the Black Litterman model, and the Fully Flexible Framework based on the work of Attilio Meucci. +The PortfolioAnalytics framework to estimate solutions to constrained optimization problems is implemented in such a way that the moments of the returns are calculated only once and then used in lower level optimization functions. 
The \code{set.portfolio.moments} function computes the first, second, third, and fourth moments depending on the objective function(s) in the \code{portfolio} object. \code{set.portfolio.moments} implements methods to compute moments based on sample estimates, higher moments from fitting a statistical factor model based on the work of Kris Boudt (NEED REFERENCE HERE), the Black Litterman model, and the Fully Flexible Framework based on the work of Attilio Meucci. The moments of the returns are computed based on the objective(s) in the \code{portfolio} object and return a list where each element is the respective moment estimate. <<>>= @@ -93,7 +93,7 @@ ES.portf <- add.objective(portfolio=init.portf, type="risk", name="ES") @ -Here we see the names of the object that is returned. +Here we see the names of the list object that is returned by \code{set.portfolio.moments}. <<>>= sd.moments <- set.portfolio.moments(R, SD.portf) names(sd.moments) @@ -103,9 +103,19 @@ @ \section{Custom Moment Functions} -In many cases for constrained optimization problems, one may want to estimate moments for a specific use case or further extend the idea of \code{set.portfolio.moments}. A user defined custom moment function can have any arbitrary named arguments, however the argument names \verb"R" and \verb"portfolio" will be detected and matched in an efficient manner. +In many cases for constrained optimization problems, one may want to estimate moments for a specific use case or further extend the idea of \code{set.portfolio.moments}. A user defined custom moment function can have any arbitrary named arguments. However, arguments named \code{R} for the asset returns and \code{portfolio} for the portfolio object will be detected automatically and handled in an efficient manner. Because of this, it is strongly encouraged to use \code{R} for the asset returns object and \code{portfolio} for the portfolio object. 
-Here we define a function to compute the covariance matrix using a robust estimate. +The moment function should return a named list object where the elements represent the moments: +\begin{description} + \item[\code{\$mu}]{ first moment; expected returns vector} + \item[\code{\$sigma}]{ second moment; covariance matrix} + \item[\code{\$m3}]{ third moment; coskewness matrix} + \item[\code{\$m4}]{ fourth moment; cokurtosis matrix} +\end{description} + +The lower level optimization functions expect an object with the structure described above. List elements with the names \code{mu}, \code{sigma}, \code{m3}, and\code{m4} are matched automatically and handled in an efficient manner. + +Here we define a function to estimate the covariance matrix using a robust method. <<>>= sigma.robust <- function(R, ...){ out <- list() @@ -123,7 +133,7 @@ opt.sd @ -Here we extract the weights and compute the portfolio standard deviation to verify. +Here we extract the weights and compute the portfolio standard deviation to verify that the the robust estimate of the covariance matrix was used in the optimization. <>= weights <- extractWeights(opt.sd) sigma <- sigma.robust(R)$sigma @@ -133,7 +143,7 @@ @ \section{Custom Objective Functions} -A key feature of \verb"PortfolioAnalytics" is that the name for an objective can be any valid \R function. \verb"PortfolioAnalytics" was designed to be flexible and modular, and custom objective functions are a key example of this. +A key feature of \verb"PortfolioAnalytics" is that the name for an objective can be any valid \verb"R" function. \verb"PortfolioAnalytics" was designed to be flexible and modular, and custom objective functions are a key example of this. Here we define a very simple function to compute annualized standard deviation for monthly data that we will use as an objective function. <<>>= @@ -144,11 +154,16 @@ } @ -The objective function must return a single value for the optimizer to minimize. 
It is strongly encouraged to use the following argument names in the objective function: +A few guidelines should be followed for defining a custom objective function. + +\begin{itemize} + \item The objective function must return a single value for the optimizer to minimize. + \item It is strongly encouraged to use the following argument names in the objective function: \begin{description} \item[\code{R}] {for the asset returns} \item[\code{weights}] {for the portfolio weights} \end{description} +\end{itemize} These argument names are detected automatically and handled in an efficient manner. Any other arguments for the objective function can be for the moments or passed in through the \code{arguments} list in the objective. @@ -226,7 +241,7 @@ @ <<>>= -opt.crra <- optimize.portfolio(R, portf.crra, optimize_method="DEoptim", +opt.crra <- optimize.portfolio(R, crra.portf, optimize_method="DEoptim", search_size=5000, trace=TRUE, traceDE=0, momentFUN="crra.moments") opt.crra Modified: pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.pdf =================================================================== (Binary files differ) From noreply at r-forge.r-project.org Tue Aug 5 12:32:43 2014 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Tue, 5 Aug 2014 12:32:43 +0200 (CEST) Subject: [Returnanalytics-commits] r3498 - in pkg/FactorAnalytics: R man Message-ID: <20140805103243.7CA821876CB@r-forge.r-project.org> Author: pragnya Date: 2014-08-05 12:32:43 +0200 (Tue, 05 Aug 2014) New Revision: 3498 Modified: pkg/FactorAnalytics/R/fitTsfm.R pkg/FactorAnalytics/R/fitTsfm.control.R pkg/FactorAnalytics/R/fmEsDecomp.R pkg/FactorAnalytics/R/paFm.r pkg/FactorAnalytics/R/plot.tsfm.r pkg/FactorAnalytics/R/summary.tsfm.r pkg/FactorAnalytics/man/CommonFactors.Rd pkg/FactorAnalytics/man/fitTsfm.Rd pkg/FactorAnalytics/man/fitTsfm.control.Rd pkg/FactorAnalytics/man/paFm.Rd pkg/FactorAnalytics/man/plot.tsfm.Rd pkg/FactorAnalytics/man/summary.tsfm.Rd 
Log: Corrected typos Modified: pkg/FactorAnalytics/R/fitTsfm.R =================================================================== --- pkg/FactorAnalytics/R/fitTsfm.R 2014-08-05 01:32:33 UTC (rev 3497) +++ pkg/FactorAnalytics/R/fitTsfm.R 2014-08-05 10:32:43 UTC (rev 3498) @@ -487,7 +487,6 @@ #' @param object a fit object of class \code{tsfm} which is returned by #' \code{fitTsfm} -#' @param ... further arguments passed to or from other methods #' @rdname fitTsfm #' @method coef tsfm Modified: pkg/FactorAnalytics/R/fitTsfm.control.R =================================================================== --- pkg/FactorAnalytics/R/fitTsfm.control.R 2014-08-05 01:32:33 UTC (rev 3497) +++ pkg/FactorAnalytics/R/fitTsfm.control.R 2014-08-05 10:32:43 UTC (rev 3498) @@ -108,10 +108,8 @@ #' @author Sangeetha Srinivasan #' #' @references -#' \enumerate{ -#' \item Efron, Bradley, Trevor Hastie, Iain Johnstone, and Robert Tibshirani. -#' "Least angle regression." The Annals of statistics 32, no.2 (2004): 407-499. -#' } +#' Efron, B., Hastie, T., Johnstone, I., & Tibshirani, R. (2004). Least angle +#' regression. The Annals of statistics, 32(2), 407-499. #' #' @seealso \code{\link{fitTsfm}}, \code{\link[stats]{lm}}, #' \code{\link[robust]{lmRob}}, \code{\link[stats]{step}}, Modified: pkg/FactorAnalytics/R/fmEsDecomp.R =================================================================== --- pkg/FactorAnalytics/R/fmEsDecomp.R 2014-08-05 01:32:33 UTC (rev 3497) +++ pkg/FactorAnalytics/R/fmEsDecomp.R 2014-08-05 10:32:43 UTC (rev 3498) @@ -93,8 +93,8 @@ #' @export fmEsDecomp.tsfm <- function(object, p=0.95, - method=c("modified","gaussian","historical", - "kernel"), invert=FALSE, ...) { + method=c("modified","gaussian","historical", + "kernel"), invert=FALSE, ...) 
{ # set defaults and check input vailidity method = method[1] @@ -110,7 +110,8 @@ # factor returns and residuals data factors.xts <- object$data[,object$factor.names] - resid.xts <- t(t(residuals(object))/object$resid.sd) + resid.xts <- as.xts(t(t(residuals(object))/object$resid.sd)) + time(resid.xts) <- as.Date(time(resid.xts)) # initialize lists and matrices N <- length(object$asset.names) @@ -167,59 +168,4 @@ ES.fm=ES.fm, mES=mES, cES=cES, pcES=pcES) return(fm.ES.decomp) - } - - - - - -factorModelEsDecomposition <- - function(Data, beta.vec, sig.e, tail.prob = 0.05, - VaR.method=c("modified", "gaussian", "historical", "kernel")) { - - Data = as.matrix(Data) - ncol.Data = ncol(Data) - if(is.matrix(beta.vec)) { - beta.names = c(rownames(beta.vec), "residual") - } else if(is.vector(beta.vec)) { - beta.names = c(names(beta.vec), "residual") - } else { - stop("beta.vec is not an n x 1 matrix or a vector") - } - beta.names = c(names(beta.vec), "residual") - beta.star.vec = c(beta.vec, sig.e) - names(beta.star.vec) = beta.names - - ## epsilon is calculated in the sense of minimizing mean square error by Silverman 1986 - epi <- 2.575*sd(Data[,1]) * (nrow(Data)^(-1/5)) - VaR.fm = as.numeric(VaR(Data[, 1], p=(1-tail.prob), method=VaR.method)) - idx = which(Data[, 1] <= VaR.fm + epi & Data[,1] >= VaR.fm - epi) - - - - ES.fm = -mean(Data[idx, 1]) - - ## - ## compute marginal contribution to ES - ## - ## compute marginal ES as expected value of factor return given fund - ## return is less than or equal to VaR - mcES.fm = -as.matrix(colMeans(Data[idx, -1])) - - ## compute correction factor so that sum of weighted marginal ES adds to portfolio ES - cf = as.numeric( ES.fm / sum(mcES.fm*beta.star.vec) ) - mcES.fm = cf*mcES.fm - cES.fm = mcES.fm*beta.star.vec - pcES.fm = cES.fm/ES.fm - colnames(mcES.fm) = "MCES" - colnames(cES.fm) = "CES" - colnames(pcES.fm) = "PCES" - ans = list(VaR.fm = -VaR.fm, - n.exceed = length(idx), - idx.exceed = idx, - ES.fm = ES.fm, - mES.fm = 
t(mcES.fm), - cES.fm = t(cES.fm), - pcES.fm = t(pcES.fm)) - return(ans) - } +} Modified: pkg/FactorAnalytics/R/paFm.r =================================================================== --- pkg/FactorAnalytics/R/paFm.r 2014-08-05 01:32:33 UTC (rev 3497) +++ pkg/FactorAnalytics/R/paFm.r 2014-08-05 10:32:43 UTC (rev 3498) @@ -1,34 +1,38 @@ #' @title Compute cumulative mean attribution for factor models #' #' @description Decompose total returns into returns attributed to factors and -#' specific returns. An object of class \code{"pafm"} is generated and generic -#' functions such as \code{plot}, \code{summary} and \code{print} can be used. +#' specific returns. An object of class \code{"pafm"} is generated, with +#' methods for generic functions \code{plot}, \code{summary} and \code{print}. #' #' @details Total returns can be decomposed into returns attributed to factors -#' and specific returns. \cr \eqn{R_t = \sum b_j * f_jt + u_t,t=1...T} \cr -#' \code{b_j} is exposure to factor j and \code{f_jt} is factor j. -#' The returns attributed to factor j is \code{b_j * f_jt} and specific -#' returns is \code{u_t}. +#' and specific returns. \cr \eqn{R_t = \sum b_k * f_kt + u_t, t=1...T} \cr +#' \code{b_k} is exposure to factor k and \code{f_kt} is factor k's return at +#' time t. The return attributed to factor k is \code{b_k * f_kt} and specific +#' return is \code{u_t}. #' #' @param fit an object of class \code{tsfm}, \code{sfm} or \code{ffm}. #' @param ... other arguments/controls passed to the fit methods. 
#' #' @return The returned object is of class \code{"pafm"} containing -#' \describe{ -#' \item{cum.ret.attr.f}{N X J matrix of cumulative return attributed to +#' \item{cum.ret.attr.f}{N X K matrix of cumulative return attributed to #' factors.} -#' \item{cum.spec.ret}{1 x N vector of cumulative specific returns.} +#' \item{cum.spec.ret}{length-N vector of cumulative specific returns.} #' \item{attr.list}{list of time series of attributed returns for every #' portfolio.} -#' } #' #' @author Yi-An Chen and Sangeetha Srinivasan #' -#' @references Grinold, R. and Kahn, R. \emph{Active Portfolio Management}, +#' @references Grinold, R. and Kahn, R. (1999) Active Portfolio Management: A +#' Quantitative Approach for Producing Superior Returns and Controlling Risk. #' McGraw-Hill. #' -#' @seealso \code{\link{fitTsfm}}, \code{\link{fitSfm}}, \code{\link{fitFfm}} +#' @seealso \code{\link{fitTsfm}}, \code{\link{fitSfm}}, \code{\link{fitFfm}} +#' for the factor model fitting functions. #' +#' The \code{pafm} methods for generic functions: +#' \code{\link{plot.pafm}}, \code{\link{print.pafm}} and +#' \code{\link{summary.pafm}}. +#' #' @examples #' data(managers) #' fit <- fitTsfm(asset.names=colnames(managers[, (1:6)]), @@ -41,8 +45,9 @@ paFm <- function(fit, ...) 
{ - if (class(fit)!="tsfm" & class(fit)!="ffm" & class(fit)!="sfm") { - stop("Class has to be one of 'tsfm', 'ffm' or 'sfm'.") + # check input object validity + if (!inherits(fit, c("tsfm", "sfm", "ffm"))) { + stop("Invalid argument: fit should be of class 'tsfm', 'sfm' or 'ffm'.") } # TSFM chunk @@ -52,18 +57,18 @@ # return attributed to factors cum.attr.ret <- fit$beta cum.spec.ret <- fit$alpha - factorNames = fit$factor.names - fundNames = fit$asset.names + factorNames <- fit$factor.names + fundNames <- fit$asset.names attr.list <- list() for (k in fundNames) { - fit.lm = fit$asset.fit[[k]] + fit.lm <- fit$asset.fit[[k]] ## extract information from lm, lmRob or lars object reg.xts <- na.omit(fit$data[, c(k, factorNames)]) dates <- as.Date(index(reg.xts)) - actual.xts = xts(fit.lm$model[1], dates) + actual.xts <- xts(fit.lm$model[1], dates) # attributed returns # active portfolio management p.512 17A.9 # top-down method @@ -83,16 +88,16 @@ xts(as.matrix(fit.lm$model[i])%*%as.matrix(fit.lm$coef[i]), dates) cum.attr.ret[k, i] <- cum.ret - - Return.cumulative(actual.xts - attr.ret.xts) + Return.cumulative(actual.xts-attr.ret.xts) attr.ret.xts.all <- merge(attr.ret.xts.all, attr.ret.xts) } } # specific returns spec.ret.xts <- actual.xts - - xts(as.matrix(fit.lm$model[, factorNames])%*%as.matrix(fit.lm$coef[-1]), + xts(as.matrix(fit.lm$model[,factorNames])%*%as.matrix(fit.lm$coef[-1]), dates) - cum.spec.ret[k,1] <- cum.ret - Return.cumulative(actual.xts - spec.ret.xts) + cum.spec.ret[k,1] <- cum.ret - Return.cumulative(actual.xts-spec.ret.xts) attr.list[[k]] <- merge(attr.ret.xts.all, spec.ret.xts) colnames(attr.list[[k]]) <- c(factorNames, "specific.returns") } @@ -155,8 +160,8 @@ # return attributed to factors cum.attr.ret <- t(fit$loadings) cum.spec.ret <- fit$r2 - factorNames = rownames(fit$loadings) - fundNames = colnames(fit$loadings) + factorNames <- rownames(fit$loadings) + fundNames <- colnames(fit$loadings) data <- checkData(fit$data) # create list for 
attribution attr.list <- list() @@ -165,11 +170,11 @@ if ( dim(fit$asset.ret)[1] > dim(fit$asset.ret)[2] ) { for (k in fundNames) { - fit.lm = fit$asset.fit[[k]] + fit.lm <- fit$asset.fit[[k]] ## extract information from lm object date <- index(data[, k]) # probably needs more general Date setting - actual.xts = xts(fit.lm$model[1], as.Date(date)) + actual.xts <- xts(fit.lm$model[1], as.Date(date)) # attributed returns # active portfolio management p.512 17A.9 cum.ret <- Return.cumulative(actual.xts) @@ -220,15 +225,16 @@ } } - ans = list(cum.ret.attr.f=cum.attr.ret, cum.spec.ret=cum.spec.ret, + ans <- list(cum.ret.attr.f=cum.attr.ret, cum.spec.ret=cum.spec.ret, attr.list=attr.list) - class(ans) = "pafm" + class(ans) <- "pafm" return(ans) } # If benchmark is provided, active return attribution will be calculated. # active returns = total returns - benchmark returns. Specifically, -# \eqn{R_t^A = \sum_j b_{j}^A * f_{jt} + u_t^A},t=1..T, \eqn{b_{j}^A} is \emph{active exposure} to factor j -# and \eqn{f_{jt}} is factor j. The active returns attributed to factor j is -# \eqn{b_{j}^A * f_{jt}} specific returns is \eqn{u_t^A} +# \eqn{R_t^A = \sum_j b_{j}^A * f_{jt} + u_t^A},t=1..T, \eqn{b_{j}^A} is +# \emph{active exposure} to factor j and \eqn{f_{jt}} is factor j. The active +# returns attributed to factor j is \eqn{b_{j}^A * f_{jt}} specific returns is +# \eqn{u_t^A} Modified: pkg/FactorAnalytics/R/plot.tsfm.r =================================================================== --- pkg/FactorAnalytics/R/plot.tsfm.r 2014-08-05 01:32:33 UTC (rev 3497) +++ pkg/FactorAnalytics/R/plot.tsfm.r 2014-08-05 10:32:43 UTC (rev 3498) @@ -68,6 +68,9 @@ #' to suppress the legend. #' @param las one of {0, 1, 2, 3} to set the direction of axis labels, same as #' in \code{plot}. Default here is 1. +#' @param horiz a logical value. If \code{FALSE}, the bars are drawn vertically +#' with the first bar to the left. 
If \code{TRUE}, the bars are drawn +#' horizontally with the first at the bottom. Default here is \code{TRUE}. #' @param VaR.method a method for computing VaR; one of "modified", "gaussian", #' "historical" or "kernel". VaR is computed using #' \code{\link[PerformanceAnalytics]{VaR}}. Default is "historical". @@ -91,9 +94,8 @@ #' \code{\link[graphics]{barplot}} and #' \code{\link[corrplot]{corrplot}} for plotting methods used. #' -#' \code{\link{factorModelSDDecomposition}}, -#' \code{\link{factorModelEsDecomposition}}, -#' \code{\link{factorModelVaRDecomposition}} for factor model risk measures. +#' \code{\link{fmSdDecomp}}, \code{\link{fmEsDecomp}}, +#' \code{\link{fmVaRDecomp}} for factor model risk measures. #' #' @examples #' @@ -105,7 +107,7 @@ #' #' # plot the factor betas of 1st 4 assets fitted above. #' plot(fit.macro, max.show=4, which.plot.group=2, loop=FALSE) -#' # plot the factor model return correlation, order = hierarchical clustering +#' # plot factor model return correlation; angular order of the eigenvectors #' plot(fit.macro, which.plot.group=7, loop=FALSE, #' order="AOE", method="ellipse", tl.pos = "d") #' @@ -121,7 +123,7 @@ plot.tsfm <- function(x, which.plot.group=NULL, max.show=6, plot.single=FALSE, asset.name, which.plot.single=NULL, colorset=(1:12), - legend.loc="topleft", las=1, + legend.loc="topleft", las=1, horiz=TRUE, VaR.method="historical", loop=TRUE, ...) { if (plot.single==TRUE) { @@ -311,10 +313,10 @@ switch(which.plot.group, "1L" = { ## Factor model coefficients: Alpha - # ylab="Intercept estimate" + # xlab="Intercept estimate", ylab="Assets" barplot(coef(x)[,1], main="Factor model Alpha (Intercept)", - xlab="Assets", col="darkblue", las=las, ...) - abline(h=0, lwd=1, lty=1, col=1) + col="darkblue", las=las, horiz=horiz, ...) 
+ abline(v=0, lwd=1, lty=1, col=1) }, "2L" = { ## Factor model coefficients: Betas @@ -326,10 +328,11 @@ } par(mfrow=c(ceiling(k/2),2)) for (i in 2:(k+1)) { - main=paste("Factor Betas:", colnames(coef(x))[i]) - barplot(coef(x)[,i], main=main, col="darkblue", xlab="Assets", - ylab="Coefficient estimate", las=las, ...) - abline(h=0, lwd=1, lty=1, col=1) + main=paste(colnames(coef(x))[i], "factor Betas") + # xlab="Beta estimate", ylab="Assets" + barplot(coef(x)[,i], main=main, col="darkblue", las=las, + horiz=horiz, ...) + abline(v=0, lwd=1, lty=1, col=1) } par(mfrow=c(1,1)) }, @@ -337,8 +340,8 @@ ## Actual and Fitted asset returns n <- length(x$asset.names) if (n > max.show) { - cat(paste("Displaying only the first", max.show,"assets, since the - number of assets > 'max.show' =", max.show)) + cat(paste("Displaying only the first", max.show, + "assets, since the number of assets > 'max.show'")) n <- max.show } par(mfrow=c(ceiling(n/2),2)) @@ -354,17 +357,17 @@ }, "4L" ={ ## R-squared + # ylab="Assets", xlab="R-squared" barplot(x$r2, main="R-squared values for factor model fits", - xlab="Assets", ylab="R-squared", col="darkblue", - las=las, ...) - abline(h=0, lwd=1, lty=1, col=1) + col="darkblue", las=las, horiz=horiz, ...) + abline(v=0, lwd=1, lty=1, col=1) }, "5L" = { ## Residual Volatility - barplot(x$resid.sd, xlab="Assets", ylab="Residual volatility", - main="Residual volatility for factor model fits", - col="darkblue", las=las, ...) - abline(h=0, lwd=1, lty=1, col=1) + # ylab="Assets", xlab="Residual volatility" + barplot(x$resid.sd, col="darkblue", las=las, horiz=horiz, + main="Residual volatility for factor model fits", ...) + abline(v=0, lwd=1, lty=1, col=1) }, "6L" = { ## Factor Model Residual Correlation @@ -380,24 +383,24 @@ ## Factor Contribution to SD cSd.fm <- fmSdDecomp(x)$cSd barplot(t(cSd.fm), main="Factor Contributions to SD", - xlab="Assets", legend.text=T, args.legend=legend.loc, - col=colorset, ...) 
+ legend.text=T, args.legend=legend.loc, col=colorset, + horiz=horiz, ...) mtext("(pairwise complete obs)", line=0.5) }, "9L"={ ## Factor Contribution to ES - cES.fm <- fmVaRDecomp(x)$cES + cES.fm <- fmEsDecomp(x)$cES barplot(t(cES.fm), main="Factor Contributions to ES", - xlab="Assets", legend.text=T, args.legend=legend.loc, - col=colorset, ...) + legend.text=T, args.legend=legend.loc, col=colorset, + horiz=horiz, ...) mtext("(pairwise complete obs)", line=0.5) }, "10L" ={ ## Factor Contribution to VaR cVaR.fm <- fmVaRDecomp(x)$cVaR barplot(t(cVaR.fm), main="Factor Contributions to VaR", - xlab="Assets", legend.text=T, args.legend=legend.loc, - col=colorset, ...) + legend.text=T, args.legend=legend.loc, col=colorset, + horiz=horiz, ...) mtext("(pairwise complete obs)", line=0.5) }, invisible() Modified: pkg/FactorAnalytics/R/summary.tsfm.r =================================================================== --- pkg/FactorAnalytics/R/summary.tsfm.r 2014-08-05 01:32:33 UTC (rev 3497) +++ pkg/FactorAnalytics/R/summary.tsfm.r 2014-08-05 10:32:43 UTC (rev 3498) @@ -9,8 +9,8 @@ #' heteroskedasticity-consistent (HC) or #' heteroskedasticity-autocorrelation-consistent (HAC) standard errors and #' t-statistics using \code{\link[lmtest]{coeftest}}. This option is meaningful -#' only if \code{fit.method = "OLS" or "DLS"}. This option is currently not -#' available for \code{variable.selection = "lar" or "lasso"}. +#' only if \code{fit.method = "OLS" or "DLS"}. HC/HAC errors are currently not +#' available for \code{variable.selection = "lars"}. #' #' @param object an object of class \code{tsfm} returned by \code{fitTsfm}. 
#' @param se.type one of "Default", "HC" or "HAC"; option for computing @@ -61,6 +61,7 @@ if (!inherits(object, "tsfm")) { stop("Invalid 'tsfm' object") } + # note: fit.method=NULL for "lars" objects if (!(object$fit.method %in% c("OLS","DLS")) && se.type!="Default") { stop("Invalid argument: HC/HAC standard errors are applicable only if fit.method = 'OLS' or 'DLS'") Modified: pkg/FactorAnalytics/man/CommonFactors.Rd =================================================================== --- pkg/FactorAnalytics/man/CommonFactors.Rd 2014-08-05 01:32:33 UTC (rev 3497) +++ pkg/FactorAnalytics/man/CommonFactors.Rd 2014-08-05 10:32:43 UTC (rev 3498) @@ -1,39 +1,39 @@ -\name{CommonFactors} -\alias{CommonFactors} -\alias{factors.M} -\alias{factors.Q} -\docType{data} -\title{ -Factor set of several commonly used factors -} -\description{ -Collection of common factors as both monthly and quarterly time series -\itemize{ -\item SP500: S&P 500 composite index returns. (Yahoo) -\item GS10TR: US Treasury 10y yields total returns from the yeild of the 10 year constant maturity. (FRED) -\item USD.Index: Trade Weighted U.S. Dollar Index: Major Currencies - TWEXMMTH. (FRED) -\item Term.Spread: Yield spread of Merrill Lynch High-Yield Corporate Master II Index minus 10-year Treasury. (FRED) -\item TED.Spread: 3-Month Treasury Bill: Secondary Market Rate(TB3MS) - 3-Month Eurodollar Deposit Rate (MED3). (FRED) -\item dVIX: First difference of the end-of-month value of the CBOE Volatility Index (VIX). (Yahoo) -\item OILPRICE: Monthly returns of spot price of West Texas Intermediate. (FRED) -\item TB3MS: 3-Month Treasury Bill Secondary Market Rate (TB3MS). 
(FRED) -} - -} -\usage{data(CommonFactors)} -\format{ - xts time series object - \describe{ - \item{\code{factor.M}}{Jan-1997 through May-2014} - \item{\code{factor.Q}}{Q1-1997 through Q1-2014} - } -} -\source{ -\itemize{ -\item Federal Reserve Economic Data (FRED): \url{http://research.stlouisfed.org/fred2/} -\item Yahoo Finance: \url{http://finance.yahoo.com/} -} -} - - - +\name{CommonFactors} +\alias{CommonFactors} +\alias{factors.M} +\alias{factors.Q} +\docType{data} +\title{ +Factor set of several commonly used factors +} +\description{ +Collection of common factors as both monthly and quarterly time series +\itemize{ +\item SP500: S&P 500 composite index returns. (Yahoo) +\item GS10TR: US Treasury 10y yields total returns from the yield of the 10 year constant maturity. (FRED) +\item USD.Index: Trade Weighted U.S. Dollar Index: Major Currencies - TWEXMMTH. (FRED) +\item Term.Spread: Yield spread of Merrill Lynch High-Yield Corporate Master II Index minus 10-year Treasury. (FRED) +\item TED.Spread: 3-Month Treasury Bill: Secondary Market Rate(TB3MS) - 3-Month Eurodollar Deposit Rate (MED3). (FRED) +\item dVIX: First difference of the end-of-month value of the CBOE Volatility Index (VIX). (Yahoo) +\item OILPRICE: Monthly returns of spot price of West Texas Intermediate. (FRED) +\item TB3MS: 3-Month Treasury Bill Secondary Market Rate (TB3MS). 
(FRED) +} + +} +\usage{data(CommonFactors)} +\format{ + xts time series object + \describe{ + \item{\code{factors.M}}{Jan-1997 through May-2014} + \item{\code{factors.Q}}{Q1-1997 through Q1-2014} + } +} +\source{ +\itemize{ +\item Federal Reserve Economic Data (FRED): \url{http://research.stlouisfed.org/fred2/} +\item Yahoo Finance: \url{http://finance.yahoo.com/} +} +} + + + Modified: pkg/FactorAnalytics/man/fitTsfm.Rd =================================================================== --- pkg/FactorAnalytics/man/fitTsfm.Rd 2014-08-05 01:32:33 UTC (rev 3497) +++ pkg/FactorAnalytics/man/fitTsfm.Rd 2014-08-05 10:32:43 UTC (rev 3498) @@ -52,8 +52,6 @@ \item{object}{a fit object of class \code{tsfm} which is returned by \code{fitTsfm}} - -\item{...}{further arguments passed to or from other methods} } \value{ fitTsfm returns an object of class \code{tsfm} for which Modified: pkg/FactorAnalytics/man/fitTsfm.control.Rd =================================================================== --- pkg/FactorAnalytics/man/fitTsfm.control.Rd 2014-08-05 01:32:33 UTC (rev 3497) +++ pkg/FactorAnalytics/man/fitTsfm.control.Rd 2014-08-05 10:32:43 UTC (rev 3498) @@ -157,11 +157,9 @@ Sangeetha Srinivasan } \references{ -\enumerate{ -\item Efron, Bradley, Trevor Hastie, Iain Johnstone, and Robert Tibshirani. -"Least angle regression." The Annals of statistics 32, no.2 (2004): 407-499. +Efron, B., Hastie, T., Johnstone, I., & Tibshirani, R. (2004). Least angle +regression. The Annals of statistics, 32(2), 407-499. 
} -} \seealso{ \code{\link{fitTsfm}}, \code{\link[stats]{lm}}, \code{\link[robust]{lmRob}}, \code{\link[stats]{step}}, Modified: pkg/FactorAnalytics/man/paFm.Rd =================================================================== --- pkg/FactorAnalytics/man/paFm.Rd 2014-08-05 01:32:33 UTC (rev 3497) +++ pkg/FactorAnalytics/man/paFm.Rd 2014-08-05 10:32:43 UTC (rev 3498) @@ -12,25 +12,23 @@ } \value{ The returned object is of class \code{"pafm"} containing -\describe{ -\item{cum.ret.attr.f}{N X J matrix of cumulative return attributed to +\item{cum.ret.attr.f}{N X K matrix of cumulative return attributed to factors.} -\item{cum.spec.ret}{1 x N vector of cumulative specific returns.} +\item{cum.spec.ret}{length-N vector of cumulative specific returns.} \item{attr.list}{list of time series of attributed returns for every portfolio.} } -} \description{ Decompose total returns into returns attributed to factors and -specific returns. An object of class \code{"pafm"} is generated and generic -functions such as \code{plot}, \code{summary} and \code{print} can be used. +specific returns. An object of class \code{"pafm"} is generated, with +methods for generic functions \code{plot}, \code{summary} and \code{print}. } \details{ Total returns can be decomposed into returns attributed to factors -and specific returns. \cr \eqn{R_t = \sum b_j * f_jt + u_t,t=1...T} \cr -\code{b_j} is exposure to factor j and \code{f_jt} is factor j. -The returns attributed to factor j is \code{b_j * f_jt} and specific -returns is \code{u_t}. +and specific returns. \cr \eqn{R_t = \sum b_k * f_kt + u_t, t=1...T} \cr +\code{b_k} is exposure to factor k and \code{f_kt} is factor k's return at +time t. The return attributed to factor k is \code{b_k * f_kt} and specific +return is \code{u_t}. } \examples{ data(managers) @@ -43,10 +41,16 @@ Yi-An Chen and Sangeetha Srinivasan } \references{ -Grinold, R. and Kahn, R. \emph{Active Portfolio Management}, +Grinold, R. and Kahn, R. 
(1999) Active Portfolio Management: A +Quantitative Approach for Producing Superior Returns and Controlling Risk. McGraw-Hill. } \seealso{ \code{\link{fitTsfm}}, \code{\link{fitSfm}}, \code{\link{fitFfm}} +for the factor model fitting functions. + +The \code{pafm} methods for generic functions: +\code{\link{plot.pafm}}, \code{\link{print.pafm}} and +\code{\link{summary.pafm}}. } Modified: pkg/FactorAnalytics/man/plot.tsfm.Rd =================================================================== --- pkg/FactorAnalytics/man/plot.tsfm.Rd 2014-08-05 01:32:33 UTC (rev 3497) +++ pkg/FactorAnalytics/man/plot.tsfm.Rd 2014-08-05 10:32:43 UTC (rev 3498) @@ -5,7 +5,7 @@ \usage{ \method{plot}{tsfm}(x, which.plot.group = NULL, max.show = 6, plot.single = FALSE, asset.name, which.plot.single = NULL, - colorset = (1:12), legend.loc = "topleft", las = 1, + colorset = (1:12), legend.loc = "topleft", las = 1, horiz = TRUE, VaR.method = "historical", loop = TRUE, ...) } \arguments{ @@ -63,6 +63,10 @@ \item{las}{one of {0, 1, 2, 3} to set the direction of axis labels, same as in \code{plot}. Default here is 1.} +\item{horiz}{a logical value. If \code{FALSE}, the bars are drawn vertically +with the first bar to the left. If \code{TRUE}, the bars are drawn +horizontally with the first at the bottom. Default here is \code{TRUE}.} + \item{VaR.method}{a method for computing VaR; one of "modified", "gaussian", "historical" or "kernel". VaR is computed using \code{\link[PerformanceAnalytics]{VaR}}. Default is "historical".} @@ -105,7 +109,7 @@ # plot the factor betas of 1st 4 assets fitted above. 
plot(fit.macro, max.show=4, which.plot.group=2, loop=FALSE) -# plot the factor model return correlation, order = hierarchical clustering +# plot factor model return correlation; angular order of the eigenvectors plot(fit.macro, which.plot.group=7, loop=FALSE, order="AOE", method="ellipse", tl.pos = "d") @@ -134,8 +138,7 @@ \code{\link[graphics]{barplot}} and \code{\link[corrplot]{corrplot}} for plotting methods used. -\code{\link{factorModelSDDecomposition}}, -\code{\link{factorModelEsDecomposition}}, -\code{\link{factorModelVaRDecomposition}} for factor model risk measures. +\code{\link{fmSdDecomp}}, \code{\link{fmEsDecomp}}, +\code{\link{fmVaRDecomp}} for factor model risk measures. } Modified: pkg/FactorAnalytics/man/summary.tsfm.Rd =================================================================== --- pkg/FactorAnalytics/man/summary.tsfm.Rd 2014-08-05 01:32:33 UTC (rev 3497) +++ pkg/FactorAnalytics/man/summary.tsfm.Rd 2014-08-05 10:32:43 UTC (rev 3498) @@ -44,8 +44,8 @@ heteroskedasticity-consistent (HC) or heteroskedasticity-autocorrelation-consistent (HAC) standard errors and t-statistics using \code{\link[lmtest]{coeftest}}. This option is meaningful -only if \code{fit.method = "OLS" or "DLS"}. This option is currently not -available for \code{variable.selection = "lar" or "lasso"}. +only if \code{fit.method = "OLS" or "DLS"}. HC/HAC errors are currently not +available for \code{variable.selection = "lars"}. 
} \note{ For a more detailed printed summary for each asset, refer to From noreply at r-forge.r-project.org Tue Aug 5 20:10:52 2014 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Tue, 5 Aug 2014 20:10:52 +0200 (CEST) Subject: [Returnanalytics-commits] r3499 - in pkg/PortfolioAnalytics: R sandbox Message-ID: <20140805181052.AEE3A184611@r-forge.r-project.org> Author: rossbennett34 Date: 2014-08-05 20:10:52 +0200 (Tue, 05 Aug 2014) New Revision: 3499 Added: pkg/PortfolioAnalytics/sandbox/ptcQP.R Modified: pkg/PortfolioAnalytics/R/optFUN.R Log: revising proportional transaction costs QP formulation Modified: pkg/PortfolioAnalytics/R/optFUN.R =================================================================== --- pkg/PortfolioAnalytics/R/optFUN.R 2014-08-05 10:32:43 UTC (rev 3498) +++ pkg/PortfolioAnalytics/R/optFUN.R 2014-08-05 18:10:52 UTC (rev 3499) @@ -840,9 +840,13 @@ # Check for cleaned returns in moments if(!is.null(moments$cleanR)) R <- moments$cleanR + # proportional transaction costs + ptc <- constraints$ptc + # Modify the returns matrix. 
This is done because there are 3 sets of # variables 1) w.initial, 2) w.buy, and 3) w.sell - returns <- cbind(R, R, R) + R0 <- matrix(0, ncol=ncol(R), nrow=nrow(R)) + returns <- cbind(R, R0, R0) V <- cov(returns) # number of assets @@ -851,13 +855,7 @@ # initial weights for solver if(is.null(init_weights)) init_weights <- rep(1/ N, N) - # Amat for initial weights - Amat <- cbind(diag(N), matrix(0, nrow=N, ncol=N*2)) - rhs <- init_weights - dir <- rep("==", N) - meq <- N - - # check for a target return constraint + # Check for a target return constraint if(!is.na(target)) { # If var is the only objective specified, then moments$mean won't be calculated if(all(moments$mean==0)){ @@ -865,25 +863,47 @@ } else { tmp_means <- moments$mean } - Amat <- rbind(Amat, rep((1+tmp_means), 3)) - dir <- c(dir, "==") - rhs <- c(rhs, (1+target)) - meq <- N + 1 + } else { + tmp_means <- rep(0, N) + target <- 0 } + Amat <- c(tmp_means, rep(0, 2 * N)) + dir <- "==" + rhs <- 1 + target + meq <- 1 - # Amat for positive weights for w.buy and w.sell - weights.positive <- rbind(matrix(0,ncol=2*N,nrow=N),diag(2*N)) - temp.index <- (N*3-N+1):(N*3) - weights.positive[temp.index,] <- -1*weights.positive[temp.index,] - Amat <- rbind(Amat, t(weights.positive)) - rhs <- c(rhs, rep(0, 2*N)) + # separate the weights into w, w^+, and w^- + # w - w^+ + w^- = 0 + Amat <- rbind(Amat, cbind(diag(N), -diag(N), diag(N))) + rhs <- c(rhs, init_weights) + dir <- c(dir, rep("==", N)) + meq <- N + 1 - # Amat for full investment constraint - ptc <- constraints$ptc - Amat <- rbind(Amat, rbind(c(rep(1, N), (1+ptc), (1-ptc)), -c(rep(1, N), (1+ptc), (1-ptc)))) - rhs <- c(rhs, constraints$min_sum, -constraints$max_sum) - dir <- c(dir, ">=", ">=") + # w+ >= 0 + Amat <- rbind(Amat, cbind(diag(0, N), diag(N), diag(0, N))) + rhs <- c(rhs, rep(0, N)) + dir <- c(dir, rep(">=", N)) + # w- >= 0 + Amat <- rbind(Amat, cbind(diag(0, N), diag(0, N), diag(N))) + rhs <- c(rhs, rep(0, N)) + dir <- c(dir, rep(">=", N)) + + # 1^T w + 
tcb^T w^+ + tcs^T w^- >= min_sum + Amat <- rbind(Amat, c(rep(1, N), ptc, ptc)) + rhs <- c(rhs, constraints$min_sum) + dir <- c(dir, ">=") + + # 1^T w + tcb^T w^+ + tcs^T w^- <= max_sum + Amat <- rbind(Amat, c(rep(-1, N), -ptc, -ptc)) + rhs <- c(rhs, -constraints$max_sum) + dir <- c(dir, ">=") + + # -(1 + tcb)^T w^+ + (1 - tcs)^T w^- >= 0 + Amat <- rbind(Amat, c(rep(0, N), -(1 + ptc), (1 - ptc))) + rhs <- c(rhs, 0) + dir <- c(dir, ">=") + # Amat for lower box constraints Amat <- rbind(Amat, cbind(diag(N), diag(N), diag(N))) rhs <- c(rhs, constraints$min) @@ -917,16 +937,14 @@ dir <- c(dir, rep(">=", 2 * nrow(t.B))) rhs <- c(rhs, constraints$lower, -constraints$upper) } + d <- c(-tmp_means, rep(0, 2 * N)) - d <- rep(-moments$mean, 3) - - # Remove the rows of Amat and elements of rhs.vec where rhs is Inf or -Inf + # Remove the rows of Amat and elements of rhs where rhs is Inf or -Inf Amat <- Amat[!is.infinite(rhs), ] rhs <- rhs[!is.infinite(rhs)] dir <- dir[!is.infinite(rhs)] - ROI_objective <- Q_objective(Q=make.positive.definite(2*lambda*V), - L=rep(-moments$mean, 3)) + ROI_objective <- Q_objective(Q=make.positive.definite(2*lambda*V), L=d) opt.prob <- OP(objective=ROI_objective, constraints=L_constraint(L=Amat, dir=dir, rhs=rhs)) @@ -934,12 +952,7 @@ if(inherits(roi.result, "try-error")) stop(paste("No solution found:", roi.result)) wts <- roi.result$solution - w.buy <- roi.result$solution[(N+1):(2*N)] - w.sell <- roi.result$solution[(2*N+1):(3*N)] - w.total <- init_weights + w.buy + w.sell - wts.final <- wts[(1:N)] + wts[(1+N):(2*N)] + wts[(2*N+1):(3*N)] - - weights <- wts.final + weights <- wts[1:N] names(weights) <- colnames(R) out <- list() out$weights <- weights Added: pkg/PortfolioAnalytics/sandbox/ptcQP.R =================================================================== --- pkg/PortfolioAnalytics/sandbox/ptcQP.R (rev 0) +++ pkg/PortfolioAnalytics/sandbox/ptcQP.R 2014-08-05 18:10:52 UTC (rev 3499) @@ -0,0 +1,96 @@ + +# proportional transaction costs minimum 
variance QP + +library(PortfolioAnalytics) +library(corpcor) +library(quadprog) +library(ROI) +library(ROI.plugin.quadprog) + +data(edhec) +R <- edhec[, 1:4] + +N <- ncol(R) +mu <- colMeans(R) +mu_target <- median(mu) +w_init <- rep(1 / N, N) +tcb <- tcs <- rep(0.01, N) +min_sum <- 0.99 +max_sum <- 1.01 +min_box <- rep(0, N) +max_box <- rep(1, N) +lambda <- 1 + +R0 <- matrix(0, ncol=ncol(R), nrow=nrow(R)) +returns <- cbind(R, R0, R0) +V <- corpcor::make.positive.definite(cov(returns)) + +Amat <- matrix(c(1 + mu, rep(0, 2 * N)), nrow=1) +rhs <- 1 + mu_target +dir <- "==" + +# separate the weights into w, w+, and w- +# w - w+ + w- = 0 +Amat <- rbind(Amat, cbind(diag(N), -diag(N), diag(N))) +rhs <- c(rhs, w_init) +dir <- c(dir, rep("==", N)) +meq <- N + 1 + +# w+ >= 0 +Amat <- rbind(Amat, cbind(diag(0, N), diag(N), diag(0, N))) +rhs <- c(rhs, rep(0, N)) +dir <- c(dir, rep(">=", N)) + +# w- >= 0 +Amat <- rbind(Amat, cbind(diag(0, N), diag(0, N), diag(N))) +rhs <- c(rhs, rep(0, N)) +dir <- c(dir, rep(">=", N)) + +# 1^T w + tcb^T w^+ + tcs^T w^- >= min_sum +Amat <- rbind(Amat, c(rep(1, N), tcb, tcs)) +rhs <- c(rhs, min_sum) +dir <- c(dir, ">=") + +# 1^T w + tcb^T w^+ + tcs^T w^- <= max_sum +Amat <- rbind(Amat, c(rep(-1, N), -tcb, -tcs)) +rhs <- c(rhs, -max_sum) +dir <- c(dir, ">=") + +# -(1 + tcb)^T w^+ + (1 - tcs)^T w^- >= 0 +Amat <- rbind(Amat, c(rep(0, N), -(1 + tcb), (1 - tcs))) +rhs <- c(rhs, 0) +dir <- c(dir, ">=") + +# lower box constraints +Amat <- rbind(Amat, cbind(diag(N), diag(0, N), diag(0, N))) +rhs <- c(rhs, min_box) +dir <- c(dir, rep(">=", N)) + +# upper box constraints +Amat <- rbind(Amat, cbind(-diag(N), diag(0, N), diag(0, N))) +rhs <- c(rhs, -max_box) +dir <- c(dir, rep(">=", N)) + +sol <- solve.QP(Dmat=V, dvec=rep(0, 3*N), Amat=t(Amat), bvec=rhs, meq=meq) +sol + +weights <- sol$solution[1:N] +round(weights, 4) +sum(weights * mu) + +##### ROI ##### +ROI_objective <- Q_objective(Q=make.positive.definite(2*lambda*V), + L=rep(0, N*3)) + +opt.prob <- 
OP(objective=ROI_objective, + constraints=L_constraint(L=Amat, dir=dir, rhs=rhs)) + +roi.result <- ROI_solve(x=opt.prob, solver="quadprog") +wts <- roi.result$solution[1:N] +round(wts, 4) +sum(wts) + +# The quadprog and ROI solution should result in the same solution using the +# same Amat, dir, and rhs objects +all.equal(weights, wts) + From noreply at r-forge.r-project.org Tue Aug 5 20:16:38 2014 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Tue, 5 Aug 2014 20:16:38 +0200 (CEST) Subject: [Returnanalytics-commits] r3500 - pkg/PortfolioAnalytics/R Message-ID: <20140805181638.E0C8B187068@r-forge.r-project.org> Author: rossbennett34 Date: 2014-08-05 20:16:38 +0200 (Tue, 05 Aug 2014) New Revision: 3500 Modified: pkg/PortfolioAnalytics/R/generics.R Log: revised summary method for portfolio objects Modified: pkg/PortfolioAnalytics/R/generics.R =================================================================== --- pkg/PortfolioAnalytics/R/generics.R 2014-08-05 18:10:52 UTC (rev 3499) +++ pkg/PortfolioAnalytics/R/generics.R 2014-08-05 18:16:38 UTC (rev 3500) @@ -296,47 +296,43 @@ #' @method summary portfolio #' @export summary.portfolio <- function(object, ...){ - if(!is.portfolio(object)) stop("object passed in is not of class 'portfolio'") + if(!is.portfolio(x)) stop("object passed in is not of class 'portfolio'") - cat(rep("*", 50) ,"\n", sep="") - cat("PortfolioAnalytics Portfolio Specification Summary", "\n") - cat(rep("*", 50) ,"\n", sep="") + out <- list() - cat("Assets and Initial Weights:\n") - print(object$assets) - cat("\n") + out$category_labels <- object$category_labels + out$weight_seq <- object$weight_seq + out$assets <- object$assets - if(!is.null(object$category_labels)) { - cat("Category Labels:\n") - print(object$category_labels) - } - - if(!is.null(object$weight_seq)) { - cat("weight_seq:\n") - print(summary(object$weight_seq)) - } - - cat("Constraints:\n\n") - for(constraint in object$constraints){ - 
if(constraint$enabled) { - cat(rep("*", 40), "\n", sep="") - cat(constraint$type, "constraint\n") - cat(rep("*", 40), "\n", sep="") - print(constraint) - cat("\n\n") + # constraints + out$enabled_constraints <- list() + out$disabled_constraints <- list() + constraints <- object$constraints + for(i in 1:length(constraints)){ + if(constraints[[i]]$enabled){ + tmp <- length(out$enabled_constraints) + out$enabled_constraints[[tmp+1]] <- constraints[[i]] + } else { + tmp <- length(out$disabled_constraints) + out$disabled_constraints[[tmp+1]] <- constraints[[i]] } } - cat("Objectives:\n\n") - for(objective in object$objectives){ - if(objective$enabled) { - cat(rep("*", 40), "\n", sep="") - cat(class(objective)[1], "\n") - cat(rep("*", 40), "\n", sep="") - print(objective) - cat("\n\n") + # objectives + out$enabled_objectives <- list() + out$disabled_objectives <- list() + objectives <- object$objectives + for(i in 1:length(objectives)){ + if(objectives[[i]]$enabled){ + tmp <- length(out$enabled_objectives) + out$enabled_objectives[[tmp+1]] <- objectives[[i]] + } else { + tmp <- length(out$disabled_objectives) + out$disabled_objectives[[tmp+1]] <- objectives[[i]] } } + class(out) <- "summary.portfolio" + return(out) } #' print method for constraint objects From noreply at r-forge.r-project.org Wed Aug 6 22:34:32 2014 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 6 Aug 2014 22:34:32 +0200 (CEST) Subject: [Returnanalytics-commits] r3501 - pkg/PortfolioAnalytics/vignettes Message-ID: <20140806203433.0A9B518763D@r-forge.r-project.org> Author: rossbennett34 Date: 2014-08-06 22:34:32 +0200 (Wed, 06 Aug 2014) New Revision: 3501 Modified: pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.Rnw pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.pdf Log: minor revisions to custom moments and objective function vignette Modified: pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.Rnw 
=================================================================== --- pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.Rnw 2014-08-05 18:16:38 UTC (rev 3500) +++ pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.Rnw 2014-08-06 20:34:32 UTC (rev 3501) @@ -37,6 +37,7 @@ % \VignetteIndexEntry{Custom Moment and Objective Functions} \begin{document} +\SweaveOpts{concordance=TRUE} \title{Custom Moment and Objective Functions} \author{Ross Bennett} @@ -72,14 +73,8 @@ @ \section{Setting the Portfolio Moments} -The PortfolioAnalytics framework to estimate solutions to constrained optimization problems is implemented in such a way that the moments of the returns are calculated only once and then used in lower level optimization functions. The \code{set.portfolio.moments} function computes the first, second, third, and fourth moments depending on the objective function(s) in the \code{portfolio} object. \code{set.portfolio.moments} implements methods to compute moments based on sample estimates, higher moments from fitting a statistical factor model based on the work of Kris Boudt (NEED REFERENCE HERE), the Black Litterman model, and the Fully Flexible Framework based on the work of Attilio Meucci. +The PortfolioAnalytics framework to estimate solutions to constrained optimization problems is implemented in such a way that the moments of the returns are calculated only once and then used in lower level optimization functions. The \code{set.portfolio.moments} function computes the first, second, third, and fourth moments depending on the objective function(s) in the \code{portfolio} object. For example, if the third and fourth moments do not need to be calculated for a given objective, then \code{set.portfolio.moments} will try to detect this and not compute those moments. 
Currently, \code{set.portfolio.moments} implements methods to compute moments based on sample estimates, higher moments from fitting a statistical factor model based on the work of Kris Boudt, the Black Litterman model, and the Fully Flexible Framework based on the work of Attilio Meucci (NEED REFERENCE HERE). -The moments of the returns are computed based on the objective(s) in the \code{portfolio} object and return a list where each element is the respective moment estimate. -<<>>= -args(set.portfolio.moments) -@ - - <>= # Construct initial portfolio with basic constraints. init.portf <- portfolio.spec(assets=funds) @@ -117,10 +112,10 @@ Here we define a function to estimate the covariance matrix using a robust method. <<>>= -sigma.robust <- function(R, ...){ +sigma.robust <- function(R){ out <- list() set.seed(1234) - out$sigma <- MASS::cov.rob(R, method="mcd", ...)$cov + out$sigma <- MASS::cov.rob(R, method="mcd")$cov return(out) } @ @@ -184,8 +179,9 @@ Now we can run the optimization to estimate a solution to our optimization problem. <<>>= -opt.pasd <- optimize.portfolio(R, pa.portf, - optimize_method="ROI", +opt.pasd <- optimize.portfolio(R, pasd.portf, + optimize_method="DEoptim", + search_size=5000, trace=TRUE, traceDE=0, momentFUN="sigma.robust") opt.pasd @ @@ -225,7 +221,7 @@ } @ -We now set up the portfolio and run the optimization using our custom moments and objective function to maximize CRRA. Note that \code{type="return"} is used to maximize an objective function. +Finally, we set up the portfolio and run the optimization using our custom moment function and objective function to maximize CRRA. Note that \code{type="return"} is used to maximize an objective function. <>= # Construct initial portfolio with basic constraints. 
crra.portf <- portfolio.spec(assets=funds) Modified: pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.pdf =================================================================== (Binary files differ) From noreply at r-forge.r-project.org Fri Aug 8 04:38:35 2014 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Fri, 8 Aug 2014 04:38:35 +0200 (CEST) Subject: [Returnanalytics-commits] r3502 - in pkg/FactorAnalytics: R man vignettes Message-ID: <20140808023835.B66B0187706@r-forge.r-project.org> Author: pragnya Date: 2014-08-08 04:38:34 +0200 (Fri, 08 Aug 2014) New Revision: 3502 Added: pkg/FactorAnalytics/vignettes/FA.bib pkg/FactorAnalytics/vignettes/fitTsfm.pdf pkg/FactorAnalytics/vignettes/fundamentalFM.pdf Modified: pkg/FactorAnalytics/R/fitTsfm.R pkg/FactorAnalytics/R/fmCov.R pkg/FactorAnalytics/R/fmEsDecomp.R pkg/FactorAnalytics/R/fmSdDecomp.R pkg/FactorAnalytics/R/fmVaRDecomp.R pkg/FactorAnalytics/R/plot.tsfm.r pkg/FactorAnalytics/R/predict.tsfm.r pkg/FactorAnalytics/R/print.tsfm.r pkg/FactorAnalytics/R/summary.tsfm.r pkg/FactorAnalytics/man/fitTsfm.Rd pkg/FactorAnalytics/man/fmCov.Rd pkg/FactorAnalytics/man/fmEsDecomp.Rd pkg/FactorAnalytics/man/fmSdDecomp.Rd pkg/FactorAnalytics/man/fmVaRDecomp.Rd pkg/FactorAnalytics/man/plot.tsfm.Rd Log: Added vignette for fitTsfm, fixed some typos Modified: pkg/FactorAnalytics/R/fitTsfm.R =================================================================== --- pkg/FactorAnalytics/R/fitTsfm.R 2014-08-06 20:34:32 UTC (rev 3501) +++ pkg/FactorAnalytics/R/fitTsfm.R 2014-08-08 02:38:34 UTC (rev 3502) @@ -26,20 +26,20 @@ #' improves. And, "subsets" enables subsets selection using #' \code{\link[leaps]{regsubsets}}; chooses the best performing subset of any #' given size. See \code{\link{fitTsfm.control}} for more details on the -#' control arguments. \code{varaible.selection="lars"} corresponds to least +#' control arguments. 
\code{variable.selection="lars"} corresponds to least #' angle regression using \code{\link[lars]{lars}} with variants "lasso", #' "lar", "forward.stagewise" or "stepwise". Note: If #' \code{variable.selection="lars"}, \code{fit.method} will be ignored. #' -#' \code{mkt.timing} allows for market-timing factors to be added to any of the -#' above methods. Market timing accounts for the price movement of the general -#' stock market relative to fixed income securities. "HM" follows -#' Henriksson & Merton (1981) and \code{up-market=max(0,Rm-Rf)}, is added to -#' the regression. The coefficient of this up-market factor can be -#' interpreted as the number of free put options. Similarly, "TM" follows -#' Treynor-Mazuy (1966), to account for market timing with respect to -#' volatility, and \code{market.sqd=(Rm-Rf)^2} is added as a factor in the -#' regression. Option "both" adds both of these factors. +#' Arguments \code{mkt.name} and \code{mkt.timing} allow for market-timing +#' factors to be added to any of the above methods. Market timing accounts for +#' the price movement of the general stock market relative to fixed income +#' securities. "HM" follows Henriksson & Merton (1981) and +#' \code{up.market=max(0,Rm-Rf)}, is added to the regression. The coefficient +#' of this up-market factor can be interpreted as the number of free put +#' options. Similarly, "TM" follows Treynor-Mazuy (1966), to account for market +#' timing with respect to volatility, and \code{market.sqd=(Rm-Rf)^2} is added +#' as a factor in the regression. Option "both" adds both of these factors. #' #' \subsection{Data Processing}{ #' @@ -56,9 +56,8 @@ #' @param asset.names vector containing names of assets, whose returns or #' excess returns are the dependent variable. #' @param factor.names vector containing names of the macroeconomic factors. -#' @param mkt.name name of the column for market excess returns (Rm-Rf). 
-#' Is required if \code{mkt.timing} or \code{add.market.sqd} -#' are \code{TRUE}. Default is NULL. +#' @param mkt.name name of the column for market excess returns (Rm-Rf); this +#' is necessary to add market timing factors. Default is NULL. #' @param rf.name name of the column of risk free rate variable to calculate #' excess returns for all assets (in \code{asset.names}) and factors (in #' \code{factor.names}). Default is NULL, and no action is taken. @@ -69,8 +68,8 @@ #' See details. Default is "OLS". #' @param variable.selection the variable selection method, one of "none", #' "stepwise","subsets","lars". See details. Default is "none". -#' @param mkt.timing one of "HM", "TM" or "both". Default is NULL. See Details. -#' \code{mkt.name} is required if any of these options are specified. +#' @param mkt.timing one of "HM", "TM" or "both". Default is NULL. See Details. +#' \code{mkt.name} is required if any of these options are to be implemented. #' @param control list of control parameters. The default is constructed by #' the function \code{\link{fitTsfm.control}}. See the documentation for #' \code{\link{fitTsfm.control}} for details. @@ -165,8 +164,8 @@ #' @export fitTsfm <- function(asset.names, factor.names, mkt.name=NULL, rf.name=NULL, - data=data, fit.method=c("OLS","DLS","Robust"), - variable.selection=c("none","stepwise","subsets","lars"), + data=data, fit.method=c("OLS","DLS","Robust"), + variable.selection=c("none","stepwise","subsets","lars"), mkt.timing=NULL, control=fitTsfm.control(...), ...) 
{ # record the call as an element to be returned @@ -230,7 +229,7 @@ } # opt add mkt-timing factors: up.market=max(0,Rm-Rf), market.sqd=(Rm-Rf)^2 - if (!is.null(mkt.timing)) { + if (!is.null(mkt.name)) { if(mkt.timing=="HM" || mkt.timing=="both") { up.market <- data.xts[,mkt.name] up.market [up.market < 0] <- 0 @@ -277,9 +276,10 @@ # from returned factor model fits above coef.mat <- makePaddedDataFrame(lapply(reg.list, coef)) alpha <- coef.mat[, 1, drop=FALSE] - # to make class of alpha numeric instead of matrix - # aplha <- coef.mat[,1] + # to get alpha of class numeric, do: aplha <- coef.mat[,1] beta <- coef.mat[, -1, drop=FALSE] + # reorder the columns to match factor names vector + beta <- subset(beta, select=factor.names) r2 <- sapply(reg.list, function(x) summary(x)$r.squared) resid.sd <- sapply(reg.list, function(x) summary(x)$sigma) # create list of return values. @@ -493,14 +493,10 @@ #' @export coef.tsfm <- function(object, ...) { - if (object$variable.selection=="lars") { - # generic method 'coef' does not exist for "lars" fit objects - # so, use cbind to form coef matrix - coef.mat <- cbind(object$alpha, object$beta) - colnames(coef.mat)[1] <- "(Intercept)" - } else { - coef.mat <- t(sapply(object$asset.fit, coef, ...)) - } + # cbind alpha and beta; works for all fit and var selection methods + coef.mat <- cbind(object$alpha, object$beta) + # name for alpha/intercept column + colnames(coef.mat)[1] <- "(Intercept)" return(coef.mat) } Modified: pkg/FactorAnalytics/R/fmCov.R =================================================================== --- pkg/FactorAnalytics/R/fmCov.R 2014-08-06 20:34:32 UTC (rev 3501) +++ pkg/FactorAnalytics/R/fmCov.R 2014-08-08 02:38:34 UTC (rev 3502) @@ -6,16 +6,16 @@ #' #' @details \code{R(i, t)}, the return on asset \code{i} at time \code{t}, #' is assumed to follow a factor model of the form, \cr \cr -#' \code{R(i,t) = alpha(i) + beta*F(t) + e(i,t)}, \cr \cr -#' where, \code{alpha(i)} is the intercept, \code{F(t)} is a {K x 1} 
vector of -#' the \code{K} factor values at time \code{t}, \code{beta} is a \code{1 x K} -#' vector of factor exposures and the error terms \code{e(i,t)} are serially +#' \code{R(i,t) = alpha(i) + beta(i)*f(t) + e(i,t)}, \cr \cr +#' where, \code{alpha(i)} is the intercept, \code{f(t)} is a {K x 1} vector of +#' factor returns at time \code{t}, \code{beta(i)} is a \code{1 x K} vector of +#' factor exposures and the error terms \code{e(i,t)} are serially #' uncorrelated across time and contemporaneously uncorrelated across assets #' so that \code{e(i,t) ~ iid(0,sig(i)^2)}. Thus, the variance of asset #' \code{i}'s return is given by \cr \cr -#' \code{var(R(i,t)) = beta*var(F(t))*tr(beta) + sig(i)^2}. \cr \cr -#' And, the \code{N x N} covariance matrix of N asset returns is \cr \cr -#' \code{var(R) = B*var(F(t))*tr(B) + D}, \cr \cr +#' \code{var(R(i)) = beta(i)*cov(F)*tr(beta(i)) + sig(i)^2}. \cr \cr +#' And, the \code{N x N} covariance matrix of asset returns is \cr \cr +#' \code{var(R) = B*cov(F)*tr(B) + D}, \cr \cr #' where, B is the \code{N x K} matrix of factor betas and \code{D} is a #' diagonal matrix with \code{sig(i)^2} along the diagonal. #' @@ -98,6 +98,7 @@ # get parameters and factors from factor model beta <- as.matrix(object$beta) + # convert NAs to 0 to enable matrix multiplication beta[is.na(beta)] <- 0 sig2.e = object$resid.sd^2 factor <- as.matrix(object$data[, object$factor.names]) Modified: pkg/FactorAnalytics/R/fmEsDecomp.R =================================================================== --- pkg/FactorAnalytics/R/fmEsDecomp.R 2014-08-06 20:34:32 UTC (rev 3501) +++ pkg/FactorAnalytics/R/fmEsDecomp.R 2014-08-08 02:38:34 UTC (rev 3502) @@ -8,8 +8,8 @@ #' simulated data. #' #' @details The factor model for an asset's return at time \code{t} has the -#' form \cr \cr \code{R(t) = beta'F(t) + e(t) = beta.star'F.star(t)} \cr \cr -#' where, \code{beta.star=(beta,sig.e)} and \code{F.star(t)=[F(t)',z(t)]'}. 
By +#' form \cr \cr \code{R(t) = beta'f(t) + e(t) = beta.star'f.star(t)} \cr \cr +#' where, \code{beta.star=(beta,sig.e)} and \code{f.star(t)=[f(t)',z(t)]'}. By #' Euler's theorem, the ES of the asset's return is given by: #' \cr \cr \code{ES.fm = sum(cES_k) = sum(beta.star_k*mES_k)} \cr \cr #' where, summation is across the \code{K} factors and the residual, @@ -105,7 +105,9 @@ } # get beta.star - beta.star <- as.matrix(cbind(object$beta, object$resid.sd)) + beta <- object$beta + beta[is.na(beta)] <- 0 + beta.star <- as.matrix(cbind(beta, object$resid.sd)) colnames(beta.star)[dim(beta.star)[2]] <- "residual" # factor returns and residuals data @@ -155,13 +157,13 @@ mES[i,] <- inv * colMeans(factor.star[idx,], na.rm =TRUE) # correction factor to ensure that sum(cES) = portfolio ES - cf <- as.numeric( ES.fm[i] / sum(mES[i,]*beta.star[i,]) ) + cf <- as.numeric( ES.fm[i] / sum(mES[i,]*beta.star[i,], na.rm=TRUE) ) # compute marginal, component and percentage contributions to ES # each of these have dimensions: N x (K+1) mES[i,] <- cf * mES[i,] cES[i,] <- mES[i,] * beta.star[i,] - pcES[i,] <- cES[i,] / ES.fm[i] + pcES[i,] <- 100* cES[i,] / ES.fm[i] } fm.ES.decomp <- list(VaR.fm=VaR.fm, n.exceed=n.exceed, idx.exceed=idx.exceed, Modified: pkg/FactorAnalytics/R/fmSdDecomp.R =================================================================== --- pkg/FactorAnalytics/R/fmSdDecomp.R 2014-08-06 20:34:32 UTC (rev 3501) +++ pkg/FactorAnalytics/R/fmSdDecomp.R 2014-08-08 02:38:34 UTC (rev 3502) @@ -4,8 +4,8 @@ #' assets' returns based on Euler's theorem, given the fitted factor model. #' #' @details The factor model for an asset's return at time \code{t} has the -#' form \cr \cr \code{R(t) = beta'F(t) + e(t) = beta.star'F.star(t)} \cr \cr -#' where, \code{beta.star=(beta,sig.e)} and \code{F.star(t)=[F(t)',z(t)]'}. +#' form \cr \cr \code{R(t) = beta'f(t) + e(t) = beta.star'f.star(t)} \cr \cr +#' where, \code{beta.star=(beta,sig.e)} and \code{f.star(t)=[f(t)',z(t)]'}. 
#' \cr \cr By Euler's theorem, the standard deviation of the asset's return #' is given as: \cr \cr #' \code{Sd.fm = sum(cSd_k) = sum(beta.star_k*mSd_k)} \cr \cr @@ -93,7 +93,9 @@ fmSdDecomp.tsfm <- function(object, use="pairwise.complete.obs", ...) { # get beta.star: N x (K+1) - beta.star <- as.matrix(cbind(object$beta, object$resid.sd)) + beta <- object$beta + beta[is.na(beta)] <- 0 + beta.star <- as.matrix(cbind(beta, object$resid.sd)) colnames(beta.star)[dim(beta.star)[2]] <- "residual" # get cov(F): K x K @@ -108,15 +110,15 @@ rownames(factor.star.cov) <- c(colnames(factor.cov),"residuals") # compute factor model sd; a vector of length N - sd.fm <- sqrt(rowSums(beta.star %*% factor.star.cov * beta.star)) + Sd.fm <- sqrt(rowSums(beta.star %*% factor.star.cov * beta.star)) # compute marginal, component and percentage contributions to sd # each of these have dimensions: N x (K+1) - mSd <- (t(factor.star.cov %*% t(beta.star)))/sd.fm + mSd <- (t(factor.star.cov %*% t(beta.star)))/Sd.fm cSd <- mSd * beta.star - pcSd = cSd/sd.fm + pcSd = 100* cSd/Sd.fm - fm.sd.decomp <- list(sd.fm=sd.fm, mSd=mSd, cSd=cSd, pcSd=pcSd) + fm.sd.decomp <- list(Sd.fm=Sd.fm, mSd=mSd, cSd=cSd, pcSd=pcSd) return(fm.sd.decomp) } Modified: pkg/FactorAnalytics/R/fmVaRDecomp.R =================================================================== --- pkg/FactorAnalytics/R/fmVaRDecomp.R 2014-08-06 20:34:32 UTC (rev 3501) +++ pkg/FactorAnalytics/R/fmVaRDecomp.R 2014-08-08 02:38:34 UTC (rev 3502) @@ -8,8 +8,8 @@ #' estimated quantile using the Cornish-Fisher expansion. #' #' @details The factor model for an asset's return at time \code{t} has the -#' form \cr \cr \code{R(t) = beta'F(t) + e(t) = beta.star'F.star(t)} \cr \cr -#' where, \code{beta.star=(beta,sig.e)} and \code{F.star(t)=[F(t)',z(t)]'}. By +#' form \cr \cr \code{R(t) = beta'f(t) + e(t) = beta.star'f.star(t)} \cr \cr +#' where, \code{beta.star=(beta,sig.e)} and \code{f.star(t)=[f(t)',z(t)]'}. 
By #' Euler's theorem, the VaR of the asset's return is given by: #' \cr \cr \code{VaR.fm = sum(cVaR_k) = sum(beta.star_k*mVaR_k)} \cr \cr #' where, summation is across the \code{K} factors and the residual, @@ -100,12 +100,14 @@ } # get beta.star - beta.star <- as.matrix(cbind(object$beta, object$resid.sd)) + beta <- object$beta + beta[is.na(beta)] <- 0 + beta.star <- as.matrix(cbind(beta, object$resid.sd)) colnames(beta.star)[dim(beta.star)[2]] <- "residual" # factor returns and residuals data factors.xts <- object$data[,object$factor.names] - resid.xts <- checkData(t(t(residuals(object))/object$resid.sd)) + resid.xts <- as.xts(t(t(residuals(object))/object$resid.sd)) time(resid.xts) <- as.Date(time(resid.xts)) # initialize lists and matrices @@ -156,13 +158,13 @@ mVaR[i,] <- inv * colMeans(factor.star*k.weight, na.rm =TRUE) # correction factor to ensure that sum(cVaR) = portfolio VaR - cf <- as.numeric( VaR.fm[i] / sum(mVaR[i,]*beta.star[i,]) ) + cf <- as.numeric( VaR.fm[i] / sum(mVaR[i,]*beta.star[i,], na.rm=TRUE) ) # compute marginal, component and percentage contributions to VaR # each of these have dimensions: N x (K+1) mVaR[i,] <- cf * mVaR[i,] cVaR[i,] <- mVaR[i,] * beta.star[i,] - pcVaR[i,] <- cVaR[i,] / VaR.fm[i] + pcVaR[i,] <- 100* cVaR[i,] / VaR.fm[i] } fm.VaR.decomp <- list(VaR.fm=VaR.fm, n.exceed=n.exceed, idx.exceed=idx.exceed, Modified: pkg/FactorAnalytics/R/plot.tsfm.r =================================================================== --- pkg/FactorAnalytics/R/plot.tsfm.r 2014-08-06 20:34:32 UTC (rev 3501) +++ pkg/FactorAnalytics/R/plot.tsfm.r 2014-08-08 02:38:34 UTC (rev 3502) @@ -68,9 +68,6 @@ #' to suppress the legend. #' @param las one of {0, 1, 2, 3} to set the direction of axis labels, same as #' in \code{plot}. Default here is 1. -#' @param horiz a logical value. If \code{FALSE}, the bars are drawn vertically -#' with the first bar to the left. If \code{TRUE}, the bars are drawn -#' horizontally with the first at the bottom. 
Default here is \code{TRUE}. #' @param VaR.method a method for computing VaR; one of "modified", "gaussian", #' "historical" or "kernel". VaR is computed using #' \code{\link[PerformanceAnalytics]{VaR}}. Default is "historical". @@ -91,7 +88,7 @@ #' \code{\link[PerformanceAnalytics]{chart.ACFplus}}, #' \code{\link[PerformanceAnalytics]{chart.Histogram}}, #' \code{\link[PerformanceAnalytics]{chart.QQPlot}}, -#' \code{\link[graphics]{barplot}} and +#' \code{\link[graphics]{barplot}}, \code{\link[lattice]{barchart}} and #' \code{\link[corrplot]{corrplot}} for plotting methods used. #' #' \code{\link{fmSdDecomp}}, \code{\link{fmEsDecomp}}, @@ -123,8 +120,8 @@ plot.tsfm <- function(x, which.plot.group=NULL, max.show=6, plot.single=FALSE, asset.name, which.plot.single=NULL, colorset=(1:12), - legend.loc="topleft", las=1, horiz=TRUE, - VaR.method="historical", loop=TRUE, ...) { + legend.loc="topleft", las=1, VaR.method="historical", + loop=TRUE, ...) { if (plot.single==TRUE) { @@ -249,7 +246,7 @@ } reg.z <- zoo(fit$model, as.Date(rownames(fit$model))) rollReg.z <- rollapply(reg.z, FUN=rollReg, formula(fit), - width=24, by.column=FALSE, align="right") + width=24, by.column=FALSE, align="right") } else if (x$fit.method=="DLS") { # get decay factor if (as.character(x$call["decay"])=="NULL") { @@ -264,17 +261,17 @@ coef(lm(formula, weights=w, data=as.data.frame(data.z))) } reg.z <- zoo(fit$model[-length(fit$model)], - as.Date(rownames(fit$model))) + as.Date(rownames(fit$model))) rollReg.z <- rollapply(reg.z, FUN=rollReg.w, formula(fit), w, - width=24, by.column=FALSE, align="right") + width=24, by.column=FALSE, align="right") } else if (x$fit.method=="Robust") { rollReg.Rob <- function(data.z, formula) { coef(lmRob(formula=formula, data=as.data.frame(data.z))) } reg.z <- zoo(fit$model, as.Date(rownames(fit$model))) rollReg.z <- rollapply(reg.z, width=24, FUN=rollReg.Rob, - formula(fit), by.column=FALSE, - align="right") + formula(fit), by.column=FALSE, + align="right") } else 
if (is.null(x$fit.method)) { stop("Rolling estimates is not available for 'lars' fits.") } @@ -313,9 +310,9 @@ switch(which.plot.group, "1L" = { ## Factor model coefficients: Alpha - # xlab="Intercept estimate", ylab="Assets" barplot(coef(x)[,1], main="Factor model Alpha (Intercept)", - col="darkblue", las=las, horiz=horiz, ...) + names.arg=rownames(coef(x)), col="darkblue", las=las, + horiz=TRUE, ...) abline(v=0, lwd=1, lty=1, col=1) }, "2L" = { @@ -329,9 +326,8 @@ par(mfrow=c(ceiling(k/2),2)) for (i in 2:(k+1)) { main=paste(colnames(coef(x))[i], "factor Betas") - # xlab="Beta estimate", ylab="Assets" - barplot(coef(x)[,i], main=main, col="darkblue", las=las, - horiz=horiz, ...) + barplot(coef(x)[,i], main=main, names.arg=rownames(coef(x)), + col="darkblue", las=las, horiz=TRUE, ...) abline(v=0, lwd=1, lty=1, col=1) } par(mfrow=c(1,1)) @@ -357,51 +353,65 @@ }, "4L" ={ ## R-squared - # ylab="Assets", xlab="R-squared" - barplot(x$r2, main="R-squared values for factor model fits", - col="darkblue", las=las, horiz=horiz, ...) - abline(v=0, lwd=1, lty=1, col=1) + plot( + barchart(x$r2, main="R-squared values", xlab="", + col="darkblue", ...) + ) }, "5L" = { ## Residual Volatility - # ylab="Assets", xlab="Residual volatility" - barplot(x$resid.sd, col="darkblue", las=las, horiz=horiz, - main="Residual volatility for factor model fits", ...) - abline(v=0, lwd=1, lty=1, col=1) + plot( + barchart(x$resid.sd, main="Residual volatility", xlab="", + col="darkblue", ...) + ) }, "6L" = { ## Factor Model Residual Correlation cor.resid <- cor(residuals(x), use="pairwise.complete.obs") corrplot::corrplot(cor.resid, ...) + # mtext("pairwise complete obs", line=0.5) }, "7L" = { ## Factor Model Return Correlation cor.fm <- cov2cor(fmCov(x)) corrplot::corrplot(cor.fm, ...) 
+ # mtext("pairwise complete obs", line=0.5) }, "8L" = { - ## Factor Contribution to SD - cSd.fm <- fmSdDecomp(x)$cSd - barplot(t(cSd.fm), main="Factor Contributions to SD", - legend.text=T, args.legend=legend.loc, col=colorset, - horiz=horiz, ...) - mtext("(pairwise complete obs)", line=0.5) + ## Factor Percentage Contribution to SD + pcSd.fm <- fmSdDecomp(x)$pcSd + plot( + barchart(pcSd.fm, main="Factor % Contribution to SD", xlab="", + auto.key=list(space="bottom",columns=3, + points=FALSE,rectangles=TRUE), + par.settings=list(superpose.polygon=list(col=colorset)), + panel=function(...){panel.grid(h=0, v=-1); + panel.barchart(...)}, ...) + ) }, "9L"={ - ## Factor Contribution to ES - cES.fm <- fmEsDecomp(x)$cES - barplot(t(cES.fm), main="Factor Contributions to ES", - legend.text=T, args.legend=legend.loc, col=colorset, - horiz=horiz, ...) - mtext("(pairwise complete obs)", line=0.5) + ## Factor Percentage Contribution to ES + pcES.fm <- fmEsDecomp(x, method=VaR.method)$pcES + plot( + barchart(pcES.fm, main="Factor % Contribution to ES", xlab="", + auto.key=list(space="bottom",columns=3, + points=FALSE,rectangles=TRUE), + par.settings=list(superpose.polygon=list(col=colorset)), + panel=function(...){panel.grid(h=0, v=-1); + panel.barchart(...)}, ...) + ) }, "10L" ={ - ## Factor Contribution to VaR - cVaR.fm <- fmVaRDecomp(x)$cVaR - barplot(t(cVaR.fm), main="Factor Contributions to VaR", - legend.text=T, args.legend=legend.loc, col=colorset, - horiz=horiz, ...) - mtext("(pairwise complete obs)", line=0.5) + ## Factor Percentage Contribution to VaR + pcVaR.fm <- fmVaRDecomp(x, method=VaR.method)$pcVaR + plot( + barchart(pcVaR.fm, main="Factor % Contribution to VaR", + xlab="", auto.key=list(space="bottom",columns=3, + points=FALSE,rectangles=TRUE), + par.settings=list(superpose.polygon=list(col=colorset)), + panel=function(...){panel.grid(h=0, v=-1); + panel.barchart(...)}, ...) 
+ ) }, invisible() ) Modified: pkg/FactorAnalytics/R/predict.tsfm.r =================================================================== --- pkg/FactorAnalytics/R/predict.tsfm.r 2014-08-06 20:34:32 UTC (rev 3501) +++ pkg/FactorAnalytics/R/predict.tsfm.r 2014-08-08 02:38:34 UTC (rev 3502) @@ -43,31 +43,3 @@ lapply(object$asset.fit, predict, newdata, ...) } } - -# -# if ( !(missing(newdata) && !is.null(newdata) )) { -# numAssets <- length(names(fit.macro$asset.fit)) -# -# data <- fit.macro$data -# factors <- data[,fit.macro$factors.names] -# mu.factors <- apply(factors,2,mean) -# cov.factors <- cov(factors) -# -# for (i in 1:numAssets) -# if (dim(newdata)[1] < length(residuals(fit$asset.fit[[1]])) ){ -# -# -# newdata <- data.frame(EDHEC.LS.EQ = rnorm(n=100), SP500.TR = rnorm(n=100) ) -# newdata.mat <- as.matrix(newdata) -# factor.scenarios <- 0.001 -# names(factor.scenarios) <- "SP500.TR" -# -# impliedFactorReturns(factor.scenarios, mu.factors, cov.factors) -# -# } -# -# -# -# } - - Modified: pkg/FactorAnalytics/R/print.tsfm.r =================================================================== --- pkg/FactorAnalytics/R/print.tsfm.r 2014-08-06 20:34:32 UTC (rev 3501) +++ pkg/FactorAnalytics/R/print.tsfm.r 2014-08-08 02:38:34 UTC (rev 3502) @@ -34,11 +34,11 @@ names(tmp) <- c("Factors", "Assets", "Periods") print(tmp) cat("\nRegression Alphas:\n") - print(x$alpha, digits = digits, ...) + print(t(x$alpha), digits=digits, ...) cat("\nFactor Betas:\n") - print(x$beta, digits = digits, ...) + print(x$beta, digits=digits, ...) cat("\nR-squared values:\n") - print(x$r2, digits = digits, ...) + print(x$r2, digits=digits, ...) cat("\nResidual Volatilities:\n") - print(x$resid.sd, digits = digits, ...) + print(x$resid.sd, digits=digits, ...) 
} Modified: pkg/FactorAnalytics/R/summary.tsfm.r =================================================================== --- pkg/FactorAnalytics/R/summary.tsfm.r 2014-08-06 20:34:32 UTC (rev 3501) +++ pkg/FactorAnalytics/R/summary.tsfm.r 2014-08-08 02:38:34 UTC (rev 3502) @@ -73,9 +73,9 @@ # convert to HC/HAC standard errors and t-stats if specified # extract coefficients separately for "lars" variable.selection method for (i in object$asset.names) { - if (se.type == "HC") { + if (se.type=="HC") { sum[[i]]$coefficients <- coeftest(object$asset.fit[[i]], vcovHC)[,1:4] - } else if (se.type == "HAC") { + } else if (se.type=="HAC") { sum[[i]]$coefficients <- coeftest(object$asset.fit[[i]], vcovHAC)[,1:4] } } @@ -92,7 +92,7 @@ } # include the call and se.type to fitTsfm - sum <- c(list(call=object$call, Type=se.type), sum) + sum <- c(list(call=object$call, se.type=se.type), sum) class(sum) <- "summary.tsfm" return(sum) } @@ -112,7 +112,7 @@ n <- length(x) for (i in 3:n) { options(digits = digits) - cat("\nAsset", i-2, ": ", names(x[i]), "\n(",x$Type, + cat("\nAsset", i-2, ": ", names(x[i]), "\n(",x$se.type, " Standard Errors & T-stats)\n\n", sep = "") table.coef <- x[[i]]$coefficients print(table.coef, digits = digits, ...) Modified: pkg/FactorAnalytics/man/fitTsfm.Rd =================================================================== --- pkg/FactorAnalytics/man/fitTsfm.Rd 2014-08-06 20:34:32 UTC (rev 3501) +++ pkg/FactorAnalytics/man/fitTsfm.Rd 2014-08-08 02:38:34 UTC (rev 3502) @@ -23,9 +23,8 @@ \item{factor.names}{vector containing names of the macroeconomic factors.} -\item{mkt.name}{name of the column for market excess returns (Rm-Rf). -Is required if \code{mkt.timing} or \code{add.market.sqd} -are \code{TRUE}. Default is NULL.} +\item{mkt.name}{name of the column for market excess returns (Rm-Rf); this +is necessary to add market timing factors. 
Default is NULL.} \item{rf.name}{name of the column of risk free rate variable to calculate excess returns for all assets (in \code{asset.names}) and factors (in @@ -42,7 +41,7 @@ "stepwise","subsets","lars". See details. Default is "none".} \item{mkt.timing}{one of "HM", "TM" or "both". Default is NULL. See Details. -\code{mkt.name} is required if any of these options are specified.} +\code{mkt.name} is required if any of these options are to be implemented.} \item{control}{list of control parameters. The default is constructed by the function \code{\link{fitTsfm.control}}. See the documentation for @@ -110,20 +109,20 @@ improves. And, "subsets" enables subsets selection using \code{\link[leaps]{regsubsets}}; chooses the best performing subset of any given size. See \code{\link{fitTsfm.control}} for more details on the -control arguments. \code{varaible.selection="lars"} corresponds to least +control arguments. \code{variable.selection="lars"} corresponds to least angle regression using \code{\link[lars]{lars}} with variants "lasso", "lar", "forward.stagewise" or "stepwise". Note: If \code{variable.selection="lars"}, \code{fit.method} will be ignored. -\code{mkt.timing} allows for market-timing factors to be added to any of the -above methods. Market timing accounts for the price movement of the general -stock market relative to fixed income securities. "HM" follows -Henriksson & Merton (1981) and \code{up-market=max(0,Rm-Rf)}, is added to -the regression. The coefficient of this up-market factor can be -interpreted as the number of free put options. Similarly, "TM" follows -Treynor-Mazuy (1966), to account for market timing with respect to -volatility, and \code{market.sqd=(Rm-Rf)^2} is added as a factor in the -regression. Option "both" adds both of these factors. +Arguments \code{mkt.name} and \code{mkt.timing} allow for market-timing +factors to be added to any of the above methods. 
Market timing accounts for +the price movement of the general stock market relative to fixed income +securities. "HM" follows Henriksson & Merton (1981) and +\code{up.market=max(0,Rm-Rf)}, is added to the regression. The coefficient +of this up-market factor can be interpreted as the number of free put +options. Similarly, "TM" follows Treynor-Mazuy (1966), to account for market +timing with respect to volatility, and \code{market.sqd=(Rm-Rf)^2} is added +as a factor in the regression. Option "both" adds both of these factors. \subsection{Data Processing}{ Modified: pkg/FactorAnalytics/man/fmCov.Rd =================================================================== --- pkg/FactorAnalytics/man/fmCov.Rd 2014-08-06 20:34:32 UTC (rev 3501) +++ pkg/FactorAnalytics/man/fmCov.Rd 2014-08-08 02:38:34 UTC (rev 3502) @@ -31,16 +31,16 @@ \details{ \code{R(i, t)}, the return on asset \code{i} at time \code{t}, is assumed to follow a factor model of the form, \cr \cr -\code{R(i,t) = alpha(i) + beta*F(t) + e(i,t)}, \cr \cr -where, \code{alpha(i)} is the intercept, \code{F(t)} is a {K x 1} vector of -the \code{K} factor values at time \code{t}, \code{beta} is a \code{1 x K} -vector of factor exposures and the error terms \code{e(i,t)} are serially +\code{R(i,t) = alpha(i) + beta(i)*f(t) + e(i,t)}, \cr \cr +where, \code{alpha(i)} is the intercept, \code{f(t)} is a {K x 1} vector of +factor returns at time \code{t}, \code{beta(i)} is a \code{1 x K} vector of +factor exposures and the error terms \code{e(i,t)} are serially uncorrelated across time and contemporaneously uncorrelated across assets so that \code{e(i,t) ~ iid(0,sig(i)^2)}. Thus, the variance of asset \code{i}'s return is given by \cr \cr -\code{var(R(i,t)) = beta*var(F(t))*tr(beta) + sig(i)^2}. \cr \cr -And, the \code{N x N} covariance matrix of N asset returns is \cr \cr -\code{var(R) = B*var(F(t))*tr(B) + D}, \cr \cr +\code{var(R(i)) = beta(i)*cov(F)*tr(beta(i)) + sig(i)^2}. 
\cr \cr +And, the \code{N x N} covariance matrix of asset returns is \cr \cr +\code{var(R) = B*cov(F)*tr(B) + D}, \cr \cr where, B is the \code{N x K} matrix of factor betas and \code{D} is a diagonal matrix with \code{sig(i)^2} along the diagonal. Modified: pkg/FactorAnalytics/man/fmEsDecomp.Rd =================================================================== --- pkg/FactorAnalytics/man/fmEsDecomp.Rd 2014-08-06 20:34:32 UTC (rev 3501) +++ pkg/FactorAnalytics/man/fmEsDecomp.Rd 2014-08-08 02:38:34 UTC (rev 3502) @@ -45,8 +45,8 @@ } \details{ The factor model for an asset's return at time \code{t} has the -form \cr \cr \code{R(t) = beta'F(t) + e(t) = beta.star'F.star(t)} \cr \cr -where, \code{beta.star=(beta,sig.e)} and \code{F.star(t)=[F(t)',z(t)]'}. By +form \cr \cr \code{R(t) = beta'f(t) + e(t) = beta.star'f.star(t)} \cr \cr +where, \code{beta.star=(beta,sig.e)} and \code{f.star(t)=[f(t)',z(t)]'}. By Euler's theorem, the ES of the asset's return is given by: \cr \cr \code{ES.fm = sum(cES_k) = sum(beta.star_k*mES_k)} \cr \cr where, summation is across the \code{K} factors and the residual, Modified: pkg/FactorAnalytics/man/fmSdDecomp.Rd =================================================================== --- pkg/FactorAnalytics/man/fmSdDecomp.Rd 2014-08-06 20:34:32 UTC (rev 3501) +++ pkg/FactorAnalytics/man/fmSdDecomp.Rd 2014-08-08 02:38:34 UTC (rev 3502) @@ -33,8 +33,8 @@ } \details{ The factor model for an asset's return at time \code{t} has the -form \cr \cr \code{R(t) = beta'F(t) + e(t) = beta.star'F.star(t)} \cr \cr -where, \code{beta.star=(beta,sig.e)} and \code{F.star(t)=[F(t)',z(t)]'}. +form \cr \cr \code{R(t) = beta'f(t) + e(t) = beta.star'f.star(t)} \cr \cr +where, \code{beta.star=(beta,sig.e)} and \code{f.star(t)=[f(t)',z(t)]'}. 
\cr \cr By Euler's theorem, the standard deviation of the asset's return is given as: \cr \cr \code{Sd.fm = sum(cSd_k) = sum(beta.star_k*mSd_k)} \cr \cr Modified: pkg/FactorAnalytics/man/fmVaRDecomp.Rd =================================================================== --- pkg/FactorAnalytics/man/fmVaRDecomp.Rd 2014-08-06 20:34:32 UTC (rev 3501) +++ pkg/FactorAnalytics/man/fmVaRDecomp.Rd 2014-08-08 02:38:34 UTC (rev 3502) @@ -44,8 +44,8 @@ } \details{ The factor model for an asset's return at time \code{t} has the -form \cr \cr \code{R(t) = beta'F(t) + e(t) = beta.star'F.star(t)} \cr \cr -where, \code{beta.star=(beta,sig.e)} and \code{F.star(t)=[F(t)',z(t)]'}. By [TRUNCATED] To get the complete diff run: svnlook diff /svnroot/returnanalytics -r 3502 From noreply at r-forge.r-project.org Sat Aug 9 23:50:13 2014 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Sat, 9 Aug 2014 23:50:13 +0200 (CEST) Subject: [Returnanalytics-commits] r3503 - in pkg/PortfolioAnalytics: R man vignettes Message-ID: <20140809215013.20479187321@r-forge.r-project.org> Author: rossbennett34 Date: 2014-08-09 23:50:12 +0200 (Sat, 09 Aug 2014) New Revision: 3503 Modified: pkg/PortfolioAnalytics/R/constraint_fn_map.R pkg/PortfolioAnalytics/R/generics.R pkg/PortfolioAnalytics/R/moment.functions.R pkg/PortfolioAnalytics/man/rp_transform.Rd pkg/PortfolioAnalytics/man/set.portfolio.moments.Rd pkg/PortfolioAnalytics/vignettes/portfolio_vignette.Rnw Log: cleaning up some vignettes and man files Modified: pkg/PortfolioAnalytics/R/constraint_fn_map.R =================================================================== --- pkg/PortfolioAnalytics/R/constraint_fn_map.R 2014-08-08 02:38:34 UTC (rev 3502) +++ pkg/PortfolioAnalytics/R/constraint_fn_map.R 2014-08-09 21:50:12 UTC (rev 3503) @@ -408,6 +408,7 @@ #' @param max_pos_short maximum number of assets with short (i.e. 
sell) positions #' @param leverage maximum leverage exposure where leverage is defined as \code{sum(abs(weights))} #' @param max_permutations integer: maximum number of iterations to try for a valid portfolio, default 200 +#' @param weight_seq vector of seed sequence of weights #' @return named weighting vector #' @author Peter Carl, Brian G. Peterson, Ross Bennett (based on an idea by Pat Burns) #' @export Modified: pkg/PortfolioAnalytics/R/generics.R =================================================================== --- pkg/PortfolioAnalytics/R/generics.R 2014-08-08 02:38:34 UTC (rev 3502) +++ pkg/PortfolioAnalytics/R/generics.R 2014-08-09 21:50:12 UTC (rev 3503) @@ -296,7 +296,7 @@ #' @method summary portfolio #' @export summary.portfolio <- function(object, ...){ - if(!is.portfolio(x)) stop("object passed in is not of class 'portfolio'") + if(!is.portfolio(object)) stop("object passed in is not of class 'portfolio'") out <- list() @@ -308,13 +308,15 @@ out$enabled_constraints <- list() out$disabled_constraints <- list() constraints <- object$constraints - for(i in 1:length(constraints)){ - if(constraints[[i]]$enabled){ - tmp <- length(out$enabled_constraints) - out$enabled_constraints[[tmp+1]] <- constraints[[i]] - } else { - tmp <- length(out$disabled_constraints) - out$disabled_constraints[[tmp+1]] <- constraints[[i]] + if(length(constraints) >= 1){ + for(i in 1:length(constraints)){ + if(constraints[[i]]$enabled){ + tmp <- length(out$enabled_constraints) + out$enabled_constraints[[tmp+1]] <- constraints[[i]] + } else { + tmp <- length(out$disabled_constraints) + out$disabled_constraints[[tmp+1]] <- constraints[[i]] + } } } @@ -322,13 +324,15 @@ out$enabled_objectives <- list() out$disabled_objectives <- list() objectives <- object$objectives - for(i in 1:length(objectives)){ - if(objectives[[i]]$enabled){ - tmp <- length(out$enabled_objectives) - out$enabled_objectives[[tmp+1]] <- objectives[[i]] - } else { - tmp <- length(out$disabled_objectives) - 
out$disabled_objectives[[tmp+1]] <- objectives[[i]] + if(length(objectives) >= 1){ + for(i in 1:length(objectives)){ + if(objectives[[i]]$enabled){ + tmp <- length(out$enabled_objectives) + out$enabled_objectives[[tmp+1]] <- objectives[[i]] + } else { + tmp <- length(out$disabled_objectives) + out$disabled_objectives[[tmp+1]] <- objectives[[i]] + } } } class(out) <- "summary.portfolio" Modified: pkg/PortfolioAnalytics/R/moment.functions.R =================================================================== --- pkg/PortfolioAnalytics/R/moment.functions.R 2014-08-08 02:38:34 UTC (rev 3502) +++ pkg/PortfolioAnalytics/R/moment.functions.R 2014-08-09 21:50:12 UTC (rev 3503) @@ -149,7 +149,7 @@ #' \item{sample: }{sample estimates are used for the moments} #' \item{boudt: }{estimate the second, third, and fourth moments using a #' statistical factor model based on the work of Kris Boudt.} -#' See \code{\link{fit.statistical.factor.model}} +#' See \code{\link{statistical.factor.model}} #' \item{black_litterman: }{estimate the first and second moments using the #' Black Litterman Formula. See \code{\link{black.litterman}}}. 
#' } Modified: pkg/PortfolioAnalytics/man/rp_transform.Rd =================================================================== --- pkg/PortfolioAnalytics/man/rp_transform.Rd 2014-08-08 02:38:34 UTC (rev 3502) +++ pkg/PortfolioAnalytics/man/rp_transform.Rd 2014-08-09 21:50:12 UTC (rev 3503) @@ -36,6 +36,8 @@ \item{leverage}{maximum leverage exposure where leverage is defined as \code{sum(abs(weights))}} \item{max_permutations}{integer: maximum number of iterations to try for a valid portfolio, default 200} + +\item{weight_seq}{vector of seed sequence of weights} } \value{ named weighting vector Modified: pkg/PortfolioAnalytics/man/set.portfolio.moments.Rd =================================================================== --- pkg/PortfolioAnalytics/man/set.portfolio.moments.Rd 2014-08-08 02:38:34 UTC (rev 3502) +++ pkg/PortfolioAnalytics/man/set.portfolio.moments.Rd 2014-08-09 21:50:12 UTC (rev 3503) @@ -27,7 +27,7 @@ \item{sample: }{sample estimates are used for the moments} \item{boudt: }{estimate the second, third, and fourth moments using a statistical factor model based on the work of Kris Boudt.} - See \code{\link{fit.statistical.factor.model}} + See \code{\link{statistical.factor.model}} \item{black_litterman: }{estimate the first and second moments using the Black Litterman Formula. See \code{\link{black.litterman}}}. } Modified: pkg/PortfolioAnalytics/vignettes/portfolio_vignette.Rnw =================================================================== --- pkg/PortfolioAnalytics/vignettes/portfolio_vignette.Rnw 2014-08-08 02:38:34 UTC (rev 3502) +++ pkg/PortfolioAnalytics/vignettes/portfolio_vignette.Rnw 2014-08-09 21:50:12 UTC (rev 3503) @@ -87,9 +87,9 @@ \section{Adding Constraints to the Portfolio Object} Adding constraints to the portfolio object is done with \code{add.constraint}. The \code{add.constraint} function is the main interface for adding and/or updating constraints to the portfolio object. 
This function allows the user to specify the portfolio to add the constraints to, the type of constraints, arguments for the constraint, and whether or not to enable the constraint (\code{enabled=TRUE} is the default). If updating an existing constraint, the indexnum argument can be specified. -\subsection{Leverage Constraint} +\subsection{Sum of Weights Constraint} -The \code{leverage} constraint specifies the constraint on the sum of the weights. Aliases for the \code{leverage} constraint type include \code{weight\_sum}, \code{weight}, and \code{leverage}. Here we add a constraint that the weights must sum to 1, or the full investment constraint. +The \code{weight_sum} constraint specifies the constraint on the sum of the weights. Aliases for the \code{weight\_sum} constraint type include \code{weight} and \code{leverage}. Here we add a constraint that the weights must sum to 1, or the full investment constraint. <>= # Add the full investment constraint that specifies the weights must sum to 1. pspec <- add.constraint(portfolio=pspec, @@ -198,7 +198,7 @@ print(pspec) @ -The \code{summary} function gives a more detailed view of the constraints. +The summary method gives a more detailed view of the constraints. 
<<>>= summary(pspec) @ From noreply at r-forge.r-project.org Sun Aug 10 17:30:39 2014 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Sun, 10 Aug 2014 17:30:39 +0200 (CEST) Subject: [Returnanalytics-commits] r3504 - pkg/PortfolioAnalytics/man Message-ID: <20140810153039.3517E187539@r-forge.r-project.org> Author: rossbennett34 Date: 2014-08-10 17:30:38 +0200 (Sun, 10 Aug 2014) New Revision: 3504 Modified: pkg/PortfolioAnalytics/man/PortfolioAnalytics-package.Rd Log: adding content to PortfolioAnalytics-package.Rd file Modified: pkg/PortfolioAnalytics/man/PortfolioAnalytics-package.Rd =================================================================== --- pkg/PortfolioAnalytics/man/PortfolioAnalytics-package.Rd 2014-08-09 21:50:12 UTC (rev 3503) +++ pkg/PortfolioAnalytics/man/PortfolioAnalytics-package.Rd 2014-08-10 15:30:38 UTC (rev 3504) @@ -47,7 +47,7 @@ \kbd{PortfolioAnalytics} supports three methods of generating random portfolios. \itemize{ - \item The sample method to generate random portfolios is based on an idea by Pat Burns. This is the most flexible method, but also the slowest, and can generate portfolios to satisfy leverage, box, group, and position limit constraints. + \item The sample method to generate random portfolios is based on an idea by Pat Burns. This is the most flexible method, but also the slowest, and can generate portfolios to satisfy leverage, box, group, position limit, and leverage constraints. \item The simplex method to generate random portfolios is based on a paper by W. T. Shaw. The simplex method is useful to generate random portfolios with the full investment constraint (where the sum of the weights is equal to 1) and min box constraints. Values for min_sum and max_sum of the leverage constraint will be ignored, the sum of weights will equal 1. All other constraints such as the box constraint max, group and position limit constraints will be handled by elimination. 
If the constraints are very restrictive, this may result in very few feasible portfolios remaining. Another key point to note is that the solution may not be along the vertexes depending on the objective. For example, a risk budget objective will likely place the portfolio somewhere on the interior. \item The grid method to generate random portfolios is based on the \code{gridSearch} function in package \kbd{NMOF}. The grid search method only satisfies the min and max box constraints. The min_sum and max_sum leverage constraint will likely be violated and the weights in the random portfolios should be normalized. Normalization may cause the box constraints to be violated and will be penalized in \code{constrained_objective}. } @@ -67,6 +67,18 @@ With the constraints and objectives specified in the portfolio object, the portfolio object can be passed to \code{\link{optimize.portfolio}} or \code{\link{optimize.portfolio.rebalancing}} to run the optimization. Arguments to \code{\link{optimize.portfolio}} include asset returns, the portfolio obect specifying constraints and objectives, optimization method, and other parameters specific to the solver. \code{\link{optimize.portfolio.rebalancing}} adds support for backtesting portfolio optimization through time with rebalancing or rolling periods. } +\section{Advanced Optimization}{ +In addition to the more standard optimizations described above, \kdb{PortfolioAnalytics} also supports multi-layer optimization and regime switching optimization. + +Support for multi-layer optimization allows one to construct a top level portfolio and several sub-portfolios with potentially different assets, constraints, and objectives. First, each sub-portfolio is optimized out-of-sample which creates a time series of returns. One can think of the out of sample returns for each sub-portfolio as the returns for a synthetic instrument. 
Finally, the out-of-sample returns of each sub-portfolio are then used as inputs for the top level optimization. The top level portfolio and sub-portfolios are created as normal using \code{portfolio.spec}, \code{add.constraint}, and \code{add.objective}. The multi-layer portfolio specification object is first initialized by passing the top level portfolio to \code{mult.portfolio.spec}. Sub-portfolios are then added with \code{add.sub.portfolio}. The multi-layer portfolio specification object can then be passed to \code{optimize.portfolio} and \code{optimize.portfolio.rebalancing}. See \code{demo(multi_layer_optimization)}. + +Support for regime switching models allows one to change constraints and objectives depending on the current regime. Portfolios are created as normal with \code{portfolio.spec}, \code{add.constraint}, and \code{add.objective}. The portfolios are then combined with a regime object using \code{regime.portfolios} to create a regime portfolio specification which can then be passed to \code{optimize.portfolio} and \code{optimize.portfolio.rebalancing}. Regime switching optimization is implemented in such a way that any arbitrary regime model can be used. See \code{demo(regime_switching)}. +} + +\section{Portfolio Moments}{ +The \kdb{PortfolioAnalytics} framework to estimate solutions to constrained optimization problems is implemented in such a way that the moments of the returns are set once for use in lower level optimization functions. The \code{set.portfolio.moments} function computes the first, second, third, and fourth moments depending on the objective function(s) in the \code{portfolio} object. For example, if the third and fourth moments do not need to be calculated for a given objective, then \code{set.portfolio.moments} will try to detect this and not compute those moments. 
Currently, \code{set.portfolio.moments} implements methods to compute moments based on sample estimates, higher moments from fitting a statistical factor model based on the work of Kris Boudt, the Black Litterman model, and the Fully Flexible Framework based on the work of Attilio Meucci (NEED REFERENCE HERE). See the Custom Moment and Objective Functions vignette for a more detailed description and examples. +} + \section{Charts and Graphs}{ Intuition into the optimization can be aided through visualization. The goal of creating the charts is to provide visualization tools for optimal portfolios regardless of the chosen optimization method. @@ -77,6 +89,14 @@ Multiple objects created via \code{\link{optimize.portfolio}} can be combined with \code{\link{combine.optimizations}} for visual comparison. The weights of the optimal portfolios can be plotted with \code{\link{chart.Weights}}. The optimal portfolios can be compared in risk-reward space with \code{\link{chart.RiskReward}}. The portfolio component risk contributions of the multiple optimal portfolios can be plotted with \code{\link{chart.RiskBudget}}. } +\section{Demos}{ +\kdb{PortfolioAnalytics} contains a comprehensive collection of demos to demonstrate the functionality from very basic optimization problems such as estimating the solution to a minimum variance portfolio to more complex optimization problems with custom moment and objective functions. +} + +\section{Vignettes}{ +TODO +} + \section{Package Dependencies}{ Several of the functions in the \kbd{PortfolioAnalytics} package require time series data of returns and the \code{\link[xts]{xts}} package is used for working with time series data. @@ -98,6 +118,7 @@ } \author{ +Ross Bennett \cr Kris Boudt \cr Peter Carl \cr Brian G. Peterson \cr @@ -106,10 +127,18 @@ } \references{ +Boudt, Kris and Lu, Wanbo and Peeters, Benedict, \emph{Higher Order Comoments of Multifactor Models and Asset Allocation} (June 16, 2014). 
Available at SSRN: http://ssrn.com/abstract=2409603 or http://dx.doi.org/10.2139/ssrn.2409603 + +Chriss, Neil A and Almgren, Robert, \emph{Portfolios from Sorts} (April 27, 2005). Available at SSRN: http://ssrn.com/abstract=720041 or http://dx.doi.org/10.2139/ssrn.720041 \cr + +Meucci, Attilio, \emph{The Black-Litterman Approach: Original Model and Extensions} (August 1, 2008). Shorter version in, THE ENCYCLOPEDIA OF QUANTITATIVE FINANCE, Wiley, 2010. Available at SSRN: http://ssrn.com/abstract=1117574 or http://dx.doi.org/10.2139/ssrn.1117574 \cr + +Meucci, Attilio, \emph{Fully Flexible Views: Theory and Practice} (August 8, 2008). Fully Flexible Views: Theory and Practice, Risk, Vol. 21, No. 10, pp. 97-102, October 2008. Available at SSRN: http://ssrn.com/abstract=1213325 + +Scherer, Bernd and Martin, Doug, \emph{Modern Portfolio Optimization}. Springer. 2005. \cr + Shaw, William Thornton, \emph{Portfolio Optimization for VAR, CVaR, Omega and Utility with General Return Distributions: A Monte Carlo Approach for Long-Only and Bounded Short Portfolios with Optional Robustness and a Simplified Approach to Covariance Matching} (June 1, 2011). Available at SSRN: http://ssrn.com/abstract=1856476 or http://dx.doi.org/10.2139/ssrn.1856476 \cr -Scherer, B. and Martin, D. \emph{Modern Portfolio Optimization}. Springer. 2005. 
\cr - } \section{Acknowledgements}{ From noreply at r-forge.r-project.org Mon Aug 11 04:39:13 2014 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Mon, 11 Aug 2014 04:39:13 +0200 (CEST) Subject: [Returnanalytics-commits] r3505 - in pkg/PortfolioAnalytics: man vignettes Message-ID: <20140811023913.8AFF2180475@r-forge.r-project.org> Author: rossbennett34 Date: 2014-08-11 04:39:12 +0200 (Mon, 11 Aug 2014) New Revision: 3505 Modified: pkg/PortfolioAnalytics/man/PortfolioAnalytics-package.Rd pkg/PortfolioAnalytics/vignettes/PA.bib pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.Rnw pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.pdf Log: adding reference information Modified: pkg/PortfolioAnalytics/man/PortfolioAnalytics-package.Rd =================================================================== --- pkg/PortfolioAnalytics/man/PortfolioAnalytics-package.Rd 2014-08-10 15:30:38 UTC (rev 3504) +++ pkg/PortfolioAnalytics/man/PortfolioAnalytics-package.Rd 2014-08-11 02:39:12 UTC (rev 3505) @@ -68,7 +68,7 @@ } \section{Advanced Optimization}{ -In addition to the more standard optimizations described above, \kdb{PortfolioAnalytics} also supports multi-layer optimization and regime switching optimization. +In addition to the more standard optimizations described above, \kbd{PortfolioAnalytics} also supports multi-layer optimization and regime switching optimization. Support for multi-layer optimization allows one to construct a top level portfolio and several sub-portfolios with potentially different assets, constraints, and objectives. First, each sub-portfolio is optimized out-of-sample which creates a time series of returns. One can think of the out of sample returns for each sub-portfolio as the returns for a synthetic instrument. Finally, the out-of-sample returns of each sub-portfolio are then used as inputs for the top level optimization. 
The top level portfolio and sub-portfolios are created as normal using \code{portfolio.spec}, \code{add.constraint}, and \code{add.objective}. The multi-layer portfolio specification object is first initialized by passing the top level portfolio to \code{mult.portfolio.spec}. Sub-portfolios are then added with \code{add.sub.portfolio}. The multi-layer portfolio specification object can then be passed to \code{optimize.portfolio} and \code{optimize.portfolio.rebalancing}. See \code{demo(multi_layer_optimization)}. @@ -76,7 +76,7 @@ } \section{Portfolio Moments}{ -The \kdb{PortfolioAnalytics} framework to estimate solutions to constrained optimization problems is implemented in such a way that the moments of the returns are set once for use in lower level optimization functions. The \code{set.portfolio.moments} function computes the first, second, third, and fourth moments depending on the objective function(s) in the \code{portfolio} object. For example, if the third and fourth moments do not need to be calculated for a given objective, then \code{set.portfolio.moments} will try to detect this and not compute those moments. Currently, \code{set.portfolio.moments} implements methods to compute moments based on sample estimates, higher moments from fitting a statistical factor model based on the work of Kris Boudt, the Black Litterman model, and the Fully Flexible Framework based on the work of Attilio Meucci (NEED REFERENCE HERE). See the Custom Moment and Objective Functions vignette for a more detailed description and examples. +The \kbd{PortfolioAnalytics} framework to estimate solutions to constrained optimization problems is implemented in such a way that the moments of the returns are set once for use in lower level optimization functions. The \code{set.portfolio.moments} function computes the first, second, third, and fourth moments depending on the objective function(s) in the \code{portfolio} object. 
For example, if the third and fourth moments do not need to be calculated for a given objective, then \code{set.portfolio.moments} will try to detect this and not compute those moments. Currently, \code{set.portfolio.moments} implements methods to compute moments based on sample estimates, higher moments from fitting a statistical factor model based on the work of Kris Boudt, the Black Litterman model, and the Fully Flexible Framework based on the work of Attilio Meucci (NEED REFERENCE HERE). See the Custom Moment and Objective Functions vignette for a more detailed description and examples. } \section{Charts and Graphs}{ @@ -90,7 +90,7 @@ } \section{Demos}{ -\kdb{PortfolioAnalytics} contains a comprehensive collection of demos to demonstrate the functionality from very basic optimization problems such as estimating the solution to a minimum variance portfolio to more complex optimization problems with custom moment and objective functions. +\kbd{PortfolioAnalytics} contains a comprehensive collection of demos to demonstrate the functionality from very basic optimization problems such as estimating the solution to a minimum variance portfolio to more complex optimization problems with custom moment and objective functions. } \section{Vignettes}{ Modified: pkg/PortfolioAnalytics/vignettes/PA.bib =================================================================== --- pkg/PortfolioAnalytics/vignettes/PA.bib 2014-08-10 15:30:38 UTC (rev 3504) +++ pkg/PortfolioAnalytics/vignettes/PA.bib 2014-08-11 02:39:12 UTC (rev 3505) @@ -431,4 +431,33 @@ author = {Scherer, Bernd. 
and Martin, Douglas}, owner = {brian}, timestamp = {2007.08.19} -} \ No newline at end of file +} + + at ARTICLE{Boudt2014, + author = {Boudt, Kris and Lu, Wanbo and Peeters, Benedict}, + title = {Higher Order Comoments of Multifactor Models and Asset Allocation}, + month = {June}, + year = {2014}, + url = {http://papers.ssrn.com/sol3/papers.cfm?abstract_id=2409603} +} + + at ARTICLE{Meucci2008, + author = {Meucci, Attilio}, + title = {Fully Flexible Views: Theory and Practice}, + journal = {Journal of Risk}, + year = {2008}, + volume = {21}, + pages = {97-102}, + number = {10}, + url = {http://papers.ssrn.com/sol3/papers.cfm?abstract_id=1213325} +} + + at ARTICLE{MeucciBL2008, + author = {Meucci, Attilio}, + title = {The Black-Litterman Approach: Original Model and Extensions}, + journal = {Journal of Risk}, + month = {August}, + year = {2008}, + url = {http://papers.ssrn.com/sol3/papers.cfm?abstract_id=1117574} +} + Modified: pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.Rnw =================================================================== --- pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.Rnw 2014-08-10 15:30:38 UTC (rev 3504) +++ pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.Rnw 2014-08-11 02:39:12 UTC (rev 3505) @@ -37,7 +37,6 @@ % \VignetteIndexEntry{Custom Moment and Objective Functions} \begin{document} -\SweaveOpts{concordance=TRUE} \title{Custom Moment and Objective Functions} \author{Ross Bennett} @@ -73,7 +72,7 @@ @ \section{Setting the Portfolio Moments} -The PortfolioAnalytics framework to estimate solutions to constrained optimization problems is implemented in such a way that the moments of the returns are calculated only once and then used in lower level optimization functions. The \code{set.portfolio.moments} function computes the first, second, third, and fourth moments depending on the objective function(s) in the \code{portfolio} object. 
For example, if the third and fourth moments do not need to be calculated for a given objective, then \code{set.portfolio.moments} will try to detect this and not compute those moments. Currently, \code{set.portfolio.moments} implements methods to compute moments based on sample estimates, higher moments from fitting a statistical factor model based on the work of Kris Boudt, the Black Litterman model, and the Fully Flexible Framework based on the work of Attilio Meucci (NEED REFERENCE HERE). +The PortfolioAnalytics framework to estimate solutions to constrained optimization problems is implemented in such a way that the moments of the returns are calculated only once and then used in lower level optimization functions. The \code{set.portfolio.moments} function computes the first, second, third, and fourth moments depending on the objective function(s) in the \code{portfolio} object. For example, if the third and fourth moments do not need to be calculated for a given objective, then \code{set.portfolio.moments} will try to detect this and not compute those moments. Currently, \code{set.portfolio.moments} implements methods to compute moments based on sample estimates, higher moments from fitting a statistical factor model based on the work of Kris Boudt \citep{Boudt2014}, the Black Litterman model \citep{MeucciBL2008}, and the Fully Flexible Framework based on the work of Attilio Meucci \citep{Meucci2008}. <>= # Construct initial portfolio with basic constraints. @@ -186,7 +185,7 @@ opt.pasd @ -We now consider an example with a more complicated objective function. Our objective to maximize the fourth order expansion of the Constant Relative Risk Aversion (CRRA) expected utility function as in the Boudt paper and Martellini paper (NEED REFERENCE). +We now consider an example with a more complicated objective function. Our objective is to maximize the fourth order expansion of the Constant Relative Risk Aversion (CRRA) expected utility function as in \citep{Boudt2014}.
\begin{equation*} EU_{\lambda}(w) = - \frac{\lambda}{2} m_{(2)}(w) + @@ -247,4 +246,6 @@ TODO: add content to concluding paragraph +\bibliography{PA} + \end{document} Modified: pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.pdf =================================================================== (Binary files differ) From noreply at r-forge.r-project.org Mon Aug 11 09:50:40 2014 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Mon, 11 Aug 2014 09:50:40 +0200 (CEST) Subject: [Returnanalytics-commits] r3506 - in pkg/FactorAnalytics: . R man vignettes Message-ID: <20140811075040.7F3DF1864B5@r-forge.r-project.org> Author: pragnya Date: 2014-08-11 09:50:40 +0200 (Mon, 11 Aug 2014) New Revision: 3506 Added: pkg/FactorAnalytics/vignettes/fitTsfm_vignette.Rnw pkg/FactorAnalytics/vignettes/fitTsfm_vignette.pdf Removed: pkg/FactorAnalytics/vignettes/fitTsfm.pdf Modified: pkg/FactorAnalytics/DESCRIPTION pkg/FactorAnalytics/R/fitTsfm.R pkg/FactorAnalytics/R/fmEsDecomp.R pkg/FactorAnalytics/R/fmSdDecomp.R pkg/FactorAnalytics/R/fmVaRDecomp.R pkg/FactorAnalytics/R/plot.tsfm.r pkg/FactorAnalytics/R/summary.tsfm.r pkg/FactorAnalytics/man/fitTsfm.Rd pkg/FactorAnalytics/man/fmEsDecomp.Rd pkg/FactorAnalytics/man/fmSdDecomp.Rd pkg/FactorAnalytics/man/fmVaRDecomp.Rd pkg/FactorAnalytics/man/plot.tsfm.Rd pkg/FactorAnalytics/man/summary.tsfm.Rd Log: Added .Rnw file for fitTsfm vignette, minor edits to fitTsfm and related functions. 
Modified: pkg/FactorAnalytics/DESCRIPTION =================================================================== --- pkg/FactorAnalytics/DESCRIPTION 2014-08-11 02:39:12 UTC (rev 3505) +++ pkg/FactorAnalytics/DESCRIPTION 2014-08-11 07:50:40 UTC (rev 3506) @@ -28,7 +28,7 @@ ellipse Imports: corrplot Suggests: - testthat, quantmod + testthat, quantmod, knitr LazyLoad: yes LazyDataCompression: xz URL: http://r-forge.r-project.org/R/?group_id=579 Modified: pkg/FactorAnalytics/R/fitTsfm.R =================================================================== --- pkg/FactorAnalytics/R/fitTsfm.R 2014-08-11 02:39:12 UTC (rev 3505) +++ pkg/FactorAnalytics/R/fitTsfm.R 2014-08-11 07:50:40 UTC (rev 3506) @@ -39,7 +39,8 @@ #' of this up-market factor can be interpreted as the number of free put #' options. Similarly, "TM" follows Treynor-Mazuy (1966), to account for market #' timing with respect to volatility, and \code{market.sqd=(Rm-Rf)^2} is added -#' as a factor in the regression. Option "both" adds both of these factors. +#' as a factor in the regression. Option "both" (default) adds both of these +#' factors. #' #' \subsection{Data Processing}{ #' @@ -68,7 +69,7 @@ #' See details. Default is "OLS". #' @param variable.selection the variable selection method, one of "none", #' "stepwise","subsets","lars". See details. Default is "none". -#' @param mkt.timing one of "HM", "TM" or "both". Default is NULL. See Details. +#' @param mkt.timing one of "HM", "TM" or "both" (default). See Details. #' \code{mkt.name} is required if any of these options are to be implemented. #' @param control list of control parameters. The default is constructed by #' the function \code{\link{fitTsfm.control}}. See the documentation for @@ -104,7 +105,7 @@ #' Where N is the number of assets, K is the number of factors and T is the #' number of time periods. #' -#' @author Eric Zivot, Yi-An Chen and Sangeetha Srinivasan. +#' @author Eric Zivot, Sangeetha Srinivasan and Yi-An Chen. 
#' #' @references #' Christopherson, J. A., Carino, D. R., & Ferson, W. E. (2009). Portfolio @@ -140,7 +141,7 @@ #' data(managers) #' fit <- fitTsfm(asset.names=colnames(managers[,(1:6)]), #' factor.names=colnames(managers[,(7:9)]), -#' mkt.name="SP500 TR", mkt.timing="both", data=managers) +#' mkt.name="SP500 TR", mkt.timing="HM", data=managers) #' summary(fit) #' fitted(fit) #' # plot actual returns vs. fitted factor model returns for HAM1 @@ -149,6 +150,10 @@ #' # group plot; type selected from menu prompt; auto-looped for multiple plots #' # plot(fit) #' +#' # example: Market-timing factors with robust fit +#' fit <- fitTsfm(asset.names=colnames(managers[,(1:6)]), factor.names=NULL, +#' mkt.name="SP500 TR", data=managers, fit.method="Robust") +#' #' # example using "subsets" variable selection #' fit.sub <- fitTsfm(asset.names=colnames(managers[,(1:6)]), #' factor.names=colnames(managers[,(7:9)]), @@ -166,7 +171,7 @@ fitTsfm <- function(asset.names, factor.names, mkt.name=NULL, rf.name=NULL, data=data, fit.method=c("OLS","DLS","Robust"), variable.selection=c("none","stepwise","subsets","lars"), - mkt.timing=NULL, control=fitTsfm.control(...), ...) { + mkt.timing="both", control=fitTsfm.control(...), ...) 
{ # record the call as an element to be returned call <- match.call() @@ -181,10 +186,6 @@ stop("Invalid argument: variable.selection must be either 'none', 'stepwise','subsets' or 'lars'") } - if (xor(is.null(mkt.name), is.null(mkt.timing))) { - stop("Missing argument: 'mkt.name' and 'mkt.timing' are both required to - include market-timing factors.") - } # extract arguments to pass to different fit and variable selection functions decay <- control$decay @@ -220,12 +221,11 @@ # convert all asset and factor returns to excess return form if specified if (!is.null(rf.name)) { - cat("Excess returns were computed and used for all assets and factors.") dat.xts <- "[<-"(dat.xts,,vapply(dat.xts, function(x) x-data.xts[,rf.name], FUN.VALUE = numeric(nrow(dat.xts)))) } else { - cat("Note: fitTsfm was NOT asked to compute EXCESS returns. Input returns - data was used as it is for all factors and assets.") + warning("Excess returns were not computed. Returns data were used as input + for all factors and assets.") } # opt add mkt-timing factors: up.market=max(0,Rm-Rf), market.sqd=(Rm-Rf)^2 Modified: pkg/FactorAnalytics/R/fmEsDecomp.R =================================================================== --- pkg/FactorAnalytics/R/fmEsDecomp.R 2014-08-11 02:39:12 UTC (rev 3505) +++ pkg/FactorAnalytics/R/fmEsDecomp.R 2014-08-11 07:50:40 UTC (rev 3506) @@ -45,7 +45,7 @@ #' \item{pcES}{N x (K+1) matrix of percentage component contributions to VaR.} #' Where, \code{K} is the number of factors and N is the number of assets. #' -#' @author Eric Zviot, Yi-An Chen and Sangeetha Srinivasan +#' @author Eric Zviot, Sangeetha Srinivasan and Yi-An Chen #' #' @references #' Epperlein, E., & Smillie, A. (2006). 
Portfolio risk analysis Cracking VAR Modified: pkg/FactorAnalytics/R/fmSdDecomp.R =================================================================== --- pkg/FactorAnalytics/R/fmSdDecomp.R 2014-08-11 02:39:12 UTC (rev 3505) +++ pkg/FactorAnalytics/R/fmSdDecomp.R 2014-08-11 07:50:40 UTC (rev 3506) @@ -33,7 +33,7 @@ #' \item{pcSd}{N x (K+1) matrix of percentage component contributions to SD.} #' Where, \code{K} is the number of factors and N is the number of assets. #' -#' @author Eric Zivot, Yi-An Chen and Sangeetha Srinivasan +#' @author Eric Zivot, Sangeetha Srinivasan and Yi-An Chen #' #' @references #' Hallerback (2003). Decomposing Portfolio Value-at-Risk: A General Analysis. Modified: pkg/FactorAnalytics/R/fmVaRDecomp.R =================================================================== --- pkg/FactorAnalytics/R/fmVaRDecomp.R 2014-08-11 02:39:12 UTC (rev 3505) +++ pkg/FactorAnalytics/R/fmVaRDecomp.R 2014-08-11 07:50:40 UTC (rev 3506) @@ -43,7 +43,7 @@ #' \item{pcVaR}{N x (K+1) matrix of percentage component contributions to VaR.} #' Where, \code{K} is the number of factors and N is the number of assets. #' -#' @author Eric Zivot, Yi-An Chen and Sangeetha Srinivasan +#' @author Eric Zivot, Sangeetha Srinivasan and Yi-An Chen #' #' @references #' Hallerback (2003). Decomposing Portfolio Value-at-Risk: A General Analysis. Modified: pkg/FactorAnalytics/R/plot.tsfm.r =================================================================== --- pkg/FactorAnalytics/R/plot.tsfm.r 2014-08-11 02:39:12 UTC (rev 3505) +++ pkg/FactorAnalytics/R/plot.tsfm.r 2014-08-11 07:50:40 UTC (rev 3506) @@ -75,7 +75,7 @@ #' is \code{TRUE}. #' @param ... further arguments to be passed to other plotting functions. 
#' -#' @author Eric Zivot, Yi-An Chen and Sangeetha Srinivasan +#' @author Eric Zivot, Sangeetha Srinivasan and Yi-An Chen #' #' @seealso \code{\link{fitTsfm}} and \code{\link{summary.tsfm}} for details #' about the time series factor model fit, extractor functions and summary Modified: pkg/FactorAnalytics/R/summary.tsfm.r =================================================================== --- pkg/FactorAnalytics/R/summary.tsfm.r 2014-08-11 02:39:12 UTC (rev 3505) +++ pkg/FactorAnalytics/R/summary.tsfm.r 2014-08-11 07:50:40 UTC (rev 3506) @@ -37,7 +37,7 @@ #' format the coefficients, standard errors, etc. and additionally give #' significance stars if \code{signif.stars} is TRUE. #' -#' @author Yi-An Chen & Sangeetha Srinivasan. +#' @author Sangeetha Srinivasan & Yi-An Chen. #' #' @seealso \code{\link{fitTsfm}}, \code{\link[stats]{summary.lm}} #' Modified: pkg/FactorAnalytics/man/fitTsfm.Rd =================================================================== --- pkg/FactorAnalytics/man/fitTsfm.Rd 2014-08-11 02:39:12 UTC (rev 3505) +++ pkg/FactorAnalytics/man/fitTsfm.Rd 2014-08-11 07:50:40 UTC (rev 3506) @@ -9,7 +9,7 @@ fitTsfm(asset.names, factor.names, mkt.name = NULL, rf.name = NULL, data = data, fit.method = c("OLS", "DLS", "Robust"), variable.selection = c("none", "stepwise", "subsets", "lars"), - mkt.timing = NULL, control = fitTsfm.control(...), ...) + mkt.timing = "both", control = fitTsfm.control(...), ...) \method{coef}{tsfm}(object, ...) @@ -40,7 +40,7 @@ \item{variable.selection}{the variable selection method, one of "none", "stepwise","subsets","lars". See details. Default is "none".} -\item{mkt.timing}{one of "HM", "TM" or "both". Default is NULL. See Details. +\item{mkt.timing}{one of "HM", "TM" or "both" (default). See Details. \code{mkt.name} is required if any of these options are to be implemented.} \item{control}{list of control parameters. 
The default is constructed by @@ -122,7 +122,8 @@ of this up-market factor can be interpreted as the number of free put options. Similarly, "TM" follows Treynor-Mazuy (1966), to account for market timing with respect to volatility, and \code{market.sqd=(Rm-Rf)^2} is added -as a factor in the regression. Option "both" adds both of these factors. +as a factor in the regression. Option "both" (default) adds both of these +factors. \subsection{Data Processing}{ @@ -141,7 +142,7 @@ data(managers) fit <- fitTsfm(asset.names=colnames(managers[,(1:6)]), factor.names=colnames(managers[,(7:9)]), - mkt.name="SP500 TR", mkt.timing="both", data=managers) + mkt.name="SP500 TR", mkt.timing="HM", data=managers) summary(fit) fitted(fit) # plot actual returns vs. fitted factor model returns for HAM1 @@ -150,6 +151,10 @@ # group plot; type selected from menu prompt; auto-looped for multiple plots # plot(fit) +# example: Market-timing factors with robust fit +fit <- fitTsfm(asset.names=colnames(managers[,(1:6)]), factor.names=NULL, + mkt.name="SP500 TR", data=managers, fit.method="Robust") + # example using "subsets" variable selection fit.sub <- fitTsfm(asset.names=colnames(managers[,(1:6)]), factor.names=colnames(managers[,(7:9)]), @@ -163,7 +168,7 @@ variable.selection="lars", lars.criterion="cv") } \author{ -Eric Zivot, Yi-An Chen and Sangeetha Srinivasan. +Eric Zivot, Sangeetha Srinivasan and Yi-An Chen. } \references{ Christopherson, J. A., Carino, D. R., & Ferson, W. E. (2009). Portfolio Modified: pkg/FactorAnalytics/man/fmEsDecomp.Rd =================================================================== --- pkg/FactorAnalytics/man/fmEsDecomp.Rd 2014-08-11 02:39:12 UTC (rev 3505) +++ pkg/FactorAnalytics/man/fmEsDecomp.Rd 2014-08-11 07:50:40 UTC (rev 3506) @@ -73,7 +73,7 @@ ES.decomp$cES } \author{ -Eric Zviot, Yi-An Chen and Sangeetha Srinivasan +Eric Zviot, Sangeetha Srinivasan and Yi-An Chen } \references{ Epperlein, E., & Smillie, A. (2006). 
Portfolio risk analysis Cracking VAR Modified: pkg/FactorAnalytics/man/fmSdDecomp.Rd =================================================================== --- pkg/FactorAnalytics/man/fmSdDecomp.Rd 2014-08-11 02:39:12 UTC (rev 3505) +++ pkg/FactorAnalytics/man/fmSdDecomp.Rd 2014-08-11 07:50:40 UTC (rev 3506) @@ -59,7 +59,7 @@ decomp$pcSd } \author{ -Eric Zivot, Yi-An Chen and Sangeetha Srinivasan +Eric Zivot, Sangeetha Srinivasan and Yi-An Chen } \references{ Hallerback (2003). Decomposing Portfolio Value-at-Risk: A General Analysis. Modified: pkg/FactorAnalytics/man/fmVaRDecomp.Rd =================================================================== --- pkg/FactorAnalytics/man/fmVaRDecomp.Rd 2014-08-11 02:39:12 UTC (rev 3505) +++ pkg/FactorAnalytics/man/fmVaRDecomp.Rd 2014-08-11 07:50:40 UTC (rev 3506) @@ -71,7 +71,7 @@ VaR.decomp$cVaR } \author{ -Eric Zivot, Yi-An Chen and Sangeetha Srinivasan +Eric Zivot, Sangeetha Srinivasan and Yi-An Chen } \references{ Hallerback (2003). Decomposing Portfolio Value-at-Risk: A General Analysis. Modified: pkg/FactorAnalytics/man/plot.tsfm.Rd =================================================================== --- pkg/FactorAnalytics/man/plot.tsfm.Rd 2014-08-11 02:39:12 UTC (rev 3505) +++ pkg/FactorAnalytics/man/plot.tsfm.Rd 2014-08-11 07:50:40 UTC (rev 3506) @@ -117,7 +117,7 @@ # plot(fit.macro) } \author{ -Eric Zivot, Yi-An Chen and Sangeetha Srinivasan +Eric Zivot, Sangeetha Srinivasan and Yi-An Chen } \seealso{ \code{\link{fitTsfm}} and \code{\link{summary.tsfm}} for details Modified: pkg/FactorAnalytics/man/summary.tsfm.Rd =================================================================== --- pkg/FactorAnalytics/man/summary.tsfm.Rd 2014-08-11 02:39:12 UTC (rev 3505) +++ pkg/FactorAnalytics/man/summary.tsfm.Rd 2014-08-11 07:50:40 UTC (rev 3506) @@ -67,7 +67,7 @@ summary(fit$asset.fit[[1]]) } \author{ -Yi-An Chen & Sangeetha Srinivasan. +Sangeetha Srinivasan & Yi-An Chen. 
} \seealso{ \code{\link{fitTsfm}}, \code{\link[stats]{summary.lm}} Deleted: pkg/FactorAnalytics/vignettes/fitTsfm.pdf =================================================================== (Binary files differ) Added: pkg/FactorAnalytics/vignettes/fitTsfm_vignette.Rnw =================================================================== --- pkg/FactorAnalytics/vignettes/fitTsfm_vignette.Rnw (rev 0) +++ pkg/FactorAnalytics/vignettes/fitTsfm_vignette.Rnw 2014-08-11 07:50:40 UTC (rev 3506) @@ -0,0 +1,455 @@ +\documentclass[a4paper]{article} +\usepackage{Rd} +\usepackage{amsmath} +\usepackage[round]{natbib} +\usepackage{bm} +\usepackage{verbatim} +\usepackage[latin1]{inputenc} +\bibliographystyle{abbrvnat} +\usepackage{url} + +\let\proglang=\textsf +\renewcommand{\topfraction}{0.85} +\renewcommand{\textfraction}{0.1} +\renewcommand{\baselinestretch}{1.5} +\setlength{\textwidth}{15cm} \setlength{\textheight}{22cm} \topmargin-1cm \evensidemargin0.5cm \oddsidemargin0.5cm + +\usepackage{lmodern} +\usepackage[T1]{fontenc} + +% \VignetteIndexEntry{Fitting a time series factor model with 'fitTsfm' in factorAnalytics} +%\VignetteEngine{knitr::knitr} + +\begin{document} + +\title{Fitting a Time series Factor Model with fitTsfm in factorAnalytics} +\author{Sangeetha Srinivasan} +\maketitle + +\begin{abstract} +The purpose of this vignette is to demonstrate the use of \code{fitTsfm} and related control, analysis and plot functions in the \code{factorAnalytics} package. +\end{abstract} + +\tableofcontents +\bigskip + +\section{Overview} + +\subsection{Load Package} + +The latest version of the \verb"factorAnalytics" package can be downloaded from R-forge through the following command: +\begin{verbatim} +install.packages("factorAnalytics", repos="http://R-Forge.R-project.org") +\end{verbatim} +Load the package and it's dependencies. 
+<>= +library(factorAnalytics) +@ + +\subsection{Summary of related functions} +Here's a list of the functions and methods demonstrated in this vignette: + +\begin{itemize} + +\item \verb"fitTsfm(asset.names, factor.names, data, fit.method, variable.selection)": Fits a time series (a.k.a. macroeconomic) factor model for one or more asset returns or excess returns using time series regression. Ordinary least squares-OLS, discounted least squares-DLS and robust regression fitting are possible. Variable selection methods include Step-wise, Subsets and Lars. An object of class \code{"tsfm"} containing the fitted objects, model coefficients, R-squared and residual volatility are returned. + +\item \verb"coef(object)": Extracts the coefficient matrix (intercept and factor betas) for all assets fit by the \code{tsfm} object. + +\item \verb"fitted(object)": Returns an \code{xts} data object of fitted asset returns from the factor model for all assets. + +\item \verb"residuals(object)": Returns an \code{xts} data object of residuals from the fitted factor model for all assets. + +\item \verb"fmCov(object, use)": Returns the \code{N x N} symmetric covariance matrix for asset returns based on the fitted factor model. \code{use} specifies how missing values are to be handled. + +\item \verb"fmSdDecomp(object)": Returns a list containing the standard deviation of asset returns based on the fitted factor model and the marginal, component and percentage component factor contributions estimated from the given sample. \code{use} specifies how missing values are to be handled. + +\item \verb"fmVaRDecomp(object, p, method, invert)": Returns a list containing the value-at-risk for asset returns based on the fitted factor model and the marginal, component and percentage component factor contributions estimated from the given sample. \code{p} and \code{method} specify the confidence level and method to calculate VaR. \code{invert} allows the VaR value to be expressed as a loss (vs. 
fund's return/profit). + +\item \verb"fmEsDecomp(object)": Returns a list containing the expected shortfall for asset returns based on the fitted factor model and the marginal, component and percentage component factor contributions estimated from the given sample. \code{p} and \code{method} specify the confidence level and method to calculate VaR. \code{invert} allows the VaR value to be expressed as a loss (vs. fund's return/profit). + +\item \verb"paFm(fit)": Decompose total returns into returns attributed to factors and specific returns. An object of class \code{"pafm"} is returned, with methods for generic functions \code{plot}, \code{summary} and \code{print}. + +\item \verb"plot(x)": The \code{plot} method for class \code{"tsfm"} can be used for plotting factor model characteristics of an individual asset or a group of assets (default). The type of individual/group plot can be specified or chosen from a menu prompt (default if type not specified). Further the menu reappears (default) to enable multiple plots for the same asset(s) unless looping is disabled by setting \code{loop=FALSE}. + +\item \verb"predict(object, newdata)": The \code{predict} method for class \code{"tsfm"} returns a vector or matrix of predicted values for a new data sample or simulated values. + +\item \verb"summary(object, se.type)": The \code{summary} method for class \code{"tsfm"} returns an object of class \code{"summary.tsfm"} containing the summaries of the fitted \code{lm}, \code{lmRob} or \code{lars} objects and the chosen type (HC/HAC) of standard errors and t-statistics to display. Printing the factor model summary object outputs the call, coefficients (with standard errors and t-statistics), r-squared and residual volatility (under the homo-skedasticity assumption) for all assets. + +\end{itemize} + +\subsection{Data} + +The following examples primarily use the \code{managers} dataset from the \verb"PerformanceAnalytics" package. 
It's an \code{xts} data object with 132 observations on 10 variables; frequency is monthly. +<<>>= +data(managers) +colnames(managers) +range(index(managers)) +@ + +In the examples below, the monthly returns for the six hypothetical asset managers (HAM1 through HAM6) will be the explained asset returns. Columns 7 through 9, composed of the EDHEC Long-Short Equity hedge fund index, the S\&P 500 total returns, and the total return series for the US Treasury 10-year bond will serve as explanatory factors. The last column (US 3-month T-bill) can be considered as the risk free rate. The series have unequal histories in this sample and \code{fitTsfm} removes asset-wise incomplete cases (asset's return data combined with respective factors' return data) before fitting a factor model. +<<>>= +asset.names <- colnames(managers[,1:6]) +factor.names <- colnames(managers[,7:9]) +mkt.name <- "SP500 TR" +rf.name <- "US 3m TR" +@ + +Typically, factor models are fit using excess returns. If the asset and factor returns are not already in excess return form, \code{rf.name} can be specified to convert returns into excess returns. Similarly, market returns can be specified to add market-timing factors to the factor model. + +The \verb"CommonFactors" dataset in the \verb"factorAnalytics" package also provides a collection of common factors as both monthly (\verb"factors.M") and quarterly (\verb"factors.Q") time series. +<<>>= +data(CommonFactors) +names(factors.Q) +range(index(factors.Q)) +@ + +\section{Fit a time series factor model} + +In a time series or macroeconomic factor model, observable economic time series such as industrial production growth rate, interest rates, market returns and inflation are used as common factors that contribute to asset returns. 
For example, the famous single factor model by \citet{sharpe1970portfolio} uses the market excess return as the common factor (captures economy-wide or market risk) for all assets and the unexplained returns in the error term represents the non-market firm specific risk. Whereas, \citet{chen1986economic} find that surprise inflation, the spread between long and short-term interest rates and between high and low grade bonds are significantly priced, while the market portfolio, aggregate consumption risk and oil price risk are not priced separately. + +\subsection{Excess returns \& Market Timing factors} + +Let's take a look at the arguments for \code{fitTsfm}. The default regression fitting method is Ordinary Least Squares (OLS) and all factors are included in the model for all assets; no variable selection method is used. If \code{rf.name} is not specified by the user, perhaps because returns are already in excess return form, all returns are used as input by default. +<>= +args(fitTsfm) +@ + +Here's an implementation of Sharpe's single index model for the 6 hypothetical assets described earlier. Since \code{rf.name} was included, excess returns were computed for all asset and factor returns before model fitting. The component \code{asset.fit} contains a list of fitted objects, one for each asset. Each object is of class \code{lm} if \code{fit.method="OLS" or "DLS"}, class \code{lmRob} if the \code{fit.method="Robust"}, or class \code{lars} if \code{variable.selection="lars"}. The different fit and variable selection methods are described in the next section. +<<>>= +fit.Sharpe <- fitTsfm(asset.names=asset.names, factor.names="SP500 TR", + rf.name="US 3m TR", data=managers) +names(fit.Sharpe) +fit.Sharpe +@ + +In the following example, market timing factors are included in addition to the 3 other factors available in the \code{managers} dataset. Market timing accounts for the price movement of the general stock market relative to fixed income securities. 
"HM" follows \citet{henriksson1981market} and $up.market = max(0, R_m-R_f)$, is added as a factor in the regression. Similarly, "TM" follows \citet{treynor1966can}, to account for market timing with respect to volatility, and $market.sqd = (R_m-R_f)^2$ is added as a factor. Option "both" (default) adds both of these factors. +<<>>= +# adding up-market timing factor ("HM") to the model +fit1 <- fitTsfm(asset.names=asset.names, factor.names=factor.names, + mkt.name="SP500 TR", mkt.timing="HM", data=managers) +fit1$beta +fit1$r2 +fit1$resid.sd +@ + +\subsection{Fit methods \& Variable Selection} + +Alternatives to "OLS" regression are \verb"robust regression" (resistant to outliers and heteroskedasticity) and \verb"exponentially discounted weights" (accounts for time variation in coefficients). These can be selected via the argument \code{fit.method} as shown below. + +<<>>= +fit2 <- fitTsfm(asset.names=asset.names, factor.names=factor.names, + mkt.name="SP500 TR", data=managers, fit.method="Robust") +fit2$beta +fit2$r2 +fit2$resid.sd +@ + +Notice the lower R-squared values and smaller residual volatilities with robust regression. Figures 1 and 2 give a graphical comparison of the fitted returns for asset "HAM3" and residual volatilities from the factor model fits. Figure 1 depicts the smaller influence that the volatility of Jan 2000 has on the robust regression. Plot options are described in detail in section 4. 
+ +<>= +par(mfrow=c(2,1)) +plot(fit1, plot.single=TRUE, which.plot.single=1, asset.name="HAM3", loop=FALSE) +plot(fit2, plot.single=TRUE, which.plot.single=1, asset.name="HAM3", loop=FALSE) +@ + +<>= +par(mfrow=c(1,2)) +plot(fit1, which.plot.group=5, loop=FALSE, xlim=c(0,0.043)) +plot(fit2, which.plot.group=5, loop=FALSE, xlim=c(0,0.043)) +@ + +By adding more factors in fit1 and fit2, though the R-squared values have improved (when compared to Sharpe's single index model), one might prefer to employ variable selection methods such as \verb"stepwise", \verb"subsets" or \verb"lars" to avoid over-fitting. The method can be selected via the \code{variable.selection} argument. The default \verb"none", uses all the factors and performs no variable selection. \verb"stepwise" performs traditional forward or backward stepwise OLS regression, starting from an initial (given) set of factors and adds factors only if the regression fit, as measured by the Bayesian Information Criterion (BIC) or Akaike Information Criterion (AIC), improves. \verb"subsets" enables subsets selection using \code{regsubsets}; chooses the best performing subset of any given size. \verb"lars" corresponds to least angle regression using \code{lars} with variants "lasso", "lar", "forward.stagewise" or "stepwise". + +Remarks: +\begin{itemize} +\item Variable selection methods \verb"stepwise" and \verb"subsets" can be combined with any of the fit methods, "OLS", "DLS" or "Robust". +\item If variable selection method selected is \verb"lars", \code{fit.method} will be ignored. +\item Refer to the section on \code{fitTsfm.control} for more details on the control arguments to the different variable selection methods. +\end{itemize} + +The next example uses the \verb"lars" variable selection method. The default type and criterion used are \verb"lasso" and the \verb"Cp" statistic. The \verb"subsets" variable selection method is demonstrated next for comparison using the same set of factors. 
However, the best subset of size 4 for each asset is chosen. Figures 3 and 4 display the factor betas from the two fits. +<<>>= +fit.lars <- fitTsfm(asset.names=colnames(managers[,(1:6)]), + factor.names=colnames(managers[,(7:9)]), data=managers, + rf.name="US 3m TR", mkt.name="SP500 TR") +fit.lars$beta +fit.lars$r2 + +fit.sub <- fitTsfm(asset.names=colnames(managers[,(1:6)]), + factor.names=colnames(managers[,(7:9)]), data=managers, + rf.name="US 3m TR", mkt.name="SP500 TR", + variable.selection="subsets", subset.size=4) +fit.sub$beta +fit.sub$r2 +@ + +<>= +plot(fit.lars, which.plot.group=2, loop=FALSE) +@ + +<>= +plot(fit.sub, which.plot.group=2, loop=FALSE) +@ +\newpage + +\subsection{fitTsfm.control} + +Since \code{fitTsfm} calls many different regression fitting and variable selection methods, it made sense to collect all the optional controls for these functions and process them via \code{fitTsfm.control}. This function is meant to be used internally by \code{fitTsfm} when arguments are passed to it via the ellipsis. The use of control parameters was demonstrated with subset.size in the fit.sub example earlier. + +<>= +args(fitTsfm.control) +@ + +Here's an ordered list of control parameters passed by \code{fitTsfm} matched with their respective functions for easy reference. See the corresponding help files for more details on each parameter. +\begin{itemize} +\item \verb"lm": "weights","model","x","y","qr" +\item \verb"lmRob": "weights","model","x","y","nrep" +\item \verb"step": "scope","scale","direction","trace","steps","k" +\item \verb"regsubsets": "weights","nbest","nvmax","force.in","force.out","method","really.big" +\item \verb"lars": "type","normalize","eps","max.steps","trace" +\item \verb"cv.lars": "K","type","normalize","eps","max.steps","trace" +\end{itemize} + +There are 3 other important arguments passed to \code{fitTsfm.control} that determine the type of factor model fit chosen. 
+\begin{itemize} +\item \verb"decay": Determines the decay factor for \code{fit.method="DLS"}, which performs exponentially weighted least squares, with weights adding to unity. +\item \verb"subset.size": Number of factors required in the factor model when performing \verb"subsets" selection. This might be meaningful when looking for the best model of a certain size (perhaps for parsimony, perhaps to compare with a different model of the same size, perhaps to avoid over-fitting/ data dredging etc.) +\item \verb"lars.criterion": An option (one of "Cp" or "cv") to assess model selection for the \code{"lars"} variable selection method. "Cp" is Mallow's Cp statistic and "cv" is K-fold cross-validated mean squared prediction error. +\end{itemize} + +\subsection{Summary, Predict, Coefficients, Fitted values and Residuals} + +<<>>= +methods(class="tsfm") +@ + +Many useful generic accessor functions are available for \code{tsfm} fit objects. Here are some examples using the time series factor model fit by \code{fit.sub} earlier. \code{coef()} returns a matrix of estimated model coefficients including the intercept. \code{fitted()} returns an xts data object of the component of asset returns explained by the factor model. \code{residuals()} returns an xts data object of the part of asset returns not explained by the factor model. + +\code{summary()} prints standard errors and t-statistics for all estimated coefficients in addition to R-squared values and residual volatilities. Heteroskedasticity and auto-correlation consistent estimates and standard errors are available via the \code{se.type} argument. The returned "summary.tsfm" object also contains the summary objects returned by \code{lm}, \code{lm.Rob} or \code{lars}, which usually give more detailed statistics from the regression fit. \code{predict} uses the fitted factor model to estimate asset returns given a set of new or simulated factor return data. 
+ +<<>>= +coef(fit.sub) +tail(fitted(fit.sub)) +tail(residuals(fit.sub)) + +# comparing data, fitted and residual values for HAM1 +tail(merge(fit.sub$data[,1], fitted(fit.sub)[,1], residuals(fit.sub)[,1])) + +# printed summary for the time series factor model +summary(fit.sub, se.type="HAC") + +@ + +\section{Factor Model Covariance \& Risk Decomposition} + +\subsection{Factor model covariance} + +Following \citet{zivot2006modeling}, $R_(i, t)$, the return on asset $i$ ($i = 1, ..., N$) at time $t$ ($t = 1, ..., T$), is fitted with a factor model of the form, +\begin{equation} +R_{i,t} = \alpha_i + \bm\beta_i \: \mathbf{f_t} + \epsilon_{i,t} +\end{equation} +where, $\alpha_i$ is the intercept, $\mathbf{f_t}$ is a $K \times 1$ vector of factor returns at time $t$, $\bm\beta_i$ is a $1 \times K$ vector of factor exposures for asset $i$ and the error terms $\epsilon_{i,t}$ are serially uncorrelated across time and contemporaneously uncorrelated across assets so that $\epsilon_{i,t} \sim iid(0, \sigma_i^2)$. Thus, the variance of asset $i$'s return is given by +\begin{equation} +var(R_{i,t}) = \bm\beta_i\: var(\mathbf{f_t})\: \bm\beta_i' + \sigma_i^2 +\end{equation} + +And, the $N \times N$ covariance matrix of asset returns is +\begin{equation} +var(\mathbf{R}) = \bm\Omega = \mathbf{B}\: var(\mathbf{F})\: \mathbf{B}' + \mathbf{D} +\end{equation} +where, $R$ is the $N \times T$ matrix of asset returns, $B$ is the $N \times K$ matrix of factor betas, $\mathbf{F}$ is a $K \times T$ matrix of factor returns and $D$ is a diagonal matrix with $\sigma_i^2$ along the diagonal. + +\code{fmCov()} computes the factor model covariance from a fitted factor model and uses "pairwise.complete.obs" (default) to handle NAs. Other options for handling missing observations include "everything", "all.obs", "complete.obs" and "na.or.complete". 
+ +<>= +fmCov(fit.sub) +# return correlation plot; Angular Order of the Eigenvectors +plot(fit.sub, which.plot.group=7, loop=FALSE, order="AOE", method="ellipse", + tl.pos = "d") +@ + +\subsection{Standard deviation decomposition} + +Given the factor model in equation 1, the standard deviation of the asset $i$'s return can be decomposed as follows (based on \citet{meucci2007risk}): +\begin{align} +R_{i,t} &= \alpha_i + \bm\beta_i \: \mathbf{f_t} + \epsilon_{i,t} \\ +&= \bm\beta_i^* \: \mathbf{f_t^*} +\end{align} +where, $\bm\beta_i^* = (\bm\beta_i \: \sigma_i)$ and $\mathbf{f_t^*} = [\mathbf{f_t'} \: z_t]'$, with $z_t \sim iid(0, 1)$. + +By Euler's theorem, the standard deviation of asset $i$'s return is: +\begin{align} +Sd.fm_i = \sum_{k=1}^{K+1} cSd_{i,k} = \sum_{k=1}^{K+1} \beta^*_{i,k} \: mSd_{i,k} +\end{align} [TRUNCATED] To get the complete diff run: svnlook diff /svnroot/returnanalytics -r 3506 From noreply at r-forge.r-project.org Tue Aug 12 03:43:15 2014 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Tue, 12 Aug 2014 03:43:15 +0200 (CEST) Subject: [Returnanalytics-commits] r3507 - pkg/PortfolioAnalytics/vignettes Message-ID: <20140812014315.C8AE01874DA@r-forge.r-project.org> Author: rossbennett34 Date: 2014-08-12 03:43:13 +0200 (Tue, 12 Aug 2014) New Revision: 3507 Modified: pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.Rnw pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.pdf Log: revising content for custom moment and objective functions vignette Modified: pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.Rnw =================================================================== --- pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.Rnw 2014-08-11 07:50:40 UTC (rev 3506) +++ pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.Rnw 2014-08-12 01:43:13 UTC (rev 3507) @@ -44,7 +44,7 @@ \maketitle \begin{abstract} -The purpose of this vignette is to demonstrate how to write and use 
custom moment functions and custom objective functions. +The purpose of this vignette is to demonstrate how to write and use custom moment functions and custom objective functions to solve complex optimization problems. \end{abstract} \tableofcontents @@ -58,7 +58,7 @@ @ \subsection{Data} -The edhec data set from the PerformanceAnalytics package will be used as example data. +The edhec data set from the PerformanceAnalytics package will be used as data for the following examples. <<>>= data(edhec) @@ -107,7 +107,7 @@ \item[\code{\$m4}]{ fourth moment; cokurtosis matrix} \end{description} -The lower level optimization functions expect an object with the structure described above. List elements with the names \code{mu}, \code{sigma}, \code{m3}, and\code{m4} are matched automatically and handled in an efficient manner. +The lower level optimization functions expect an object with the structure described above. List elements with the names \code{mu}, \code{sigma}, \code{m3}, and \code{m4} are matched automatically and handled in an efficient manner. Here we define a function to estimate the covariance matrix using a robust method. <<>>= @@ -137,7 +137,7 @@ @ \section{Custom Objective Functions} -A key feature of \verb"PortfolioAnalytics" is that the name for an objective can be any valid \verb"R" function. \verb"PortfolioAnalytics" was designed to be flexible and modular, and custom objective functions are a key example of this. +A key feature of \verb"PortfolioAnalytics" is that the name for an objective can be any valid \R function. \verb"PortfolioAnalytics" was designed to be flexible and modular, and custom objective functions are a key example of this. Here we define a very simple function to compute annualized standard deviation for monthly data that we will use as an objective function. <<>>= @@ -193,7 +193,7 @@ \frac{\lambda (\lambda + 1) (\lambda + 2)}{24} m_{(4)}(w) \end{equation*} -Define a function to compute CRRA estimate. 
Note how we define the function to use \code{sigma}, \code{m3}, and \code{m4} as arguments that will use the output from a custom moment function. We could compute the moments inside this function, but re-computing the moments thousands of times (i.e. at each iteration) can be very compute intensive. +Here we define a function to compute CRRA estimate. Note how we define the function to use \code{sigma}, \code{m3}, and \code{m4} as arguments that will use the output from a custom moment function. We could compute the moments inside this function, but re-computing the moments potentially tens of thousands of times (i.e. at each iteration) can be very compute intensive. <<>>= CRRA <- function(R, weights, lambda, sigma, m3, m4){ @@ -242,10 +242,8 @@ opt.crra @ -The modular framework of \verb"PortfolioAnalytics" allows one to easily define custom moment functions and objective functions as valid \R functions. +\verb"PortfolioAnalytics" supports several methods to estimate moments as well as user defined moment functions. The name of the objective must be the name of a valid \R function and \verb"PortfolioAnalytics" integrates well with \kbd{PerformanceAnalytics} to utilize several of the risk measure functions such as \code{StdDev} and \code{ES}. Because an objective function can be a valid \R function, user defined objective functions are supported. The modular framework of \verb"PortfolioAnalytics" allows one to easily define custom moment functions and objective functions as valid \R functions to solve complex and specialized objective functions. 
-TODO: add content to concluding paragraph - \bibliography{PA} \end{document} Modified: pkg/PortfolioAnalytics/vignettes/custom_moments_objectives.pdf =================================================================== (Binary files differ) From noreply at r-forge.r-project.org Wed Aug 13 22:16:30 2014 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 13 Aug 2014 22:16:30 +0200 (CEST) Subject: [Returnanalytics-commits] r3508 - pkg/PerformanceAnalytics/R Message-ID: <20140813201630.5C667186250@r-forge.r-project.org> Author: kylebalkissoon Date: 2014-08-13 22:16:30 +0200 (Wed, 13 Aug 2014) New Revision: 3508 Modified: pkg/PerformanceAnalytics/R/Return.portfolio.R Log: Modified: pkg/PerformanceAnalytics/R/Return.portfolio.R =================================================================== --- pkg/PerformanceAnalytics/R/Return.portfolio.R 2014-08-12 01:43:13 UTC (rev 3507) +++ pkg/PerformanceAnalytics/R/Return.portfolio.R 2014-08-13 20:16:30 UTC (rev 3508) @@ -1,418 +1,418 @@ -#' Calculate weighted returns for a portfolio of assets -#' -#' Using a time series of returns and any regular or irregular time series of weights -#' for each asset, this function calculates the returns of a portfolio with the same -#' periodicity of the returns data. -#' -#' By default, this function calculates the time series of portfolio returns given asset -#' returns and weights. In verbose mode, the function returns a list of intermediary -#' calculations that users may find helpful, including both asset contribution and -#' asset value through time. -#' -#' When asset return and weights are matched by period, contribution is simply the -#' weighted return of the asset. c_i = w_i * R_i Contributions are summable across the -#' portfolio to calculate the total portfolio return. -#' -#' Contribution cannot be aggregated through time. For example, say we have an equal -#' weighted portfolio of five assets with monthly returns. 
The geometric return of the -#' portfolio over several months won't match any aggregation of the individual -#' contributions of the assets, particularly if any rebalancing was done during the -#' period. -#' -#' To aggregate contributions through time such that they are summable to the geometric -#' returns of the portfolio, the calculation must track changes in the notional value of -#' the assets and portfolio. For example, contribution during a quarter will be -#' calculated as the change in value of the position through those three months, divided -#' by the original value of the portfolio. Approaching it this way makes the -#' calculation robust to weight changes as well. c_pi = V_(t-p)i - V_t)/V_ti -#' -#' If the user does not specify weights, an equal weight portfolio is assumed. -#' Alternatively, a vector or single-row matrix of weights that matches the length -#' of the asset columns may be specified. In either case, if no rebalancing period is -#' specified, the weights will be applied at the beginning of the asset time series -#' and no further rebalancing will take place. If a rebalancing period is specified, -#' the portfolio will be rebalanced to the starting weights at the interval specified. -#' -#' Return.rebalancing will work only on daily or lower frequencies. If you are -#' rebalancing intraday, you should be using a trades/prices framework like -#' {\link{\code{blotter}}}, not a weights/returns framework. -#' -#' Irregular rebalancing can be done by specifying a time series of weights. The -#' function uses the date index of the weights for xts-style subsetting of rebalancing -#' periods. -#' -#' Weights specified for rebalancing should be thought of as "end-of-period" weights. -#' Rebalancing periods can be thought of as taking effect immediately after the close -#' of the bar. So, a March 31 rebalancing date will actually be in effect for April 1. -#' A December 31 rebalancing date will be in effect on Jan 1, and so forth. 
This -#' convention was chosen because it fits with common usage, and because it simplifies -#' xts Date subsetting via endpoints. -#' -#' In verbose mode, the function returns a list of data and intermediary calculations. -#' \itemize{ -#' \item{\code{returns}:}{ The portfolio returns.} -#' \item{\code{contribution}:}{ The per period contribution to portfolio -#' return of each asset. Contribution is calculated as BOP weight times the -#' period's return divided by BOP value. Period contributions are summed -#' across the individual assets to calculate portfolio return} -#' \item{\code{BOP.Weight}:}{ Beginning of Period (BOP) Weight for each -#' asset. An asset's BOP weight is calculated using the input weights -#' (or assumed weights, see below) and rebalancing parameters given. The next -#' period's BOP weight is either the EOP weights from the prior period or -#' input weights given on a rebalance period.} -#' \item{\code{EOP.Weight:}}{ End of Period (BOP) Weight for each asset. -#' An asset's EOP weight is the sum of the asset's BOP weight and -#' contribution for the period divided by the sum of the contributions and -#' initial weights for the portfolio.} -#' \item{\code{BOP.Value:}}{ BOP Value for each asset. The BOP value for each -#' asset is the asset's EOP value from the prior period, unless there is a -#' rebalance event. If there is a rebalance event, the BOP value of the -#' asset is the rebalance weight times the EOP value of the portfolio. That -#' effectively provides a zero-transaction cost change to the position values -#' as of that date to reflect the rebalance. Note that the sum of the BOP -#' values of the assets is the same as the prior period's EOP portfolio value.} -#' \item{\code{EOP.Value:}}{ EOP Value for each asset. The EOP value is for -#' each asset is calculated as (1 + asset return) times the asset's BOP value. 
-#' The EOP portfolio value is the sum of EOP value across assets.} -#' } -#' -#' To calculate BOP and EOP position value, we create an index for each position. The -#' sum of that value across assets represents an indexed value of the total portfolio. -#' Note that BOP and EOP position values are only computed when \code{geometric = TRUE}. -#' -#' From the value calculations, we can calculate different aggregations through time -#' for the asset contributions. Those are calculated as the EOP asset value less the -#' BOP asset value; that quantity is divided by the BOP portfolio value. -#' Across assets, those will sum to equal the geometric chained returns of the -#' portfolio for that same time period. The function does not do this directly, however. -#' -#' @aliases Return.portfolio Return.rebalancing -#' @param R An xts, vector, matrix, data frame, timeSeries or zoo object of -#' asset returns -#' @param weights A time series or single-row matrix/vector containing asset -#' weights, as decimal percentages, treated as beginning of period weights. -#' See Details below. -#' @param wealth.index TRUE/FALSE whether to return a wealth index. Default FALSE -#' @param contribution if contribution is TRUE, add the weighted return -#' contributed by the asset in a given period. Default FALSE -#' @param geometric utilize geometric chaining (TRUE) or simple/arithmetic (FALSE) -#' to aggregate returns. Default TRUE. -#' @param rebalance_on Default "none"; alternatively "daily" "weekly" "monthly" "annual" to specify calendar-period rebalancing supported by \code{endpoints}. -#' @param value The beginning of period total portfolio value. This is used for calculating position value. -#' @param verbose If verbose is TRUE, return a list of intermediary calculations. -#' See Details below. -#' @param \dots any other passthru parameters. Not currently used. 
-#' @return returns a time series of returns weighted by the \code{weights} -#' parameter, or a list that includes intermediate calculations -#' @author Peter Carl, Ross Bennett, Brian Peterson -#' @seealso \code{\link{Return.calculate}} \code{\link{xts::endpoints}} \cr -#' @references Bacon, C. \emph{Practical Portfolio Performance Measurement and -#' Attribution}. Wiley. 2004. Chapter 2\cr -#' @keywords ts multivariate distribution models -#' @examples -#' -#' data(edhec) -#' Return.rebalancing(edhec["1997",1:5], rebalance_on="quarterly") # returns time series -#' Return.rebalancing(edhec["1997",1:5], rebalance_on="quarterly", verbose=TRUE) # returns list -#' # with a weights object -#' data(weights) # rebalance at the beginning of the year to various weights through time -#' chart.StackedBar(weights) -#' x <- Return.rebalancing(edhec["2000::",1:11], weights=weights,verbose=TRUE) -#' chart.CumReturns(x$returns) -#' chart.StackedBar(x$BOP.Weight) -#' chart.StackedBar(x$BOP.Value) -#' -#' @rdname Return.portfolio -#' @export Return.portfolio -#' @export Return.rebalancing -Return.portfolio <- Return.rebalancing <- function(R, - weights=NULL, - wealth.index=FALSE, - contribution=FALSE, - geometric=TRUE, - rebalance_on=c(NA, 'years', 'quarters', 'months', 'weeks', 'days'), - value=1, - verbose=FALSE, - ...){ - R = checkData(R, method="xts") - rebalance_on = rebalance_on[1] - - # find the right unit to subtract from the first return date to create a start date - freq = periodicity(R) - switch(freq$scale, - seconds = { stop("Use a returns series of daily frequency or higher.") }, - minute = { stop("Use a returns series of daily frequency or higher.") }, - hourly = { stop("Use a returns series of daily frequency or higher.") }, - daily = { time_unit = "day" }, - weekly = { time_unit = "week" }, - monthly = { time_unit= "month" }, - quarterly = { time_unit = "quarter" }, - yearly = { time_unit = "year"} - ) - - # calculates the end of the prior period - start_date = 
seq(as.Date(index(R)[1]), length = 2, by = paste("-1", time_unit))[2] - - if(is.null(weights)){ - # generate equal weight vector for return columns - weights = rep(1 / NCOL(R), NCOL(R)) - } - if(is.vector(weights)) { # weights are a vector - if(is.na(rebalance_on)) { # and endpoints are not specified - # then use the weights only at the beginning of the returns series, without rebalancing - weights = xts(matrix(weights, nrow=1), order.by=as.Date(start_date)) - } else { # and endpoints are specified - # generate a time series of the given weights at the endpoints - weight_dates = c(as.Date(start_date), index(R[endpoints(R, on=rebalance_on)])) - weights = xts(matrix(rep(weights, length(weight_dates)), ncol=NCOL(R), byrow=TRUE), order.by=as.Date(weight_dates)) - } - colnames(weights) = colnames(R) - } else { # check the beginning_weights object for errors - # check that weights are given in a form that is probably a time series - weights = checkData(weights, method="xts") - # make sure that frequency(weights) NCOL(weights)){ - R = R[, 1:NCOL(weights)] - warning("number of assets in beginning_weights is less than number of columns in returns, so subsetting returns.") - } else { - stop("number of assets is greater than number of columns in returns object") - } - } - } # we should have good weights objects at this point - - if(as.Date(last(index(R))) < (as.Date(index(weights[1,]))+1)){ - stop(paste('last date in series',as.Date(last(index(R))),'occurs before beginning of first rebalancing period',as.Date(first(index(weights)))+1)) - } - - # Subset the R object if the first rebalance date is after the first date - # in the return series - if(as.Date(index(weights[1,])) > as.Date(first(index(R)))) { - R <- R[paste0(as.Date(index(weights[1,]))+1, "/")] - } - - - if(geometric){ - out = Return.portfolio.geometric(R=R, - weights=weights, - wealth.index=wealth.index, - contribution=contribution, - rebalance_on=rebalance_on, - value=value, - verbose=verbose, - ...=...) 
- } else { - out = Return.portfolio.arithmetic(R=R, - weights=weights, - wealth.index=wealth.index, - contribution=contribution, - rebalance_on=rebalance_on, - verbose=verbose, - ...=...) - } - return(out) -} - -Return.portfolio.arithmetic <- function(R, - weights=NULL, - wealth.index=FALSE, - contribution=FALSE, - rebalance_on=c(NA, 'years', 'quarters', 'months', 'weeks', 'days'), - verbose=FALSE, - ...) -{ - # bop = beginning of period - # eop = end of period - # Initialize objects - bop_weights = matrix(0, NROW(R), NCOL(R)) - colnames(bop_weights) = colnames(R) - eop_weights = period_contrib = bop_weights - ret = vector("numeric", NROW(R)) - - # initialize counter - k = 1 - for(i in 1:NROW(weights)) { - # identify rebalance from and to dates (weights[i,], weights[i+1]) and - # subset the R(eturns) object - from = as.Date(index(weights[i,]))+1 - if (i == nrow(weights)){ - to = as.Date(index(last(R))) # this is correct - } else { - to = as.Date(index(weights[(i+1),])) - } - returns = R[paste0(from, "::", to)] - - # Only enter the loop if we have a valid returns object - if(nrow(returns) >= 1){ - # inner loop counter - jj = 1 - for(j in 1:nrow(returns)){ - # For arithmetic returns, the beginning of period weights are always - # equal to the rebalance weights - bop_weights[k,] = weights[i,] - period_contrib[k,] = coredata(returns[j,]) * bop_weights[k,] - eop_weights[k,] = (period_contrib[k,] + bop_weights[k,]) / sum(c(period_contrib[k,], bop_weights[k,])) - ret[k] = sum(period_contrib[k,]) - - # increment the counters - k = k + 1 - } - } - } - R.idx = index(R) - ret = xts(ret, R.idx) - colnames(ret) = "portfolio.returns" - - if(wealth.index){ - result = cumsum(ret) + 1 - colnames(result) = "portfolio.wealthindex" - } else { - result = ret - } - - if(verbose){ - out = list() - out$returns = ret - out$contribution = xts(period_contrib, R.idx) - out$BOP.Weight = xts(bop_weights, R.idx) - out$EOP.Weight = xts(eop_weights, R.idx) - if(wealth.index){ - out$wealthindex = 
result - } - } else if(contribution){ - out = cbind(result, xts(period_contrib, R.idx)) - } else { - out = result - } - return(out) -} - -Return.portfolio.geometric <- function(R, - weights=NULL, - wealth.index=FALSE, - contribution=FALSE, - rebalance_on=c(NA, 'years', 'quarters', 'months', 'weeks', 'days'), - value=1, - verbose=FALSE, - ...) -{ - # bop = beginning of period - # eop = end of period - # Initialize objects - bop_value = matrix(0, NROW(R), NCOL(R)) - colnames(bop_value) = colnames(R) - eop_value = bop_value - - if(verbose | contribution){ - period_contrib = bop_value - if(verbose){ - bop_weights = bop_value - eop_weights = bop_value - } - } - ret = eop_value_total = bop_value_total = vector("numeric", NROW(R)) - - # The end_value is the end of period total value from the prior period - end_value <- value - - # initialize counter - k = 1 - for(i in 1:NROW(weights)) { - # identify rebalance from and to dates (weights[i,], weights[i+1]) and - # subset the R(eturns) object - from = as.Date(index(weights[i,]))+1 - if (i == nrow(weights)){ - to = as.Date(index(last(R))) # this is correct - } else { - to = as.Date(index(weights[(i+1),])) - } - returns = R[paste0(from, "::", to)] - - # Only enter the loop if we have a valid returns object - if(nrow(returns) >= 1){ - # inner loop counter - jj = 1 - for(j in 1:nrow(returns)){ - # We need to know when we are at the start of this inner loop so we can - # set the correct beginning of period value. We start a new inner loop - # at each rebalance date. 
- # Compute beginning of period values - if(jj == 1){ - bop_value[k,] = end_value * weights[i,] - } else { - bop_value[k,] = eop_value[k-1,] - } - bop_value_total[k] = sum(bop_value[k,]) - - # Compute end of period values - eop_value[k,] = (1 + coredata(returns[j,])) * bop_value[k,] - eop_value_total[k] = sum(eop_value[k,]) - - if(contribution | verbose){ - # Compute period contribution - period_contrib[k,] = returns[j,] * bop_value[k,] / sum(bop_value[k,]) - if(verbose){ - # Compute bop and eop weights - bop_weights[k,] = bop_value[k,] / bop_value_total[k] - eop_weights[k,] = eop_value[k,] / eop_value_total[k] - } - } - - # Compute portfolio returns - # Could also compute this by summing contribution, but this way we - # don't have to compute contribution if verbose=FALSE - ret[k] = eop_value_total[k] / end_value - 1 - - # Update end_value - end_value = eop_value_total[k] - - # increment the counters - jj = jj + 1 - k = k + 1 - } - } - } - R.idx = index(R) - ret = xts(ret, R.idx) - colnames(ret) = "portfolio.returns" - - if(wealth.index){ - result = cumprod(1 + ret) - colnames(result) = "portfolio.wealthindex" - } else { - result = ret - } - - if(verbose){ - out = list() - out$returns = ret - out$contribution = xts(period_contrib, R.idx) - out$BOP.Weight = xts(bop_weights, R.idx) - out$EOP.Weight = xts(eop_weights, R.idx) - out$BOP.Value = xts(bop_value, R.idx) - out$EOP.Value = xts(eop_value, R.idx) - if(wealth.index){ - out$wealthindex = result - } - } else if(contribution){ - out = cbind(result, xts(period_contrib, R.idx)) - } else { - out = result - } - return(out) -} - -############################################################################### -# R (http://r-project.org/) Econometrics for Performance and Risk Analysis -# -# Copyright (c) 2004-2014 Peter Carl and Brian G. 
Peterson -# -# This R package is distributed under the terms of the GNU Public License (GPL) -# for full details see the file COPYING -# -# $Id$ -# -############################################################################### +#' Calculate weighted returns for a portfolio of assets +#' +#' Using a time series of returns and any regular or irregular time series of weights +#' for each asset, this function calculates the returns of a portfolio with the same +#' periodicity of the returns data. +#' +#' By default, this function calculates the time series of portfolio returns given asset +#' returns and weights. In verbose mode, the function returns a list of intermediary +#' calculations that users may find helpful, including both asset contribution and +#' asset value through time. +#' +#' When asset return and weights are matched by period, contribution is simply the +#' weighted return of the asset. c_i = w_i * R_i Contributions are summable across the +#' portfolio to calculate the total portfolio return. +#' +#' Contribution cannot be aggregated through time. For example, say we have an equal +#' weighted portfolio of five assets with monthly returns. The geometric return of the +#' portfolio over several months won't match any aggregation of the individual +#' contributions of the assets, particularly if any rebalancing was done during the +#' period. +#' +#' To aggregate contributions through time such that they are summable to the geometric +#' returns of the portfolio, the calculation must track changes in the notional value of +#' the assets and portfolio. For example, contribution during a quarter will be +#' calculated as the change in value of the position through those three months, divided +#' by the original value of the portfolio. Approaching it this way makes the +#' calculation robust to weight changes as well. c_pi = (V_ti - V_(t-p)i) / V_(t-p) +#' +#' If the user does not specify weights, an equal weight portfolio is assumed. 
+#' Alternatively, a vector or single-row matrix of weights that matches the length +#' of the asset columns may be specified. In either case, if no rebalancing period is +#' specified, the weights will be applied at the beginning of the asset time series +#' and no further rebalancing will take place. If a rebalancing period is specified, +#' the portfolio will be rebalanced to the starting weights at the interval specified. +#' +#' Return.rebalancing will work only on daily or lower frequencies. If you are +#' rebalancing intraday, you should be using a trades/prices framework like +#' \code{\link{blotter}}, not a weights/returns framework. +#' +#' Irregular rebalancing can be done by specifying a time series of weights. The +#' function uses the date index of the weights for xts-style subsetting of rebalancing +#' periods. +#' +#' Weights specified for rebalancing should be thought of as "end-of-period" weights. +#' Rebalancing periods can be thought of as taking effect immediately after the close +#' of the bar. So, a March 31 rebalancing date will actually be in effect for April 1. +#' A December 31 rebalancing date will be in effect on Jan 1, and so forth. This +#' convention was chosen because it fits with common usage, and because it simplifies +#' xts Date subsetting via endpoints. +#' +#' In verbose mode, the function returns a list of data and intermediary calculations. +#' \itemize{ +#' \item{\code{returns}:}{ The portfolio returns.} +#' \item{\code{contribution}:}{ The per period contribution to portfolio +#' return of each asset. Contribution is calculated as BOP weight times the +#' period's return divided by BOP value. Period contributions are summed +#' across the individual assets to calculate portfolio return} +#' \item{\code{BOP.Weight}:}{ Beginning of Period (BOP) Weight for each +#' asset. An asset's BOP weight is calculated using the input weights +#' (or assumed weights, see below) and rebalancing parameters given. 
The next +#' period's BOP weight is either the EOP weights from the prior period or +#' input weights given on a rebalance period.} +#' \item{\code{EOP.Weight:}}{ End of Period (EOP) Weight for each asset. +#' An asset's EOP weight is the sum of the asset's BOP weight and +#' contribution for the period divided by the sum of the contributions and +#' initial weights for the portfolio.} +#' \item{\code{BOP.Value:}}{ BOP Value for each asset. The BOP value for each +#' asset is the asset's EOP value from the prior period, unless there is a +#' rebalance event. If there is a rebalance event, the BOP value of the +#' asset is the rebalance weight times the EOP value of the portfolio. That +#' effectively provides a zero-transaction cost change to the position values +#' as of that date to reflect the rebalance. Note that the sum of the BOP +#' values of the assets is the same as the prior period's EOP portfolio value.} +#' \item{\code{EOP.Value:}}{ EOP Value for each asset. The EOP value for +#' each asset is calculated as (1 + asset return) times the asset's BOP value. +#' The EOP portfolio value is the sum of EOP value across assets.} +#' } +#' +#' To calculate BOP and EOP position value, we create an index for each position. The +#' sum of that value across assets represents an indexed value of the total portfolio. +#' Note that BOP and EOP position values are only computed when \code{geometric = TRUE}. +#' +#' From the value calculations, we can calculate different aggregations through time +#' for the asset contributions. Those are calculated as the EOP asset value less the +#' BOP asset value; that quantity is divided by the BOP portfolio value. +#' Across assets, those will sum to equal the geometric chained returns of the +#' portfolio for that same time period. The function does not do this directly, however. 
+#' +#' @aliases Return.portfolio Return.rebalancing +#' @param R An xts, vector, matrix, data frame, timeSeries or zoo object of +#' asset returns +#' @param weights A time series or single-row matrix/vector containing asset +#' weights, as decimal percentages, treated as beginning of period weights. +#' See Details below. +#' @param wealth.index TRUE/FALSE whether to return a wealth index. Default FALSE +#' @param contribution if contribution is TRUE, add the weighted return +#' contributed by the asset in a given period. Default FALSE +#' @param geometric utilize geometric chaining (TRUE) or simple/arithmetic (FALSE) +#' to aggregate returns. Default TRUE. +#' @param rebalance_on Default NA (no rebalancing); alternatively "years", "quarters", "months", "weeks" or "days" to specify calendar-period rebalancing supported by \code{endpoints}. +#' @param value The beginning of period total portfolio value. This is used for calculating position value. +#' @param verbose If verbose is TRUE, return a list of intermediary calculations. +#' See Details below. +#' @param \dots any other passthru parameters. Not currently used. +#' @return returns a time series of returns weighted by the \code{weights} +#' parameter, or a list that includes intermediate calculations +#' @author Peter Carl, Ross Bennett, Brian Peterson +#' @seealso \code{\link{Return.calculate}} \code{\link[xts]{endpoints}} \cr +#' @references Bacon, C. \emph{Practical Portfolio Performance Measurement and +#' Attribution}. Wiley. 2004. 
Chapter 2\cr +#' @keywords ts multivariate distribution models +#' @examples +#' +#' data(edhec) +#' Return.rebalancing(edhec["1997",1:5], rebalance_on="quarters") # returns time series +#' Return.rebalancing(edhec["1997",1:5], rebalance_on="quarters", verbose=TRUE) # returns list +#' # with a weights object +#' data(weights) # rebalance at the beginning of the year to various weights through time +#' chart.StackedBar(weights) +#' x <- Return.rebalancing(edhec["2000::",1:11], weights=weights,verbose=TRUE) +#' chart.CumReturns(x$returns) +#' chart.StackedBar(x$BOP.Weight) +#' chart.StackedBar(x$BOP.Value) +#' +#' @rdname Return.portfolio +#' @export Return.portfolio +#' @export Return.rebalancing +Return.portfolio <- Return.rebalancing <- function(R, + weights=NULL, + wealth.index=FALSE, + contribution=FALSE, + geometric=TRUE, + rebalance_on=c(NA, 'years', 'quarters', 'months', 'weeks', 'days'), + value=1, + verbose=FALSE, + ...){ + R = checkData(R, method="xts") + rebalance_on = rebalance_on[1] + + # find the right unit to subtract from the first return date to create a start date + freq = periodicity(R) + switch(freq$scale, + seconds = { stop("Use a returns series of daily frequency or higher.") }, + minute = { stop("Use a returns series of daily frequency or higher.") }, + hourly = { stop("Use a returns series of daily frequency or higher.") }, + daily = { time_unit = "day" }, + weekly = { time_unit = "week" }, + monthly = { time_unit= "month" }, + quarterly = { time_unit = "quarter" }, + yearly = { time_unit = "year"} + ) + + # calculates the end of the prior period + # need to use the if on quarter as quarter is incompatible with seq (it does not work with by) + if(time_unit=='quarter'){ start_date = as.yearqtr(seq(as.Date(index(R)[1]), length = 2, by = paste("-3", 'month'))[2])}else{ start_date = seq(as.Date(index(R)[1]), length = 2, by = paste("-1", time_unit))[2]} + if(is.null(weights)){ + # generate equal weight vector for return columns + weights = rep(1 
/ NCOL(R), NCOL(R)) + } + if(is.vector(weights)) { # weights are a vector + if(is.na(rebalance_on)) { # and endpoints are not specified + # then use the weights only at the beginning of the returns series, without rebalancing + weights = xts(matrix(weights, nrow=1), order.by=as.Date(start_date)) + } else { # and endpoints are specified + # generate a time series of the given weights at the endpoints + weight_dates = c(as.Date(start_date), index(R[endpoints(R, on=rebalance_on)])) + weights = xts(matrix(rep(weights, length(weight_dates)), ncol=NCOL(R), byrow=TRUE), order.by=as.Date(weight_dates)) + } + colnames(weights) = colnames(R) + } else { # check the beginning_weights object for errors + # check that weights are given in a form that is probably a time series + weights = checkData(weights, method="xts") + # make sure that frequency(weights) NCOL(weights)){ + R = R[, 1:NCOL(weights)] + warning("number of assets in beginning_weights is less than number of columns in returns, so subsetting returns.") + } else { + stop("number of assets is greater than number of columns in returns object") + } + } + } # we should have good weights objects at this point + + if(as.Date(last(index(R))) < (as.Date(index(weights[1,]))+1)){ + stop(paste('last date in series',as.Date(last(index(R))),'occurs before beginning of first rebalancing period',as.Date(first(index(weights)))+1)) + } + + # Subset the R object if the first rebalance date is after the first date + # in the return series + if(as.Date(index(weights[1,])) > as.Date(first(index(R)))) { + R <- R[paste0(as.Date(index(weights[1,]))+1, "/")] + } + + + if(geometric){ + out = Return.portfolio.geometric(R=R, + weights=weights, + wealth.index=wealth.index, + contribution=contribution, + rebalance_on=rebalance_on, + value=value, + verbose=verbose, + ...=...) 
+ } else { + out = Return.portfolio.arithmetic(R=R, + weights=weights, + wealth.index=wealth.index, + contribution=contribution, + rebalance_on=rebalance_on, + verbose=verbose, + ...=...) + } + return(out) +} + +Return.portfolio.arithmetic <- function(R, + weights=NULL, + wealth.index=FALSE, + contribution=FALSE, + rebalance_on=c(NA, 'years', 'quarters', 'months', 'weeks', 'days'), + verbose=FALSE, + ...) +{ + # bop = beginning of period + # eop = end of period + # Initialize objects + bop_weights = matrix(0, NROW(R), NCOL(R)) + colnames(bop_weights) = colnames(R) + eop_weights = period_contrib = bop_weights + ret = vector("numeric", NROW(R)) + + # initialize counter + k = 1 + for(i in 1:NROW(weights)) { + # identify rebalance from and to dates (weights[i,], weights[i+1]) and + # subset the R(eturns) object + from = as.Date(index(weights[i,]))+1 + if (i == nrow(weights)){ + to = as.Date(index(last(R))) # this is correct + } else { + to = as.Date(index(weights[(i+1),])) + } + returns = R[paste0(from, "::", to)] + + # Only enter the loop if we have a valid returns object + if(nrow(returns) >= 1){ + # inner loop counter + jj = 1 + for(j in 1:nrow(returns)){ + # For arithmetic returns, the beginning of period weights are always + # equal to the rebalance weights + bop_weights[k,] = weights[i,] + period_contrib[k,] = coredata(returns[j,]) * bop_weights[k,] + eop_weights[k,] = (period_contrib[k,] + bop_weights[k,]) / sum(c(period_contrib[k,], bop_weights[k,])) + ret[k] = sum(period_contrib[k,]) + + # increment the counters + k = k + 1 + } + } + } [TRUNCATED] To get the complete diff run: svnlook diff /svnroot/returnanalytics -r 3508 From noreply at r-forge.r-project.org Fri Aug 15 21:49:35 2014 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Fri, 15 Aug 2014 21:49:35 +0200 (CEST) Subject: [Returnanalytics-commits] r3509 - in pkg/PortfolioAttribution: R man Message-ID: <20140815194935.185801873DE@r-forge.r-project.org> Author: 
kylebalkissoon Date: 2014-08-15 21:49:34 +0200 (Fri, 15 Aug 2014) New Revision: 3509 Modified: pkg/PortfolioAttribution/R/attribution.R pkg/PortfolioAttribution/man/Attribution.Rd Log: Fixed documentation to reflect code and Bacon, fixed an issue with single column benchmark turned into matrix and not being made to XTS which caused a checkdata failure later on. Added support for a single column benchmark that puts all contribution as interaction and a warning is displayed to the user that this is happening. Modified: pkg/PortfolioAttribution/R/attribution.R =================================================================== --- pkg/PortfolioAttribution/R/attribution.R 2014-08-13 20:16:30 UTC (rev 3508) +++ pkg/PortfolioAttribution/R/attribution.R 2014-08-15 19:49:34 UTC (rev 3509) @@ -1,372 +1,385 @@ -#' performs sector-based single-level attribution -#' -#' Performs sector-based single-level attribution analysis. Portfolio -#' performance measured relative to a benchmark gives an indication of the -#' value-added by the portfolio. Equipped with weights and returns of portfolio -#' segments, we can dissect the value-added into useful components. This -#' function is based on the sector-based approach to the attribution. The -#' workhorse is the Brinson model that explains the arithmetic difference -#' between portfolio and benchmark returns. That is it breaks down the -#' arithmetic excess returns at one level. If returns and weights are available -#' at the lowest level (e.g. for individual instruments), the aggregation up to -#' the chosen level from the hierarchy can be done using -#' \code{\link{Return.level}} function. The attribution effects can be computed -#' for several periods. The multi-period summary is obtained using one of -#' linking methods: Carino, Menchero, GRAP, Frongello or Davies Laker. It also -#' allows to break down the geometric excess returns, which link naturally over -#' time. 
Finally, it annualizes arithmetic and geometric excess returns -#' similarly to the portfolio and/or benchmark returns annualization. -#' -#' The arithmetic excess returns are decomposed into the sum of allocation, -#' selection and interaction effects across \eqn{n} sectors: -#' \deqn{R_{p}-R_{b}=\sum^{n}_{i=1}\left(A_{i}+S_{i}+I_{i}\right)} -#' The arithmetic attribution effects for the category i are computed -#' as suggested in the Brinson, Hood and Beebower (1986): -#' Allocation effect -#' \deqn{A_{i}=(w_{pi}-w_{bi})\times R_{bi}}{Ai = (wpi - wbi) * Rbi} -#' Selection effect -#' \deqn{S_{i}=w_{pi}\times(R_{pi}-R_{bi})}{Si = wpi * (Rpi - Rbi)} -#' Interaction effect -#' \deqn{I_{i}=(w_{pi}-w_{bi}) -#' \times(R_{pi}-R_{bi})}{Ii = (wpi - wbi) * Rpi - Rbi} -#' \eqn{R_{p}}{Rp} - total portfolio returns, -#' \eqn{R_{b}}{Rb} - total benchmark returns, -#' \eqn{w_{pi}}{wpi} - weights of the category \eqn{i} in the portfolio, -#' \eqn{w_{bi}}{wbi} - weights of the category \eqn{i} in the benchmark, -#' \eqn{R_{pi}}{Rpi} - returns of the portfolio category \eqn{i}, -#' \eqn{R_{bi}}{Rbi} - returns of the benchmark category \eqn{i}. -#' If Brinson and Fachler (1985) is selected the allocation effect differs: -#' \deqn{A_{i}=(w_{pi}-w_{bi}) -#' \times (R_{bi} - R_{b})}{Ai = (wpi - wbi) * (Rbi - Rb)} -#' Depending on goals we can give priority to the allocation or to -#' the selection effects. If the priority is given to the sector allocation -#' the interaction term will be combined with the security selection effect -#' (top-down approach). If the priority is given to the security selection, -#' the interaction term will be combined with the asset-allocation effect -#' (bottom-up approach). -#' Usually we have more than one period. In that case individual arithmetic -#' attribution effects should be adjusted using linking methods. 
Adjusted -#' arithmetic attribution effects can be summed up over time to provide the -#' multi-period summary: -#' \deqn{R_{p}-R_{b}=\sum^{T}_{t=1}\left(A_{t}'+S_{t}'+I_{t}'\right)} -#' where \eqn{T} is the number of periods and prime stands for the adjustment. -#' The geometric attribution effects do not suffer from the linking problem. -#' Moreover we don't have the interaction term. For more details about the -#' geometric attribution see the documentation to -#' \code{\link{Attribution.geometric}}. Finally, arithmetic annualized excess -#' returns are computed as the arithmetic difference between annualised -#' portfolio and benchmark returns: -#' \deqn{AAER=r_{a}-b_{a}}{AAER = ra - ba} the geometric annualized excess -#' returns are computed as the geometric difference between annualized -#' portfolio and benchmark returns: -#' \deqn{GAER=\frac{1+r_{a}}{1+b_{a}}-1}{GAER = (1 + ra) / (1 + ba) - 1} -#' In the case of multi-currency portfolio, the currency return, currency -#' surprise and forward premium should be specified. The multi-currency -#' arithmetic attribution is handled following Ankrim and Hensel (1992). -#' Currency returns are decomposed into the sum of the currency surprise and -#' the forward premium: \deqn{R_{ci} = R_{cei} + R_{fpi}}{Rci = Rcei + Rfpi} -#' where -#' \deqn{R_{cei} = \frac{S_{i}^{t+1} - F_{i}^{t+1}}{S_{i}^{t}}} -#' \deqn{R_{fpi} = \frac{F_{i}^{t+1}}{S_{i}^{t}} - 1} -#' \eqn{S_{i}^{t}}{Sit} - spot rate for asset \eqn{i} at time \eqn{t} -#' \eqn{F_{i}^{t}}{Fit} - forward rate for asset \eqn{i} at time \eqn{t}. 
-#' Excess returns are decomposed into the sum of allocation, selection and -#' interaction effects as in the standard Brinson model: -#' \deqn{R_{p}-R_{b}=\sum^{n}_{i=1}\left(A_{i}+S_{i}+I_{i}\right)} -#' However the allocation effect is computed taking into account currency -#' effects: -#' \deqn{A_{i}=(w_{pi}-w_{bi})\times (R_{bi} - R_{ci} - R_{l})}{Ai = -#' (wpi - wbi) * (Rbi - Rci - Rl)} -#' Benchmark returns adjusted to the currency: -#' \deqn{R_{l} = \sum^{n}_{i=1}w_{bi}\times(R_{bi}-R_{ci})} -#' The contribution from the currency is analogous to asset allocation: -#' \deqn{C_{i} = (w_{pi} - w_{bi}) \times (R_{cei} - e) + (w_{pfi} - w_{bfi}) -#' \times (R_{fi} - e)} -#' where \deqn{e = \sum^{n}_{i=1}w_{bi}\times R_{cei}} -#' The final term, forward premium, is also analogous to the asset allocation: -#' \deqn{R_{fi} = (w_{pi} - w_{bi}) \times (R_{fpi} - d)}{Rfi = (wpi - wbi) * -#' (Rfpi - d)} -#' where \deqn{d = \sum^{n}_{i=1}w_{bi}\times R_{fpi}} -#' and \eqn{R_{fpi}} - forward premium -#' In general if the intent is to estimate statistical parameters, the -#' arithmetic excess return is preferred. However, due to the linking -#' challenges, it may be preferable to use geometric excess return if the -#' intent is to link and annualize excess returns. -#' -#' @aliases Attribution -#' @param Rp T x n xts, data frame or matrix of portfolio returns -#' @param wp vector, xts, data frame or matrix of portfolio weights -#' @param Rb T x n xts, data frame or matrix of benchmark returns -#' @param wb vector, xts, data frame or matrix of benchmark weights -#' @param method Used to select the priority between allocation and selection -#' effects in arithmetic attribution. May be any of: \itemize{ \item none - -#' present allocation, selection and interaction effects independently, -#' \item top.down - the priority is given to the sector allocation. 
Interaction -#' term is combined with the security selection effect, \item bottom.up - the -#' priority is given to the security selection. Interaction term is combined -#' with the sector allocation effect} -#' By default "none" is selected -#' @param wpf vector, xts, data frame or matrix with portfolio weights of -#' currency forward contracts -#' @param wbf vector, xts, data frame or matrix with benchmark weights of -#' currency forward contracts -#' @param S (T+1) x n xts, data frame or matrix with spot rates. The first date -#' should coincide with the first date of portfolio returns -#' @param F (T+1) x n xts, data frame or matrix with forward rates. The first -#' date should coincide with the first date of portfolio returns -#' @param Rpl xts, data frame or matrix of portfolio returns in local currency -#' @param Rbl xts, data frame or matrix of benchmark returns in local currency -#' @param Rbh xts, data frame or matrix of benchmark returns hedged into the -#' base currency -#' @param bf TRUE for Brinson and Fachler and FALSE for Brinson, Hood and -#' Beebower arithmetic attribution. By default Brinson, Hood and Beebower -#' attribution is selected -#' @param linking Used to select the linking method to present the multi-period -#' summary of arithmetic attribution effects. May be any of: -#' \itemize{\item carino - logarithmic linking coefficient method -#' \item menchero - Menchero's smoothing algorithm -#' \item grap - linking approach developed by GRAP -#' \item frongello - Frongello's linking method -#' \item davies.laker - Davies and Laker's linking method} -#' By default Carino linking is selected -#' @param geometric TRUE/FALSE, whether to use geometric or arithmetic excess -#' returns for the attribution analysis. By default arithmetic is selected -#' @param adjusted TRUE/FALSE, whether to show original or smoothed attribution -#' effects for each period. 
By default unadjusted attribution effects are -#' returned -#' @return returns a list with the following components: excess returns with -#' annualized excess returns over all periods, attribution effects (allocation, -#' selection and interaction) -#' @author Andrii Babii -#' @seealso \code{\link{Attribution.levels}}, -#' \code{\link{Attribution.geometric}} -#' @references Ankrim, E. and Hensel, C. \emph{Multi-currency performance -#' attribution}. Russell Research Commentary. November 2002 \cr Bacon, C. -#' \emph{Practical Portfolio Performance Measurement and Attribution}. Wiley. -#' 2004. Chapter 5, 6, 8 \cr Christopherson, Jon A., Carino, David R., Ferson, -#' Wayne E. \emph{Portfolio Performance Measurement and Benchmarking}. -#' McGraw-Hill. 2009. Chapter 18-19 \cr Brinson, G. and Fachler, N. (1985) -#' \emph{Measuring non-US equity portfolio performance}. Journal of Portfolio -#' Management. Spring. p. 73 -76. \cr Gary P. Brinson, L. Randolph Hood, and -#' Gilbert L. Beebower, \emph{Determinants of Portfolio Performance}. Financial -#' Analysts Journal. vol. 42, no. 4, July/August 1986, p. 39-44 \cr -#' Karnosky, D. and Singer, B. \emph{Global asset management and performance -#' attribution. The Research Foundation of the Institute of Chartered Financial -#' Analysts}. February 1994. \cr -#' @keywords attribution -#' @examples -#' -#' data(attrib) -#' Attribution(Rp = attrib.returns[, 1:10], wp = attrib.weights[1, ], Rb = attrib.returns[, 11:20], -#' wb = attrib.weights[2, ], method = "top.down", linking = "carino") -#' -#' @export -Attribution <- -function (Rp, wp, Rb, wb, - wpf = NA, wbf = NA, S = NA, F = NA, Rpl = NA, Rbl = NA, Rbh = NA, - bf = FALSE, - method = c("none", "top.down", "bottom.up"), - linking = c("carino", - "menchero", - "grap", - "frongello", - "davies.laker"), - geometric = FALSE, adjusted = FALSE) -{ # @author Andrii Babii - - # DESCRIPTION: - # Function to perform the attribution analysis. 
- - # Inputs: - # Rp T x n xts, data frame or matrix of portfolio returns - # wp vector, xts, data frame or matrix of portfolio weights - # Rb T x n xts, data frame or matrix of benchmark returns - # wb vector, xts, data frame or matrix of benchmark weights - # wpf vector, xts, data frame or matrix with portfolio weights of - # currency forward contracts - # wbf vector, xts, data frame or matrix with benchmark weights of - # currency forward contracts - # S (T+1) x n xts, data frame or matrix with spot rates - # F (T+1) x n xts, data frame or matrix with forward rates - # Rpl xts, data frame or matrix of portfolio returns in local currency - # Rbl xts, data frame or matrix of benchmark returns in local currency - # Rbh xts, data frame or matrix of benchmark returns hedged into the - # base currency - - # Outputs: - # This function returns the attribution effects with multi-period summary - # and annualized excess returns - - # FUNCTION: - # Transform data to the xts objects - Rb = checkData(Rb) - Rp = checkData(Rp) - WP = wp # Save original weights in order to avoid double conversion later - WB = wb - wp = Weight.transform(wp, Rp) - wb = Weight.transform(wb, Rb) - if (nrow(wp) < nrow(Rp)){ # Rebalancing occurs next day - Rp = Rp[2:nrow(Rp)] - Rb = Rb[2:nrow(Rb)] - } - if (ncol(Rb) == 1){ - Rb = matrix(rep(coredata(Rb), ncol(Rp)), nrow(Rp), ncol(Rp)) - } - if (ncol(Rb) != ncol(Rp)){ - stop("Please use benchmark xts that has columns with benchmarks for each - asset or one common benchmark for all assets") - } - method = method[1] - linking = linking[1] - - currency = !(is.null(dim(wpf)) & is.null(dim(wbf)) & - is.null(dim(S)) & is.null(dim(F)) & - is.null(dim(Rpl)) & is.null(dim(Rpl)) & - is.null(dim(Rpl))) - - if (geometric == FALSE & linking != "davies.laker"){ - # The function makes all computations for the arithmetic attribution - # case (except for Davies and Laker linking) - - # Compute attribution effects (Brinson, Hood and Beebower model) - # If portfolio is 
single-currency - if (!currency){ - Rc = 0 - L = 0 - } else{ # If multi-currency portfolio - S = checkData(S) - F = checkData(F) - wpf = Weight.transform(wpf, Rp) - wbf = Weight.transform(wbf, Rb) - - Rc = lag(S, -1)[1:nrow(Rp), ] / S[1:nrow(Rp), ] - 1 - Rd = lag(F, -1)[1:nrow(Rp), ] / S[1:nrow(Rp), ] - 1 - Re = Rc - coredata(Rd) - Rl = Rb - coredata(Rc) - Rk = Rp - coredata(Rc) - Rfp = Re / (1 + Rd) - E = reclass(matrix(rep(rowSums(Re * coredata(wb)), ncol(Rb)), nrow(Rb), - ncol(Rb)), Rp) - L = reclass(matrix(rep(rowSums(Rl * coredata(wb)), ncol(Rb)), nrow(Rb), - ncol(Rb)), Rp) - D = reclass(matrix(rep(rowSums(Rd * coredata(wb)), ncol(Rb)), nrow(Rb), - ncol(Rb)), Rp) - # Contribution to currency - Cc = (wp - wb) * (Re - E) + (wpf - wbf) * (Rfp - E) - # Forward premium - Df = (wp - wb) * (Rd - D) - Cc = cbind(Cc, rowSums(Cc)) - Df = cbind(Df, rowSums(Df)) - colnames(Cc) = c(colnames(S), "Total") - colnames(Df) = colnames(Cc) - } - - # Get total portfolio returns - if (is.vector(WP) & is.vector(WB)){ - rp = Return.portfolio(Rp, WP, geometric = FALSE) - rb = Return.portfolio(Rb, WB, geometric = FALSE) - } else{ - rp = Return.rebalancing(Rp, WP, geometric = FALSE) - rb = Return.rebalancing(Rb, WB, geometric = FALSE) - } - names(rp) = "Total" - names(rb) = "Total" - - # Get individual attribution effects - if (bf == TRUE){ # Brinson and Fachler (1985) allocation effect - allocation = coredata(wp - wb) * (Rb - coredata(Rc) - coredata(L) - - rep(rb, ncol(Rb))) - } else{ # Brinson, Hood and Beebower (1986) allocation effect - allocation = coredata(wp - wb) * (Rb - coredata(Rc) - coredata(L)) - } - - selection = (Rp - coredata(Rb)) * wb - interaction = (wp - wb) * (Rp - coredata(Rb)) - - # Get total attribution effects - n = ncol(allocation) # number of segments - allocation = cbind(allocation, rowSums(allocation)) - names(allocation)[n + 1] = "Total" - selection = cbind(selection, rowSums(selection)) - names(selection)[n + 1] = "Total" - interaction = cbind(interaction, 
rowSums(interaction)) - names(interaction)[n + 1] = "Total" - - # Adjust attribution effects using one of linking methods if there are - # mutliple periods - if (nrow(allocation) > 1){ - if (linking == "carino"){ - allocation = Carino(rp, rb, allocation, adjusted) - selection = Carino(rp, rb, selection, adjusted) - interaction = Carino(rp, rb, interaction, adjusted) - } - - if (linking == "menchero"){ - allocation = Menchero(rp, rb, allocation, adjusted) - selection = Menchero(rp, rb, selection, adjusted) - interaction = Menchero(rp, rb, interaction, adjusted) - } - - if (linking == "grap"){ - allocation = Grap(rp, rb, allocation, adjusted) - selection = Grap(rp, rb, selection, adjusted) - interaction = Grap(rp, rb, interaction, adjusted) - } - - if (linking == "frongello"){ - allocation = Frongello(rp, rb, allocation, adjusted) - selection = Frongello(rp, rb, selection, adjusted) - interaction = Frongello(rp, rb, interaction, adjusted) - } - } - # Arithmetic excess returns + annualized arithmetic excess returns - excess.returns = rp - coredata(rb) - if (nrow(rp) > 1){ - er = Return.annualized.excess(rp, rb, geometric = FALSE) - excess.returns = rbind(as.matrix(excess.returns), er) - } - colnames(excess.returns) = "Arithmetic" - - # Select the appropriate result corresponding to the chosen method - result = list() - result[[1]] = excess.returns - result[[2]] = allocation - result[[3]] = selection - if (method == "top.down"){ # Top-down attribution - result[[3]] = result[[3]] + interaction - } - if (method == "bottom.up"){ # Bottom-up attribution - result[[2]] = result[[2]] + interaction - } - if (method == "none"){ - result[[4]] = interaction - } - } else{ # The function takes output of the corresponding function - # (Attribution.geometric or DaviesLaker) - if (geometric == TRUE){ - attrib = Attribution.geometric(Rp, WP, Rb, WB) - } - - if (linking == "davies.laker"){ - attrib = DaviesLaker(Rp, WP, Rb, WB) - } - result = attrib - } - - # Label the output - if 
((method == "none" & geometric == FALSE) | linking == "davies.laker"){ - names(result) = c("Excess returns", "Allocation", "Selection", - "Interaction") - } else{ - names(result) = c("Excess returns", "Allocation", "Selection") - } - - # If multi-currency portfolio - if (currency){ - result[[length(result) + 1]] = Cc - result[[length(result) + 1]] = Df - names(result)[(length(result)-1):length(result)] = - c("Currency management", "Forward Premium") - } - return(result) -} +#' performs sector-based single-level attribution +#' +#' Performs sector-based single-level attribution analysis. Portfolio +#' performance measured relative to a benchmark gives an indication of the +#' value-added by the portfolio. Equipped with weights and returns of portfolio +#' segments, we can dissect the value-added into useful components. This +#' function is based on the sector-based approach to the attribution. The +#' workhorse is the Brinson model that explains the arithmetic difference +#' between portfolio and benchmark returns. That is it breaks down the +#' arithmetic excess returns at one level. If returns and weights are available +#' at the lowest level (e.g. for individual instruments), the aggregation up to +#' the chosen level from the hierarchy can be done using +#' \code{\link{Return.level}} function. The attribution effects can be computed +#' for several periods. The multi-period summary is obtained using one of +#' linking methods: Carino, Menchero, GRAP, Frongello or Davies Laker. It also +#' allows to break down the geometric excess returns, which link naturally over +#' time. Finally, it annualizes arithmetic and geometric excess returns +#' similarly to the portfolio and/or benchmark returns annualization. 
+#' +#' The arithmetic excess returns are decomposed into the sum of allocation, +#' selection and interaction effects across \eqn{n} sectors: +#' \deqn{R_{p}-R_{b}=\sum^{n}_{i=1}\left(A_{i}+S_{i}+I_{i}\right)} +#' The arithmetic attribution effects for the category i are computed +#' as suggested in the Brinson, Hood and Beebower (1986): +#' Allocation effect +#' \deqn{A_{i}=(w_{pi}-w_{bi})\times R_{bi}}{Ai = (wpi - wbi) * Rbi} +#' Selection effect +#' \deqn{S_{i}=w_{bi}\times(R_{pi}-R_{bi})}{Si = wbi * (Rpi - Rbi)} +#' Interaction effect +#' \deqn{I_{i}=(w_{pi}-w_{bi}) +#' \times(R_{pi}-R_{bi})}{Ii = (wpi - wbi) * Rpi - Rbi} +#' \eqn{R_{p}}{Rp} - total portfolio returns, +#' \eqn{R_{b}}{Rb} - total benchmark returns, +#' \eqn{w_{pi}}{wpi} - weights of the category \eqn{i} in the portfolio, +#' \eqn{w_{bi}}{wbi} - weights of the category \eqn{i} in the benchmark, +#' \eqn{R_{pi}}{Rpi} - returns of the portfolio category \eqn{i}, +#' \eqn{R_{bi}}{Rbi} - returns of the benchmark category \eqn{i}. +#' If Brinson and Fachler (1985) is selected the allocation effect differs: +#' \deqn{A_{i}=(w_{pi}-w_{bi}) +#' \times (R_{bi} - R_{b})}{Ai = (wpi - wbi) * (Rbi - Rb)} +#' Depending on goals we can give priority to the allocation or to +#' the selection effects. If the priority is given to the sector allocation +#' the interaction term will be combined with the security selection effect +#' (top-down approach). If the priority is given to the security selection, +#' the interaction term will be combined with the asset-allocation effect +#' (bottom-up approach). +#' Usually we have more than one period. In that case individual arithmetic +#' attribution effects should be adjusted using linking methods. Adjusted +#' arithmetic attribution effects can be summed up over time to provide the +#' multi-period summary: +#' \deqn{R_{p}-R_{b}=\sum^{T}_{t=1}\left(A_{t}'+S_{t}'+I_{t}'\right)} +#' where \eqn{T} is the number of periods and prime stands for the adjustment. 
+#' The geometric attribution effects do not suffer from the linking problem. +#' Moreover we don't have the interaction term. For more details about the +#' geometric attribution see the documentation to +#' \code{\link{Attribution.geometric}}. Finally, arithmetic annualized excess +#' returns are computed as the arithmetic difference between annualised +#' portfolio and benchmark returns: +#' \deqn{AAER=r_{a}-b_{a}}{AAER = ra - ba} the geometric annualized excess +#' returns are computed as the geometric difference between annualized +#' portfolio and benchmark returns: +#' \deqn{GAER=\frac{1+r_{a}}{1+b_{a}}-1}{GAER = (1 + ra) / (1 + ba) - 1} +#' In the case of multi-currency portfolio, the currency return, currency +#' surprise and forward premium should be specified. The multi-currency +#' arithmetic attribution is handled following Ankrim and Hensel (1992). +#' Currency returns are decomposed into the sum of the currency surprise and +#' the forward premium: \deqn{R_{ci} = R_{cei} + R_{fpi}}{Rci = Rcei + Rfpi} +#' where +#' \deqn{R_{cei} = \frac{S_{i}^{t+1} - F_{i}^{t+1}}{S_{i}^{t}}} +#' \deqn{R_{fpi} = \frac{F_{i}^{t+1}}{S_{i}^{t}} - 1} +#' \eqn{S_{i}^{t}}{Sit} - spot rate for asset \eqn{i} at time \eqn{t} +#' \eqn{F_{i}^{t}}{Fit} - forward rate for asset \eqn{i} at time \eqn{t}. 
+#' Excess returns are decomposed into the sum of allocation, selection and +#' interaction effects as in the standard Brinson model: +#' \deqn{R_{p}-R_{b}=\sum^{n}_{i=1}\left(A_{i}+S_{i}+I_{i}\right)} +#' However the allocation effect is computed taking into account currency +#' effects: +#' \deqn{A_{i}=(w_{pi}-w_{bi})\times (R_{bi} - R_{ci} - R_{l})}{Ai = +#' (wpi - wbi) * (Rbi - Rci - Rl)} +#' Benchmark returns adjusted to the currency: +#' \deqn{R_{l} = \sum^{n}_{i=1}w_{bi}\times(R_{bi}-R_{ci})} +#' The contribution from the currency is analogous to asset allocation: +#' \deqn{C_{i} = (w_{pi} - w_{bi}) \times (R_{cei} - e) + (w_{pfi} - w_{bfi}) +#' \times (R_{fi} - e)} +#' where \deqn{e = \sum^{n}_{i=1}w_{bi}\times R_{cei}} +#' The final term, forward premium, is also analogous to the asset allocation: +#' \deqn{R_{fi} = (w_{pi} - w_{bi}) \times (R_{fpi} - d)}{Rfi = (wpi - wbi) * +#' (Rfpi - d)} +#' where \deqn{d = \sum^{n}_{i=1}w_{bi}\times R_{fpi}} +#' and \eqn{R_{fpi}} - forward premium +#' In general if the intent is to estimate statistical parameters, the +#' arithmetic excess return is preferred. However, due to the linking +#' challenges, it may be preferable to use geometric excess return if the +#' intent is to link and annualize excess returns. +#' +#' @aliases Attribution +#' @param Rp T x n xts, data frame or matrix of portfolio returns +#' @param wp vector, xts, data frame or matrix of portfolio weights +#' @param Rb T x n xts, data frame or matrix of benchmark returns +#' @param wb vector, xts, data frame or matrix of benchmark weights +#' @param method Used to select the priority between allocation and selection +#' effects in arithmetic attribution. May be any of: \itemize{ \item none - +#' present allocation, selection and interaction effects independently, +#' \item top.down - the priority is given to the sector allocation. 
Interaction +#' term is combined with the security selection effect, \item bottom.up - the +#' priority is given to the security selection. Interaction term is combined +#' with the sector allocation effect} +#' By default "none" is selected +#' @param wpf vector, xts, data frame or matrix with portfolio weights of +#' currency forward contracts +#' @param wbf vector, xts, data frame or matrix with benchmark weights of +#' currency forward contracts +#' @param S (T+1) x n xts, data frame or matrix with spot rates. The first date +#' should coincide with the first date of portfolio returns +#' @param Forward_Rate (T+1) x n xts, data frame or matrix with forward rates. The first +#' date should coincide with the first date of portfolio returns +#' @param Rpl xts, data frame or matrix of portfolio returns in local currency +#' @param Rbl xts, data frame or matrix of benchmark returns in local currency +#' @param Rbh xts, data frame or matrix of benchmark returns hedged into the +#' base currency +#' @param bf TRUE for Brinson and Fachler and FALSE for Brinson, Hood and +#' Beebower arithmetic attribution. By default Brinson, Hood and Beebower +#' attribution is selected +#' @param linking Used to select the linking method to present the multi-period +#' summary of arithmetic attribution effects. May be any of: +#' \itemize{\item carino - logarithmic linking coefficient method +#' \item menchero - Menchero's smoothing algorithm +#' \item grap - linking approach developed by GRAP +#' \item frongello - Frongello's linking method +#' \item davies.laker - Davies and Laker's linking method} +#' By default Carino linking is selected +#' @param geometric TRUE/FALSE, whether to use geometric or arithmetic excess +#' returns for the attribution analysis. By default arithmetic is selected +#' @param adjusted TRUE/FALSE, whether to show original or smoothed attribution +#' effects for each period. 
By default unadjusted attribution effects are +#' returned +#' @return returns a list with the following components: excess returns with +#' annualized excess returns over all periods, attribution effects (allocation, +#' selection and interaction) +#' @author Andrii Babii +#' @seealso \code{\link{Attribution.levels}}, +#' \code{\link{Attribution.geometric}} +#' @references Ankrim, E. and Hensel, C. \emph{Multi-currency performance +#' attribution}. Russell Research Commentary. November 2002 \cr Bacon, C. +#' \emph{Practical Portfolio Performance Measurement and Attribution}. Wiley. +#' 2004. Chapter 5, 6, 8 \cr Christopherson, Jon A., Carino, David R., Ferson, +#' Wayne E. \emph{Portfolio Performance Measurement and Benchmarking}. +#' McGraw-Hill. 2009. Chapter 18-19 \cr Brinson, G. and Fachler, N. (1985) +#' \emph{Measuring non-US equity portfolio performance}. Journal of Portfolio +#' Management. Spring. p. 73 -76. \cr Gary P. Brinson, L. Randolph Hood, and +#' Gilbert L. Beebower, \emph{Determinants of Portfolio Performance}. Financial +#' Analysts Journal. vol. 42, no. 4, July/August 1986, p. 39-44 \cr +#' Karnosky, D. and Singer, B. \emph{Global asset management and performance +#' attribution. The Research Foundation of the Institute of Chartered Financial +#' Analysts}. February 1994. \cr +#' @keywords attribution +#' @examples +#' +#' data(attrib) +#' Attribution(Rp = attrib.returns[, 1:10], wp = attrib.weights[1, ], Rb = attrib.returns[, 11:20], +#' wb = attrib.weights[2, ], method = "top.down", linking = "carino") +#' +#' @export +Attribution <- +function (Rp, wp, Rb, wb, + wpf = NA, wbf = NA, S = NA, Forward_Rates = NA, Rpl = NA, Rbl = NA, Rbh = NA, + bf = FALSE, + method = c("none", "top.down", "bottom.up"), + linking = c("carino", + "menchero", + "grap", + "frongello", + "davies.laker"), + geometric = FALSE, adjusted = FALSE) +{ # @author Andrii Babii + + # DESCRIPTION: + # Function to perform the attribution analysis. 
+ + # Inputs: + # Rp T x n xts, data frame or matrix of portfolio returns + # wp vector, xts, data frame or matrix of portfolio weights + # Rb T x n xts, data frame or matrix of benchmark returns + # wb vector, xts, data frame or matrix of benchmark weights + # wpf vector, xts, data frame or matrix with portfolio weights of + # currency forward contracts + # wbf vector, xts, data frame or matrix with benchmark weights of + # currency forward contracts + # S (T+1) x n xts, data frame or matrix with spot rates + # F (T+1) x n xts, data frame or matrix with forward rates + # Rpl xts, data frame or matrix of portfolio returns in local currency + # Rbl xts, data frame or matrix of benchmark returns in local currency + # Rbh xts, data frame or matrix of benchmark returns hedged into the + # base currency + + # Outputs: + # This function returns the attribution effects with multi-period summary + # and annualized excess returns + + # FUNCTION: + # Transform data to the xts objects + Rb = checkData(Rb) + Rp = checkData(Rp) + WP = wp # Save original weights in order to avoid double conversion later + WB = wb + wp = Weight.transform(wp, Rp) + wb = Weight.transform(wb, Rb) + if (nrow(wp) < nrow(Rp)){ # Rebalancing occurs next day + Rp = Rp[2:nrow(Rp)] + Rb = Rb[2:nrow(Rb)] + } + if (ncol(Rb) == 1){ + Rb = xts(matrix(rep(coredata(Rb), ncol(Rp)), nrow(Rp), ncol(Rp)),order.by=index(Rb)) + } + if (ncol(Rb) != ncol(Rp)){ + stop("Please use benchmark xts that has columns with benchmarks for each + asset or one common benchmark for all assets") + } + method = method[1] + linking = linking[1] + + currency = !(is.null(dim(wpf)) & is.null(dim(wbf)) & + is.null(dim(S)) & is.null(dim(Forward_Rates)) & + is.null(dim(Rpl)) & is.null(dim(Rpl)) & + is.null(dim(Rpl))) + + if (geometric == FALSE & linking != "davies.laker"){ + # The function makes all computations for the arithmetic attribution + # case (except for Davies and Laker linking) + + # Compute attribution effects (Brinson, Hood and 
Beebower model) + # If portfolio is single-currency + if (!currency){ + Rc = 0 + L = 0 + } else{ # If multi-currency portfolio + S = checkData(S) + Forward_Rates = checkData(Forward_Rates) + wpf = Weight.transform(wpf, Rp) + wbf = Weight.transform(wbf, Rb) + + Rc = lag(S, -1)[1:nrow(Rp), ] / S[1:nrow(Rp), ] - 1 + Rd = lag(Forward_Rates, -1)[1:nrow(Rp), ] / S[1:nrow(Rp), ] - 1 + Re = Rc - coredata(Rd) + Rl = Rb - coredata(Rc) + Rk = Rp - coredata(Rc) + Rfp = Re / (1 + Rd) + E = reclass(matrix(rep(rowSums(Re * coredata(wb)), ncol(Rb)), nrow(Rb), + ncol(Rb)), Rp) + L = reclass(matrix(rep(rowSums(Rl * coredata(wb)), ncol(Rb)), nrow(Rb), + ncol(Rb)), Rp) + D = reclass(matrix(rep(rowSums(Rd * coredata(wb)), ncol(Rb)), nrow(Rb), + ncol(Rb)), Rp) + # Contribution to currency + Cc = (wp - wb) * (Re - E) + (wpf - wbf) * (Rfp - E) + # Forward premium + Df = (wp - wb) * (Rd - D) + Cc = cbind(Cc, rowSums(Cc)) + Df = cbind(Df, rowSums(Df)) + colnames(Cc) = c(colnames(S), "Total") + colnames(Df) = colnames(Cc) + } + + # Get total portfolio returns + if (is.vector(WP) & is.vector(WB)){ + rp = Return.portfolio(Rp, WP, geometric = FALSE) + rb = Return.portfolio(Rb, WB, geometric = FALSE) + } else{ + rp = Return.rebalancing(Rp, WP, geometric = FALSE) + rb = Return.rebalancing(Rb, WB, geometric = FALSE) + } + names(rp) = "Total" + names(rb) = "Total" + + # Get individual attribution effects + #if the benchmark weights are not specified allocation effect is equal to 0 + #selection contribution is equal to 0 + #if bm weights unknown all contribution is treated as interaction as it cannot be broken down, user is warned + [TRUNCATED] To get the complete diff run: svnlook diff /svnroot/returnanalytics -r 3509 From noreply at r-forge.r-project.org Wed Aug 20 01:29:27 2014 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 20 Aug 2014 01:29:27 +0200 (CEST) Subject: [Returnanalytics-commits] r3510 - pkg/PerformanceAnalytics/R Message-ID: 
<20140819232927.A462618761D@r-forge.r-project.org> Author: kylebalkissoon Date: 2014-08-20 01:29:27 +0200 (Wed, 20 Aug 2014) New Revision: 3510 Added: pkg/PerformanceAnalytics/R/table.ProbOutperformance.R Log: Table to calculate # of periods fund is outperforming benchmark on a cumulative basis. User can supply a vector of periods and this will calculate it. This also returns the proportion of periods the fund outperforms which is commonly known as the probability of outperformance. Added: pkg/PerformanceAnalytics/R/table.ProbOutperformance.R =================================================================== --- pkg/PerformanceAnalytics/R/table.ProbOutperformance.R (rev 0) +++ pkg/PerformanceAnalytics/R/table.ProbOutperformance.R 2014-08-19 23:29:27 UTC (rev 3510) @@ -0,0 +1,52 @@ +#' Calculates Count of trailing periods where a fund outperformed its benchmark and calculates the proportion of those periods, this is commonly used in marketing as the probability of outperformance on a N year basis +#' +#' +#' @param R an xts, timeSeries or zoo object of asset returns +#' @param Rb an xts, timeSeries or zoo object of the benchmark returns +#' @param period_lengths a vector of periods the user wants to evaluate this over i.e. 
c(1,3,6,9,12,18,36) +#' @author Kyle Balkissoon +#' @keywords Performance Reporting Fund vs Benchmark +#' +#' @export table_ProbOutperformance + +table.ProbOutPerformance = function(R,Rb,period_lengths=c(1,3,6,9,12,18,36)){ + if(nrow(R)!=nrow(Rb)){ + stop("R and Rb must be the same length") + } + + + ###Create Trailing frequency analysis + R_periods = xts(data.frame(matrix(ncol=length(period_lengths),nrow=nrow(R))),order.by=index(R)) + colnames(R_periods) = paste0("period_",period_lengths) + Rb_periods = R_periods + for(i in 1:nrow(R_periods)){ + for(p_len in period_lengths){ + #if there aren't enough occurences yet don't calculate anything + if(p_len>i){}else{ + tdf = first(R,i) + tdf_b = first(Rb,i) + eval(parse(text=paste0("R_periods[",i,",]$period_",p_len," = Return.cumulative(last(tdf,",p_len,"))"))) + eval(parse(text=paste0("Rb_periods[",i,",]$period_",p_len," = Return.cumulative(last(tdf_b,",p_len,"))"))) + }}} + + + ##Calculate periods ahead + #Differences + diff_mat = R_periods-Rb_periods + + ##Result + result = data.frame(period_lengths) + result[,2] = NA + result[,3]=NA + for(p_len in 1:length(period_lengths)){ + result[p_len,2] = eval(parse(text=paste0("sum(ifelse(as.numeric(diff_mat$period_",period_lengths[p_len],")>0,1,0),na.rm=T)"))) + result[p_len,3] = eval(parse(text=paste0("sum(ifelse(as.numeric(diff_mat$period_",period_lengths[p_len],")<0,1,0),na.rm=T)"))) + } + result[,4] = result[,2]+result[,3] + result[,5] = result[,2]/result[,4] + result[,6] = result[,3]/result[,4] + + colnames(result) = c("period_lengths",colnames(R),colnames(Rb),"total periods",paste0("prob_",colnames(R),"_outperformance"),paste0("prob_",colnames(Rb),"_outperformance")) + return(result) + +} \ No newline at end of file From noreply at r-forge.r-project.org Wed Aug 20 01:29:56 2014 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 20 Aug 2014 01:29:56 +0200 (CEST) Subject: [Returnanalytics-commits] r3511 - pkg/PerformanceAnalytics/R 
Message-ID: <20140819232956.43EB618763F@r-forge.r-project.org> Author: kylebalkissoon Date: 2014-08-20 01:29:55 +0200 (Wed, 20 Aug 2014) New Revision: 3511 Modified: pkg/PerformanceAnalytics/R/table.ProbOutperformance.R Log: Line endings switched to unix Modified: pkg/PerformanceAnalytics/R/table.ProbOutperformance.R =================================================================== --- pkg/PerformanceAnalytics/R/table.ProbOutperformance.R 2014-08-19 23:29:27 UTC (rev 3510) +++ pkg/PerformanceAnalytics/R/table.ProbOutperformance.R 2014-08-19 23:29:55 UTC (rev 3511) @@ -1,52 +1,52 @@ -#' Calculates Count of trailing periods where a fund outperformed its benchmark and calculates the proportion of those periods, this is commonly used in marketing as the probability of outperformance on a N year basis -#' -#' -#' @param R an xts, timeSeries or zoo object of asset returns -#' @param Rb an xts, timeSeries or zoo object of the benchmark returns -#' @param period_lengths a vector of periods the user wants to evaluate this over i.e. 
c(1,3,6,9,12,18,36) -#' @author Kyle Balkissoon -#' @keywords Performance Reporting Fund vs Benchmark -#' -#' @export table_ProbOutperformance - -table.ProbOutPerformance = function(R,Rb,period_lengths=c(1,3,6,9,12,18,36)){ - if(nrow(R)!=nrow(Rb)){ - stop("R and Rb must be the same length") - } - - - ###Create Trailing frequency analysis - R_periods = xts(data.frame(matrix(ncol=length(period_lengths),nrow=nrow(R))),order.by=index(R)) - colnames(R_periods) = paste0("period_",period_lengths) - Rb_periods = R_periods - for(i in 1:nrow(R_periods)){ - for(p_len in period_lengths){ - #if there aren't enough occurences yet don't calculate anything - if(p_len>i){}else{ - tdf = first(R,i) - tdf_b = first(Rb,i) - eval(parse(text=paste0("R_periods[",i,",]$period_",p_len," = Return.cumulative(last(tdf,",p_len,"))"))) - eval(parse(text=paste0("Rb_periods[",i,",]$period_",p_len," = Return.cumulative(last(tdf_b,",p_len,"))"))) - }}} - - - ##Calculate periods ahead - #Differences - diff_mat = R_periods-Rb_periods - - ##Result - result = data.frame(period_lengths) - result[,2] = NA - result[,3]=NA - for(p_len in 1:length(period_lengths)){ - result[p_len,2] = eval(parse(text=paste0("sum(ifelse(as.numeric(diff_mat$period_",period_lengths[p_len],")>0,1,0),na.rm=T)"))) - result[p_len,3] = eval(parse(text=paste0("sum(ifelse(as.numeric(diff_mat$period_",period_lengths[p_len],")<0,1,0),na.rm=T)"))) - } - result[,4] = result[,2]+result[,3] - result[,5] = result[,2]/result[,4] - result[,6] = result[,3]/result[,4] - - colnames(result) = c("period_lengths",colnames(R),colnames(Rb),"total periods",paste0("prob_",colnames(R),"_outperformance"),paste0("prob_",colnames(Rb),"_outperformance")) - return(result) - +#' Calculates Count of trailing periods where a fund outperformed its benchmark and calculates the proportion of those periods, this is commonly used in marketing as the probability of outperformance on a N year basis +#' +#' +#' @param R an xts, timeSeries or zoo object of asset returns 
+#' @param Rb an xts, timeSeries or zoo object of the benchmark returns +#' @param period_lengths a vector of periods the user wants to evaluate this over i.e. c(1,3,6,9,12,18,36) +#' @author Kyle Balkissoon +#' @keywords Performance Reporting Fund vs Benchmark +#' +#' @export table_ProbOutperformance + +table.ProbOutPerformance = function(R,Rb,period_lengths=c(1,3,6,9,12,18,36)){ + if(nrow(R)!=nrow(Rb)){ + stop("R and Rb must be the same length") + } + + + ###Create Trailing frequency analysis + R_periods = xts(data.frame(matrix(ncol=length(period_lengths),nrow=nrow(R))),order.by=index(R)) + colnames(R_periods) = paste0("period_",period_lengths) + Rb_periods = R_periods + for(i in 1:nrow(R_periods)){ + for(p_len in period_lengths){ + #if there aren't enough occurences yet don't calculate anything + if(p_len>i){}else{ + tdf = first(R,i) + tdf_b = first(Rb,i) + eval(parse(text=paste0("R_periods[",i,",]$period_",p_len," = Return.cumulative(last(tdf,",p_len,"))"))) + eval(parse(text=paste0("Rb_periods[",i,",]$period_",p_len," = Return.cumulative(last(tdf_b,",p_len,"))"))) + }}} + + + ##Calculate periods ahead + #Differences + diff_mat = R_periods-Rb_periods + + ##Result + result = data.frame(period_lengths) + result[,2] = NA + result[,3]=NA + for(p_len in 1:length(period_lengths)){ + result[p_len,2] = eval(parse(text=paste0("sum(ifelse(as.numeric(diff_mat$period_",period_lengths[p_len],")>0,1,0),na.rm=T)"))) + result[p_len,3] = eval(parse(text=paste0("sum(ifelse(as.numeric(diff_mat$period_",period_lengths[p_len],")<0,1,0),na.rm=T)"))) + } + result[,4] = result[,2]+result[,3] + result[,5] = result[,2]/result[,4] + result[,6] = result[,3]/result[,4] + + colnames(result) = c("period_lengths",colnames(R),colnames(Rb),"total periods",paste0("prob_",colnames(R),"_outperformance"),paste0("prob_",colnames(Rb),"_outperformance")) + return(result) + } \ No newline at end of file From noreply at r-forge.r-project.org Sun Aug 24 15:40:36 2014 From: noreply at 
r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Sun, 24 Aug 2014 15:40:36 +0200 (CEST) Subject: [Returnanalytics-commits] r3512 - pkg/PerformanceAnalytics/man Message-ID: <20140824134036.5BD72184D69@r-forge.r-project.org> Author: rossbennett34 Date: 2014-08-24 15:40:36 +0200 (Sun, 24 Aug 2014) New Revision: 3512 Modified: pkg/PerformanceAnalytics/man/Return.portfolio.Rd Log: updated Rd file for Return.portfolio Modified: pkg/PerformanceAnalytics/man/Return.portfolio.Rd =================================================================== --- pkg/PerformanceAnalytics/man/Return.portfolio.Rd 2014-08-19 23:29:55 UTC (rev 3511) +++ pkg/PerformanceAnalytics/man/Return.portfolio.Rd 2014-08-24 13:40:36 UTC (rev 3512) @@ -1,85 +1,151 @@ -\name{Return.rebalancing} +% Generated by roxygen2 (4.0.1): do not edit by hand +\name{Return.portfolio} \alias{Return.portfolio} \alias{Return.rebalancing} -\title{Calculates weighted returns for a portfolio of assets} +\title{Calculate weighted returns for a portfolio of assets} \usage{ -Return.rebalancing(R, weights, ...) - Return.portfolio(R, weights = NULL, wealth.index = FALSE, - contribution = FALSE, geometric = TRUE, ...) + contribution = FALSE, geometric = TRUE, rebalance_on = c(NA, "years", + "quarters", "months", "weeks", "days"), value = 1, verbose = FALSE, ...) } \arguments{ - \item{R}{an xts, vector, matrix, data frame, timeSeries - or zoo object of asset returns} +\item{R}{An xts, vector, matrix, data frame, timeSeries or zoo object of +asset returns} - \item{weights}{a time series or single-row matrix/vector - containing asset weights, as percentages} +\item{weights}{A time series or single-row matrix/vector containing asset +weights, as decimal percentages, treated as beginning of period weights. +See Details below.} - \item{wealth.index}{TRUE/FALSE whether to return a wealth - index, default FALSE} +\item{wealth.index}{TRUE/FALSE whether to return a wealth index. 
Default FALSE} - \item{contribution}{if contribution is TRUE, add the - weighted return contributed by the asset in this period} +\item{contribution}{if contribution is TRUE, add the weighted return +contributed by the asset in a given period. Default FALSE} - \item{geometric}{utilize geometric chaining (TRUE) or - simple/arithmetic chaining (FALSE) to aggregate returns, - default TRUE} +\item{geometric}{utilize geometric chaining (TRUE) or simple/arithmetic (FALSE) +to aggregate returns. Default TRUE.} - \item{\dots}{any other passthru parameters} +\item{rebalance_on}{Default "none"; alternatively "daily" "weekly" "monthly" "annual" to specify calendar-period rebalancing supported by \code{endpoints}.} + +\item{value}{The beginning of period total portfolio value. This is used for calculating position value.} + +\item{verbose}{If verbose is TRUE, return a list of intermediary calculations. +See Details below.} + +\item{\dots}{any other passthru parameters. Not currently used.} } \value{ -returns a time series of returns weighted by the -\code{weights} parameter, possibly including contribution -for each period +returns a time series of returns weighted by the \code{weights} +parameter, or a list that includes intermediate calculations } \description{ -Calculates weighted returns for a portfolio of assets. If -you have a single weighting vector, or want the equal -weighted portfolio, use \code{Return.portfolio}. If you -have a portfolio that is periodically rebalanced, and -multiple time periods with different weights, use -\code{Return.rebalancing}. Both functions will subset the -return series to only include returns for assets for which -\code{weight} is provided. +Using a time series of returns and any regular or irregular time series of weights +for each asset, this function calculates the returns of a portfolio with the same +periodicity of the returns data. 
} \details{ -\code{Return.rebalancing} uses the date in the weights time -series or matrix for xts-style subsetting of rebalancing -periods. Rebalancing periods can be thought of as taking -effect immediately after the close of the bar. So, a March -31 rebalancing date will actually be in effect for April 1. -A December 31 rebalancing date will be in effect on Jan 1, -and so forth. This convention was chosen because it fits -with common usage, and because it simplifies xts Date -subsetting via \code{endpoints}. +By default, this function calculates the time series of portfolio returns given asset +returns and weights. In verbose mode, the function returns a list of intermediary +calculations that users may find helpful, including both asset contribution and +asset value through time. -\code{Return.rebalancing} will rebalance only on daily or -lower frequencies. If you are rebalancing intraday, you -should be using a trading/prices framework, not a -weights-based return framework. -} -\examples{ -data(edhec) -data(weights) +When asset return and weights are matched by period, contribution is simply the +weighted return of the asset. c_i = w_i * R_i Contributions are summable across the +portfolio to calculate the total portfolio return. -# calculate an equal weighted portfolio return -round(Return.portfolio(edhec),4) +Contribution cannot be aggregated through time. For example, say we have an equal +weighted portfolio of five assets with monthly returns. The geometric return of the +portfolio over several months won't match any aggregation of the individual +contributions of the assets, particularly if any rebalancing was done during the +period. -# now return the contribution too -round(Return.portfolio(edhec,contribution=TRUE),4) +To aggregate contributions through time such that they are summable to the geometric +returns of the portfolio, the calculation must track changes in the notional value of +the assets and portfolio. 
For example, contribution during a quarter will be +calculated as the change in value of the position through those three months, divided +by the original value of the portfolio. Approaching it this way makes the +calculation robust to weight changes as well. c_pi = (V_(t,i) - V_(t-p,i)) / V_(t-p) -# calculate a portfolio return with rebalancing -round(Return.rebalancing(edhec,weights),4) +If the user does not specify weights, an equal weight portfolio is assumed. +Alternatively, a vector or single-row matrix of weights that matches the length +of the asset columns may be specified. In either case, if no rebalancing period is +specified, the weights will be applied at the beginning of the asset time series +and no further rebalancing will take place. If a rebalancing period is specified, +the portfolio will be rebalanced to the starting weights at the interval specified. + +Return.rebalancing will work only on daily or lower frequencies. If you are +rebalancing intraday, you should be using a trades/prices framework like +\code{\link{blotter}}, not a weights/returns framework. + +Irregular rebalancing can be done by specifying a time series of weights. The +function uses the date index of the weights for xts-style subsetting of rebalancing +periods. + +Weights specified for rebalancing should be thought of as "end-of-period" weights. +Rebalancing periods can be thought of as taking effect immediately after the close +of the bar. So, a March 31 rebalancing date will actually be in effect for April 1. +A December 31 rebalancing date will be in effect on Jan 1, and so forth. This +convention was chosen because it fits with common usage, and because it simplifies +xts Date subsetting via endpoints. + +In verbose mode, the function returns a list of data and intermediary calculations. +\itemize{ + \item{\code{returns}:}{ The portfolio returns.} + \item{\code{contribution}:}{ The per period contribution to portfolio + return of each asset. 
Contribution is calculated as BOP weight times the + period's return divided by BOP value. Period contributions are summed + across the individual assets to calculate portfolio return} + \item{\code{BOP.Weight}:}{ Beginning of Period (BOP) Weight for each + asset. An asset's BOP weight is calculated using the input weights + (or assumed weights, see below) and rebalancing parameters given. The next + period's BOP weight is either the EOP weights from the prior period or + input weights given on a rebalance period.} + \item{\code{EOP.Weight:}}{ End of Period (EOP) Weight for each asset. + An asset's EOP weight is the sum of the asset's BOP weight and + contribution for the period divided by the sum of the contributions and + initial weights for the portfolio.} + \item{\code{BOP.Value:}}{ BOP Value for each asset. The BOP value for each + asset is the asset's EOP value from the prior period, unless there is a + rebalance event. If there is a rebalance event, the BOP value of the + asset is the rebalance weight times the EOP value of the portfolio. That + effectively provides a zero-transaction cost change to the position values + as of that date to reflect the rebalance. Note that the sum of the BOP + values of the assets is the same as the prior period's EOP portfolio value.} + \item{\code{EOP.Value:}}{ EOP Value for each asset. The EOP value for + each asset is calculated as (1 + asset return) times the asset's BOP value. + The EOP portfolio value is the sum of EOP value across assets.} } + +To calculate BOP and EOP position value, we create an index for each position. The +sum of that value across assets represents an indexed value of the total portfolio. +Note that BOP and EOP position values are only computed when \code{geometric = TRUE}. + +From the value calculations, we can calculate different aggregations through time +for the asset contributions. 
Those are calculated as the EOP asset value less the +BOP asset value; that quantity is divided by the BOP portfolio value. +Across assets, those will sum to equal the geometric chained returns of the +portfolio for that same time period. The function does not do this directly, however. +} +\examples{ +data(edhec) +Return.rebalancing(edhec["1997",1:5], rebalance_on="quarters") # returns time series +Return.rebalancing(edhec["1997",1:5], rebalance_on="quarters", verbose=TRUE) # returns list +# with a weights object
data(weights) # rebalance at the beginning of the year to various weights through time +chart.StackedBar(weights) +x <- Return.rebalancing(edhec["2000::",1:11], weights=weights,verbose=TRUE) +chart.CumReturns(x$returns) +chart.StackedBar(x$BOP.Weight) +chart.StackedBar(x$BOP.Value) +} \author{ -Brian G. Peterson +Peter Carl, Ross Bennett, Brian Peterson } \references{ -Bacon, C. \emph{Practical Portfolio Performance Measurement -and Attribution}. Wiley. 2004. 
Chapter 2\cr } \seealso{ -\code{\link{Return.calculate}} \cr +\code{\link{Return.calculate}} \code{\link[xts]{endpoints}} \cr } \keyword{distribution} \keyword{models} From noreply at r-forge.r-project.org Sun Aug 24 16:50:13 2014 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Sun, 24 Aug 2014 16:50:13 +0200 (CEST) Subject: [Returnanalytics-commits] r3513 - pkg/PerformanceAnalytics/vignettes Message-ID: <20140824145013.95836187319@r-forge.r-project.org> Author: rossbennett34 Date: 2014-08-24 16:50:13 +0200 (Sun, 24 Aug 2014) New Revision: 3513 Added: pkg/PerformanceAnalytics/vignettes/portfolio_returns.pdf Modified: pkg/PerformanceAnalytics/vignettes/portfolio_returns.Rnw Log: revisions to portfolio returns vignette Modified: pkg/PerformanceAnalytics/vignettes/portfolio_returns.Rnw =================================================================== --- pkg/PerformanceAnalytics/vignettes/portfolio_returns.Rnw 2014-08-24 13:40:36 UTC (rev 3512) +++ pkg/PerformanceAnalytics/vignettes/portfolio_returns.Rnw 2014-08-24 14:50:13 UTC (rev 3513) @@ -1,15 +1,23 @@ \documentclass{article} \usepackage{amsmath} \usepackage{verbatim} +\usepackage{Rd} +%\VignetteIndexEntry{Portfolio Returns} +%\VignetteDepends{PerformanceAnalytics} +%\VignetteKeywords{returns, performance, portfolio} +%\VignettePackage{PerformanceAnalytics} \begin{document} -\section{Basic definitions} +\begin{abstract} +This vignette provides an overview of calculating portfolio returns through time with an emphasis on the math used to develop the \verb"Return.portfolio" function in \pkg{PerformanceAnalytics}. We first introduce some basic definitions, then give simple examples of computing portfolio returns in a prices and shares framework as well as a returns and weights framework. We then introduce \verb"Return.portfolio" and demonstrate the function with a few examples. +\end{abstract} -Suppose we have a portfolio of $N$ assets. 
+\tableofcontents -The value of asset $i$ in the portfolio is defined as +\section{Basic definitions} +Suppose we have a portfolio of $N$ assets. The value of asset $i$, $V_i$, in the portfolio is defined as \begin{eqnarray*} V_i = \lambda_i * P_i \end{eqnarray*} @@ -20,13 +28,12 @@ P_i \text{ is the price of asset $i$}\\ \end{eqnarray*} -The total portfolio value is defined as +The total portfolio value, $V_p$, is defined as \begin{eqnarray*} V_P = \sum_{i=1}^N V_i \end{eqnarray*} -The weight of asset $i$ in the portfolio is defined as - +The weight of asset $i$, $w_i$, in the portfolio is defined as \begin{eqnarray*} w_i = V_i / V_P \end{eqnarray*} @@ -37,14 +44,17 @@ V_P \text{ is the total value of the portfolio}\\ \end{eqnarray*} -The portfolio return at time $t$ is defined as +The portfolio return at time $t$, $R_t$, is defined as +\begin{eqnarray*} +R_t = \frac{V_{p_{t}} - V_{p_{{t-1}}}}{V_{p_{{t-1}}}} +\end{eqnarray*} \begin{eqnarray*} -R_t = \frac{V_t - V_{t-1}}{V_{t-1}} +V_{p_{t}} \text{ is the portfolio value at time $t$}\\ \end{eqnarray*} \section{Simple Example: Prices and Shares Framework} -Suppose we have a portfolio of $N = 2$ assets, asset A and asset B. The prices for assets A and B are given as: +Suppose we have a portfolio of $N = 2$ assets, asset A and asset B. The prices for assets A and B are given as <<>>= prices = cbind(c(5, 7, 6, 7), @@ -117,12 +127,11 @@ weights @ -We have shown that calculating portfolio weights, values, and returns is simple in a prices and shares framework. However, calculating these metrics becomes more of a challenge in a weights and returns framework. +We have shown that calculating portfolio weights, values, and returns is simple in a prices and shares framework. However, calculating these metrics becomes more challenging in a weights and returns framework. \section{Example: Weights and Returns Framework} -For this example, we will use the monthly returns during 1997 of the first 5 assets in the edhec dataset. 
- +We will use the monthly returns during 1997 of the first 5 assets in the edhec dataset for the following example. <<>>= library(PerformanceAnalytics) data(edhec) @@ -134,7 +143,7 @@ Suppose that on 1996-12-31 we wish to form an equal weight portfolio such that the weight for asset $i$ is given as: \begin{equation*} -w_i = frac{1 / N} \quad \text{for } i \in 1, \hdots, N +w_i = \frac{1}{N} \quad \text{for } i \in 1, \hdots, N \end{equation*} where $N$ is equal to the number of assets. @@ -150,8 +159,7 @@ Case 1: The beginning of period $t$ is a rebalancing event. For example, the rebalance weights at the end of \verb"1996-12-31" take effect at the beginning of \verb"1997-01-31". This means that the beginning of \verb"1997-01-31" is considered a rebalance event. -The beginning of period value for asset $i$ at time $t$ is given as: - +The beginning of period value for asset $i$ at time $t$ is given as \begin{equation*} V_{{bop}_{t,i}} = w_i * V_{t-1} \end{equation*} @@ -163,17 +171,15 @@ V_{{bop}_{t,i}} = V_{{eop}_{t-1,i}} \end{equation*} -where $V_{{eop}_{t,i}}$ is the end of period value for asset $i$ from the prior period. +where $V_{{eop}_{t-1,i}}$ is the end of period value for asset $i$ from the prior period. -The end of period value for asset $i$ at time $t$ is given as: +The end of period value for asset $i$ at time $t$ is given as \begin{equation*} V_{{eop}_{t,i}} = (1 + R_{t,i}) * V_{{bop}_{t,i}} \end{equation*} -Here we demonstrate this and compute values for the periods 1 and 2. +Here we demonstrate this and compute values for the periods 1 and 2. For the first period, $t=1$, we need an initial value for the portfolio value. Let $V_0 = 1$ denote the initial portfolio value. Note that the initial portfolio value can be any arbitrary number. Here we use $V_0 = 1$ for simplicity. -For the first period, $t=1$, we need an initial value for the portfolio value. Let $V_0 = 1$ denote the initial portfolio value. 
Note that the initial portfolio value can be any arbitrary number. Here we use $V_0 = 1$ for simplicity. - <<>>= V_0 = 1 bop_value = eop_value = matrix(0, 2, ncol(R)) @@ -193,14 +199,14 @@ eop_value[t,] = coredata(1 + R[t,]) * bop_value[t,] @ -It is easily seen that the values for the rest of the time periods can be computed by iterating over $ t \in 1, \hdots, T$ where $T=12$ in this example. +It is seen that the values for the rest of the time periods can be computed by iterating over $ t \in 1, \hdots, T$ where $T=12$ in this example. -The weight of asset $i$ at time $t$ is calculated as: +The weight of asset $i$ at time $t$ is calculated as \begin{equation*} w_{t,i} = \frac{V_{t,i}}{\sum_{i=1}^N V_{t,i}} \end{equation*} -Here we compute the beginning and end of period weights. +Here we compute both the beginning and end of period weights. <<>>= bop_weights = eop_weights = matrix(0, 2, ncol(R)) for(t in 1:2){ @@ -211,8 +217,7 @@ eop_weights @ -The portfolio returns for time $t$ are calculated as: - +The portfolio returns at time $t$ are calculated as \begin{equation*} R_{P_t} = \frac{V_t - V_{t-1}}{V_{t-1}} \end{equation*} @@ -223,9 +228,7 @@ R_P @ - -The contribution of asset $i$ at time $t$ is calculated as: - +The contribution of asset $i$ at time $t$ is calculated as \begin{equation*} contribution_{t,i} = \frac{V_{{eop}_{t,i}} - V_{{bop}_{t,i}}}{\sum_{i=1}^N V_{{bop}_{t,i}}} \end{equation*} @@ -238,7 +241,7 @@ contribution @ -Note that contribution can also be calculated as: +Note that contribution can also be calculated as \begin{equation*} contribution_{t,i} = R_{t,i} * w_{t,i} \end{equation*} @@ -249,9 +252,8 @@ args(Return.portfolio) @ +If no \verb"weights" are specified, then an equal weight portfolio is computed. If \verb"rebalance_on=NA" then a buy and hold portfolio is assumed. See \verb"?Return.portfolio" for a detailed explanation of the function and arguments. -If no \verb"weights" are specified, then an equal weight portfolio is computed. 
Also, if \verb"rebalance_on=NA" then a buy and hold portfolio is assumed. See \verb"?Return.portfolio" for a detailed explanation of the function and arguments. - <>= # Equally weighted, buy and hold portfolio returns Return.portfolio(R) @@ -265,5 +267,4 @@ Return.portfolio(R, rebalance_on="quarters", verbose=TRUE) @ - \end{document} Added: pkg/PerformanceAnalytics/vignettes/portfolio_returns.pdf =================================================================== (Binary files differ) Property changes on: pkg/PerformanceAnalytics/vignettes/portfolio_returns.pdf ___________________________________________________________________ Added: svn:mime-type + application/octet-stream From noreply at r-forge.r-project.org Mon Aug 25 01:56:50 2014 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Mon, 25 Aug 2014 01:56:50 +0200 (CEST) Subject: [Returnanalytics-commits] r3514 - in pkg/PerformanceAnalytics: R man Message-ID: <20140824235650.EA6AC186BCF@r-forge.r-project.org> Author: rossbennett34 Date: 2014-08-25 01:56:50 +0200 (Mon, 25 Aug 2014) New Revision: 3514 Modified: pkg/PerformanceAnalytics/R/Return.portfolio.R pkg/PerformanceAnalytics/man/Return.portfolio.Rd Log: zerofilling NA's and fixing examples in Return.portfolio Modified: pkg/PerformanceAnalytics/R/Return.portfolio.R =================================================================== --- pkg/PerformanceAnalytics/R/Return.portfolio.R 2014-08-24 14:50:13 UTC (rev 3513) +++ pkg/PerformanceAnalytics/R/Return.portfolio.R 2014-08-24 23:56:50 UTC (rev 3514) @@ -1,418 +1,423 @@ -#' Calculate weighted returns for a portfolio of assets -#' -#' Using a time series of returns and any regular or irregular time series of weights -#' for each asset, this function calculates the returns of a portfolio with the same -#' periodicity of the returns data. -#' -#' By default, this function calculates the time series of portfolio returns given asset -#' returns and weights. 
In verbose mode, the function returns a list of intermediary -#' calculations that users may find helpful, including both asset contribution and -#' asset value through time. -#' -#' When asset return and weights are matched by period, contribution is simply the -#' weighted return of the asset. c_i = w_i * R_i Contributions are summable across the -#' portfolio to calculate the total portfolio return. -#' -#' Contribution cannot be aggregated through time. For example, say we have an equal -#' weighted portfolio of five assets with monthly returns. The geometric return of the -#' portfolio over several months won't match any aggregation of the individual -#' contributions of the assets, particularly if any rebalancing was done during the -#' period. -#' -#' To aggregate contributions through time such that they are summable to the geometric -#' returns of the portfolio, the calculation must track changes in the notional value of -#' the assets and portfolio. For example, contribution during a quarter will be -#' calculated as the change in value of the position through those three months, divided -#' by the original value of the portfolio. Approaching it this way makes the -#' calculation robust to weight changes as well. c_pi = V_(t-p)i - V_t)/V_ti -#' -#' If the user does not specify weights, an equal weight portfolio is assumed. -#' Alternatively, a vector or single-row matrix of weights that matches the length -#' of the asset columns may be specified. In either case, if no rebalancing period is -#' specified, the weights will be applied at the beginning of the asset time series -#' and no further rebalancing will take place. If a rebalancing period is specified, -#' the portfolio will be rebalanced to the starting weights at the interval specified. -#' -#' Return.rebalancing will work only on daily or lower frequencies. 
If you are -#' rebalancing intraday, you should be using a trades/prices framework like -#' {\link{\code{blotter}}}, not a weights/returns framework. -#' -#' Irregular rebalancing can be done by specifying a time series of weights. The -#' function uses the date index of the weights for xts-style subsetting of rebalancing -#' periods. -#' -#' Weights specified for rebalancing should be thought of as "end-of-period" weights. -#' Rebalancing periods can be thought of as taking effect immediately after the close -#' of the bar. So, a March 31 rebalancing date will actually be in effect for April 1. -#' A December 31 rebalancing date will be in effect on Jan 1, and so forth. This -#' convention was chosen because it fits with common usage, and because it simplifies -#' xts Date subsetting via endpoints. -#' -#' In verbose mode, the function returns a list of data and intermediary calculations. -#' \itemize{ -#' \item{\code{returns}:}{ The portfolio returns.} -#' \item{\code{contribution}:}{ The per period contribution to portfolio -#' return of each asset. Contribution is calculated as BOP weight times the -#' period's return divided by BOP value. Period contributions are summed -#' across the individual assets to calculate portfolio return} -#' \item{\code{BOP.Weight}:}{ Beginning of Period (BOP) Weight for each -#' asset. An asset's BOP weight is calculated using the input weights -#' (or assumed weights, see below) and rebalancing parameters given. The next -#' period's BOP weight is either the EOP weights from the prior period or -#' input weights given on a rebalance period.} -#' \item{\code{EOP.Weight:}}{ End of Period (BOP) Weight for each asset. -#' An asset's EOP weight is the sum of the asset's BOP weight and -#' contribution for the period divided by the sum of the contributions and -#' initial weights for the portfolio.} -#' \item{\code{BOP.Value:}}{ BOP Value for each asset. 
The BOP value for each -#' asset is the asset's EOP value from the prior period, unless there is a -#' rebalance event. If there is a rebalance event, the BOP value of the -#' asset is the rebalance weight times the EOP value of the portfolio. That -#' effectively provides a zero-transaction cost change to the position values -#' as of that date to reflect the rebalance. Note that the sum of the BOP -#' values of the assets is the same as the prior period's EOP portfolio value.} -#' \item{\code{EOP.Value:}}{ EOP Value for each asset. The EOP value is for -#' each asset is calculated as (1 + asset return) times the asset's BOP value. -#' The EOP portfolio value is the sum of EOP value across assets.} -#' } -#' -#' To calculate BOP and EOP position value, we create an index for each position. The -#' sum of that value across assets represents an indexed value of the total portfolio. -#' Note that BOP and EOP position values are only computed when \code{geometric = TRUE}. -#' -#' From the value calculations, we can calculate different aggregations through time -#' for the asset contributions. Those are calculated as the EOP asset value less the -#' BOP asset value; that quantity is divided by the BOP portfolio value. -#' Across assets, those will sum to equal the geometric chained returns of the -#' portfolio for that same time period. The function does not do this directly, however. -#' -#' @aliases Return.portfolio Return.rebalancing -#' @param R An xts, vector, matrix, data frame, timeSeries or zoo object of -#' asset returns -#' @param weights A time series or single-row matrix/vector containing asset -#' weights, as decimal percentages, treated as beginning of period weights. -#' See Details below. -#' @param wealth.index TRUE/FALSE whether to return a wealth index. Default FALSE -#' @param contribution if contribution is TRUE, add the weighted return -#' contributed by the asset in a given period. 
Default FALSE -#' @param geometric utilize geometric chaining (TRUE) or simple/arithmetic (FALSE) -#' to aggregate returns. Default TRUE. -#' @param rebalance_on Default "none"; alternatively "daily" "weekly" "monthly" "annual" to specify calendar-period rebalancing supported by \code{endpoints}. -#' @param value The beginning of period total portfolio value. This is used for calculating position value. -#' @param verbose If verbose is TRUE, return a list of intermediary calculations. -#' See Details below. -#' @param \dots any other passthru parameters. Not currently used. -#' @return returns a time series of returns weighted by the \code{weights} -#' parameter, or a list that includes intermediate calculations -#' @author Peter Carl, Ross Bennett, Brian Peterson -#' @seealso \code{\link{Return.calculate}} \code{\link{xts::endpoints}} \cr -#' @references Bacon, C. \emph{Practical Portfolio Performance Measurement and -#' Attribution}. Wiley. 2004. Chapter 2\cr -#' @keywords ts multivariate distribution models -#' @examples -#' -#' data(edhec) -#' Return.rebalancing(edhec["1997",1:5], rebalance_on="quarterly") # returns time series -#' Return.rebalancing(edhec["1997",1:5], rebalance_on="quarterly", verbose=TRUE) # returns list -#' # with a weights object -#' data(weights) # rebalance at the beginning of the year to various weights through time -#' chart.StackedBar(weights) -#' x <- Return.rebalancing(edhec["2000::",1:11], weights=weights,verbose=TRUE) -#' chart.CumReturns(x$returns) -#' chart.StackedBar(x$BOP.Weight) -#' chart.StackedBar(x$BOP.Value) -#' -#' @rdname Return.portfolio -#' @export Return.portfolio -#' @export Return.rebalancing -Return.portfolio <- Return.rebalancing <- function(R, - weights=NULL, - wealth.index=FALSE, - contribution=FALSE, - geometric=TRUE, - rebalance_on=c(NA, 'years', 'quarters', 'months', 'weeks', 'days'), - value=1, - verbose=FALSE, - ...){ - R = checkData(R, method="xts") - rebalance_on = rebalance_on[1] - - # find the right 
unit to subtract from the first return date to create a start date - freq = periodicity(R) - switch(freq$scale, - seconds = { stop("Use a returns series of daily frequency or higher.") }, - minute = { stop("Use a returns series of daily frequency or higher.") }, - hourly = { stop("Use a returns series of daily frequency or higher.") }, - daily = { time_unit = "day" }, - weekly = { time_unit = "week" }, - monthly = { time_unit= "month" }, - quarterly = { time_unit = "quarter" }, - yearly = { time_unit = "year"} - ) - - # calculates the end of the prior period - # need to use the if on quarter as quarter is incompatible with seq (it does not work with by) - if(time_unit=='quarter'){ start_date = as.yearqtr(seq(as.Date(index(R)[1]), length = 2, by = paste("-3", 'month'))[2])}else{ start_date = seq(as.Date(index(R)[1]), length = 2, by = paste("-1", time_unit))[2]} - if(is.null(weights)){ - # generate equal weight vector for return columns - weights = rep(1 / NCOL(R), NCOL(R)) - } - if(is.vector(weights)) { # weights are a vector - if(is.na(rebalance_on)) { # and endpoints are not specified - # then use the weights only at the beginning of the returns series, without rebalancing - weights = xts(matrix(weights, nrow=1), order.by=as.Date(start_date)) - } else { # and endpoints are specified - # generate a time series of the given weights at the endpoints - weight_dates = c(as.Date(start_date), index(R[endpoints(R, on=rebalance_on)])) - weights = xts(matrix(rep(weights, length(weight_dates)), ncol=NCOL(R), byrow=TRUE), order.by=as.Date(weight_dates)) - } - colnames(weights) = colnames(R) - } else { # check the beginning_weights object for errors - # check that weights are given in a form that is probably a time series - weights = checkData(weights, method="xts") - # make sure that frequency(weights) NCOL(weights)){ - R = R[, 1:NCOL(weights)] - warning("number of assets in beginning_weights is less than number of columns in returns, so subsetting returns.") - } else { - 
stop("number of assets is greater than number of columns in returns object") - } - } - } # we should have good weights objects at this point - - if(as.Date(last(index(R))) < (as.Date(index(weights[1,]))+1)){ - stop(paste('last date in series',as.Date(last(index(R))),'occurs before beginning of first rebalancing period',as.Date(first(index(weights)))+1)) - } - - # Subset the R object if the first rebalance date is after the first date - # in the return series - if(as.Date(index(weights[1,])) > as.Date(first(index(R)))) { - R <- R[paste0(as.Date(index(weights[1,]))+1, "/")] - } - - - if(geometric){ - out = Return.portfolio.geometric(R=R, - weights=weights, - wealth.index=wealth.index, - contribution=contribution, - rebalance_on=rebalance_on, - value=value, - verbose=verbose, - ...=...) - } else { - out = Return.portfolio.arithmetic(R=R, - weights=weights, - wealth.index=wealth.index, - contribution=contribution, - rebalance_on=rebalance_on, - verbose=verbose, - ...=...) - } - return(out) -} - -Return.portfolio.arithmetic <- function(R, - weights=NULL, - wealth.index=FALSE, - contribution=FALSE, - rebalance_on=c(NA, 'years', 'quarters', 'months', 'weeks', 'days'), - verbose=FALSE, - ...) 
-{ - # bop = beginning of period - # eop = end of period - # Initialize objects - bop_weights = matrix(0, NROW(R), NCOL(R)) - colnames(bop_weights) = colnames(R) - eop_weights = period_contrib = bop_weights - ret = vector("numeric", NROW(R)) - - # initialize counter - k = 1 - for(i in 1:NROW(weights)) { - # identify rebalance from and to dates (weights[i,], weights[i+1]) and - # subset the R(eturns) object - from = as.Date(index(weights[i,]))+1 - if (i == nrow(weights)){ - to = as.Date(index(last(R))) # this is correct - } else { - to = as.Date(index(weights[(i+1),])) - } - returns = R[paste0(from, "::", to)] - - # Only enter the loop if we have a valid returns object - if(nrow(returns) >= 1){ - # inner loop counter - jj = 1 - for(j in 1:nrow(returns)){ - # For arithmetic returns, the beginning of period weights are always - # equal to the rebalance weights - bop_weights[k,] = weights[i,] - period_contrib[k,] = coredata(returns[j,]) * bop_weights[k,] - eop_weights[k,] = (period_contrib[k,] + bop_weights[k,]) / sum(c(period_contrib[k,], bop_weights[k,])) - ret[k] = sum(period_contrib[k,]) - - # increment the counters - k = k + 1 - } - } - } - R.idx = index(R) - ret = xts(ret, R.idx) - colnames(ret) = "portfolio.returns" - - if(wealth.index){ - result = cumsum(ret) + 1 - colnames(result) = "portfolio.wealthindex" - } else { - result = ret - } - - if(verbose){ - out = list() - out$returns = ret - out$contribution = xts(period_contrib, R.idx) - out$BOP.Weight = xts(bop_weights, R.idx) - out$EOP.Weight = xts(eop_weights, R.idx) - if(wealth.index){ - out$wealthindex = result - } - } else if(contribution){ - out = cbind(result, xts(period_contrib, R.idx)) - } else { - out = result - } - return(out) -} - -Return.portfolio.geometric <- function(R, - weights=NULL, - wealth.index=FALSE, - contribution=FALSE, - rebalance_on=c(NA, 'years', 'quarters', 'months', 'weeks', 'days'), - value=1, - verbose=FALSE, - ...) 
-{ - # bop = beginning of period - # eop = end of period - # Initialize objects - bop_value = matrix(0, NROW(R), NCOL(R)) - colnames(bop_value) = colnames(R) - eop_value = bop_value - - if(verbose | contribution){ - period_contrib = bop_value - if(verbose){ - bop_weights = bop_value - eop_weights = bop_value - } - } - ret = eop_value_total = bop_value_total = vector("numeric", NROW(R)) - - # The end_value is the end of period total value from the prior period - end_value <- value - - # initialize counter - k = 1 - for(i in 1:NROW(weights)) { - # identify rebalance from and to dates (weights[i,], weights[i+1]) and - # subset the R(eturns) object - from = as.Date(index(weights[i,]))+1 - if (i == nrow(weights)){ - to = as.Date(index(last(R))) # this is correct - } else { - to = as.Date(index(weights[(i+1),])) - } - returns = R[paste0(from, "::", to)] - - # Only enter the loop if we have a valid returns object - if(nrow(returns) >= 1){ - # inner loop counter - jj = 1 - for(j in 1:nrow(returns)){ - # We need to know when we are at the start of this inner loop so we can - # set the correct beginning of period value. We start a new inner loop - # at each rebalance date. 
- # Compute beginning of period values - if(jj == 1){ - bop_value[k,] = end_value * weights[i,] - } else { - bop_value[k,] = eop_value[k-1,] - } - bop_value_total[k] = sum(bop_value[k,]) - - # Compute end of period values - eop_value[k,] = (1 + coredata(returns[j,])) * bop_value[k,] - eop_value_total[k] = sum(eop_value[k,]) - - if(contribution | verbose){ - # Compute period contribution - period_contrib[k,] = returns[j,] * bop_value[k,] / sum(bop_value[k,]) - if(verbose){ - # Compute bop and eop weights - bop_weights[k,] = bop_value[k,] / bop_value_total[k] - eop_weights[k,] = eop_value[k,] / eop_value_total[k] - } - } - - # Compute portfolio returns - # Could also compute this by summing contribution, but this way we - # don't have to compute contribution if verbose=FALSE - ret[k] = eop_value_total[k] / end_value - 1 - - # Update end_value - end_value = eop_value_total[k] - - # increment the counters - jj = jj + 1 - k = k + 1 - } - } - } - R.idx = index(R) - ret = xts(ret, R.idx) - colnames(ret) = "portfolio.returns" - - if(wealth.index){ - result = cumprod(1 + ret) - colnames(result) = "portfolio.wealthindex" - } else { - result = ret - } - - if(verbose){ - out = list() - out$returns = ret - out$contribution = xts(period_contrib, R.idx) - out$BOP.Weight = xts(bop_weights, R.idx) - out$EOP.Weight = xts(eop_weights, R.idx) - out$BOP.Value = xts(bop_value, R.idx) - out$EOP.Value = xts(eop_value, R.idx) - if(wealth.index){ - out$wealthindex = result - } - } else if(contribution){ - out = cbind(result, xts(period_contrib, R.idx)) - } else { - out = result - } - return(out) -} - -############################################################################### -# R (http://r-project.org/) Econometrics for Performance and Risk Analysis -# -# Copyright (c) 2004-2014 Peter Carl and Brian G. 
Peterson -# -# This R package is distributed under the terms of the GNU Public License (GPL) -# for full details see the file COPYING -# -# $Id$ -# -############################################################################### +#' Calculate weighted returns for a portfolio of assets +#' +#' Using a time series of returns and any regular or irregular time series of weights +#' for each asset, this function calculates the returns of a portfolio with the same +#' periodicity of the returns data. +#' +#' By default, this function calculates the time series of portfolio returns given asset +#' returns and weights. In verbose mode, the function returns a list of intermediary +#' calculations that users may find helpful, including both asset contribution and +#' asset value through time. +#' +#' When asset return and weights are matched by period, contribution is simply the +#' weighted return of the asset. c_i = w_i * R_i Contributions are summable across the +#' portfolio to calculate the total portfolio return. +#' +#' Contribution cannot be aggregated through time. For example, say we have an equal +#' weighted portfolio of five assets with monthly returns. The geometric return of the +#' portfolio over several months won't match any aggregation of the individual +#' contributions of the assets, particularly if any rebalancing was done during the +#' period. +#' +#' To aggregate contributions through time such that they are summable to the geometric +#' returns of the portfolio, the calculation must track changes in the notional value of +#' the assets and portfolio. For example, contribution during a quarter will be +#' calculated as the change in value of the position through those three months, divided +#' by the original value of the portfolio. Approaching it this way makes the +#' calculation robust to weight changes as well. c_pi = (V_(t,i) - V_(t-p,i)) / V_(t-p) +#' +#' If the user does not specify weights, an equal weight portfolio is assumed. 
+#' Alternatively, a vector or single-row matrix of weights that matches the length +#' of the asset columns may be specified. In either case, if no rebalancing period is +#' specified, the weights will be applied at the beginning of the asset time series +#' and no further rebalancing will take place. If a rebalancing period is specified, +#' the portfolio will be rebalanced to the starting weights at the interval specified. +#' +#' Return.rebalancing will work only on daily or lower frequencies. If you are +#' rebalancing intraday, you should be using a trades/prices framework like +#' {\link{\code{blotter}}}, not a weights/returns framework. +#' +#' Irregular rebalancing can be done by specifying a time series of weights. The +#' function uses the date index of the weights for xts-style subsetting of rebalancing +#' periods. +#' +#' Weights specified for rebalancing should be thought of as "end-of-period" weights. +#' Rebalancing periods can be thought of as taking effect immediately after the close +#' of the bar. So, a March 31 rebalancing date will actually be in effect for April 1. +#' A December 31 rebalancing date will be in effect on Jan 1, and so forth. This +#' convention was chosen because it fits with common usage, and because it simplifies +#' xts Date subsetting via endpoints. +#' +#' In verbose mode, the function returns a list of data and intermediary calculations. +#' \itemize{ +#' \item{\code{returns}:}{ The portfolio returns.} +#' \item{\code{contribution}:}{ The per period contribution to portfolio +#' return of each asset. Contribution is calculated as BOP weight times the +#' period's return divided by BOP value. Period contributions are summed +#' across the individual assets to calculate portfolio return} +#' \item{\code{BOP.Weight}:}{ Beginning of Period (BOP) Weight for each +#' asset. An asset's BOP weight is calculated using the input weights +#' (or assumed weights, see below) and rebalancing parameters given. 
The next +#' period's BOP weight is either the EOP weights from the prior period or +#' input weights given on a rebalance period.} +#' \item{\code{EOP.Weight:}}{ End of Period (BOP) Weight for each asset. +#' An asset's EOP weight is the sum of the asset's BOP weight and +#' contribution for the period divided by the sum of the contributions and +#' initial weights for the portfolio.} +#' \item{\code{BOP.Value:}}{ BOP Value for each asset. The BOP value for each +#' asset is the asset's EOP value from the prior period, unless there is a +#' rebalance event. If there is a rebalance event, the BOP value of the +#' asset is the rebalance weight times the EOP value of the portfolio. That +#' effectively provides a zero-transaction cost change to the position values +#' as of that date to reflect the rebalance. Note that the sum of the BOP +#' values of the assets is the same as the prior period's EOP portfolio value.} +#' \item{\code{EOP.Value:}}{ EOP Value for each asset. The EOP value is for +#' each asset is calculated as (1 + asset return) times the asset's BOP value. +#' The EOP portfolio value is the sum of EOP value across assets.} +#' } +#' +#' To calculate BOP and EOP position value, we create an index for each position. The +#' sum of that value across assets represents an indexed value of the total portfolio. +#' Note that BOP and EOP position values are only computed when \code{geometric = TRUE}. +#' +#' From the value calculations, we can calculate different aggregations through time +#' for the asset contributions. Those are calculated as the EOP asset value less the +#' BOP asset value; that quantity is divided by the BOP portfolio value. +#' Across assets, those will sum to equal the geometric chained returns of the +#' portfolio for that same time period. The function does not do this directly, however. 
+#' +#' @aliases Return.portfolio Return.rebalancing +#' @param R An xts, vector, matrix, data frame, timeSeries or zoo object of +#' asset returns +#' @param weights A time series or single-row matrix/vector containing asset +#' weights, as decimal percentages, treated as beginning of period weights. +#' See Details below. +#' @param wealth.index TRUE/FALSE whether to return a wealth index. Default FALSE +#' @param contribution if contribution is TRUE, add the weighted return +#' contributed by the asset in a given period. Default FALSE +#' @param geometric utilize geometric chaining (TRUE) or simple/arithmetic (FALSE) +#' to aggregate returns. Default TRUE. +#' @param rebalance_on Default "none"; alternatively "daily" "weekly" "monthly" "annual" to specify calendar-period rebalancing supported by \code{endpoints}. +#' @param value The beginning of period total portfolio value. This is used for calculating position value. +#' @param verbose If verbose is TRUE, return a list of intermediary calculations. +#' See Details below. +#' @param \dots any other passthru parameters. Not currently used. +#' @return returns a time series of returns weighted by the \code{weights} +#' parameter, or a list that includes intermediate calculations +#' @author Peter Carl, Ross Bennett, Brian Peterson +#' @seealso \code{\link{Return.calculate}} \code{\link{xts::endpoints}} \cr +#' @references Bacon, C. \emph{Practical Portfolio Performance Measurement and +#' Attribution}. Wiley. 2004. 
Chapter 2\cr +#' @keywords ts multivariate distribution models +#' @examples +#' +#' data(edhec) +#' Return.rebalancing(edhec["1997",1:5], rebalance_on="quarters") # returns time series +#' Return.rebalancing(edhec["1997",1:5], rebalance_on="quarters", verbose=TRUE) # returns list +#' # with a weights object +#' data(weights) # rebalance at the beginning of the year to various weights through time +#' chart.StackedBar(weights) +#' x <- Return.rebalancing(edhec["2000::",1:11], weights=weights,verbose=TRUE) +#' chart.CumReturns(x$returns) +#' chart.StackedBar(x$BOP.Weight) +#' chart.StackedBar(x$BOP.Value) +#' +#' @rdname Return.portfolio +#' @export Return.portfolio +#' @export Return.rebalancing +Return.portfolio <- Return.rebalancing <- function(R, + weights=NULL, + wealth.index=FALSE, + contribution=FALSE, + geometric=TRUE, + rebalance_on=c(NA, 'years', 'quarters', 'months', 'weeks', 'days'), + value=1, + verbose=FALSE, + ...){ + R = checkData(R, method="xts") + if(any(is.na(R))){ + warning("NA's detected: filling NA's with zeros") + #R <- zerofill(R) + R[is.na(R)] <- 0 + } + rebalance_on = rebalance_on[1] + + # find the right unit to subtract from the first return date to create a start date + freq = periodicity(R) + switch(freq$scale, + seconds = { stop("Use a returns series of daily frequency or higher.") }, + minute = { stop("Use a returns series of daily frequency or higher.") }, + hourly = { stop("Use a returns series of daily frequency or higher.") }, + daily = { time_unit = "day" }, + weekly = { time_unit = "week" }, + monthly = { time_unit= "month" }, + quarterly = { time_unit = "quarter" }, + yearly = { time_unit = "year"} + ) + + # calculates the end of the prior period + # need to use the if on quarter as quarter is incompatible with seq (it does not work with by) + if(time_unit=='quarter'){ start_date = as.yearqtr(seq(as.Date(index(R)[1]), length = 2, by = paste("-3", 'month'))[2])}else{ start_date = seq(as.Date(index(R)[1]), length = 2, by = 
paste("-1", time_unit))[2]} + if(is.null(weights)){ + # generate equal weight vector for return columns + weights = rep(1 / NCOL(R), NCOL(R)) + } + if(is.vector(weights)) { # weights are a vector + if(is.na(rebalance_on)) { # and endpoints are not specified + # then use the weights only at the beginning of the returns series, without rebalancing + weights = xts(matrix(weights, nrow=1), order.by=as.Date(start_date)) + } else { # and endpoints are specified + # generate a time series of the given weights at the endpoints + weight_dates = c(as.Date(start_date), index(R[endpoints(R, on=rebalance_on)])) + weights = xts(matrix(rep(weights, length(weight_dates)), ncol=NCOL(R), byrow=TRUE), order.by=as.Date(weight_dates)) + } + colnames(weights) = colnames(R) + } else { # check the beginning_weights object for errors + # check that weights are given in a form that is probably a time series + weights = checkData(weights, method="xts") + # make sure that frequency(weights) NCOL(weights)){ + R = R[, 1:NCOL(weights)] + warning("number of assets in beginning_weights is less than number of columns in returns, so subsetting returns.") + } else { + stop("number of assets is greater than number of columns in returns object") + } + } + } # we should have good weights objects at this point + + if(as.Date(last(index(R))) < (as.Date(index(weights[1,]))+1)){ + stop(paste('last date in series',as.Date(last(index(R))),'occurs before beginning of first rebalancing period',as.Date(first(index(weights)))+1)) + } + + # Subset the R object if the first rebalance date is after the first date + # in the return series + if(as.Date(index(weights[1,])) > as.Date(first(index(R)))) { + R <- R[paste0(as.Date(index(weights[1,]))+1, "/")] + } + + + if(geometric){ + out = Return.portfolio.geometric(R=R, + weights=weights, + wealth.index=wealth.index, + contribution=contribution, + rebalance_on=rebalance_on, + value=value, + verbose=verbose, + ...=...) 
+ } else { + out = Return.portfolio.arithmetic(R=R, + weights=weights, + wealth.index=wealth.index, + contribution=contribution, + rebalance_on=rebalance_on, + verbose=verbose, + ...=...) + } + return(out) +} + +Return.portfolio.arithmetic <- function(R, + weights=NULL, + wealth.index=FALSE, + contribution=FALSE, + rebalance_on=c(NA, 'years', 'quarters', 'months', 'weeks', 'days'), + verbose=FALSE, + ...) +{ + # bop = beginning of period + # eop = end of period + # Initialize objects + bop_weights = matrix(0, NROW(R), NCOL(R)) + colnames(bop_weights) = colnames(R) + eop_weights = period_contrib = bop_weights + ret = vector("numeric", NROW(R)) + + # initialize counter + k = 1 + for(i in 1:NROW(weights)) { + # identify rebalance from and to dates (weights[i,], weights[i+1]) and + # subset the R(eturns) object + from = as.Date(index(weights[i,]))+1 + if (i == nrow(weights)){ + to = as.Date(index(last(R))) # this is correct + } else { + to = as.Date(index(weights[(i+1),])) + } + returns = R[paste0(from, "::", to)] + + # Only enter the loop if we have a valid returns object + if(nrow(returns) >= 1){ + # inner loop counter + jj = 1 + for(j in 1:nrow(returns)){ + # For arithmetic returns, the beginning of period weights are always + # equal to the rebalance weights [TRUNCATED] To get the complete diff run: svnlook diff /svnroot/returnanalytics -r 3514 From noreply at r-forge.r-project.org Tue Aug 26 04:51:23 2014 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Tue, 26 Aug 2014 04:51:23 +0200 (CEST) Subject: [Returnanalytics-commits] r3515 - in pkg/PerformanceAnalytics: R man Message-ID: <20140826025123.78B2B187716@r-forge.r-project.org> Author: rossbennett34 Date: 2014-08-26 04:51:22 +0200 (Tue, 26 Aug 2014) New Revision: 3515 Modified: pkg/PerformanceAnalytics/R/Return.portfolio.R pkg/PerformanceAnalytics/man/Return.portfolio.Rd Log: fixing bad link error Modified: pkg/PerformanceAnalytics/R/Return.portfolio.R 
=================================================================== --- pkg/PerformanceAnalytics/R/Return.portfolio.R 2014-08-24 23:56:50 UTC (rev 3514) +++ pkg/PerformanceAnalytics/R/Return.portfolio.R 2014-08-26 02:51:22 UTC (rev 3515) @@ -35,7 +35,7 @@ #' #' Return.rebalancing will work only on daily or lower frequencies. If you are #' rebalancing intraday, you should be using a trades/prices framework like -#' {\link{\code{blotter}}}, not a weights/returns framework. +#' the \code{blotter} package, not a weights/returns framework. #' #' Irregular rebalancing can be done by specifying a time series of weights. The #' function uses the date index of the weights for xts-style subsetting of rebalancing @@ -97,7 +97,7 @@ #' contributed by the asset in a given period. Default FALSE #' @param geometric utilize geometric chaining (TRUE) or simple/arithmetic (FALSE) #' to aggregate returns. Default TRUE. -#' @param rebalance_on Default "none"; alternatively "daily" "weekly" "monthly" "annual" to specify calendar-period rebalancing supported by \code{endpoints}. +#' @param rebalance_on Default "none"; alternatively "daily" "weekly" "monthly" "annual" to specify calendar-period rebalancing supported by \code{\link[xts]{endpoints}}. #' @param value The beginning of period total portfolio value. This is used for calculating position value. #' @param verbose If verbose is TRUE, return a list of intermediary calculations. #' See Details below. @@ -105,7 +105,7 @@ #' @return returns a time series of returns weighted by the \code{weights} #' parameter, or a list that includes intermediate calculations #' @author Peter Carl, Ross Bennett, Brian Peterson -#' @seealso \code{\link{Return.calculate}} \code{\link{xts::endpoints}} \cr +#' @seealso \code{\link{Return.calculate}} \code{\link[xts]{endpoints}} \cr #' @references Bacon, C. \emph{Practical Portfolio Performance Measurement and #' Attribution}. Wiley. 2004. 
Chapter 2\cr #' @keywords ts multivariate distribution models Modified: pkg/PerformanceAnalytics/man/Return.portfolio.Rd =================================================================== --- pkg/PerformanceAnalytics/man/Return.portfolio.Rd 2014-08-24 23:56:50 UTC (rev 3514) +++ pkg/PerformanceAnalytics/man/Return.portfolio.Rd 2014-08-26 02:51:22 UTC (rev 3515) @@ -24,7 +24,7 @@ \item{geometric}{utilize geometric chaining (TRUE) or simple/arithmetic (FALSE) to aggregate returns. Default TRUE.} -\item{rebalance_on}{Default "none"; alternatively "daily" "weekly" "monthly" "annual" to specify calendar-period rebalancing supported by \code{endpoints}.} +\item{rebalance_on}{Default "none"; alternatively "daily" "weekly" "monthly" "annual" to specify calendar-period rebalancing supported by \code{\link[xts]{endpoints}}.} \item{value}{The beginning of period total portfolio value. This is used for calculating position value.} @@ -74,7 +74,7 @@ Return.rebalancing will work only on daily or lower frequencies. If you are rebalancing intraday, you should be using a trades/prices framework like -{\link{\code{blotter}}}, not a weights/returns framework. +the \code{blotter} package, not a weights/returns framework. Irregular rebalancing can be done by specifying a time series of weights. The function uses the date index of the weights for xts-style subsetting of rebalancing @@ -145,7 +145,7 @@ Attribution}. Wiley. 2004. 
Chapter 2\cr } \seealso{ -\code{\link{Return.calculate}} \code{\link{xts::endpoints}} \cr +\code{\link{Return.calculate}} \code{\link[xts]{endpoints}} \cr } \keyword{distribution} \keyword{models} From noreply at r-forge.r-project.org Wed Aug 27 22:34:38 2014 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 27 Aug 2014 22:34:38 +0200 (CEST) Subject: [Returnanalytics-commits] r3516 - pkg/PerformanceAnalytics/R Message-ID: <20140827203438.2B1EB18762F@r-forge.r-project.org> Author: kylebalkissoon Date: 2014-08-27 22:34:37 +0200 (Wed, 27 Aug 2014) New Revision: 3516 Modified: pkg/PerformanceAnalytics/R/table.ProbOutperformance.R Log: Expanded documentation and added examples Modified: pkg/PerformanceAnalytics/R/table.ProbOutperformance.R =================================================================== --- pkg/PerformanceAnalytics/R/table.ProbOutperformance.R 2014-08-26 02:51:22 UTC (rev 3515) +++ pkg/PerformanceAnalytics/R/table.ProbOutperformance.R 2014-08-27 20:34:37 UTC (rev 3516) @@ -1,12 +1,15 @@ -#' Calculates Count of trailing periods where a fund outperformed its benchmark and calculates the proportion of those periods, this is commonly used in marketing as the probability of outperformance on a N year basis #' -#' +#' @description Tool for Robustness analysis of a strategy, can be used to give the probability an investor investing at any point in time will outperform the benchmark over a given horizon. Calculates Count of trailing periods where a fund outperformed its benchmark and calculates the proportion of those periods, this is commonly used in marketing as the probability of outperformance on a N period basis. +#' #' @param R an xts, timeSeries or zoo object of asset returns #' @param Rb an xts, timeSeries or zoo object of the benchmark returns #' @param period_lengths a vector of periods the user wants to evaluate this over i.e. 
c(1,3,6,9,12,18,36) #' @author Kyle Balkissoon #' @keywords Performance Reporting Fund vs Benchmark -#' +#' @details Returns a table that contains the counts and probabilities of outperformance relative to benchmark for the various period_lengths +#' @examples +#' data(edhec) ##get data +#' table.ProbOutPerformance(edhec[,1],edhec[,2]) ##Returns tables of conv arb benchmarked to cta global #' @export table_ProbOutperformance table.ProbOutPerformance = function(R,Rb,period_lengths=c(1,3,6,9,12,18,36)){ From noreply at r-forge.r-project.org Wed Aug 27 22:35:20 2014 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 27 Aug 2014 22:35:20 +0200 (CEST) Subject: [Returnanalytics-commits] r3517 - pkg/PerformanceAnalytics/man Message-ID: <20140827203520.A80C318762F@r-forge.r-project.org> Author: kylebalkissoon Date: 2014-08-27 22:35:20 +0200 (Wed, 27 Aug 2014) New Revision: 3517 Added: pkg/PerformanceAnalytics/man/table.ProbOutPerformance.Rd Log: Documentation for table.ProbOutPerformance.Rd Added: pkg/PerformanceAnalytics/man/table.ProbOutPerformance.Rd =================================================================== --- pkg/PerformanceAnalytics/man/table.ProbOutPerformance.Rd (rev 0) +++ pkg/PerformanceAnalytics/man/table.ProbOutPerformance.Rd 2014-08-27 20:35:20 UTC (rev 3517) @@ -0,0 +1,33 @@ +% Generated by roxygen2 (4.0.1): do not edit by hand +\name{table.ProbOutPerformance} +\alias{table.ProbOutPerformance} +\title{} +\usage{ +table.ProbOutPerformance(R, Rb, period_lengths = c(1, 3, 6, 9, 12, 18, 36)) +} +\arguments{ +\item{R}{an xts, timeSeries or zoo object of asset returns} + +\item{Rb}{an xts, timeSeries or zoo object of the benchmark returns} + +\item{period_lengths}{a vector of periods the user wants to evaluate this over i.e. 
c(1,3,6,9,12,18,36)} +} +\description{ +Tool for Robustness analysis of a strategy, can be used to give the probability an investor investing at any point in time will outperform the benchmark over a given horizon. Calculates Count of trailing periods where a fund outperformed its benchmark and calculates the proportion of those periods, this is commonly used in marketing as the probability of outperformance on a N period basis. +} +\details{ +Returns a table that contains the counts and probabilities of outperformance relative to benchmark for the various period_lengths +} +\examples{ +data(edhec) ##get data +table.ProbOutPerformance(edhec[,1],edhec[,2]) ##Returns tables of conv arb benchmarked to cta global +} +\author{ +Kyle Balkissoon +} +\keyword{Benchmark} +\keyword{Fund} +\keyword{Performance} +\keyword{Reporting} +\keyword{vs} + From noreply at r-forge.r-project.org Sat Aug 30 05:10:21 2014 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Sat, 30 Aug 2014 05:10:21 +0200 (CEST) Subject: [Returnanalytics-commits] r3518 - pkg/PerformanceAnalytics/R Message-ID: <20140830031021.6618E183E26@r-forge.r-project.org> Author: efmrforge Date: 2014-08-30 05:10:20 +0200 (Sat, 30 Aug 2014) New Revision: 3518 Modified: pkg/PerformanceAnalytics/R/table.ProbOutperformance.R Log: Added title and one line description so it would build Modified: pkg/PerformanceAnalytics/R/table.ProbOutperformance.R =================================================================== --- pkg/PerformanceAnalytics/R/table.ProbOutperformance.R 2014-08-27 20:35:20 UTC (rev 3517) +++ pkg/PerformanceAnalytics/R/table.ProbOutperformance.R 2014-08-30 03:10:20 UTC (rev 3518) @@ -1,4 +1,7 @@ -#' +#' Probability of Outperformance +#' Returns tables of convertable arbitrage between a return series and +#' a benchmark +#' #' @description Tool for Robustness analysis of a strategy, can be used to give the probability an investor investing at any point in time will outperform the 
benchmark over a given horizon. Calculates Count of trailing periods where a fund outperformed its benchmark and calculates the proportion of those periods, this is commonly used in marketing as the probability of outperformance on a N period basis. #' #' @param R an xts, timeSeries or zoo object of asset returns @@ -52,4 +55,4 @@ colnames(result) = c("period_lengths",colnames(R),colnames(Rb),"total periods",paste0("prob_",colnames(R),"_outperformance"),paste0("prob_",colnames(Rb),"_outperformance")) return(result) -} \ No newline at end of file +} From noreply at r-forge.r-project.org Sat Aug 30 16:27:59 2014 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Sat, 30 Aug 2014 16:27:59 +0200 (CEST) Subject: [Returnanalytics-commits] r3519 - pkg/PerformanceAnalytics/R Message-ID: <20140830142759.3FBC81864D2@r-forge.r-project.org> Author: efmrforge Date: 2014-08-30 16:27:58 +0200 (Sat, 30 Aug 2014) New Revision: 3519 Modified: pkg/PerformanceAnalytics/R/table.ProbOutperformance.R Log: Fixed formatting, so this would build Modified: pkg/PerformanceAnalytics/R/table.ProbOutperformance.R =================================================================== --- pkg/PerformanceAnalytics/R/table.ProbOutperformance.R 2014-08-30 03:10:20 UTC (rev 3518) +++ pkg/PerformanceAnalytics/R/table.ProbOutperformance.R 2014-08-30 14:27:58 UTC (rev 3519) @@ -1,20 +1,33 @@ -#' Probability of Outperformance -#' Returns tables of convertable arbitrage between a return series and -#' a benchmark +#' Performance Reporting Fund vs Benchmark #' -#' @description Tool for Robustness analysis of a strategy, can be used to give the probability an investor investing at any point in time will outperform the benchmark over a given horizon. Calculates Count of trailing periods where a fund outperformed its benchmark and calculates the proportion of those periods, this is commonly used in marketing as the probability of outperformance on a N period basis. 
-#' +#' Table of Performance Reporting vs Benchmark +#' +#' Returns a table that contains the counts and probabilities +#' of outperformance relative to benchmark for the various period_lengths +#' +#' Tool for Robustness analysis of a strategy, can be used to +#' give the probability an investor investing at any point in time will +#' outperform the benchmark over a given horizon. Calculates Count of +#' trailing periods where a fund outperformed its benchmark and calculates +#' the proportion of those periods, this is commonly used in marketing as +#' the probability of outperformance on a N period basis. +#' #' @param R an xts, timeSeries or zoo object of asset returns #' @param Rb an xts, timeSeries or zoo object of the benchmark returns -#' @param period_lengths a vector of periods the user wants to evaluate this over i.e. c(1,3,6,9,12,18,36) +#' @param period_lengths a vector of periods the user wants to evaluate this +#' over i.e. c(1,3,6,9,12,18,36) #' @author Kyle Balkissoon -#' @keywords Performance Reporting Fund vs Benchmark -#' @details Returns a table that contains the counts and probabilities of outperformance relative to benchmark for the various period_lengths +#' @keywords ts multivariate distribution models +#' @details Returns a table that contains the counts and probabilities +#' of outperformance relative to benchmark for the various period_lengths +#' #' @examples -#' data(edhec) ##get data -#' table.ProbOutPerformance(edhec[,1],edhec[,2]) ##Returns tables of conv arb benchmarked to cta global -#' @export table_ProbOutperformance - +#' +#' data(edhec) +#' +#' table.ProbOutPerformance(edhec[,1],edhec[,2]) +#' title(main='Table of Convertible Arbitrage vs Benchmark') +#' @export table.ProbOutPerformance = function(R,Rb,period_lengths=c(1,3,6,9,12,18,36)){ if(nrow(R)!=nrow(Rb)){ stop("R and Rb must be the same length") From noreply at r-forge.r-project.org Sat Aug 30 16:40:16 2014 From: noreply at r-forge.r-project.org (noreply at 
r-forge.r-project.org) Date: Sat, 30 Aug 2014 16:40:16 +0200 (CEST) Subject: [Returnanalytics-commits] r3520 - pkg/PerformanceAnalytics/man Message-ID: <20140830144016.B8190183BD7@r-forge.r-project.org> Author: efmrforge Date: 2014-08-30 16:40:16 +0200 (Sat, 30 Aug 2014) New Revision: 3520 Removed: pkg/PerformanceAnalytics/man/table.ProbOutPerformance.Rd Log: Removed failing .Rd file Deleted: pkg/PerformanceAnalytics/man/table.ProbOutPerformance.Rd =================================================================== --- pkg/PerformanceAnalytics/man/table.ProbOutPerformance.Rd 2014-08-30 14:27:58 UTC (rev 3519) +++ pkg/PerformanceAnalytics/man/table.ProbOutPerformance.Rd 2014-08-30 14:40:16 UTC (rev 3520) @@ -1,33 +0,0 @@ -% Generated by roxygen2 (4.0.1): do not edit by hand -\name{table.ProbOutPerformance} -\alias{table.ProbOutPerformance} -\title{} -\usage{ -table.ProbOutPerformance(R, Rb, period_lengths = c(1, 3, 6, 9, 12, 18, 36)) -} -\arguments{ -\item{R}{an xts, timeSeries or zoo object of asset returns} - -\item{Rb}{an xts, timeSeries or zoo object of the benchmark returns} - -\item{period_lengths}{a vector of periods the user wants to evaluate this over i.e. c(1,3,6,9,12,18,36)} -} -\description{ -Tool for Robustness analysis of a strategy, can be used to give the probability an investor investing at any point in time will outperform the benchmark over a given horizon. Calculates Count of trailing periods where a fund outperformed its benchmark and calculates the proportion of those periods, this is commonly used in marketing as the probability of outperformance on a N period basis. 
-} -\details{ -Returns a table that contains the counts and probabilities of outperformance relative to benchmark for the various period_lengths -} -\examples{ -data(edhec) ##get data -table.ProbOutPerformance(edhec[,1],edhec[,2]) ##Returns tables of conv arb benchmarked to cta global -} -\author{ -Kyle Balkissoon -} -\keyword{Benchmark} -\keyword{Fund} -\keyword{Performance} -\keyword{Reporting} -\keyword{vs} -