[Returnanalytics-commits] r4026 - in pkg/FactorAnalytics: . R man
noreply at r-forge.r-project.org
Fri Aug 5 17:21:42 CEST 2016
Author: pragnya
Date: 2016-08-05 17:21:42 +0200 (Fri, 05 Aug 2016)
New Revision: 4026
Modified:
pkg/FactorAnalytics/DESCRIPTION
pkg/FactorAnalytics/R/fmVaRDecomp.R
pkg/FactorAnalytics/man/fmVaRDecomp.Rd
Log:
Updated fmVaRDecomp to include normal VaR and a user-defined factor covariance option
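
A minimal usage sketch of the new interface (illustrative only: it reuses the managers example from the updated help file, and the user-supplied covariance below is just the sample factor covariance):

library(factorAnalytics)
data(managers)
fit.macro <- fitTsfm(asset.names=colnames(managers[,(1:6)]),
                     factor.names=colnames(managers[,(7:8)]), data=managers)
# user-defined K x K factor covariance with named columns
fcov <- cov(managers[,(7:8)], use="pairwise.complete.obs")
VaR.decomp <- fmVaRDecomp(fit.macro, factor.cov=fcov, type="normal", p=0.95)
VaR.decomp$VaR.fm   # normal VaR per asset
VaR.decomp$cVaR     # component contributions by factor and residual
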
Modified: pkg/FactorAnalytics/DESCRIPTION
===================================================================
--- pkg/FactorAnalytics/DESCRIPTION 2016-08-04 12:55:43 UTC (rev 4025)
+++ pkg/FactorAnalytics/DESCRIPTION 2016-08-05 15:21:42 UTC (rev 4026)
@@ -1,7 +1,7 @@
Package: factorAnalytics
Type: Package
Title: Factor Analytics
-Version: 2.0.35
+Version: 2.0.36
Date: 2016-08-04
Author: Eric Zivot, Sangeetha Srinivasan and Yi-An Chen
Maintainer: Sangeetha Srinivasan <sangee at uw.edu>
Modified: pkg/FactorAnalytics/R/fmVaRDecomp.R
===================================================================
--- pkg/FactorAnalytics/R/fmVaRDecomp.R 2016-08-04 12:55:43 UTC (rev 4025)
+++ pkg/FactorAnalytics/R/fmVaRDecomp.R 2016-08-05 15:21:42 UTC (rev 4026)
@@ -18,15 +18,18 @@
#' being equal to \code{VaR.fm}. This is approximated as described in
#' Epperlein & Smillie (2006); a triangular smoothing kernel is used here.
#'
+#' Refer to Eric Zivot's slides (see references) for the formulas used to
+#' compute normal VaR (adapted from a portfolio context to factor models).
+#'
#' @param object fit object of class \code{tsfm}, \code{sfm} or \code{ffm}.
+#' @param factor.cov optional user-specified factor covariance matrix with
+#' named columns; defaults to the sample factor covariance matrix (for ffm
+#' fits, the factor covariance returned by \code{fitFfm}).
#' @param p confidence level for calculation. Default is 0.95.
#' @param type one of "np" (non-parametric) or "normal" for calculating VaR.
#' Default is "np".
-#' @param use an optional character string giving a method for computing factor
-#' covariances in the presence of missing values. This must be (an
-#' abbreviation of) one of the strings "everything", "all.obs",
-#' "complete.obs", "na.or.complete", or "pairwise.complete.obs". Default is
-#' "pairwise.complete.obs".
+#' @param use method for computing covariances in the presence of missing
+#' values; one of "everything", "all.obs", "complete.obs", "na.or.complete", or
+#' "pairwise.complete.obs". Default is "pairwise.complete.obs".
#' @param ... other optional arguments passed to \code{\link[stats]{quantile}}.
#'
#' @return A list containing
@@ -39,9 +42,12 @@
#' \item{pcVaR}{N x (K+1) matrix of percentage component contributions to VaR.}
#' Where, \code{K} is the number of factors and N is the number of assets.
#'
-#' @author Eric Zivot, Sangeetha Srinivasan and Yi-An Chen
+#' @author Eric Zivot, Yi-An Chen and Sangeetha Srinivasan
#'
#' @references
+#' Eric Zivot's CFRM 546 lecture slides, "Estimating Risk Measures: Portfolio
+#' of Assets", April 28, 2015.
+#'
#' Hallerbach (2003). Decomposing Portfolio Value-at-Risk: A General Analysis.
#' The Journal of Risk, 5(2), 1-18.
#'
@@ -63,7 +69,7 @@
#' data(managers)
#' fit.macro <- fitTsfm(asset.names=colnames(managers[,(1:6)]),
#' factor.names=colnames(managers[,(7:8)]), data=managers)
-#'
+#'
#' VaR.decomp <- fmVaRDecomp(fit.macro)
#' # get the component contributions
#' VaR.decomp$cVaR
@@ -71,9 +77,19 @@
#' # Statistical Factor Model
#' data(StockReturns)
#' sfm.pca.fit <- fitSfm(r.M, k=2)
-#' VaR.decomp <- fmVaRDecomp(sfm.pca.fit)
+#'
+#' VaR.decomp <- fmVaRDecomp(sfm.pca.fit, type="normal")
#' VaR.decomp$cVaR
#'
+#' # Fundamental Factor Model
+#' data(Stock.df)
+#' exposure.vars <- c("BOOK2MARKET", "LOG.MARKETCAP")
+#' fit <- fitFfm(data=stock, asset.var="TICKER", ret.var="RETURN",
+#' date.var="DATE", exposure.vars=exposure.vars)
+#'
+#' VaR.decomp <- fmVaRDecomp(fit, type="normal")
+#' VaR.decomp$cVaR
+#'
#' @export
fmVaRDecomp <- function(object, ...){
@@ -88,12 +104,11 @@
#' @method fmVaRDecomp tsfm
#' @export
-fmVaRDecomp.tsfm <- function(object, p=0.95, type=c("np","normal"),
+fmVaRDecomp.tsfm <- function(object, factor.cov, p=0.95, type=c("np","normal"),
use="pairwise.complete.obs", ...) {
# set default for type
type = type[1]
-
if (!(type %in% c("np","normal"))) {
stop("Invalid args: type must be 'np' or 'normal' ")
}
@@ -111,7 +126,14 @@
if (type=="normal") {
# get cov(F): K x K
- factor.cov = cov(as.matrix(factors.xts), use=use, ...)
+ if (missing(factor.cov)) {
+ factor.cov = cov(as.matrix(factors.xts), use=use, ...)
+ } else {
+ if (!identical(dim(factor.cov), as.integer(c(ncol(factors.xts), ncol(factors.xts))))) {
+ stop("Dimensions of user specified factor covariance matrix are not
+ compatible with the number of factors in the fitTsfm object")
+ }
+ }
# get cov(F.star): (K+1) x (K+1)
K <- ncol(object$beta)
@@ -122,6 +144,7 @@
# factor expected returns
MU <- c(colMeans(factors.xts, na.rm=TRUE), 0)
+ names(MU) <- colnames(beta.star)
# SIGMA*Beta to compute normal mVaR
SIGB <- beta.star %*% factor.star.cov
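
In the lines above, MU and factor.star.cov describe the augmented system (factors plus a residual term with unit variance in the covariance block), and SIGB pre-multiplies that covariance by the augmented betas. A toy numeric check of the normal VaR formula the loop below computes, assuming (as elsewhere in the package) that beta.star appends the residual standard deviation to the factor betas; all numbers are illustrative:

beta.star.i <- c(0.8, 0.3, 0.02)              # two factor betas, then residual sd
MU.star     <- c(0.005, 0.002, 0)             # factor means, residual mean = 0
Sig.star    <- diag(3)                        # residual block stays at 1
Sig.star[1:2, 1:2] <- matrix(c(0.0016, 0.0002,
                               0.0002, 0.0009), 2, 2)
p <- 0.95
VaR.i <- drop(beta.star.i %*% MU.star +
              sqrt(beta.star.i %*% Sig.star %*% beta.star.i) * qnorm(1 - p))
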
@@ -138,38 +161,33 @@
cVaR <- matrix(NA, N, K+1)
pcVaR <- matrix(NA, N, K+1)
rownames(mVaR)=rownames(cVaR)=rownames(pcVaR)=object$asset.names
- colnames(mVaR)=colnames(cVaR)=colnames(pcVaR)=c(object$factor.names,
- "residuals")
+ colnames(mVaR)=colnames(cVaR)=colnames(pcVaR)=c(object$factor.names, "residuals")
+
for (i in object$asset.names) {
# return data for asset i
R.xts <- object$data[,i]
# get VaR for asset i
if (type=="np") {
VaR.fm[i] <- quantile(R.xts, probs=1-p, na.rm=TRUE, ...)
- }
+ }
else if (type=="normal") {
- VaR.fm[i] <- mean(R.xts, na.rm=TRUE) + sd(R.xts, na.rm=TRUE)*qnorm(1-p)
+ VaR.fm[i] <- beta.star[i,] %*% MU +
+ sqrt(beta.star[i,,drop=F] %*% factor.star.cov %*% t(beta.star[i,,drop=F]))*qnorm(1-p)
}
# index of VaR exceedances
idx.exceed[[i]] <- which(R.xts <= VaR.fm[i])
# number of VaR exceedances
n.exceed[i] <- length(idx.exceed[[i]])
- # # plot exceedances for asset i
- # plot(R.xts, type="b", main="Asset Returns and 5% VaR Violations",
- # ylab="Returns")
- # abline(h=0)
- # abline(h=VaR.fm[i], lwd=2, col="red")
- # points(R.xts[idx.exceed[[i]]], type="p", pch=16, col="red")
-
- # get F.star data object
- factor.star <- merge(factors.xts, resid.xts[,i])
- colnames(factor.star)[dim(factor.star)[2]] <- "residual"
-
if (type=="np") {
+ # get F.star data object
+ factor.star <- merge(factors.xts, resid.xts[,i])
+ colnames(factor.star)[dim(factor.star)[2]] <- "residual"
+
# epsilon is apprx. using Silverman's rule of thumb (bandwidth selection)
# the constant 2.575 corresponds to a triangular kernel
eps <- 2.575*sd(R.xts, na.rm =TRUE) * (nrow(R.xts)^(-1/5))
+
# compute marginal VaR as expected value of factor returns, such that the
# asset return was incident in the triangular kernel region peaked at the
# VaR value and bandwidth = epsilon.
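
A self-contained sketch of the non-parametric marginal VaR step these comments describe (the exact k.weight construction falls outside this hunk; a triangular weight peaked at the VaR with bandwidth eps is assumed, following Epperlein & Smillie, 2006, and toy data are used):

set.seed(1)
n   <- 250
R   <- rnorm(n, 0, 0.02)                # toy asset returns
F1  <- 0.5*R + rnorm(n, 0, 0.01)        # toy factor correlated with R
VaR <- quantile(R, probs = 0.05)        # 95% non-parametric VaR
eps <- 2.575 * sd(R) * n^(-1/5)         # bandwidth; 2.575 is the triangular-kernel constant
w   <- pmax(1 - abs(R - VaR)/eps, 0)    # triangular weights, zero outside the band
mVaR.F1 <- sum(w * F1) / sum(w)         # kernel estimate of E[F1 | R = VaR]
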
@@ -201,12 +219,11 @@
#' @method fmVaRDecomp sfm
#' @export
-fmVaRDecomp.sfm <- function(object, p=0.95, type=c("np","normal"),
+fmVaRDecomp.sfm <- function(object, factor.cov, p=0.95, type=c("np","normal"),
use="pairwise.complete.obs", ...) {
# set default for type
type = type[1]
-
if (!(type %in% c("np","normal"))) {
stop("Invalid args: type must be 'np' or 'normal' ")
}
@@ -224,10 +241,17 @@
if (type=="normal") {
# get cov(F): K x K
- factor.cov = cov(as.matrix(factors.xts), use=use, ...)
+ if (missing(factor.cov)) {
+ factor.cov = cov(as.matrix(factors.xts), use=use, ...)
+ } else {
+ if (!identical(dim(factor.cov), as.integer(c(object$k, object$k)))) {
+ stop("Dimensions of user specified factor covariance matrix are not
+ compatible with the number of factors in the fitSfm object")
+ }
+ }
# get cov(F.star): (K+1) x (K+1)
- K <- ncol(object$beta)
+ K <- object$k
factor.star.cov <- diag(K+1)
factor.star.cov[1:K, 1:K] <- factor.cov
colnames(factor.star.cov) <- c(colnames(factor.cov),"residuals")
@@ -258,24 +282,26 @@
R.xts <- object$data[,i]
# get VaR for asset i
if (type=="np") {
- VaR.fm[i] <- quantile(R.xts, probs=1-p, na.rm=TRUE, ...)
- }
+ VaR.fm[i] <- quantile(R.xts, probs=1-p, na.rm=TRUE)
+ }
else if (type=="normal") {
- VaR.fm[i] <- mean(R.xts, na.rm=TRUE) + sd(R.xts, na.rm=TRUE)*qnorm(1-p)
+ VaR.fm[i] <- beta.star[i,] %*% MU +
+ sqrt(beta.star[i,,drop=F] %*% factor.star.cov %*% t(beta.star[i,,drop=F]))*qnorm(1-p)
}
# index of VaR exceedances
idx.exceed[[i]] <- which(R.xts <= VaR.fm[i])
# number of VaR exceedances
n.exceed[i] <- length(idx.exceed[[i]])
- # get F.star data object
- factor.star <- merge(factors.xts, resid.xts[,i])
- colnames(factor.star)[dim(factor.star)[2]] <- "residual"
-
if (type=="np") {
+ # get F.star data object
+ factor.star <- merge(factors.xts, resid.xts[,i])
+ colnames(factor.star)[dim(factor.star)[2]] <- "residual"
+
# epsilon is apprx. using Silverman's rule of thumb (bandwidth selection)
# the constant 2.575 corresponds to a triangular kernel
eps <- 2.575*sd(R.xts, na.rm =TRUE) * (nrow(R.xts)^(-1/5))
+
# compute marginal VaR as expected value of factor returns, such that the
# asset return was incident in the triangular kernel region peaked at the
# VaR value and bandwidth = epsilon.
@@ -283,7 +309,7 @@
k.weight[k.weight<0] <- 0
mVaR[i,] <- colMeans(factor.star*k.weight, na.rm =TRUE)
}
- else if (type=="normal") {
+ else if (type=="normal") {
mVaR[i,] <- t(MU) + SIGB[i,] * qnorm(1-p)/sd(R.xts, na.rm=TRUE)
}
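
For reference, the normal-case marginal VaR assigned above is the gradient of the normal VaR with respect to the augmented betas. Writing \(\sigma_i\) for asset i's total volatility, which the code estimates with sd(R.xts), the formula being applied is

\[
\mathrm{mVaR}_i = \mu^{*} + \frac{\Sigma^{*}\beta_i^{*}}{\sigma_i}\,\Phi^{-1}(1-p),
\qquad
\sigma_i \approx \sqrt{\beta_i^{*\prime}\,\Sigma^{*}\,\beta_i^{*}},
\]

so that, when sd(R.xts) agrees with the model-implied volatility, \(\beta_i^{*\prime}\,\mathrm{mVaR}_i \approx \mathrm{VaR}_i\): the Euler decomposition underlying cVaR and pcVaR.
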
@@ -307,11 +333,11 @@
#' @method fmVaRDecomp ffm
#' @export
-fmVaRDecomp.ffm <- function(object, p=0.95, type=c("np","normal"), ...) {
+fmVaRDecomp.ffm <- function(object, factor.cov, p=0.95, type=c("np","normal"),
+ use="pairwise.complete.obs", ...) {
# set default for type
type = type[1]
-
if (!(type %in% c("np","normal"))) {
stop("Invalid args: type must be 'np' or 'normal' ")
}
@@ -329,7 +355,14 @@
if (type=="normal") {
# get cov(F): K x K
- factor.cov = object$factor.cov
+ if (missing(factor.cov)) {
+ factor.cov <- object$factor.cov
+ } else {
+ if (!identical(dim(factor.cov), dim(object$factor.cov))) {
+ stop("Dimensions of user specified factor covariance matrix are not
+ compatible with the number of factors in the fitSfm object")
+ }
+ }
# get cov(F.star): (K+1) x (K+1)
K <- ncol(object$beta)
@@ -356,31 +389,35 @@
cVaR <- matrix(NA, N, K+1)
pcVaR <- matrix(NA, N, K+1)
rownames(mVaR)=rownames(cVaR)=rownames(pcVaR)=object$asset.names
- colnames(mVaR)=colnames(cVaR)=colnames(pcVaR)=c(object$factor.names,
- "residuals")
+ colnames(mVaR)=colnames(cVaR)=colnames(pcVaR)=c(object$factor.names, "residuals")
+
for (i in object$asset.names) {
# return data for asset i
- R.xts <- object$data[,i]
+ subrows <- which(object$data[[object$asset.var]]==i)
+ R.xts <- as.xts(object$data[subrows,object$ret.var],
+ as.Date(object$data[subrows,object$date.var]))
# get VaR for asset i
if (type=="np") {
VaR.fm[i] <- quantile(R.xts, probs=1-p, na.rm=TRUE, ...)
- }
+ }
else if (type=="normal") {
- VaR.fm[i] <- mean(R.xts, na.rm=TRUE) + sd(R.xts, na.rm=TRUE)*qnorm(1-p)
+ VaR.fm[i] <- beta.star[i,] %*% MU +
+ sqrt(beta.star[i,,drop=F] %*% factor.star.cov %*% t(beta.star[i,,drop=F]))*qnorm(1-p)
}
# index of VaR exceedances
idx.exceed[[i]] <- which(R.xts <= VaR.fm[i])
# number of VaR exceedances
n.exceed[i] <- length(idx.exceed[[i]])
- # get F.star data object
- factor.star <- merge(factors.xts, resid.xts[,i])
- colnames(factor.star)[dim(factor.star)[2]] <- "residual"
-
if (type=="np") {
+ # get F.star data object
+ factor.star <- merge(factors.xts, resid.xts[,i])
+ colnames(factor.star)[dim(factor.star)[2]] <- "residual"
+
# epsilon is apprx. using Silverman's rule of thumb (bandwidth selection)
# the constant 2.575 corresponds to a triangular kernel
eps <- 2.575*sd(R.xts, na.rm =TRUE) * (nrow(R.xts)^(-1/5))
+
# compute marginal VaR as expected value of factor returns, such that the
# asset return was incident in the triangular kernel region peaked at the
# VaR value and bandwidth = epsilon.
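
The ffm method now pulls each asset's return series out of the long-format data frame instead of indexing a wide matrix. A toy illustration of that step (column names mirror the fitFfm arguments used in the examples; xts() is used here for brevity, whereas the package code goes through as.xts):

library(xts)
dat <- data.frame(TICKER = rep(c("AAA", "BBB"), each = 3),
                  DATE   = rep(as.Date("2016-01-01") + 0:2, times = 2),
                  RETURN = rnorm(6, 0, 0.01))
i <- "AAA"
subrows <- which(dat$TICKER == i)
R.xts <- xts(dat[subrows, "RETURN"], order.by = as.Date(dat[subrows, "DATE"]))
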
@@ -406,4 +443,4 @@
mVaR=mVaR, cVaR=cVaR, pcVaR=pcVaR)
return(fm.VaR.decomp)
-}
+}
\ No newline at end of file
Modified: pkg/FactorAnalytics/man/fmVaRDecomp.Rd
===================================================================
--- pkg/FactorAnalytics/man/fmVaRDecomp.Rd 2016-08-04 12:55:43 UTC (rev 4025)
+++ pkg/FactorAnalytics/man/fmVaRDecomp.Rd 2016-08-05 15:21:42 UTC (rev 4026)
@@ -9,29 +9,31 @@
\usage{
fmVaRDecomp(object, ...)
-\method{fmVaRDecomp}{tsfm}(object, p = 0.95, type = c("np", "normal"),
- use = "pairwise.complete.obs", ...)
+\method{fmVaRDecomp}{tsfm}(object, factor.cov, p = 0.95, type = c("np",
+ "normal"), use = "pairwise.complete.obs", ...)
-\method{fmVaRDecomp}{sfm}(object, p = 0.95, type = c("np", "normal"),
- use = "pairwise.complete.obs", ...)
+\method{fmVaRDecomp}{sfm}(object, factor.cov, p = 0.95, type = c("np",
+ "normal"), use = "pairwise.complete.obs", ...)
-\method{fmVaRDecomp}{ffm}(object, p = 0.95, type = c("np", "normal"), ...)
+\method{fmVaRDecomp}{ffm}(object, factor.cov, p = 0.95, type = c("np",
+ "normal"), use = "pairwise.complete.obs", ...)
}
\arguments{
\item{object}{fit object of class \code{tsfm}, \code{sfm} or \code{ffm}.}
\item{...}{other optional arguments passed to \code{\link[stats]{quantile}}.}
+\item{factor.cov}{optional user-specified factor covariance matrix with
+named columns; defaults to the sample factor covariance matrix (for ffm
+fits, the factor covariance returned by \code{fitFfm}).}
+
\item{p}{confidence level for calculation. Default is 0.95.}
\item{type}{one of "np" (non-parametric) or "normal" for calculating VaR.
Default is "np".}
-\item{use}{an optional character string giving a method for computing factor
-covariances in the presence of missing values. This must be (an
-abbreviation of) one of the strings "everything", "all.obs",
-"complete.obs", "na.or.complete", or "pairwise.complete.obs". Default is
-"pairwise.complete.obs".}
+\item{use}{method for computing covariances in the presence of missing
+values; one of "everything", "all.obs", "complete.obs", "na.or.complete", or
+"pairwise.complete.obs". Default is "pairwise.complete.obs".}
}
\value{
A list containing
@@ -62,14 +64,17 @@
contributions to \code{VaR} respectively. The marginal contribution to VaR
is defined as the expectation of \code{F.star}, conditional on the loss
being equal to \code{VaR.fm}. This is approximated as described in
-Epperlein & Smillie (2006); a triangular smoothing kernel is used here.
+Epperlein & Smillie (2006); a triangular smoothing kernel is used here.
+
+Refer to Eric Zivot's slides (see references) for the formulas used to
+compute normal VaR (adapted from a portfolio context to factor models).
}
\examples{
# Time Series Factor Model
data(managers)
fit.macro <- fitTsfm(asset.names=colnames(managers[,(1:6)]),
factor.names=colnames(managers[,(7:8)]), data=managers)
-
+
VaR.decomp <- fmVaRDecomp(fit.macro)
# get the component contributions
VaR.decomp$cVaR
@@ -77,14 +82,27 @@
# Statistical Factor Model
data(StockReturns)
sfm.pca.fit <- fitSfm(r.M, k=2)
-VaR.decomp <- fmVaRDecomp(sfm.pca.fit)
+
+VaR.decomp <- fmVaRDecomp(sfm.pca.fit, type="normal")
VaR.decomp$cVaR
+# Fundamental Factor Model
+data(Stock.df)
+exposure.vars <- c("BOOK2MARKET", "LOG.MARKETCAP")
+fit <- fitFfm(data=stock, asset.var="TICKER", ret.var="RETURN",
+ date.var="DATE", exposure.vars=exposure.vars)
+
+VaR.decomp <- fmVaRDecomp(fit, type="normal")
+VaR.decomp$cVaR
+
}
\author{
-Eric Zivot, Sangeetha Srinivasan and Yi-An Chen
+Eric Zivot, Yi-An Chen and Sangeetha Srinivasan
}
\references{
+Eric Zivot's CFRM 546 lecture slides, "Estimating Risk Measures: Portfolio
+of Assets", April 28, 2015.
+
Hallerbach (2003). Decomposing Portfolio Value-at-Risk: A General Analysis.
The Journal of Risk, 5(2), 1-18.