From noreply at r-forge.r-project.org  Thu Jun  6 06:23:37 2013
From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org)
Date: Thu, 6 Jun 2013 06:23:37 +0200 (CEST)
Subject: [Returnanalytics-commits] r2338 - pkg/PerformanceAnalytics/R
Message-ID: <20130606042337.6E346180173@r-forge.r-project.org>

Author: pulkit
Date: 2013-06-06 06:23:37 +0200 (Thu, 06 Jun 2013)
New Revision: 2338

Added:
   pkg/PerformanceAnalytics/R/ProbSharpe.R
Log:
Added ProbSharpe.R

Added: pkg/PerformanceAnalytics/R/ProbSharpe.R
===================================================================
--- pkg/PerformanceAnalytics/R/ProbSharpe.R	                        (rev 0)
+++ pkg/PerformanceAnalytics/R/ProbSharpe.R	2013-06-06 04:23:37 UTC (rev 2338)
@@ -0,0 +1,62 @@
+#'@title Probabilistic Sharpe Ratio
+#'@description
+#'Given a predefined
+#'benchmark Sharpe ratio (refSR), the observed Sharpe ratio can be expressed
+#' in probabilistic terms.
+#'
+#'@param R the return series
+#'@param Rf the risk free rate of return
+#'@param refSR the reference Sharpe Ratio
+#'@param p the confidence level
+#'@param weights the weights for the portfolio

+ProbSharpeRatio<-
+function(R, refSR,Rf=0,p = 0.95, weights = NULL, ...){
+    x = checkData(R)
+    columns = ncol(R)
+    columnnames = colnames(R)
+
+    if(!is.null(dim(Rf)))
+        Rf = checkData(Rf)
+
+    psr <- function (x,Rf,p,refSR,...){
+        sr = SharpeRatio(x, Rf, p,"StdDev")
+        n = nrow(x)
+        sd = StdDev(x)
+        sk = skewness(x)
+        kr = kurtosis(x)
+        PSR = pnorm(((sr - refSR)*(n-1)^(0.5))/(1-sr*sk+sr^2*(kr-1)/4)^(0.5))
+        return(PSR)
+}
+
+mintrl <- function(x,Rf,p,refSR,...){
+    sk = skewness(x)
+    kr =kurtosis(x)
+    sr = SharpeRatio(x, Rf, p, "StdDev")
+    MinTRL = 1 + (1 - sk*sr + ((kr-1)/4)*sr^2)*(qnorm(p)/(sr-refSR))^2
+    return(MinTRL)
+
+}
+    for(column in 1:columns){
+      column.probsharpe <- psr(x[,column],Rf,p,refSR)
+      column.mintrack <- mintrl(x[,column],Rf,p,refSR)
+      if(column == 1){
+        probsharpe = column.probsharpe
+        mintrack = column.mintrack
+      }
+      else {
+        probsharpe = merge(probsharpe, column.probsharpe)
+        mintrack = merge(mintrack, column.mintrack)
+      }
+
+    }
+
+    probsharpe = rbind(probsharpe,mintrack)
+
+    colnames(probsharpe) = columnnames
+    probsharpe = reclass(probsharpe, x)
+    rownames(probsharpe)=c("PSR","MinTRL")
+    return(probsharpe)
+
+}
+

From noreply at r-forge.r-project.org  Thu Jun  6 06:24:50 2013
From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org)
Date: Thu, 6 Jun 2013 06:24:50 +0200 (CEST)
Subject: [Returnanalytics-commits] r2339 - pkg/PerformanceAnalytics/R
Message-ID: <20130606042450.AC203180173@r-forge.r-project.org>

Author: pulkit
Date: 2013-06-06 06:24:50 +0200 (Thu, 06 Jun 2013)
New Revision: 2339

Removed:
   pkg/PerformanceAnalytics/R/ProbSharpe.R
Log:
Deleted ProbSharpe.R

Deleted: pkg/PerformanceAnalytics/R/ProbSharpe.R
===================================================================
--- pkg/PerformanceAnalytics/R/ProbSharpe.R	2013-06-06 04:23:37 UTC (rev 2338)
+++ pkg/PerformanceAnalytics/R/ProbSharpe.R	2013-06-06 04:24:50 UTC (rev 2339)
@@ -1,62 +0,0 @@
-#'@title Probabilistic Sharpe Ratio
-#'@description
-#'Given a predefined
-#'benchmark Sharpe ratio (refSR), the observed Sharpe ratio can be expressed
-#' in probabilistic terms.
-#'
-#'@param R the return series
-#'@param Rf the risk free rate of return
-#'@param refSR the reference Sharpe Ratio
-#'@param p the confidence level
-#'@param weights the weights for the portfolio

-ProbSharpeRatio<-
-function(R, refSR,Rf=0,p = 0.95, weights = NULL, ...){
-    x = checkData(R)
-    columns = ncol(R)
-    columnnames = colnames(R)
-
-    if(!is.null(dim(Rf)))
-        Rf = checkData(Rf)
-
-    psr <- function (x,Rf,p,refSR,...){
-        sr = SharpeRatio(x, Rf, p,"StdDev")
-        n = nrow(x)
-        sd = StdDev(x)
-        sk = skewness(x)
-        kr = kurtosis(x)
-        PSR = pnorm(((sr - refSR)*(n-1)^(0.5))/(1-sr*sk+sr^2*(kr-1)/4)^(0.5))
-        return(PSR)
-}
-
-mintrl <- function(x,Rf,p,refSR,...){
-    sk = skewness(x)
-    kr =kurtosis(x)
-    sr = SharpeRatio(x, Rf, p, "StdDev")
-    MinTRL = 1 + (1 - sk*sr + ((kr-1)/4)*sr^2)*(qnorm(p)/(sr-refSR))^2
-    return(MinTRL)
-
-}
-    for(column in 1:columns){
-      column.probsharpe <- psr(x[,column],Rf,p,refSR)
-      column.mintrack <- mintrl(x[,column],Rf,p,refSR)
-      if(column == 1){
-        probsharpe = column.probsharpe
-        mintrack = column.mintrack
-      }
-      else {
-        probsharpe = merge(probsharpe, column.probsharpe)
-        mintrack = merge(mintrack, column.mintrack)
-      }
-
-    }
-
-    probsharpe = rbind(probsharpe,mintrack)
-
-    colnames(probsharpe) = columnnames
-    probsharpe = reclass(probsharpe, x)
-    rownames(probsharpe)=c("PSR","MinTRL")
-    return(probsharpe)
-
-}
-

From noreply at r-forge.r-project.org  Wed Jun 12 21:30:32 2013
From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org)
Date: Wed, 12 Jun 2013 21:30:32 +0200 (CEST)
Subject: [Returnanalytics-commits] r2340 - pkg/PApages/sandbox
Message-ID: <20130612193032.14062185896@r-forge.r-project.org>

Author: peter_carl
Date: 2013-06-12 21:30:31 +0200 (Wed, 12 Jun 2013)
New Revision: 2340

Added:
   pkg/PApages/sandbox/charts.onepager.R
Log:
- initial commit of a function that's been sitting around for years, whoops

Added: pkg/PApages/sandbox/charts.onepager.R
===================================================================
--- pkg/PApages/sandbox/charts.onepager.R	                        (rev 0)
+++ pkg/PApages/sandbox/charts.onepager.R	2013-06-12 19:30:31 UTC (rev 2340)
@@ -0,0 +1,171 @@
+`charts.onepager` <-
+function (R, manager.column = 1, peer.columns = NULL, index.columns = NULL, manager.color = "red", peer.color = "darkgray", index.color = "orange", Rf = 0, main = NULL, method = c("ModifiedES","ModifiedVaR"), p=(1-1/12), width = 0, event.labels = NULL, ylog = FALSE, wealth.index = FALSE, gap = 12, begin=c("first","axis"), legend.loc="topleft", lwd = 2, ...)
+{ # @author Peter Carl
+
+    # DESCRIPTION:
+    # An extension of charts.PerformanceSummary.R
+
+    # Inputs:
+    # R: a matrix, data frame, or timeSeries, usually a set of monthly returns.
+    #   The first column is assumed to be the returns of interest, the next
+    #   columns are assumed to be relevant benchmarks for comparison.
+    # Rf: this is the risk free rate. Remember to set this to the same
+    #   periodicity as the data being passed in.
+    # method: Used to select the risk parameter to use in chart.BarVaR. May
+    #   be any of:
+    #     modVaR - uses CF modified VaR
+    #     VaR - uses traditional Value at Risk
+    #     StdDev - monthly standard deviation of trailing 12 month returns
+    #
+
+    # Outputs:
+    # A stack of three related time series line charts
+
+    # FUNCTION:
+    begin = begin[1]
+    x = checkData(R, method = "zoo")
+    colnames = colnames(x)
+    ncols = ncol(x)
+
+# This repeats a bit of code from chart.CumReturns, but it's intended
+# to align the start dates of all of the following charts.
Basically, it assumes +# that the manager.column is the column of interest, and +# starts everything from that start date + + length.column.one = length(x[,manager.column]) +# find the row number of the last NA in the first column + start.row = 1 + start.index = 0 + while(is.na(x[start.row,manager.column])){ + start.row = start.row + 1 + } + x = x[start.row:length.column.one,] + + colorset = c(rep(manager.color,length(manager.column)), rep(index.color, length(index.columns)), rep(peer.color,length(peer.columns))) + legend.colorset = c(rep(manager.color, length(manager.column)), peer.color, rep(index.color, length(index.columns))) + + linetypes = c(rep(1, length(manager.column)), 1:length(index.columns), rep(1, length(peer.columns))) + legend.linetypes = c(rep(1, length(manager.column)), 1, 1:length(index.columns)) + + dottypes = c(rep(16, length(manager.column)), closedsymbols[1:length(index.columns)], rep(1, length(peer.columns))) + legend.dottypes = c(rep(16, length(manager.column)),1, closedsymbols[1:length(index.columns)]) + + if(ncols > 1){ + legend.loc = legend.loc + legendnames = c(colnames(x[, manager.column, drop = FALSE]),"Peer group", colnames(x[, index.columns, drop = FALSE])) + } + else + legend.loc = NULL + + if(is.null(main)) + main = paste(colnames[manager.column],"Performance", sep=" ") + + if(ylog) + wealth.index = TRUE + + # First, we lay out the graphic as a three row, one column format +# plot.new() +# layout(matrix(c(1,2,3)),height=c(2,1,1.3),width=1) +# pdf("test1.pdf", width=6.5, height=9, paper="letter") + layout(matrix(c(1,1,2,2,3,3,4,5),nrow=4,ncol=2,byrow=T),height=c(3,1.25,1.75,3),width=1) + # to see the resulting layout, use layout.show(5) + + # mar: a numerical vector of the form c(bottom, left, top, right) which + # gives the number of lines of margin to be specified on the four sides + # of the plot. The default is c(5, 4, 4, 2) + 0.1 + + # The first row is the cumulative returns line plot + par(mar=c(1,4,4,2)) + chart.CumReturns(x[,c(manager.column,index.columns,peer.columns),drop = FALSE], main = main, xaxis = FALSE, ylab = NULL, legend.loc = NULL, event.labels = event.labels, ylog = ylog, wealth.index = wealth.index, begin = begin, colorset = colorset, lwd = lwd, lty=linetypes, ...) + + if(!is.null(legend.loc)){ + # There's no good place to put this automatically, except under the graph. + # That requires a different solution, but here's the quick fix + legend(legend.loc, inset = 0.02, text.col = legend.colorset, col = legend.colorset, cex = .8, border.col = "gray", lwd = 2, bg = "white", legend = legendnames, lty=legend.linetypes, pch = legend.dottypes, pt.bg="white", pt.lwd = "1", merge = FALSE, pt.cex = 1.25) + } + + # The second row is the monthly returns bar plot + par(mar=c(1,4,0,2)) +# chart.BarVaR(as.matrix(R[,1]), main = "", xaxis = FALSE, ylab = "Monthly Return", method = method) + chart.BarVaR(x[,c(manager.column,index.columns,peer.columns),drop = FALSE], main = "", xaxis = FALSE, width = width, ylab = "Monthly Return", method = method, event.labels = NULL, ylog=FALSE, gap = gap, colorset = colorset, lwd = lwd, p=p, ...) + + # The third row is the underwater plot + par(mar=c(5,4,0,2)) + chart.Drawdown(x[,c(manager.column,index.columns,peer.columns),drop = FALSE], main = "", xlab = "", ylab = "From Peak", event.labels = NULL, ylog=FALSE, colorset= colorset, lwd = lwd, lty = linetypes, ...) 
+ +par() +# chart.Histogram(x[,c(manager.column,index.columns,peer.columns),drop = FALSE],methods=c( "add.normal","add.density"), main="", colorset = c("lightgray", manager.color, "#005AFF", "#23FFDC", "#ECFF13", "#FF4A00", "#800000"), ...) + +chart.Boxplot(x[,c(manager.column,index.columns,peer.columns),drop = FALSE], colorset=colorset, mean.symbol = dottypes, symbol.color = colorset, as.Tufte = T, names=F, main = "", ylab = "", sort.by="variance", ...) + +par() +chart.RiskReturnScatter(x[,c(manager.column,index.columns,peer.columns),drop = FALSE], Rf = Rf, main="",add.names=FALSE, colorset = colorset, symbolset = dottypes, ...) +# layout.show(5) + +# dev.off() +} + +############################################################################### +# R (http://r-project.org/) Econometrics for Performance and Risk Analysis +# +# Copyright (c) 2004-2007 Peter Carl and Brian G. Peterson +# +# This library is distributed under the terms of the GNU Public License (GPL) +# for full details see the file COPYING +# +# $Id: charts.onepager.R,v 1.2 2007-10-03 02:50:03 peter Exp $ +# +############################################################################### +# $Log: charts.onepager.R,v $ +# Revision 1.2 2007-10-03 02:50:03 peter +# - merge set to FALSE in legend so that lines and points are easily +# distinguished +# - manager.columns changed to manager.column +# - manager.column defaults to "1" but will now correctly use any column +# +# Revision 1.14 2007/08/23 02:12:04 peter +# - added legend.loc as parameter so that legend can be shut off or moved +# in top chart +# +# Revision 1.13 2007/08/15 00:04:44 peter +# - aligns the three charts along the start date of the first column of +# data +# +# Revision 1.12 2007/06/29 15:52:25 peter +# - removed plot.new() that was causing two page pdf files +# +# Revision 1.11 2007/06/18 03:35:22 brian +# - make method argument a list +# +# Revision 1.10 2007/04/30 12:56:05 peter +# - changed 'method' to 'begin' +# +# Revision 1.9 2007/04/09 12:31:27 brian +# - syntax and usage changes to pass R CMD check +# +# Revision 1.8 2007/04/04 02:46:34 peter +# - added gap parameter for chart.BarVaR +# +# Revision 1.7 2007/03/22 13:48:11 peter +# - removed yaxis label in favor of default +# +# Revision 1.6 2007/03/21 21:46:54 peter +# - passing in wealth.index to top chart +# +# Revision 1.5 2007/03/21 21:44:21 peter +# - fixed conditional test +# +# Revision 1.4 2007/03/21 21:40:48 peter +# - added error handling for ylog passing in top chart +# +# Revision 1.3 2007/03/20 13:48:07 peter +# - changed "n" attribute to "width" in chart.BarVaR call +# +# Revision 1.2 2007/02/07 13:24:49 brian +# - fix pervasive comment typo +# +# Revision 1.1 2007/02/02 19:06:15 brian +# - Initial Revision of packaged files to version control +# Bug 890 +# +############################################################################### From noreply at r-forge.r-project.org Mon Jun 17 12:34:13 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Mon, 17 Jun 2013 12:34:13 +0200 (CEST) Subject: [Returnanalytics-commits] r2341 - in pkg/PerformanceAnalytics/sandbox: . 
pulkit
Message-ID: <20130617103413.7C516184B61@r-forge.r-project.org>

Author: pulkit
Date: 2013-06-17 12:34:12 +0200 (Mon, 17 Jun 2013)
New Revision: 2341

Added:
   pkg/PerformanceAnalytics/sandbox/pulkit/
   pkg/PerformanceAnalytics/sandbox/pulkit/MinTRL.R
   pkg/PerformanceAnalytics/sandbox/pulkit/ProbSharpeRatio.R
   pkg/PerformanceAnalytics/sandbox/pulkit/chart.PSR.R
Log:
-PSR and MinTRL Initial commit

Added: pkg/PerformanceAnalytics/sandbox/pulkit/MinTRL.R
===================================================================
--- pkg/PerformanceAnalytics/sandbox/pulkit/MinTRL.R	                        (rev 0)
+++ pkg/PerformanceAnalytics/sandbox/pulkit/MinTRL.R	2013-06-17 10:34:12 UTC (rev 2341)
@@ -0,0 +1,34 @@
+#'@title Minimum Track Record Length
+#'@description
+#'The minimum track record length required for the observed Sharpe ratio
+#'to be above a given reference Sharpe ratio (refSR) at the chosen
+#'confidence level.
+#'
+#'@param R the return series
+#'@param Rf the risk free rate of return
+#'@param refSR the reference Sharpe Ratio
+#'@param p the confidence level
+#'@param weights the weights for the portfolio

+MinTrackRecord<-function(x,Rf,refSR,p=0.95,...){
+
+mintrl <- function(x,Rf,p,refSR,...){
+    sk = skewness(x)
+    kr =kurtosis(x)
+    sr = SharpeRatio(x, Rf, p, "StdDev")
+    MinTRL = 1 + (1 - sk*sr + ((kr-1)/4)*sr^2)*(qnorm(p)/(sr-refSR))^2
+    return(MinTRL)
+
+}
+    for(column in 1:columns){
+      column.mintrack <- mintrl(x[,column],Rf,p,refSR)
+      if(column == 1){
+        mintrack = column.mintrack
+      }
+      else {
+        mintrack = merge(mintrack, column.mintrack)
+      }
+
+    }
+}
+

Added: pkg/PerformanceAnalytics/sandbox/pulkit/ProbSharpeRatio.R
===================================================================
--- pkg/PerformanceAnalytics/sandbox/pulkit/ProbSharpeRatio.R	                        (rev 0)
+++ pkg/PerformanceAnalytics/sandbox/pulkit/ProbSharpeRatio.R	2013-06-17 10:34:12 UTC (rev 2341)
@@ -0,0 +1,66 @@
+#'@title Probabilistic Sharpe Ratio
+#'@description
+#'Given a predefined
+#'benchmark Sharpe ratio (refSR), the observed Sharpe ratio can be expressed
+#' in probabilistic terms.
+#'
+#'@param R the return series
+#'@param Rf the risk free rate of return
+#'@param refSR the reference Sharpe Ratio
+#'@param p the confidence level
+#'@param weights the weights for the portfolio

+ProbSharpeRatio<-
+function(R = NULL, refSR,Rf=0,p = 0.95, weights = NULL,n = NULL,sr = NULL,sk = NULL, kr = NULL, ...){
+    columns = 1
+    columnnames = NULL
+    #Error handling if R is not NULL
+    if(!is.null(R)){
+        x = checkData(R)
+        columns = ncol(x)
+        n = nrow(x)
+        #Checking if the weights are provided or not
+        if(!is.null(weights)){
+            if(length(weights)!=columns){
+                stop("number of items in weights is not equal to the number of columns in R")
+            }
+            else{
+                # A portfolio is constructed by applying the weights
+                x = Return.portfolio(R,weights)
+                sr = SharpeRatio(x, Rf, p, "StdDev")
+                sk = skewness(x)
+                kr = kurtosis(x)
+            }
+        }
+        else{
+            sr = SharpeRatio(x, Rf, p, "StdDev")
+            sk = skewness(x)
+            kr = kurtosis(x)
+        }
+
+        columnnames = colnames(x)
+
+    }
+    # If R is passed as null checking for sharpe ratio , skewness and kurtosis
+    else{
+
+        if(is.null(sr)) stop("You must either pass R or the Sharpe ratio, Skewness, Kurtosis,n etc")
+        if(is.null(sk)) stop("You must either pass R or the Sharpe ratio, Skewness, Kurtosis,n etc")
+        if(is.null(kr)) stop("You must either pass R or the Sharpe Ratio, Skewness, Kurtosis,n etc")
+        if(is.null(n)) stop("You must either pass R or the Sharpe Ratio, Skewness, Kurtosis, n etc")
+    }
+    #If weights are not taken into account a message is displayed
+    if(is.null(weights)){
+        message("no weights passed will calculate Probability Sharpe Ratio for each column")
+    }
+
+    if(!is.null(dim(Rf)))
+        Rf = checkData(Rf)
+    result = pnorm(((sr - refSR)*(n-1)^(0.5))/(1-sr*sk+sr^2*(kr-1)/4)^(0.5))
+    if(!is.null(dim(result))){
+        colnames(result) = columnnames
+        rownames(result) = paste("Probabilistic Sharpe Ratio(p=",round(p*100,1),"%):")
+    }
+    return(result)
+
+}

Added: pkg/PerformanceAnalytics/sandbox/pulkit/chart.PSR.R
===================================================================
--- pkg/PerformanceAnalytics/sandbox/pulkit/chart.PSR.R	                        (rev 0)
+++ pkg/PerformanceAnalytics/sandbox/pulkit/chart.PSR.R	2013-06-17 10:34:12 UTC (rev 2341)
@@ -0,0 +1,35 @@
+#'@title Probabilistic Sharpe Ratio
+#'@description
+#'Given a predefined
+#'benchmark Sharpe ratio (refSR), the observed Sharpe ratio can be expressed
+#' in probabilistic terms.
+#'
+#'@param R the return series
+#'@param Rf the risk free rate of return
+#'@param refSR the reference Sharpe Ratio
+#'@param p the confidence level
+#'@param weights the weights for the portfolio
+chart.PSR<-function(x,Rf,refSR,p=0.95,...){
+    for(column in 1:columns){
+      column.probsharpe <- psr(x[,column],Rf,p,refSR)
+      column.mintrack <- mintrl(x[,column],Rf,p,refSR)
+      if(column == 1){
+        probsharpe = column.probsharpe
+        mintrack = column.mintrack
+      }
+      else {
+        probsharpe = merge(probsharpe, column.probsharpe)
+        mintrack = merge(mintrack, column.mintrack)
+      }
+
+    }
+
+    probsharpe = rbind(probsharpe,mintrack)
+
+    colnames(probsharpe) = columnnames
+    probsharpe = reclass(probsharpe, x)
+    rownames(probsharpe)=c("PSR","MinTRL")
+    return(probsharpe)
+
+}
+

From noreply at r-forge.r-project.org  Mon Jun 17 15:13:40 2013
From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org)
Date: Mon, 17 Jun 2013 15:13:40 +0200 (CEST)
Subject: [Returnanalytics-commits] r2342 - pkg/PerformanceAnalytics/sandbox/pulkit
Message-ID: <20130617131340.AD2D8185602@r-forge.r-project.org>

Author: pulkit
Date: 2013-06-17 15:13:39 +0200 (Mon, 17 Jun 2013)
New Revision: 2342

Modified:
   pkg/PerformanceAnalytics/sandbox/pulkit/MinTRL.R
   pkg/PerformanceAnalytics/sandbox/pulkit/ProbSharpeRatio.R
Log:
-Added feature for weights and non return series based input in MinTRL

Modified: pkg/PerformanceAnalytics/sandbox/pulkit/MinTRL.R
===================================================================
--- pkg/PerformanceAnalytics/sandbox/pulkit/MinTRL.R	2013-06-17 10:34:12 UTC (rev 2341)
+++ pkg/PerformanceAnalytics/sandbox/pulkit/MinTRL.R	2013-06-17 13:13:39 UTC (rev 2342)
@@ -10,25 +10,54 @@
 #'@param p the confidence level
 #'@param weights the weights for the portfolio
 
-MinTrackRecord<-function(x,Rf,refSR,p=0.95,...){
+MinTrackRecord<-function(R = NULL, refSR,Rf=0,p = 0.95, weights = NULL,sr = NULL,sk = NULL, kr = NULL, ...){
+    columns = 1
+    columnnames = NULL
+    #Error handling if R is not NULL
+    if(!is.null(R)){
+        x = checkData(R)
+        columns = ncol(x)
+        n = nrow(x)
+        #Checking if the weights are provided or not
+        if(!is.null(weights)){
+            if(length(weights)!=columns){
+                stop("number of items in weights is not equal to the number of columns in R")
+            }
+            else{
+                # A portfolio is constructed by applying the weights
+                x = Return.portfolio(R,weights)
+                sr = SharpeRatio(x, Rf, p, "StdDev")
+                sk = skewness(x)
+                kr = kurtosis(x)
+            }
+        }
+        else{
+            sr = SharpeRatio(x, Rf, p, "StdDev")
+            sk = skewness(x)
+            kr = kurtosis(x)
+        }
 
-mintrl <- function(x,Rf,p,refSR,...){
-    sk = skewness(x)
-    kr =kurtosis(x)
-    sr = SharpeRatio(x, Rf, p, "StdDev")
-    MinTRL = 1 + (1 - sk*sr + ((kr-1)/4)*sr^2)*(qnorm(p)/(sr-refSR))^2
-    return(MinTRL)
-
-}
-    for(column in 1:columns){
-      column.mintrack <- mintrl(x[,column],Rf,p,refSR)
-      if(column == 1){
-        mintrack = column.mintrack
-      }
-      else {
-        mintrack = merge(mintrack, column.mintrack)
+        columnnames = colnames(x)
+
     }
-
+    # If R is passed as null checking for sharpe ratio , skewness and kurtosis
+    else{
+        if(is.null(sr) | is.null(sk) | is.null(kr)){
+            stop("You must either pass R or the Sharpe ratio, Skewness, Kurtosis,n etc")
+        }
     }
+    #If weights are not taken into account a message is displayed
+    if(is.null(weights)){
+        message("no weights passed,will calculate Minimum Track Record Length for each column")
+    }
+
+    if(!is.null(dim(Rf)))
+        Rf = checkData(Rf)
+    result = 1 + (1 - sk*sr + ((kr-1)/4)*sr^2)*(qnorm(p)/(sr-refSR))^2
+    if(!is.null(dim(result))){
+        colnames(result)
= columnnames + rownames(result) = paste("Minimum Track Record Length(p=",round(p*100,1),"%):") + } + return(result) } Modified: pkg/PerformanceAnalytics/sandbox/pulkit/ProbSharpeRatio.R =================================================================== --- pkg/PerformanceAnalytics/sandbox/pulkit/ProbSharpeRatio.R 2013-06-17 10:34:12 UTC (rev 2341) +++ pkg/PerformanceAnalytics/sandbox/pulkit/ProbSharpeRatio.R 2013-06-17 13:13:39 UTC (rev 2342) @@ -44,14 +44,13 @@ # If R is passed as null checking for sharpe ratio , skewness and kurtosis else{ - if(is.null(sr)) stop("You must either pass R or the Sharpe ratio, Skewness, Kurtosis,n etc") - if(is.null(sk)) stop("You must either pass R or the Sharpe ratio, Skewness, Kurtosis,n etc") - if(is.null(kr)) stop("You must either pass R or the Sharpe Ratio, Skewness, Kurtosis,n etc") - if(is.null(n)) stop("You must either pass R or the Sharpe Ratio, Skewness, Kurtosis, n etc") + if(is.null(sr) | is.null(sk) | is.null(kr) | is.null(n)){ + stop("You must either pass R or the Sharpe ratio, Skewness, Kurtosis,n etc") + } } #If weights are not taken into account a message is displayed if(is.null(weights)){ - message("no weights passed will calculate Probability Sharpe Ratio for each column") + message("no weights passed,will calculate Probability Sharpe Ratio for each column") } if(!is.null(dim(Rf))) From noreply at r-forge.r-project.org Mon Jun 17 18:31:32 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Mon, 17 Jun 2013 18:31:32 +0200 (CEST) Subject: [Returnanalytics-commits] r2343 - in pkg/FactorAnalytics: . R data man Message-ID: <20130617163133.42F7D185681@r-forge.r-project.org> Author: chenyian Date: 2013-06-17 18:31:32 +0200 (Mon, 17 Jun 2013) New Revision: 2343 Added: pkg/FactorAnalytics/R/.Rhistory pkg/FactorAnalytics/R/factorModelPerformanceAttribution.r pkg/FactorAnalytics/R/plot.FM.attribution.r pkg/FactorAnalytics/R/summary.FM.attribution.r pkg/FactorAnalytics/data/.Rhistory pkg/FactorAnalytics/man/factorModelPerformanceAttribution.Rd pkg/FactorAnalytics/man/plot.FM.attribution.Rd pkg/FactorAnalytics/man/plot.FundamentalFactorModel.Rd pkg/FactorAnalytics/man/plot.MacroFactorModel.Rd pkg/FactorAnalytics/man/plot.StatFactorModel.Rd pkg/FactorAnalytics/man/print.MacroFactorModel.Rd pkg/FactorAnalytics/man/print.StatFactorModel.Rd pkg/FactorAnalytics/man/summary.FM.attribution.Rd pkg/FactorAnalytics/man/summary.MacroFactorModel.Rd Removed: pkg/FactorAnalytics/FactorAnalytics old/ pkg/FactorAnalytics/tests/ Modified: pkg/FactorAnalytics/ pkg/FactorAnalytics/DESCRIPTION pkg/FactorAnalytics/NAMESPACE pkg/FactorAnalytics/R/fitFundamentalFactorModel.R pkg/FactorAnalytics/R/fitMacroeconomicFactorModel.R pkg/FactorAnalytics/R/plot.FundamentalFactorModel.r pkg/FactorAnalytics/R/plot.MacroFactorModel.r pkg/FactorAnalytics/R/plot.StatFactorModel.r pkg/FactorAnalytics/R/print.StatFactorModel.r pkg/FactorAnalytics/man/CornishFisher.Rd pkg/FactorAnalytics/man/factorModelCovariance.Rd pkg/FactorAnalytics/man/fitFundamentalFactorModel.Rd pkg/FactorAnalytics/man/fitMacroeconomicFactorModel.Rd pkg/FactorAnalytics/man/fitStatisticalFactorModel.Rd pkg/FactorAnalytics/man/modifiedPortfolioVaRDecomposition.Rd pkg/FactorAnalytics/man/normalEsReport.Rd pkg/FactorAnalytics/man/normalVaRReport.Rd Log: merge factorAnalyticsUW into factorAnalytics under returnanalytics/pkg/FactorAnalytics Property changes on: pkg/FactorAnalytics ___________________________________________________________________ Added: svn:ignore + FactorAnalytics old 
tests Modified: pkg/FactorAnalytics/DESCRIPTION =================================================================== --- pkg/FactorAnalytics/DESCRIPTION 2013-06-17 13:13:39 UTC (rev 2342) +++ pkg/FactorAnalytics/DESCRIPTION 2013-06-17 16:31:32 UTC (rev 2343) @@ -1,11 +1,11 @@ -Package: factorAnalytics +Package: factorAnalyticsUW Type: Package Title: factor analysis -Version: 1.0 +Version: 1.1 Date: 2011-07-22 Author: Eric Zivot and Yi-An Chen Maintainer: Yi-An Chen Description: An R package for estimation and risk analysis of linear factor models for asset returns and portfolios. It contains three major fitting method for the factor models: fitting macroeconomic factor model, fitting fundamental factor model and fitting statistical factor model and some risk analysis tools like VaR, ES to use the result of the fitting method. It also provides the different type of distribution to fit the fat-tail behavior of the financial returns, including edgeworth expansion type distribution. License: GPL-2 -Depends: robust, robustbase, leaps, lars, zoo, MASS, PerformanceAnalytics, ff, sn, tseries +Depends: robust, robustbase, leaps, lars, zoo, MASS, PerformanceAnalytics, ff, sn, tseries, strucchange LazyLoad: yes \ No newline at end of file Modified: pkg/FactorAnalytics/NAMESPACE =================================================================== --- pkg/FactorAnalytics/NAMESPACE 2013-06-17 13:13:39 UTC (rev 2342) +++ pkg/FactorAnalytics/NAMESPACE 2013-06-17 16:31:32 UTC (rev 2343) @@ -1,5 +1,7 @@ -export(chart.Style) -export(chart.RollingStyle) -export(table.RollingStyle) -export(style.fit) -export(style.QPfit) \ No newline at end of file +exportPattern("^[^\\.]") +S3method(print, MacroFactorModel) +S3method(print, StatFactorModel) +S3method(plot, MacroFactorModel) +S3method(plot, StatFactorModel) +S3method(plot, FundamentalFactorModel) +S3method(summary, MacroFactorModel) \ No newline at end of file Added: pkg/FactorAnalytics/R/.Rhistory =================================================================== --- pkg/FactorAnalytics/R/.Rhistory (rev 0) +++ pkg/FactorAnalytics/R/.Rhistory 2013-06-17 16:31:32 UTC (rev 2343) @@ -0,0 +1,512 @@ +eigenvector <- eigen(cov(data))$vectors +eigenvalues <- eigen(cov(data))$values +abline(a=0,b=eigenvector[2,1]/eigenvector[1,1],col="red") +abline(a=0,b=eigenvector[2,2]/eigenvector[1,2],col="red") +covvar <- cbind(c(2,1),c(1,1)) +data <- rmvnorm(100,mean=c(0,0),sigma=covvar) +eigenvector <- eigen(cov(data))$vectors +eigenvector +eigenvalues <- eigen(cov(data))$values +eigenvalues +plot(data) +abline(a=0,b=eigenvector[2,1]/eigenvector[1,1],col="red") +abline(a=0,b=eigenvector[2,2]/eigenvector[1,2],col="red") +eigenvector <- eigen(cor(data))$vectors +eigenvector +eigenvalues <- eigen(cor(data))$values +eigenvalues +plot(data) +abline(a=0,b=eigenvector[2,1]/eigenvector[1,1],col="red") +abline(a=0,b=eigenvector[2,2]/eigenvector[1,2],col="red") +covvar <- cbind(c(2,-1),c(-1,1)) +data <- rmvnorm(100,mean=c(0,0),sigma=covvar) +eigenvector <- eigen(cor(data))$vectors +eigenvector +eigenvalues <- eigen(cor(data))$values +eigenvalues +plot(data) +abline(a=0,b=eigenvector[2,1]/eigenvector[1,1],col="red") +abline(a=0,b=eigenvector[2,2]/eigenvector[1,2],col="red") +covvar <- cbind(c(0,-1),c(-1,0)) +data <- rmvnorm(100,mean=c(0,0),sigma=covvar) +covvar <- cbind(c(0,1),c(1,0)) +data <- rmvnorm(100,mean=c(0,0),sigma=covvar) +eigen(covvar) +covvar <- cbind(c(0,1,0),c(1,0,1),c(0,1,0)) +covvar +eigen(covvar) +covvar <- cbind(c(1,1,0),c(1,1,1),c(0,1,1)) +covvar +eigen(covvar) +covvar <- 
cbind(c(2,1,0),c(1,1,1),c(0,1,1)) +covvar +eigen(covvar) +cor(covvar) +covvar <- cbind(c(1,1,0),c(1,1,1),c(0,1,1)) +covvar +eigen(covvar) +cor(covvar) +chol <- cbind(c(1,p,q),c(0,1,r),c(0,0,1)) +covvar <- chol%*%t(chol) +covvar +p <- 0.9 +q <- 0.1 +r <- 0.8 +chol <- cbind(c(1,p,q),c(0,1,r),c(0,0,1)) +covvar <- chol%*%t(chol) +covvar +is.positive.definite(chol) +eigen(covvar) +covvar +chol <- cbind(c(2,p,q),c(0,1,r),c(0,0,1)) +covvar <- chol%*%t(chol) +covvar +is.positive.definite(chol) +eigen(covvar) +chol <- cbind(c(20,p,q),c(0,1,r),c(0,0,1)) +covvar <- chol%*%t(chol) +covvar +eigen(covvar) +data <- rmvnorm(100,mean=c(0,0,0),sigma=covvar) +install.packages("scatterplot3d") +library(mvtnorm) +library(scatterplot3d) +? scatterplot3d +scatterplot3d(data) +trans3d(data) +trans3d(data[,1],data[,2],data[,3]) +scatterplot3d(data,highlight.3d=TRUE, col.axis="blue", +col.grid="lightblue", main="scatterplot3d - 1", pch=20) +eigen(covvar) +eigenvector <- eigen(cor(data))$vectors +eigenvector +eigenvector <- eigen(cov(data))$vectors +eigenvector +p <- 10 +chol <- cbind(c(1,p,q),c(0,1,r),c(0,0,1)) +covvar <- chol%*%t(chol) +covvar +p <- 1 +chol <- cbind(c(1,p,q),c(0,1,r),c(0,0,1)) +covvar <- chol%*%t(chol) +covvar +chol <- cbind(c(1,p,q),c(0,0,r),c(0,0,1)) +covvar <- chol%*%t(chol) +covvar +p <- .1 +chol <- cbind(c(1,p,q),c(0,0,r),c(0,0,1)) +covvar <- chol%*%t(chol) +p <- .1 +p <- .1 +q <- 0.1 +r <- 0.8 +chol <- cbind(c(1,p,q),c(0,0,r),c(0,0,1)) +covvar <- chol%*%t(chol) +covvar +chol <- cbind(c(1,p,q),c(0,1,r),c(0,0,1)) +covvar <- chol%*%t(chol) +covvar +chol <- cbind(c(1,p,q),c(0,1,r),c(0,0,0.7)) +covvar <- chol%*%t(chol) +covvar +chol <- cbind(c(1,p,q),c(0,1,r),c(0,0,.65)) +covvar <- chol%*%t(chol) +covvar +is.positive.definite(chol) +eigen(covvar) +r <- 10 +chol <- cbind(c(1,p,q),c(0,1,r),c(0,0,.65)) +covvar <- chol%*%t(chol) +covvar +r <- 1 +chol <- cbind(c(1,p,q),c(0,1,r),c(0,0,.65)) +covvar <- chol%*%t(chol) +covvar +r <- 0.1 +chol <- cbind(c(1,p,q),c(0,1,r),c(0,0,.65)) +covvar <- chol%*%t(chol) +covvar +chol <- cbind(c(1,p,q),c(0,1,r),c(0,0,.9)) +covvar <- chol%*%t(chol) +covvar +chol <- cbind(c(1,p,q),c(0,1,r),c(0,0,.98)) +covvar <- chol%*%t(chol) +covvar +chol <- cbind(c(1,p,q),c(0,1,r),c(0,0,.999)) +covvar <- chol%*%t(chol) +covvar +eigen(covvar) +chol <- cbind(c(1,p,q),c(0,1,r),c(0,0,.997)) +covvar <- chol%*%t(chol) +covvar +chol <- cbind(c(1,p,q),c(0,1,r),c(0,0,.9999)) +covvar <- chol%*%t(chol) +covvar +r <- 0.5 +chol <- cbind(c(1,p,q),c(0,1,r),c(0,0,.9999)) +covvar <- chol%*%t(chol) +covvar +chol <- cbind(c(1,p,q),c(0,1,r),c(0,0,.8)) +covvar <- chol%*%t(chol) +covvar +chol <- cbind(c(1,p,q),c(0,1,r),c(0,0,.9)) +covvar <- chol%*%t(chol) +covvar +eigen(covvar) +chol <- cbind(c(1,p,q),c(0,1,r),c(0,0,.9)) +data <- rmvnorm(100,mean=c(0,0,0),sigma=covvar) +pc <- princomp(data) +summary(pc) +plot(pc) +loadings(pc) +eigen(cov(data)) +data <- rmvnorm(100,mean=c(1,0,0),sigma=covvar) +eigen(cov(data)) +pc <- princomp(data) +summary(pc) +loadings(pc) +data <- rmvnorm(100,mean=c(0,0,0),sigma=covvar) +# download data small scale experiment +# using the finance sector provided by CNN money +library(quantmod) +library(PerformanceAnalytics) +symbol.vec=c("AAN","AB","ACAS","ACY","AFL","AIG","AMG","AXP","BAC","BGCP", +"C","CCNE","DB","GS","HCC","IHC","JPM","KEY","PLFE","TCHC") +getSymbols(symbol.vec, from ="2000-01-03", to = "2012-05-10") +# extract monthly adjusted closing prices +l <- length(symbol.vec) +db.m.price <- to.monthly(AAN)[, "AAN.Adjusted", drop=FALSE] +colnames(db.m.price) <- "AAN" +db.m.ret 
<- CalculateReturns(db.m.price, method="compound")[-1,] +for (i in (2:l)) { +name.price <- paste(symbol.vec[i],"m","price",sep=".") +stock <- as.name(symbol.vec[i]) +db.m.new <- to.monthly(eval(stock))[,"eval(stock).Adjusted",drop=FALSE] +colnames(db.m.new) <- symbol.vec[i] +db.m.price <- cbind(db.m.price,db.m.new) +# calculate log-returns +db.m.ret.new <- CalculateReturns(db.m.new, method="compound")[-1,] +db.m.ret <- cbind(db.m.ret,db.m.ret.new) +} +head(db.m.price) +dim(db.m.price) +dim(db.m.ret) +corr.m <- cor(db.m.ret) +corr.m.inv <- solve(corr.m) +db.pc <- princomp(db.m.ret) +summary(db.pc) +centrality <- loadings(db.pc)[,1] +centrality +eigen(corr.m.inv) +centrality +centrality.inv <- eigen(corr.m.inv)$vectors[,1] +eigen(corr.m)$vectors[,1] +cov.m.inv <- solve(cov(db.m.ret)) +centrality.inv <- eigen(cov.m.inv)$vectors[,1] +centrality.inv +eigen(cov(db.m.ret))$vectors[,1] +centrality +centrality.inv +head(db.m.ret) +names(centrality.inv) <- colnames(db.m.ret) +centrality.inv +covvar <- cbind(c(0.8,0.1,0.1),c(0.8,0.1,.1),c(.8,.1,.1)) +covvar +covvar <- cbind(c(0.8,0.8,0.8),c(0.1,0.1,.1),c(.1,.1,.1)) +covvar +eigen(covvar) +covvar <- cbind(c(0.5,0.5,0.5),c(0.4,0.4,.4),c(.1,.1,.1)) +covvar +eigen(covvar) +sum(eigen(covvar)$vectors[,1]^2) +eigen(covvar)$vectors[,1]^2 +sd(eigen(covvar)$vectors[,1]) +covvar <- cbind(replicate(3,c(.33,.33,.33)) +covvar <- cbind(replicate(3,c(.33,.33,.33))) +covvar +covvar <- cbind(replicate(3,c(.33,.33,.33))) +covvar +eigen(covvar) +sd(eigen(covvar)$vectors[,1]) +covvar <- cbind(c(0.5,0.4,0.5),c(0.4,0.5,.4),c(.1,.1,.1)) +covvar +eigen(covvar) +covvar <- cbind(c(0.5,0.4,0.3),c(0.4,0.5,.6),c(.1,.1,.1)) +covvar +eigen(covvar) +sd(eigen(covvar)$vectors[,1]) +covvar <- cbind(c(0.5,0.4,0.3),c(0.4,0.5,.5),c(.1,.1,.2)) +covvar +eigen(covvar) +sd(eigen(covvar)$vectors[,1]) +covvar <- cbind(c(0,0.7,0.5),c(0.9,0,.5),c(.1,0.3,0)) +covvar +eigen(covvar) +eigen(t(covvar) +eigen(t(covvar)) +eigen(t(covvar)) +covvar +t(covvar) +eigen(t(covvar)) +covvar <- cbind(c(0.1,0.7,0.5),c(0.8,0.1,.2),c(.1,0.2,0.3)) +covvar +t(covvar) +eigen(t(covvar)) +sd(eigen(covvar)$vectors[,1]) +eigen(covvar) +t(covvar) +eigen(t(covvar)) +covvar <- cbind(c(0.8,0.8,0.8),c(0.1,0.1,.1),c(.1,0.1,0.1)) +covvar +eigen(covvar) +t(covvar) +eigen(t(covvar)) +sd(eigen(covvar)$vectors[,1]) +sd(eigen(t(covvar))$vectors[,1]) +t(covvar) +library("rmgarch") +? dcc +? DCC.fit +? 
dccfit +eigen(diag(2)) +eigen(matrix(rep(1,4),nrow=2)) +eigen(matrix(c(1,-1,-1,1),nrow=2)) +covvar <- matrix(rep(1,9),nrow=3) +n <- length(covvar[1,]) +alpha <- eigen(cov(covvar))$values[1] -10^(-3) +solve(diag(n)-alpha*cov(covvar))%*%rep(1,n) +alpha <- eigen(covvar)$values[1] -10^(-3) +solve(diag(n)-alpha*cov(covvar))%*%rep(1,n) +eigen(covvar)$values +alpha <- eigen(covvar)$values[1] -10^(-3) +solve(diag(n)-alpha*cov(covvar))%*%rep(1,n) +kalz <- function(covvar) { +n <- length(covvar[1,]) +alpha <- eigen(covvar)$values[1] -10^(-3) +kalz.ec <- solve(diag(n)-alpha*cov(covvar))%*%rep(1,n) +return(kalz.ec) +} +kalz(covvar) +covvar2 <- matrix(rep(.1,9),nrow=3) +kalz(covvar2) +kalz <- function(covvar) { +n <- length(covvar[1,]) +alpha <- eigen(covvar)$values[1] -10^(-1) +kalz.ec <- solve(diag(n)-alpha*cov(covvar))%*%rep(1,n) +return(kalz.ec) +} +# example of all 1 matrix +covvar <- matrix(rep(1,9),nrow=3) +kalz(covvar) +# example of all .1 matrix +covvar2 <- matrix(rep(.1,9),nrow=3) +kalz(covvar2) +kalz <- function(covvar) { +n <- length(covvar[1,]) +alpha <- eigen(covvar)$values[1] -10^(-2) +kalz.ec <- solve(diag(n)-alpha*cov(covvar))%*%rep(1,n) +return(kalz.ec) +} +# example of all 1 matrix +covvar <- matrix(rep(1,9),nrow=3) +kalz(covvar) +# example of all .1 matrix +covvar2 <- matrix(rep(.1,9),nrow=3) +kalz(covvar2) +covvar3 <- eigen(matrix(c(1,0,0,0,1,0,0,0,1),nrow=3)) +covvar3 +covvar3 <- matrix(c(1,0,0,0,1,0,0,0,1),nrow=3) +covvar3 +kalz(covvar3) +covvar +covvar2 +covvar2 <- matrix(c(1,0.1,0.1,0.1,1,0.1,0.1,0.1,1),nrow=3) +kalz(covvar2) +covvar2 <- matrix(c(1,0.1,0.1,0.1,1,0.1,0.1,0.1,1),nrow=3) +kalz(covvar2) +covvar2 +diag(covvar2) <- c(0,0,0) +covvar2 +kalz(covvar2) +matrix(c(1,1,-1,1,1,-1,-1,-1,1),nrow=3) +covvar4 <- matrix(c(1,1,-1,1,1,-1,-1,-1,1),nrow=3) +kalz(covvar4) +covvar4 +kalz(matrix(c(1,1,-.1,1,1,-.1,-.1,-.1,1),nrow=3)) +matrix(c(1,1,-.1,1,1,-.1,-.1,-.1,1),nrow=3) +kalz(matrix(c(1,1,.1,1,1,.1,.1,.1,1),nrow=3)) +kalz <- function(covvar) { +n <- length(covvar[1,]) +alpha <- (eigen(covvar)$values[1])^(-1) -10^(-2) +kalz.ec <- solve(diag(n)-alpha*cov(covvar))%*%rep(1,n) +return(kalz.ec) +} +covvar <- matrix(rep(1,9),nrow=3) +kalz(covvar) +covvar2 <- matrix(c(1,0.1,0.1,0.1,1,0.1,0.1,0.1,1),nrow=3) +kalz(covvar2) +covvar3 <- matrix(c(1,0,0,0,1,0,0,0,1),nrow=3) +kalz(covvar3) +covvar4 <- matrix(c(1,1,-1,1,1,-1,-1,-1,1),nrow=3) +kalz(covvar4) +kalz(matrix(c(1,1,-.1,1,1,-.1,-.1,-.1,1),nrow=3)) +kalz(matrix(c(1,1,.1,1,1,.1,.1,.1,1),nrow=3)) +covvar4 <- matrix(c(1,1,-1,1,1,-1,-1,-1,1),nrow=3) +kalz(covvar4) +covvar4 <- matrix(c(1,1,-.9,1,1,-.9,-.9,-.9,1),nrow=3) +kalz(covvar4) +kalz(matrix(c(1,1,-.1,1,1,-.1,-.1,-.1,1),nrow=3)) +kalz(matrix(c(1,1,-1,1,1,-1,-1,-1,1),nrow=3)) +kalz(matrix(c(1,1,-1,1,1,-1,-1,-1,1),nrow=3)) +kalz(matrix(c(1,1,-.1,1,1,-.1,-.1,-.1,1),nrow=3)) +kalz(matrix(c(1,1,.1,1,1,.1,.1,.1,1),nrow=3)) +matrix(c(1,1,.1,1,1,.1,.1,.1,1),nrow=3) +kalz(matrix(c(1,1,0,1,1,0,0,0,1),nrow=3) +kalz(matrix(c(1,1,0,1,1,0,0,0,1),nrow=3)) +########################################################### +kalz(matrix(c(1,1,0,1,1,0,0,0,1),nrow=3)) +matrix(c(1,1,0,1,1,0,0,0,1),nrow=3) +########################################################### +kalz(matrix(c(1,1,-1,1,1,-1,-1,-1,1),nrow=3)) +kalz(matrix(c(1,1,-.1,1,1,-.1,-.1,-.1,1),nrow=3)) +kalz(matrix(c(1,1,.5,1,1,.5,.5,.5,1),nrow=3)) +kalz(matrix(c(1,1,-.5,1,1,-.5,-.5,-.5,1),nrow=3)) +kalz(matrix(c(1,1,.9,1,1,.9,.9,.9,1),nrow=3)) +kalz(matrix(c(1,1,-.9,1,1,-.9,-.9,-.9,1),nrow=3)) +library(matrixcalc) +install.packages("matrixcalc") 
+is.positive.definite(matrix(c(1,1,-.1,1,1,-.1,-.1,-.1,1),nrow=3)) +is.positive.definite(matrix(c(1,1,-1,1,1,-1,-1,-1,1),nrow=3)) +library(matrixcalc) +is.positive.definite(matrix(c(1,1,-1,1,1,-1,-1,-1,1),nrow=3)) +is.positive.definite(matrix(c(1,1,-.1,1,1,-.1,-.1,-.1,1),nrow=3)) +is.positive.definite(matrix(c(1,1,.1,1,1,.1,.1,.1,1),nrow=3)) +is.positive.definite(matrix(c(1,1,0,1,1,0,0,0,1),nrow=3)) +is.positive.definite(matrix(c(1,1,0,1,1,0,0,0,1),nrow=3)) +is.positive.definite(diag(3)) +is.positive.definite(matrix(c(1,1,-1,1,1,-1,-1,-1,1),nrow=3)) +library(mvtnorm) +library(sna) +library(matrixcalc) +library(corpcor) +chol <- cbind(c(1,0.1,0.1,0.1,0.1),c(0,0.99,0.1,0.1,0.2),c(0,0,0.98,0.4,0.5), +c(0,0,0,0.9,0.5),c(0,0,0,0,0.7)) +covvar <- chol%*%t(chol) +covvar +is.positive.definite(covvar) +eigen(covvar) +eigen(solve(covvar)) +is.positive.definite(covvar) +gplot(covvar,gmode="graph",edge.lwd=15,label=c(1,2,3,4,5)) +eigen(covvar) +? gplot +install.packages(c("JGR","Deducer","DeducerExtras")) +library(JGR) +JGR() +install.packages("rJava") +JPR() +JGR() +library(JGR) +plot.lm +library(leaps) +library(PerformanceAnalytics) +library(lars) +library(robust) +library(ellipse) +library(MASS) +# +# fitMacroeconomicFactormodel +# +# load data from the database +setwd("C:/Users/Yi-An Chen/Documents/R-project/factoranalytics/pkg/factorAnalytics/data") +# data(managers.df) +load("managers.df.rda") +ret.assets = managers.df[,(1:6)] +factors = managers.df[,(7:9)] +# fit the factor model with OLS +setwd("C:/Users/Yi-An Chen/Documents/R-project/factoranalytics/pkg/factorAnalytics/R") +source("fitMacroeconomicFactorModel.r") +source("factorModelCovariance.r") +source("factorModelSdDecomposition.r") +source("factorModelEsDecomposition.r") +source("factorModelVaRDecomposition.r") +fit.macro <- fitMacroeconomicFactorModel(ret.assets,factors,fit.method="OLS", factor.set = 3, +variable.selection="all subsets",decay.factor = 0.95) +source("factorModelPerformanceAttribution.r") +fm.attr <- factorModelPerformanceAttribution(fit.macro) +fm.attr[[1]] +fm.attr[[2]] +fm.attr[[3]] +fit <- fitMacroeconomicFactorModel(ret.assets,factors,fit.method="OLS", factor.set = 3, +variable.selection="all subsets",decay.factor = 0.95) +fm.attr <- factorModelPerformanceAttribution(fit) +fit$ret.assets +factors +benchmark = managers.df[,8] +fit$ret.assets - benchmark +fit = fitMacroeconomicFactorModel(port.ret,fit$factors) +port.ret = fit$ret.assets - benchmark +fit = fitMacroeconomicFactorModel(port.ret,fit$factors) +fit$call +fit <- fitMacroeconomicFactorModel(ret.assets,factors,fit.method="OLS", factor.set = 3, +variable.selection="all subsets",decay.factor = 0.95) +fit$call +eval(fit$call) +fit$call +ret.assets = fit$ret.assets - benchmark +fit.1 = eval(fit$call) +eval(fit$call) +setwd("C:/Users/Yi-An Chen/Documents/R-project/factoranalytics/pkg/factorAnalytics/data") +# data(managers.df) +load("managers.df.rda") +ret.assets = managers.df[,(1:6)] +factors = managers.df[,(7:9)] +# fit the factor model with OLS +setwd("C:/Users/Yi-An Chen/Documents/R-project/factoranalytics/pkg/factorAnalytics/R") +source("fitMacroeconomicFactorModel.r") +source("factorModelCovariance.r") +source("factorModelSdDecomposition.r") +source("factorModelEsDecomposition.r") +source("factorModelVaRDecomposition.r") +fit <- fitMacroeconomicFactorModel(ret.assets,factors,fit.method="OLS", factor.set = 3, +variable.selection="all subsets",decay.factor = 0.95) +source("factorModelPerformanceAttribution.r") +fm.attr <- factorModelPerformanceAttribution(fit) 
+fm.attr[[1]] +source("factorModelPerformanceAttribution.r") +fm.attr <- factorModelPerformanceAttribution(fit) +source("factorModelPerformanceAttribution.r") +fm.attr <- factorModelPerformanceAttribution(fit) +source("factorModelPerformanceAttribution.r") +fm.attr <- factorModelPerformanceAttribution(fit) +benchmark=NULL +if(benchmark != NULL) +} +if(benchmark != NULL) { +} +benchmark != NULL +benchmark[1] != NULL +class(benchmark) +as.logic(benchmark) +? logic +? as.numeric +as.logical(benchmark) +(as.logical(benchmark) != NULL) +source("factorModelPerformanceAttribution.r") +fm.attr <- factorModelPerformanceAttribution(fit) +source("factorModelPerformanceAttribution.r") +fm.attr <- factorModelPerformanceAttribution(fit) +fm.attr[[1]] +fm.attr[[2]] +fm.attr[[3]] +benchmark = managers.df[,8] +fm.attr.b <- factorModelPerformanceAttribution(fit,benchmark=benchmark) +fm.attr.b[[1]] +fm.attr[[1]] +fm.attr[[2]] +source("plot.FM.attribution.r") +plot(fm.attr,date="2006-12-30") +plot(fm.attr.b,date="2006-12-30") +source("summary.FM.attribution.r") +summary(fm.attr) +summary(fm.attr.b) Added: pkg/FactorAnalytics/R/factorModelPerformanceAttribution.r =================================================================== --- pkg/FactorAnalytics/R/factorModelPerformanceAttribution.r (rev 0) +++ pkg/FactorAnalytics/R/factorModelPerformanceAttribution.r 2013-06-17 16:31:32 UTC (rev 2343) @@ -0,0 +1,251 @@ +# performance attribution +# Yi-An Chen +# July 30, 2012 + +factorModelPerformanceAttribution <- + function(fit,benchmark=NULL,...) { + + # input + # fit : Class of MacroFactorModel, FundamentalFactorModel and statFactorModel + # benchmark: benchmark returns, default is NULL. If benchmark is provided, active returns + # is used. + # ... : controlled variables for fitMacroeconomicsFactorModel and fitStatisticalFactorModel + # output + # class of "FMattribution" + # + # plot.FMattribution + # summary.FMattribution + # print.FMattribution + require(zoo) + + if (class(fit) !="MacroFactorModel" & class(fit) !="FundamentalFactorModel" + & class(fit) != "StatFactorModel") + { + stop("Class has to be MacroFactorModel.") + } + + # MacroFactorModel chunk + + if (class(fit) == "MacroFactorModel") { + + # if benchmark is provided + + if (!is.null(benchmark)) { + ret.assets = fit$ret.assets - benchmark + fit = fitMacroeconomicFactorModel(ret.assets=ret.assets,...) 
+ } +# return attributed to factors + cum.attr.ret <- fit$beta.mat + cum.spec.ret <- fit$alpha.vec + factorName = colnames(fit$beta.mat) + fundName = rownames(fit$beta.mat) + + attr.list <- list() + + for (k in fundName) { + fit.lm = fit$asset.fit[[k]] + + ## extract information from lm object + date <- names(fitted(fit.lm)) + + actual.z = zoo(fit.lm$model[1], as.Date(date)) + + +# attributed returns +# active portfolio management p.512 17A.9 + + cum.ret <- Return.cumulative(actual.z) + # setup initial value + attr.ret.z.all <- zoo(, as.Date(date)) + for ( i in factorName ) { + + if (fit$beta.mat[k,i]==0) { + cum.attr.ret[k,i] <- 0 + attr.ret.z.all <- merge(attr.ret.z.all,zoo(rep(0,length(date)),as.Date(date))) + } else { + attr.ret.z <- actual.z - zoo(as.matrix(fit.lm$model[i])%*%as.matrix(fit.lm$coef[i]), + as.Date(date)) + cum.attr.ret[k,i] <- cum.ret - Return.cumulative(actual.z-attr.ret.z) + attr.ret.z.all <- merge(attr.ret.z.all,attr.ret.z) + } + + } + + # specific returns + spec.ret.z <- actual.z - zoo(as.matrix(fit.lm$model[,-1])%*%as.matrix(fit.lm$coef[-1]), + as.Date(date)) + cum.spec.ret[k] <- cum.ret - Return.cumulative(actual.z-spec.ret.z) + attr.list[[k]] <- merge(attr.ret.z.all,spec.ret.z) + colnames(attr.list[[k]]) <- c(factorName,"specific.returns") + } + + + } + +if (class(fit) =="FundamentalFactorModel" ) { + # if benchmark is provided + + if (!is.null(benchmark)) { + stop("use fitFundamentalFactorModel instead") + } + # return attributed to factors + factor.returns <- fit$factor.rets[,-1] + factor.names <- names(fit$factor.rets[,-1]) + dates <- as.character(unique(fit$exposure.data[,"DATE"])) + exposure <- fund.fit$exposure.data + ticker <- names(fit$resids) + + N <- length(ticker) + J <- length(factor.names) + t <- length(dates) + # array arranges in N X J X t + # N is assets names, J is factors, t is time + attr.ret <- array(,dim=c(N,J,t),dimnames=list(ticker,factor.names,dates)) + for (i in dates) { + idx = which(exposure[,"DATE"]==i) + for (j in factor.names) { + attr.ret[,j,i] <- exposure[idx,j]*coredata(factor.returns[as.Date(i)])[,j] + } + } + + # specific returns + # zoo class + intercept <- fit$factor.rets[,1] + resids <- fit$resids + spec.ret.z <- resids + intercept + + #cumulative return attributed to factors + cum.attr.ret <- matrix(,nrow=length(ticker),ncol=length(factor.names), + dimnames=list(ticker,factor.names)) + cum.spec.ret <- rep(0,length(ticker)) + names(cum.spec.ret) <- ticker + + # arrange returns data + actual <- fund.fit$returns.data + # N <- length(assets.names) + # t <- length(dates) + # array arranges in N X t + # N is assets names, J is factors, t is time + actual.ret <- array(,dim=c(N,t),dimnames=list(ticker,dates)) + for (i in dates) { + idx = which(actual[,"DATE"]==i) + actual.ret[,i] <- actual[idx,"RETURN"] + } + + # make returns as zoo + actual.z.all <- zoo(,as.Date(dates)) + for (k in ticker) { + actual.z <- zoo(actual.ret[k,],as.Date(dates)) + actual.z.all <- merge(actual.z.all,actual.z) + } + colnames(actual.z.all) <- ticker + + + + # make list of every asstes and every list contains return attributed to factors + # and specific returns + attr.list <- list() + for (k in ticker){ + attr.ret.z.all <- zoo(,as.Date(dates)) + # cumulative returns + cum.ret <- Return.cumulative(actual.z.all[,k]) + for (j in factor.names) { + attr.ret.z <- zoo(attr.ret[k,j,],as.Date(dates) ) + attr.ret.z.all <- merge(attr.ret.z.all,attr.ret.z) + cum.attr.ret[k,j] <- cum.ret - Return.cumulative(actual.z.all[,k]-attr.ret.z) + } + attr.list[[k]] <- 
merge(attr.ret.z.all,spec.ret.z[,k]) + colnames(attr.list[[k]]) <- c(factor.names,"specific.returns") + cum.spec.ret[k] <- cum.ret - Return.cumulative(actual.z.all[,k]-spec.ret.z[,k]) + + } + +} + + if (class(fit) == "StatFactorModel") { + + # if benchmark is provided + + if (!is.null(benchmark)) { + x = fit$asset.ret - benchmark + fit = fitStatisticalFactorModel(x=x,...) + } + # return attributed to factors + cum.attr.ret <- t(fit$loadings) + cum.spec.ret <- fit$r2 + factorName = rownames(fit$loadings) + fundName = colnames(fit$loadings) + + # create list for attribution + attr.list <- list() + # pca method + + if ( dim(fit$asset.ret)[1] > dim(fit$asset.ret)[2] ) { + + + for (k in fundName) { + fit.lm = fit$asset.fit[[k]] + + ## extract information from lm object + date <- names(fitted(fit.lm)) + # probably needs more general Date setting + actual.z = zoo(fit.lm$model[1], as.Date(date)) + + + # attributed returns + # active portfolio management p.512 17A.9 + + cum.ret <- Return.cumulative(actual.z) + # setup initial value + attr.ret.z.all <- zoo(, as.Date(date)) + for ( i in factorName ) { + + attr.ret.z <- actual.z - zoo(as.matrix(fit.lm$model[i])%*%as.matrix(fit.lm$coef[i]), + as.Date(date)) + cum.attr.ret[k,i] <- cum.ret - Return.cumulative(actual.z-attr.ret.z) + attr.ret.z.all <- merge(attr.ret.z.all,attr.ret.z) + + + } + + # specific returns + spec.ret.z <- actual.z - zoo(as.matrix(fit.lm$model[,-1])%*%as.matrix(fit.lm$coef[-1]), + as.Date(date)) + cum.spec.ret[k] <- cum.ret - Return.cumulative(actual.z-spec.ret.z) + attr.list[[k]] <- merge(attr.ret.z.all,spec.ret.z) + colnames(attr.list[[k]]) <- c(factorName,"specific.returns") + } + } else { + # apca method +# fit$loadings # f X K +# fit$factors # T X f + + dates <- rownames(fit$factors) + for ( k in fundName) { + attr.ret.z.all <- zoo(, as.Date(date)) + actual.z <- zoo(fit$asset.ret[,k],as.Date(dates)) + cum.ret <- Return.cumulative(actual.z) + for (i in factorName) { + attr.ret.z <- zoo(fit$factors[,i] * fit$loadings[i,k], as.Date(dates) ) + attr.ret.z.all <- merge(attr.ret.z.all,attr.ret.z) + cum.attr.ret[k,i] <- cum.ret - Return.cumulative(actual.z-attr.ret.z) + } + spec.ret.z <- actual.z - zoo(fit$factors%*%fit$loadings[,k],as.Date(dates)) + cum.spec.ret[k] <- cum.ret - Return.cumulative(actual.z-spec.ret.z) + attr.list[[k]] <- merge(attr.ret.z.all,spec.ret.z) + colnames(attr.list[[k]]) <- c(factorName,"specific.returns") + } + + + } + + } + + + + ans = list(cum.ret.attr.f=cum.attr.ret, + cum.spec.ret=cum.spec.ret, + attr.list=attr.list) +class(ans) = "FM.attribution" +return(ans) + } \ No newline at end of file Modified: pkg/FactorAnalytics/R/fitFundamentalFactorModel.R =================================================================== --- pkg/FactorAnalytics/R/fitFundamentalFactorModel.R 2013-06-17 13:13:39 UTC (rev 2342) +++ pkg/FactorAnalytics/R/fitFundamentalFactorModel.R 2013-06-17 16:31:32 UTC (rev 2343) @@ -78,7 +78,7 @@ # [1] 42912 117 # dimnames(fulldata) # PERMNO" "DATE" "RETURN" "TICKER.x" "BOOK2MARKET" "TICKER.y" - # check if exposures are numeric, if not, create exposures. factors by dummy variables + # check if exposures are numeric, if not, create exposures. 
factors by dummy variables which.numeric <- sapply(fulldata[, exposures, drop = FALSE],is.numeric) exposures.numeric <- exposures[which.numeric] # industry factor model @@ -334,8 +334,8 @@ factor.rets = f.hat, resids = E.hat, tstats = tstats, - returns.data = fulldata[,c(datevar, assetvar, returnsvar)], - exposure.data = fulldata[,c(datevar, assetvar, exposures)], + returns.data = fulldata[,c(datevar, assetvar, returnsvar)], + exposure.data = fulldata[,c(datevar, assetvar, exposures)], assets = assets, tickers = tickers, call = this.call) Modified: pkg/FactorAnalytics/R/fitMacroeconomicFactorModel.R =================================================================== --- pkg/FactorAnalytics/R/fitMacroeconomicFactorModel.R 2013-06-17 13:13:39 UTC (rev 2342) +++ pkg/FactorAnalytics/R/fitMacroeconomicFactorModel.R 2013-06-17 16:31:32 UTC (rev 2343) @@ -41,9 +41,8 @@ require(leaps) require(lars) require(robust) - require(ellipse) require(MASS) -this.call <- match.call() + this.call <- match.call() if (is.data.frame(ret.assets) & is.data.frame(factors) ) { manager.names = colnames(ret.assets) Added: pkg/FactorAnalytics/R/plot.FM.attribution.r =================================================================== --- pkg/FactorAnalytics/R/plot.FM.attribution.r (rev 0) +++ pkg/FactorAnalytics/R/plot.FM.attribution.r 2013-06-17 16:31:32 UTC (rev 2343) @@ -0,0 +1,82 @@ +# plot.FM.attribution.r +# Yi-An Chen +# 8/1/2012 + +plot.FM.attribution <- function(fm.attr, which.plot=c("none","1L","2L","3L"),max.show=6, + date,plot.single=FALSE,fundName, + which.plot.single=c("none","1L","2L","3L"),...) { + # ... for chart.TimeSeries + require(PerformanceAnalytics) + # plot single assets + if (plot.single==TRUE){ + + which.plot.single<-which.plot.single[1] + + if (which.plot.single=="none") + which.plot.single<-menu(c("attributed cumulative returns", + paste("attributed returns","on",date,sep=" "), + "Time series of attributed returns"), + title="performance attribution plot \nMake a plot selection (or 0 to exit):\n") + switch(which.plot.single, + "1L" = { + bar <- c(fm.attr$cum.spec.ret[fundName],fm.attr$cum.ret.attr.f[fundName,]) + names(bar)[1] <- "specific.returns" + barplot(bar,horiz=TRUE,main="cumulative attributed returns",las=1) + }, + "2L" ={ + bar <- coredata(fm.attr$attr.list[[fundName]][as.Date(date)]) + barplot(bar,horiz=TRUE,main=fundName,las=1) + }, + "3L" = { + chart.TimeSeries(fm.attr$attr.list[[fundName]], + main=paste("Time series of attributed returns of ",fundName,sep=""),... 
) + }, + invisible()) + } + # plot all assets + else { + which.plot<-which.plot[1] + fundnames <- rownames(fm.attr$cum.ret.attr.f) + n <- length(fundnames) + + if(which.plot=='none') + which.plot<-menu(c("attributed cumulative returns", + paste("attributed returns","on",date,sep=" "), + "time series of attributed returns"), + title="performance attribution plot \nMake a plot selection (or 0 to exit):\n") + if (n >= max.show) { + cat(paste("numbers of assets are greater than",max.show,", show only first", + max.show,"assets",sep=" ")) + n <- max.show + } + switch(which.plot, + + "1L" = { + par(mfrow=c(2,n/2)) + for (i in fundnames[1:n]) { + bar <- c(fm.attr$cum.spec.ret[i],fm.attr$cum.ret.attr.f[i,]) + names(bar)[1] <- "specific.returns" + barplot(bar,horiz=TRUE,main=i,las=1) + } + par(mfrow=c(1,1)) + }, + "2L" ={ + par(mfrow=c(2,n/2)) + for (i in fundnames[1:n]) { + bar <- coredata(fm.attr$attr.list[[i]][as.Date(date)]) + barplot(bar,horiz=TRUE,main=i,las=1) + } + par(mfrow=c(1,1)) + }, + "3L" = { + par(mfrow=c(2,n/2)) + for (i in fundnames[1:n]) { + chart.TimeSeries(fm.attr$attr.list[[i]],main=i) + } + par(mfrow=c(1,1)) + }, + invisible() + ) + + } + } \ No newline at end of file Modified: pkg/FactorAnalytics/R/plot.FundamentalFactorModel.r =================================================================== --- pkg/FactorAnalytics/R/plot.FundamentalFactorModel.r 2013-06-17 13:13:39 UTC (rev 2342) +++ pkg/FactorAnalytics/R/plot.FundamentalFactorModel.r 2013-06-17 16:31:32 UTC (rev 2343) @@ -3,15 +3,11 @@ # 7/16/2012 plot.FundamentalFactorModel <- -function(fund.fit,which.plot=c("none","1L","2L","3L","4L","5L","6L","7L"),max.show=12, - plot.single=FALSE, fundId, fundName="TBA", - which.plot.single=c("none","1L","2L","3L","4L","5L","6L","7L","8L", - "9L","10L","11L","12L","13L") ) { +function(fund.fit,which.plot=c("none","1L","2L","3L","4L"),max.show=12) + { require(ellipse) - if (plot.single==TRUE) { - - } else { + which.plot<-which.plot[1] if(which.plot=='none') @@ -51,7 +47,7 @@ invisible() ) - } + } Modified: pkg/FactorAnalytics/R/plot.MacroFactorModel.r =================================================================== --- pkg/FactorAnalytics/R/plot.MacroFactorModel.r 2013-06-17 13:13:39 UTC (rev 2342) +++ pkg/FactorAnalytics/R/plot.MacroFactorModel.r 2013-06-17 16:31:32 UTC (rev 2343) @@ -1,7 +1,7 @@ plot.MacroFactorModel <- function(fit.macro,colorset=c(1:12),legend.loc=NULL, which.plot=c("none","1L","2L","3L","4L","5L","6L","7L"),max.show=6, - plot.single=FALSE, fundId, fundName="TBA",which.plot.single=c("none","1L","2L","3L","4L","5L","6L", + plot.single=FALSE, fundName,which.plot.single=c("none","1L","2L","3L","4L","5L","6L", "7L","8L","9L","10L","11L","12L","13L")) { require(zoo) require(PerformanceAnalytics) @@ -11,9 +11,8 @@ ## inputs: ## fit.macro lm object summarizing factor model fit. 
It is assumed that [TRUNCATED]

To get the complete diff run:
    svnlook diff /svnroot/returnanalytics -r 2343

From noreply at r-forge.r-project.org  Mon Jun 17 19:05:38 2013
From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org)
Date: Mon, 17 Jun 2013 19:05:38 +0200 (CEST)
Subject: [Returnanalytics-commits] r2344 - pkg/Meucci/R
Message-ID: <20130617170538.B949F1844C6@r-forge.r-project.org>

Author: xavierv
Date: 2013-06-17 19:05:37 +0200 (Mon, 17 Jun 2013)
New Revision: 2344

Added:
   pkg/Meucci/R/LognormalMoments2Parameters.R
Log:
added LognormalMoments2Parameters.R file

Added: pkg/Meucci/R/LognormalMoments2Parameters.R
===================================================================
--- pkg/Meucci/R/LognormalMoments2Parameters.R	                        (rev 0)
+++ pkg/Meucci/R/LognormalMoments2Parameters.R	2013-06-17 17:05:37 UTC (rev 2344)
@@ -0,0 +1,26 @@
+#' Compute the parameters of the underlying normal distribution from the mean and variance of a lognormal distribution, as described in
+#' A. Meucci, "Risk and Asset Allocation", Springer, 2005, Chapter 1.
+#' +#' @references +#' \url{http://} +#' See Meucci's script for "S_LognormalSample.m" +#' +#' @author Xavier Valls \email{flamejat@@gmail.com} +#' @export + + + +################################################################################################################## +### Input parameters + +nSim = 10000; +ExpX = 3; +VarX = 5; + +################################################################################################################## +### Generate lognormal sample with above parameters + +LP = LognormalMoments2Parameters( ExpX, VarX ); +sigma = sqrt( LP$sigma_square ); + +X = rlnorm( nSim, LP$mu, sigma ); + +################################################################################################################## +### Plots + +# plot over time +plot( X, main = "lognormal sample vs observation time" ); + +# plot histogram +NumBins = round( 10 * log( nSim ) ); +hist( X, NumBins, main = "histogram of lognormal sample" ); + +# plot empirical cdf +f = ecdf( X ); +plot( f, col = "red", main = "cdf of lognormal distribution" ); + +# plot exact cdf +F = plnorm( 1:10000, LP$mu, sigma ); +lines ( 1:10000, F, col = "blue" ); +legend( "bottomright", 1.9, c("empirical", "exact"), col = c("red", "blue"), lty = 1, bg = "gray90" ); + + +################################################################################################################## +# plot empirical quantile +u= seq( 0.01, 0.99, 0.01 ); # range of quantiles (values between zero and one) +q = quantile( X, u ); +plot( u, q, type = "l", xlab="Grade", ylab="Quantile", lty = 1, col = "red", main = "quantile of lognormal distribution" ); + +# plot exact quantile +Q = qlnorm( u, LP$mu, sigma ); +lines( u, Q, type = "l", lty = 1, col = "blue" ); +legend( "bottomright", 1.9, c( "empirical", "exact" ), col = c( "red", "blue" ), lty = 1, bg = "gray90" ); Added: pkg/Meucci/demo/S_NonAnalytical.R =================================================================== --- pkg/Meucci/demo/S_NonAnalytical.R (rev 0) +++ pkg/Meucci/demo/S_NonAnalytical.R 2013-06-17 17:12:41 UTC (rev 2345) @@ -0,0 +1,54 @@ +#' This script generates draws for the sum of random variables, as described in +#' A. Meucci, "Risk and Asset Allocation", Springer, 2005, Chapter 1. 
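+#' (For reference: the sum of independent Student-t and lognormal variables has no simple closed-form distribution, which is why this script characterizes Z = X + Y entirely through its simulated histogram, empirical cdf and empirical quantile.)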
+#' +#' @references +#' \url{http://} +#' See Meucci's script for "S_NonAnalytical.m" +#' +#' @author Xavier Valls \email{flamejat@@gmail.com} +#' @export + +################################################################################################################## +### Input parameters + +nSim = 10000; +mu_St = 0; +s2_St = 0.1; +nu_St = 8; # NOTE: see how the final results change if you increase nu (=4 is enough) + +mu_LN = 0.1; +s2_LN = 0.2; + +################################################################################################################## +### Generate draws + +# generate Student sample with above parameters +s_St = sqrt( s2_St ); +X = mu_St + s_St * rt( nSim, nu_St ); + +# generate lognormal sample with above parameters +s_LN = sqrt( s2_LN ); +Y = rlnorm( nSim, mu_LN, s_LN ); + +# sum samples +Z = X + Y; + +################################################################################################################## +### Plot the sample Z +plot( Z, xlab="simulations", ylab="Z", main = "sample vs observation time" ); + +################################################################################################################## +### Plot the histogram of Z +NumBins = round( 10 * log( nSim ) ); +hist( Z, NumBins, xlab="Z", main="sample histogram" ); + +################################################################################################################## +### Plot the empirical cdf of Z +f = ecdf( Z ); +plot( f, xlab="Z", main="empirical cdf" ); + +################################################################################################################## +### Plot the empirical quantile of Z +u = seq( 0.01, 0.99, 0.01 ); # range of quantiles (values between zero and one) +q = quantile( Z, u ); +plot( u, q, type = "l", xlab="Grade", ylab="Quantile", lty = 1, main = "empirical quantile" ); Added: pkg/Meucci/demo/S_NormalSample.R =================================================================== --- pkg/Meucci/demo/S_NormalSample.R (rev 0) +++ pkg/Meucci/demo/S_NormalSample.R 2013-06-17 17:12:41 UTC (rev 2345) @@ -0,0 +1,54 @@ +#' This script simulates univariate normal variables, as described in +#' A. Meucci, "Risk and Asset Allocation", Springer, 2005, Chapter 1.
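+#' (For reference: the script checks the simulation by overlaying the empirical cdf and quantile on their exact counterparts pnorm and qnorm; by the Glivenko-Cantelli theorem the empirical cdf converges uniformly to the exact cdf as nSim grows.)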
+#' +#' @references +#' \url{http://} +#' See Meucci's script for "S_NormalSample.m" +#' +#' @author Xavier Valls \email{flamejat@@gmail.com} +#' @export + +################################################################################################################## +### Input parameters +nSim = 10000; +mu = 3; +sigma2 = 5; + +################################################################################################################## +### Generate normal sample with above parameters +sigma = sqrt( sigma2 ); +X = rnorm( nSim, mu, sigma ); + +################################################################################################################## +### Plot the sample over time +plot( X, main = "normal sample vs observation time" ); + + +################################################################################################################## +### Plot the histogram +NumBins = round( 10 * log( nSim ) ); +hist( X, NumBins, main = "histogram of normal sample" ); + +################################################################################################################## +### Compare empirical with exact cdfs + +# plot empirical cdf +f = ecdf( X ); +plot( f, col = "red", main = "cdf of normal distribution" ); + +# plot exact cdf +F = pnorm( 1:10000, mu, sigma ); +lines( 1:10000, F, col = "blue" ); +legend( "bottomright", 1.9, c("empirical", "exact"), col = c("red", "blue"), lty = 1, bg = "gray90" ); + +################################################################################################################## +### Compare empirical and exact quantiles +# plot empirical quantile +u = seq( 0.01, 0.99, 0.01 ); # range of quantiles (values between zero and one) +q = quantile( X, u ); +plot( u, q, type = "l", xlab="Grade", ylab="Quantile", lty = 1, col = "red", main = "quantile of normal distribution" ); + +# plot exact quantile +Q = qnorm( u, mu, sigma ); +lines( u, Q, type = "l", lty = 1, col = "blue" ); +legend( "bottomright", 1.9, c( "empirical", "exact" ), col = c( "red", "blue" ), lty = 1, bg = "gray90" ); Added: pkg/Meucci/demo/S_StudentTSample.R =================================================================== --- pkg/Meucci/demo/S_StudentTSample.R (rev 0) +++ pkg/Meucci/demo/S_StudentTSample.R 2013-06-17 17:12:41 UTC (rev 2345) @@ -0,0 +1,74 @@ +#' This script simulates univariate Student-t variables, as described in +#' A. Meucci, "Risk and Asset Allocation", Springer, 2005, Chapter 1.
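+#' (For reference: the "stochastic representation" draw below uses the fact that if Y ~ N(0, sigma^2) and Z ~ chi-square(nu) are independent, then mu + Y / sqrt( Z / nu ) has the same distribution as mu + sigma * t with nu degrees of freedom, matching the built-in generator and the grade-inversion draw.)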
+#' +#' @references +#' \url{http://} +#' See Meucci's script for "S_StudentTSample.m" +#' +#' @author Xavier Valls \email{flamejat@@gmail.com} +#' @export + +################################################################################################################## +### Input parameters + +nSim = 10000; +sigma2 = 6; +ExpX = 2; +VarX = 7; + +################################################################################################################## +### Determine mu, nu and sigma + +mu = ExpX; +nu = 2 / ( 1 - sigma2 / VarX ); # from Var[X] = sigma2 * nu / ( nu - 2 ), solved for nu +sigma = sqrt( sigma2 ); + +################################################################################################################## +### Generate Student t sample with above parameters using built-in generator + +X_a = mu + sigma * rt( nSim, nu ); + +################################################################################################################## +### Generate Student t sample with above parameters using stochastic representation + +Y = rnorm( nSim, 0, sigma ); +Z = rchisq( nSim, nu ); +X_b = mu + Y / sqrt( Z / nu ); + +################################################################################################################## +### Generate Student t sample with above parameters using grade inversion + +U = runif( nSim ); +X_c = mu + sigma * qt( U, nu ); + +################################################################################################################## +### Plot histograms +NumBins = round( 10 * log( nSim ) ); + +par( mfrow = c( 3, 1 ) ); + +hist( X_a, NumBins, main = "built-in generator" ); +hist( X_b, NumBins, main = "stoch. representation" ); +hist( X_c, NumBins, main = "grade inversion" ); + + +# unported axis-matching code from the original MATLAB script: +#axisLimits = [min(axisLimits(:, 1)), max(axisLimits(:, 2)), min(axisLimits(:, 3)), max(axisLimits(:, 4))]; +#subplot(3, 1, 1), axis(axisLimits); +#subplot(3, 1, 2), axis(axisLimits); +#subplot(3, 1, 3), axis(axisLimits); + +################################################################################################################## +### Compare empirical quantiles of the three simulations +u = seq( 0.01, 0.99, 0.01 ); # range of quantiles (values between zero and one) +q_a = quantile( X_a, u ); +q_b = quantile( X_b, u ); +q_c = quantile( X_c, u ); + +################################################################################################################## +### Superimpose the plots of the empirical quantiles + +plot( u, q_a, type = "l", xlab="Grade", ylab="Quantile", lty = 1, col = "red", main = "quantile of Student-t distribution" ); +lines( u, q_b, type = "l", lty = 1, col = "blue" ); +lines( u, q_c, type = "l", lty = 1, col = "green" ); +legend( "bottomright", 1.9, c( "built-in generator", "stoch.
representation", "grade inversion" ), col = c( "red" , "blue", "green"), + lty = 1, bg = "gray90" ); \ No newline at end of file From noreply at r-forge.r-project.org Mon Jun 17 20:27:52 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Mon, 17 Jun 2013 20:27:52 +0200 (CEST) Subject: [Returnanalytics-commits] r2346 - pkg/FactorAnalytics Message-ID: <20130617182752.498C31856D3@r-forge.r-project.org> Author: chenyian Date: 2013-06-17 20:27:51 +0200 (Mon, 17 Jun 2013) New Revision: 2346 Modified: pkg/FactorAnalytics/DESCRIPTION Log: change DESCRIPTION file Modified: pkg/FactorAnalytics/DESCRIPTION =================================================================== --- pkg/FactorAnalytics/DESCRIPTION 2013-06-17 17:12:41 UTC (rev 2345) +++ pkg/FactorAnalytics/DESCRIPTION 2013-06-17 18:27:51 UTC (rev 2346) @@ -1,8 +1,8 @@ -Package: factorAnalyticsUW +Package: factorAnalytics Type: Package Title: factor analysis -Version: 1.1 -Date: 2011-07-22 +Version: 1.0 +Date: 2013-06-17 Author: Eric Zivot and Yi-An Chen Maintainer: Yi-An Chen Description: An R package for estimation and risk analysis of linear factor models for asset returns and portfolios. It contains three major fitting method for the factor models: fitting macroeconomic factor model, fitting fundamental factor model and fitting statistical factor model and some risk analysis tools like VaR, ES to use the result of the fitting method. It also provides the different type of distribution to fit the fat-tail behavior of the financial returns, including edgeworth expansion type distribution. From noreply at r-forge.r-project.org Tue Jun 18 01:41:16 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Tue, 18 Jun 2013 01:41:16 +0200 (CEST) Subject: [Returnanalytics-commits] r2347 - pkg/PortfolioAnalytics/R Message-ID: <20130617234116.8DF9E185837@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-18 01:41:16 +0200 (Tue, 18 Jun 2013) New Revision: 2347 Modified: pkg/PortfolioAnalytics/R/constraints.R Log: Added constraints_v2. Moves box constraints out of constructor for class constraint. Modified: pkg/PortfolioAnalytics/R/constraints.R =================================================================== --- pkg/PortfolioAnalytics/R/constraints.R 2013-06-17 18:27:51 UTC (rev 2346) +++ pkg/PortfolioAnalytics/R/constraints.R 2013-06-17 23:41:16 UTC (rev 2347) @@ -151,6 +151,69 @@ )) } +#' constructor for class constraint_v2 +#' +#' @param assets number of assets, or optionally a named vector of assets specifying seed weights +#' @param ... any other passthru parameters +#' @param min_sum minimum sum of all asset weights, default .99 +#' @param max_sum maximum sum of all asset weights, default 1.01 +#' @param weight_seq seed sequence of weights, see \code{\link{generatesequence}} +#' @author Peter Carl, Brian G. 
Peterson, and Ross Bennett +#' @examples +#' exconstr <- constraint_v2(assets=10, min_sum=1, max_sum=1, weight_seq=generatesequence()) +#' @export +constraint_v2 <- function(assets=NULL, ..., min_sum=.99, max_sum=1.01, weight_seq=NULL) { + # based on GPL R-Forge pkg roi by Stefan Thuessel,Kurt Hornik,David Meyer + # constraint_v2 is based on the constraint_v1 object, but removes box + # constraint specification + if (is.null(assets)) { + stop("You must specify the assets") + } + + if(!is.null(assets)){ + # TODO FIXME this doesn't work quite right on matrix of assets + if(is.numeric(assets)){ + if (length(assets) == 1) { + nassets = assets + # we passed in a number of assets, so we need to create the vector + message("assuming equal weighted seed portfolio") + assets <- rep(1 / nassets, nassets) + } else { + nassets = length(assets) + } + # and now we may need to name them + if (is.null(names(assets))) { + for(i in 1:length(assets)){ + names(assets)[i] <- paste("Asset",i,sep=".") + } + } + } + if(is.character(assets)){ + nassets = length(assets) + assetnames = assets + message("assuming equal weighted seed portfolio") + assets <- rep(1 / nassets, nassets) + names(assets) <- assetnames # set names, so that other code can access it, + # and doesn't have to know about the character vector + # print(assets) + } + # if assets is a named vector, we'll assume it is current weights + } + + ## now structure and return + return(structure( + list( + assets = assets, + min_sum = min_sum, + max_sum = max_sum, + weight_seq = weight_seq, + objectives = list(), + call = match.call() + ), + class=c("v2_constraint","constraint") + )) +} + #' check function for constraints #' #' @param x object to test for type \code{constraint} From noreply at r-forge.r-project.org Tue Jun 18 02:03:06 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Tue, 18 Jun 2013 02:03:06 +0200 (CEST) Subject: [Returnanalytics-commits] r2348 - pkg/PortfolioAnalytics/R Message-ID: <20130618000306.BFB8518517D@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-18 02:03:05 +0200 (Tue, 18 Jun 2013) New Revision: 2348 Modified: pkg/PortfolioAnalytics/R/constraints.R Log: Added add.constraint function Modified: pkg/PortfolioAnalytics/R/constraints.R =================================================================== --- pkg/PortfolioAnalytics/R/constraints.R 2013-06-17 23:41:16 UTC (rev 2347) +++ pkg/PortfolioAnalytics/R/constraints.R 2013-06-18 00:03:05 UTC (rev 2348) @@ -214,6 +214,52 @@ )) } +#' General interface for adding and/or updating optimization constraints, currently supports box and group constraints. +#' +#' This is the main function for adding and/or updating constraints in an object of type \code{\link{constraint}}. +#' +#' In general, you will define your constraints as one of two types: 'box' or 'group'. 
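+#' A 'group' constraint can be added the same way (a hypothetical sketch; groups, group_min and group_max are the arguments accepted by the group_constraint constructor added alongside this interface):
+#' exconstr <- add.constraint(exconstr, type="group", groups=c(5, 5), group_min=0.25, group_max=0.75) # two groups of five assets each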
+#' +#' @param constraints an object of type "constraint" to add the constraint to, specifying the constraints for the optimization, see \code{\link{constraint_v2}} +#' @param type character type of the constraint to add or update, currently 'box' or 'group' +#' @param \dots any other passthru parameters to specify box and/or group constraints +#' @author Ross Bennett +#' +#' @seealso \code{\link{constraint}} +#' +#' @examples +#' exconstr <- constraint_v2(assets=10, min_sum=1, max_sum=1) +#' # Add box constraints with a minimum weight of 0.1 and maximum weight of 0.4 +#' exconstr <- add.constraint(exconstr, type="box", min=0.1, max=0.4) +#' @export +add.constraint <- function(constraints, type, ...){ + # Check to make sure that the constraints passed in is a constraints object + if (!is.constraint(constraints)) {stop("constraints passed in are not of class constraint")} + + # Check to make sure a type is passed in as an argument + if (!hasArg(type)) stop("you must supply a type of constraints to create") + + assets <- constraints$assets + tmp_constraint = NULL + + # Currently supports box and group constraints. Will add more later. + switch(type, + # Box constraints + box = {tmp_constraint <- box_constraint(assets, ...=...) + constraints$min <- tmp_constraint$min + constraints$max <- tmp_constraint$max + }, + # Group constraints + group = {tmp_constraint <- group_constraint(assets, ...=...) + constraints$groups <- tmp_constraint$groups + constraints$cLO <- tmp_constraint$cLO + constraints$cUP <- tmp_constraint$cUP + }, + null = {return(constraints)} + ) + return(constraints) +} + #' check function for constraints #' #' @param x object to test for type \code{constraint} From noreply at r-forge.r-project.org Tue Jun 18 02:24:59 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Tue, 18 Jun 2013 02:24:59 +0200 (CEST) Subject: [Returnanalytics-commits] r2349 - pkg/PortfolioAnalytics/R Message-ID: <20130618002459.3C96318517C@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-18 02:24:58 +0200 (Tue, 18 Jun 2013) New Revision: 2349 Modified: pkg/PortfolioAnalytics/R/constraints.R Log: Adding box_constraints function. Called by add.constraint to update constraint object with box constraints. Modified: pkg/PortfolioAnalytics/R/constraints.R =================================================================== --- pkg/PortfolioAnalytics/R/constraints.R 2013-06-18 00:03:05 UTC (rev 2348) +++ pkg/PortfolioAnalytics/R/constraints.R 2013-06-18 00:24:58 UTC (rev 2349) @@ -260,6 +260,100 @@ return(constraints) } +#' constructor for box_constraint. +#' +#' This function is called by add.constraint when type="box" is specified. see \code{\link{add.constraint}} +#' +#' @param assets number of assets, or optionally a named vector of assets specifying seed weights +#' @param min numeric or named vector specifying minimum weight box constraints +#' @param max numeric or named vector specifying minimum weight box constraints +#' @param min_mult numeric or named vector specifying minimum multiplier box constraint from seed weight in \code{assets} +#' @param max_mult numeric or named vector specifying maximum multiplier box constraint from seed weight in \code{assets} +#' @author Ross Bennett +#' @seealso \code{\link{add.constraint}} +#' @export +box_constraint <- function(assets, min, max, min_mult, max_mult){ + # Based on the constraint function for object of class constraint_v1 that + # included specifying box constraints. 
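+ # (Illustrative note, assuming equal seed weights of 0.10 on ten assets: with
+ # min_mult = 0.5 and max_mult = 3, the multiplier adjustment at the end of this
+ # function tightens the box to max(min, 0.05) and min(max, 0.30) per asset.)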
+ + # Get the length of the assets vector + nassets <- length(assets) + + # Check that the length of min and max are the same + if(hasArg(min) | hasArg(max)) { + if (length(min) > 1 & length(max) > 1){ + if (length(min) != length(max)) { stop("length of min and max must be the same") } + } + + # If the user passes in a scalar for min, then create a min vector + if (length(min) == 1) { + message("min not passed in as vector, replicating min to length of length(assets)") + min <- rep(min, nassets) + } + if (length(min) != nassets) stop(paste("length of min must be equal to 1 or the number of assets:", nassets)) + + # If the user passes in a scalar for max, then create a max vector + if (length(max) == 1) { + message("max not passed in as vector, replicating max to length of length(assets)") + max <- rep(max, nassets) + } + if (length(max) != nassets) stop(paste("length of max must be equal to 1 or the number of assets:", nassets)) + + } else { + # Default to min=0 and max=1 if min or max are not passed in + message("no min or max passed in, assuming 0 and 1") + min <- rep(0, nassets) + max <- rep(1, nassets) + } + + # Set the names of the min and max vector to the names of the assets vector + names(min) <- names(assets) + names(max) <- names(assets) + + # Checks for min_mult and max_mult + if(hasArg(min_mult) | hasArg(max_mult)) { + if (length(min_mult) > 1 & length(max_mult) > 1){ + if (length(min_mult) != length(max_mult) ) { stop("length of min_mult and max_mult must be the same") } + } else { + message("min_mult and max_mult not passed in as vectors, replicating min_mult and max_mult to length of assets vector") + min_mult = rep(min_mult, nassets) + max_mult = rep(max_mult, nassets) + } + } + + if (!is.null(names(assets))) { + assetnames <- names(assets) + if(hasArg(min)){ + names(min) <- assetnames + names(max) <- assetnames + } else { + min = NULL + max = NULL + } + if(hasArg(min_mult)){ + names(min_mult) <- assetnames + names(max_mult) <- assetnames + } else { + min_mult = NULL + max_mult = NULL + } + } + + # now adjust min and max to account for min_mult and max_mult from seed + if(!is.null(min_mult) & !is.null(min)) { + tmp_min <- assets * min_mult + #TODO FIXME this creates a list, and it should create a named vector or matrix + min[which(tmp_min > min)] <- tmp_min[which(tmp_min > min)] + } + if(!is.null(max_mult) & !is.null(max)) { + tmp_max <- assets * max_mult + #TODO FIXME this creates a list, and it should create a named vector or matrix + max[which(tmp_max < max)] <- tmp_max[which(tmp_max < max)] + } + + return(list(min=min, max=max)) +} + #' check function for constraints #' #' @param x object to test for type \code{constraint} From noreply at r-forge.r-project.org Tue Jun 18 02:56:27 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Tue, 18 Jun 2013 02:56:27 +0200 (CEST) Subject: [Returnanalytics-commits] r2350 - pkg/PortfolioAnalytics/R Message-ID: <20130618005627.D48EC184F5F@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-18 02:56:27 +0200 (Tue, 18 Jun 2013) New Revision: 2350 Modified: pkg/PortfolioAnalytics/R/constraints.R Log: Adding group_constraints function. Called by add.constraint to update constraint object with group constraints. 
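A hypothetical usage sketch, assuming the constraint_v2 and add.constraint interfaces from r2347-r2348 (group_constraint requires sum(groups) to equal the number of assets):

exconstr <- constraint_v2(assets=10, min_sum=1, max_sum=1)
# three groups of sizes 3, 3 and 4; each group's weight kept between 10% and 60%
exconstr <- add.constraint(exconstr, type="group", groups=c(3, 3, 4), group_min=0.1, group_max=0.6)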
Modified: pkg/PortfolioAnalytics/R/constraints.R =================================================================== --- pkg/PortfolioAnalytics/R/constraints.R 2013-06-18 00:24:58 UTC (rev 2349) +++ pkg/PortfolioAnalytics/R/constraints.R 2013-06-18 00:56:27 UTC (rev 2350) @@ -354,6 +354,43 @@ return(list(min=min, max=max)) } +#' constructor for group_constraint +#' +#' This function is called by add.constraint when type="group" is specified. see \code{\link{add.constraint}} +#' +#' @param assets number of assets, or optionally a named vector of assets specifying seed weights +#' @param groups vector specifying the groups of the assets +#' @param group_min numeric or vector specifying minimum weight group constraints +#' @param group_max numeric or vector specifying minimum weight group constraints +#' @author Ross Bennett +#' @seealso \code{\link{add.constraint}} +#' @export +group_constraint <- function(assets, groups, group_min, group_max) { + nassets <- length(assets) + ngroups <- length(groups) + + if(sum(groups) != nassets) { + stop("sum of groups must be equal to the number of assets") + } + + # Checks for group_min + if (length(group_min) == 1) { + message("group_min not passed in as vector, replicating group_min to length of groups") + group_min <- rep(group_min, ngroups) + } + if (length(group_min) != ngroups) stop(paste("length of group_min must be equal to 1 or the length of groups:", ngroups)) + + # Checks for group_max + if (length(group_max) == 1) { + message("group_max not passed in as vector, replicating group_max to length of groups") + group_max <- rep(group_max, ngroups) + } + if (length(group_max) != ngroups) stop(paste("length of group_max must be equal to 1 or the length of groups:", ngroups)) + + return(list(groups=groups, cLO=group_min, cUP=group_max)) +} + + #' check function for constraints #' #' @param x object to test for type \code{constraint} From noreply at r-forge.r-project.org Tue Jun 18 11:52:28 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Tue, 18 Jun 2013 11:52:28 +0200 (CEST) Subject: [Returnanalytics-commits] r2351 - in pkg/FactorAnalytics: R man Message-ID: <20130618095228.3FC7318070E@r-forge.r-project.org> Author: braverock Date: 2013-06-18 11:52:27 +0200 (Tue, 18 Jun 2013) New Revision: 2351 Modified: pkg/FactorAnalytics/R/chart.Style.R pkg/FactorAnalytics/R/covEWMA.R pkg/FactorAnalytics/R/factorModelCovariance.r pkg/FactorAnalytics/R/factorModelEsDecomposition.R pkg/FactorAnalytics/R/factorModelMonteCarlo.R pkg/FactorAnalytics/R/factorModelPerformanceAttribution.r pkg/FactorAnalytics/R/factorModelSdDecomposition.R pkg/FactorAnalytics/R/factorModelVaRDecomposition.R pkg/FactorAnalytics/R/fitFundamentalFactorModel.R pkg/FactorAnalytics/R/fitMacroeconomicFactorModel.R pkg/FactorAnalytics/R/fitStatisticalFactorModel.R pkg/FactorAnalytics/R/impliedFactorReturns.R pkg/FactorAnalytics/R/modifiedEsReport.R pkg/FactorAnalytics/R/modifiedIncrementalES.R pkg/FactorAnalytics/R/modifiedIncrementalVaR.R pkg/FactorAnalytics/R/modifiedPortfolioEsDecomposition.R pkg/FactorAnalytics/R/modifiedPortfolioVaRDecomposition.R pkg/FactorAnalytics/R/modifiedVaRReport.R pkg/FactorAnalytics/R/nonparametricEsReport.R pkg/FactorAnalytics/R/nonparametricIncrementalES.R pkg/FactorAnalytics/R/nonparametricIncrementalVaR.R pkg/FactorAnalytics/R/nonparametricPortfolioEsDecomposition.R pkg/FactorAnalytics/R/nonparametricPortfolioVaRDecomposition.R pkg/FactorAnalytics/R/nonparametricVaRReport.R pkg/FactorAnalytics/R/normalEsReport.R 
pkg/FactorAnalytics/R/normalIncrementalES.R pkg/FactorAnalytics/R/normalIncrementalVaR.R pkg/FactorAnalytics/R/normalPortfolioEsDecomposition.R pkg/FactorAnalytics/R/normalPortfolioVaRDecomposition.R pkg/FactorAnalytics/R/normalVaRReport.R pkg/FactorAnalytics/R/plot.FM.attribution.r pkg/FactorAnalytics/R/plot.FundamentalFactorModel.r pkg/FactorAnalytics/R/plot.MacroFactorModel.r pkg/FactorAnalytics/R/plot.StatFactorModel.r pkg/FactorAnalytics/R/portfolioSdDecomposition.R pkg/FactorAnalytics/R/print.MacroFactorModel.r pkg/FactorAnalytics/R/print.StatFactorModel.r pkg/FactorAnalytics/R/scenarioPredictions.r pkg/FactorAnalytics/R/scenarioPredictionsPortfolio.r pkg/FactorAnalytics/R/summary.FM.attribution.r pkg/FactorAnalytics/R/summary.MacroFactorModel.r pkg/FactorAnalytics/man/CornishFisher.Rd pkg/FactorAnalytics/man/covEWMA.Rd pkg/FactorAnalytics/man/factorModelCovariance.Rd pkg/FactorAnalytics/man/factorModelEsDecomposition.Rd pkg/FactorAnalytics/man/factorModelMonteCarlo.Rd pkg/FactorAnalytics/man/factorModelPerformanceAttribution.Rd pkg/FactorAnalytics/man/factorModelSdDecomposition.Rd pkg/FactorAnalytics/man/factorModelVaRDecomposition.Rd pkg/FactorAnalytics/man/fitFundamentalFactorModel.Rd pkg/FactorAnalytics/man/fitMacroeconomicFactorModel.Rd pkg/FactorAnalytics/man/fitStatisticalFactorModel.Rd pkg/FactorAnalytics/man/impliedFactorReturns.Rd pkg/FactorAnalytics/man/managers.df.Rd pkg/FactorAnalytics/man/modifiedEsReport.Rd pkg/FactorAnalytics/man/modifiedIncrementalES.Rd pkg/FactorAnalytics/man/modifiedIncrementalVaR.Rd pkg/FactorAnalytics/man/modifiedPortfolioEsDecomposition.Rd pkg/FactorAnalytics/man/modifiedPortfolioVaRDecomposition.Rd pkg/FactorAnalytics/man/modifiedVaRReport.Rd pkg/FactorAnalytics/man/nonparametricEsReport.Rd pkg/FactorAnalytics/man/nonparametricIncrementalES.Rd pkg/FactorAnalytics/man/nonparametricIncrementalVaR.Rd pkg/FactorAnalytics/man/nonparametricPortfolioEsDecomposition.Rd pkg/FactorAnalytics/man/nonparametricPortfolioVaRDecomposition.Rd pkg/FactorAnalytics/man/nonparametricVaRReport.Rd pkg/FactorAnalytics/man/normalEsReport.Rd pkg/FactorAnalytics/man/normalIncrementalES.Rd pkg/FactorAnalytics/man/normalIncrementalVaR.Rd pkg/FactorAnalytics/man/normalPortfolioEsDecomposition.Rd pkg/FactorAnalytics/man/normalPortfolioVaRDecomposition.Rd pkg/FactorAnalytics/man/normalVaRReport.Rd pkg/FactorAnalytics/man/plot.FM.attribution.Rd pkg/FactorAnalytics/man/plot.FundamentalFactorModel.Rd pkg/FactorAnalytics/man/plot.MacroFactorModel.Rd pkg/FactorAnalytics/man/plot.StatFactorModel.Rd pkg/FactorAnalytics/man/portfolioSdDecomposition.Rd pkg/FactorAnalytics/man/print.MacroFactorModel.Rd pkg/FactorAnalytics/man/print.StatFactorModel.Rd pkg/FactorAnalytics/man/scenarioPredictions.Rd pkg/FactorAnalytics/man/scenarioPredictionsPortfolio.Rd pkg/FactorAnalytics/man/stat.fm.data.Rd pkg/FactorAnalytics/man/stock.Rd pkg/FactorAnalytics/man/summary.FM.attribution.Rd pkg/FactorAnalytics/man/summary.MacroFactorModel.Rd Log: - roxygenize all existing package documentation - regenerate all Rd files using roxygen2 Modified: pkg/FactorAnalytics/R/chart.Style.R =================================================================== --- pkg/FactorAnalytics/R/chart.Style.R 2013-06-18 00:56:27 UTC (rev 2350) +++ pkg/FactorAnalytics/R/chart.Style.R 2013-06-18 09:52:27 UTC (rev 2351) @@ -1,3 +1,128 @@ +#' calculate and display effective style weights +#' +#' Functions that calculate effective style weights and display the results in +#' a bar chart. 
\code{chart.Style} calculates and displays style weights +#' calculated over a single period. \code{chart.RollingStyle} calculates and +#' displays those weights in rolling windows through time. \code{style.fit} +#' manages the calculation of the weights by method. \code{style.QPfit} +#' calculates the specific constraint case that requires quadratic programming. +#' +#' These functions calculate style weights using an asset class style model as +#' described in detail in Sharpe (1992). The use of quadratic programming to +#' determine a fund's exposures to the changes in returns of major asset +#' classes is usually referred to as "style analysis". +#' +#' The "unconstrained" method implements a simple factor model for style +#' analysis, as in: \deqn{Ri = bi1*F1+bi2*F2+...+bin*Fn+ei}{R_i = +#' b_{i1}F_1+b_{i2}F_2+\dots+b_{in}F_n +e_i} where \eqn{Ri}{R_i} represents the +#' return on asset i, \eqn{Fj}{F_j} represents each factor, and \eqn{ei}{e_i} +#' represents the "non-factor" component of the return on i. This is simply a +#' multiple regression analysis with fund returns as the dependent variable and +#' asset class returns as the independent variables. The resulting slope +#' coefficients are then interpreted as the fund's historic exposures to asset +#' class returns. In this case, coefficients do not sum to 1. +#' +#' The "normalized" method reports the results of a multiple regression +#' analysis similar to the first, but with one constraint: the coefficients are +#' required to add to 1. Coefficients may be negative, indicating short +#' exposures. To enforce the constraint, coefficients are normalized. +#' +#' The "constrained" method includes the constraint that the coefficients sum +#' to 1, but adds that the coefficients must lie between 0 and 1. These +#' inequality constraints require a quadratic programming algorithm using +#' \code{\link[quadprog]{solve.QP}} from the 'quadprog' package, and the +#' implementation is discussed under \code{\link{style.QPfit}}. If set to +#' TRUE, "leverage" allows the sum of the coefficients to exceed 1. +#' +#' According to Sharpe (1992), the calculation for the constrained case is +#' represented as: \deqn{min var(Rf - sum[wi * R.si]) = min var(F - w*S)}{min +#' \sigma(R_f - \sum{w_i * R_s_i}) = min \sigma(F - w*S)} \deqn{s.t. sum[wi] = +#' 1; wi > 0}{ s.t. \sum{w_i} = 1; w_i > 0} +#' +#' Remembering that: +#' +#' \deqn{\sigma(aX + bY) = a^2 \sigma(X) + b^2 \sigma(Y) + 2ab cov(X,Y) = +#' \sigma(R.f) + w'*V*w - 2*w'*cov(R.f,R.s)} +#' +#' we can drop \eqn{var(Rf)}{\sigma(R_f)} as it isn't a function of the weights, +#' and multiply both sides by 1/2: +#' +#' \deqn{= min (1/2) w'*V*w - C'w}{= min (1/2) w'*V*w - C'w} \deqn{ s.t. w'*e = +#' 1, w_i > 0}{ s.t. w'*e = 1, w_i > 0} +#' +#' This allows us to use \code{\link[quadprog]{solve.QP}}, which is specified +#' as: \deqn{min(-d' b + 1/2 b' D b)}{min(-d' b + 1/2 b' D b)} and the +#' constraints \deqn{ A' b >= b.0 }{ A' b >= b_0 } +#' +#' so: b is the weight vector, D is the variance-covariance matrix of the +#' styles, and d is the covariance vector between the fund and the styles. +#' +#' The chart functions then provide a graphical summary of the results. The +#' underlying function, \code{\link{style.fit}}, provides the outputs of the +#' analysis and more information about fit, including an R-squared value. +#' +#' Styles identified in this analysis may be interpreted as an average of +#' potentially changing exposures over the period covered.
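+#' As a hedged illustration of the solve.QP mapping described above (a sketch only, not the package implementation; it assumes R.fund is a single-column return matrix aligned with R.style and that neither contains missing values):
+#' D <- cov(R.style)
+#' d <- as.vector(cov(R.style, R.fund))
+#' A <- cbind(rep(1, ncol(R.style)), diag(ncol(R.style)))
+#' sol <- quadprog::solve.QP(Dmat=D, dvec=d, Amat=A, bvec=c(1, rep(0, ncol(R.style))), meq=1)
+#' sol$solution # constrained style weights: sum to 1, each >= 0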
The function +#' \code{\link{chart.RollingStyle}} may be useful for examining the behavior of +#' a manager's average exposures to asset classes over time, using a +#' rolling-window analysis. +#' +#' The chart functions plot a column chart or stacked column chart of the +#' resulting style weights to the current device. Both \code{style.fit} and +#' \code{style.QPfit} produce a list of data frames containing 'weights' and +#' 'R.squared' results. If 'model' = TRUE in \code{style.QPfit}, the full +#' result set is shown from the output of \code{solve.QP}. +#' +#' @aliases chart.Style chart.RollingStyle table.RollingStyle style.fit +#' style.QPfit +#' @param R.fund matrix, data frame, or zoo object with fund returns to be +#' analyzed +#' @param R.style matrix, data frame, or zoo object with style index returns. +#' Data object must be of the same length and time-aligned with R.fund +#' @param method specify the method of calculation of style weights as +#' "constrained", "unconstrained", or "normalized". For more information, see +#' \code{\link{style.fit}} +#' @param leverage logical, defaults to 'FALSE'. If 'TRUE', the calculation of +#' weights assumes that leverage may be used. For more information, see +#' \code{\link{style.fit}} +#' @param model logical. If 'model' = TRUE in \code{\link{style.QPfit}}, the +#' full result set is shown from the output of \code{solve.QP}. +#' @param selection either "none" (default) or "AIC". If "AIC", then the +#' function uses a stepwise regression to find the model with minimum +#' AIC value. See \code{\link{step}} for more detail. +#' @param unstacked logical. If set to 'TRUE' \emph{and} only one row of data +#' is submitted in 'w', then the chart creates a normal column chart. If more +#' than one row is submitted, then this is ignored. See examples below. +#' @param space the amount of space (as a fraction of the average bar width) +#' left before each bar, as in \code{\link{barplot}}. Default for +#' \code{chart.RollingStyle} is 0; for \code{chart.Style} the default is 0.2. +#' @param main set the chart title, same as in \code{\link{plot}} +#' @param width number of periods or window to apply rolling style analysis +#' over +#' @param ylim set the y-axis limit, same as in \code{\link{plot}} +#' @param \dots for the charting functions, these are arguments to be passed to +#' \code{\link{barplot}}. These can include further arguments (such as 'axes', +#' 'asp' and 'main') and graphical parameters (see 'par') which are passed to +#' 'plot.window()', 'title()' and 'axis'. For the calculation functions, these +#' are ignored. +#' @note None of the functions \code{chart.Style}, \code{style.fit}, and +#' \code{style.QPfit} make any attempt to align the two input data series. The +#' \code{chart.RollingStyle}, on the other hand, does merge the two series and +#' manages the calculation over common periods. +#' @author Peter Carl +#' @seealso \code{\link{barplot}}, \code{\link{par}} +#' @references Sharpe, W. Asset Allocation: Management Style and Performance +#' Measurement, Journal of Portfolio Management, 1992, 7-19.
See \url{ +#' http://www.stanford.edu/~wfsharpe/art/sa/sa.htm} +#' @keywords ts multivariate hplot +#' @examples +#' +#' data(edhec) +#' data(managers) +#' style.fit(managers[97:132,2,drop=FALSE],edhec[85:120,], method="constrained", leverage=FALSE) +#' chart.Style(managers[97:132,2,drop=FALSE],edhec[85:120,], method="constrained", leverage=FALSE, unstack=TRUE, las=3) +#' chart.RollingStyle(managers[,2,drop=FALSE],edhec[,1:11], method="constrained", leverage=FALSE, width=36, cex.legend = .7, colorset=rainbow12equal, las=1) +#' `chart.Style` <- function (R.fund, R.style, method = c("constrained", "unconstrained", "normalized"), leverage = FALSE, main = NULL, ylim = NULL, unstacked=TRUE, ...) { # @author Peter Carl Modified: pkg/FactorAnalytics/R/covEWMA.R =================================================================== --- pkg/FactorAnalytics/R/covEWMA.R 2013-06-18 00:56:27 UTC (rev 2350) +++ pkg/FactorAnalytics/R/covEWMA.R 2013-06-18 09:52:27 UTC (rev 2351) @@ -1,50 +1,79 @@ -covEWMA <- -function(factors, lambda=0.96, return.cor=FALSE) { -## Inputs: -## factors N x K numerical factors data. data is class data.frame -## N is the time length and K is the number of the factors. -## lambda scalar. exponetial decay factor between 0 and 1. -## return.cor Logical, if TRUE then return EWMA correlation matrices -## Output: -## cov.f.ewma array. dimension is N x K x K. -## comments: -## 1. add optional argument cov.start to specify initial covariance matrix -## 2. allow data input to be data class to be any rectangular data object - - -if (is.data.frame(factors)){ - factor.names = colnames(factors) - t.factor = nrow(factors) - k.factor = ncol(factors) - factors = as.matrix(factors) - t.names = rownames(factors) -} else { - stop("factor data should be saved in data.frame class.") -} -if (lambda>=1 || lambda <= 0){ - stop("exponential decay value lambda should be between 0 and 1.") -} else { - cov.f.ewma = array(,c(t.factor,k.factor,k.factor)) - cov.f = var(factors) # unconditional variance as EWMA at time = 0 - FF = (factors[1,]- mean(factors)) %*% t(factors[1,]- mean(factors)) - cov.f.ewma[1,,] = (1-lambda)*FF + lambda*cov.f - for (i in 2:t.factor) { - FF = (factors[i,]- mean(factors)) %*% t(factors[i,]- mean(factors)) - cov.f.ewma[i,,] = (1-lambda)*FF + lambda*cov.f.ewma[(i-1),,] - } - -} - # 9/15/11: add dimnames to array - dimnames(cov.f.ewma) = list(t.names, factor.names, factor.names) - - if(return.cor) { - cor.f.ewma = cov.f.ewma - for (i in 1:dim(cor.f.ewma)[1]) { - cor.f.ewma[i, , ] = cov2cor(cov.f.ewma[i, ,]) - } - return(cor.f.ewma) - } else{ - return(cov.f.ewma) - } -} - +#' Compute RiskMetrics-type EWMA Covariance Matrix +#' +#' Compute time series of RiskMetrics-type EWMA covariance matrices of returns. +#' Initial covariance matrix is assumed to be the unconditional covariance +#' matrix. +#' +#' The EWMA covariance matrix at time \code{t} is compute as \cr \code{Sigma(t) +#' = lambda*Sigma(t-1) + (1-lambda)*R(t)t(R(t))} \cr where \code{R(t)} is the +#' \code{K x 1} vector of returns at time \code{t}. +#' +#' @param factors \code{T x K} data.frame containing asset returns, where +#' \code{T} is the number of time periods and \code{K} is the number of assets. +#' @param lambda Scalar exponential decay factor. Must lie between between 0 +#' and 1. +#' @param return.cor Logical, if TRUE then return EWMA correlation matrices. 
+#' @return \code{T x K x K} array giving the time series of EWMA covariance +#' matrices if \code{return.cor=FALSE} and EWMA correlation matrices if +#' \code{return.cor=TRUE}. +#' @author Eric Zivot and Yi-An Chen. +#' @references Zivot, E. and J. Wang (2006), \emph{Modeling Financial Time +#' Series with S-PLUS, Second Edition}, Springer-Verlag. +#' @examples +#' +#' # compute time vaying covariance of factors. +#' data(managers.df) +#' factors = managers.df[,(7:9)] +#' cov.f.ewma <- covEWMA(factors) +#' cov.f.ewma[120,,] +#' +covEWMA <- +function(factors, lambda=0.96, return.cor=FALSE) { +## Inputs: +## factors N x K numerical factors data. data is class data.frame +## N is the time length and K is the number of the factors. +## lambda scalar. exponetial decay factor between 0 and 1. +## return.cor Logical, if TRUE then return EWMA correlation matrices +## Output: +## cov.f.ewma array. dimension is N x K x K. +## comments: +## 1. add optional argument cov.start to specify initial covariance matrix +## 2. allow data input to be data class to be any rectangular data object + + +if (is.data.frame(factors)){ + factor.names = colnames(factors) + t.factor = nrow(factors) + k.factor = ncol(factors) + factors = as.matrix(factors) + t.names = rownames(factors) +} else { + stop("factor data should be saved in data.frame class.") +} +if (lambda>=1 || lambda <= 0){ + stop("exponential decay value lambda should be between 0 and 1.") +} else { + cov.f.ewma = array(,c(t.factor,k.factor,k.factor)) + cov.f = var(factors) # unconditional variance as EWMA at time = 0 + FF = (factors[1,]- mean(factors)) %*% t(factors[1,]- mean(factors)) + cov.f.ewma[1,,] = (1-lambda)*FF + lambda*cov.f + for (i in 2:t.factor) { + FF = (factors[i,]- mean(factors)) %*% t(factors[i,]- mean(factors)) + cov.f.ewma[i,,] = (1-lambda)*FF + lambda*cov.f.ewma[(i-1),,] + } + +} + # 9/15/11: add dimnames to array + dimnames(cov.f.ewma) = list(t.names, factor.names, factor.names) + + if(return.cor) { + cor.f.ewma = cov.f.ewma + for (i in 1:dim(cor.f.ewma)[1]) { + cor.f.ewma[i, , ] = cov2cor(cov.f.ewma[i, ,]) + } + return(cor.f.ewma) + } else{ + return(cov.f.ewma) + } +} + Modified: pkg/FactorAnalytics/R/factorModelCovariance.r =================================================================== --- pkg/FactorAnalytics/R/factorModelCovariance.r 2013-06-18 00:56:27 UTC (rev 2350) +++ pkg/FactorAnalytics/R/factorModelCovariance.r 2013-06-18 09:52:27 UTC (rev 2351) @@ -1,28 +1,61 @@ -factorModelCovariance <- -function(beta.mat, factor.cov, residVars.vec) { -## Inputs: -## beta.mat n x k matrix of factor betas -## factor.cov k x k factor return covariance matrix -## residVars.vec n x 1 vector of residual variances from factor model -## Output: -## cov.fm n x n return covariance matrix based on -## estimated factor model. - beta.mat = as.matrix(beta.mat) - factor.cov = as.matrix(factor.cov) - sig.e = as.vector(residVars.vec) - if (length(sig.e) > 1) { - D.e = diag(as.vector(sig.e)) - } else { - D.e = as.matrix(sig.e) - } - if (ncol(beta.mat) != ncol(factor.cov)) - stop("beta.mat and factor.cov must have same number of columns") - - if (nrow(D.e) != nrow(beta.mat)) - stop("beta.mat and D.e must have same number of rows") - cov.fm = beta.mat %*% factor.cov %*% t(beta.mat) + D.e - if (any(diag(chol(cov.fm)) == 0)) - warning("Covariance matrix is not positive definite") - return(cov.fm) -} - +#' Compute Factor Model Covariance Matrix. +#' +#' Compute asset return covariance matrix from factor model parameters. 
+#' +#' The return on asset \code{i} (\code{i = 1,...,N}) is assumed to follow the +#' factor model \cr \code{R(i,t) = alpha + t(beta)*F(t) + e(i,t), e(i,t) ~ iid +#' (0, sig(i)^2)} \cr where \code{beta} is a \code{K x 1} vector of factor +#' exposures. The return variance is then \cr \code{var(R(i,t) = +#' t(beta)*var(F(t))*beta + sig(i)^2}, \cr and the \code{N x N} covariance +#' matrix of the return vector \code{R} is \cr \code{var(R) = B*var(F(t))*t(B) +#' + D} \cr where B is the \code{N x K} matrix of asset betas and \code{D} is a +#' diagonal matrix with \code{sig(i)^2} values along the diagonal. +#' +#' @param beta.mat \code{N x K} matrix of factor betas, where \code{N} is the +#' number of assets and \code{K} is the number of factors. +#' @param factor.cov \code{K x K} factor return covariance matrix. +#' @param residVars.vec \code{N x 1} vector of asset specific residual +#' variances from the factor model. +#' @return \code{N x N} return covariance matrix based on factor model +#' parameters. +#' @author Eric Zivot and Yi-An Chen. +#' @references Zivot, E. and J. Wang (2006), \emph{Modeling Financial Time +#' Series with S-PLUS, Second Edition}, Springer-Verlag. +#' @examples +#' +#' # factorModelCovariance +#' data(managers.df) +#' factors = managers.df[,(7:9)] +#' ret.assets = managers.df[,(1:6)] +#' fit <-fitMacroeconomicFactorModel(ret.assets,factors,fit.method="OLS", +#' variable.selection="all subsets", factor.set = 3) +#' factorModelCovariance(fit$beta.mat,var(factors),fit$residVars.vec) +#' +factorModelCovariance <- +function(beta.mat, factor.cov, residVars.vec) { +## Inputs: +## beta.mat n x k matrix of factor betas +## factor.cov k x k factor return covariance matrix +## residVars.vec n x 1 vector of residual variances from factor model +## Output: +## cov.fm n x n return covariance matrix based on +## estimated factor model. + beta.mat = as.matrix(beta.mat) + factor.cov = as.matrix(factor.cov) + sig.e = as.vector(residVars.vec) + if (length(sig.e) > 1) { + D.e = diag(as.vector(sig.e)) + } else { + D.e = as.matrix(sig.e) + } + if (ncol(beta.mat) != ncol(factor.cov)) + stop("beta.mat and factor.cov must have same number of columns") + + if (nrow(D.e) != nrow(beta.mat)) + stop("beta.mat and D.e must have same number of rows") + cov.fm = beta.mat %*% factor.cov %*% t(beta.mat) + D.e + if (any(diag(chol(cov.fm)) == 0)) + warning("Covariance matrix is not positive definite") + return(cov.fm) +} + Modified: pkg/FactorAnalytics/R/factorModelEsDecomposition.R =================================================================== --- pkg/FactorAnalytics/R/factorModelEsDecomposition.R 2013-06-18 00:56:27 UTC (rev 2350) +++ pkg/FactorAnalytics/R/factorModelEsDecomposition.R 2013-06-18 09:52:27 UTC (rev 2351) @@ -1,79 +1,140 @@ -factorModelEsDecomposition <- -function(Data, beta.vec, sig2.e, tail.prob = 0.05) { -## Compute factor model factor ES decomposition based on Euler's theorem given historic -## or simulated data and factor model parameters. -## The partial derivative of ES wrt factor beta is computed -## as the expected factor return given fund return is less than or equal to its VaR -## VaR is compute either as the sample quantile or as an estimated quantile -## using the Cornish-Fisher expansion -## inputs: -## Data B x (k+2) matrix of data. First column contains the fund returns, -## second through k+1 columns contain factor returns, (k+2)nd column contain residuals -## scaled to have variance 1. 
-## beta.vec k x 1 vector of factor betas -## sig2.e scalar, residual variance from factor model -## tail.prob scalar tail probability -## output: -## A list with the following components: -## VaR scalar, nonparametric VaR value for fund reported as a positive number -## n.exceed scalar, number of observations beyond VaR -## idx.exceed n.exceed x 1 vector giving index values of exceedences -## ES scalar, nonparametric ES value for fund reported as a positive number -## mcES k+1 x 1 vector of factor marginal contributions to ES -## cES k+1 x 1 vector of factor component contributions to ES -## pcES k+1 x 1 vector of factor percent contributions to ES -## Remarks: -## The factor model has the form -## R(t) = beta'F(t) + e(t) = beta.star'F.star(t) -## where beta.star = (beta, sig.e)' and F.star(t) = (F(t)', z(t))' -## By Euler's theorem -## ES.fm = sum(cES.fm) = sum(beta.star*mcES.fm) -## References: -## 1. Hallerback (2003), "Decomposing Portfolio Value-at-Risk: A General Analysis", -## The Journal of Risk 5/2. -## 2. Yamai and Yoshiba (2002). "Comparative Analyses of Expected Shortfall and -## Value-at-Risk: Their Estimation Error, Decomposition, and Optimization -## Bank of Japan. -## 3. Meucci (2007). "Risk Contributions from Generic User-Defined Factors," Risk. - Data = as.matrix(Data) - ncol.Data = ncol(Data) - if(is.matrix(beta.vec)) { - beta.names = c(rownames(beta.vec), "residual") - } else if(is.vector(beta.vec)) { - beta.names = c(names(beta.vec), "residual") - } else { - stop("beta.vec is not an n x 1 matrix or a vector") - } - beta.names = c(names(beta.vec), "residual") - beta.star.vec = c(beta.vec, sqrt(sig2.e)) - names(beta.star.vec) = beta.names - - VaR.fm = quantile(Data[, 1], prob=tail.prob) - idx = which(Data[, 1] <= VaR.fm) - ES.fm = -mean(Data[idx, 1]) - - ## - ## compute marginal contribution to ES - ## - ## compute marginal ES as expected value of factor return given fund - ## return is less than or equal to VaR - mcES.fm = -as.matrix(colMeans(Data[idx, -1])) - -## compute correction factor so that sum of weighted marginal ES adds to portfolio ES -#cf = as.numeric( ES.fm / sum(mcES.fm*beta.star.vec) ) -#mcES.fm = cf*mcES.fm -cES.fm = mcES.fm*beta.star.vec -pcES.fm = cES.fm/ES.fm -colnames(mcES.fm) = "MCES" -colnames(cES.fm) = "CES" -colnames(pcES.fm) = "PCES" -ans = list(VaR = -VaR.fm, - n.exceed = length(idx), - idx.exceed = idx, - ES = ES.fm, - mcES = t(mcES.fm), - cES = t(cES.fm), - pcES = t(pcES.fm)) -return(ans) -} - +#' Compute Factor Model Factor ES Decomposition +#' +#' Compute the factor model factor expected shortfall (ES) decomposition for an +#' asset based on Euler's theorem given historic or simulated data and factor +#' model parameters. The partial derivative of ES with respect to factor beta +#' is computed as the expected factor return given fund return is less than or +#' equal to its value-at-risk (VaR). VaR is compute as the sample quantile of +#' the historic or simulated data. +#' +#' The factor model has the form \cr \code{R(t) = t(beta)*F(t) + e(t) = +#' t(beta.star)*F.star(t)} \cr where \code{beta.star = t(beta, sig.e)} and +#' \code{F.star(t) = (t(F(t)), t(z(t)))} By Euler's theorem \cr \code{ES.fm = +#' sum(cES.fm) = sum(beta.star*mcES.fm)} \cr +#' +#' @param Data \code{B x (k+2)} matrix of historic or simulated data. The first +#' column contains the fund returns, the second through \code{k+1}st columns +#' contain the returns on the \code{k} factors, and the \code{(k+2)}nd column +#' contain residuals scaled to have unit variance. 
+#' @param beta.vec \code{k x 1} vector of factor betas. +#' @param sig2.e scalar, residual variance from factor model. +#' @param tail.prob scalar, tail probability for VaR quantile. Typically 0.01 +#' or 0.05. +#' @return A list with the following components: +#' @returnItem VaR Scalar, nonparametric VaR value for fund reported as a +#' positive number. +#' @returnItem n.exceed Scalar, number of observations beyond VaR. +#' @returnItem idx.exceed \code{n.exceed x 1} vector giving index values of +#' exceedences. +#' @returnItem ES scalar, nonparametric ES value for fund reported as a +#' positive number. +#' @returnItem mcES \code{(K+1) x 1} vector of factor marginal contributions to +#' ES. +#' @returnItem cES \code{(K+1) x 1} vector of factor component contributions to +#' ES. +#' @returnItem pcES \code{(K+1) x 1} vector of factor percent contributions to +#' ES. +#' @author Eric Zviot and Yi-An Chen. +#' @references 1. Hallerback (2003), "Decomposing Portfolio Value-at-Risk: A +#' General Analysis", \emph{The Journal of Risk} 5/2. \cr 2. Yamai and Yoshiba +#' (2002). "Comparative Analyses of Expected Shortfall and Value-at-Risk: Their +#' Estimation Error, Decomposition, and Optimization", Bank of Japan. \cr 3. +#' Meucci (2007). "Risk Contributions from Generic User-Defined Factors," +#' \emph{Risk}. +#' @examples +#' +#' data(managers.df) +#' ret.assets = managers.df[,(1:6)] +#' factors = managers.df[,(7:9)] +#' # fit the factor model with OLS +#' fit <- fitMacroeconomicFactorModel(ret.assets,factors,fit.method="OLS", +#' variable.selection="all subsets",factor.set=3) +#' # risk factor contribution to ETL +#' # combine fund returns, factor returns and residual returns for HAM1 +#' tmpData = cbind(ret.assets[,1], factors, +#' residuals(fit$asset.fit$HAM1)/sqrt(fit$residVars.vec[1])) +#' colnames(tmpData)[c(1,5)] = c("HAM1", "residual") +#' factor.es.decomp.HAM1 = factorModelEsDecomposition(tmpData, fit$beta.mat[1,], +#' fit$residVars.vec[1], tail.prob=0.05) +#' +#' +#' +factorModelEsDecomposition <- +function(Data, beta.vec, sig2.e, tail.prob = 0.05) { +## Compute factor model factor ES decomposition based on Euler's theorem given historic +## or simulated data and factor model parameters. +## The partial derivative of ES wrt factor beta is computed +## as the expected factor return given fund return is less than or equal to its VaR +## VaR is compute either as the sample quantile or as an estimated quantile +## using the Cornish-Fisher expansion +## inputs: +## Data B x (k+2) matrix of data. First column contains the fund returns, +## second through k+1 columns contain factor returns, (k+2)nd column contain residuals +## scaled to have variance 1. 
+## beta.vec k x 1 vector of factor betas +## sig2.e scalar, residual variance from factor model +## tail.prob scalar tail probability +## output: +## A list with the following components: +## VaR scalar, nonparametric VaR value for fund reported as a positive number +## n.exceed scalar, number of observations beyond VaR +## idx.exceed n.exceed x 1 vector giving index values of exceedences +## ES scalar, nonparametric ES value for fund reported as a positive number +## mcES k+1 x 1 vector of factor marginal contributions to ES +## cES k+1 x 1 vector of factor component contributions to ES +## pcES k+1 x 1 vector of factor percent contributions to ES +## Remarks: +## The factor model has the form +## R(t) = beta'F(t) + e(t) = beta.star'F.star(t) +## where beta.star = (beta, sig.e)' and F.star(t) = (F(t)', z(t))' +## By Euler's theorem +## ES.fm = sum(cES.fm) = sum(beta.star*mcES.fm) +## References: +## 1. Hallerback (2003), "Decomposing Portfolio Value-at-Risk: A General Analysis", +## The Journal of Risk 5/2. +## 2. Yamai and Yoshiba (2002). "Comparative Analyses of Expected Shortfall and +## Value-at-Risk: Their Estimation Error, Decomposition, and Optimization +## Bank of Japan. +## 3. Meucci (2007). "Risk Contributions from Generic User-Defined Factors," Risk. + Data = as.matrix(Data) + ncol.Data = ncol(Data) + if(is.matrix(beta.vec)) { + beta.names = c(rownames(beta.vec), "residual") + } else if(is.vector(beta.vec)) { + beta.names = c(names(beta.vec), "residual") + } else { + stop("beta.vec is not an n x 1 matrix or a vector") + } + beta.names = c(names(beta.vec), "residual") + beta.star.vec = c(beta.vec, sqrt(sig2.e)) + names(beta.star.vec) = beta.names + + VaR.fm = quantile(Data[, 1], prob=tail.prob) + idx = which(Data[, 1] <= VaR.fm) + ES.fm = -mean(Data[idx, 1]) + + ## + ## compute marginal contribution to ES + ## + ## compute marginal ES as expected value of factor return given fund + ## return is less than or equal to VaR + mcES.fm = -as.matrix(colMeans(Data[idx, -1])) + +## compute correction factor so that sum of weighted marginal ES adds to portfolio ES +#cf = as.numeric( ES.fm / sum(mcES.fm*beta.star.vec) ) +#mcES.fm = cf*mcES.fm +cES.fm = mcES.fm*beta.star.vec +pcES.fm = cES.fm/ES.fm +colnames(mcES.fm) = "MCES" +colnames(cES.fm) = "CES" +colnames(pcES.fm) = "PCES" +ans = list(VaR = -VaR.fm, + n.exceed = length(idx), + idx.exceed = idx, + ES = ES.fm, + mcES = t(mcES.fm), + cES = t(cES.fm), + pcES = t(pcES.fm)) +return(ans) +} + Modified: pkg/FactorAnalytics/R/factorModelMonteCarlo.R =================================================================== --- pkg/FactorAnalytics/R/factorModelMonteCarlo.R 2013-06-18 00:56:27 UTC (rev 2350) +++ pkg/FactorAnalytics/R/factorModelMonteCarlo.R 2013-06-18 09:52:27 UTC (rev 2351) @@ -1,109 +1,194 @@ -factorModelMonteCarlo <- -function(n.boot=1000, factorData, Beta.mat, Alpha.mat=NULL, - residualData, residual.dist = c("normal", "Cornish-Fisher", "skew-t"), - boot.method = c("random", "block"), - seed=123, return.factors= FALSE , return.residuals= FALSE ) { -## inputs: -## n.boot number of bootstrap samples -## factorData n.months x n.funds matrix or data.frame of factor returns -## Beta.mat n.funds x n.factors matrix of factor betas -## Alpha.mat n.funds x 1 matrix of factor alphas (intercepts). If NULL then -## assume that all alphas are zero. -## residualData n.funds x n.parms matrix of residual distribution parameters. The -## columns of residualData depend on the value of residual.dist. 
If -## residual.dist = "normal", then residualData has one column vector -## containing variance values; if residual.dist = "Cornish-Fisher", -## then residualData has three columns containing variance, -## skewness and excess kurtosis values; if residual.dist="skew-t", -## then residualData has four columns containing location, scale, -## shape and df values. -## residual.dist character vector specifying the residual distribution. Choices are -## "normal" for the normal distribution; "Cornish-Fisher" for the -## Cornish-Fisher distribution based on the Cornish-Fisher expansion -## of the normal distribution quantile; "skew-t" for the skewed Student's -## t distribution of Azzalini and Captiano. -## boot.method character vector specifying the resampling method. Choices are -## "random" for random sampling with replacement (non-parametric bootstrap); -## "block" for stationary block bootstrapping. -## seed integer random number seed. -## return.factors logical; if TRUE then return resampled factors -## return.residuals logical; if TRUE then return simulated residuals -## -## output: A list with the following components: -## returns n.boot x n.funds matrix of simulated fund returns -## factors n.boot x n.factors matrix of resampled factor returns. Returned -## only if return.factors = TRUE. -## residuals n.boot x n.funds matrix of simulated fund residuals. Returned only -## if return.residuals = TRUE. - require(tseries) # for function tsbootstrap() - require(sn) # for function rst() - boot.method = boot.method[1] - residual.dist = residual.dist[1] - set.seed(seed) - if (nrow(Beta.mat) != nrow(residualData)) { - stop("Beta.mat and residualData have different number of rows") - } - factorData = as.matrix(factorData) - n.funds = nrow(Beta.mat) - fund.names = rownames(Beta.mat) - if (is.null(Alpha.mat)) { - Alpha.mat = matrix(0, nrow(Beta.mat)) - rownames(Alpha.mat) = fund.names - } -## -## reseample from empirical distribution of factors -## - if (boot.method == "random") { - bootIdx = sample(nrow(factorData), n.boot, replace=TRUE) - } else { - n.samples = round(n.boot/nrow(factorData)) - n.adj = n.boot - n.samples*nrow(factorData) - bootIdx = as.vector(tsbootstrap(1:nrow(factorData), nb=n.samples)) - if (n.adj > 0) { -## need to make sure that length(bootIdx) = n.boot - bootIdx = c(bootIdx, bootIdx[1:n.adj]) - } - } - factorDataBoot = factorData[bootIdx, ] -## -## run factor model Monte Carlo loop over funds -## - fundReturnsBoot = matrix(0, n.boot, n.funds) - residualsSim = matrix(0, n.boot, n.funds) - colnames(fundReturnsBoot) = colnames(residualsSim) = fund.names - for (i in fund.names) { - ## set random number seed for fund specific residual simulations - set.seed(which(fund.names == i)) - ## simulate from residual distributions - if (residual.dist == "normal") { - residualsSim[, i] = rnorm(n.boot, sd=sqrt(residualData[i,])) - } else if (residual.dist == "Cornish-Fisher") { - ## residual distribution is CornishFisher - residualsSim[, i] = rCornishFisher(n.boot, - sigma=sqrt(residualData[i,"var"]), - skew=residualData[i,"skew"], - ekurt=residualData[i,"ekurt"]) - } else if (residual.dist == "skew-t") { - ## residual distribution is CornishFisher - residualsSim[, i] = rst(n.boot, - location=residualData[i, "location"], - scale=residualData[i,"scale"], - shape=residualData[i,"shape"], - df=residualData[i,"df"]) - } else { - stop("Invalid residual distribution") - } - ## simulated fund returns - fundReturnsBoot[, i] = Alpha.mat[i,1] + factorDataBoot[, colnames(Beta.mat)] %*% t(Beta.mat[i, 
,drop=FALSE]) + residualsSim[, i] - } # end loop over funds - - ans = list(returns=fundReturnsBoot) - if (return.factors) { - ans$factors=factorDataBoot - } - if (return.residuals) { - ans$residuals=residualsSim - } - return(ans) -} - +#' Simulate returns using factor model Monte Carlo method. +#' +#' Simulate returns using factor model Monte Carlo method. Parametric method +#' like normal distribution, Cornish-Fisher and skew-t distribution for +#' residuals can be selected. Resampling method like non-parametric bootstrap +#' or stationary bootstrap can be selected. +#' +#' The factor model Monte Carlo method is described in Jiang (2009). +#' +#' @param n.boot Integer number of bootstrap samples. +#' @param factorData \code{n.months x n.funds} matrix or data.frame of factor +#' returns. +#' @param Beta.mat \code{n.funds x n.factors} matrix of factor betas. +#' @param Alpha.mat \code{n.funds x 1} matrix of factor alphas (intercepts). If +#' \code{NULL} then assume that all alphas are zero. +#' @param residualData \code{n.funds x n.parms} matrix of residual distribution [TRUNCATED] To get the complete diff run: svnlook diff /svnroot/returnanalytics -r 2351 From noreply at r-forge.r-project.org Tue Jun 18 14:29:27 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Tue, 18 Jun 2013 14:29:27 +0200 (CEST) Subject: [Returnanalytics-commits] r2352 - in pkg/Meucci: . R man Message-ID: <20130618122928.0C76A1852A5@r-forge.r-project.org> Author: xavierv Date: 2013-06-18 14:29:27 +0200 (Tue, 18 Jun 2013) New Revision: 2352 Added: pkg/Meucci/R/LognormalParameters2Statistics.R pkg/Meucci/inst/ pkg/Meucci/man/LognormalMoments2Parameters.Rd pkg/Meucci/man/LognormalParam2Statistics.Rd Modified: pkg/Meucci/DESCRIPTION pkg/Meucci/NAMESPACE Log: - added more demos for Meucci's book chapter 2 and the functions required. Problems with 3d histogram axis representation Modified: pkg/Meucci/DESCRIPTION =================================================================== --- pkg/Meucci/DESCRIPTION 2013-06-18 09:52:27 UTC (rev 2351) +++ pkg/Meucci/DESCRIPTION 2013-06-18 12:29:27 UTC (rev 2352) @@ -59,3 +59,5 @@ 'Prior2Posterior.R' 'RankingInformation.R' 'RobustBayesianAllocation.R' + 'LognormalMoments2Parameters.R' + 'LognormalParameters2Statistics.R' Modified: pkg/Meucci/NAMESPACE =================================================================== --- pkg/Meucci/NAMESPACE 2013-06-18 09:52:27 UTC (rev 2351) +++ pkg/Meucci/NAMESPACE 2013-06-18 12:29:27 UTC (rev 2352) @@ -11,6 +11,8 @@ export(hermitePolynomial) export(integrateSubIntervals) export(linreturn) +export(LognormalMoments2Parameters) +export(LognormalParam2Statistics) export(MvnRnd) export(NoisyObservations) export(normalizeProb) Added: pkg/Meucci/R/LognormalParameters2Statistics.R =================================================================== --- pkg/Meucci/R/LognormalParameters2Statistics.R (rev 0) +++ pkg/Meucci/R/LognormalParameters2Statistics.R 2013-06-18 12:29:27 UTC (rev 2352) @@ -0,0 +1,29 @@ +#' Compute expectation, Cov, standard deviation and Corr for a lognormal distribution, as described in +#' A. 
Meucci "Risk and Asset Allocation", Springer, 2005 +#' +#' @param Mu : [vector] (N x 1) location parameter +#' @param Sigma : [matrix] (N x N) scale parameter +#' +#' +#' @return Exp : [vector] (N x 1) expectation +#' @return Cov : [matrix] (N x N) covariance +#' @return Std : [vector] (N x 1) standard deviation +#' @return Corr : [matrix] (N x N) correlation +#' +#' @references +#' \url{http://} +#' See Meucci's script for "LognormalParam2Statistics.m" +#' +#' @author Xavier Valls \email{flamejat@@gmail.com} +#' @export + +LognormalParam2Statistics = function(Mu, Sigma) +{ + + Exp = exp( Mu + (1/2) * diag( Sigma ) ); + Cov = exp( Mu + (1/2) * diag( Sigma ) ) %*% t( exp( Mu + (1/2) * diag( Sigma ) ) ) * ( exp( Sigma ) - 1 ); + Std = sqrt( diag( Cov ) ); + Corr = diag( 1 / Std ) %*% Cov %*% diag( 1 / Std ); + + return( list( Exp = Exp, Covariance = Cov, Standard_Deviation = Std, Correlation = Corr )); +} \ No newline at end of file Added: pkg/Meucci/man/LognormalMoments2Parameters.Rd =================================================================== --- pkg/Meucci/man/LognormalMoments2Parameters.Rd (rev 0) +++ pkg/Meucci/man/LognormalMoments2Parameters.Rd 2013-06-18 12:29:27 UTC (rev 2352) @@ -0,0 +1,37 @@ +\name{LognormalMoments2Parameters} +\alias{LognormalMoments2Parameters} +\title{Compute the mean and standard deviation of a lognormal distribution from its parameters, as described in +A. Meucci, "Risk and Asset Allocation", Springer, 2005, Chapter 1.} +\usage{ + LognormalMoments2Parameters(e, v) +} +\arguments{ + \item{e}{: [scalar] expected value of the lognormal + distribution} + + \item{v}{: [scalar] variance of the lognormal + distribution} +} +\value{ + mu : [scalar] expected value of the normal distribution + + sig2 : [scalar] variance of the normal distribution +} +\description{ + Compute the mean and standard deviation of a lognormal + distribution from its parameters, as described in A. + Meucci, "Risk and Asset Allocation", Springer, 2005, + Chapter 1. +} +\note{ + Inverts the formulas (1.98)-(1.99) in Risk and Asset + Allocation", Springer, 2005. +} +\author{ + Xavier Valls \email{flamejat at gmail.com} +} +\references{ + \url{http://} See Meucci's script for + "LognormalMoments2Parameters" +} + Added: pkg/Meucci/man/LognormalParam2Statistics.Rd =================================================================== --- pkg/Meucci/man/LognormalParam2Statistics.Rd (rev 0) +++ pkg/Meucci/man/LognormalParam2Statistics.Rd 2013-06-18 12:29:27 UTC (rev 2352) @@ -0,0 +1,34 @@ +\name{LognormalParam2Statistics} +\alias{LognormalParam2Statistics} +\title{Compute expectation, Cov, standard deviation and Corr for a lognormal distribution, as described in +A. Meucci "Risk and Asset Allocation", Springer, 2005} +\usage{ + LognormalParam2Statistics(Mu, Sigma) +} +\arguments{ + \item{Mu}{: [vector] (N x 1) location parameter} + + \item{Sigma}{: [matrix] (N x N) scale parameter} +} +\value{ + Exp : [vector] (N x 1) expectation + + Cov : [matrix] (N x N) covariance + + Std : [vector] (N x 1) standard deviation + + Corr : [matrix] (N x N) correlation +} +\description{ + Compute expectation, Cov, standard deviation and Corr for + a lognormal distribution, as described in A. 
Meucci "Risk + and Asset Allocation", Springer, 2005 +} +\author{ + Xavier Valls \email{flamejat at gmail.com} +} +\references{ + \url{http://} See Meucci's script for + "LognormalParam2Statistics.m" +} + From noreply at r-forge.r-project.org Tue Jun 18 15:13:14 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Tue, 18 Jun 2013 15:13:14 +0200 (CEST) Subject: [Returnanalytics-commits] r2353 - pkg/Meucci Message-ID: <20130618131315.2D3111851DA@r-forge.r-project.org> Author: xavierv Date: 2013-06-18 15:13:14 +0200 (Tue, 18 Jun 2013) New Revision: 2353 Modified: pkg/Meucci/DESCRIPTION Log: -updated DESCRIPTION file with a suggested package Modified: pkg/Meucci/DESCRIPTION =================================================================== --- pkg/Meucci/DESCRIPTION 2013-06-18 12:29:27 UTC (rev 2352) +++ pkg/Meucci/DESCRIPTION 2013-06-18 13:13:14 UTC (rev 2353) @@ -42,7 +42,8 @@ nloptr, pracma, ggplot2, - expm + expm, + latticeExtra License: GPL URL: http://r-forge.r-project.org/projects/returnanalytics/ Copyright: (c) 2012 From noreply at r-forge.r-project.org Tue Jun 18 19:51:26 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Tue, 18 Jun 2013 19:51:26 +0200 (CEST) Subject: [Returnanalytics-commits] r2354 - pkg/PerformanceAnalytics/sandbox/pulkit Message-ID: <20130618175126.DF5CC18550D@r-forge.r-project.org> Author: pulkit Date: 2013-06-18 19:51:26 +0200 (Tue, 18 Jun 2013) New Revision: 2354 Added: pkg/PerformanceAnalytics/sandbox/pulkit/PSRopt.R Log: Porting the Optimization code Added: pkg/PerformanceAnalytics/sandbox/pulkit/PSRopt.R =================================================================== --- pkg/PerformanceAnalytics/sandbox/pulkit/PSRopt.R (rev 0) +++ pkg/PerformanceAnalytics/sandbox/pulkit/PSRopt.R 2013-06-18 17:51:26 UTC (rev 2354) @@ -0,0 +1,98 @@ +PsrPortfolio<-function(R,bounds=NULL,MaxIter = 1000,delta = 0.05){ + + x = checkData(R) + columns = ncol(x) + n = nrow(x) + columnnames = colnames(x) + + weights = rep(1,columns)/columns + + if(is.null(bounds)){ + bounds = matrix(rep(c(0,1),columns),nrow = columns,byrow = TRUE) + } + + + + + optimize<-function(){ + + mean = NULL + for(column in 1:columns){ + mean = c(mean,get_Moments(x[,column],1)) + } + while(TRUE){ + if(iter == MaxIter) break + c(d1z,z) = get_d1zs(mean,weights) + + } + + + checkBounds<-function(w){ + } + + stepSize<-function(w,d1Z){ + } + + get_d1Zs(mean,w){ + d1Z = rep(0,columns) + m = NULL + x = Return.portfolio(x,weights) + m[1] = get_Moments(x,1) + for(i in 1:4){ + m = c(m,get_Moments(x,i+1,m[0])) + } + stats = get_Stats(m) + c(meanSR,sigmaSR) = get_SR(stats,n) + for(i in 1:columns){ + d1Z[i] = get_d1Z(stats,m,meanSR,sigmaSR,mean,weights,index) + } + } + + get_d1Z<-function(stats,m,meanSR,sigmaSR,mean,w,index){ + d1Mu = get_d1Mu(mean,index) + + + + } + + get_d1Mu<-function(mean,index){ + } + + get_d1Sigma<-function(sigma,mean,w,index){ + } + + get_d1Skew<-function(d1Sigma,sigma,mean,w,index,m3){ + } + + get_d1Kurt<-function(d1Sigma,sigma,mean,w,index,m4){ + } + + get_dnMoments<-function(mean,w,mOrder,dOrder,index){ + } + + get_SR<-function(stats,n){ + meanSR = stats[0]/stats[1] + sigmaSR = ((1-meanSR*stats[2]+(meanSR^2)*(stats[3]-1)/4)/(n-1))^0.5 + return(meanSR,sigmaSR) + } + + get_Stats<-function(m){ + stats = c(m[0],m[1]^(0.5),(m[2]/m[1])^(3/2),(m[3]/m[1])^(0.5)) + return(stats) + } + + get_Moments<-function(x,order,mean = 0){ + + sum = 0 + for(i in 1:n){ + sum = sum + (x[i]-mean)^order + } + moment = sum/n + return(moment) + } + +} + 
+ + + From noreply at r-forge.r-project.org Tue Jun 18 20:18:01 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Tue, 18 Jun 2013 20:18:01 +0200 (CEST) Subject: [Returnanalytics-commits] r2355 - in pkg/FactorAnalytics: . R Message-ID: <20130618181802.062A118431D@r-forge.r-project.org> Author: chenyian Date: 2013-06-18 20:18:01 +0200 (Tue, 18 Jun 2013) New Revision: 2355 Modified: pkg/FactorAnalytics/NAMESPACE pkg/FactorAnalytics/R/fitMacroeconomicFactorModel.R Log: Modified: pkg/FactorAnalytics/NAMESPACE =================================================================== --- pkg/FactorAnalytics/NAMESPACE 2013-06-18 17:51:26 UTC (rev 2354) +++ pkg/FactorAnalytics/NAMESPACE 2013-06-18 18:18:01 UTC (rev 2355) @@ -1,7 +0,0 @@ -exportPattern("^[^\\.]") -S3method(print, MacroFactorModel) -S3method(print, StatFactorModel) -S3method(plot, MacroFactorModel) -S3method(plot, StatFactorModel) -S3method(plot, FundamentalFactorModel) -S3method(summary, MacroFactorModel) \ No newline at end of file Modified: pkg/FactorAnalytics/R/fitMacroeconomicFactorModel.R =================================================================== --- pkg/FactorAnalytics/R/fitMacroeconomicFactorModel.R 2013-06-18 17:51:26 UTC (rev 2354) +++ pkg/FactorAnalytics/R/fitMacroeconomicFactorModel.R 2013-06-18 18:18:01 UTC (rev 2355) @@ -1,373 +1,344 @@ -#' Fit macroeconomic factor model by time series regression techniques. -#' -#' Fit macroeconomic factor model by time series regression techniques. It -#' creates the class of "MacroFactorModel". -#' -#' If \code{Robust} is chosen, there is no subsets but all factors will be -#' used. Cp is defined in -#' http://www-stat.stanford.edu/~hastie/Papers/LARS/LeastAngle_2002.pdf. p17. -#' -#' @param ret.assets N x T Numerical returns data, univariate or multivariate, -#' where N is the number of assets return and T is the length of time period. -#' data has to be saved as class "data.frame" so that lm function can be used -#' and must have column names. -#' @param factors K x T Numerical factors data, where K is the number of -#' factors and T is the length of the time period. Data has to be saved as -#' class "data.frame" so that lm function can be used and must have column -#' names. -#' @param factor.set scalar, number of factors -#' @param fit.method "OLS" is ordinary least squares method, "DLS" is -#' discounted least squares method. Discounted least squares (DLS) estimation -#' is weighted least squares estimation with exponentially declining weights -#' that sum to unity. "Robust" -#' @param variable.selection "stepwise" is traditional forward/backward -#' stepwise OLS regression, starting from the initial set of factors, that adds -#' factors only if the regression fit as measured by the Bayesian Information -#' Criteria (BIC) or Akaike Information Criteria (AIC) can be done using the R -#' function step() from the stats package. If \code{Robust} is chosen, the -#' function step.lmRob in Robust package will be used. "all subsets" is -#' Traditional all subsets regression can be done using the R function -#' regsubsets() from the package leaps. "lar" , "lasso" is based on package -#' "lars", linear angle regression. -#' @param decay.factor for DLS. Default is 0.95. -#' @param nvmax control option for all subsets. maximum size of subsets to -#' examine -#' @param force.in control option for all subsets. The factors that should be -#' in all models. -#' @param subsets.method control option for all subsets. 
se exhaustive search, -#' forward selection, backward selection or sequential replacement to search. -#' @param lars.criteria either choose minimum "Cp": unbiased estimator of the -#' true rist or "cv" 10 folds cross-validation. See detail. -#' @return an S3 object containing -#' @returnItem asset.fit Fit objects for each asset. This is the class "lm" for -#' each object. -#' @returnItem alpha.vec N x 1 Vector of estimated alphas. -#' @returnItem beta.mat N x K Matrix of estimated betas -#' @returnItem r2.vec N x 1 Vector of R-square values. -#' @returnItem residVars.vec N x 1 Vector of residual variances. -#' @returnItem call function call. -#' @returnItem ret.assets Assets returns of input data. -#' @returnItem factors Factors of input data. -#' @returnItem variable.selection variables selected by the user. -#' @author Eric Zivot and Yi-An Chen. -#' @references 1. Efron, Hastie, Johnstone and Tibshirani (2002) "Least Angle -#' Regression" (with discussion) Annals of Statistics; see also -#' http://www-stat.stanford.edu/~hastie/Papers/LARS/LeastAngle_2002.pdf. 2. -#' Hastie, Tibshirani and Friedman (2008) Elements of Statistical Learning 2nd -#' edition, Springer, NY. -#' @examples -#' -#' # load data from the database -#' data(managers.df) -#' ret.assets = managers.df[,(1:6)] -#' factors = managers.df[,(7:9)] -#' # fit the factor model with OLS -#' fit <- fitMacroeconomicFactorModel(ret.assets,factors,fit.method="OLS", -#' variable.selection="all subsets") -#' # summary of HAM1 -#' summary(fit$asset.fit$HAM1) -#' # plot actual vs. fitted over time for HAM1 -#' # use chart.TimeSeries() function from PerformanceAnalytics package -#' dataToPlot = cbind(fitted(fit$asset.fit$HAM1), na.omit(managers.df$HAM1)) -#' colnames(dataToPlot) = c("Fitted","Actual") -#' chart.TimeSeries(dataToPlot, main="FM fit for HAM1", -#' colorset=c("black","blue"), legend.loc="bottomleft") -#' -fitMacroeconomicFactorModel <- -function(ret.assets, factors, factor.set = 2, - fit.method=c("OLS","DLS","Robust"), - variable.selection=c("stepwise", "all subsets", "lar", "lasso"), - decay.factor = 0.95,nvmax=8,force.in=NULL, - subsets.method = c("exhaustive", "backward", "forward", "seqrep"), - lars.criteria = c("Cp","cv") - ) { -## Inputs: -# ret.assets numerical returns data, univariate or multivariate. data has to be saved as class "data.frame" -# so that lm function can be used and must have colnames. -# factors numerical factors data. data has to be saved as class "data.frame" -# so that lm function can be used and must have colnames. -# factor.set numerical, the numbers of the factors to be included in the model when all subsets -# is chosen. -# fit.method chose between "OLS" ordinanary least sqaures, "DLS" discounted least squares using exponential -# declining weights, and "Robust" coerced by function lmob in package "robustbase" -# variable.selection "all" includes all the explanary variables in the model. -# "stepwise" is traditional forward/backward stepwise regression, starting from the initial -# set of factors, that adds factors only if the regression fit as measured by the Bayesian -# Information Criteria (BIC) or Akaike Information Criteria (AIC) can be done using the R -# function step() from the stats package. "all subsets" is Traditional all subsets regression -# can be done using the R function regsubsets() from the package leaps. -# only \code(OLS) or \code(DLS) can choose this option. -# "lar" , "lasso" is based on package "lars", linear angle regression. -# decay.factor for DLS. Default is 0.95. 
-# nvmax control option for all subsets. maximum size of subsets to examine -# force.in control option for all subsets. The factors that should be in all models. -# subsets.method control option for all subsets. se exhaustive search, forward selection, backward selection -# or sequential replacement to search. -# lars.criteria either choose minimum "Cp" which is unbiased estimator of the true rist or "cv" -# 10 folds cross-validation. See detail. -## Outputs: -## output: list with following components: -## asset.fit fit objects for each asset. This is the class "lm" for each assets. -## alpha.vec vector of estimated alphas. -## beta.mat matrix of estimated betas. -## r2.vec vector of R-square values -## residVars.vec vector of residual variances -## call function call. - require(leaps) - require(lars) - require(robust) - require(MASS) - this.call <- match.call() - -if (is.data.frame(ret.assets) & is.data.frame(factors) ) { - manager.names = colnames(ret.assets) - factor.names = colnames(factors) - managers.df = cbind(ret.assets,factors) -} else { - stop("ret.assets and beta.mat must be in class data.frame") -} - - - - -# initialize list object to hold regression objects -reg.list = list() - - -# initialize matrices and vectors to hold estimated betas, -# residual variances, and R-square values from -# fitted factor models - -Alphas = ResidVars = R2values = rep(0, length(manager.names)) -names(Alphas) = names(ResidVars) = names(R2values) = manager.names -Betas = matrix(0, length(manager.names), length(factor.names)) -colnames(Betas) = factor.names -rownames(Betas) = manager.names - - - - - - -if (variable.selection == "all subsets") { -# estimate multiple factor model using loop b/c of unequal histories for the hedge funds - - - -if (fit.method == "OLS") { - -if (factor.set == length(force.in)) { - for (i in manager.names) { - reg.df = na.omit(managers.df[, c(i, force.in)]) - fm.formula = as.formula(paste(i,"~", ".", sep=" ")) - fm.fit = lm(fm.formula, data=reg.df) - fm.summary = summary(fm.fit) - reg.list[[i]] = fm.fit - Alphas[i] = coef(fm.fit)[1] - Betas.names = names(coef(fm.fit)[-1]) - Betas[i,Betas.names] = coef(fm.fit)[-1] - ResidVars[i] = fm.summary$sigma^2 - R2values[i] = fm.summary$r.squared - } -} else if (factor.set > length(force.in)) { - -for (i in manager.names) { - reg.df = na.omit(managers.df[, c(i, factor.names)]) - fm.formula = as.formula(paste(i,"~", ".", sep=" ")) - fm.subsets <- regsubsets(fm.formula,data=reg.df,nvmax=nvmax,force.in=force.in, - method=subsets.method) - sum.sub <- summary(fm.subsets) - reg.df <- na.omit(managers.df[,c(i,names(which(sum.sub$which[as.character(factor.set),-1]==TRUE)) )]) - fm.fit = lm(fm.formula, data=reg.df) - fm.summary = summary(fm.fit) - reg.list[[i]] = fm.fit - Alphas[i] = coef(fm.fit)[1] - Betas.names = names(coef(fm.fit)[-1]) - Betas[i,Betas.names] = coef(fm.fit)[-1] - ResidVars[i] = fm.summary$sigma^2 - R2values[i] = fm.summary$r.squared - } -} else { - stop("ERROR! 
number of force.in should less or equal to factor.set") -} - - - - -} else if (fit.method == "DLS"){ - - - if (factor.set == length(force.in)) { - # define weight matrix -for (i in manager.names) { - reg.df = na.omit(managers.df[, c(i, force.in)]) - t.length <- nrow(reg.df) - w <- rep(decay.factor^(t.length-1),t.length) - for (k in 2:t.length) { - w[k] = w[k-1]/decay.factor - } -# sum weigth to unitary - w <- w/sum(w) - fm.formula = as.formula(paste(i,"~", ".", sep="")) - fm.fit = lm(fm.formula, data=reg.df,weight=w) - fm.summary = summary(fm.fit) - reg.list[[i]] = fm.fit - Alphas[i] = coef(fm.fit)[1] - Betas.names = names(coef(fm.fit)[-1]) - Betas[i,Betas.names] = coef(fm.fit)[-1] - ResidVars[i] = fm.summary$sigma^2 - R2values[i] = fm.summary$r.squared - } -} else if (factor.set > length(force.in)) { - for (i in manager.names) { - reg.df = na.omit(managers.df[, c(i, factor.names)]) - t.length <- nrow(reg.df) - w <- rep(decay.factor^(t.length-1),t.length) - for (k in 2:t.length) { - w[k] = w[k-1]/decay.factor - } - w <- w/sum(w) - fm.formula = as.formula(paste(i,"~", ".", sep="")) - fm.subsets <- regsubsets(fm.formula,data=reg.df,nvmax=nvmax,force.in=force.in, - method=subsets.method,weights=w) # w is called from global envio - sum.sub <- summary(fm.subsets) - reg.df <- na.omit(managers.df[,c(i,names(which(sum.sub$which[as.character(factor.set),-1]==TRUE)) )]) - fm.fit = lm(fm.formula, data=reg.df,weight=w) - fm.summary = summary(fm.fit) - reg.list[[i]] = fm.fit - Alphas[i] = coef(fm.fit)[1] - Betas.names = names(coef(fm.fit)[-1]) - Betas[i,Betas.names] = coef(fm.fit)[-1] - ResidVars[i] = fm.summary$sigma^2 - R2values[i] = fm.summary$r.squared - } -} else { - stop("ERROR! number of force.in should less or equal to factor.set") -} - - -} else if (fit.method=="Robust") { - for (i in manager.names) { - reg.df = na.omit(managers.df[, c(i, factor.names)]) - fm.formula = as.formula(paste(i,"~", ".", sep=" ")) - fm.fit = lmRob(fm.formula, data=reg.df) - fm.summary = summary(fm.fit) - reg.list[[i]] = fm.fit - Alphas[i] = coef(fm.fit)[1] - Betas[i, ] = coef(fm.fit)[-1] - ResidVars[i] = fm.summary$sigma^2 - R2values[i] = fm.summary$r.squared - } - -} else { - stop("invalid method") -} - - -} else if (variable.selection == "stepwise") { - - - if (fit.method == "OLS") { -# loop over all assets and estimate time series regression -for (i in manager.names) { - reg.df = na.omit(managers.df[, c(i, factor.names)]) - fm.formula = as.formula(paste(i,"~", ".", sep=" ")) - fm.fit = step(lm(fm.formula, data=reg.df),trace=0) - fm.summary = summary(fm.fit) - reg.list[[i]] = fm.fit - Alphas[i] = coef(fm.fit)[1] - Betas.names = names(coef(fm.fit)[-1]) - Betas[i,Betas.names] = coef(fm.fit)[-1] - ResidVars[i] = fm.summary$sigma^2 - R2values[i] = fm.summary$r.squared - } - - -} else if (fit.method == "DLS"){ - # define weight matrix -for (i in manager.names) { - reg.df = na.omit(managers.df[, c(i, factor.names)]) - t.length <- nrow(reg.df) - w <- rep(decay.factor^(t.length-1),t.length) - for (k in 2:t.length) { - w[k] = w[k-1]/decay.factor - } -# sum weigth to unitary - w <- w/sum(w) - fm.formula = as.formula(paste(i,"~", ".", sep="")) - fm.fit = step(lm(fm.formula, data=reg.df,weight=w),trace=0) - fm.summary = summary(fm.fit) - reg.list[[i]] = fm.fit - Alphas[i] = coef(fm.fit)[1] - Betas.names = names(coef(fm.fit)[-1]) - Betas[i,Betas.names] = coef(fm.fit)[-1] - ResidVars[i] = fm.summary$sigma^2 - R2values[i] = fm.summary$r.squared - } - -} else if (fit.method=="Robust") { - for (i in manager.names) { - 
assign("reg.df" , na.omit(managers.df[, c(i, factor.names)]),envir = .GlobalEnv ) - fm.formula = as.formula(paste(i,"~", ".", sep=" ")) - lmRob.obj <- lmRob(fm.formula, data=reg.df) - fm.fit = step.lmRob(lmRob.obj,trace=FALSE) - fm.summary = summary(fm.fit) - reg.list[[i]] = fm.fit - Alphas[i] = coef(fm.fit)[1] - Betas.names = names(coef(fm.fit)[-1]) - Betas[i,Betas.names] = coef(fm.fit)[-1] - ResidVars[i] = fm.summary$sigma^2 - R2values[i] = fm.summary$r.squared - } - -} - -} else if (variable.selection == "lar" || variable.selection == "lasso") { - # use min Cp as criteria to choose predictors - - for (i in manager.names) { - reg.df = na.omit(managers.df[, c(i, factor.names)]) - reg.df = as.matrix(reg.df) - lars.fit = lars(reg.df[,factor.names],reg.df[,i],type=variable.selection,trace=FALSE) - sum.lars <- summary(lars.fit) - if (lars.criteria == "Cp") { - s<- which.min(sum.lars$Cp) - } else { - lars.cv <- cv.lars(reg.df[,factor.names],reg.df[,i],trace=FALSE, - type=variable.selection,mode="step",plot.it=FALSE) - s<- which.min(lars.cv$cv) - } - coef.lars <- predict(lars.fit,s=s,type="coef",mode="step") - reg.list[[i]] = lars.fit - fitted <- predict(lars.fit,reg.df[,factor.names],s=s,type="fit",mode="step") - Alphas[i] = (fitted$fit - reg.df[,factor.names]%*%coef.lars$coefficients)[1] - Betas.names = names(coef.lars$coefficients) - Betas[i,Betas.names] = coef.lars$coefficients - ResidVars[i] = sum.lars$Rss[s]/(nrow(reg.df)-s) - R2values[i] = lars.fit$R2[s] - } - - } else { - stop("wrong method") -} - - - - - - # return results -# add option to return list -ans = list (asset.fit = reg.list, - alpha.vec = Alphas, - beta.mat = Betas, - r2.vec = R2values, - residVars.vec = ResidVars, - call = this.call, - ret.assets = ret.assets, - factors = factors, - variable.selection = variable.selection - ) -class(ans) = "MacroFactorModel" -return(ans) -} - +#' Fit macroeconomic factor model by time series regression techniques. +#' +#' Fit macroeconomic factor model by time series regression techniques. It +#' creates the class of "MacroFactorModel". +#' +#' If \code{Robust} is chosen, there is no subsets but all factors will be +#' used. Cp is defined in +#' http://www-stat.stanford.edu/~hastie/Papers/LARS/LeastAngle_2002.pdf. p17. +#' +#' @param assets.names names of assets returns. +#' @param factors.names names of factors returns. +#' @param factor.set scalar, number of factors +#' @param data a vector, matrix, data.frame, xts, timeSeries or zoo object with asset returns +#' and factors retunrs rownames +#' @param fit.method "OLS" is ordinary least squares method, "DLS" is +#' discounted least squares method. Discounted least squares (DLS) estimation +#' is weighted least squares estimation with exponentially declining weights +#' that sum to unity. "Robust" +#' @param variable.selection "stepwise" is traditional forward/backward +#' stepwise OLS regression, starting from the initial set of factors, that adds +#' factors only if the regression fit as measured by the Bayesian Information +#' Criteria (BIC) or Akaike Information Criteria (AIC) can be done using the R +#' function step() from the stats package. If \code{Robust} is chosen, the +#' function step.lmRob in Robust package will be used. "all subsets" is +#' Traditional all subsets regression can be done using the R function +#' regsubsets() from the package leaps. "lar" , "lasso" is based on package +#' "lars", linear angle regression. +#' @param decay.factor for DLS. Default is 0.95. +#' @param nvmax control option for all subsets. 
maximum size of subsets to
+#' examine
+#' @param force.in control option for all subsets. The factors that should be
+#' in all models.
+#' @param subsets.method control option for all subsets. Use exhaustive search,
+#' forward selection, backward selection or sequential replacement to search.
+#' @param lars.criteria either choose minimum "Cp": unbiased estimator of the
+#' true risk, or "cv": 10-fold cross-validation. See detail.
+#' @return an S3 object containing
+#' \item{asset.fit}{Fit objects for each asset. This is the class "lm" for
+#' each object.}
+#' \item{alpha.vec}{N x 1 Vector of estimated alphas.}
+#' \item{beta.mat}{N x K Matrix of estimated betas.}
+#' \item{r2.vec}{N x 1 Vector of R-square values.}
+#' \item{residVars.vec}{N x 1 Vector of residual variances.}
+#' \item{call}{function call.}
+#' \item{ret.assets}{Assets returns of input data.}
+#' \item{factors}{Factors of input data.}
+#' \item{variable.selection}{variables selected by the user.}
+#' @author Eric Zivot and Yi-An Chen.
+#' @references 1. Efron, Hastie, Johnstone and Tibshirani (2002) "Least Angle
+#' Regression" (with discussion) Annals of Statistics; see also
+#' http://www-stat.stanford.edu/~hastie/Papers/LARS/LeastAngle_2002.pdf. 2.
+#' Hastie, Tibshirani and Friedman (2008) Elements of Statistical Learning 2nd
+#' edition, Springer, NY.
+#' @examples
+#' \dontrun{
+#' # load data from the database
+#' data(managers.df)
+#' ret.assets = managers.df[,(1:6)]
+#' factors = managers.df[,(7:9)]
+#' # fit the factor model with OLS
+#' fit <- fitMacroeconomicFactorModel(ret.assets,factors,fit.method="OLS",
+#'                                    variable.selection="all subsets")
+#' # summary of HAM1
+#' summary(fit$asset.fit$HAM1)
+#' # plot actual vs. fitted over time for HAM1
+#' # use chart.TimeSeries() function from PerformanceAnalytics package
+#' dataToPlot = cbind(fitted(fit$asset.fit$HAM1), na.omit(managers.df$HAM1))
+#' colnames(dataToPlot) = c("Fitted","Actual")
+#' chart.TimeSeries(dataToPlot, main="FM fit for HAM1",
+#'                  colorset=c("black","blue"), legend.loc="bottomleft")
+#' }
+fitMacroeconomicFactorModel <-
+function(assets.names, factors.names, data, factor.set = 3,
+         fit.method=c("OLS","DLS","Robust"),
+         variable.selection=c("stepwise", "all subsets", "lar", "lasso"),
+         decay.factor = 0.95, nvmax=8, force.in=NULL,
+         subsets.method = c("exhaustive", "backward", "forward", "seqrep"),
+         lars.criteria = c("Cp","cv")) {
+
+  require(PerformanceAnalytics)
+  require(leaps)
+  require(lars)
+  require(robust)
+  require(MASS)
+  this.call <- match.call()
+
+  # convert data into xts and hereafter compute in xts
+  data.xts <- checkData(data)
+  #assets.names <- colnames(data.xts)[1:6] # erase later
+  #factors.names <- colnames(data.xts)[7:9]
+  reg.xts <- merge(data.xts[,assets.names],data.xts[,factors.names])
+
+# if (is.data.frame(ret.assets) & is.data.frame(factors) ) {
+#  assets.names = colnames(ret.assets)
+#  factors.names = colnames(factors)
+#  reg.xts = cbind(ret.assets,factors)
+# } else {
+#  stop("ret.assets and beta.mat must be in class data.frame")
+# }
+
+
+
+
+# initialize list object to hold regression objects
+reg.list = list()
+
+
+# initialize matrices and vectors to hold estimated betas,
+# residual variances, and R-square values from
+# fitted factor models
+
+Alphas = ResidVars = R2values = rep(0, length(assets.names))
+names(Alphas) = names(ResidVars) = names(R2values) = assets.names
+Betas = matrix(0, length(assets.names), length(factors.names))
+colnames(Betas) = factors.names
+rownames(Betas) = assets.names
+
+
+
+
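The DLS branches below rebuild the same weight vector inline for each asset. As
a standalone sketch of the scheme described in the documentation above
(exponentially declining weights that sum to unity; the sample length is an
assumed value for illustration):

decay.factor <- 0.95
t.length <- 60  # assumed number of observations for one asset
# oldest observation gets decay.factor^(t.length-1), newest gets decay.factor^0
w <- decay.factor^((t.length - 1):0)
w <- w / sum(w)  # normalize so the weights sum to one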
+if (variable.selection == "all subsets") { +# estimate multiple factor model using loop b/c of unequal histories for the hedge funds + + + +if (fit.method == "OLS") { + +if (factor.set == length(force.in)) { + for (i in assets.names) { + reg.df = na.omit(reg.xts[, c(i, force.in)]) + fm.formula = as.formula(paste(i,"~", ".", sep=" ")) + fm.fit = lm(fm.formula, data=reg.df) + fm.summary = summary(fm.fit) + reg.list[[i]] = fm.fit + Alphas[i] = coef(fm.fit)[1] + Betas.names = names(coef(fm.fit)[-1]) + Betas[i,Betas.names] = coef(fm.fit)[-1] + ResidVars[i] = fm.summary$sigma^2 + R2values[i] = fm.summary$r.squared + } +} else if (factor.set > length(force.in)) { + +for (i in assets.names) { + reg.df = na.omit(reg.xts[, c(i, factors.names)]) + fm.formula = as.formula(paste(i,"~", ".", sep=" ")) + fm.subsets <- regsubsets(fm.formula,data=reg.df,nvmax=nvmax,force.in=force.in, + method=subsets.method) + sum.sub <- summary(fm.subsets) + reg.df <- na.omit(reg.xts[,c(i,names(which(sum.sub$which[as.character(factor.set),-1]==TRUE)) )]) + fm.fit = lm(fm.formula, data=reg.df) + fm.summary = summary(fm.fit) + reg.list[[i]] = fm.fit + Alphas[i] = coef(fm.fit)[1] + Betas.names = names(coef(fm.fit)[-1]) + Betas[i,Betas.names] = coef(fm.fit)[-1] + ResidVars[i] = fm.summary$sigma^2 + R2values[i] = fm.summary$r.squared + } +} else { + stop("ERROR! number of force.in should less or equal to factor.set") +} + + + + +} else if (fit.method == "DLS"){ + + + if (factor.set == length(force.in)) { + # define weight matrix +for (i in assets.names) { + reg.df = na.omit(reg.xts[, c(i, force.in)]) + t.length <- nrow(reg.df) + w <- rep(decay.factor^(t.length-1),t.length) + for (k in 2:t.length) { + w[k] = w[k-1]/decay.factor + } +# sum weigth to unitary + w <- w/sum(w) + fm.formula = as.formula(paste(i,"~", ".", sep="")) + fm.fit = lm(fm.formula, data=reg.df,weight=w) + fm.summary = summary(fm.fit) + reg.list[[i]] = fm.fit + Alphas[i] = coef(fm.fit)[1] + Betas.names = names(coef(fm.fit)[-1]) + Betas[i,Betas.names] = coef(fm.fit)[-1] + ResidVars[i] = fm.summary$sigma^2 + R2values[i] = fm.summary$r.squared + } +} else if (factor.set > length(force.in)) { + for (i in assets.names) { + reg.df = na.omit(reg.xts[, c(i, factors.names)]) + t.length <- nrow(reg.df) + w <- rep(decay.factor^(t.length-1),t.length) + for (k in 2:t.length) { + w[k] = w[k-1]/decay.factor + } + w <- w/sum(w) + fm.formula = as.formula(paste(i,"~", ".", sep="")) + fm.subsets <- regsubsets(fm.formula,data=reg.df,nvmax=nvmax,force.in=force.in, + method=subsets.method,weights=w) # w is called from global envio + sum.sub <- summary(fm.subsets) + reg.df <- na.omit(reg.xts[,c(i,names(which(sum.sub$which[as.character(factor.set),-1]==TRUE)) )]) + fm.fit = lm(fm.formula, data=reg.df,weight=w) + fm.summary = summary(fm.fit) + reg.list[[i]] = fm.fit + Alphas[i] = coef(fm.fit)[1] + Betas.names = names(coef(fm.fit)[-1]) + Betas[i,Betas.names] = coef(fm.fit)[-1] + ResidVars[i] = fm.summary$sigma^2 + R2values[i] = fm.summary$r.squared + } +} else { + stop("ERROR! 
number of force.in should less or equal to factor.set") +} + + +} else if (fit.method=="Robust") { + for (i in assets.names) { + reg.df = na.omit(reg.xts[, c(i, factors.names)]) + fm.formula = as.formula(paste(i,"~", ".", sep=" ")) + fm.fit = lmRob(fm.formula, data=reg.df) + fm.summary = summary(fm.fit) + reg.list[[i]] = fm.fit + Alphas[i] = coef(fm.fit)[1] + Betas[i, ] = coef(fm.fit)[-1] + ResidVars[i] = fm.summary$sigma^2 + R2values[i] = fm.summary$r.squared + } + +} else { + stop("invalid method") +} + + +} else if (variable.selection == "stepwise") { + + + if (fit.method == "OLS") { +# loop over all assets and estimate time series regression +for (i in assets.names) { + reg.df = na.omit(reg.xts[, c(i, factors.names)]) + fm.formula = as.formula(paste(i,"~", ".", sep=" ")) + fm.fit = step(lm(fm.formula, data=reg.df),trace=0) + fm.summary = summary(fm.fit) + reg.list[[i]] = fm.fit + Alphas[i] = coef(fm.fit)[1] + Betas.names = names(coef(fm.fit)[-1]) + Betas[i,Betas.names] = coef(fm.fit)[-1] + ResidVars[i] = fm.summary$sigma^2 + R2values[i] = fm.summary$r.squared + } + + +} else if (fit.method == "DLS"){ + # define weight matrix +for (i in assets.names) { + reg.df = na.omit(reg.xts[, c(i, factors.names)]) + t.length <- nrow(reg.df) + w <- rep(decay.factor^(t.length-1),t.length) + for (k in 2:t.length) { + w[k] = w[k-1]/decay.factor + } +# sum weigth to unitary + w <- w/sum(w) + fm.formula = as.formula(paste(i,"~", ".", sep="")) + fm.fit = step(lm(fm.formula, data=reg.df,weight=w),trace=0) + fm.summary = summary(fm.fit) + reg.list[[i]] = fm.fit + Alphas[i] = coef(fm.fit)[1] + Betas.names = names(coef(fm.fit)[-1]) + Betas[i,Betas.names] = coef(fm.fit)[-1] + ResidVars[i] = fm.summary$sigma^2 + R2values[i] = fm.summary$r.squared + } + +} else if (fit.method=="Robust") { + for (i in assets.names) { + assign("reg.df" , na.omit(reg.xts[, c(i, factors.names)]),envir = .GlobalEnv ) + fm.formula = as.formula(paste(i,"~", ".", sep=" ")) + lmRob.obj <- lmRob(fm.formula, data=reg.df) + fm.fit = step.lmRob(lmRob.obj,trace=FALSE) + fm.summary = summary(fm.fit) + reg.list[[i]] = fm.fit + Alphas[i] = coef(fm.fit)[1] + Betas.names = names(coef(fm.fit)[-1]) + Betas[i,Betas.names] = coef(fm.fit)[-1] + ResidVars[i] = fm.summary$sigma^2 + R2values[i] = fm.summary$r.squared + } + +} + +} else if (variable.selection == "lar" || variable.selection == "lasso") { + # use min Cp as criteria to choose predictors + + for (i in assets.names) { + reg.df = na.omit(reg.xts[, c(i, factors.names)]) + reg.df = as.matrix(reg.df) + lars.fit = lars(reg.df[,factors.names],reg.df[,i],type=variable.selection,trace=FALSE) + sum.lars <- summary(lars.fit) + if (lars.criteria == "Cp") { + s<- which.min(sum.lars$Cp) + } else { + lars.cv <- cv.lars(reg.df[,factors.names],reg.df[,i],trace=FALSE, + type=variable.selection,mode="step",plot.it=FALSE) + s<- which.min(lars.cv$cv) + } + coef.lars <- predict(lars.fit,s=s,type="coef",mode="step") + reg.list[[i]] = lars.fit + fitted <- predict(lars.fit,reg.df[,factors.names],s=s,type="fit",mode="step") + Alphas[i] = (fitted$fit - reg.df[,factors.names]%*%coef.lars$coefficients)[1] + Betas.names = names(coef.lars$coefficients) + Betas[i,Betas.names] = coef.lars$coefficients + ResidVars[i] = sum.lars$Rss[s]/(nrow(reg.df)-s) + R2values[i] = lars.fit$R2[s] + } + + } else { + stop("wrong method") +} + + + + + + # return results +# add option to return list +ans = list (asset.fit = reg.list, + alpha.vec = Alphas, + beta.mat = Betas, + r2.vec = R2values, + residVars.vec = ResidVars, + call = this.call, + 
ret.assets = ret.assets, + factors = factors, + variable.selection = variable.selection + ) +class(ans) = "MacroFactorModel" +return(ans) +} + From noreply at r-forge.r-project.org Tue Jun 18 20:52:55 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Tue, 18 Jun 2013 20:52:55 +0200 (CEST) Subject: [Returnanalytics-commits] r2356 - in pkg/FactorAnalytics: R man Message-ID: <20130618185255.C1B75185488@r-forge.r-project.org> Author: braverock Date: 2013-06-18 20:52:55 +0200 (Tue, 18 Jun 2013) New Revision: 2356 Added: pkg/FactorAnalytics/R/FactorAnalytics-package.R Modified: pkg/FactorAnalytics/man/fitMacroeconomicFactorModel.Rd Log: - roxygenize fitMacroeconomicFactorModel - add roxygen comment file for data sets, etc. Added: pkg/FactorAnalytics/R/FactorAnalytics-package.R =================================================================== --- pkg/FactorAnalytics/R/FactorAnalytics-package.R (rev 0) +++ pkg/FactorAnalytics/R/FactorAnalytics-package.R 2013-06-18 18:52:55 UTC (rev 2356) @@ -0,0 +1,132 @@ + + +#' Functions for Cornish-Fisher density, CDF, random number simulation and +#' quantile. +#' +#' \code{dCornishFisher} Computes Cornish-Fisher density from two term +#' Edgeworth expansion given mean, standard deviation, skewness and excess +#' kurtosis. \code{pCornishFisher} Computes Cornish-Fisher CDF from two term +#' Edgeworth expansion given mean, standard deviation, skewness and excess +#' kurtosis. \code{qCornishFisher} Computes Cornish-Fisher quantiles from two +#' term Edgeworth expansion given mean, standard deviation, skewness and excess +#' kurtosis. \code{rCornishFisher} simulate observations based on +#' Cornish-Fisher quantile expansion given mean, standard deviation, skewness +#' and excess kurtosis. +#' +#' CDF(q) = Pr(sqrt(n)*(x_bar-mu)/sigma < q) +#' +#' @aliases rCornishFisher dCornishFisher pCornishFisher qCornishFisher +#' @param n scalar, number of simulated values in rCornishFisher. Sample length +#' in density,distribution,quantile function. +#' @param sigma scalar, standard deviation. +#' @param skew scalar, skewness. +#' @param ekurt scalar, excess kurtosis. +#' @param seed set seed here. Default is \code{NULL}. +#' @param x,q vector of standardized quantiles. See detail. +#' @param p vector of probabilities. +#' @return n simulated values from Cornish-Fisher distribution. +#' @author Eric Zivot and Yi-An Chen. 
+#' @references A.DasGupta, "Asymptotic Theory of Statistics and Probability", +#' Springer Science+Business Media,LLC 2008 Thomas A.Severini, "Likelihood +#' Methods in Statistics", Oxford University Press, 2000 +#' @examples +#' +#' # generate 1000 observation from Cornish-Fisher distribution +#' rc <- rCornishFisher(1000,1,0,5) +#' hist(rc,breaks=100,freq=FALSE,main="simulation of Cornish Fisher Distribution", +#' xlim=c(-10,10)) +#' lines(seq(-10,10,0.1),dnorm(seq(-10,10,0.1),mean=0,sd=1),col=2) +#' # compare with standard normal curve +#' +#' # example from A.dasGupta p.188 exponential example +#' # x is iid exp(1) distribution, sample size = 5 +#' # then x_bar is Gamma(shape=5,scale=1/5) distribution +#' q <- c(0,0.4,1,2) +#' # exact cdf +#' pgamma(q/sqrt(5)+1,shape=5,scale=1/5) +#' # use CLT +#' pnorm(q) +#' # use edgeworth expansion +#' pCornishFisher(q,n=5,skew=2,ekurt=6) +#' +#' @name CornishFisher +NULL + + + + + +#' Hypothetical Alternative Asset Manager and Benchmark Data +#' +#' a data.frame format from managers dataset from package PerformanceAnalytics, +#' containing columns of monthly returns for six hypothetical asset managers +#' (HAM1 through HAM6), the EDHEC Long-Short Equity hedge fund index, the S\&P +#' 500 total returns. Monthly returns for all series end in December 2006 and +#' begin at different periods starting from January 1997. +#' +#' +#' @name managers.df +#' @docType data +#' @keywords datasets +#' @examples +#' +#' data(managers.df) +#' ## maybe str(managers.df) ; plot(managers.df) ... +#' +NULL + + + + + +#' Monthly Stock Return Data || Portfolio of Weekly Stock Returns +#' +#' sfm.dat: This is a monthly "data.frame" object from January 1978 to December +#' 1987, with seventeen columns representing monthly returns of certain assets, +#' as in Chapter 2 of Berndt (1991). sfm.apca.dat: This is a weekly +#' "data.frame" object with dimension 182 x 1618, which runs from January 8, +#' 1997 to June 28, 2000 and represents the stock returns on 1618 U.S. stocks. +#' +#' CITCRP monthly returns of Citicorp. CONED monthly returns of Consolidated +#' Edison. CONTIL monthly returns of Continental Illinois. DATGEN monthly +#' returns of Data General. DEC monthly returns of Digital Equipment Company. +#' DELTA monthly returns of Delta Airlines. GENMIL monthly returns of General +#' Mills. GERBER monthly returns of Gerber. IBM monthly returns of +#' International Business Machines. MARKET a value-weighted composite monthly +#' returns based on transactions from the New York Stock Exchange and the +#' American Exchange. MOBIL monthly returns of Mobile. PANAM monthly returns +#' of Pan American Airways. PSNH monthly returns of Public Service of New +#' Hampshire. TANDY monthly returns of Tandy. TEXACO monthly returns of +#' Texaco. WEYER monthly returns of Weyerhauser. RKFREE monthly returns on +#' 30-day U.S. Treasury bills. +#' +#' @name stat.fm.data +#' @aliases sfm.dat sfm.apca.dat +#' @docType data +#' @references Berndt, E. R. (1991). The Practice of Econometrics: Classic and +#' Contemporary. Addison-Wesley Publishing Co. +#' @source S+FinMetrics Berndt.dat & folio.dat +#' @keywords datasets +NULL + + + + + +#' constructed NYSE 447 assets from 1996-01-01 through 2003-12-31. +#' +#' constructed NYSE 447 assets from 1996-01-01 through 2003-12-31. 
+#'
+#' Continuous data: PRICE, RETURN, VOLUME, SHARES.OUT, MARKET.EQUITY, LTDEBT,
+#' NET.SALES, COMMON.EQUITY, NET.INCOME, STOCKHOLDERS.EQUITY, LOG.MARKETCAP,
+#' LOG.PRICE, BOOK2MARKET. Categorical data: GICS, GICS.INDUSTRY, GICS.SECTOR.
+#' Identification data: DATE, PERMNO, TICKER.x
+#'
+#' @name stock
+#' @docType data
+#' @references Guy Yullen and Yi-An Chen
+#' @keywords datasets
+NULL
+
+
+

Modified: pkg/FactorAnalytics/man/fitMacroeconomicFactorModel.Rd
===================================================================
--- pkg/FactorAnalytics/man/fitMacroeconomicFactorModel.Rd	2013-06-18 18:18:01 UTC (rev 2355)
+++ pkg/FactorAnalytics/man/fitMacroeconomicFactorModel.Rd	2013-06-18 18:52:55 UTC (rev 2356)
@@ -2,27 +2,25 @@
 \alias{fitMacroeconomicFactorModel}
 \title{Fit macroeconomic factor model by time series regression techniques.}
 \usage{
-  fitMacroeconomicFactorModel(ret.assets, factors,
-    factor.set = 2, fit.method = c("OLS", "DLS", "Robust"),
+  fitMacroeconomicFactorModel(assets.names, factors.names,
+    data, factor.set = 3,
+    fit.method = c("OLS", "DLS", "Robust"),
     variable.selection = c("stepwise", "all subsets", "lar", "lasso"),
     decay.factor = 0.95, nvmax = 8, force.in = NULL,
     subsets.method = c("exhaustive", "backward", "forward", "seqrep"),
     lars.criteria = c("Cp", "cv"))
 }
 \arguments{
-  \item{ret.assets}{N x T Numerical returns data,
-  univariate or multivariate, where N is the number of
-  assets return and T is the length of time period. data
-  has to be saved as class "data.frame" so that lm function
-  can be used and must have column names.}
+  \item{assets.names}{names of assets returns.}
 
-  \item{factors}{K x T Numerical factors data, where K is
-  the number of factors and T is the length of the time
-  period. Data has to be saved as class "data.frame" so
-  that lm function can be used and must have column names.}
 
   \item{factor.set}{scalar, number of factors}
 
+  \item{data}{a vector, matrix, data.frame, xts, timeSeries
+  or zoo object with asset returns and factors returns
+  rownames}
+
   \item{fit.method}{"OLS" is ordinary least squares method,
   "DLS" is discounted least squares method. Discounted
   least squares (DLS) estimation is weighted least squares
@@ -59,7 +57,16 @@
   cross-validation. See detail.}
 }
 \value{
-  an S3 object containing
+  an S3 object containing \item{asset.fit}{Fit objects for
+  each asset. This is the class "lm" for each object.}
+  \item{alpha.vec}{N x 1 Vector of estimated alphas.}
+  \item{beta.mat}{N x K Matrix of estimated betas.}
+  \item{r2.vec}{N x 1 Vector of R-square values.}
+  \item{residVars.vec}{N x 1 Vector of residual variances.}
+  \item{call}{function call.} \item{ret.assets}{Assets
+  returns of input data.} \item{factors}{Factors of input
+  data.} \item{variable.selection}{variables selected by the
+  user.}
 }
 \description{
   Fit macroeconomic factor model by time series regression
@@ -72,6 +79,7 @@
   p17.
 }
 \examples{
+\dontrun{
 # load data from the database
 data(managers.df)
 ret.assets = managers.df[,(1:6)]
@@ -87,6 +95,7 @@
 colnames(dataToPlot) = c("Fitted","Actual")
 chart.TimeSeries(dataToPlot, main="FM fit for HAM1",
 colorset=c("black","blue"), legend.loc="bottomleft")
+ }
 }
 \author{
   Eric Zivot and Yi-An Chen.
}
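Restated against the updated usage block above, the \dontrun example would
become roughly the following. This is a sketch inferred from the new signature
(assets in the first six columns of managers.df, factors in columns 7 to 9, as
in the commented-out lines of the function), not code taken from the commit:

data(managers.df)
fit <- fitMacroeconomicFactorModel(assets.names = colnames(managers.df)[1:6],
                                   factors.names = colnames(managers.df)[7:9],
                                   data = managers.df,
                                   fit.method = "OLS",
                                   variable.selection = "all subsets")
# per-asset "lm" fits live in fit$asset.fit, as documented in the value section
summary(fit$asset.fit$HAM1)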
From noreply at r-forge.r-project.org Tue Jun 18 22:45:00 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Tue, 18 Jun 2013 22:45:00 +0200 (CEST) Subject: [Returnanalytics-commits] r2357 - pkg/PerformanceAnalytics/sandbox/pulkit Message-ID: <20130618204501.0A052184E2D@r-forge.r-project.org> Author: pulkit Date: 2013-06-18 22:45:00 +0200 (Tue, 18 Jun 2013) New Revision: 2357 Modified: pkg/PerformanceAnalytics/sandbox/pulkit/PSRopt.R Log: Documentation and some bug fixes Modified: pkg/PerformanceAnalytics/sandbox/pulkit/PSRopt.R =================================================================== --- pkg/PerformanceAnalytics/sandbox/pulkit/PSRopt.R 2013-06-18 18:52:55 UTC (rev 2356) +++ pkg/PerformanceAnalytics/sandbox/pulkit/PSRopt.R 2013-06-18 20:45:00 UTC (rev 2357) @@ -1,3 +1,11 @@ +#'@title Implementation of PSR Portfolio Optimization +#'@description +#'Maximizing for PSR leads to better diversified and more balanced hedge fund allocations compared to the concentrated outcomes of Sharpe ratio maximization.We would like to find the vector of weights that maximize the expression.Gradient Ascent Logic is used to compute the weights using the Function PsrPortfolio +#'@param R The return series +#'@param bounds The bounds for the weights +#'@param MaxIter The Maximum number of iterations +#'@param delta The value of delta Z + PsrPortfolio<-function(R,bounds=NULL,MaxIter = 1000,delta = 0.05){ x = checkData(R) @@ -5,34 +13,54 @@ n = nrow(x) columnnames = colnames(x) - weights = rep(1,columns)/columns + weights = matrix((rep(1,columns)/columns),ncol = 1) if(is.null(bounds)){ bounds = matrix(rep(c(0,1),columns),nrow = columns,byrow = TRUE) } + d1z = NULL - - - + #Optimization Function optimize<-function(){ - - mean = NULL - for(column in 1:columns){ - mean = c(mean,get_Moments(x[,column],1)) + mean = NULL + for(column in 1:columns){ + mean = c(mean,get_Moments(x[,column],1)) + } + while(TRUE){ + if(iter == MaxIter) break + c(d1z_new,z_new) = get_d1zs(mean,weights) + if(z_new>z & checkBounds(weights)==TRUE){ + z = z_new + d1z = d1z_new + } + iter = iter + 1 + weights = stepSize(weights,d1z) + if(is.null(weights)) return + } + return } - while(TRUE){ - if(iter == MaxIter) break - c(d1z,z) = get_d1zs(mean,weights) - + # To Check the bounds of the weights + checkBounds<-function(weights){ + + flag = TRUE + for(i in 1:columns){ + if(weights[i,0]bounds[i,1]) flag = TRUE + } + return(flag) } + #Calculate the step size to change the weights + stepSize<-function(weights,d1Z){ + if(length(which(d1Z==0)) == 0){ + return(NULL) + } + weights[which(d1Z==max(d1Z)),0] = weights[which(d1Z==max(d1Z)),0]+delta/max(d1Z) + weights = weights/sum(weights) + return(weights) - checkBounds<-function(w){ } - - stepSize<-function(w,d1Z){ - } - + #To get the first differentials get_d1Zs(mean,w){ d1Z = rep(0,columns) m = NULL @@ -46,41 +74,73 @@ for(i in 1:columns){ d1Z[i] = get_d1Z(stats,m,meanSR,sigmaSR,mean,weights,index) } + return(d1Z,meanSR/sigmaSR) } - get_d1Z<-function(stats,m,meanSR,sigmaSR,mean,w,index){ + get_d1Z<-function(stats,m,meanSR,sigmaSR,mean,weights,index){ d1Mu = get_d1Mu(mean,index) - - - + d1Sigma = get_d1Sigma(stats[1],mean,weights,index) + d1Skew = get_d1Skew(d1Sigma,stats[1],mean,weights,index,m[2]) + d1Kurt = get_d1Kurt(d1Sigma,stats[1],mean,weights,index,m[3]) + d1meanSR = (d1Mu*stats[1]-d1Sigma*stats[0])/stats[1]^2 + d1sigmaSR = (d1Kurt * meanSR^2+2*meanSR*d1meanSR*(stats[3]-1))/4 + d1sigmaSR = d1sigmaSR - d1Skew*meanSR+d1meanSR*stats[2] + d1sigmaSR = 
(d1sigmaSR/2)*sigmaSR*(n-1) + d1Z = (d1meanSR*sigmaSR-d1sigmaSR*meanSR)/sigmaSR^2 + return(d1Z) } get_d1Mu<-function(mean,index){ + return(mean[index]) } - get_d1Sigma<-function(sigma,mean,w,index){ + get_d1Sigma<-function(sigma,mean,weights,index){ + return(get_dnMoments(mean,weights,2,1,index)/(2*sigma)) } get_d1Skew<-function(d1Sigma,sigma,mean,w,index,m3){ + d1Skew = get_dnMoments(mean,weights,3,1,index)*sigma^3 + d1Skew = d1Skew - 3*sigma^2*d1Sigma*m3 + d1Skew = d1Skew/sigma^6 + return(d1Skew) } get_d1Kurt<-function(d1Sigma,sigma,mean,w,index,m4){ + d1Kurt = get_dnMoments(mean,w,4,1,index)*sigma^4 + d1Kurt = d1Kurt - 4*sigma^3*d1Sigma*m4 + d1Kurt = d1Kurt/sigma^8 + return(d1Kurt) } get_dnMoments<-function(mean,w,mOrder,dOrder,index){ + sum = 0 + x0 = 1 + for(i in 1:dOrder){ + x0 = x0*(mOrder-i) + } + for(i in 1:n){ + x1 = 0 + x2 = (x[i,index]-mean[index])^dOrder + for(j in 1:column){ + x1 = x1 + weights[j,0]*(x[i,j]-mean[j]) + } + sum = sum + x2*x1^(mOrder-dOrder) + } + return(x0*sum/n) } + # TO get meanSR and sigmaSR get_SR<-function(stats,n){ meanSR = stats[0]/stats[1] sigmaSR = ((1-meanSR*stats[2]+(meanSR^2)*(stats[3]-1)/4)/(n-1))^0.5 return(meanSR,sigmaSR) } - + #To calculate the Stats(mu , sigma , skewness and kurtosis) get_Stats<-function(m){ stats = c(m[0],m[1]^(0.5),(m[2]/m[1])^(3/2),(m[3]/m[1])^(0.5)) return(stats) } - + # TO calculate the moments get_Moments<-function(x,order,mean = 0){ sum = 0 @@ -91,6 +151,8 @@ return(moment) } +optimize() +return(weights) } From noreply at r-forge.r-project.org Wed Jun 19 01:14:15 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 19 Jun 2013 01:14:15 +0200 (CEST) Subject: [Returnanalytics-commits] r2358 - pkg/PortfolioAnalytics/R Message-ID: <20130618231415.BBDF4185307@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-19 01:14:14 +0200 (Wed, 19 Jun 2013) New Revision: 2358 Modified: pkg/PortfolioAnalytics/R/constraints.R Log: Modified constraint_v2 to be a simpler constructor and remove specifying assets in constraints. Assets can be specified in portfolio constructor Modified: pkg/PortfolioAnalytics/R/constraints.R =================================================================== --- pkg/PortfolioAnalytics/R/constraints.R 2013-06-18 20:45:00 UTC (rev 2357) +++ pkg/PortfolioAnalytics/R/constraints.R 2013-06-18 23:14:14 UTC (rev 2358) @@ -151,67 +151,87 @@ )) } -#' constructor for class constraint_v2 -#' +# #' constructor for class constraint_v2 +# #' +# #' @param assets number of assets, or optionally a named vector of assets specifying seed weights +# #' @param ... any other passthru parameters +# #' @param min_sum minimum sum of all asset weights, default .99 +# #' @param max_sum maximum sum of all asset weights, default 1.01 +# #' @param weight_seq seed sequence of weights, see \code{\link{generatesequence}} +# #' @author Peter Carl, Brian G. 
Peterson, and Ross Bennett +# #' @examples +# #' exconstr <- constraint_v2(assets=10, min_sum=1, max_sum=1, weight_seq=generatesequence()) +# #' @export +# constraint_v2 <- function(assets=NULL, ..., min_sum=.99, max_sum=1.01, weight_seq=NULL) { +# # based on GPL R-Forge pkg roi by Stefan Thuessel,Kurt Hornik,David Meyer +# # constraint_v2 is based on the constraint_v1 object, but removes box +# # constraint specification +# if (is.null(assets)) { +# stop("You must specify the assets") +# } +# +# if(!is.null(assets)){ +# # TODO FIXME this doesn't work quite right on matrix of assets +# if(is.numeric(assets)){ +# if (length(assets) == 1) { +# nassets = assets +# # we passed in a number of assets, so we need to create the vector +# message("assuming equal weighted seed portfolio") +# assets <- rep(1 / nassets, nassets) +# } else { +# nassets = length(assets) +# } +# # and now we may need to name them +# if (is.null(names(assets))) { +# for(i in 1:length(assets)){ +# names(assets)[i] <- paste("Asset",i,sep=".") +# } +# } +# } +# if(is.character(assets)){ +# nassets = length(assets) +# assetnames = assets +# message("assuming equal weighted seed portfolio") +# assets <- rep(1 / nassets, nassets) +# names(assets) <- assetnames # set names, so that other code can access it, +# # and doesn't have to know about the character vector +# # print(assets) +# } +# # if assets is a named vector, we'll assume it is current weights +# } +# +# ## now structure and return +# return(structure( +# list( +# assets = assets, +# min_sum = min_sum, +# max_sum = max_sum, +# weight_seq = weight_seq, +# objectives = list(), +# call = match.call() +# ), +# class=c("v2_constraint","constraint") +# )) +# } + +#' constructor for class v2_constraint +#' @param type character type of the constraint to add or update, currently 'weight_sum', 'box', or 'group' #' @param assets number of assets, or optionally a named vector of assets specifying seed weights #' @param ... any other passthru parameters -#' @param min_sum minimum sum of all asset weights, default .99 -#' @param max_sum maximum sum of all asset weights, default 1.01 -#' @param weight_seq seed sequence of weights, see \code{\link{generatesequence}} -#' @author Peter Carl, Brian G. 
Peterson, and Ross Bennett -#' @examples -#' exconstr <- constraint_v2(assets=10, min_sum=1, max_sum=1, weight_seq=generatesequence()) +#' @param constrclass character to name the constraint class +#' @author Ross Bennett #' @export -constraint_v2 <- function(assets=NULL, ..., min_sum=.99, max_sum=1.01, weight_seq=NULL) { - # based on GPL R-Forge pkg roi by Stefan Thuessel,Kurt Hornik,David Meyer - # constraint_v2 is based on the constraint_v1 object, but removes box - # constraint specification - if (is.null(assets)) { - stop("You must specify the assets") - } +constraint_v2 <- function(type, enabled=FALSE, ..., constrclass="constraint"){ + if(!hasArg(type)) stop("you must specify a constraint type") + if (hasArg(type)) if(is.null(type)) stop("you must specify a constraint type") - if(!is.null(assets)){ - # TODO FIXME this doesn't work quite right on matrix of assets - if(is.numeric(assets)){ - if (length(assets) == 1) { - nassets = assets - # we passed in a number of assets, so we need to create the vector - message("assuming equal weighted seed portfolio") - assets <- rep(1 / nassets, nassets) - } else { - nassets = length(assets) - } - # and now we may need to name them - if (is.null(names(assets))) { - for(i in 1:length(assets)){ - names(assets)[i] <- paste("Asset",i,sep=".") - } - } - } - if(is.character(assets)){ - nassets = length(assets) - assetnames = assets - message("assuming equal weighted seed portfolio") - assets <- rep(1 / nassets, nassets) - names(assets) <- assetnames # set names, so that other code can access it, - # and doesn't have to know about the character vector - # print(assets) - } - # if assets is a named vector, we'll assume it is current weights - } - ## now structure and return - return(structure( - list( - assets = assets, - min_sum = min_sum, - max_sum = max_sum, - weight_seq = weight_seq, - objectives = list(), - call = match.call() - ), - class=c("v2_constraint","constraint") - )) + return(structure( c(list(type = type, + enabled=enabled), + list(...)), + class=constrclass + ) # end structure + ) } #' General interface for adding and/or updating optimization constraints, currently supports box and group constraints. From noreply at r-forge.r-project.org Wed Jun 19 01:25:46 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 19 Jun 2013 01:25:46 +0200 (CEST) Subject: [Returnanalytics-commits] r2359 - pkg/PortfolioAnalytics/R Message-ID: <20130618232546.7FCE6185642@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-19 01:25:45 +0200 (Wed, 19 Jun 2013) New Revision: 2359 Modified: pkg/PortfolioAnalytics/R/constraints.R Log: modified add.constraint to add a constraint object to the portfolio object. 
Fixed typo on constraint_v2 Modified: pkg/PortfolioAnalytics/R/constraints.R =================================================================== --- pkg/PortfolioAnalytics/R/constraints.R 2013-06-18 23:14:14 UTC (rev 2358) +++ pkg/PortfolioAnalytics/R/constraints.R 2013-06-18 23:25:45 UTC (rev 2359) @@ -221,7 +221,7 @@ #' @param constrclass character to name the constraint class #' @author Ross Bennett #' @export -constraint_v2 <- function(type, enabled=FALSE, ..., constrclass="constraint"){ +constraint_v2 <- function(type, enabled=FALSE, ..., constrclass="v2_constraint"){ if(!hasArg(type)) stop("you must specify a constraint type") if (hasArg(type)) if(is.null(type)) stop("you must specify a constraint type") @@ -234,50 +234,55 @@ ) } -#' General interface for adding and/or updating optimization constraints, currently supports box and group constraints. +#' General interface for adding and/or updating optimization constraints, currently supports weight, box and group constraints. #' -#' This is the main function for adding and/or updating constraints in an object of type \code{\link{constraint}}. +#' This is the main function for adding and/or updating constraints in an object of type \code{\link{portfolio}}. #' -#' In general, you will define your constraints as one of two types: 'box' or 'group'. +#' In general, you will define your constraints as one of three types: 'weight_sum', 'box', or 'group'. #' -#' @param constraints an object of type "constraint" to add the constraint to, specifying the constraints for the optimization, see \code{\link{constraint_v2}} -#' @param type character type of the constraint to add or update, currently 'box' or 'group' +#' @param portfolio an object of class 'portfolio' to add the constraint to, specifying the constraints for the optimization, see \code{\link{portfolio.spec}} +#' @param type character type of the constraint to add or update, currently 'weight_sum', 'box', or 'group' +#' @param enabled TRUE/FALSE #' @param \dots any other passthru parameters to specify box and/or group constraints +#' @param indexnum if you are updating a specific constraint, the index number in the $objectives list to update #' @author Ross Bennett -#' #' @seealso \code{\link{constraint}} -#' -#' @examples -#' exconstr <- constraint_v2(assets=10, min_sum=1, max_sum=1) -#' # Add box constraints with a minimum weight of 0.1 and maximum weight of 0.4 -#' exconstr <- add.constraint(exconstr, type="box", min=0.1, max=0.4) #' @export -add.constraint <- function(constraints, type, ...){ - # Check to make sure that the constraints passed in is a constraints object - if (!is.constraint(constraints)) {stop("constraints passed in are not of class constraint")} +add.constraint <- function(portfolio, type, enabled=FALSE, ..., indexnum=NULL){ + # Check to make sure that the portfolio passed in is a portfolio object + if (!is.portfolio(portfolio)) {stop("portfolio passed in is not of class portfolio")} # Check to make sure a type is passed in as an argument if (!hasArg(type)) stop("you must supply a type of constraints to create") - assets <- constraints$assets + assets <- portfolio$assets tmp_constraint = NULL # Currently supports box and group constraints. Will add more later. switch(type, # Box constraints - box = {tmp_constraint <- box_constraint(assets, ...=...) - constraints$min <- tmp_constraint$min - constraints$max <- tmp_constraint$max + box = {tmp_constraint <- box_constraint(assets=assets, + type=type, + ...=...) 
}, # Group constraints - group = {tmp_constraint <- group_constraint(assets, ...=...) - constraints$groups <- tmp_constraint$groups - constraints$cLO <- tmp_constraint$cLO - constraints$cUP <- tmp_constraint$cUP + group = {tmp_constraint <- group_constraint(assets=assets, + type=type, + ...=...) }, - null = {return(constraints)} + # Sum of weights constraints + weight=, weight_sum = {tmp_constraint <- weight_sum_constraint(type=type, + ...=...) + }, + # Do nothing and return the portfolio object if type is NULL + null = {return(portfolio)} ) - return(constraints) + if(is.constraint(tmp_constraint)) { + if(!hasArg(indexnum) | (hasArg(indexnum) & is.null(indexnum))) indexnum <- length(portfolio$constraints)+1 + tmp_constraint$call <- match.call() + portfolio$constraints[[indexnum]] <- tmp_constraint + } + return(portfolio) } #' constructor for box_constraint. From noreply at r-forge.r-project.org Wed Jun 19 01:32:21 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 19 Jun 2013 01:32:21 +0200 (CEST) Subject: [Returnanalytics-commits] r2360 - pkg/PortfolioAnalytics/R Message-ID: <20130618233221.60F9E185859@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-19 01:32:20 +0200 (Wed, 19 Jun 2013) New Revision: 2360 Modified: pkg/PortfolioAnalytics/R/constraints.R Log: modified box_constraint to use the constraint_v2 constructor and return a constraint object Modified: pkg/PortfolioAnalytics/R/constraints.R =================================================================== --- pkg/PortfolioAnalytics/R/constraints.R 2013-06-18 23:25:45 UTC (rev 2359) +++ pkg/PortfolioAnalytics/R/constraints.R 2013-06-18 23:32:20 UTC (rev 2360) @@ -288,16 +288,19 @@ #' constructor for box_constraint. #' #' This function is called by add.constraint when type="box" is specified. see \code{\link{add.constraint}} -#' +#' +#' @param type character type of the constraint #' @param assets number of assets, or optionally a named vector of assets specifying seed weights #' @param min numeric or named vector specifying minimum weight box constraints #' @param max numeric or named vector specifying minimum weight box constraints #' @param min_mult numeric or named vector specifying minimum multiplier box constraint from seed weight in \code{assets} #' @param max_mult numeric or named vector specifying maximum multiplier box constraint from seed weight in \code{assets} +#' @param enabled TRUE/FALSE +#' @param \dots any other passthru parameters to specify box and/or group constraints #' @author Ross Bennett #' @seealso \code{\link{add.constraint}} #' @export -box_constraint <- function(assets, min, max, min_mult, max_mult){ +box_constraint <- function(type, assets, min, max, min_mult, max_mult, enabled=FALSE, ...){ # Based on the constraint function for object of class constraint_v1 that # included specifying box constraints. @@ -376,7 +379,10 @@ max[which(tmp_max < max)] <- tmp_max[which(tmp_max < max)] } - return(list(min=min, max=max)) + Constraint <- constraint_v2(type=type, ...) 
+ Constraint$min <- min + Constraint$max <- max + return(Constraint) } #' constructor for group_constraint From noreply at r-forge.r-project.org Wed Jun 19 01:45:38 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 19 Jun 2013 01:45:38 +0200 (CEST) Subject: [Returnanalytics-commits] r2361 - pkg/PortfolioAnalytics/R Message-ID: <20130618234539.37E4D1852CF@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-19 01:45:38 +0200 (Wed, 19 Jun 2013) New Revision: 2361 Modified: pkg/PortfolioAnalytics/R/constraints.R Log: modified group_constraint to use the constraint_v2 constructor and return a constraint object Modified: pkg/PortfolioAnalytics/R/constraints.R =================================================================== --- pkg/PortfolioAnalytics/R/constraints.R 2013-06-18 23:32:20 UTC (rev 2360) +++ pkg/PortfolioAnalytics/R/constraints.R 2013-06-18 23:45:38 UTC (rev 2361) @@ -388,15 +388,18 @@ #' constructor for group_constraint #' #' This function is called by add.constraint when type="group" is specified. see \code{\link{add.constraint}} -#' +#' +#' @param type character type of the constraint #' @param assets number of assets, or optionally a named vector of assets specifying seed weights #' @param groups vector specifying the groups of the assets #' @param group_min numeric or vector specifying minimum weight group constraints #' @param group_max numeric or vector specifying minimum weight group constraints +#' @param enabled TRUE/FALSE +#' @param \dots any other passthru parameters to specify box and/or group constraints #' @author Ross Bennett #' @seealso \code{\link{add.constraint}} #' @export -group_constraint <- function(assets, groups, group_min, group_max) { +group_constraint <- function(type, assets, groups, group_min, group_max, enabled=FALSE, ...) { nassets <- length(assets) ngroups <- length(groups) @@ -418,7 +421,11 @@ } if (length(group_max) != ngroups) stop(paste("length of group_max must be equal to 1 or the length of groups:", ngroups)) - return(list(groups=groups, cLO=group_min, cUP=group_max)) + Constraint <- constraint_v2(type, ...) + Constraint$groups <- groups + Constraint$cLO <- group_min + Constraint$cUP <- group_max + return(Constraint) } From noreply at r-forge.r-project.org Wed Jun 19 01:48:14 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 19 Jun 2013 01:48:14 +0200 (CEST) Subject: [Returnanalytics-commits] r2362 - pkg/PortfolioAnalytics/R Message-ID: <20130618234814.9A867184D31@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-19 01:48:13 +0200 (Wed, 19 Jun 2013) New Revision: 2362 Modified: pkg/PortfolioAnalytics/R/constraints.R Log: adding weight_sum_constraint function Modified: pkg/PortfolioAnalytics/R/constraints.R =================================================================== --- pkg/PortfolioAnalytics/R/constraints.R 2013-06-18 23:45:38 UTC (rev 2361) +++ pkg/PortfolioAnalytics/R/constraints.R 2013-06-18 23:48:13 UTC (rev 2362) @@ -428,6 +428,24 @@ return(Constraint) } +#' constructor for weight_sum_constraint +#' +#' This function is called by add.constraint when type="weight_sum" is specified. 
see \code{\link{add.constraint}} +#' This function allows the user to specify the minimum and maximum that the weights sum to +#' +#' @param type character type of the constraint +#' @param min_sum minimum sum of all asset weights, default 0.99 +#' @param max_sum maximum sum of all asset weights, default 1.01 +#' @param enabled TRUE/FALSE +#' @param \dots any other passthru parameters to specify box and/or group constraints +#' @author Ross Bennett +#' @export +weight_sum_constraint <- function(type, min_sum=0.99, max_sum=1.01, enabled=FALSE, ...){ + Constraint <- constraint_v2(type, ...) + Constraint$min_sum <- min_sum + Constraint$max_sum <- max_sum + return(Constraint) +} #' check function for constraints #' From noreply at r-forge.r-project.org Wed Jun 19 01:52:24 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 19 Jun 2013 01:52:24 +0200 (CEST) Subject: [Returnanalytics-commits] r2363 - pkg/PortfolioAnalytics/R Message-ID: <20130618235224.F183F184D31@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-19 01:52:24 +0200 (Wed, 19 Jun 2013) New Revision: 2363 Added: pkg/PortfolioAnalytics/R/portfolio.R Log: adding portfolio.R file with portfolio.spec and is.portfolio functions Added: pkg/PortfolioAnalytics/R/portfolio.R =================================================================== --- pkg/PortfolioAnalytics/R/portfolio.R (rev 0) +++ pkg/PortfolioAnalytics/R/portfolio.R 2013-06-18 23:52:24 UTC (rev 2363) @@ -0,0 +1,78 @@ +############################################################################### +# R (http://r-project.org/) Numeric Methods for Optimization of Portfolios +# +# Copyright (c) 2004-2012 Kris Boudt, Peter Carl and Brian G. Peterson +# +# This library is distributed under the terms of the GNU Public License (GPL) +# for full details see the file COPYING +# +# $Id: constraints.R 2362 2013-06-18 23:48:13Z rossbennett34 $ +# +############################################################################### + +#' constructor for class portfolio +#' +#' @param assets number of assets, or optionally a named vector of assets specifying seed weights. If seed weights are not specified, an equal weight portfolio will be assumed. 
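The @param entry above compresses several behaviors, so here is a short sketch of the three input forms that assets accepts in the constructor below; the ticker names are hypothetical:

p1 <- portfolio.spec(assets=4)                      # scalar: equal weights rep(1/4, 4), named Asset.1 .. Asset.4
p2 <- portfolio.spec(assets=c("SPY", "AGG"))        # character vector: equal weights with the given names
p3 <- portfolio.spec(assets=c(SPY=0.6, AGG=0.4))    # named numeric vector: taken as the seed weights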
+#' @param weight_seq seed sequence of weights, see \code{\link{generatesequence}} +#' @author Ross Bennett +#' @examples +#' pspec <- portfolio.spec(assets=10, weight_seq=generatesequence()) +#' @export +portfolio.spec <- function(assets=NULL, weight_seq=NULL) { + # portfolio.spec is based on the v1_constraint object, but removes + # constraint specification + if (is.null(assets)) { + stop("You must specify the assets") + } + + if(!is.null(assets)){ + # TODO FIXME this doesn't work quite right on matrix of assets + if(is.numeric(assets)){ + if (length(assets) == 1) { + nassets = assets + # we passed in a number of assets, so we need to create the vector + message("assuming equal weighted seed portfolio") + assets <- rep(1 / nassets, nassets) + } else { + nassets = length(assets) + } + # and now we may need to name them + if (is.null(names(assets))) { + for(i in 1:length(assets)){ + names(assets)[i] <- paste("Asset",i,sep=".") + } + } + } + if(is.character(assets)){ + nassets = length(assets) + assetnames = assets + message("assuming equal weighted seed portfolio") + assets <- rep(1 / nassets, nassets) + names(assets) <- assetnames # set names, so that other code can access it, + # and doesn't have to know about the character vector + # print(assets) + } + # if assets is a named vector, we'll assume it is current weights + } + + ## now structure and return + return(structure( + list( + assets = assets, + weight_seq = weight_seq, + constraints = list(), + objectives = list(), + call = match.call() + ), + class=c("portfolio.spec","portfolio") + )) +} + +#' check function for portfolio +#' +#' @param x object to test for type \code{portfolio} +#' @author Ross Bennett +#' @export +is.portfolio <- function( x ) { + inherits( x, "portfolio" ) +} \ No newline at end of file From noreply at r-forge.r-project.org Wed Jun 19 02:01:40 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 19 Jun 2013 02:01:40 +0200 (CEST) Subject: [Returnanalytics-commits] r2364 - pkg/PortfolioAnalytics/R Message-ID: <20130619000141.39BC2184D31@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-19 02:01:40 +0200 (Wed, 19 Jun 2013) New Revision: 2364 Modified: pkg/PortfolioAnalytics/R/objective.R Log: adding add.objective_v2 to add objectives to the portfolio object Modified: pkg/PortfolioAnalytics/R/objective.R =================================================================== --- pkg/PortfolioAnalytics/R/objective.R 2013-06-18 23:52:24 UTC (rev 2363) +++ pkg/PortfolioAnalytics/R/objective.R 2013-06-19 00:01:40 UTC (rev 2364) @@ -120,6 +120,79 @@ return(constraints) } +#' General interface for adding optimization objectives, including risk, return, and risk budget +#' +#' This function is the main function for adding and updating business objectives in an object of type \code{\link{portfolio}}. +#' +#' In general, you will define your objective as one of three types: 'return', 'risk', or 'risk_budget'. +#' These have special handling and intelligent defaults for dealing with the function most likely to be +#' used as objectives, including mean, median, VaR, ES, etc. 
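A sketch of the append-then-update pattern this interface supports, reusing the pspec object built in the r2369 demo script later in this digest; the arguments list shown is illustrative, not a documented default:

pspec <- add.objective_v2(pspec, type="risk", name="CVaR")      # appended at $objectives[[length + 1]]
pspec <- add.objective_v2(pspec, type="risk", name="CVaR",
                          arguments=list(p=0.95), indexnum=1)   # indexnum overwrites slot 1 in place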
+#' +#' @param portfolio an object of type 'portfolio' to add the objective to, specifying the portfolio for the optimization, see \code{\link{portfolio}} +#' @param type character type of the objective to add or update, currently 'return','risk', or 'risk_budget' +#' @param name name of the objective, should correspond to a function, though we will try to make allowances +#' @param arguments default arguments to be passed to an objective function when executed +#' @param enabled TRUE/FALSE +#' @param \dots any other passthru parameters +#' @param indexnum if you are updating a specific constraint, the index number in the $objectives list to update +#' @author Brian G. Peterson and Ross Bennett +#' +#' @seealso \code{\link{objective}} +#' +#' @export +add.objective_v2 <- function(portfolio, type, name, arguments=NULL, enabled=FALSE, ..., indexnum=NULL){ + # This function is based on the original add.objective function, but modified + # to add objectives to a portfolio object instead of a constraint object. + if (!is.portfolio(portfolio)) {stop("portfolio passed in is not of class portfolio")} + + if (!hasArg(name)) stop("you must supply a name for the objective") + if (!hasArg(type)) stop("you must supply a type of objective to create") + if (!hasArg(enabled)) enabled=FALSE + if (!hasArg(arguments) | is.null(arguments)) arguments<-list() + if (!is.list(arguments)) stop("arguments must be passed as a named list") + + assets=portfolio$assets + + tmp_objective=NULL + + switch(type, + return=, return_objective= + {tmp_objective = return_objective(name=name, + enabled=enabled, + arguments=arguments, + ... = ... + ) + }, + + risk=, portfolio_risk=, portfolio_risk_objective = + {tmp_objective = portfolio_risk_objective(name=name, + enabled=enabled, + arguments=arguments, + ...=... + ) + }, + + risk_budget=, risk_budget_objective= + {tmp_objective = risk_budget_objective(assets=portfolio$assets, + name=name, + enabled=enabled, + arguments=arguments, + ...=... + ) + }, + + null = + {return(portfolio)} # got nothing, default to simply returning + ) # end objective type switch + + if(is.objective(tmp_objective)) { + if(!hasArg(indexnum) | (hasArg(indexnum) & is.null(indexnum))) indexnum = length(portfolio$objectives)+1 + tmp_objective$call <- match.call() + portfolio$objectives[[indexnum]] <- tmp_objective + } + return(portfolio) +} + # update.objective <- function(object, ...) { # # here we do a bunch of magic to update the correct index'd objective # From noreply at r-forge.r-project.org Wed Jun 19 02:02:08 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 19 Jun 2013 02:02:08 +0200 (CEST) Subject: [Returnanalytics-commits] r2365 - pkg/FactorAnalytics/R Message-ID: <20130619000209.46221184D31@r-forge.r-project.org> Author: chenyian Date: 2013-06-19 02:02:07 +0200 (Wed, 19 Jun 2013) New Revision: 2365 Modified: pkg/FactorAnalytics/R/fitMacroeconomicFactorModel.R Log: debug: if variable.selection is not specified. default will be "none". Modified: pkg/FactorAnalytics/R/fitMacroeconomicFactorModel.R =================================================================== --- pkg/FactorAnalytics/R/fitMacroeconomicFactorModel.R 2013-06-19 00:01:40 UTC (rev 2364) +++ pkg/FactorAnalytics/R/fitMacroeconomicFactorModel.R 2013-06-19 00:02:07 UTC (rev 2365) @@ -9,15 +9,15 @@ #' #' @param assets.names names of assets returns. #' @param factors.names names of factors returns. -#' @param factor.set scalar, number of factors +#' @param num.factor.subset scalar. 
Number of factors selected by all subsets. #' @param data a vector, matrix, data.frame, xts, timeSeries or zoo object with asset returns #' and factors retunrs rownames #' @param fit.method "OLS" is ordinary least squares method, "DLS" is #' discounted least squares method. Discounted least squares (DLS) estimation #' is weighted least squares estimation with exponentially declining weights #' that sum to unity. "Robust" -#' @param variable.selection "stepwise" is traditional forward/backward -#' stepwise OLS regression, starting from the initial set of factors, that adds +#' @param variable.selection "none" will not activate variables sellection. Default is "none". +#' "stepwise" is traditional forward/backward #' stepwise OLS regression, starting from the initial set of factors, that adds #' factors only if the regression fit as measured by the Bayesian Information #' Criteria (BIC) or Akaike Information Criteria (AIC) can be done using the R #' function step() from the stats package. If \code{Robust} is chosen, the @@ -70,9 +70,9 @@ #' colorset=c("black","blue"), legend.loc="bottomleft") #' } fitMacroeconomicFactorModel <- -function(assets.names, factors.names, data=data, factor.set = 3, +function(assets.names, factors.names, data=data, num.factor.subset = 1, fit.method=c("OLS","DLS","Robust"), - variable.selection=c("stepwise", "all subsets", "lar", "lasso"), + variable.selection="none", decay.factor = 0.95,nvmax=8,force.in=NULL, subsets.method = c("exhaustive", "backward", "forward", "seqrep"), lars.criteria = c("Cp","cv")) { @@ -86,22 +86,9 @@ # convert data into xts and hereafter compute in xts data.xts <- checkData(data) - #assets.names <- colnames(data.xts)[1:6] # erase later - #factors.names <- colnames(data.xts)[7:9] reg.xts <- merge(data.xts[,assets.names],data.xts[,factors.names]) -# if (is.data.frame(ret.assets) & is.data.frame(factors) ) { -# assets.names = colnames(ret.assets) -# factors.names = colnames(factors) -# reg.xts = cbind(ret.assets,factors) -# } else { -# stop("ret.assets and beta.mat must be in class data.frame") -# } - - - - -# initialize list object to hold regression objects + # initialize list object to hold regression objects reg.list = list() @@ -116,18 +103,66 @@ rownames(Betas) = assets.names - - - - -if (variable.selection == "all subsets") { +if (variable.selection == "none") { + if (fit.method == "OLS") { + for (i in assets.names) { + reg.df = na.omit(reg.xts[, c(i, factors.names)]) + fm.formula = as.formula(paste(i,"~", ".", sep=" ")) + fm.fit = lm(fm.formula, data=reg.df) + fm.summary = summary(fm.fit) + reg.list[[i]] = fm.fit + Alphas[i] = coef(fm.fit)[1] + Betas.names = names(coef(fm.fit)[-1]) + Betas[i,Betas.names] = coef(fm.fit)[-1] + ResidVars[i] = fm.summary$sigma^2 + R2values[i] = fm.summary$r.squared + } + } else if (fit.method == "DLS") { + for (i in assets.names) { + reg.df = na.omit(reg.xts[, c(i, factors.names)]) + t.length <- nrow(reg.df) + w <- rep(decay.factor^(t.length-1),t.length) + for (k in 2:t.length) { + w[k] = w[k-1]/decay.factor + } + # sum weigth to unitary + w <- w/sum(w) + fm.formula = as.formula(paste(i,"~", ".", sep="")) + fm.fit = lm(fm.formula, data=reg.xts,weight=w) + fm.summary = summary(fm.fit) + reg.list[[i]] = fm.fit + Alphas[i] = coef(fm.fit)[1] + Betas.names = names(coef(fm.fit)[-1]) + Betas[i,Betas.names] = coef(fm.fit)[-1] + ResidVars[i] = fm.summary$sigma^2 + R2values[i] = fm.summary$r.squared + } + } else if (fit.method=="Robust") { + for (i in assets.names) { + reg.df = na.omit(reg.xts[, c(i, 
factors.names)]) + fm.formula = as.formula(paste(i,"~", ".", sep=" ")) + fm.fit = lmRob(fm.formula, data=reg.df) + fm.summary = summary(fm.fit) + reg.list[[i]] = fm.fit + Alphas[i] = coef(fm.fit)[1] + Betas[i, ] = coef(fm.fit)[-1] + ResidVars[i] = fm.summary$sigma^2 + R2values[i] = fm.summary$r.squared + } + + } else { + stop("invalid method") + } + + +} else if (variable.selection == "all subsets") { # estimate multiple factor model using loop b/c of unequal histories for the hedge funds if (fit.method == "OLS") { -if (factor.set == length(force.in)) { +if (num.factor.subset == length(force.in)) { for (i in assets.names) { reg.df = na.omit(reg.xts[, c(i, force.in)]) fm.formula = as.formula(paste(i,"~", ".", sep=" ")) @@ -140,7 +175,7 @@ ResidVars[i] = fm.summary$sigma^2 R2values[i] = fm.summary$r.squared } -} else if (factor.set > length(force.in)) { +} else if (num.factor.subset > length(force.in)) { for (i in assets.names) { reg.df = na.omit(reg.xts[, c(i, factors.names)]) @@ -148,7 +183,7 @@ fm.subsets <- regsubsets(fm.formula,data=reg.df,nvmax=nvmax,force.in=force.in, method=subsets.method) sum.sub <- summary(fm.subsets) - reg.df <- na.omit(reg.xts[,c(i,names(which(sum.sub$which[as.character(factor.set),-1]==TRUE)) )]) + reg.df <- na.omit(reg.xts[,c(i,names(which(sum.sub$which[as.character(num.factor.subset),-1]==TRUE)) )]) fm.fit = lm(fm.formula, data=reg.df) fm.summary = summary(fm.fit) reg.list[[i]] = fm.fit @@ -159,7 +194,7 @@ R2values[i] = fm.summary$r.squared } } else { - stop("ERROR! number of force.in should less or equal to factor.set") + stop("ERROR! number of force.in should less or equal to num.factor.subset") } @@ -168,7 +203,7 @@ } else if (fit.method == "DLS"){ - if (factor.set == length(force.in)) { + if (num.factor.subset == length(force.in)) { # define weight matrix for (i in assets.names) { reg.df = na.omit(reg.xts[, c(i, force.in)]) @@ -189,7 +224,7 @@ ResidVars[i] = fm.summary$sigma^2 R2values[i] = fm.summary$r.squared } -} else if (factor.set > length(force.in)) { +} else if (num.factor.subset > length(force.in)) { for (i in assets.names) { reg.df = na.omit(reg.xts[, c(i, factors.names)]) t.length <- nrow(reg.df) @@ -202,7 +237,7 @@ fm.subsets <- regsubsets(fm.formula,data=reg.df,nvmax=nvmax,force.in=force.in, method=subsets.method,weights=w) # w is called from global envio sum.sub <- summary(fm.subsets) - reg.df <- na.omit(reg.xts[,c(i,names(which(sum.sub$which[as.character(factor.set),-1]==TRUE)) )]) + reg.df <- na.omit(reg.xts[,c(i,names(which(sum.sub$which[as.character(num.factor.subset),-1]==TRUE)) )]) fm.fit = lm(fm.formula, data=reg.df,weight=w) fm.summary = summary(fm.fit) reg.list[[i]] = fm.fit @@ -213,7 +248,7 @@ R2values[i] = fm.summary$r.squared } } else { - stop("ERROR! number of force.in should less or equal to factor.set") + stop("ERROR! 
number of force.in should less or equal to num.factor.subset") } @@ -293,7 +328,7 @@ } -} else if (variable.selection == "lar" || variable.selection == "lasso") { +} else if (variable.selection == "lar" | variable.selection == "lasso") { # use min Cp as criteria to choose predictors for (i in assets.names) { From noreply at r-forge.r-project.org Wed Jun 19 02:03:55 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 19 Jun 2013 02:03:55 +0200 (CEST) Subject: [Returnanalytics-commits] r2366 - pkg/FactorAnalytics/R Message-ID: <20130619000356.42B4A184D31@r-forge.r-project.org> Author: chenyian Date: 2013-06-19 02:03:55 +0200 (Wed, 19 Jun 2013) New Revision: 2366 Modified: pkg/FactorAnalytics/R/fitMacroeconomicFactorModel.R Log: Debug : DLS method if variable.selection is "none". Modified: pkg/FactorAnalytics/R/fitMacroeconomicFactorModel.R =================================================================== --- pkg/FactorAnalytics/R/fitMacroeconomicFactorModel.R 2013-06-19 00:02:07 UTC (rev 2365) +++ pkg/FactorAnalytics/R/fitMacroeconomicFactorModel.R 2013-06-19 00:03:55 UTC (rev 2366) @@ -128,7 +128,7 @@ # sum weigth to unitary w <- w/sum(w) fm.formula = as.formula(paste(i,"~", ".", sep="")) - fm.fit = lm(fm.formula, data=reg.xts,weight=w) + fm.fit = lm(fm.formula, data=reg.df,weight=w) fm.summary = summary(fm.fit) reg.list[[i]] = fm.fit Alphas[i] = coef(fm.fit)[1] From noreply at r-forge.r-project.org Wed Jun 19 02:20:36 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 19 Jun 2013 02:20:36 +0200 (CEST) Subject: [Returnanalytics-commits] r2367 - pkg/PortfolioAnalytics/R Message-ID: <20130619002036.CAA95185107@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-19 02:20:36 +0200 (Wed, 19 Jun 2013) New Revision: 2367 Modified: pkg/PortfolioAnalytics/R/constraints.R Log: added 'constraint' as a class name to constraint_v2 Modified: pkg/PortfolioAnalytics/R/constraints.R =================================================================== --- pkg/PortfolioAnalytics/R/constraints.R 2013-06-19 00:03:55 UTC (rev 2366) +++ pkg/PortfolioAnalytics/R/constraints.R 2013-06-19 00:20:36 UTC (rev 2367) @@ -229,7 +229,7 @@ return(structure( c(list(type = type, enabled=enabled), list(...)), - class=constrclass + class=c(constrclass, "constraint") ) # end structure ) } From noreply at r-forge.r-project.org Wed Jun 19 02:24:55 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 19 Jun 2013 02:24:55 +0200 (CEST) Subject: [Returnanalytics-commits] r2368 - pkg/FactorAnalytics/R Message-ID: <20130619002456.0DFDC185107@r-forge.r-project.org> Author: chenyian Date: 2013-06-19 02:24:55 +0200 (Wed, 19 Jun 2013) New Revision: 2368 Modified: pkg/FactorAnalytics/R/fitMacroeconomicFactorModel.R Log: add description: once lar, lasso is chosen. fit.method is ignored. 
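Because lars fits its own coefficient path on this branch, a standalone sketch of what the lasso selection does, with simulated data and assuming the lars package is installed:

library(lars)
set.seed(1)
X <- matrix(rnorm(300), 100, 3); colnames(X) <- c("F1", "F2", "F3")
y <- 0.5*X[, 1] - 0.2*X[, 3] + rnorm(100, sd=0.1)
lars.fit <- lars(X, y, type="lasso", trace=FALSE)
s <- which.min(lars.fit$Cp)   # the min-Cp criterion the code below refers to
coef(lars.fit)[s, ]           # F2's true coefficient is zero, so it should drop out;
                              # OLS/DLS/Robust never runs on this branch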
Modified: pkg/FactorAnalytics/R/fitMacroeconomicFactorModel.R =================================================================== --- pkg/FactorAnalytics/R/fitMacroeconomicFactorModel.R 2013-06-19 00:20:36 UTC (rev 2367) +++ pkg/FactorAnalytics/R/fitMacroeconomicFactorModel.R 2013-06-19 00:24:55 UTC (rev 2368) @@ -20,11 +20,11 @@ #' "stepwise" is traditional forward/backward #' stepwise OLS regression, starting from the initial set of factors, that adds #' factors only if the regression fit as measured by the Bayesian Information #' Criteria (BIC) or Akaike Information Criteria (AIC) can be done using the R -#' function step() from the stats package. If \code{Robust} is chosen, the +#' function step() from the stats package. If "Robust" is chosen, the #' function step.lmRob in Robust package will be used. "all subsets" is #' Traditional all subsets regression can be done using the R function #' regsubsets() from the package leaps. "lar" , "lasso" is based on package -#' "lars", linear angle regression. +#' "lars", least angle regression. If "lar" or "lasso" is chosen, fit.method will be ignored. #' @param decay.factor for DLS. Default is 0.95. #' @param nvmax control option for all subsets. maximum size of subsets to #' examine @@ -331,7 +331,7 @@ } else if (variable.selection == "lar" | variable.selection == "lasso") { # use min Cp as criteria to choose predictors - for (i in assets.names) { + for (i in assets.names) { reg.df = na.omit(reg.xts[, c(i, factors.names)]) reg.df = as.matrix(reg.df) lars.fit = lars(reg.df[,factors.names],reg.df[,i],type=variable.selection,trace=FALSE) From noreply at r-forge.r-project.org Wed Jun 19 02:28:55 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 19 Jun 2013 02:28:55 +0200 (CEST) Subject: [Returnanalytics-commits] r2369 - pkg/PortfolioAnalytics/sandbox Message-ID: <20130619002855.67232185107@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-19 02:28:54 +0200 (Wed, 19 Jun 2013) New Revision: 2369 Added: pkg/PortfolioAnalytics/sandbox/testing_portfolio_specification.R Log: adding script to test and demo the portfolio specification structure with adding constraints and objectives Added: pkg/PortfolioAnalytics/sandbox/testing_portfolio_specification.R =================================================================== --- pkg/PortfolioAnalytics/sandbox/testing_portfolio_specification.R (rev 0) +++ pkg/PortfolioAnalytics/sandbox/testing_portfolio_specification.R 2013-06-19 00:28:54 UTC (rev 2369) @@ -0,0 +1,56 @@ +# Testing for the new portfolio specification + +# Load necessary packages +library(PerformanceAnalytics) +library(PortfolioAnalytics) + +# Load the edhec data +data(edhec) + +# Use the first 5 columns of edhec as returns +ret <- edhec[, 1:5] +funds <- colnames(ret) + +# Specify a portfolio object +pspec <- portfolio.spec(assets=funds) + +# pspec is an object of class "portfolio" that holds information about the +# assets, constraints, and objectives. +# The constraints will be stored as objects in the $constraints list +# The objectives will be stored as objects in the $objectives list. Note that +# this is just like how they are currently stored in the constraints object.
+class(pspec) +str(pspec) + +# Add a constraint object to pspec for the sum of the weights +pspec <- add.constraint(portfolio=pspec, type="weight_sum", + min_sum=0.99, max_sum=1.01) +print(pspec) + +# Add box constraints to the pspec object +pspec <- add.constraint(portfolio=pspec, type="box", min=0.1, max=0.4) +print(pspec) + +# Update the box constraints to pass in a vector for min and max. Updates the +# object in place with the indexnum argument +pspec <- add.constraint(portfolio=pspec, type="box", + min=c(0.1, 0.05, 0.1, 0.15, 0.2), + max=c(0.4, 0.4, 0.5, 0.45, 0.6), + indexnum=2) +print(pspec) + +# Add objectives to the pspec object +pspec <- add.objective_v2(portfolio=pspec, type="return", name="mean", + enabled=FALSE, multiplier=0) +print(pspec) + +pspec <- add.objective_v2(portfolio=pspec, type="risk", name="var", + enabled=FALSE, multiplier=0, risk_aversion=10) +print(pspec) + +pspec <- add.objective_v2(portfolio=pspec, type="risk", name="CVaR", + enabled=FALSE, multiplier=0) +print(pspec) + +str(pspec) +summary(pspec) \ No newline at end of file From noreply at r-forge.r-project.org Wed Jun 19 02:45:16 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 19 Jun 2013 02:45:16 +0200 (CEST) Subject: [Returnanalytics-commits] r2370 - pkg/FactorAnalytics/R Message-ID: <20130619004517.0A2171856D3@r-forge.r-project.org> Author: chenyian Date: 2013-06-19 02:45:16 +0200 (Wed, 19 Jun 2013) New Revision: 2370 Modified: pkg/FactorAnalytics/R/fitStatisticalFactorModel.R Log: change description: @returnitem to \item Modified: pkg/FactorAnalytics/R/fitStatisticalFactorModel.R =================================================================== --- pkg/FactorAnalytics/R/fitStatisticalFactorModel.R 2013-06-19 00:28:54 UTC (rev 2369) +++ pkg/FactorAnalytics/R/fitStatisticalFactorModel.R 2013-06-19 00:45:16 UTC (rev 2370) @@ -1,367 +1,365 @@ -#' Fit statistical factor model using principle components -#' -#' Fit statistical factor model using principle components. This function is -#' mainly adapted from S+FinMetric function mfactor. -#' -#' -#' @param x T x N assets returns data which is saved as data.frame class. -#' @param k numbers of factors if it is scalar or method of choosing optimal -#' number of factors. "bn" represents Bai and Ng (2002) method and "ck" -#' represents Connor and korajczyk (1993) method. Default is k = 1. -#' @param refine \code{TRUE} By default, the APCA fit will use the -#' Connor-Korajczyk refinement. -#' @param check check if some variables has identical values. Default is FALSE. -#' @param max.k scalar, select the number that maximum number of factors to be -#' considered.
-#' @param sig significant level when ck method uses. -#' @param na.rm if allow missing values. Default is FALSE. -#' @return -#' -#' : -#' @returnItem factors T x K the estimated factors. -#' @returnItem loadings K x N the asset specific factor loadings beta_i -#' estimated from regress the asset returns on factors. -#' @returnItem alpha 1 x N the estimated intercepts alpha_i -#' @returnItem Omega N x N asset returns sample variance covariance matrix. -#' @returnItem r2 regression r square value from regress the asset returns on -#' factors. -#' @returnItem k the number of the facotrs. -#' @returnItem eigen eigenvalues from the sample covariance matrix. -#' @returnItem residuals T x N matrix of residuals from regression. -#' @returnItem asset.ret asset returns -#' @returnItem asset.fit List of regression lm class of individual returns on -#' factors. -#' @returnItem residVars.vec vector of residual variances -#' @returnItem mimic N x K matrix of factor mimicking portfolio returns. -#' @author Eric Zivot and Yi-An Chen -#' @examples -#' -#' # load data for fitStatisticalFactorModel.r -#' # data from finmetric berndt.dat and folio.dat -#' -#' data(stat.fm.data) -#' ## -#' # sfm.dat is for pca -#' # sfm.apca.dat is for apca -#' class(sfm.dat) -#' class(sfm.apca.dat) -#' -#' # pca -#' args(fitStatisticalFactorModel) -#' sfm.pca.fit <- fitStatisticalFactorModel(sfm.dat,k=2) -#' class(sfm.pca.fit) -#' names(sfm.pca.fit) -#' sfm.pca.fit$factors -#' sfm.pca.fit$loadings -#' sfm.pca.fit$r2 -#' sfm.pca.fit$residuals -#' sfm.pca.fit$residVars.vec -#' sfm.pca.fit$mimic -#' # apca -#' sfm.apca.fit <- fitStatisticalFactorModel(sfm.apca.dat,k=1) -#' names(sfm.apca.fit) -#' sfm.apca.res <- sfm.apca.fit$residuals -#' sfm.apca.mimic <- sfm.apca.fit$mimic -#' # apca with bai and Ng method -#' sfm.apca.fit.bn <- fitStatisticalFactorModel(sfm.apca.dat,k="bn") -#' class(sfm.apca.fit.bn) -#' names(sfm.apca.fit.bn) -#' sfm.apca.fit.bn$mimic -#' -#' # apca with ck method -#' sfm.apca.fit.ck <- fitStatisticalFactorModel(sfm.apca.dat,k="ck") -#' class(sfm.apca.fit.ck) -#' names(sfm.apca.fit.ck) -#' sfm.apca.fit.ck$mimic -#' -fitStatisticalFactorModel <- -function(x, k = 1, refine = TRUE, check = FALSE, max.k = NULL, sig = 0.05, na.rm = FALSE){ - -## Inputs: -## -## x : T x N assets returns data which is saved as data.frame class. -## k : numbers of factors if it is scalar or method of choosing optimal number of factors. -## "bn" represents Bai and Ng (2002) method and "ck" represents Connor and korajczyk -## (1993) method. Default is k = 1. -## refine : : TRUE By default, the APCA fit will use the Connor-Korajczyk refinement. -## check : check if some variables has identical values. Default is FALSE. -## max.k : scalar, select the number that maximum number of factors to be considered. -## sig : significant level than ck method uses. -## na.rm : if allow missing values. Default is FALSE. -## Outputs: -## -## factors : T x K the estimated factors. -## loadings : K x N the asset specific factor loadings beta_i estimated from regress the asset -## returns on factors. -## alpha : 1 x N the estimated intercepts alpha_i -## Omega : N x N asset returns sample variance covariance matrix. -## r2 : regression r square value from regress the asset returns on factors. -## k : the number of the facotrs. -## eigen : eigenvalues from the sample covariance matrix. -## residuals : T x N matrix of residuals from regression. -# residVars.vec : vector of residual variances -# mimic : N x K matrix of factor mimicking portfolio returns. 
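A base-R sketch of the eigendecomposition at the heart of the PCA branch of fitStatisticalFactorModel (the T > N case), with simulated data and k = 2:

set.seed(42)
x  <- matrix(rnorm(240), 60, 4)    # T = 60 > N = 4, so the PCA branch applies
xc <- t(t(x) - colMeans(x))        # demean each column, as in mfactor.pca
ret.cov <- crossprod(xc)/nrow(x)   # N x N sample covariance, divided by T
e <- eigen(ret.cov, symmetric=TRUE)
f <- x %*% e$vectors[, 1:2]        # T x k estimated factors
B <- t(e$vectors[, 1:2])           # k x N loadings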
- -# load package -require(MASS) - - - # function of test - mfactor.test <- function(x, method = "bn", refine = TRUE, check = FALSE, max.k = NULL, sig = 0.05){ - - if(is.null(max.k)) { - max.k <- min(10, nrow(x) - 1) - } else if (max.k >= nrow(x)) { - stop("max.k must be less than the number of observations.") - } - if(check) { - if(mfactor.check(x)) { - warning("Some variables have identical observations.") - return(list(factors = NA, loadings = NA, k = NA)) - } - } - method <- casefold(method) - if(method == "bn") { - ans <- mfactor.bn(x, max.k, refine = refine) - } - else if(method == "ck") { - ans <- mfactor.ck(x, max.k, refine = refine, sig = sig) - } - else { - stop("Invalid choice for optional argument method.") - } - return(ans) - -} - - -# function of ck -mfactor.ck <- function(x, max.k, sig = 0.05, refine = TRUE) { - - n <- ncol(x) - m <- nrow(x) - idx <- 2 * (1:(m/2)) - # - f <- mfactor.apca(x, k = 1, refine = refine, check = FALSE) - f1 <- cbind(1, f$factors) - B <- backsolve(chol(crossprod(f1)), diag(2)) - eps <- x - f1 %*% crossprod(t(B)) %*% crossprod(f1, x) - s <- eps^2/(1 - 2/m - 1/n) - # - for(i in 2:max.k) { - f.old <- f - s.old <- s - f <- mfactor.apca(x, k = i, refine = refine, check = FALSE) - f1 <- cbind(1, f$factors) - B <- backsolve(chol(crossprod(f1)), diag(i + 1)) - eps <- x - f1 %*% crossprod(t(B)) %*% crossprod(f1, x) - s <- eps^2/(1 - (i + 1)/m - i/n) - delta <- rowMeans(s.old[idx - 1, , drop = FALSE]) - rowMeans( - s[idx, , drop = FALSE]) - if(t.test(delta, alternative = "greater")$p.value > sig) { - return(f.old) - } - } - return(f) -} - -# funciton of check - mfactor.check <- function(x) { - temp <- apply(x, 2, range) - if(any(abs(temp[2, ] - temp[1, ]) < .Machine$single.eps)) { - TRUE - } - else { - FALSE - } -} - - # function of bn - mfactor.bn <- function(x, max.k, refine = TRUE) { - - # Parameters: - # x : T x N return matrix - # max.k : maxinum number of factors to be considered - # Returns: - # k : the optimum number of factors - n <- ncol(x) - m <- nrow(x) - s <- vector("list", max.k) - for(i in 1:max.k) { - f <- cbind(1, mfactor.apca(x, k = i, refine = refine, check = - FALSE)$factors) - B <- backsolve(chol(crossprod(f)), diag(i + 1)) - eps <- x - f %*% crossprod(t(B)) %*% crossprod(f, x) - sigma <- colSums(eps^2)/(m - i - 1) - s[[i]] <- mean(sigma) - } - s <- unlist(s) - idx <- 1:max.k - Cp1 <- s[idx] + (idx * s[max.k] * (n + m))/(n * m) * log((n * m)/ - (n + m)) - Cp2 <- s[idx] + (idx * s[max.k] * (n + m))/(n * m) * log(min(n, m)) - if(order(Cp1)[1] != order(Cp2)[1]) { - warning("Cp1 and Cp2 did not yield same result. The smaller one is used." 
) - } - k <- min(order(Cp1)[1], order(Cp2)[1]) - f <- mfactor.apca(x, k = k, refine = refine, check = FALSE) - return(f) - } - - - # function of pca - mfactor.pca <- function(x, k, check = FALSE, Omega = NULL) { - - if(check) { - if(mfactor.check(x)) { - warning("Some variables have identical observations.") - return(list(factors = NA, loadings = NA, k = NA)) - } - } - n <- ncol(x) - m <- nrow(x) - if(is.null(dimnames(x))) { - dimnames(x) <- list(1:m, paste("V", 1:n, sep = ".")) - } - x.names <- dimnames(x)[[2]] - xc <- t(t(x) - colMeans(x)) - if(is.null(Omega)) { - Omega <- crossprod(xc)/m - } - eigen.tmp <- eigen(Omega, symm = TRUE) - # compute loadings beta - B <- t(eigen.tmp$vectors[, 1:k, drop = FALSE]) - # compute estimated factors - f <- x %*% eigen.tmp$vectors[, 1:k, drop = FALSE] - tmp <- x - f %*% B - alpha <- colMeans(tmp) - # compute residuals - tmp <- t(t(tmp) - alpha) - r2 <- (1 - colSums(tmp^2)/colSums(xc^2)) - Omega <- t(B) %*% var(f) %*% B - diag(Omega) <- diag(Omega) + colSums(tmp^2)/(m - k - 1) - dimnames(B) <- list(paste("F", 1:k, sep = "."), x.names) - dimnames(f) <- list(dimnames(x)[[1]], paste("F", 1:k, sep = ".")) - dimnames(Omega) <- list(x.names, x.names) - names(alpha) <- x.names - # create lm list for plot - reg.list = list() - for (i in x.names) { - reg.df = as.data.frame(cbind(x[,i],f)) - colnames(reg.df)[1] <- i - fm.formula = as.formula(paste(i,"~", ".", sep=" ")) - fm.fit = lm(fm.formula, data=reg.df) - reg.list[[i]] = fm.fit - } - ans <- list(factors = f, loadings = B, k = k, alpha = alpha, Omega = Omega, - r2 = r2, eigen = eigen.tmp$values, residuals=tmp, asset.ret = x, - asset.fit=reg.list) - - return(ans) - -} - - # funciont of apca - mfactor.apca <- function(x, k, refine = TRUE, check = FALSE, Omega = NULL) { - - if(check) { - if(mfactor.check(x)) { - warning("Some variables have identical observations.") - return(list(factors = NA, loadings = NA, k = NA)) - } - } - n <- ncol(x) - m <- nrow(x) - if(is.null(dimnames(x))) { - dimnames(x) <- list(1:m, paste("V", 1:n, sep = ".")) - } - x.names <- dimnames(x)[[2]] - xc <- t(t(x) - colMeans(x)) - if(is.null(Omega)) { - Omega <- crossprod(t(xc))/n - } - eig.tmp <- eigen(Omega, symmetric = TRUE) - f <- eig.tmp$vectors[, 1:k, drop = FALSE] - f1 <- cbind(1, f) - B <- backsolve(chol(crossprod(f1)), diag(k + 1)) - B <- crossprod(t(B)) %*% crossprod(f1, x) - sigma <- colSums((x - f1 %*% B)^2)/(m - k - 1) - if(refine) { - xs <- t(xc)/sqrt(sigma) - Omega <- crossprod(xs)/n - eig.tmp <- eigen(Omega, symm = TRUE) - f <- eig.tmp$vectors[, 1:k, drop = FALSE] - f1 <- cbind(1, f) - B <- backsolve(chol(crossprod(f1)), diag(k + 1)) - B <- crossprod(t(B)) %*% crossprod(f1, x) - sigma <- colSums((x - f1 %*% B)^2)/(m - k - 1) - } - alpha <- B[1, ] - B <- B[-1, , drop = FALSE] - Omega <- t(B) %*% var(f) %*% B - diag(Omega) <- diag(Omega) + sigma - dimnames(B) <- list(paste("F", 1:k, sep = "."), x.names) - dimnames(f) <- list(dimnames(x)[[1]], paste("F", 1:k, sep = ".")) - names(alpha) <- x.names - res <- t(t(x) - alpha) - f %*% B - r2 <- (1 - colSums(res^2)/colSums(xc^2)) - ans <- list(factors = f, loadings = B, k = k, alpha = alpha, Omega = Omega, - r2 = r2, eigen = eig.tmp$values, residuals=res,asset.ret = x) - return(ans) -} - - call <- match.call() - pos <- rownames(x) - x <- as.matrix(x) - if(any(is.na(x))) { - if(na.rm) { - x <- na.omit(x) - } else { - stop("Missing values are not allowed if na.rm=F.") - } - } - # use PCA if T > N - if(ncol(x) < nrow(x)) { - if(is.character(k)) { - stop("k must be the number of factors for 
PCA.") - } - if(k >= ncol(x)) { - stop("Number of factors must be smaller than number of variables." - ) - } - ans <- mfactor.pca(x, k, check = check) - } else if(is.character(k)) { - ans <- mfactor.test(x, k, refine = refine, check = - check, max.k = max.k, sig = sig) - } else { # use aPCA if T <= N - if(k >= ncol(x)) { - stop("Number of factors must be smaller than number of variables." - ) - } - ans <- mfactor.apca(x, k, refine = refine, check = - check) - } - - # mimic function - f <- ans$factors - - if(is.data.frame(f)) { - f <- as.matrix(f) - } - - if(nrow(x) < ncol(x)) { - mimic <- ginv(x) %*% f - } else { - mimic <- qr.solve(x, f) - } - - mimic <- t(t(mimic)/colSums(mimic)) - dimnames(mimic)[[1]] <- dimnames(x)[[2]] - - ans$mimic <- mimic - ans$residVars.vec <- apply(ans$residuals,2,var) - ans$call <- call -class(ans) <- "StatFactorModel" - return(ans) -} - +#' Fit statistical factor model using principle components +#' +#' Fit statistical factor model using principle components. This function is +#' mainly adapted from S+FinMetric function mfactor. +#' +#' +#' @param x T x N assets returns data which is saved as data.frame class. +#' @param k numbers of factors if it is scalar or method of choosing optimal +#' number of factors. "bn" represents Bai and Ng (2002) method and "ck" +#' represents Connor and korajczyk (1993) method. Default is k = 1. +#' @param refine \code{TRUE} By default, the APCA fit will use the +#' Connor-Korajczyk refinement. +#' @param check check if some variables has identical values. Default is FALSE. +#' @param max.k scalar, select the number that maximum number of factors to be +#' considered. +#' @param sig significant level when ck method uses. +#' @param na.rm if allow missing values. Default is FALSE. +#' @return +#' \item{factors}{T x K the estimated factors.} +#' \item{loadings}{K x N the asset specific factor loadings beta_i. 
+#' estimated from regress the asset returns on factors.} +#' \item{alpha}{1 x N the estimated intercepts alpha_i} +#' \item{ret.cov}{N x N asset returns sample variance covariance matrix.} +#' \item{r2}{regression r square value from regress the asset returns on +#' factors.} +#' \item{k}{the number of the facotrs.} +#' \item{eigen}{eigenvalues from the sample covariance matrix.} +#' \item{residuals}{T x N matrix of residuals from regression.} +#' \item{asset.ret}{asset returns} +#' \item{asset.fit}{List of regression lm class of individual returns on +#' factors.} +#' \item{residVars.vec}{vector of residual variances.} +#' \item{mimic}{N x K matrix of factor mimicking portfolio returns.} +#' @author Eric Zivot and Yi-An Chen +#' @examples +#' +#' # load data for fitStatisticalFactorModel.r +#' # data from finmetric berndt.dat and folio.dat +#' +#' data(stat.fm.data) +#' ## +#' # sfm.dat is for pca +#' # sfm.apca.dat is for apca +#' class(sfm.dat) +#' class(sfm.apca.dat) +#' +#' # pca +#' args(fitStatisticalFactorModel) +#' sfm.pca.fit <- fitStatisticalFactorModel(sfm.dat,k=2) +#' class(sfm.pca.fit) +#' names(sfm.pca.fit) +#' sfm.pca.fit$factors +#' sfm.pca.fit$loadings +#' sfm.pca.fit$r2 +#' sfm.pca.fit$residuals +#' sfm.pca.fit$residVars.vec +#' sfm.pca.fit$mimic +#' # apca +#' sfm.apca.fit <- fitStatisticalFactorModel(sfm.apca.dat,k=1) +#' names(sfm.apca.fit) +#' sfm.apca.res <- sfm.apca.fit$residuals +#' sfm.apca.mimic <- sfm.apca.fit$mimic +#' # apca with bai and Ng method +#' sfm.apca.fit.bn <- fitStatisticalFactorModel(sfm.apca.dat,k="bn") +#' class(sfm.apca.fit.bn) +#' names(sfm.apca.fit.bn) +#' sfm.apca.fit.bn$mimic +#' +#' # apca with ck method +#' sfm.apca.fit.ck <- fitStatisticalFactorModel(sfm.apca.dat,k="ck") +#' class(sfm.apca.fit.ck) +#' names(sfm.apca.fit.ck) +#' sfm.apca.fit.ck$mimic +#' +fitStatisticalFactorModel <- +function(x, k = 1, refine = TRUE, check = FALSE, max.k = NULL, sig = 0.05, na.rm = FALSE){ + +## Inputs: +## +## x : T x N assets returns data which is saved as data.frame class. +## k : numbers of factors if it is scalar or method of choosing optimal number of factors. +## "bn" represents Bai and Ng (2002) method and "ck" represents Connor and korajczyk +## (1993) method. Default is k = 1. +## refine : : TRUE By default, the APCA fit will use the Connor-Korajczyk refinement. +## check : check if some variables has identical values. Default is FALSE. +## max.k : scalar, select the number that maximum number of factors to be considered. +## sig : significant level than ck method uses. +## na.rm : if allow missing values. Default is FALSE. +## Outputs: +## +## factors : T x K the estimated factors. +## loadings : K x N the asset specific factor loadings beta_i estimated from regress the asset +## returns on factors. +## alpha : 1 x N the estimated intercepts alpha_i +## Omega : N x N asset returns sample variance covariance matrix. +## r2 : regression r square value from regress the asset returns on factors. +## k : the number of the facotrs. +## eigen : eigenvalues from the sample covariance matrix. +## residuals : T x N matrix of residuals from regression. +# residVars.vec : vector of residual variances +# mimic : N x K matrix of factor mimicking portfolio returns. 
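A companion sketch for the factor-mimicking portfolios computed at the end of the function, continuing the x and f objects from the PCA sketch earlier in this digest:

mimic <- qr.solve(x, f)               # N x k; the T <= N case uses MASS::ginv(x) %*% f instead
mimic <- t(t(mimic)/colSums(mimic))   # rescale so each factor portfolio's weights sum to one
colSums(mimic)                        # all ones: one mimicking portfolio per factor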
+ +# load package +require(MASS) + + + # function of test + mfactor.test <- function(x, method = "bn", refine = TRUE, check = FALSE, max.k = NULL, sig = 0.05){ + + if(is.null(max.k)) { + max.k <- min(10, nrow(x) - 1) + } else if (max.k >= nrow(x)) { + stop("max.k must be less than the number of observations.") + } + if(check) { + if(mfactor.check(x)) { + warning("Some variables have identical observations.") + return(list(factors = NA, loadings = NA, k = NA)) + } + } + method <- casefold(method) + if(method == "bn") { + ans <- mfactor.bn(x, max.k, refine = refine) + } + else if(method == "ck") { + ans <- mfactor.ck(x, max.k, refine = refine, sig = sig) + } + else { + stop("Invalid choice for optional argument method.") + } + return(ans) + +} + + +# function of ck +mfactor.ck <- function(x, max.k, sig = 0.05, refine = TRUE) { + + n <- ncol(x) + m <- nrow(x) + idx <- 2 * (1:(m/2)) + # + f <- mfactor.apca(x, k = 1, refine = refine, check = FALSE) + f1 <- cbind(1, f$factors) + B <- backsolve(chol(crossprod(f1)), diag(2)) + eps <- x - f1 %*% crossprod(t(B)) %*% crossprod(f1, x) + s <- eps^2/(1 - 2/m - 1/n) + # + for(i in 2:max.k) { + f.old <- f + s.old <- s + f <- mfactor.apca(x, k = i, refine = refine, check = FALSE) + f1 <- cbind(1, f$factors) + B <- backsolve(chol(crossprod(f1)), diag(i + 1)) + eps <- x - f1 %*% crossprod(t(B)) %*% crossprod(f1, x) + s <- eps^2/(1 - (i + 1)/m - i/n) + delta <- rowMeans(s.old[idx - 1, , drop = FALSE]) - rowMeans( + s[idx, , drop = FALSE]) + if(t.test(delta, alternative = "greater")$p.value > sig) { + return(f.old) + } + } + return(f) +} + +# funciton of check + mfactor.check <- function(x) { + temp <- apply(x, 2, range) + if(any(abs(temp[2, ] - temp[1, ]) < .Machine$single.eps)) { + TRUE + } + else { + FALSE + } +} + + # function of bn + mfactor.bn <- function(x, max.k, refine = TRUE) { + + # Parameters: + # x : T x N return matrix + # max.k : maxinum number of factors to be considered + # Returns: + # k : the optimum number of factors + n <- ncol(x) + m <- nrow(x) + s <- vector("list", max.k) + for(i in 1:max.k) { + f <- cbind(1, mfactor.apca(x, k = i, refine = refine, check = + FALSE)$factors) + B <- backsolve(chol(crossprod(f)), diag(i + 1)) + eps <- x - f %*% crossprod(t(B)) %*% crossprod(f, x) + sigma <- colSums(eps^2)/(m - i - 1) + s[[i]] <- mean(sigma) + } + s <- unlist(s) + idx <- 1:max.k + Cp1 <- s[idx] + (idx * s[max.k] * (n + m))/(n * m) * log((n * m)/ + (n + m)) + Cp2 <- s[idx] + (idx * s[max.k] * (n + m))/(n * m) * log(min(n, m)) + if(order(Cp1)[1] != order(Cp2)[1]) { + warning("Cp1 and Cp2 did not yield same result. The smaller one is used." 
) + } + k <- min(order(Cp1)[1], order(Cp2)[1]) + f <- mfactor.apca(x, k = k, refine = refine, check = FALSE) + return(f) + } + + + # function of pca + mfactor.pca <- function(x, k, check = FALSE, Omega = NULL) { + + if(check) { + if(mfactor.check(x)) { + warning("Some variables have identical observations.") + return(list(factors = NA, loadings = NA, k = NA)) + } + } + n <- ncol(x) + m <- nrow(x) + if(is.null(dimnames(x))) { + dimnames(x) <- list(1:m, paste("V", 1:n, sep = ".")) + } + x.names <- dimnames(x)[[2]] + xc <- t(t(x) - colMeans(x)) + if(is.null(Omega)) { + Omega <- crossprod(xc)/m + } + eigen.tmp <- eigen(Omega, symm = TRUE) + # compute loadings beta + B <- t(eigen.tmp$vectors[, 1:k, drop = FALSE]) + # compute estimated factors + f <- x %*% eigen.tmp$vectors[, 1:k, drop = FALSE] + tmp <- x - f %*% B + alpha <- colMeans(tmp) + # compute residuals + tmp <- t(t(tmp) - alpha) + r2 <- (1 - colSums(tmp^2)/colSums(xc^2)) + Omega <- t(B) %*% var(f) %*% B + diag(Omega) <- diag(Omega) + colSums(tmp^2)/(m - k - 1) + dimnames(B) <- list(paste("F", 1:k, sep = "."), x.names) + dimnames(f) <- list(dimnames(x)[[1]], paste("F", 1:k, sep = ".")) + dimnames(Omega) <- list(x.names, x.names) + names(alpha) <- x.names + # create lm list for plot + reg.list = list() + for (i in x.names) { + reg.df = as.data.frame(cbind(x[,i],f)) + colnames(reg.df)[1] <- i + fm.formula = as.formula(paste(i,"~", ".", sep=" ")) + fm.fit = lm(fm.formula, data=reg.df) + reg.list[[i]] = fm.fit + } + ans <- list(factors = f, loadings = B, k = k, alpha = alpha, Omega = Omega, + r2 = r2, eigen = eigen.tmp$values, residuals=tmp, asset.ret = x, + asset.fit=reg.list) + + return(ans) + +} + + # funciont of apca + mfactor.apca <- function(x, k, refine = TRUE, check = FALSE, Omega = NULL) { + + if(check) { + if(mfactor.check(x)) { + warning("Some variables have identical observations.") + return(list(factors = NA, loadings = NA, k = NA)) + } + } + n <- ncol(x) + m <- nrow(x) + if(is.null(dimnames(x))) { + dimnames(x) <- list(1:m, paste("V", 1:n, sep = ".")) + } + x.names <- dimnames(x)[[2]] + xc <- t(t(x) - colMeans(x)) + if(is.null(Omega)) { + Omega <- crossprod(t(xc))/n + } + eig.tmp <- eigen(Omega, symmetric = TRUE) + f <- eig.tmp$vectors[, 1:k, drop = FALSE] + f1 <- cbind(1, f) + B <- backsolve(chol(crossprod(f1)), diag(k + 1)) + B <- crossprod(t(B)) %*% crossprod(f1, x) + sigma <- colSums((x - f1 %*% B)^2)/(m - k - 1) + if(refine) { + xs <- t(xc)/sqrt(sigma) + Omega <- crossprod(xs)/n + eig.tmp <- eigen(Omega, symm = TRUE) + f <- eig.tmp$vectors[, 1:k, drop = FALSE] + f1 <- cbind(1, f) + B <- backsolve(chol(crossprod(f1)), diag(k + 1)) + B <- crossprod(t(B)) %*% crossprod(f1, x) + sigma <- colSums((x - f1 %*% B)^2)/(m - k - 1) + } + alpha <- B[1, ] + B <- B[-1, , drop = FALSE] + Omega <- t(B) %*% var(f) %*% B + diag(Omega) <- diag(Omega) + sigma + dimnames(B) <- list(paste("F", 1:k, sep = "."), x.names) + dimnames(f) <- list(dimnames(x)[[1]], paste("F", 1:k, sep = ".")) + names(alpha) <- x.names + res <- t(t(x) - alpha) - f %*% B + r2 <- (1 - colSums(res^2)/colSums(xc^2)) + ans <- list(factors = f, loadings = B, k = k, alpha = alpha, Omega = Omega, + r2 = r2, eigen = eig.tmp$values, residuals=res,asset.ret = x) + return(ans) +} + + call <- match.call() + pos <- rownames(x) + x <- as.matrix(x) + if(any(is.na(x))) { + if(na.rm) { + x <- na.omit(x) + } else { + stop("Missing values are not allowed if na.rm=F.") + } + } + # use PCA if T > N + if(ncol(x) < nrow(x)) { + if(is.character(k)) { + stop("k must be the number of factors for 
PCA.") + } + if(k >= ncol(x)) { + stop("Number of factors must be smaller than number of variables." + ) + } + ans <- mfactor.pca(x, k, check = check) + } else if(is.character(k)) { + ans <- mfactor.test(x, k, refine = refine, check = + check, max.k = max.k, sig = sig) + } else { # use aPCA if T <= N + if(k >= ncol(x)) { + stop("Number of factors must be smaller than number of variables." + ) + } + ans <- mfactor.apca(x, k, refine = refine, check = + check) + } + + # mimic function + f <- ans$factors + + if(is.data.frame(f)) { + f <- as.matrix(f) + } + + if(nrow(x) < ncol(x)) { + mimic <- ginv(x) %*% f + } else { + mimic <- qr.solve(x, f) + } + + mimic <- t(t(mimic)/colSums(mimic)) + dimnames(mimic)[[1]] <- dimnames(x)[[2]] + + ans$mimic <- mimic + ans$residVars.vec <- apply(ans$residuals,2,var) + ans$call <- call +class(ans) <- "StatFactorModel" + return(ans) +} + From noreply at r-forge.r-project.org Wed Jun 19 02:47:51 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 19 Jun 2013 02:47:51 +0200 (CEST) Subject: [Returnanalytics-commits] r2371 - pkg/FactorAnalytics/R Message-ID: <20130619004752.003C218517B@r-forge.r-project.org> Author: chenyian Date: 2013-06-19 02:47:51 +0200 (Wed, 19 Jun 2013) New Revision: 2371 Modified: pkg/FactorAnalytics/R/fitStatisticalFactorModel.R Log: change input name Omega to ret.cov Modified: pkg/FactorAnalytics/R/fitStatisticalFactorModel.R =================================================================== --- pkg/FactorAnalytics/R/fitStatisticalFactorModel.R 2013-06-19 00:45:16 UTC (rev 2370) +++ pkg/FactorAnalytics/R/fitStatisticalFactorModel.R 2013-06-19 00:47:51 UTC (rev 2371) @@ -75,31 +75,6 @@ fitStatisticalFactorModel <- function(x, k = 1, refine = TRUE, check = FALSE, max.k = NULL, sig = 0.05, na.rm = FALSE){ -## Inputs: -## -## x : T x N assets returns data which is saved as data.frame class. -## k : numbers of factors if it is scalar or method of choosing optimal number of factors. -## "bn" represents Bai and Ng (2002) method and "ck" represents Connor and korajczyk -## (1993) method. Default is k = 1. -## refine : : TRUE By default, the APCA fit will use the Connor-Korajczyk refinement. -## check : check if some variables has identical values. Default is FALSE. -## max.k : scalar, select the number that maximum number of factors to be considered. -## sig : significant level than ck method uses. -## na.rm : if allow missing values. Default is FALSE. -## Outputs: -## -## factors : T x K the estimated factors. -## loadings : K x N the asset specific factor loadings beta_i estimated from regress the asset -## returns on factors. -## alpha : 1 x N the estimated intercepts alpha_i -## Omega : N x N asset returns sample variance covariance matrix. -## r2 : regression r square value from regress the asset returns on factors. -## k : the number of the facotrs. -## eigen : eigenvalues from the sample covariance matrix. -## residuals : T x N matrix of residuals from regression. -# residVars.vec : vector of residual variances -# mimic : N x K matrix of factor mimicking portfolio returns. 
- # load package require(MASS) @@ -208,7 +183,7 @@ # function of pca - mfactor.pca <- function(x, k, check = FALSE, Omega = NULL) { + mfactor.pca <- function(x, k, check = FALSE, ret.cov = NULL) { if(check) { if(mfactor.check(x)) { @@ -223,10 +198,10 @@ } x.names <- dimnames(x)[[2]] xc <- t(t(x) - colMeans(x)) - if(is.null(Omega)) { - Omega <- crossprod(xc)/m + if(is.null(ret.cov)) { + ret.cov <- crossprod(xc)/m } - eigen.tmp <- eigen(Omega, symm = TRUE) + eigen.tmp <- eigen(ret.cov, symm = TRUE) # compute loadings beta B <- t(eigen.tmp$vectors[, 1:k, drop = FALSE]) # compute estimated factors @@ -236,11 +211,11 @@ # compute residuals tmp <- t(t(tmp) - alpha) r2 <- (1 - colSums(tmp^2)/colSums(xc^2)) - Omega <- t(B) %*% var(f) %*% B - diag(Omega) <- diag(Omega) + colSums(tmp^2)/(m - k - 1) + ret.cov <- t(B) %*% var(f) %*% B + diag(ret.cov) <- diag(ret.cov) + colSums(tmp^2)/(m - k - 1) dimnames(B) <- list(paste("F", 1:k, sep = "."), x.names) dimnames(f) <- list(dimnames(x)[[1]], paste("F", 1:k, sep = ".")) - dimnames(Omega) <- list(x.names, x.names) + dimnames(ret.cov) <- list(x.names, x.names) names(alpha) <- x.names # create lm list for plot reg.list = list() @@ -251,7 +226,7 @@ fm.fit = lm(fm.formula, data=reg.df) reg.list[[i]] = fm.fit } - ans <- list(factors = f, loadings = B, k = k, alpha = alpha, Omega = Omega, + ans <- list(factors = f, loadings = B, k = k, alpha = alpha, ret.cov = ret.cov, r2 = r2, eigen = eigen.tmp$values, residuals=tmp, asset.ret = x, asset.fit=reg.list) @@ -260,7 +235,7 @@ } # funciont of apca - mfactor.apca <- function(x, k, refine = TRUE, check = FALSE, Omega = NULL) { + mfactor.apca <- function(x, k, refine = TRUE, check = FALSE, ret.cov = NULL) { if(check) { if(mfactor.check(x)) { @@ -275,10 +250,10 @@ } x.names <- dimnames(x)[[2]] xc <- t(t(x) - colMeans(x)) - if(is.null(Omega)) { - Omega <- crossprod(t(xc))/n + if(is.null(ret.cov)) { + ret.cov <- crossprod(t(xc))/n } - eig.tmp <- eigen(Omega, symmetric = TRUE) + eig.tmp <- eigen(ret.cov, symmetric = TRUE) f <- eig.tmp$vectors[, 1:k, drop = FALSE] f1 <- cbind(1, f) B <- backsolve(chol(crossprod(f1)), diag(k + 1)) @@ -286,8 +261,8 @@ sigma <- colSums((x - f1 %*% B)^2)/(m - k - 1) if(refine) { xs <- t(xc)/sqrt(sigma) - Omega <- crossprod(xs)/n - eig.tmp <- eigen(Omega, symm = TRUE) + ret.cov <- crossprod(xs)/n + eig.tmp <- eigen(ret.cov, symm = TRUE) f <- eig.tmp$vectors[, 1:k, drop = FALSE] f1 <- cbind(1, f) B <- backsolve(chol(crossprod(f1)), diag(k + 1)) @@ -296,14 +271,14 @@ } alpha <- B[1, ] B <- B[-1, , drop = FALSE] - Omega <- t(B) %*% var(f) %*% B - diag(Omega) <- diag(Omega) + sigma + ret.cov <- t(B) %*% var(f) %*% B + diag(ret.cov) <- diag(ret.cov) + sigma dimnames(B) <- list(paste("F", 1:k, sep = "."), x.names) dimnames(f) <- list(dimnames(x)[[1]], paste("F", 1:k, sep = ".")) names(alpha) <- x.names res <- t(t(x) - alpha) - f %*% B r2 <- (1 - colSums(res^2)/colSums(xc^2)) - ans <- list(factors = f, loadings = B, k = k, alpha = alpha, Omega = Omega, + ans <- list(factors = f, loadings = B, k = k, alpha = alpha, ret.cov = ret.cov, r2 = r2, eigen = eig.tmp$values, residuals=res,asset.ret = x) return(ans) } From noreply at r-forge.r-project.org Wed Jun 19 09:04:44 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 19 Jun 2013 09:04:44 +0200 (CEST) Subject: [Returnanalytics-commits] r2372 - pkg/PerformanceAnalytics/sandbox/pulkit Message-ID: <20130619070444.EE822185296@r-forge.r-project.org> Author: pulkit Date: 2013-06-19 09:04:44 +0200 (Wed, 19 Jun 2013) New 
Revision: 2372 Modified: pkg/PerformanceAnalytics/sandbox/pulkit/PSRopt.R Log: Testing PSR Optimization Modified: pkg/PerformanceAnalytics/sandbox/pulkit/PSRopt.R =================================================================== --- pkg/PerformanceAnalytics/sandbox/pulkit/PSRopt.R 2013-06-19 00:47:51 UTC (rev 2371) +++ pkg/PerformanceAnalytics/sandbox/pulkit/PSRopt.R 2013-06-19 07:04:44 UTC (rev 2372) @@ -13,41 +13,44 @@ n = nrow(x) columnnames = colnames(x) - weights = matrix((rep(1,columns)/columns),ncol = 1) if(is.null(bounds)){ bounds = matrix(rep(c(0,1),columns),nrow = columns,byrow = TRUE) } + print(bounds) d1z = NULL #Optimization Function optimize<-function(){ + weights = rep(1,columns)/columns + z = 0 + iter = 0 mean = NULL for(column in 1:columns){ mean = c(mean,get_Moments(x[,column],1)) } while(TRUE){ if(iter == MaxIter) break - c(d1z_new,z_new) = get_d1zs(mean,weights) - if(z_new>z & checkBounds(weights)==TRUE){ - z = z_new - d1z = d1z_new + dZ = get_d1Zs(mean,weights) + if(dZ$z>z & checkBounds(weights)==TRUE){ + z = dZ$z + d1z = dZ$d1Z } iter = iter + 1 weights = stepSize(weights,d1z) if(is.null(weights)) return } - return + return(weights) } # To Check the bounds of the weights checkBounds<-function(weights){ + flag = TRUE + #for(i in 1:columns){ + # if(weights[i] < bounds[i,0]) flag = FALSE - flag = TRUE - for(i in 1:columns){ - if(weights[i,0]bounds[i,1]) flag = TRUE - } - return(flag) + # if(weights[i] > bounds[i,1]) flag = FALSE + #} + return(TRUE) } #Calculate the step size to change the weights @@ -55,36 +58,39 @@ if(length(which(d1Z==0)) == 0){ return(NULL) } - weights[which(d1Z==max(d1Z)),0] = weights[which(d1Z==max(d1Z)),0]+delta/max(d1Z) + weights[which(d1Z==max(d1Z))] = weights[which(d1Z==max(d1Z))]+delta/max(d1Z) weights = weights/sum(weights) return(weights) } #To get the first differentials - get_d1Zs(mean,w){ - d1Z = rep(0,columns) + get_d1Zs<-function(mean,weights){ + d1Z = NULL m = NULL x = Return.portfolio(x,weights) m[1] = get_Moments(x,1) - for(i in 1:4){ - m = c(m,get_Moments(x,i+1,m[0])) + for(i in 2:4){ + m = c(m,get_Moments(x,i+1,m[1])) } stats = get_Stats(m) - c(meanSR,sigmaSR) = get_SR(stats,n) + SR = get_SR(stats,n) + meanSR = SR$meanSR + sigmaSR = SR$sigmaSR for(i in 1:columns){ - d1Z[i] = get_d1Z(stats,m,meanSR,sigmaSR,mean,weights,index) + d1Z = c(d1Z,get_d1Z(stats,m,meanSR,sigmaSR,mean,weights,i)) } - return(d1Z,meanSR/sigmaSR) + dZ = list("d1Z"=d1Z,"z"=meanSR/sigmaSR) + return(dZ) } get_d1Z<-function(stats,m,meanSR,sigmaSR,mean,weights,index){ d1Mu = get_d1Mu(mean,index) - d1Sigma = get_d1Sigma(stats[1],mean,weights,index) - d1Skew = get_d1Skew(d1Sigma,stats[1],mean,weights,index,m[2]) - d1Kurt = get_d1Kurt(d1Sigma,stats[1],mean,weights,index,m[3]) - d1meanSR = (d1Mu*stats[1]-d1Sigma*stats[0])/stats[1]^2 - d1sigmaSR = (d1Kurt * meanSR^2+2*meanSR*d1meanSR*(stats[3]-1))/4 - d1sigmaSR = d1sigmaSR - d1Skew*meanSR+d1meanSR*stats[2] + d1Sigma = get_d1Sigma(stats[2],mean,weights,index) + d1Skew = get_d1Skew(d1Sigma,stats[2],mean,weights,index,m[2]) + d1Kurt = get_d1Kurt(d1Sigma,stats[2],mean,weights,index,m[3]) + d1meanSR = (d1Mu*stats[2]-d1Sigma*stats[1])/stats[2]^2 + d1sigmaSR = (d1Kurt * meanSR^2+2*meanSR*d1meanSR*(stats[4]-1))/4 + d1sigmaSR = d1sigmaSR - d1Skew*meanSR+d1meanSR*stats[3] d1sigmaSR = (d1sigmaSR/2)*sigmaSR*(n-1) d1Z = (d1meanSR*sigmaSR-d1sigmaSR*meanSR)/sigmaSR^2 return(d1Z) @@ -121,8 +127,8 @@ for(i in 1:n){ x1 = 0 x2 = (x[i,index]-mean[index])^dOrder - for(j in 1:column){ - x1 = x1 + weights[j,0]*(x[i,j]-mean[j]) + for(j in 
1:columns){ + x1 = x1 + weights[j]*(x[i,j]-mean[j]) } sum = sum + x2*x1^(mOrder-dOrder) } @@ -131,13 +137,14 @@ # TO get meanSR and sigmaSR get_SR<-function(stats,n){ - meanSR = stats[0]/stats[1] - sigmaSR = ((1-meanSR*stats[2]+(meanSR^2)*(stats[3]-1)/4)/(n-1))^0.5 - return(meanSR,sigmaSR) + meanSR = stats[1]/stats[2] + sigmaSR = ((1-meanSR*stats[3]+(meanSR^2)*(stats[4]-1)/4)/(n-1))^0.5 + SR<-list("meanSR"=meanSR,"sigmaSR"=sigmaSR) + return(SR) } #To calculate the Stats(mu , sigma , skewness and kurtosis) get_Stats<-function(m){ - stats = c(m[0],m[1]^(0.5),(m[2]/m[1])^(3/2),(m[3]/m[1])^(0.5)) + stats = c(m[1],m[2]^(0.5),(m[3]/m[2])^(3/2),(m[4]/m[2])^(0.5)) return(stats) } # TO calculate the moments @@ -145,7 +152,7 @@ sum = 0 for(i in 1:n){ - sum = sum + (x[i]-mean)^order + sum = sum + (x[i]-mean)^order } moment = sum/n return(moment) From noreply at r-forge.r-project.org Wed Jun 19 13:30:47 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 19 Jun 2013 13:30:47 +0200 (CEST) Subject: [Returnanalytics-commits] r2373 - pkg/Meucci Message-ID: <20130619113048.23C45184690@r-forge.r-project.org> Author: xavierv Date: 2013-06-19 13:30:47 +0200 (Wed, 19 Jun 2013) New Revision: 2373 Modified: pkg/Meucci/DESCRIPTION pkg/Meucci/NAMESPACE Log: - added S_DisplayLognormalCopulaPdf.R script and its required functions Modified: pkg/Meucci/DESCRIPTION =================================================================== --- pkg/Meucci/DESCRIPTION 2013-06-19 07:04:44 UTC (rev 2372) +++ pkg/Meucci/DESCRIPTION 2013-06-19 11:30:47 UTC (rev 2373) @@ -28,7 +28,8 @@ R (>= 2.14.0), zoo, xts (>= 0.8), - matlab + matlab, + pracma Suggests: quadprog, mvtnorm, @@ -40,7 +41,6 @@ fOptions, moments, nloptr, - pracma, ggplot2, expm, latticeExtra @@ -62,3 +62,4 @@ 'RobustBayesianAllocation.R' 'LognormalMoments2Parameters.R' 'LognormalParameters2Statistics.R' + 'LognormalCopulaPdf.R' Modified: pkg/Meucci/NAMESPACE =================================================================== --- pkg/Meucci/NAMESPACE 2013-06-19 07:04:44 UTC (rev 2372) +++ pkg/Meucci/NAMESPACE 2013-06-19 11:30:47 UTC (rev 2373) @@ -11,6 +11,7 @@ export(hermitePolynomial) export(integrateSubIntervals) export(linreturn) +export(LognormalCopulaPdf) export(LognormalMoments2Parameters) export(LognormalParam2Statistics) export(MvnRnd) From noreply at r-forge.r-project.org Wed Jun 19 13:31:59 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 19 Jun 2013 13:31:59 +0200 (CEST) Subject: [Returnanalytics-commits] r2374 - in pkg/Meucci: R demo Message-ID: <20130619113159.DF143184690@r-forge.r-project.org> Author: xavierv Date: 2013-06-19 13:31:59 +0200 (Wed, 19 Jun 2013) New Revision: 2374 Added: pkg/Meucci/R/LognormalCopulaPdf.R pkg/Meucci/demo/S_AnalyzeLognormalCorrelation.R pkg/Meucci/demo/S_AnalyzeNormalCorrelation.R pkg/Meucci/demo/S_BivariateSample.R pkg/Meucci/demo/S_DisplayLognormalCopulaPdf.R Log: - added S_DisplayLognormalCopulaPdf.R script and its required functions (fix) Added: pkg/Meucci/R/LognormalCopulaPdf.R =================================================================== --- pkg/Meucci/R/LognormalCopulaPdf.R (rev 0) +++ pkg/Meucci/R/LognormalCopulaPdf.R 2013-06-19 11:31:59 UTC (rev 2374) @@ -0,0 +1,35 @@ + +#' Computes the pdf of the copula of the lognormal distribution at the generic point u in the unit hypercube, +#' as described in A. Meucci, "Risk and Asset Allocation", Springer, 2005. 
+#' +#' @param u : [vector] (J x 1) grades +#' @param Mu : [vector] (N x 1) location parameter +#' @param Sigma : [matrix] (N x N) scatter parameter +#' +#' @return F_U : [vector] (J x 1) PDF values +#' +#' @references +#' \url{http://} +#' See Meucci's script for "LognormalCopulaPdf.m" +#' +#' @author Xavier Valls \email{flamejat@@gmail.com} +#' @export + +LognormalCopulaPdf = function( u, Mu, Sigma ) +{ + N = length( u ); + s = sqrt( diag( Sigma )); + + x = qlnorm( u, Mu, s ); + + Numerator = ( 2 * pi ) ^ ( -N / 2 ) * ( (det ( Sigma ) ) ^ ( -0.5 ) ) / + prod(x) * exp( -.5 * t(log(x) - Mu) %*% mldivide( Sigma , ( log( x ) - Mu ), pinv=FALSE ) ); + + fs = dlnorm( x, Mu, s); + + Denominator = prod(fs); + + F_U = Numerator / Denominator; + + return ( F_U ); +} \ No newline at end of file Added: pkg/Meucci/demo/S_AnalyzeLognormalCorrelation.R =================================================================== --- pkg/Meucci/demo/S_AnalyzeLognormalCorrelation.R (rev 0) +++ pkg/Meucci/demo/S_AnalyzeLognormalCorrelation.R 2013-06-19 11:31:59 UTC (rev 2374) @@ -0,0 +1,42 @@ +#' This script considers a bivariate lognormal market and display the correlation and the condition number of the +#' covariance matrix, as described in A. Meucci, "Risk and Asset Allocation", Springer, 2005, Chapter 2. +#' +#' @references +#' \url{http://} +#' See Meucci's script for "S_AnalyzeLognormalCorrelation.m" +#' +#' @author Xavier Valls \email{flamejat@@gmail.com} +#' @export + + +########################################################################################################################################### +### Set input parameters +Mu = rbind( 0, 0 ) +s = c( 1, 1 ); +rhos = seq( -0.99, 0.99, 0.01 ); +nrhos = length( rhos ); +Cs = array( NaN, nrhos ); +CRs = array( NaN, nrhos ); + +########################################################################################################################################### +### Iterate of rho values and compute the correlation and condition number + +for ( n in 1 : nrhos ) +{ + rho = rhos[ n ] ; + Sigma = rbind( c(s[1]^2, rho * s[1] * s[2]), c(rho * s[1] * s[2], s[2]^2) ); + + S = LognormalParam2Statistics(Mu, Sigma); + + Lambda = eigen(S$Covariance); + + Cs[ n ] = S$Correlation[1, 2]; + CRs[ n ] = min(Lambda$values) / max(Lambda$values); +} + +########################################################################################################################################### +### Display the results + +par( mfrow = c( 2, 1) ); +plot( rhos, Cs, xlab = "r", ylab = "correlation", type ="l" ); +plot( rhos, CRs, xlab = "r", ylab = "condition ratio", type ="l" ); Added: pkg/Meucci/demo/S_AnalyzeNormalCorrelation.R =================================================================== --- pkg/Meucci/demo/S_AnalyzeNormalCorrelation.R (rev 0) +++ pkg/Meucci/demo/S_AnalyzeNormalCorrelation.R 2013-06-19 11:31:59 UTC (rev 2374) @@ -0,0 +1,46 @@ +#' This script considers a bivariate normal market and display the correlation and the condition number of the +#' covariance matrix, as described in A. Meucci, "Risk and Asset Allocation", Springer, 2005, Chapter 2. 
+#' +#' @references +#' \url{http://} +#' See Meucci's script for "S_AnalyzeNormalCorrelation.m" +#' +#' @author Xavier Valls \email{flamejat@@gmail.com} +#' @export + +################################################################################################################### +### Set input parameters + +Mu = rbind( 0, 0 ) +s = c( 1, 1 ); + +rhos = seq( -0.99, 0.99, 0.01 ); +nrhos = length( rhos ); + +Cs = array( NaN, nrhos ); +CRs = array( NaN, nrhos ); + + +################################################################################################################### +### Iterate of rho values and compute the correlation and condition numberfor ( n in 1 : nrhos ) + +for ( n in 1 : nrhos ) +{ + rho = rhos[ n ] ; + Sigma = rbind( c(s[1]^2, rho * s[1] * s[2]), c(rho * s[1] * s[2], s[2]^2) ); + + Covariance = Sigma; + Standard_Deviation = sqrt( diag( Covariance ) ); + Correlation = diag( 1 / Standard_Deviation ) %*% Covariance %*% diag( 1 / Standard_Deviation ); + + Lambda = eigen( Covariance ); + + Cs[n] = Correlation[ 1, 2 ]; + CRs[n] = min( Lambda$values ) / max( Lambda$values ); +} + +################################################################################################################### +### Display the results +par( mfrow = c( 2, 1) ); +plot( rhos, Cs, xlab = "r", ylab = "correlation", type ="l" ); +plot( rhos, CRs, xlab = "r", ylab = "condition ratio", type ="l" ); Added: pkg/Meucci/demo/S_BivariateSample.R =================================================================== --- pkg/Meucci/demo/S_BivariateSample.R (rev 0) +++ pkg/Meucci/demo/S_BivariateSample.R 2013-06-19 11:31:59 UTC (rev 2374) @@ -0,0 +1,117 @@ +#' This script generates draws from a bivariate distribution with different marginals, +#' as described in A. Meucci, "Risk and Asset Allocation", Springer, 2005, Chapter 2. 
+#' +#' @references +#' \url{http://} +#' See Meucci's script for "S_BivariateSample.m" +#' +#' @author Xavier Valls \email{flamejat@@gmail.com} +#' @export + +library(mvtnorm); +library(latticeExtra); + +################################################################################################################### +### input parameters + +nSim = 10000; + +# input for bivariate normal distribution + +NormCorr = -0.8; +NormStDev = rbind( 1, 3 ); # NOTE: this input plays no role in the final output +NormExpVal = rbind( -2, 5 ); # NOTE: this input plays no role in the final output + +# input for first marginal +nu_1 = 9; +sigmasq_1 = 2; + +# input for second marginal +mu_2 = 0; +sigmasq_2 = 0.04; +nu_2 = 7; # NOTE: not used below + +################################################################################################################### +### Generate draws from a bivariate normal distribution + +NormCorrMatrix = rbind( c( 1, NormCorr ), c( NormCorr, 1 )); +NormCovMatrix = diag( c( NormStDev ) ) %*% NormCorrMatrix %*% diag( c( NormStDev) ); + +Z = rvnorm( nSim, NormExpVal, NormCovMatrix ); + +Z_1 = Z[, 1]; +Z_2 = Z[, 2]; + +# display marginals: as expected, they are normal + +NumBins = round(10 * log(nSim)); +par( mfrow = c( 2, 1) ); +hist( Z_1, NumBins, xlab = "normal 1", ylab = "" ); +hist( Z_2, NumBins, xlab = "normal 2", ylab = "" ); + +plot( Z_1, Z_2, type = "p", xlab = "normal 1", ylab = "normal 2" ); + + +# 3d histograms + +NumBins2D = round(sqrt(100 * log(nSim))); +Z_3 = table( cut (Z_1, NumBins2D ), cut ( Z_2, NumBins2D )); + +cloud( Z_3, panel.3d.cloud = panel.3dbars, scales = list( arrows = FALSE, just = "right" ), + xlab = "normal 1", ylab = "normal 2", zlab="", main = "pdf normal" ); + +################################################################################################################### +### Generate draws from the copula + +U_1 = pnorm( Z[ , 1 ], NormExpVal[ 1 ], NormStDev[ 1 ]); # grade 1 +U_2 = pnorm( Z[ , 2 ], NormExpVal[ 2 ], NormStDev[ 2 ]); # grade 2 +U = cbind( U_1, U_2 ); # joint realizations from the required copula + +# plot copula +NumBins = round(10 * log(nSim)); +par( mfrow = c( 2, 1) ); +hist( U_1, NumBins, xlab = "grade 1", ylab = "", main = "" ); +hist( U_2, NumBins, xlab = "grade 2", ylab = "", main = "" ); + +# joint sample +plot(U_1, U_2, xlab="grade 1", ylab="grade 2" ); + +# 3d histogram +NumBins2D = round(sqrt(100 * log(nSim))); +U_3 = table( cut (U_1, NumBins2D ), cut ( U_2, NumBins2D )); +cloud( U_3, panel.3d.cloud = panel.3dbars, scales = list( arrows = FALSE, just = "right" ), + xlab = "grade 1", ylab = "grade 2", zlab="", main = "pdf copula" ); + +################################################################################################################### +### Generate draws from the joint distribution +a = nu_1 / 2; +b = 2 * sigmasq_1; +X_1 = qgamma( U_1, a, b ); + +sigma_2 = sqrt( sigmasq_2 ); +X_2 = qlnorm( U_2, mu_2, sigma_2 ); + +X = cbind( X_1, X_2 ); # joint realizations from the required distribution + +################################################################################################################### +### Plot joint distribution +# marginals: as expected, the histograms (pdf's) do NOT change as NormCorr varies + +NumBins = round(10 * log(nSim)); + + +par( mfrow = c( 2, 1) ); +# gamma distribution +hist( X_1, NumBins, xlab = "gamma", ylab = "", main = "" ); +# lognormal distribution +hist( X_2, NumBins, xlab = "lognormal", ylab = "", main = "" ); + +# joint sample +plot(X_1, X_2, xlab="gamma", ylab="lognormal" );
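# The recipe above in one compact form -- a minimal illustrative sketch; the
# helper `copula_draw` is hypothetical and not part of the demo. It maps
# correlated normals to grades with pnorm(), then maps the grades to any
# target marginal through that marginal's quantile function.
copula_draw = function( z, mu, sd, qmarg, ... ) qmarg( pnorm( z, mu, sd ), ... );
# e.g., with the parameters set earlier in this script:
# X_1 = copula_draw( Z[ , 1 ], NormExpVal[ 1 ], NormStDev[ 1 ], qgamma, a, b );
# X_2 = copula_draw( Z[ , 2 ], NormExpVal[ 2 ], NormStDev[ 2 ], qlnorm, mu_2, sigma_2 );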
+ +# 3d histogram +NumBins2D = round(sqrt(100 * log(nSim))); +X_3 = table( cut (X_1, NumBins2D ), cut ( X_2, NumBins2D )); +cloud( X_3, panel.3d.cloud = panel.3dbars, scales = list( arrows = FALSE, just = "right" ), + xlab = "gamma", ylab = "lognormal", zlab="", main = "pdf joint distribution" ); \ No newline at end of file Added: pkg/Meucci/demo/S_DisplayLognormalCopulaPdf.R =================================================================== --- pkg/Meucci/demo/S_DisplayLognormalCopulaPdf.R (rev 0) +++ pkg/Meucci/demo/S_DisplayLognormalCopulaPdf.R 2013-06-19 11:31:59 UTC (rev 2374) @@ -0,0 +1,43 @@ +#'This script displays the pdf of the copula of a lognormal distribution, as described +#' in A. Meucci, "Risk and Asset Allocation", Springer, 2005, Chapter 2. +#' +#' @references +#' \url{http://} +#' See Meucci's script for "S_DisplayLognormalCoulaPdf.m" +#' +#' @author Xavier Valls \email{flamejat@@gmail.com} +#' @export + +############################################################################################################# +### Input parameters +Mu = rbind( 100.0, -30.0 ); +r = 0.8; +sigmas = rbind( 1000, 0.01 ); +nu = 100; +Sigma = diag( c( sigmas ) ) %*% rbind( c( 1, r ), c( r, 1 ) ) %*% diag( c( sigmas ) ); + +############################################################################################################# +### Grid +GridSide1 = seq( 0.05, 0.95, 0.05 ); +GridSide2 = GridSide1; +nMesh = length(GridSide1); + +############################################################################################################# +### Compute pdf of copula + +f_U = matrix( NaN, nMesh, nMesh); + +for ( j in 1 : nMesh ) +{ + for ( k in 1 : nMesh) + { + u = c( GridSide1[ j ], GridSide2[ k ] ); + f_U[ j, k ] = LognormalCopulaPdf(u, Mu, Sigma); + } +} + +#mesh representation + +persp(GridSide1,GridSide2, f_U, + theta = 7 * 45, phi = 30, expand=0.6, col='lightblue', shade=0.75, ltheta=120, + ticktype='detailed', xlab = "U_1", ylab = "U_2", zlab = "copula pdf" ); From noreply at r-forge.r-project.org Wed Jun 19 13:36:30 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 19 Jun 2013 13:36:30 +0200 (CEST) Subject: [Returnanalytics-commits] r2375 - pkg/Meucci/man Message-ID: <20130619113630.E1EF2184690@r-forge.r-project.org> Author: xavierv Date: 2013-06-19 13:36:30 +0200 (Wed, 19 Jun 2013) New Revision: 2375 Added: pkg/Meucci/man/LognormalCopulaPdf.Rd Log: - roxygenized documentation for last added files Added: pkg/Meucci/man/LognormalCopulaPdf.Rd =================================================================== --- pkg/Meucci/man/LognormalCopulaPdf.Rd (rev 0) +++ pkg/Meucci/man/LognormalCopulaPdf.Rd 2013-06-19 11:36:30 UTC (rev 2375) @@ -0,0 +1,31 @@ +\name{LognormalCopulaPdf} +\alias{LognormalCopulaPdf} +\title{Computes the pdf of the copula of the lognormal distribution at the generic point u in the unit hypercube, +as described in A. Meucci, "Risk and Asset Allocation", Springer, 2005.} +\usage{ + LognormalCopulaPdf(u, Mu, Sigma) +} +\arguments{ + \item{u}{: [vector] (J x 1) grades} + + \item{Mu}{: [vector] (N x 1) location parameter} + + \item{Sigma}{: [matrix] (N x N) scatter parameter} +} +\value{ + F_U : [vector] (J x 1) PDF values +} +\description{ + Computes the pdf of the copula of the lognormal + distribution at the generic point u in the unit + hypercube, as described in A. Meucci, "Risk and Asset + Allocation", Springer, 2005. 
+} +\author{ + Xavier Valls \email{flamejat at gmail.com} +} +\references{ + \url{http://} See Meucci's script for + "LognormalCopulaPdf.m" +} + From noreply at r-forge.r-project.org Wed Jun 19 14:07:17 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 19 Jun 2013 14:07:17 +0200 (CEST) Subject: [Returnanalytics-commits] r2376 - pkg/PerformanceAnalytics/sandbox/pulkit Message-ID: <20130619120717.6AE9018572C@r-forge.r-project.org> Author: pulkit Date: 2013-06-19 14:07:16 +0200 (Wed, 19 Jun 2013) New Revision: 2376 Modified: pkg/PerformanceAnalytics/sandbox/pulkit/PSRopt.R Log: removed functions get_moments and get_stats Modified: pkg/PerformanceAnalytics/sandbox/pulkit/PSRopt.R =================================================================== --- pkg/PerformanceAnalytics/sandbox/pulkit/PSRopt.R 2013-06-19 11:36:30 UTC (rev 2375) +++ pkg/PerformanceAnalytics/sandbox/pulkit/PSRopt.R 2013-06-19 12:07:16 UTC (rev 2376) @@ -17,17 +17,16 @@ if(is.null(bounds)){ bounds = matrix(rep(c(0,1),columns),nrow = columns,byrow = TRUE) } - print(bounds) - d1z = NULL #Optimization Function optimize<-function(){ weights = rep(1,columns)/columns + d1z = 0 z = 0 iter = 0 mean = NULL for(column in 1:columns){ - mean = c(mean,get_Moments(x[,column],1)) + mean = c(mean,mean(x[,column])) } while(TRUE){ if(iter == MaxIter) break @@ -37,6 +36,7 @@ d1z = dZ$d1Z } iter = iter + 1 + print(iter) weights = stepSize(weights,d1z) if(is.null(weights)) return } @@ -55,8 +55,8 @@ #Calculate the step size to change the weights stepSize<-function(weights,d1Z){ - if(length(which(d1Z==0)) == 0){ - return(NULL) + if(length(which(d1Z!=0)) == 0){ + return(NULL) } weights[which(d1Z==max(d1Z))] = weights[which(d1Z==max(d1Z))]+delta/max(d1Z) weights = weights/sum(weights) @@ -67,12 +67,13 @@ get_d1Zs<-function(mean,weights){ d1Z = NULL m = NULL - x = Return.portfolio(x,weights) - m[1] = get_Moments(x,1) - for(i in 2:4){ - m = c(m,get_Moments(x,i+1,m[1])) - } - stats = get_Stats(m) + x_portfolio = Return.portfolio(x,weights) + mu = mean(x_portfolio) + sd = StdDev(x_portfolio) + sk = skewness(x_portfolio) + kr = kurtosis(x_portfolio) + stats = c(mu,sd,sk,kr) + m = c(stats[1],stats[2]^2,stats[3]*(stats[2]^3),stats[4]*(stats[2]^2)) SR = get_SR(stats,n) meanSR = SR$meanSR sigmaSR = SR$sigmaSR @@ -80,6 +81,7 @@ d1Z = c(d1Z,get_d1Z(stats,m,meanSR,sigmaSR,mean,weights,i)) } dZ = list("d1Z"=d1Z,"z"=meanSR/sigmaSR) + return(dZ) } @@ -104,31 +106,32 @@ return(get_dnMoments(mean,weights,2,1,index)/(2*sigma)) } - get_d1Skew<-function(d1Sigma,sigma,mean,w,index,m3){ + get_d1Skew<-function(d1Sigma,sigma,mean,weights,index,m3){ d1Skew = get_dnMoments(mean,weights,3,1,index)*sigma^3 d1Skew = d1Skew - 3*sigma^2*d1Sigma*m3 d1Skew = d1Skew/sigma^6 return(d1Skew) } - get_d1Kurt<-function(d1Sigma,sigma,mean,w,index,m4){ - d1Kurt = get_dnMoments(mean,w,4,1,index)*sigma^4 + get_d1Kurt<-function(d1Sigma,sigma,mean,weights,index,m4){ + d1Kurt = get_dnMoments(mean,weights,4,1,index)*sigma^4 d1Kurt = d1Kurt - 4*sigma^3*d1Sigma*m4 d1Kurt = d1Kurt/sigma^8 return(d1Kurt) } - get_dnMoments<-function(mean,w,mOrder,dOrder,index){ + get_dnMoments<-function(mean,weights,mOrder,dOrder,index){ sum = 0 x0 = 1 for(i in 1:dOrder){ x0 = x0*(mOrder-i) } + x_mat = as.matrix(na.omit(x)) for(i in 1:n){ x1 = 0 - x2 = (x[i,index]-mean[index])^dOrder + x2 = (x_mat[i,index]-mean[index])^dOrder for(j in 1:columns){ - x1 = x1 + weights[j]*(x[i,j]-mean[j]) + x1 = x1 + weights[j]*(x_mat[i,j]-mean[j]) } sum = sum + x2*x1^(mOrder-dOrder) } @@ -142,22 
+145,7 @@ SR<-list("meanSR"=meanSR,"sigmaSR"=sigmaSR) return(SR) } - #To calculate the Stats(mu , sigma , skewness and kurtosis) - get_Stats<-function(m){ - stats = c(m[1],m[2]^(0.5),(m[3]/m[2])^(3/2),(m[4]/m[2])^(0.5)) - return(stats) - } - # TO calculate the moments - get_Moments<-function(x,order,mean = 0){ - sum = 0 - for(i in 1:n){ - sum = sum + (x[i]-mean)^order - } - moment = sum/n - return(moment) - } - optimize() return(weights) } @@ -165,3 +153,4 @@ + From noreply at r-forge.r-project.org Wed Jun 19 18:23:02 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 19 Jun 2013 18:23:02 +0200 (CEST) Subject: [Returnanalytics-commits] r2377 - in pkg/Meucci: . R demo man Message-ID: <20130619162303.23635185464@r-forge.r-project.org> Author: xavierv Date: 2013-06-19 18:23:02 +0200 (Wed, 19 Jun 2013) New Revision: 2377 Added: pkg/Meucci/R/NormalCopulaPdf.R pkg/Meucci/demo/S_DisplayNormalCopulaCdf.R pkg/Meucci/demo/S_DisplayNormalCopulaPdf.R pkg/Meucci/man/NormalCopulaPdf.Rd Modified: pkg/Meucci/DESCRIPTION pkg/Meucci/NAMESPACE pkg/Meucci/R/LognormalCopulaPdf.R pkg/Meucci/R/LognormalMoments2Parameters.R pkg/Meucci/demo/S_BivariateSample.R pkg/Meucci/demo/S_DisplayLognormalCopulaPdf.R pkg/Meucci/man/LognormalMoments2Parameters.Rd Log: - fixed error and added demos for displaying Normal distribution Copula Cdf and pdf Modified: pkg/Meucci/DESCRIPTION =================================================================== --- pkg/Meucci/DESCRIPTION 2013-06-19 12:07:16 UTC (rev 2376) +++ pkg/Meucci/DESCRIPTION 2013-06-19 16:23:02 UTC (rev 2377) @@ -63,3 +63,4 @@ 'LognormalMoments2Parameters.R' 'LognormalParameters2Statistics.R' 'LognormalCopulaPdf.R' + 'NormalCopulaPdf.R' Modified: pkg/Meucci/NAMESPACE =================================================================== --- pkg/Meucci/NAMESPACE 2013-06-19 12:07:16 UTC (rev 2376) +++ pkg/Meucci/NAMESPACE 2013-06-19 16:23:02 UTC (rev 2377) @@ -16,6 +16,7 @@ export(LognormalParam2Statistics) export(MvnRnd) export(NoisyObservations) +export(NormalCopulaPdf) export(normalizeProb) export(PanicCopula) export(PartialConfidencePosterior) Modified: pkg/Meucci/R/LognormalCopulaPdf.R =================================================================== --- pkg/Meucci/R/LognormalCopulaPdf.R 2013-06-19 12:07:16 UTC (rev 2376) +++ pkg/Meucci/R/LognormalCopulaPdf.R 2013-06-19 16:23:02 UTC (rev 2377) @@ -23,7 +23,7 @@ x = qlnorm( u, Mu, s ); Numerator = ( 2 * pi ) ^ ( -N / 2 ) * ( (det ( Sigma ) ) ^ ( -0.5 ) ) / - prod(x) * exp( -.5 * t(log(x) - Mu) %*% mldivide( Sigma , ( log( x ) - Mu ), pinv=FALSE ) ); + prod(x) * exp( -0.5 * t(log(x) - Mu) %*% mldivide( Sigma , ( log( x ) - Mu ), pinv=FALSE ) ); fs = dlnorm( x, Mu, s); Modified: pkg/Meucci/R/LognormalMoments2Parameters.R =================================================================== --- pkg/Meucci/R/LognormalMoments2Parameters.R 2013-06-19 12:07:16 UTC (rev 2376) +++ pkg/Meucci/R/LognormalMoments2Parameters.R 2013-06-19 16:23:02 UTC (rev 2377) @@ -1,5 +1,5 @@ #' Compute the mean and standard deviation of a lognormal distribution from its parameters, as described in -#' A. Meucci, "Risk and Asset Allocation", Springer, 2005, Chapter 1. +#' A. Meucci, "Risk and Asset Allocation", Springer, 2005. 
#' #' @param e : [scalar] expected value of the lognormal distribution #' @param v : [scalar] variance of the lognormal distribution @@ -7,11 +7,11 @@ #' @return mu : [scalar] expected value of the normal distribution #' @return sig2 : [scalar] variance of the normal distribution #' -#' @note Inverts the formulas (1.98)-(1.99) in Risk and Asset Allocation", Springer, 2005. +#' @note Inverts the formulas (1.98)-(1.99) in "Risk and Asset Allocation", Springer, 2005. #' #' @references #' \url{http://} -#' See Meucci's script for "LognormalMoments2Parameters" +#' See Meucci's script for "LognormalMoments2Parameters.m" #' #' @author Xavier Valls \email{flamejat@@gmail.com} #' @export Added: pkg/Meucci/R/NormalCopulaPdf.R =================================================================== --- pkg/Meucci/R/NormalCopulaPdf.R (rev 0) +++ pkg/Meucci/R/NormalCopulaPdf.R 2013-06-19 16:23:02 UTC (rev 2377) @@ -0,0 +1,33 @@ +#' Computes the pdf of the copula of the normal distribution at the generic point u in the unit hypercube, +#' as described in A. Meucci, "Risk and Asset Allocation", Springer, 2005. +#' +#' @param u : [vector] (J x 1) grade +#' @param Mu : [vector] (N x 1) mean +#' @param Sigma : [matrix] (N x N) covariance +#' +#' @return F_U : [vector] (J x 1) PDF values +#' +#' @references +#' \url{http://} +#' See Meucci's script for "LognormalCopulaPdf.m" +#' +#' @author Xavier Valls \email{flamejat@@gmail.com} +#' @export + +NormalCopulaPdf = function( u, Mu, Sigma ) +{ + N = length( u ); + s = sqrt( diag( Sigma )); + + x = qnorm( u, Mu, s ); + + Numerator = ( 2 * pi ) ^ ( -N / 2 ) * ( (det ( Sigma ) ) ^ ( -0.5 ) ) * exp( -0.5 * t(x - Mu) %*% mldivide( Sigma , ( x - Mu ), pinv = FALSE ) ); + + fs = dnorm( x, Mu, s); + + Denominator = prod(fs); + + F_U = Numerator / Denominator; + + return ( F_U ); +} \ No newline at end of file Modified: pkg/Meucci/demo/S_BivariateSample.R =================================================================== --- pkg/Meucci/demo/S_BivariateSample.R 2013-06-19 12:07:16 UTC (rev 2376) +++ pkg/Meucci/demo/S_BivariateSample.R 2013-06-19 16:23:02 UTC (rev 2377) @@ -38,7 +38,7 @@ NormCorrMatrix = rbind( c( 1, NormCorr ), c( NormCorr, 1 )); NormCovMatrix = diag( c( NormStDev ) ) %*% NormCorrMatrix %*% diag( c( NormStDev) ); -Z = rvnorm( nSim, NormExpVal, NormCovMatrix ); +Z = rmvnorm( nSim, NormExpVal, NormCovMatrix ); Z_1 = Z[, 1]; Z_2 = Z[, 2]; Modified: pkg/Meucci/demo/S_DisplayLognormalCopulaPdf.R =================================================================== --- pkg/Meucci/demo/S_DisplayLognormalCopulaPdf.R 2013-06-19 12:07:16 UTC (rev 2376) +++ pkg/Meucci/demo/S_DisplayLognormalCopulaPdf.R 2013-06-19 16:23:02 UTC (rev 2377) @@ -3,7 +3,7 @@ #' #' @references #' \url{http://} -#' See Meucci's script for "S_DisplayLognormalCoulaPdf.m" +#' See Meucci's script for "S_DisplayLognormalCopulaPdf.m" #' #' @author Xavier Valls \email{flamejat@@gmail.com} #' @export @@ -38,6 +38,6 @@ #mesh representation -persp(GridSide1,GridSide2, f_U, +persp( GridSide1, GridSide2, f_U, theta = 7 * 45, phi = 30, expand=0.6, col='lightblue', shade=0.75, ltheta=120, ticktype='detailed', xlab = "U_1", ylab = "U_2", zlab = "copula pdf" ); Added: pkg/Meucci/demo/S_DisplayNormalCopulaCdf.R =================================================================== --- pkg/Meucci/demo/S_DisplayNormalCopulaCdf.R (rev 0) +++ pkg/Meucci/demo/S_DisplayNormalCopulaCdf.R 2013-06-19 16:23:02 UTC (rev 2377) @@ -0,0 +1,43 @@ +#'This script displays the cdf of the copula of a normal distribution, as described 
+#' in A. Meucci, "Risk and Asset Allocation", Springer, 2005, Chapter 2. +#' +#' @references +#' \url{http://} +#' See Meucci's script for "S_DisplayNormalCopulaPdf.m" +#' +#' @author Xavier Valls \email{flamejat@@gmail.com} +#' @export + +############################################################################################################# +### Input parameters +Mu = c( 0, 0 ); +r = -0.999; +sigmas = c(1, 1 ); +Sigma = diag( c( sigmas ) ) %*% rbind( c( 1, r ), c( r, 1 ) ) %*% diag( c( sigmas ) ); + +############################################################################################################# +### Grid +GridSide1 = seq( 0.05, 0.95, 0.05 ); +GridSide2 = GridSide1; +nMesh = length(GridSide1); + +############################################################################################################# +### Compute cdf of copula + +F_U = matrix( NaN, nMesh, nMesh); + +for ( j in 1 : nMesh ) +{ + for ( k in 1 : nMesh) + { + u = c( GridSide1[ j ], GridSide2[ k ] ); + x= qnorm( u, Mu, sigmas ); + F_U[ j, k ] = pmvnorm( lower = -Inf, upper = x, mean = Mu, corr = Sigma ); + } +} + +#mesh representation + +persp( GridSide1, GridSide2, F_U, + theta = 7 * 45, phi = 30, expand=0.6, col='lightblue', shade=0.75, ltheta=120, + ticktype='detailed', xlab = "U_1", ylab = "U_2", zlab = "copula cdf" ); \ No newline at end of file Added: pkg/Meucci/demo/S_DisplayNormalCopulaPdf.R =================================================================== --- pkg/Meucci/demo/S_DisplayNormalCopulaPdf.R (rev 0) +++ pkg/Meucci/demo/S_DisplayNormalCopulaPdf.R 2013-06-19 16:23:02 UTC (rev 2377) @@ -0,0 +1,42 @@ +#'This script displays the pdf of the copula of a normal distribution, as described +#' in A. Meucci, "Risk and Asset Allocation", Springer, 2005, Chapter 2. +#' +#' @references +#' \url{http://} +#' See Meucci's script for "S_DisplayNormalCopulaPdf.m" +#' +#' @author Xavier Valls \email{flamejat@@gmail.com} +#' @export + +############################################################################################################# +### input parameters +Mu = rbind( 1, -1 ); +r = 0.7; +sigmas = rbind( 1, 1 ); +Sigma = diag( c( sigmas ) ) %*% rbind( c( 1, r ), c( r, 1 ) ) %*% diag( c( sigmas ) ); + +############################################################################################################# +### Grid +GridSide1 = seq( 0.05, 0.95, 0.05 ); +GridSide2 = GridSide1; +nMesh = length(GridSide1); + +############################################################################################################# +### Compute pdf of copula + +f_U = matrix( NaN, nMesh, nMesh); + +for ( j in 1 : nMesh ) +{ + for ( k in 1 : nMesh) + { + u = c( GridSide1[ j ], GridSide2[ k ] ); + f_U[ j, k ] = NormalCopulaPdf(u, Mu, Sigma); + } +} + +#mesh representation + +persp( GridSide1, GridSide2, f_U, + theta = 7 * 45, phi = 30, expand=0.6, col='lightblue', shade=0.75, ltheta=120, + ticktype='detailed', xlab = "U_1", ylab = "U_2", zlab = "copula pdf" ); Modified: pkg/Meucci/man/LognormalMoments2Parameters.Rd =================================================================== --- pkg/Meucci/man/LognormalMoments2Parameters.Rd 2013-06-19 12:07:16 UTC (rev 2376) +++ pkg/Meucci/man/LognormalMoments2Parameters.Rd 2013-06-19 16:23:02 UTC (rev 2377) @@ -1,7 +1,7 @@ \name{LognormalMoments2Parameters} \alias{LognormalMoments2Parameters} \title{Compute the mean and standard deviation of a lognormal distribution from its parameters, as described in -A. 
Meucci, "Risk and Asset Allocation", Springer, 2005, Chapter 1.} +A. Meucci, "Risk and Asset Allocation", Springer, 2005.} \usage{ LognormalMoments2Parameters(e, v) } @@ -20,11 +20,10 @@ \description{ Compute the mean and standard deviation of a lognormal distribution from its parameters, as described in A. - Meucci, "Risk and Asset Allocation", Springer, 2005, - Chapter 1. + Meucci, "Risk and Asset Allocation", Springer, 2005. } \note{ - Inverts the formulas (1.98)-(1.99) in Risk and Asset + Inverts the formulas (1.98)-(1.99) in "Risk and Asset Allocation", Springer, 2005. } \author{ @@ -32,6 +31,6 @@ } \references{ \url{http://} See Meucci's script for - "LognormalMoments2Parameters" + "LognormalMoments2Parameters.m" } Added: pkg/Meucci/man/NormalCopulaPdf.Rd =================================================================== --- pkg/Meucci/man/NormalCopulaPdf.Rd (rev 0) +++ pkg/Meucci/man/NormalCopulaPdf.Rd 2013-06-19 16:23:02 UTC (rev 2377) @@ -0,0 +1,31 @@ +\name{NormalCopulaPdf} +\alias{NormalCopulaPdf} +\title{Computes the pdf of the copula of the normal distribution at the generic point u in the unit hypercube, +as described in A. Meucci, "Risk and Asset Allocation", Springer, 2005.} +\usage{ + NormalCopulaPdf(u, Mu, Sigma) +} +\arguments{ + \item{u}{: [vector] (J x 1) grade} + + \item{Mu}{: [vector] (N x 1) mean} + + \item{Sigma}{: [matrix] (N x N) covariance} +} +\value{ + F_U : [vector] (J x 1) PDF values +} +\description{ + Computes the pdf of the copula of the normal distribution + at the generic point u in the unit hypercube, as + described in A. Meucci, "Risk and Asset Allocation", + Springer, 2005. +} +\author{ + Xavier Valls \email{flamejat at gmail.com} +} +\references{ + \url{http://} See Meucci's script for + "LognormalCopulaPdf.m" +} + From noreply at r-forge.r-project.org Wed Jun 19 19:20:56 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 19 Jun 2013 19:20:56 +0200 (CEST) Subject: [Returnanalytics-commits] r2378 - in pkg/Meucci: . 
R demo Message-ID: <20130619172056.4880018577E@r-forge.r-project.org> Author: xavierv Date: 2013-06-19 19:20:55 +0200 (Wed, 19 Jun 2013) New Revision: 2378 Added: pkg/Meucci/R/StudentTCopulaPdf.R pkg/Meucci/demo/S_DisplayStudentTCopulaPdf.R Modified: pkg/Meucci/DESCRIPTION pkg/Meucci/NAMESPACE pkg/Meucci/R/LognormalCopulaPdf.R pkg/Meucci/R/NormalCopulaPdf.R Log: - added demos for displaying Student T distribution Copula pdf Modified: pkg/Meucci/DESCRIPTION =================================================================== --- pkg/Meucci/DESCRIPTION 2013-06-19 16:23:02 UTC (rev 2377) +++ pkg/Meucci/DESCRIPTION 2013-06-19 17:20:55 UTC (rev 2378) @@ -64,3 +64,4 @@ 'LognormalParameters2Statistics.R' 'LognormalCopulaPdf.R' 'NormalCopulaPdf.R' + 'StudentTCopulaPdf.R' Modified: pkg/Meucci/NAMESPACE =================================================================== --- pkg/Meucci/NAMESPACE 2013-06-19 16:23:02 UTC (rev 2377) +++ pkg/Meucci/NAMESPACE 2013-06-19 17:20:55 UTC (rev 2378) @@ -11,12 +11,10 @@ export(hermitePolynomial) export(integrateSubIntervals) export(linreturn) -export(LognormalCopulaPdf) export(LognormalMoments2Parameters) export(LognormalParam2Statistics) export(MvnRnd) export(NoisyObservations) -export(NormalCopulaPdf) export(normalizeProb) export(PanicCopula) export(PartialConfidencePosterior) Modified: pkg/Meucci/R/LognormalCopulaPdf.R =================================================================== --- pkg/Meucci/R/LognormalCopulaPdf.R 2013-06-19 16:23:02 UTC (rev 2377) +++ pkg/Meucci/R/LognormalCopulaPdf.R 2013-06-19 17:20:55 UTC (rev 2378) @@ -15,6 +15,8 @@ #' @author Xavier Valls \email{flamejat@@gmail.com} #' @export +library(pracma); + LognormalCopulaPdf = function( u, Mu, Sigma ) { N = length( u ); Modified: pkg/Meucci/R/NormalCopulaPdf.R =================================================================== --- pkg/Meucci/R/NormalCopulaPdf.R 2013-06-19 16:23:02 UTC (rev 2377) +++ pkg/Meucci/R/NormalCopulaPdf.R 2013-06-19 17:20:55 UTC (rev 2378) @@ -5,7 +5,7 @@ #' @param Mu : [vector] (N x 1) mean #' @param Sigma : [matrix] (N x N) covariance #' -#' @return F_U : [vector] (J x 1) PDF values +#' @return F_U : [vector] (J x 1) PDF values #' #' @references #' \url{http://} @@ -14,6 +14,8 @@ #' @author Xavier Valls \email{flamejat@@gmail.com} #' @export +library(pracma); + NormalCopulaPdf = function( u, Mu, Sigma ) { N = length( u ); @@ -21,7 +23,7 @@ x = qnorm( u, Mu, s ); - Numerator = ( 2 * pi ) ^ ( -N / 2 ) * ( (det ( Sigma ) ) ^ ( -0.5 ) ) * exp( -0.5 * t(x - Mu) %*% mldivide( Sigma , ( x - Mu ), pinv = FALSE ) ); + Numerator = ( 2 * pi ) ^ ( -N / 2 ) * ( (det ( Sigma ) ) ^ ( -0.5 ) ) * exp( -0.5 * t(x - Mu) %*% mldivide( Sigma , ( x - Mu ))); fs = dnorm( x, Mu, s); Added: pkg/Meucci/R/StudentTCopulaPdf.R =================================================================== --- pkg/Meucci/R/StudentTCopulaPdf.R (rev 0) +++ pkg/Meucci/R/StudentTCopulaPdf.R 2013-06-19 17:20:55 UTC (rev 2378) @@ -0,0 +1,41 @@ +#' Pdf of the copula of the Student t distribution at the generic point u in the unit hypercube, +#' as described in A. Meucci, "Risk and Asset Allocation", Springer, 2005. 
+#' +#' @param u : [vector] (J x 1) grade +#' @param nu : [numerical] degrees of freedom +#' @param Mu : [vector] (N x 1) mean +#' @param Sigma : [matrix] (N x N) scatter +#' +#' +#' @return F_U : [vector] (J x 1) PDF values +#' +#' @references +#' \url{http://} +#' See Meucci's script for "StudentTCopulaPdf.m" +#' +#' @author Xavier Valls \email{flamejat@@gmail.com} +#' @export + +library(pracma); + + +StudentTCopulaPdf = function( u, nu, Mu, Sigma ) +{ + N = length( u ); + s = sqrt( diag( Sigma )); + + x = Mu + s * qt( u, nu); + + z2 = t(x - Mu) %*% mldivide( Sigma, (x - Mu)); #z2 = t(x - Mu) %*% inv(Sigma) * (x-Mu); + K = ( nu * pi )^( -N / 2 ) * gamma( ( nu + N ) / 2 ) / gamma( nu / 2 ) * ( ( det( Sigma ) )^( -0.5 )); + Numerator = K * (1 + z2 / nu)^(-(nu + N) / 2); + + + fs = dt((x - Mu) / s , nu); + + Denominator = prod(fs); + + F_U = Numerator / Denominator; + + return ( F_U ); +} Added: pkg/Meucci/demo/S_DisplayStudentTCopulaPdf.R =================================================================== --- pkg/Meucci/demo/S_DisplayStudentTCopulaPdf.R (rev 0) +++ pkg/Meucci/demo/S_DisplayStudentTCopulaPdf.R 2013-06-19 17:20:55 UTC (rev 2378) @@ -0,0 +1,45 @@ +#'This script displays the pdf of the copula of a Student t distribution, as described +#' in A. Meucci, "Risk and Asset Allocation", Springer, 2005, Chapter 2. +#' +#' @references +#' \url{http://} +#' See Meucci's script for "S_DisplayNormalCopulaPdf.m" +#' +#' @author Xavier Valls \email{flamejat@@gmail.com} +#' @export + +############################################################################################################# +### input parameters + +Mu = rbind( 0, 0 ); +r = 0.5; +sigmas = rbind( 1, 2 ); +Sigma = diag( c( sigmas ) ) %*% rbind( c( 1, r ), c( r, 1 ) ) %*% diag( c( sigmas ) ); +#nu = 1; Sigma(1,2) = 0; Sigma(2,1) = 0; +nu = 200; + +############################################################################################################# +### Grid +GridSide1 = seq( 0.05, 0.95, 0.05 ); +GridSide2 = GridSide1; +nMesh = length(GridSide1); + +############################################################################################################# +### Compute pdf of copula + +f_U = matrix( NaN, nMesh, nMesh); + +for ( j in 1 : nMesh ) +{ + for ( k in 1 : nMesh) + { + u = c( GridSide1[ j ], GridSide2[ k ] ); + f_U[ j, k ] = StudentTCopulaPdf( u, nu, Mu, Sigma ); + } +} + +#mesh representation + +persp( GridSide1, GridSide2, f_U, + theta = 7 * 45, phi = 30, expand=0.6, col='lightblue', shade=0.75, ltheta=120, + ticktype='detailed', xlab = "U_1", ylab = "U_2", zlab = "copula pdf" ); From noreply at r-forge.r-project.org Wed Jun 19 19:29:27 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 19 Jun 2013 19:29:27 +0200 (CEST) Subject: [Returnanalytics-commits] r2379 - in pkg/Meucci: . 
R man Message-ID: <20130619172927.8841B18589E@r-forge.r-project.org> Author: xavierv Date: 2013-06-19 19:29:26 +0200 (Wed, 19 Jun 2013) New Revision: 2379 Added: pkg/Meucci/man/StudentTCopulaPdf.Rd Modified: pkg/Meucci/NAMESPACE pkg/Meucci/R/LognormalCopulaPdf.R pkg/Meucci/R/NormalCopulaPdf.R pkg/Meucci/R/StudentTCopulaPdf.R Log: - added roxygenized documentation for the StudentTCopulaPdf function Modified: pkg/Meucci/NAMESPACE =================================================================== --- pkg/Meucci/NAMESPACE 2013-06-19 17:20:55 UTC (rev 2378) +++ pkg/Meucci/NAMESPACE 2013-06-19 17:29:26 UTC (rev 2379) @@ -11,10 +11,12 @@ export(hermitePolynomial) export(integrateSubIntervals) export(linreturn) +export(LognormalCopulaPdf) export(LognormalMoments2Parameters) export(LognormalParam2Statistics) export(MvnRnd) export(NoisyObservations) +export(NormalCopulaPdf) export(normalizeProb) export(PanicCopula) export(PartialConfidencePosterior) @@ -25,6 +27,7 @@ export(RIEfficientFrontier) export(robustBayesianPortfolioOptimization) export(std) +export(StudentTCopulaPdf) export(subIntervals) export(SummStats) export(Tweak) Modified: pkg/Meucci/R/LognormalCopulaPdf.R =================================================================== --- pkg/Meucci/R/LognormalCopulaPdf.R 2013-06-19 17:20:55 UTC (rev 2378) +++ pkg/Meucci/R/LognormalCopulaPdf.R 2013-06-19 17:29:26 UTC (rev 2379) @@ -1,3 +1,4 @@ +library(pracma); #' Computes the pdf of the copula of the lognormal distribution at the generic point u in the unit hypercube, #' as described in A. Meucci, "Risk and Asset Allocation", Springer, 2005. @@ -6,7 +7,7 @@ #' @param Mu : [vector] (N x 1) location parameter #' @param Sigma : [matrix] (N x N) scatter parameter #' -#' @return F_U : [vector] (J x 1) PDF values +#' @return F_U : [vector] (J x 1) PDF values #' #' @references #' \url{http://} @@ -15,10 +16,8 @@ #' @author Xavier Valls \email{flamejat@@gmail.com} #' @export -library(pracma); - LognormalCopulaPdf = function( u, Mu, Sigma ) -{ +{ N = length( u ); s = sqrt( diag( Sigma )); Modified: pkg/Meucci/R/NormalCopulaPdf.R =================================================================== --- pkg/Meucci/R/NormalCopulaPdf.R 2013-06-19 17:20:55 UTC (rev 2378) +++ pkg/Meucci/R/NormalCopulaPdf.R 2013-06-19 17:29:26 UTC (rev 2379) @@ -1,3 +1,5 @@ +library(pracma); + #' Computes the pdf of the copula of the normal distribution at the generic point u in the unit hypercube, #' as described in A. Meucci, "Risk and Asset Allocation", Springer, 2005. #' @@ -14,8 +16,6 @@ #' @author Xavier Valls \email{flamejat@@gmail.com} #' @export -library(pracma); - NormalCopulaPdf = function( u, Mu, Sigma ) { N = length( u ); Modified: pkg/Meucci/R/StudentTCopulaPdf.R =================================================================== --- pkg/Meucci/R/StudentTCopulaPdf.R 2013-06-19 17:20:55 UTC (rev 2378) +++ pkg/Meucci/R/StudentTCopulaPdf.R 2013-06-19 17:29:26 UTC (rev 2379) @@ -1,3 +1,5 @@ +library(pracma); + #' Pdf of the copula of the Student t distribution at the generic point u in the unit hypercube, #' as described in A. Meucci, "Risk and Asset Allocation", Springer, 2005. 
#' @@ -16,9 +18,6 @@ #' @author Xavier Valls \email{flamejat@@gmail.com} #' @export -library(pracma); - - StudentTCopulaPdf = function( u, nu, Mu, Sigma ) { N = length( u ); Added: pkg/Meucci/man/StudentTCopulaPdf.Rd =================================================================== --- pkg/Meucci/man/StudentTCopulaPdf.Rd (rev 0) +++ pkg/Meucci/man/StudentTCopulaPdf.Rd 2013-06-19 17:29:26 UTC (rev 2379) @@ -0,0 +1,32 @@ +\name{StudentTCopulaPdf} +\alias{StudentTCopulaPdf} +\title{Pdf of the copula of the Student t distribution at the generic point u in the unit hypercube, +as described in A. Meucci, "Risk and Asset Allocation", Springer, 2005.} +\usage{ + StudentTCopulaPdf(u, nu, Mu, Sigma) +} +\arguments{ + \item{u}{: [vector] (J x 1) grade} + + \item{nu}{: [numerical] degrees of freedom} + + \item{Mu}{: [vector] (N x 1) mean} + + \item{Sigma}{: [matrix] (N x N) scatter} +} +\value{ + F_U : [vector] (J x 1) PDF values +} +\description{ + Pdf of the copula of the Student t distribution at the + generic point u in the unit hypercube, as described in A. + Meucci, "Risk and Asset Allocation", Springer, 2005. +} +\author{ + Xavier Valls \email{flamejat at gmail.com} +} +\references{ + \url{http://} See Meucci's script for + "StudentTCopulaPdf.m" +} + From noreply at r-forge.r-project.org Wed Jun 19 19:29:53 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 19 Jun 2013 19:29:53 +0200 (CEST) Subject: [Returnanalytics-commits] r2380 - pkg/PerformanceAnalytics/sandbox/pulkit Message-ID: <20130619172953.2781018589E@r-forge.r-project.org> Author: pulkit Date: 2013-06-19 19:29:52 +0200 (Wed, 19 Jun 2013) New Revision: 2380 Modified: pkg/PerformanceAnalytics/sandbox/pulkit/PSRopt.R Log: Updated PSR Optimization Modified: pkg/PerformanceAnalytics/sandbox/pulkit/PSRopt.R =================================================================== --- pkg/PerformanceAnalytics/sandbox/pulkit/PSRopt.R 2013-06-19 17:29:26 UTC (rev 2379) +++ pkg/PerformanceAnalytics/sandbox/pulkit/PSRopt.R 2013-06-19 17:29:52 UTC (rev 2380) @@ -6,7 +6,7 @@ #'@param MaxIter The Maximum number of iterations #'@param delta The value of delta Z -PsrPortfolio<-function(R,bounds=NULL,MaxIter = 1000,delta = 0.05){ +PsrPortfolio<-function(R,bounds=NULL,MaxIter = 1000,delta = 0.005){ x = checkData(R) columns = ncol(x) @@ -28,16 +28,18 @@ for(column in 1:columns){ mean = c(mean,mean(x[,column])) } - while(TRUE){ + flag = TRUE + while(flag){ if(iter == MaxIter) break dZ = get_d1Zs(mean,weights) - if(dZ$z>z & checkBounds(weights)==TRUE){ - z = dZ$z - d1z = dZ$d1Z - } + if(dZ$z Author: shubhanm Date: 2013-06-19 19:51:09 +0200 (Wed, 19 Jun 2013) New Revision: 2381 Added: pkg/PerformanceAnalytics/sandbox/Shubhankit/ pkg/PerformanceAnalytics/sandbox/Shubhankit/LoSharpeRatio.R Log: Added: pkg/PerformanceAnalytics/sandbox/Shubhankit/LoSharpeRatio.R =================================================================== --- pkg/PerformanceAnalytics/sandbox/Shubhankit/LoSharpeRatio.R (rev 0) +++ pkg/PerformanceAnalytics/sandbox/Shubhankit/LoSharpeRatio.R 2013-06-19 17:51:09 UTC (rev 2381) @@ -0,0 +1,71 @@ +LoSharpeRatio<- + function(R = NULL,Rf=0.,q = 0., ...) 
+ { +columns = 1 +columnnames = NULL +#Error handling if R is not NULL +if(!is.null(R)){ + x = checkData(R) + columns = ncol(x) + n = nrow(x) + + if(q==0){ + stop("q, the number of autocorrelation lags, must be greater than 0") + + } + else{ + # A portfolio is constructed by applying the weights + + count = q + columns = ncol(x) + columnnames = colnames(x) + + # Calculate AutoCorrelation Coefficient + for(column in 1:columns) { # for each asset passed in as R + y = checkData(x[,column], method="vector", na.rm = TRUE) + + acflag6 = acf(y,plot=FALSE,lag.max=6)[[1]][2:7] + LjungBox = Box.test(y,type="Ljung-Box",lag=q) + values = c(acflag6, LjungBox$p.value) + # values = base::round(as.numeric(values),digits) + + if(column == 1) { + result.df = data.frame(Value = values) + colnames(result.df) = columnnames[column] + } + else { + nextcol = data.frame(Value = values) + colnames(nextcol) = columnnames[column] + result.df = cbind(result.df, nextcol) + } + } + # Calculate Neta's + for(column in 1:columns) { + sum = 0 + z = checkData(x[,column], method="vector", na.rm = TRUE) + for(lag in 1:(q-1) ) + { + sum = sum + (count-lag)*result.df[lag,column] + + } + + netaq = count/(sqrt(count+2*sum)) + if(column == 1) { + netacol = data.frame(Value = netaq) + colnames(netacol) = columnnames[column] + } + else { + nextcol = data.frame(Value = netaq) + colnames(nextcol) = columnnames[column] + netacol = cbind(netacol, nextcol) + } + + } + shrp = SharpeRatio(x, Rf, FUN="VaR" , method="gaussian") + results = shrp*netacol + colnames(results) = colnames(x) + return(results) + } + } +} \ No newline at end of file From noreply at r-forge.r-project.org Wed Jun 19 19:53:12 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 19 Jun 2013 19:53:12 +0200 (CEST) Subject: [Returnanalytics-commits] r2382 - pkg/PerformanceAnalytics/sandbox/Shubhankit Message-ID: <20130619175312.EE816185349@r-forge.r-project.org> Author: shubhanm Date: 2013-06-19 19:53:12 +0200 (Wed, 19 Jun 2013) New Revision: 2382 Added: pkg/PerformanceAnalytics/sandbox/Shubhankit/GLMSmoothIndex.R Log: Added: pkg/PerformanceAnalytics/sandbox/Shubhankit/GLMSmoothIndex.R =================================================================== --- pkg/PerformanceAnalytics/sandbox/Shubhankit/GLMSmoothIndex.R (rev 0) +++ pkg/PerformanceAnalytics/sandbox/Shubhankit/GLMSmoothIndex.R 2013-06-19 17:53:12 UTC (rev 2382) @@ -0,0 +1,36 @@ +GLMSmoothIndex<- + function(R = NULL, ...)
+ { + columns = 1 + columnnames = NULL + #Error handling if R is not NULL + if(!is.null(R)){ + x = checkData(R) + columns = ncol(x) + n = nrow(x) + columnnames = colnames(x) + + # Calculate AutoCorrelation Coefficient + for(column in 1:columns) { # for each asset passed in as R + y = checkData(x[,column], method="vector", na.rm = TRUE) + + acflag6 = acf(y,plot=FALSE,lag.max=6)[[1]][2:7] + values = sum(acflag6*acflag6)/(sum(acflag6)*sum(acflag6)) + + if(column == 1) { + result.df = data.frame(Value = values) + colnames(result.df) = columnnames[column] + } + else { + nextcol = data.frame(Value = values) + colnames(nextcol) = columnnames[column] + result.df = cbind(result.df, nextcol) + } + } + return(result.df) + + } + } \ No newline at end of file From noreply at r-forge.r-project.org Thu Jun 20 05:00:00 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Thu, 20 Jun 2013 05:00:00 +0200 (CEST) Subject: [Returnanalytics-commits] r2383 - pkg/PortfolioAnalytics/R Message-ID: <20130620030001.3371D1848BE@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-20 05:00:00 +0200 (Thu, 20 Jun 2013) New Revision: 2383 Modified: pkg/PortfolioAnalytics/R/constraints.R Log: adding get.constraints function to extract constraints from portfolio object Modified: pkg/PortfolioAnalytics/R/constraints.R =================================================================== --- pkg/PortfolioAnalytics/R/constraints.R 2013-06-19 17:53:12 UTC (rev 2382) +++ pkg/PortfolioAnalytics/R/constraints.R 2013-06-20 03:00:00 UTC (rev 2383) @@ -456,6 +456,45 @@ inherits( x, "constraint" ) } +#' Helper function to get the enabled constraints out of the portfolio object, see \code{\link{portfolio.spec}} +#' Returns an object of class constraint which is a flat list of weight_sum, box, and group constraints. +#' Uses the same naming as the v1_constraint object which may be useful when passed to other functions. +#' @param portfolio an object of class 'portfolio' +#' @author Ross Bennett +#' @seealso \code{\link{portfolio.spec}}, \code{\link{constraint_v2}} +#' @export +get.constraints <- function(portfolio){ + # Check that object passed in is a portfolio object + if(!is.portfolio(portfolio)) stop("portfolio passed in is not of class portfolio") + + tmp.constraints <- portfolio$constraints + out <- list() + for(i in 1:length(tmp.constraints)){ + if(tmp.constraints[[i]]$enabled){ + # weight_sum constraint + if(tmp.constraints[[i]]$type == "weight_sum"){ + # Extract min_sum and max_sum + out$min_sum <- tmp.constraints[[i]]$min_sum + out$max_sum <- tmp.constraints[[i]]$max_sum + } + # box constraints + if(tmp.constraints[[i]]$type == "box"){ + # Extract min and max + out$min <- tmp.constraints[[i]]$min + out$max <- tmp.constraints[[i]]$max + } + # group constraints + if(tmp.constraints[[i]]$type == "group"){ + # Extract groups, cLO, and cUP + out$groups <- tmp.constraints[[i]]$groups + out$cLO <- tmp.constraints[[i]]$cLO + out$cUP <- tmp.constraints[[i]]$cUP + } + } + } + return(structure(out, class="constraint")) +} + #' function for updating constraints, not well tested, may be broken #' #' can we use the generic update.default function?
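A minimal sketch of how the new get.constraints helper above might be exercised. The portfolio object is built by hand here to carry exactly the fields the function reads; portfolio.spec() is assumed to return an object of class "portfolio" as its roxygen states, and the two constraint entries below are hypothetical:

  # assumes portfolio.spec() creates the 'portfolio' object (per the roxygen above)
  pspec <- portfolio.spec(assets = 4)
  # each entry carries only the fields get.constraints() extracts
  pspec$constraints <- list(
    list(type = "weight_sum", enabled = TRUE, min_sum = 0.99, max_sum = 1.01),
    list(type = "box", enabled = TRUE, min = rep(0, 4), max = rep(0.4, 4))
  )
  con <- get.constraints(pspec)
  con$min_sum  # 0.99, named as in the v1 constraint object
  con$min      # per-asset box minimums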
From noreply at r-forge.r-project.org Thu Jun 20 05:03:55 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Thu, 20 Jun 2013 05:03:55 +0200 (CEST) Subject: [Returnanalytics-commits] r2384 - pkg/PortfolioAnalytics/sandbox Message-ID: <20130620030356.3C7191848BE@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-20 05:03:54 +0200 (Thu, 20 Jun 2013) New Revision: 2384 Added: pkg/PortfolioAnalytics/sandbox/testing_DEoptim_cardinality_constraint.R Log: adding script to test cardinality max position constraint with DEoptim Added: pkg/PortfolioAnalytics/sandbox/testing_DEoptim_cardinality_constraint.R =================================================================== --- pkg/PortfolioAnalytics/sandbox/testing_DEoptim_cardinality_constraint.R (rev 0) +++ pkg/PortfolioAnalytics/sandbox/testing_DEoptim_cardinality_constraint.R 2013-06-20 03:03:54 UTC (rev 2384) @@ -0,0 +1,90 @@ + +# DEoptim with max positions constraint +# Uses fnMap to impose a cardinality constraint with DEoptim + +library(PerformanceAnalytics) +library(PortfolioAnalytics) +library(DEoptim) + +data(edhec) +R <- edhec + +# use example objective function from +# http://cran.r-project.org/web/packages/DEoptim/vignettes/DEoptimPortfolioOptimization.pdf +obj <- function(w) { + if(sum(w) == 0){ + w <- w + 1e-2 + } + w <- w / sum(w) + CVaR <- ES(weights=w, + method="gaussian", + portfolio_method="component", + mu=mu, + sigma=sigma) + tmp1 <- CVaR$ES + tmp2 <- max(CVaR$pct_contrib_ES - 0.05, 0) + out <- tmp1 + tmp2 + return(out) +} + +mu <- colMeans(R) +sigma <- cov(R) + +N <- ncol(R) +minw <- 0 +maxw <- 1 +lower <- rep(minw, N) +upper <- rep(maxw, N) + +eps <- 0.025 + +weight_seq <- generatesequence(min=minw, max=maxw, by=0.001, rounding=3) + +rpconstraint <- constraint(assets=N, min_sum=1-eps, max_sum=1+eps, + min=lower, max=upper, weight_seq=weight_seq) +set.seed(1234) +rp <- random_portfolios(rpconstraints=rpconstraint, permutations=N*10) +rp <- rp / rowSums(rp) + +controlDE <- list(reltol=.000001,steptol=150, itermax = 5000,trace = 250, + NP=as.numeric(nrow(rp)),initialpop=rp) +set.seed(1234) +out1 <- DEoptim(fn = obj, lower=lower, upper=upper, control=controlDE) + +weights1 <- out1$optim$bestmem +weights1 <- weights1 / sum(weights1) +sum(weights1) +out1$optim$bestval + +# Implement a cardinality constraint for max positions with DEoptim +# http://grokbase.com/t/r/r-help/126fsz99gh/r-deoptim-example-illustrating-use-of-fnmap-parameter-for-enforcement-of-cardinality-constraints +mappingFun <- function(x, max.pos=10) { + N <- length(x) + num <- N - max.pos + # Two smallest weights are given a value of 0 + x[order(x)][1:num] <- 0 + x / sum(x) +} + +out2 <- DEoptim(fn = obj, lower=lower, upper=upper, control=controlDE, fnMap=mappingFun) +weights2 <- out2$optim$bestmem +weights2 <- weights2 / sum(weights2) +out2$optim$bestval +sum(round(weights2, 4)) + +# mappingGroupFun <- function(x) { +# i <- 1 +# while(sum(x[1:2]) > 0.4 & i <= 5) { +# x[1:2] <- x[1:2] - 0.01 +# i <- 1 + 1 +# } +# x / sum(x) +# } +# +# out3 <- DEoptim(fn = obj, lower=lower, upper=upper, control=controlDE, fnMap=mappingGroupFun) +# weights3 <- out3$optim$bestmem +# sum(weights[1:2]) +# out3$optim$bestval +# sum(round(weights3, 4)) + + From noreply at r-forge.r-project.org Thu Jun 20 09:00:39 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Thu, 20 Jun 2013 09:00:39 +0200 (CEST) Subject: [Returnanalytics-commits] r2385 - pkg/Meucci/demo Message-ID: 
From noreply at r-forge.r-project.org  Thu Jun 20 09:00:39 2013
From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org)
Date: Thu, 20 Jun 2013 09:00:39 +0200 (CEST)
Subject: [Returnanalytics-commits] r2385 - pkg/Meucci/demo
Message-ID: <20130620070039.C08BD18517B@r-forge.r-project.org>

Author: xavierv
Date: 2013-06-20 09:00:38 +0200 (Thu, 20 Jun 2013)
New Revision: 2385

Added:
   pkg/Meucci/demo/S_EllipticalNDim.R
Log:
- added S_EllipticalNDim.R

Added: pkg/Meucci/demo/S_EllipticalNDim.R
===================================================================
--- pkg/Meucci/demo/S_EllipticalNDim.R	                        (rev 0)
+++ pkg/Meucci/demo/S_EllipticalNDim.R	2013-06-20 07:00:38 UTC (rev 2385)
@@ -0,0 +1,54 @@
+library(mvtnorm);
+
+##################################################################################################################
+### This script decomposes the N-variate normal distribution into its radial and uniform components
+### then it uses the uniform component to generate an elliptical distribution with location parameter
+### Mu and dispersion parameter Sigma
+### = Chapter 2 =
+#################################################################################################################
+clc; clear; close all;
+
+##################################################################################################################
+### Parameters
+N = 30;
+nSim = 10000;
+nu = 0.1;
+t2 = 0.04;
+
+##################################################################################################################
+### Random matrix and elliptical draws
+Mu = runif( N );
+A = matrix( runif( N * N ), N, N ) - 0.5;
+Sigma = A %*% t( A );
+
+Y = rmvnorm( nSim, matrix( 0, N, 1 ), diag( 1, N ));
+
+# radial distribution (normal case ~ square root of chi-square with N degrees of freedom)
+R = matrix( sqrt( apply( Y * Y, 1, sum )));
+
+
+# uniform distribution on unit sphere
+U = Y / ( R %*% matrix( 1, 1, N ));
+
+tau = sqrt( t2 );
+R_New = rlnorm( nSim, nu, tau );
+
+# N-variate elliptical distribution
+X = matrix( 1, nSim, 1 ) %*% t( Mu ) + ( R_New %*% matrix( 1, 1, N )) * ( U %*% t( A ));
+
+##################################################################################################################
+### Plots
+# visualize projection on m-n coordinates
+
+m = 1;
+n = 3;
+xlabel = paste( "X_" , m );
+ylabel = paste( "X_", n );
+plot( X[ , m ], X[ , n ], xlab = xlabel, ylab = ylabel);
+
+# visualize n-th marginal
+n = 4;
+xlabel = paste( "X_", n );
+NumBins = round(10 * log(nSim));
+hist( X[ , n ], NumBins, xlab = xlabel, main= "histogram");
+
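As a quick sanity check of the radial/uniform decomposition above (an editorial aside to run after sourcing the script, not part of the commit): every row of U should lie on the unit sphere, and for the normal case R^2 is chi-square with N degrees of freedom, so mean(R^2) should be close to N.

range( apply( U * U, 1, sum ) )   # both endpoints ~= 1
c( mean( R^2 ), N )               # ~= c(30, 30)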
From noreply at r-forge.r-project.org  Thu Jun 20 09:08:18 2013
From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org)
Date: Thu, 20 Jun 2013 09:08:18 +0200 (CEST)
Subject: [Returnanalytics-commits] r2386 - pkg/Meucci/demo
Message-ID: <20130620070818.8531618544B@r-forge.r-project.org>

Author: xavierv
Date: 2013-06-20 09:08:17 +0200 (Thu, 20 Jun 2013)
New Revision: 2386

Modified:
   pkg/Meucci/demo/S_DisplayStudentTCopulaPdf.R
   pkg/Meucci/demo/S_EllipticalNDim.R
Log:
- fixed last commit and documentation

Modified: pkg/Meucci/demo/S_DisplayStudentTCopulaPdf.R
===================================================================
--- pkg/Meucci/demo/S_DisplayStudentTCopulaPdf.R	2013-06-20 07:00:38 UTC (rev 2385)
+++ pkg/Meucci/demo/S_DisplayStudentTCopulaPdf.R	2013-06-20 07:08:17 UTC (rev 2386)
@@ -3,7 +3,7 @@
 #'
 #' @references
 #' \url{http://}
-#' See Meucci's script for "S_DisplayNormalCopulaPdf.m"
+#' See Meucci's script for "S_DisplayStudentTCopulaPdf.m"
 #'
 #' @author Xavier Valls \email{flamejat@@gmail.com}
 #' @export

Modified: pkg/Meucci/demo/S_EllipticalNDim.R
===================================================================
--- pkg/Meucci/demo/S_EllipticalNDim.R	2013-06-20 07:00:38 UTC (rev 2385)
+++ pkg/Meucci/demo/S_EllipticalNDim.R	2013-06-20 07:08:17 UTC (rev 2386)
@@ -1,15 +1,19 @@
 library(mvtnorm);
 
+#'This script decomposes the N-variate normal distribution into its radial and uniform components
+#' then it uses the uniform component to generate an elliptical distribution with location parameter
+#' Mu and dispersion parameter Sigma, as described in A. Meucci, "Risk and Asset Allocation",
+#' Springer, 2005, Chapter 2.
+#'
+#' @references
+#' \url{http://}
+#' See Meucci's script for "S_EllipticalNDim.m"
+#'
+#' @author Xavier Valls \email{flamejat@@gmail.com}
+#' @export
 ##################################################################################################################
-### This script decomposes the N-variate normal distribution into its radial and uniform components
-### then it uses the uniform component to generate an elliptical distribution with location parameter
-### Mu and dispersion parameter Sigma
-### = Chapter 2 =
-#################################################################################################################
-clc; clear; close all;
+### Parameters
-##################################################################################################################
-### Parameters
 N = 30;
 nSim = 10000;
 nu = 0.1;

From noreply at r-forge.r-project.org  Thu Jun 20 09:42:39 2013
From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org)
Date: Thu, 20 Jun 2013 09:42:39 +0200 (CEST)
Subject: [Returnanalytics-commits] r2387 - pkg/Meucci/demo
Message-ID: <20130620074240.03434184893@r-forge.r-project.org>

Author: xavierv
Date: 2013-06-20 09:42:39 +0200 (Thu, 20 Jun 2013)
New Revision: 2387

Added:
   pkg/Meucci/demo/S_FullCodependence.R
Modified:
   pkg/Meucci/demo/S_DisplayNormalCopulaCdf.R
Log:
- added script illustrating the concept of co-dependence

Modified: pkg/Meucci/demo/S_DisplayNormalCopulaCdf.R
===================================================================
--- pkg/Meucci/demo/S_DisplayNormalCopulaCdf.R	2013-06-20 07:08:17 UTC (rev 2386)
+++ pkg/Meucci/demo/S_DisplayNormalCopulaCdf.R	2013-06-20 07:42:39 UTC (rev 2387)
@@ -3,7 +3,7 @@
 #'
 #' @references
 #' \url{http://}
-#' See Meucci's script for "S_DisplayNormalCopulaPdf.m"
+#' See Meucci's script for "S_DisplayNormalCopulaCdf.m"
 #'
 #' @author Xavier Valls \email{flamejat@@gmail.com}
 #' @export

Added: pkg/Meucci/demo/S_FullCodependence.R
===================================================================
--- pkg/Meucci/demo/S_FullCodependence.R	                        (rev 0)
+++ pkg/Meucci/demo/S_FullCodependence.R	2013-06-20 07:42:39 UTC (rev 2387)
@@ -0,0 +1,33 @@
+#' This script illustrates the concept of co-dependence, as described
+#' in A. Meucci, "Risk and Asset Allocation", Springer, 2005, Chapter 2.
+#'
+#' @references
+#' \url{http://}
+#' See Meucci's script for "S_DisplayNormalCopulaPdf.m"
+#'
+#' @author Xavier Valls \email{flamejat@@gmail.com}
+#' @export

+#############################################################################################################
+### Generate draws
+J = 10000;
+N = 10;
+sig2 = 1;
+
+U = runif( J );
+X = matrix( NaN, J, N );
+
+for( n in 1 : N )
+{
+    a = n / 2;
+    b = 2 * sig2;
+    X[ , n ] = qgamma( U, a, b );
+}
+
+NumBins = round( 10 * log( J ));
+
+
+hist( X[ , 1 ], NumBins, xlab = "X_1");
+plot( X[ , 1 ], X[ , 2 ], xlab = "X_1", ylab = "X_2" );
+hist( X[ , 2 ], NumBins, xlab = "X_2");
+

From noreply at r-forge.r-project.org  Thu Jun 20 09:47:55 2013
From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org)
Date: Thu, 20 Jun 2013 09:47:55 +0200 (CEST)
Subject: [Returnanalytics-commits] r2388 - pkg/Meucci/demo
Message-ID: <20130620074755.5929E18515B@r-forge.r-project.org>

Author: xavierv
Date: 2013-06-20 09:47:54 +0200 (Thu, 20 Jun 2013)
New Revision: 2388

Modified:
   pkg/Meucci/demo/S_FullCodependence.R
Log:
- showing in one panel all graphs from last commit

Modified: pkg/Meucci/demo/S_FullCodependence.R
===================================================================
--- pkg/Meucci/demo/S_FullCodependence.R	2013-06-20 07:42:39 UTC (rev 2387)
+++ pkg/Meucci/demo/S_FullCodependence.R	2013-06-20 07:47:54 UTC (rev 2388)
@@ -26,8 +26,8 @@
 
 NumBins = round( 10 * log( J ));
 
-
-hist( X[ , 1 ], NumBins, xlab = "X_1");
+par( mfrow = c( 3, 1) );
+hist( X[ , 1 ], NumBins, xlab = "X_1", main = "histogram of X_1" );
 plot( X[ , 1 ], X[ , 2 ], xlab = "X_1", ylab = "X_2" );
-hist( X[ , 2 ], NumBins, xlab = "X_2");
+hist( X[ , 2 ], NumBins, xlab = "X_2", main = "histogram of X_2" );
 

From noreply at r-forge.r-project.org  Thu Jun 20 18:14:09 2013
From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org)
Date: Thu, 20 Jun 2013 18:14:09 +0200 (CEST)
Subject: [Returnanalytics-commits] r2389 - pkg/PerformanceAnalytics/sandbox/pulkit
Message-ID: <20130620161409.43E6218510C@r-forge.r-project.org>

Author: pulkit
Date: 2013-06-20 18:14:08 +0200 (Thu, 20 Jun 2013)
New Revision: 2389

Modified:
   pkg/PerformanceAnalytics/sandbox/pulkit/PSRopt.R
Log:
error in checkbounds

Modified: pkg/PerformanceAnalytics/sandbox/pulkit/PSRopt.R
===================================================================
--- pkg/PerformanceAnalytics/sandbox/pulkit/PSRopt.R	2013-06-20 07:47:54 UTC (rev 2388)
+++ pkg/PerformanceAnalytics/sandbox/pulkit/PSRopt.R	2013-06-20 16:14:08 UTC (rev 2389)
@@ -47,11 +47,11 @@
         # To Check the bounds of the weights
         checkBounds<-function(weights){
             flag = TRUE
-            #for(i in 1:columns){
-            #    if(weights[i] < bounds[i,0]) flag = FALSE
+            for(i in 1:columns){
+                if(weights[i] < bounds[i,1]) flag = FALSE
 
-            #    if(weights[i] > bounds[i,1]) flag = FALSE
-            #}
+                if(weights[i] > bounds[i,2]) flag = FALSE
+            }
             return(flag)
         }
 
@@ -60,7 +60,7 @@
             if(length(which(d1Z!=0)) == 0){
                 return(NULL)
             }
-            weights[which(abs(d1Z)==max(abs(d1Z)))] = weights[which(abs(d1Z)==max(abs(d1Z)))]+delta/max(d1Z)
+            weights[which(abs(d1Z)==max(abs(d1Z)))] = weights[which(abs(d1Z)==max(abs(d1Z)))]+delta/d1Z[which(abs(d1Z)==max(abs(d1Z)))]
             # OR all the weights should be changed ?
            #weights = weights + delta/d1Z
            weights = weights/sum(weights)

From noreply at r-forge.r-project.org  Thu Jun 20 18:39:28 2013
From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org)
Date: Thu, 20 Jun 2013 18:39:28 +0200 (CEST)
Subject: [Returnanalytics-commits] r2390 - in pkg/Meucci: data demo
Message-ID: <20130620163928.BFD4118487F@r-forge.r-project.org>

Author: xavierv
Date: 2013-06-20 18:39:28 +0200 (Thu, 20 Jun 2013)
New Revision: 2390

Added:
   pkg/Meucci/data/fX.Rda
   pkg/Meucci/demo/S_FxCopulaMarginal.R
Modified:
   pkg/Meucci/demo/S_FullCodependence.R
Log:
- added script to display the empirical copula of a set of market variables and its data file

Added: pkg/Meucci/data/fX.Rda
===================================================================
(Binary files differ)

Property changes on: pkg/Meucci/data/fX.Rda
___________________________________________________________________
Added: svn:mime-type
   + application/octet-stream

Modified: pkg/Meucci/demo/S_FullCodependence.R
===================================================================
--- pkg/Meucci/demo/S_FullCodependence.R	2013-06-20 16:14:08 UTC (rev 2389)
+++ pkg/Meucci/demo/S_FullCodependence.R	2013-06-20 16:39:28 UTC (rev 2390)
@@ -3,7 +3,7 @@
 #'
 #' @references
 #' \url{http://}
-#' See Meucci's script for "S_DisplayNormalCopulaPdf.m"
+#' See Meucci's script for "S_FullCodependence.m"
 #'
 #' @author Xavier Valls \email{flamejat@@gmail.com}
 #' @export

Added: pkg/Meucci/demo/S_FxCopulaMarginal.R
===================================================================
--- pkg/Meucci/demo/S_FxCopulaMarginal.R	                        (rev 0)
+++ pkg/Meucci/demo/S_FxCopulaMarginal.R	2013-06-20 16:39:28 UTC (rev 2390)
@@ -0,0 +1,62 @@
+#'This script displays the empirical copula of a set of market variables, as described
+#' in A. Meucci, "Risk and Asset Allocation", Springer, 2005, Chapter 2.
+#'
+#' @references
+#' \url{http://}
+#' See Meucci's script for "S_FxCopulaMarginal.m"
+#'
+#' @author Xavier Valls \email{flamejat@@gmail.com}
+#' @export

+### Load data and select the pair to display

+library(pracma)
+load( "../data/fX.Rda" )
+
+Display = c( 1, 2 ); # 1 = Spot USD/EUR; 2 = Spot USD/GBP; 3 = Spot USD/JPY;
+
+#############################################################################################################
+### Define variables (NB: first column is time)
+
+X = apply( log( db_FX$Data[ , 2 : ncol( db_FX$Data ) ] ), 2, FUN = "diff" );
+
+#############################################################################################################
+### Compute empirical copula by sorting
+NumObs = nrow( X );
+K = ncol ( X );
+
+# Sort and get the permutation indices
+C = apply( X, 2, "order" );
+
+
+Copula = matrix( NaN, NumObs, K );
+
+for ( k in 1: K)
+{
+    # scatter plot
+    x = C[ , k ];
+
+    y = 1 : NumObs;
+    xi = 1 : NumObs;
+    yi = interp1(x, y, xi);
+    Copula[ , k ] = yi / ( NumObs + 1 );
+}
+
+############################################################################################################
+### Display

+# marginals
+NumBins = round(10 * log(NumObs));
+
+
+layout( matrix(c(1,2,3), 3, 1, byrow = TRUE), heights=c(1,2,1));
+
+
+hist( X[ , Display[ 2 ] ], NumBins, xlab = db_FX$Fields[[ Display[ 2 ] + 1 ]], ylab = "", main = "");
+
+# scatter plot
+plot( Copula[ , Display[ 1 ] ], Copula[ , Display[ 2 ] ], main = "Copula",
+    xlab = db_FX$Fields[[ Display[ 1 ] + 1 ]], ylab = db_FX$Fields[[ Display[ 2 ] + 1 ]] );
+
+hist( X[ , Display[ 1 ] ], NumBins, xlab = db_FX$Fields[[ Display[ 1 ] + 1 ]], ylab = "", main = "");
+

From noreply at r-forge.r-project.org  Fri Jun 21 04:34:48 2013
From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org)
Date: Fri, 21 Jun 2013 04:34:48 +0200 (CEST)
Subject: [Returnanalytics-commits] r2391 - pkg/PortfolioAnalytics/R
Message-ID: <20130621023448.BBB9818594C@r-forge.r-project.org>

Author: rossbennett34
Date: 2013-06-21 04:34:47 +0200 (Fri, 21 Jun 2013)
New Revision: 2391

Modified:
   pkg/PortfolioAnalytics/R/constraints.R
Log:
adding turnover_constraint function and updating add.constraint for turnover constraint

Modified: pkg/PortfolioAnalytics/R/constraints.R
===================================================================
--- pkg/PortfolioAnalytics/R/constraints.R	2013-06-20 16:39:28 UTC (rev 2390)
+++ pkg/PortfolioAnalytics/R/constraints.R	2013-06-21 02:34:47 UTC (rev 2391)
@@ -274,6 +274,10 @@
              weight=, weight_sum = {tmp_constraint <- weight_sum_constraint(type=type,
                                                                             ...=...)
              },
+             # Turnover constraint
+             turnover = {tmp_constraint <- turnover_constraint(type=type,
+                                                               ...=...)
+             },
              # Do nothing and return the portfolio object if type is NULL
             null = {return(portfolio)}
       )
@@ -495,6 +499,25 @@
   return(structure(out, class="constraint"))
 }
 
+#' constructor for turnover_constraint
+#'
+#' This function is called by add.constraint when type="turnover" is specified.
see \code{\link{add.constraint}} +#' This function allows the user to specify a maximum turnover constraint +#' +#' Note that turnover constraint is currently only supported for global minimum variance problem with solve.QP plugin +#' +#' @param type character type of the constraint +#' @param max.turnover maximum turnover value +#' @param enabled TRUE/FALSE +#' @param \dots any other passthru parameters to specify box and/or group constraints +#' @author Ross Bennett +#' @export +turnover_constraint <- function(type, max.turnover, enabled=FALSE, ...){ + Constraint <- constraint_v2(type, ...) + Constraint$toc <- max.turnover + return(Constraint) +} + #' function for updating constrints, not well tested, may be broken #' #' can we use the generic update.default function? From noreply at r-forge.r-project.org Fri Jun 21 06:14:20 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Fri, 21 Jun 2013 06:14:20 +0200 (CEST) Subject: [Returnanalytics-commits] r2392 - pkg/PortfolioAnalytics/sandbox Message-ID: <20130621041420.4A825185165@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-21 06:14:19 +0200 (Fri, 21 Jun 2013) New Revision: 2392 Added: pkg/PortfolioAnalytics/sandbox/testing_turnover.gmv.R Log: adding script to compute gmv with constraints including turnover constraint Added: pkg/PortfolioAnalytics/sandbox/testing_turnover.gmv.R =================================================================== --- pkg/PortfolioAnalytics/sandbox/testing_turnover.gmv.R (rev 0) +++ pkg/PortfolioAnalytics/sandbox/testing_turnover.gmv.R 2013-06-21 04:14:19 UTC (rev 2392) @@ -0,0 +1,125 @@ + +# script to solve the gmv optimization problem with turnover constraints using quadprog or ROI + +library(PortfolioAnalytics) +library(PerformanceAnalytics) +library(quadprog) + +# TODO Add documentation for function +# Computes optimal weights for global minimum variance portfolio with +# constraints including turnover constraint +turnover.gmv <- function(R, toc, weight.i, min, max){ + + # number of assets in R + p <- ncol(R) + + # Modify the returns matrix. Why do we do this? 
+  returns <- cbind(R, R, R)
+  
+  V <- cov(returns)
+  # V <- make.positive.definite(V)
+  
+  # matrix for initial weights
+  A2 <- cbind(rep(1, p*3), rbind(diag(p), matrix(0, nrow=2*p, ncol=p)))
+  
+  # A matrix for lower box constraints
+  Alo <- rbind(diag(p), diag(p), diag(p))
+  
+  # A matrix for upper box constraints
+  Aup <- rbind(-diag(p), -diag(p), -diag(p))
+  
+  # vector to apply turnover constraint
+  A3 <- c(rep(0, p), rep(-1, p), rep(1, p))
+  
+  # matrix for positive weight
+  A4 <- rbind(matrix(0, nrow=p, ncol=p), diag(p), matrix(0, nrow=p, ncol=p))
+  
+  # matrix for negative weight
+  A5 <- rbind(matrix(0, nrow=p*2, ncol=p), -diag(p))
+  
+  # Combine the temporary A matrices
+  A.c <- cbind(A2, Alo, Aup, A3, A4, A5)
+  
+  # b vector holding the values of the constraints
+  b <- c(1, weight.i, min, -max, -toc, rep(0, 2*p))
+  
+  # no linear term so set this equal to 0s
+  d <- rep(0, p*3)
+  
+  sol <- solve.QP(Dmat=make.positive.definite(V), dvec=d, Amat=A.c, bvec=b, meq=6)
+  wts <- sol$solution
+  wts.final <- wts[(1:p)] + wts[(1+p):(2*p)] + wts[(2*p+1):(3*p)]
+  wts.final
+}
+
+data(edhec)
+ret <- edhec[,1:5]
+
+# box constraints min and max
+min <- rep(0.1, 5)
+max <- rep(0.6, 5)
+
+# turnover constraint
+toc <- 0.3
+
+# Initial weights vector
+weight.i <- rep(1/5,5)
+
+opt.wts <- turnover.gmv(R=ret, toc=toc, weight.i=weight.i, min=min, max=max)
+opt.wts
+
+# calculate turnover
+to <- sum(abs(diff(rbind(weight.i, opt.wts))))
+to
+
+##### ROI Turnover constraints using ROI solver #####
+# Not working correctly. Failing with error in ROI_solve
+
+# library(ROI)
+# library(ROI.plugin.quadprog)
+# 
+# 
+# # Use the first 5 funds in edhec for the returns data
+# ret <- edhec[, 1:5]
+# returns <- cbind(ret, ret, ret)
+# 
+# V <- cov(returns)
+# mu <- apply(returns, 2, mean)
+# # number of assets
+# N <- ncol(returns)
+# 
+# # Set the box constraints for the minimum and maximum asset weights
+# min <- rep(0.1, N/3)
+# max <- rep(0.6, N/3)
+# 
+# # Set the bounds
+# bnds <- list(lower = list(ind = seq.int(1L, N/3), val = as.numeric(min)),
+#              upper = list(ind = seq.int(1L, N/3), val = as.numeric(max)))
+# lambda <- 1
+# ROI_objective <- ROI:::Q_objective(Q=2*lambda*V, L=-mu*0)
+# 
+# # Set up the Amat
+# # min_sum and max_sum of weights
+# A1 <- rbind(rep(1, N), rep(1, N))
+# 
+# # initial weight matrix
+# A.iw <- cbind(diag(N/3), matrix(0, nrow=N/3, ncol=2*N/3))
+# 
+# # turnover vector
+# A.t <- c(rep(0, N/3), rep(-1, N/3), rep(1, N/3))
+# 
+# A.wpos <- t(cbind(rbind(matrix(0, ncol=N/3, nrow=N/3), diag(N/3), matrix(0, ncol=N/3, nrow=N/3)),
+#                   rbind(matrix(0, ncol=N/3, nrow=2*N/3), -diag(N/3))))
+# 
+# Amat <- rbind(A1, A.iw, A.t, A.wpos)
+# 
+# dir.vec <- c(">=","<=", rep("==", N/3), "<=", rep(">=", 2*N/3))
+# min_sum=1
+# max_sum=1
+# rhs.vec <- c(min_sum, max_sum, w.init, toc, rep(0, 2*N/3))
+# 
+# opt.prob <- ROI:::OP(objective=ROI_objective,
+#                      constraints=ROI:::L_constraint(L=Amat, dir=dir.vec, rhs=rhs.vec),
+#                      bounds=bnds)
+# roi.result <- ROI:::ROI_solve(x=opt.prob, solver="quadprog")
+# roi.result$solution

From noreply at r-forge.r-project.org  Fri Jun 21 13:50:42 2013
From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org)
Date: Fri, 21 Jun 2013 13:50:42 +0200 (CEST)
Subject: [Returnanalytics-commits] r2393 - pkg/PortfolioAnalytics/sandbox
Message-ID: <20130621115043.1381918573E@r-forge.r-project.org>

Author: rossbennett34
Date: 2013-06-21 13:50:42 +0200 (Fri, 21 Jun 2013)
New Revision: 2393

Modified:
   pkg/PortfolioAnalytics/sandbox/testing_turnover.gmv.R
Log:
Made a few minor edits to the
ROI_solve implementation. Getting solution, but different than turnover.gmv Modified: pkg/PortfolioAnalytics/sandbox/testing_turnover.gmv.R =================================================================== --- pkg/PortfolioAnalytics/sandbox/testing_turnover.gmv.R 2013-06-21 04:14:19 UTC (rev 2392) +++ pkg/PortfolioAnalytics/sandbox/testing_turnover.gmv.R 2013-06-21 11:50:42 UTC (rev 2393) @@ -4,6 +4,7 @@ library(PortfolioAnalytics) library(PerformanceAnalytics) library(quadprog) +library(corpcor) # TODO Add documentation for function # Computes optimal weights for global minimum variance portfolio with @@ -13,13 +14,14 @@ # number of assets in R p <- ncol(R) - # Modify the returns matrix. Why do we do this? + # Modify the returns matrix. This is done because there are 3 sets of + # variables w.initial, w.buy, and w.sell returns <- cbind(R, R, R) V <- cov(returns) - # V <- make.positive.definite(V) + V <- make.positive.definite(V) - # matrix for initial weights + # A matrix for initial weights A2 <- cbind(rep(1, p*3), rbind(diag(p), matrix(0, nrow=2*p, ncol=p))) # A matrix for lower box constraints @@ -46,7 +48,7 @@ # no linear term so set this equal to 0s d <- rep(0, p*3) - sol <- solve.QP(Dmat=make.positive.definite(V), dvec=d, Amat=A.c, bvec=b, meq=6) + sol <- solve.QP(Dmat=V, dvec=d, Amat=A.c, bvec=b, meq=6) wts <- sol$solution wts.final <- wts[(1:p)] + wts[(1+p):(2*p)] + wts[(2*p+1):(3*p)] wts.final @@ -73,7 +75,8 @@ to ##### ROI Turnover constraints using ROI solver ##### -# Not working correctly. Failing with error in ROI_solve +# Not working correctly. +# Getting a solution now, but results are different than turnover.gmv # library(ROI) # library(ROI.plugin.quadprog) @@ -84,6 +87,7 @@ # returns <- cbind(ret, ret, ret) # # V <- cov(returns) +# V <- corpcor:::make.positive.definite(V) # mu <- apply(returns, 2, mean) # # number of assets # N <- ncol(returns) @@ -116,10 +120,14 @@ # dir.vec <- c(">=","<=", rep("==", N/3), "<=", rep(">=", 2*N/3)) # min_sum=1 # max_sum=1 +# w.init <- rep(1/5, 5) +# toc <- 0.3 # rhs.vec <- c(min_sum, max_sum, w.init, toc, rep(0, 2*N/3)) # # opt.prob <- ROI:::OP(objective=ROI_objective, # constraints=ROI:::L_constraint(L=Amat, dir=dir.vec, rhs=rhs.vec), # bounds=bnds) # roi.result <- ROI:::ROI_solve(x=opt.prob, solver="quadprog") -# roi.result$solution +# wts.tmp <- roi.result$solution +# wts <- wts.tmp[1:(N/3)] + wts.tmp[(N/3+1):(2*N/3)] + wts.tmp[(2*N/3+1):N] +# wts \ No newline at end of file From noreply at r-forge.r-project.org Sat Jun 22 02:02:53 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Sat, 22 Jun 2013 02:02:53 +0200 (CEST) Subject: [Returnanalytics-commits] r2394 - pkg/PortfolioAnalytics/R Message-ID: <20130622000254.41DA2185807@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-22 02:02:53 +0200 (Sat, 22 Jun 2013) New Revision: 2394 Modified: pkg/PortfolioAnalytics/R/objective.R Log: adding turnover as type for add.objective_v2 Modified: pkg/PortfolioAnalytics/R/objective.R =================================================================== --- pkg/PortfolioAnalytics/R/objective.R 2013-06-21 11:50:42 UTC (rev 2393) +++ pkg/PortfolioAnalytics/R/objective.R 2013-06-22 00:02:53 UTC (rev 2394) @@ -181,6 +181,12 @@ ) }, + turnover = {tmp_objective = turnover_objective(name=name, + enabled=enabled, + arguments=arguments, + ...=...) 
+ }, + null = {return(portfolio)} # got nothing, default to simply returning ) # end objective type switch From noreply at r-forge.r-project.org Sat Jun 22 02:14:17 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Sat, 22 Jun 2013 02:14:17 +0200 (CEST) Subject: [Returnanalytics-commits] r2395 - pkg/PortfolioAnalytics/R Message-ID: <20130622001417.ABEE4185807@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-22 02:14:16 +0200 (Sat, 22 Jun 2013) New Revision: 2395 Modified: pkg/PortfolioAnalytics/R/objective.R Log: adding constructor for class turnover_objective Modified: pkg/PortfolioAnalytics/R/objective.R =================================================================== --- pkg/PortfolioAnalytics/R/objective.R 2013-06-22 00:02:53 UTC (rev 2394) +++ pkg/PortfolioAnalytics/R/objective.R 2013-06-22 00:14:16 UTC (rev 2395) @@ -306,3 +306,25 @@ return(Objective) } # end risk_budget_objective constructor + +#' constructor for class turnover_objective +#' +#' if target is null, we'll try to minimize the turnover metric +#' +#' if target is set, we'll try to meet the metric +#' +#' @param name name of the objective, should correspond to a function, though we will try to make allowances +#' @param target univariate target for the objective +#' @param arguments default arguments to be passed to an objective function when executed +#' @param multiplier multiplier to apply to the objective, usually 1 or -1 +#' @param enabled TRUE/FALSE +#' @param \dots any other passthru parameters +#' @author Ross Bennett +#' @export +turnover_objective <- function(name, target=NULL, arguments=NULL, multiplier=1, enabled=FALSE, ... ) +{ + if(!hasArg(target)) target = NULL + ##' if target is null, we'll try to minimize the turnover metric + if(!hasArg(multiplier)) multiplier=1 + return(objective(name=name, target=target, arguments=arguments, enabled=enabled, multiplier=multiplier,objclass=c("turnover_objective","objective"), ... )) +} # end turnover_objective constructor From noreply at r-forge.r-project.org Sat Jun 22 02:23:08 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Sat, 22 Jun 2013 02:23:08 +0200 (CEST) Subject: [Returnanalytics-commits] r2396 - pkg/PortfolioAnalytics/R Message-ID: <20130622002308.BFA8B185899@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-22 02:23:07 +0200 (Sat, 22 Jun 2013) New Revision: 2396 Modified: pkg/PortfolioAnalytics/R/constrained_objective.R Log: adding turnover_objective functionality to constrained_objective.R Modified: pkg/PortfolioAnalytics/R/constrained_objective.R =================================================================== --- pkg/PortfolioAnalytics/R/constrained_objective.R 2013-06-22 00:14:16 UTC (rev 2395) +++ pkg/PortfolioAnalytics/R/constrained_objective.R 2013-06-22 00:23:07 UTC (rev 2396) @@ -181,6 +181,9 @@ if(!inherits(objective,"risk_budget_objective") & is.null(objective$arguments$portfolio_method)& is.null(nargs$portfolio_method)) nargs$portfolio_method='single' if(is.null(objective$arguments$invert)) objective$arguments$invert = FALSE }, + turnover = { + fun = match.fun(turnover) # turnover function included in objectiveFUN.R + }, { # see 'S Programming p. 
67 for this matching
                 fun<-try(match.fun(objective$name))
             }
@@ -244,6 +247,15 @@
         out = out + abs(objective$multiplier)*tmp_measure
       } # univariate risk objectives
 
+      if(inherits(objective,"turnover_objective")){
+        if (!is.null(objective$target) & is.numeric(objective$target)){ # we have a target
+          out = out + penalty*abs(objective$multiplier)*abs(tmp_measure-objective$target)
+          # Does this penalize for turnover below target
+        }
+        # target is null or doesn't exist, just maximize, or minimize violation of constraint
+        out = out + abs(objective$multiplier)*tmp_measure
+      } # univariate turnover objectives
+
       if(inherits(objective,"risk_budget_objective")){
         # setup

From noreply at r-forge.r-project.org  Sat Jun 22 02:26:32 2013
From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org)
Date: Sat, 22 Jun 2013 02:26:32 +0200 (CEST)
Subject: [Returnanalytics-commits] r2397 - pkg/PortfolioAnalytics/R
Message-ID: <20130622002632.7C410185807@r-forge.r-project.org>

Author: rossbennett34
Date: 2013-06-22 02:26:31 +0200 (Sat, 22 Jun 2013)
New Revision: 2397

Added:
   pkg/PortfolioAnalytics/R/objectiveFUN.R
Log:
adding a turnover objective to objectiveFUN.R. Note objectiveFUN.R is a new file for functions used as objectives.

Added: pkg/PortfolioAnalytics/R/objectiveFUN.R
===================================================================
--- pkg/PortfolioAnalytics/R/objectiveFUN.R	                        (rev 0)
+++ pkg/PortfolioAnalytics/R/objectiveFUN.R	2013-06-22 00:26:31 UTC (rev 2397)
@@ -0,0 +1,19 @@
+
+#' Calculates turnover given two vectors of weights.
+#' This is used as an objective function and is called when the user adds an objective of type turnover with \code{\link{add.objective}}
+#' @param weights vector of weights from optimization
+#' @param wts.init vector of initial weights used to calculate turnover from
+#' @author Ross Bennett
+turnover <- function(weights, wts.init=NULL) {
+  # turnover function from https://r-forge.r-project.org/scm/viewvc.php/pkg/PortfolioAnalytics/sandbox/script.workshop2012.R?view=markup&root=returnanalytics
+  N <- length(weights)
+  
+  # If wts.init is not given, then assume a vector of equal weights
+  if(is.null(wts.init)) {
+    wts.init <- rep(1/N, N)
+  }
+  
+  # Check that weights and wts.init are the same length
+  if(length(weights) != length(wts.init)) stop("weights and wts.init are not the same length")
+  return(sum(abs(wts.init-weights))/N)
+}
\ No newline at end of file
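A quick check of the turnover measure added above (an editorial sketch, not part of the commit): moving from an equal-weight portfolio to a concentrated two-asset portfolio.

wts.old <- rep(0.25, 4)
wts.new <- c(0.50, 0.50, 0, 0)
turnover(weights=wts.new, wts.init=wts.old)
# (|0.25-0.5| + |0.25-0.5| + |0.25-0| + |0.25-0|) / 4 = 0.25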
From noreply at r-forge.r-project.org  Sat Jun 22 04:59:52 2013
From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org)
Date: Sat, 22 Jun 2013 04:59:52 +0200 (CEST)
Subject: [Returnanalytics-commits] r2398 - pkg/PortfolioAnalytics
Message-ID: <20130622025953.05BB8184DD4@r-forge.r-project.org>

Author: rossbennett34
Date: 2013-06-22 04:59:51 +0200 (Sat, 22 Jun 2013)
New Revision: 2398

Modified:
   pkg/PortfolioAnalytics/DESCRIPTION
   pkg/PortfolioAnalytics/NAMESPACE
Log:
revised DESCRIPTION and NAMESPACE files after running roxygenize

Modified: pkg/PortfolioAnalytics/DESCRIPTION
===================================================================
--- pkg/PortfolioAnalytics/DESCRIPTION	2013-06-22 00:26:31 UTC (rev 2397)
+++ pkg/PortfolioAnalytics/DESCRIPTION	2013-06-22 02:59:51 UTC (rev 2398)
@@ -41,3 +41,5 @@
     'optimize.portfolio.R'
     'random_portfolios.R'
     'trailingFUN.R'
+    'objectiveFUN.R'
+    'portfolio.R'

Modified: pkg/PortfolioAnalytics/NAMESPACE
===================================================================
--- pkg/PortfolioAnalytics/NAMESPACE	2013-06-22 00:26:31 UTC (rev 2397)
+++ pkg/PortfolioAnalytics/NAMESPACE	2013-06-22 02:59:51 UTC (rev 2398)
@@ -1,32 +1,43 @@
+export("for")
+export(add.constraint)
+export(add.objective_v2)
+export(add.objective)
+export(box_constraint)
 export(CCCgarch.MM)
-export(add.objective)
 export(chart.Scatter.DE)
 export(chart.Scatter.RP)
 export(chart.Weights.DE)
 export(chart.Weights.RP)
 export(charts.DE)
 export(charts.RP)
+export(class)
 export(constrained_objective)
+export(constraint_ROI)
+export(constraint_v2)
 export(constraint)
-export(constraint_ROI)
+export(constructor)
 export(extract.efficient.frontier)
-export(extractStats)
 export(extractStats.optimize.portfolio.DEoptim)
-export(extractStats.optimize.portfolio.ROI)
 export(extractStats.optimize.portfolio.parallel)
 export(extractStats.optimize.portfolio.random)
+export(extractStats.optimize.portfolio.ROI)
+export(extractStats)
 export(extractWeights.rebal)
 export(generatesequence)
+export(get.constraints)
+export(group_constraint)
 export(is.constraint)
 export(is.objective)
+export(is.portfolio)
 export(objective)
-export(optimize.portfolio)
 export(optimize.portfolio.parallel)
 export(optimize.portfolio.rebalancing)
-export(plot.optimize.portfolio)
+export(optimize.portfolio)
 export(plot.optimize.portfolio.DEoptim)
 export(plot.optimize.portfolio.random)
+export(plot.optimize.portfolio)
 export(portfolio_risk_objective)
+export(portfolio.spec)
 export(random_portfolios)
 export(random_walk_portfolios)
 export(randomize_portfolio)
@@ -35,4 +46,8 @@
 export(set.portfolio.moments)
 export(summary.optimize.portfolio.rebalancing)
 export(trailingFUN)
+export(turnover_constraint)
+export(turnover_objective)
 export(update.constraint)
+export(v2_constraint)
+export(weight_sum_constraint)
R Message-ID: <20130622032310.EA368184950@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-22 05:23:10 +0200 (Sat, 22 Jun 2013) New Revision: 2399 Modified: pkg/PortfolioAnalytics/NAMESPACE pkg/PortfolioAnalytics/R/constraints.R Log: deleted constraint_v2 function that I had commented out and update NAMESPACE file after roxygenize Modified: pkg/PortfolioAnalytics/NAMESPACE =================================================================== --- pkg/PortfolioAnalytics/NAMESPACE 2013-06-22 02:59:51 UTC (rev 2398) +++ pkg/PortfolioAnalytics/NAMESPACE 2013-06-22 03:23:10 UTC (rev 2399) @@ -1,4 +1,3 @@ -export("for") export(add.constraint) export(add.objective_v2) export(add.objective) @@ -10,12 +9,10 @@ export(chart.Weights.RP) export(charts.DE) export(charts.RP) -export(class) export(constrained_objective) export(constraint_ROI) export(constraint_v2) export(constraint) -export(constructor) export(extract.efficient.frontier) export(extractStats.optimize.portfolio.DEoptim) export(extractStats.optimize.portfolio.parallel) @@ -49,5 +46,4 @@ export(turnover_constraint) export(turnover_objective) export(update.constraint) -export(v2_constraint) export(weight_sum_constraint) Modified: pkg/PortfolioAnalytics/R/constraints.R =================================================================== --- pkg/PortfolioAnalytics/R/constraints.R 2013-06-22 02:59:51 UTC (rev 2398) +++ pkg/PortfolioAnalytics/R/constraints.R 2013-06-22 03:23:10 UTC (rev 2399) @@ -151,70 +151,9 @@ )) } -# #' constructor for class constraint_v2 -# #' -# #' @param assets number of assets, or optionally a named vector of assets specifying seed weights -# #' @param ... any other passthru parameters -# #' @param min_sum minimum sum of all asset weights, default .99 -# #' @param max_sum maximum sum of all asset weights, default 1.01 -# #' @param weight_seq seed sequence of weights, see \code{\link{generatesequence}} -# #' @author Peter Carl, Brian G. 
Peterson, and Ross Bennett -# #' @examples -# #' exconstr <- constraint_v2(assets=10, min_sum=1, max_sum=1, weight_seq=generatesequence()) -# #' @export -# constraint_v2 <- function(assets=NULL, ..., min_sum=.99, max_sum=1.01, weight_seq=NULL) { -# # based on GPL R-Forge pkg roi by Stefan Thuessel,Kurt Hornik,David Meyer -# # constraint_v2 is based on the constraint_v1 object, but removes box -# # constraint specification -# if (is.null(assets)) { -# stop("You must specify the assets") -# } -# -# if(!is.null(assets)){ -# # TODO FIXME this doesn't work quite right on matrix of assets -# if(is.numeric(assets)){ -# if (length(assets) == 1) { -# nassets = assets -# # we passed in a number of assets, so we need to create the vector -# message("assuming equal weighted seed portfolio") -# assets <- rep(1 / nassets, nassets) -# } else { -# nassets = length(assets) -# } -# # and now we may need to name them -# if (is.null(names(assets))) { -# for(i in 1:length(assets)){ -# names(assets)[i] <- paste("Asset",i,sep=".") -# } -# } -# } -# if(is.character(assets)){ -# nassets = length(assets) -# assetnames = assets -# message("assuming equal weighted seed portfolio") -# assets <- rep(1 / nassets, nassets) -# names(assets) <- assetnames # set names, so that other code can access it, -# # and doesn't have to know about the character vector -# # print(assets) -# } -# # if assets is a named vector, we'll assume it is current weights -# } -# -# ## now structure and return -# return(structure( -# list( -# assets = assets, -# min_sum = min_sum, -# max_sum = max_sum, -# weight_seq = weight_seq, -# objectives = list(), -# call = match.call() -# ), -# class=c("v2_constraint","constraint") -# )) -# } #' constructor for class v2_constraint +#' #' @param type character type of the constraint to add or update, currently 'weight_sum', 'box', or 'group' #' @param assets number of assets, or optionally a named vector of assets specifying seed weights #' @param ... 
any other passthru parameters

From noreply at r-forge.r-project.org  Sat Jun 22 05:44:42 2013
From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org)
Date: Sat, 22 Jun 2013 05:44:42 +0200 (CEST)
Subject: [Returnanalytics-commits] r2400 - pkg/PortfolioAnalytics/man
Message-ID: <20130622034443.26F4818592D@r-forge.r-project.org>

Author: rossbennett34
Date: 2013-06-22 05:44:42 +0200 (Sat, 22 Jun 2013)
New Revision: 2400

Added:
   pkg/PortfolioAnalytics/man/add.constraint.Rd
   pkg/PortfolioAnalytics/man/add.objective_v2.Rd
   pkg/PortfolioAnalytics/man/box_constraint.Rd
   pkg/PortfolioAnalytics/man/constraint_ROI.Rd
   pkg/PortfolioAnalytics/man/constraint_v2.Rd
   pkg/PortfolioAnalytics/man/get.constraints.Rd
   pkg/PortfolioAnalytics/man/group_constraint.Rd
   pkg/PortfolioAnalytics/man/is.portfolio.Rd
   pkg/PortfolioAnalytics/man/portfolio.spec.Rd
   pkg/PortfolioAnalytics/man/turnover.Rd
   pkg/PortfolioAnalytics/man/turnover_constraint.Rd
   pkg/PortfolioAnalytics/man/turnover_objective.Rd
   pkg/PortfolioAnalytics/man/weight_sum_constraint.Rd
Log:
adding documentation files in man directory produced with roxygenize

Added: pkg/PortfolioAnalytics/man/add.constraint.Rd
===================================================================
--- pkg/PortfolioAnalytics/man/add.constraint.Rd	                        (rev 0)
+++ pkg/PortfolioAnalytics/man/add.constraint.Rd	2013-06-22 03:44:42 UTC (rev 2400)
@@ -0,0 +1,39 @@
+\name{add.constraint}
+\alias{add.constraint}
+\title{General interface for adding and/or updating optimization constraints, currently supports weight, box and group constraints.}
+\usage{
+  add.constraint(portfolio, type, enabled = FALSE, ...,
+    indexnum = NULL)
+}
+\arguments{
+  \item{portfolio}{an object of class 'portfolio' to add
+  the constraint to, specifying the constraints for the
+  optimization, see \code{\link{portfolio.spec}}}

+  \item{type}{character type of the constraint to add or
+  update, currently 'weight_sum', 'box', or 'group'}

+  \item{enabled}{TRUE/FALSE}

+  \item{\dots}{any other passthru parameters to specify box
+  and/or group constraints}

+  \item{indexnum}{if you are updating a specific
+  constraint, the index number in the $objectives list to
+  update}
+}
+\description{
+  This is the main function for adding and/or updating
+  constraints in an object of type \code{\link{portfolio}}.
+}
+\details{
+  In general, you will define your constraints as one of
+  three types: 'weight_sum', 'box', or 'group'.
+} +\author{ + Ross Bennett +} +\seealso{ + \code{\link{constraint}} +} + Added: pkg/PortfolioAnalytics/man/add.objective_v2.Rd =================================================================== --- pkg/PortfolioAnalytics/man/add.objective_v2.Rd (rev 0) +++ pkg/PortfolioAnalytics/man/add.objective_v2.Rd 2013-06-22 03:44:42 UTC (rev 2400) @@ -0,0 +1,48 @@ +\name{add.objective_v2} +\alias{add.objective_v2} +\title{General interface for adding optimization objectives, including risk, return, and risk budget} +\usage{ + add.objective_v2(portfolio, type, name, arguments = NULL, + enabled = FALSE, ..., indexnum = NULL) +} +\arguments{ + \item{portfolio}{an object of type 'portfolio' to add the + objective to, specifying the portfolio for the + optimization, see \code{\link{portfolio}}} + + \item{type}{character type of the objective to add or + update, currently 'return','risk', or 'risk_budget'} + + \item{name}{name of the objective, should correspond to a + function, though we will try to make allowances} + + \item{arguments}{default arguments to be passed to an + objective function when executed} + + \item{enabled}{TRUE/FALSE} + + \item{\dots}{any other passthru parameters} + + \item{indexnum}{if you are updating a specific + constraint, the index number in the $objectives list to + update} +} +\description{ + This function is the main function for adding and + updating business objectives in an object of type + \code{\link{portfolio}}. +} +\details{ + In general, you will define your objective as one of + three types: 'return', 'risk', or 'risk_budget'. These + have special handling and intelligent defaults for + dealing with the function most likely to be used as + objectives, including mean, median, VaR, ES, etc. +} +\author{ + Brian G. Peterson and Ross Bennett +} +\seealso{ + \code{\link{objective}} +} + Added: pkg/PortfolioAnalytics/man/box_constraint.Rd =================================================================== --- pkg/PortfolioAnalytics/man/box_constraint.Rd (rev 0) +++ pkg/PortfolioAnalytics/man/box_constraint.Rd 2013-06-22 03:44:42 UTC (rev 2400) @@ -0,0 +1,43 @@ +\name{box_constraint} +\alias{box_constraint} +\title{constructor for box_constraint.} +\usage{ + box_constraint(type, assets, min, max, min_mult, + max_mult, enabled = FALSE, ...) +} +\arguments{ + \item{type}{character type of the constraint} + + \item{assets}{number of assets, or optionally a named + vector of assets specifying seed weights} + + \item{min}{numeric or named vector specifying minimum + weight box constraints} + + \item{max}{numeric or named vector specifying minimum + weight box constraints} + + \item{min_mult}{numeric or named vector specifying + minimum multiplier box constraint from seed weight in + \code{assets}} + + \item{max_mult}{numeric or named vector specifying + maximum multiplier box constraint from seed weight in + \code{assets}} + + \item{enabled}{TRUE/FALSE} + + \item{\dots}{any other passthru parameters to specify box + and/or group constraints} +} +\description{ + This function is called by add.constraint when type="box" + is specified. 
see \code{\link{add.constraint}} +} +\author{ + Ross Bennett +} +\seealso{ + \code{\link{add.constraint}} +} + Added: pkg/PortfolioAnalytics/man/constraint_ROI.Rd =================================================================== --- pkg/PortfolioAnalytics/man/constraint_ROI.Rd (rev 0) +++ pkg/PortfolioAnalytics/man/constraint_ROI.Rd 2013-06-22 03:44:42 UTC (rev 2400) @@ -0,0 +1,31 @@ +\name{constraint_ROI} +\alias{constraint_ROI} +\title{constructor for class constraint_ROI} +\usage{ + constraint_ROI(assets = NULL, op.problem, + solver = c("glpk", "quadprog"), weight_seq = NULL) +} +\arguments{ + \item{assets}{number of assets, or optionally a named + vector of assets specifying seed weights} + + \item{op.problem}{an object of type "OP" (optimization + problem, of \code{ROI}) specifying the complete + optimization problem, see ROI help pages for proper + construction of OP object.} + + \item{solver}{string argument for what solver package to + use, must have ROI plugin installed for that solver. + Currently support is for \code{glpk} and + \code{quadprog}.} + + \item{weight_seq}{seed sequence of weights, see + \code{\link{generatesequence}}} +} +\description{ + constructor for class constraint_ROI +} +\author{ + Hezky Varon +} + Added: pkg/PortfolioAnalytics/man/constraint_v2.Rd =================================================================== --- pkg/PortfolioAnalytics/man/constraint_v2.Rd (rev 0) +++ pkg/PortfolioAnalytics/man/constraint_v2.Rd 2013-06-22 03:44:42 UTC (rev 2400) @@ -0,0 +1,26 @@ +\name{constraint_v2} +\alias{constraint_v2} +\title{constructor for class v2_constraint} +\usage{ + constraint_v2(type, enabled = FALSE, ..., + constrclass = "v2_constraint") +} +\arguments{ + \item{type}{character type of the constraint to add or + update, currently 'weight_sum', 'box', or 'group'} + + \item{assets}{number of assets, or optionally a named + vector of assets specifying seed weights} + + \item{...}{any other passthru parameters} + + \item{constrclass}{character to name the constraint + class} +} +\description{ + constructor for class v2_constraint +} +\author{ + Ross Bennett +} + Added: pkg/PortfolioAnalytics/man/get.constraints.Rd =================================================================== --- pkg/PortfolioAnalytics/man/get.constraints.Rd (rev 0) +++ pkg/PortfolioAnalytics/man/get.constraints.Rd 2013-06-22 03:44:42 UTC (rev 2400) @@ -0,0 +1,26 @@ +\name{get.constraints} +\alias{get.constraints} +\title{Helper function to get the enabled constraints out of the portfolio object, see \code{\link{portfolio.spec}} + Returns an object of class constraint which is a flat list of weight_sum, box, and group constraints. + Uses the same naming as the v1_constraint object which may be useful when passed to other functions.} +\usage{ + get.constraints(portfolio) +} +\arguments{ + \item{portfolio}{an object of class 'portfolio'} +} +\description{ + Helper function to get the enabled constraints out of the + portfolio object, see \code{\link{portfolio.spec}} + Returns an object of class constraint which is a flat + list of weight_sum, box, and group constraints. Uses the + same naming as the v1_constraint object which may be + useful when passed to other functions. 
+} +\author{ + Ross Bennett +} +\seealso{ + \code{\link{portfolio.spec}}, \code{\link{constraint_v2}} +} + Added: pkg/PortfolioAnalytics/man/group_constraint.Rd =================================================================== --- pkg/PortfolioAnalytics/man/group_constraint.Rd (rev 0) +++ pkg/PortfolioAnalytics/man/group_constraint.Rd 2013-06-22 03:44:42 UTC (rev 2400) @@ -0,0 +1,38 @@ +\name{group_constraint} +\alias{group_constraint} +\title{constructor for group_constraint} +\usage{ + group_constraint(type, assets, groups, group_min, + group_max, enabled = FALSE, ...) +} +\arguments{ + \item{type}{character type of the constraint} + + \item{assets}{number of assets, or optionally a named + vector of assets specifying seed weights} + + \item{groups}{vector specifying the groups of the assets} + + \item{group_min}{numeric or vector specifying minimum + weight group constraints} + + \item{group_max}{numeric or vector specifying minimum + weight group constraints} + + \item{enabled}{TRUE/FALSE} + + \item{\dots}{any other passthru parameters to specify box + and/or group constraints} +} +\description{ + This function is called by add.constraint when + type="group" is specified. see + \code{\link{add.constraint}} +} +\author{ + Ross Bennett +} +\seealso{ + \code{\link{add.constraint}} +} + Added: pkg/PortfolioAnalytics/man/is.portfolio.Rd =================================================================== --- pkg/PortfolioAnalytics/man/is.portfolio.Rd (rev 0) +++ pkg/PortfolioAnalytics/man/is.portfolio.Rd 2013-06-22 03:44:42 UTC (rev 2400) @@ -0,0 +1,16 @@ +\name{is.portfolio} +\alias{is.portfolio} +\title{check function for portfolio} +\usage{ + is.portfolio(x) +} +\arguments{ + \item{x}{object to test for type \code{portfolio}} +} +\description{ + check function for portfolio +} +\author{ + Ross Bennett +} + Added: pkg/PortfolioAnalytics/man/portfolio.spec.Rd =================================================================== --- pkg/PortfolioAnalytics/man/portfolio.spec.Rd (rev 0) +++ pkg/PortfolioAnalytics/man/portfolio.spec.Rd 2013-06-22 03:44:42 UTC (rev 2400) @@ -0,0 +1,25 @@ +\name{portfolio.spec} +\alias{portfolio.spec} +\title{constructor for class portfolio} +\usage{ + portfolio.spec(assets = NULL, weight_seq = NULL) +} +\arguments{ + \item{assets}{number of assets, or optionally a named + vector of assets specifying seed weights. If seed weights + are not specified, an equal weight portfolio will be + assumed.} + + \item{weight_seq}{seed sequence of weights, see + \code{\link{generatesequence}}} +} +\description{ + constructor for class portfolio +} +\examples{ +pspec <- portfolio.spec(assets=10, weight_seq=generatesequence()) +} +\author{ + Ross Bennett +} + Added: pkg/PortfolioAnalytics/man/turnover.Rd =================================================================== --- pkg/PortfolioAnalytics/man/turnover.Rd (rev 0) +++ pkg/PortfolioAnalytics/man/turnover.Rd 2013-06-22 03:44:42 UTC (rev 2400) @@ -0,0 +1,23 @@ +\name{turnover} +\alias{turnover} +\title{Calculates turnover given two vectors of weights. +This is used as an objective function and is called when the user adds an objective of type turnover with \code{\link{add.objective}}} +\usage{ + turnover(weights, wts.init = NULL) +} +\arguments{ + \item{weights}{vector of weights from optimization} + + \item{wts.init}{vector of initial weights used to + calculate turnover from} +} +\description{ + Calculates turnover given two vectors of weights. 
This is + used as an objective function and is called when the user + adds an objective of type turnover with + \code{\link{add.objective}} +} +\author{ + Ross Bennett +} + Added: pkg/PortfolioAnalytics/man/turnover_constraint.Rd =================================================================== --- pkg/PortfolioAnalytics/man/turnover_constraint.Rd (rev 0) +++ pkg/PortfolioAnalytics/man/turnover_constraint.Rd 2013-06-22 03:44:42 UTC (rev 2400) @@ -0,0 +1,31 @@ +\name{turnover_constraint} +\alias{turnover_constraint} +\title{constructor for turnover_constraint} +\usage{ + turnover_constraint(type, max.turnover, enabled = FALSE, + ...) +} +\arguments{ + \item{type}{character type of the constraint} + + \item{max.turnover}{maximum turnover value} + + \item{enabled}{TRUE/FALSE} + + \item{\dots}{any other passthru parameters to specify box + and/or group constraints} +} +\description{ + This function is called by add.constraint when + type="turnover" is specified. see + \code{\link{add.constraint}} This function allows the + user to specify a maximum turnover constraint +} +\details{ + Note that turnover constraint is currently only supported + for global minimum variance problem with solve.QP plugin +} +\author{ + Ross Bennett +} + Added: pkg/PortfolioAnalytics/man/turnover_objective.Rd =================================================================== --- pkg/PortfolioAnalytics/man/turnover_objective.Rd (rev 0) +++ pkg/PortfolioAnalytics/man/turnover_objective.Rd 2013-06-22 03:44:42 UTC (rev 2400) @@ -0,0 +1,34 @@ +\name{turnover_objective} +\alias{turnover_objective} +\title{constructor for class turnover_objective} +\usage{ + turnover_objective(name, target = NULL, arguments = NULL, + multiplier = 1, enabled = FALSE, ...) +} +\arguments{ + \item{name}{name of the objective, should correspond to a + function, though we will try to make allowances} + + \item{target}{univariate target for the objective} + + \item{arguments}{default arguments to be passed to an + objective function when executed} + + \item{multiplier}{multiplier to apply to the objective, + usually 1 or -1} + + \item{enabled}{TRUE/FALSE} + + \item{\dots}{any other passthru parameters} +} +\description{ + if target is null, we'll try to minimize the turnover + metric +} +\details{ + if target is set, we'll try to meet the metric +} +\author{ + Ross Bennett +} + Added: pkg/PortfolioAnalytics/man/weight_sum_constraint.Rd =================================================================== --- pkg/PortfolioAnalytics/man/weight_sum_constraint.Rd (rev 0) +++ pkg/PortfolioAnalytics/man/weight_sum_constraint.Rd 2013-06-22 03:44:42 UTC (rev 2400) @@ -0,0 +1,32 @@ +\name{weight_sum_constraint} +\alias{weight_sum_constraint} +\title{constructor for weight_sum_constraint} +\usage{ + weight_sum_constraint(type, min_sum = 0.99, + max_sum = 1.01, enabled = FALSE, ...) +} +\arguments{ + \item{type}{character type of the constraint} + + \item{min_sum}{minimum sum of all asset weights, default + 0.99} + + \item{max_sum}{maximum sum of all asset weights, default + 1.01} + + \item{enabled}{TRUE/FALSE} + + \item{\dots}{any other passthru parameters to specify box + and/or group constraints} +} +\description{ + This function is called by add.constraint when + type="weight_sum" is specified. 
see
+  \code{\link{add.constraint}} This function allows the
+  user to specify the minimum and maximum that the weights
+  sum to
+}
+\author{
+  Ross Bennett
+}
+

From noreply at r-forge.r-project.org  Sat Jun 22 07:37:24 2013
From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org)
Date: Sat, 22 Jun 2013 07:37:24 +0200 (CEST)
Subject: [Returnanalytics-commits] r2401 - pkg/PerformanceAnalytics/sandbox/pulkit
Message-ID: <20130622053724.86B2118450E@r-forge.r-project.org>

Author: pulkit
Date: 2013-06-22 07:37:23 +0200 (Sat, 22 Jun 2013)
New Revision: 2401

Modified:
   pkg/PerformanceAnalytics/sandbox/pulkit/MinTRL.R
   pkg/PerformanceAnalytics/sandbox/pulkit/ProbSharpeRatio.R
Log:
Added Documentation

Modified: pkg/PerformanceAnalytics/sandbox/pulkit/MinTRL.R
===================================================================
--- pkg/PerformanceAnalytics/sandbox/pulkit/MinTRL.R	2013-06-22 03:44:42 UTC (rev 2400)
+++ pkg/PerformanceAnalytics/sandbox/pulkit/MinTRL.R	2013-06-22 05:37:23 UTC (rev 2401)
@@ -1,15 +1,30 @@
-#'@title Probabilistic Sharpe Ratio
+#'@title Minimum Track Record Length
+#'
 #'@description
-#'Given a predefined
-#'benchmark4 Sharpe ratio (), the observed Sharpe Ratio? can be expressed
-#' in probabilistic
+#''How long should a track record be in order to have statistical confidence
+#'that its Sharpe ratio is above a given threshold?' If a track record is shorter
+#'than MinTRL, we do not have enough confidence that the observed Sharpe ratio
+#'is above the designated threshold.
 #'
+#'@aliases MinTrackRecord
+#'
 #'@param R the return series
 #'@param Rf the risk free rate of return
 #'@param refSR the reference Sharpe Ratio
-#'@param the confidence level
+#'@param p the confidence level
 #'@param weights the weights for the portfolio
+#'@param sr Sharpe Ratio
+#'@param sk Skewness
+#'@param kr Kurtosis
+#'
+#'@references Bailey, David H. and Lopez de Prado, Marcos, \emph{The Sharpe Ratio
+#'Efficient Frontier} (July 1, 2012). Journal of Risk, Vol. 15, No.
2, Winter
+#' 2012/13
+#'@keywords ts multivariate distribution models
+#'@examples
+#'
+#'data(edhec)
+#'MinTrackRecord(edhec[,1],0.20)
+
 MinTrackRecord<-function(R = NULL, refSR,Rf=0,p = 0.95, weights = NULL,sr = NULL,sk = NULL, kr = NULL, ...){
     columns = 1
     columnnames = NULL

Modified: pkg/PerformanceAnalytics/sandbox/pulkit/ProbSharpeRatio.R
===================================================================
--- pkg/PerformanceAnalytics/sandbox/pulkit/ProbSharpeRatio.R	2013-06-22 03:44:42 UTC (rev 2400)
+++ pkg/PerformanceAnalytics/sandbox/pulkit/ProbSharpeRatio.R	2013-06-22 05:37:23 UTC (rev 2401)
@@ -1,15 +1,35 @@
 #'@title Probabilistic Sharpe Ratio
+#'
 #'@description
-#'Given a predefined
-#'benchmark4 Sharpe ratio (), the observed Sharpe Ratio? can be expressed
-#' in probabilistic
+#'Given a predefined benchmark Sharpe ratio, the observed Sharpe ratio
+#'can be expressed in probabilistic terms, known as the Probabilistic Sharpe Ratio (PSR).
+#'PSR takes higher moments into account and delivers a corrected, atemporal
+#'measure of performance expressed in terms of probability of skill.
 #'
+#'@aliases ProbSharpeRatio
+#'
 #'@param R the return series
 #'@param Rf the risk free rate of return
 #'@param refSR the reference Sharpe Ratio
 #'@param p the confidence level
 #'@param weights the weights for the portfolio
+#'@param sr Sharpe Ratio
+#'@param sk Skewness
+#'@param kr Kurtosis
+#'
+#'@references Bailey, David H. and Lopez de Prado, Marcos, \emph{The Sharpe Ratio
+#'Efficient Frontier} (July 1, 2012). Journal of Risk, Vol. 15, No. 2, Winter
+#' 2012/13
+#'
+#'@keywords ts multivariate distribution models
+#'
+#'@examples
+#'
+#'data(edhec)
+#'ProbSharpeRatio(edhec[,1],refSR = 0.28)
+#'ProbSharpeRatio(edhec,refSR = 0.28,Rf = 0.06)
+
 ProbSharpeRatio<-
 function(R = NULL, refSR,Rf=0,p = 0.95, weights = NULL,n = NULL,sr = NULL,sk = NULL, kr = NULL, ...){
     columns = 1
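For reference, the statistic these docs describe has a short closed form in Bailey and Lopez de Prado (2012). A hand-rolled editorial sketch, independent of the package code (psr_check is a made-up name; sr, sk, and kr are the observed Sharpe ratio, skewness, and raw kurtosis of a track record of length n):

psr_check <- function(sr, refSR, n, sk=0, kr=3) {
    # probability that the true Sharpe ratio exceeds refSR
    pnorm( (sr - refSR) * sqrt(n - 1) / sqrt(1 - sk*sr + ((kr - 1)/4)*sr^2) )
}
psr_check(sr=0.28, refSR=0.20, n=120)   # e.g. ten years of monthly data

With sk=0 and kr=3 this reduces to the familiar normal-case standard error of the Sharpe ratio, sqrt((1 + sr^2/2)/(n-1)).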
+gen.constr <- constraint(assets=funds, min=-Inf, max=Inf, min_sum=1, max_sum=1)
+
+# Add objective to minimize variance
+gen.constr <- add.objective(constraints=gen.constr, type="risk", name="var", enabled=TRUE)
+
+##### Example 1.1: Global Minimum Variance (GMV) Portfolio #####
+# Global minimum variance portfolio
+gmv.constr <- gen.constr
+
+# Call the optimizer to minimize portfolio variance
+gmv.opt <- optimize.portfolio(R=returns, constraints=gmv.constr, optimize_method="ROI")
+
+# Optimal weights
+round(gmv.opt$weights, 3)
+
+# Portfolio standard deviation
+sqrt(gmv.opt$out)
+
+##### Example 1.2: Long Only GMV Portfolio #####
+gmv.longonly.constr <- gen.constr
+
+# Set the min and max vectors for long only constraints
+min <- rep(0, length(funds))
+max <- rep(1, length(funds))
+
+# Modify the min and max vectors in gmv.longonly.constr
+gmv.longonly.constr$min <- min
+gmv.longonly.constr$max <- max
+
+# Call the optimizer
+gmv.longonly.opt <- optimize.portfolio(R=returns, constraints=gmv.longonly.constr, optimize_method="ROI")
+
+# Optimal weights
+round(gmv.longonly.opt$weights, 3)
+
+# Portfolio standard deviation
+sqrt(gmv.longonly.opt$out)
+
+##### Example 1.3: GMV Box Constraints #####
+gmv.box.constr <- gen.constr
+
+# Set the min and max vectors for box constraints
+# The box constraints are such that the minimum weight of any asset is 0.03
+# and the maximum weight of any asset is 0.25
+min <- rep(0.03, length(funds))
+max <- rep(0.25, length(funds))
+
+# Modify the min and max vectors in gmv.box.constr
+gmv.box.constr$min <- min
+gmv.box.constr$max <- max
+
+# Call the optimizer
+gmv.box.opt <- optimize.portfolio(R=returns, constraints=gmv.box.constr, optimize_method="ROI")
+
+# Optimal weights
+round(gmv.box.opt$weights, 3)
+
+# Portfolio standard deviation
+sqrt(gmv.box.opt$out)
+
+##### Example 1.3a: GMV Linear Inequality Constraints #####
+gmv.box.constr <- gen.constr
+
+# As an alternative to box constraints, we can also use linear inequality
+# constraints for the minimum and maximum asset weights
+
+# Set the min and max vectors for the linear inequality constraints
+# Each asset now has its own minimum and maximum weight
+min <- c(0.02, 0.02, 0.02, 0.04, 0.05, 0.05, 0.02, 0, 0, 0.1)
+max <- c(0.2, 0.4, 0.4, 0.45, 0.3, 0.5, 0.4, 0.4, 0.4, 0.4)
+
+# Modify the min and max vectors in gmv.box.constr
+gmv.box.constr$min <- min
+gmv.box.constr$max <- max
+
+# Call the optimizer
+gmv.box.opt <- optimize.portfolio(R=returns, constraints=gmv.box.constr, optimize_method="ROI")
+
+# Optimal weights
+round(gmv.box.opt$weights, 3)
+
+# Portfolio standard deviation
+sqrt(gmv.box.opt$out)
+
+##### Example 1.4: GMV long only with Group Constraints #####
+# Combine returns from different market cap groups
+returns.cap <- cbind(microcap.ts[, 1:2],
+                     smallcap.ts[, 1:2],
+                     midcap.ts[, 1:2],
+                     largecap.ts[, 1:2])
+
+funds.cap <- colnames(returns.cap)
+
+# Set up constraints object for the market caps
+lo.group.constr <- constraint(assets=funds.cap, min=0, max=1, min_sum=1, max_sum=1)
+
+# Add group constraints to lo.group.constr
+# Market cap constraints
+# At least 10% and no more than 25% in micro-caps
+# At least 15% and no more than 35% in small-caps
+# At least 0% and no more than 35% in mid-caps
+# At least 0% and no more than 45% in large-caps
+lo.group.constr$groups <- c(2, 2, 2, 2)
+lo.group.constr$cLO <- c(0.1, 0.15, 0, 0)
+lo.group.constr$cUP <- c(0.25, .35, 0.35, 0.45)
+
+# Add
objective to minimize variance
+gmv.lo.group.constr <- add.objective(constraints=lo.group.constr, type="risk", name="var", enabled=TRUE)
+
+# Call the optimizer
+gmv.lo.group.opt <- optimize.portfolio(R=returns.cap, constraints=gmv.lo.group.constr, optimize_method="ROI")
+
+# Optimal weights
+round(gmv.lo.group.opt$weights, 3)
+
+# Group weights
+gmv.lo.group.opt$weights[c(1, 3, 5, 7)] + gmv.lo.group.opt$weights[c(2, 4, 6, 8)]
+
+# In the previous examples, we were solving global minimum variance with optimize_method="ROI".
+# The solve.QP plugin is selected automatically by optimize.portfolio when "var" is the objective.
+
+##### Example 1.6: Maximize mean-return with box constraints #####
+# Set up initial constraint object
+# Here we specify that the minimum weight of any asset is 0.03 and the maximum weight of any asset is 0.25
+# We specify the full investment constraint (w' 1 = 1) by setting min_sum=1 and max_sum=1
+gen.constr <- constraint(assets=funds, min=0.03, max=0.25, min_sum=1, max_sum=1)
+
+# Add objective to maximize return
+gen.constr <- add.objective(constraints=gen.constr, type="return", name="mean", enabled=TRUE)
+
+maxret.constr <- gen.constr
+
+# Call the optimizer to maximize return subject to the given constraints
+maxret.opt <- optimize.portfolio(R=returns, constraints=maxret.constr, optimize_method="ROI")
+
+# Optimal weights
+maxret.opt$weights
+
+##### Example 1.7: Maximize mean-return Long Only with Group Constraints #####
+# Re-use lo.group.constr from Example 1.4
+maxret.lo.group.constr <- lo.group.constr
+
+maxret.lo.group.constr <- add.objective(constraints=maxret.lo.group.constr, type="return", name="mean", enabled=TRUE)
+
+maxret.lo.group.opt <- optimize.portfolio(R=returns.cap, constraints=maxret.lo.group.constr, optimize_method="ROI")
+
+# Optimal weights
+maxret.lo.group.opt$weights
+
+# Group weights
+maxret.lo.group.opt$weights[c(1, 3, 5, 7)] + maxret.lo.group.opt$weights[c(2, 4, 6, 8)]
+
+##### Example 1.X: Maximize Quadratic Utility #####
+# Quadratic utility maximizes return with a penalty on variance
+qu.constr <- constraint(assets=funds, min=0, max=1, min_sum=1, max_sum=1)
+
+# Add mean return as an objective
+qu.constr <- add.objective(constraints=qu.constr, type="return", name="mean", enabled=TRUE)
+
+# Add variance as an objective
+qu.constr <- add.objective(constraints=qu.constr, type="risk", name="var", enabled=TRUE, risk_aversion=20)
+
+qu.opt <- optimize.portfolio(R=returns, constraints=qu.constr, optimize_method="ROI")
+
+wts1 <- round(qu.opt$weights, 4)
+wts1
+
+# Check results for quadratic utility with manual code
+p <- ncol(returns)
+V <- var(returns)
+mu <- colMeans(returns)
+lambda <- 20
+min_wt <- 0
+max_wt <- 1
+
+# parameters for solve.QP
+A <- cbind(rep(1, p), diag(p), -diag(p))
+b <- c(1, rep(min_wt, p), rep(-max_wt, p))
+d <- mu
+res <- quadprog:::solve.QP(Dmat=2*lambda*V, dvec=d, Amat=A, bvec=b, meq=1)
+wts2 <- round(res$solution, 4)
+names(wts2) <- colnames(returns)
+wts2
+
+all.equal(wts1, wts2)
+
+# Note that target mean return CANNOT be specified as a constraint currently.
+# It is specified as a target in the return objective.
+# Can do quadratic utility optimization with a target return.
+
+##### Example 1.X: Maximize Quadratic Utility #####
+# Quadratic utility maximizes return with a penalty on variance
+qu.constr <- constraint(assets=funds, min=0, max=1, min_sum=1, max_sum=1)
+
+# Add mean return as an objective
+qu.constr <- add.objective(constraints=qu.constr, type="return", name="mean", target=0.025, enabled=TRUE)
+
+# Add variance as an objective
+# Set
risk aversion parameter high to approximate MVO
+qu.constr <- add.objective(constraints=qu.constr, type="risk", name="var", enabled=TRUE, risk_aversion=1e6)
+
+qu.opt <- optimize.portfolio(R=returns, constraints=qu.constr, optimize_method="ROI")
+
+round(qu.opt$weights, 4)
+
+##### Example X: Mean Variance Optimization (MVO) with target mean return constraint #####
+# TODO Add type="return" for constraint to solve the mean-return constrained MVO
+# Will also need to modify optimize.portfolio for optimize_method="ROI"
+
+##### Example X: Mean Variance Optimization (MVO) with target mean return and long only constraints #####
+# TODO Add type="return" for constraint to solve the mean-return constrained MVO
+# Will also need to modify optimize.portfolio for optimize_method="ROI"
+
+##### Example X: Mean Variance Optimization (MVO) with target mean return and box constraints #####
+# TODO Add type="return" for constraint to solve the mean-return constrained MVO
+# Will also need to modify optimize.portfolio for optimize_method="ROI"
+
+# ROI only solves mean, var, or sample CVaR type business objectives
\ No newline at end of file

From noreply at r-forge.r-project.org  Sun Jun 23 02:21:49 2013
From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org)
Date: Sun, 23 Jun 2013 02:21:49 +0200 (CEST)
Subject: [Returnanalytics-commits] r2403 - pkg/PortfolioAnalytics/R
Message-ID: <20130623002150.0A113184F70@r-forge.r-project.org>

Author: rossbennett34
Date: 2013-06-23 02:21:49 +0200 (Sun, 23 Jun 2013)
New Revision: 2403

Modified:
   pkg/PortfolioAnalytics/R/constraints.R
Log:
adding enabled arg to function calls inside switch statement for add.constraint(). Constraints are defaulting to FALSE without this.

Modified: pkg/PortfolioAnalytics/R/constraints.R
===================================================================
--- pkg/PortfolioAnalytics/R/constraints.R	2013-06-22 16:55:53 UTC (rev 2402)
+++ pkg/PortfolioAnalytics/R/constraints.R	2013-06-23 00:21:49 UTC (rev 2403)
@@ -202,19 +202,23 @@
     # Box constraints
     box = {tmp_constraint <- box_constraint(assets=assets,
                                             type=type,
+                                            enabled=enabled,
                                             ...=...)
     },
     # Group constraints
     group = {tmp_constraint <- group_constraint(assets=assets,
                                                 type=type,
+                                                enabled=enabled,
                                                 ...=...)
     },
     # Sum of weights constraints
     weight=,
     weight_sum = {tmp_constraint <- weight_sum_constraint(type=type,
+                                                          enabled=enabled,
                                                           ...=...)
     },
     # Turnover constraint
     turnover = {tmp_constraint <- turnover_constraint(type=type,
+                                                      enabled=enabled,
                                                       ...=...)
     },
     # Do nothing and return the portfolio object if type is NULL

From noreply at r-forge.r-project.org  Sun Jun 23 02:37:39 2013
From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org)
Date: Sun, 23 Jun 2013 02:37:39 +0200 (CEST)
Subject: [Returnanalytics-commits] r2404 - pkg/PortfolioAnalytics/R
Message-ID: <20130623003740.216D618591C@r-forge.r-project.org>

Author: rossbennett34
Date: 2013-06-23 02:37:39 +0200 (Sun, 23 Jun 2013)
New Revision: 2404

Modified:
   pkg/PortfolioAnalytics/R/constraints.R
Log:
revised get.constraints() function to add checks if constraints are not enabled or not specified. Also updated function documentation.
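To make the intent of these checks concrete, a minimal usage sketch (the asset names are hypothetical; the calls mirror the interface used elsewhere in this log). Constraints only count once they are enabled, and the flattened object returned by get.constraints() must carry min_sum, max_sum, min, and max before optimization can proceed.

library(PortfolioAnalytics)
pspec <- portfolio.spec(assets=c("A1", "A2", "A3"))
pspec <- add.constraint(portfolio=pspec, type="weight_sum", min_sum=0.99, max_sum=1.01, enabled=TRUE)
pspec <- add.constraint(portfolio=pspec, type="box", min=0, max=0.5, enabled=TRUE)
# get.constraints flattens the enabled constraints into a single object and
# should now error if nothing is enabled or the required pieces are missing
constr <- get.constraints(pspec)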
Modified: pkg/PortfolioAnalytics/R/constraints.R =================================================================== --- pkg/PortfolioAnalytics/R/constraints.R 2013-06-23 00:21:49 UTC (rev 2403) +++ pkg/PortfolioAnalytics/R/constraints.R 2013-06-23 00:37:39 UTC (rev 2404) @@ -404,6 +404,15 @@ } #' Helper function to get the enabled constraints out of the portfolio object, see \code{\link{portfolio.spec}} +#' +#' When the v1_constraint object is instantiated via constraint, the arguments +#' min_sum, max_sum, min, and max are either specified by the user or default +#' values are assigned. These are required by other functions such as +#' optimize.portfolio. This function will check that these variables are in +#' the portfolio object in the constraints list. This function could be used +#' at the beginning of optimize.portfolio to check the constraints in the +#' portfolio object. +#' #' Returns an object of class constraint which is a flat list of weight_sum, box, and group constraints. #' Uses the same naming as the v1_constraint object which may be useful when passed to other functions. #' @param portfolio an object of class 'portfolio' @@ -415,7 +424,18 @@ if(!is.portfolio(portfolio)) stop("portfolio passed in is not of class portfolio") tmp.constraints <- portfolio$constraints + + # Check that constraints are passed in + if(length(tmp.constraints) == 0) stop("No constraints passed in") + out <- list() + + # Required constraints + out$min_sum <- NULL + out$max_sum <- NULL + out$min <- NULL + out$max <- NULL + for(i in 1:length(tmp.constraints)){ if(tmp.constraints[[i]]$enabled){ # weight_sum constraint @@ -439,6 +459,13 @@ } } } + # Error if no constraints are enabled + if(length(out) == 0) stop("No constraints are enabled") + + # Error if required constraints are not specified + if(is.null(out$min) | is.null(out$max) | is.null(out$max_sum) | is.null(out$min_sum) { + stop("Must specify weight_sum constraints (min_sum and max_sum) and box constraints ( min and max") + } return(structure(out, class="constraint")) } From noreply at r-forge.r-project.org Sun Jun 23 17:42:07 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Sun, 23 Jun 2013 17:42:07 +0200 (CEST) Subject: [Returnanalytics-commits] r2405 - pkg/PortfolioAnalytics/sandbox Message-ID: <20130623154207.E5B87184421@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-23 17:42:07 +0200 (Sun, 23 Jun 2013) New Revision: 2405 Added: pkg/PortfolioAnalytics/sandbox/portfolio_vignette.Rnw Log: Adding sweave file of vignette that demonstrates creating the portfolio object and adding constraints and objectives. Added: pkg/PortfolioAnalytics/sandbox/portfolio_vignette.Rnw =================================================================== --- pkg/PortfolioAnalytics/sandbox/portfolio_vignette.Rnw (rev 0) +++ pkg/PortfolioAnalytics/sandbox/portfolio_vignette.Rnw 2013-06-23 15:42:07 UTC (rev 2405) @@ -0,0 +1,121 @@ +\documentclass[12pt,letterpaper,english]{article} +\usepackage[OT1]{fontenc} +\usepackage{Sweave} +\usepackage{verbatim} +\usepackage{Rd} +\usepackage{Sweave} + +\begin{document} + +\title{Creating a Portfolio Object with PortfolioAnalytics} +\author{Ross Bennett} + +\maketitle + +\begin{abstract} +The purpose of this vignette is to demonstrate the new interface in PortfolioAnalytics to specify a portfolio object and to add constraints and objectives. +\end{abstract} + +\tableofcontents + +\section{Getting Started} +\subsection{Load Packages} +Load the necessary packages. 
+ +<<>>= +library(PortfolioAnalytics) +library(PerformanceAnalytics) # just for edhec data set +@ + +\subsection{Data} +The edhec data set from the PerformanceAnalytics package will be used as example data. +<<>>= +data(edhec) + +# Use the first 4 indices in edhec for a returns object +returns <- edhec[, 1:4] +print(head(returns, 5)) + +# Get a character vector of the fund names +fund.names <- colnames(returns) +@ + +\section{Creating the "portfolio" object} +The portfolio object is instantiated with the \code{portfolio.spec} function. The main argument to \code{portfolio.spec} is assets, this is a required argument. The assets argument can be a scalar value for the number of assets, a character vector of fund names, or a named vector of seed weights. If seed weights are not specified, an equal weight portfolio will be assumed. + +The \code{pspec} object is an S3 object of class "portfolio". When first created, the portfolio object has an element named assets with the seed weights, an element named weight\_seq with a seed sequence of weights if specified, an empty constraints list and an empty objectives list. + +<<>>= +# Specify a portfolio object by passing a character vector for the +# assets argument. +pspec <- portfolio.spec(assets=fund.names) +print(pspec) +@ + +\section{Adding Constraints} +Adding constraints to the portfolio object is done with \code{add.constraint}. The \code{add.constraint} function is the main interface for adding and/or updating constraints to the portfolio object. This function allows the user to specify the portfolio to add the constraints to, the type of constraints (currently 'weight\_sum', 'box', or 'group'), arguments for the constraint, and whether or not to enable the constraint. If updating an existing constraint, the indexnum argument can be specified. + +Here we add a constraint that the weights must sum to 1, or the full investment constraint. +<<>>= +# Add the full investment constraint that specifies the weights must sum to 1. +pspec <- add.constraint(portfolio=pspec, + type="weight_sum", + min_sum=1, + max_sum=1, + enabled=TRUE) +@ + +Here we add box constraints for the asset weights. The minimum weight of any asset must be greater than or equal to 0.05 and the maximum weight of any asset must be less than or equal to 0.4. The values for min and max can be passed in as scalars or vectors. If min and max are scalars, the values for min and max will be replicated as vectors to the length of assets. If min and max are not specified, a minimum weight of 0 and maximum weight of 1 are assumed. Note that min and max can be specified as vectors with different weights for linear inequality constraints. +<<>>= +pspec <- add.constraint(portfolio=pspec, + type="box", + min=0.05, + max=0.4, + enabled=TRUE) +@ + +The portfolio object now has 2 objects in the constraints list. One object for the sum of weights constraint and another for the box constraint. +<<>>= +print(pspec$constraints) +@ + +Another common constraint that can be added is a group constraint. Group constraints are currently only supported by the ROI solvers, see the ROI vignette [still need to make this] for examples using group constraints. Box constraints and weight\_sum constraints are required by \code{optimize.portfolio}. Other constraint types will be added. + +\section{Adding Objectives} +Business objectives can be added to the portfolio object with \code{add.objective\_v2}. 
The \code{add.objective\_v2} function is the main function for adding and/or updating business objectives to the portfolio object. This function allows the user to specify the portfolio to add the objectives to, the type (currently 'return', 'risk', or 'risk\_budget'), name of the objective function, arguments to the objective function, and whether or not to enable the objective. If updating an existing constraint, the indexnum argument can be specified. + +Here we add a risk objective to minimize portfolio variance. Note that the name of the function must correspond to a function in R. Many functions are available in the PerformanceAnalytics package. +<<>>= +pspec <- add.objective_v2(portfolio=pspec, + type='risk', + name='var', + enabled=TRUE) +@ + +The portfolio object now has 1 object in the objectives list for the risk objective we just added. +<<>>= +print(pspec$objectives) +@ + +We now have a portfolio object with the following constraints and objectives to pass to \code{optimize.portfolio}. +\begin{itemize} + \item Constraints + \begin{itemize} + \item weight\_sum: The weights sum to 1 (i.e. full investment constraint) + \item box: minimum weight of any asset must be greater than or equal to 0.05 and the maximum weight of any asset must be less than or equal to 0.4. +\end{itemize} + \item Objectives + \begin{itemize} + \item risk objective: minimize portfolio var(iance). +\end{itemize} + +\end{itemize} + +\section{Optimization} +Note that this currently does not work, but is how I envision the portfolio object replacing the current constraint object. +<<>>= +#out <- optimize.portfolio(R=returns, portfolio=pspec, optimize_method="ROI") +@ + + +\end{document} \ No newline at end of file From noreply at r-forge.r-project.org Sun Jun 23 20:15:06 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Sun, 23 Jun 2013 20:15:06 +0200 (CEST) Subject: [Returnanalytics-commits] r2406 - pkg/PortfolioAnalytics/R Message-ID: <20130623181507.090391856CD@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-23 20:15:06 +0200 (Sun, 23 Jun 2013) New Revision: 2406 Modified: pkg/PortfolioAnalytics/R/constraints.R Log: added missing paren to fix bug in get.constraints Modified: pkg/PortfolioAnalytics/R/constraints.R =================================================================== --- pkg/PortfolioAnalytics/R/constraints.R 2013-06-23 15:42:07 UTC (rev 2405) +++ pkg/PortfolioAnalytics/R/constraints.R 2013-06-23 18:15:06 UTC (rev 2406) @@ -463,7 +463,7 @@ if(length(out) == 0) stop("No constraints are enabled") # Error if required constraints are not specified - if(is.null(out$min) | is.null(out$max) | is.null(out$max_sum) | is.null(out$min_sum) { + if(is.null(out$min) | is.null(out$max) | is.null(out$max_sum) | is.null(out$min_sum)) { stop("Must specify weight_sum constraints (min_sum and max_sum) and box constraints ( min and max") } return(structure(out, class="constraint")) From noreply at r-forge.r-project.org Sun Jun 23 20:16:47 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Sun, 23 Jun 2013 20:16:47 +0200 (CEST) Subject: [Returnanalytics-commits] r2407 - pkg/PortfolioAnalytics/man Message-ID: <20130623181648.0F3351856CD@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-23 20:16:47 +0200 (Sun, 23 Jun 2013) New Revision: 2407 Modified: pkg/PortfolioAnalytics/man/get.constraints.Rd Log: updated documentation for get.constraints after running roxygenize Modified: pkg/PortfolioAnalytics/man/get.constraints.Rd 
=================================================================== --- pkg/PortfolioAnalytics/man/get.constraints.Rd 2013-06-23 18:15:06 UTC (rev 2406) +++ pkg/PortfolioAnalytics/man/get.constraints.Rd 2013-06-23 18:16:47 UTC (rev 2407) @@ -1,8 +1,6 @@ \name{get.constraints} \alias{get.constraints} -\title{Helper function to get the enabled constraints out of the portfolio object, see \code{\link{portfolio.spec}} - Returns an object of class constraint which is a flat list of weight_sum, box, and group constraints. - Uses the same naming as the v1_constraint object which may be useful when passed to other functions.} +\title{Helper function to get the enabled constraints out of the portfolio object, see \code{\link{portfolio.spec}}} \usage{ get.constraints(portfolio) } @@ -10,8 +8,17 @@ \item{portfolio}{an object of class 'portfolio'} } \description{ - Helper function to get the enabled constraints out of the - portfolio object, see \code{\link{portfolio.spec}} + When the v1_constraint object is instantiated via + constraint, the arguments min_sum, max_sum, min, and max + are either specified by the user or default values are + assigned. These are required by other functions such as + optimize.portfolio. This function will check that these + variables are in the portfolio object in the constraints + list. This function could be used at the beginning of + optimize.portfolio to check the constraints in the + portfolio object. +} +\details{ Returns an object of class constraint which is a flat list of weight_sum, box, and group constraints. Uses the same naming as the v1_constraint object which may be From noreply at r-forge.r-project.org Sun Jun 23 20:18:37 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Sun, 23 Jun 2013 20:18:37 +0200 (CEST) Subject: [Returnanalytics-commits] r2408 - pkg/PerformanceAnalytics/sandbox/pulkit Message-ID: <20130623181838.331B71856CD@r-forge.r-project.org> Author: pulkit Date: 2013-06-23 20:18:37 +0200 (Sun, 23 Jun 2013) New Revision: 2408 Added: pkg/PerformanceAnalytics/sandbox/pulkit/ProbSharpe.Rnw Log: Vignette for Probabilistic Sharpe Ratio Added: pkg/PerformanceAnalytics/sandbox/pulkit/ProbSharpe.Rnw =================================================================== --- pkg/PerformanceAnalytics/sandbox/pulkit/ProbSharpe.Rnw (rev 0) +++ pkg/PerformanceAnalytics/sandbox/pulkit/ProbSharpe.Rnw 2013-06-23 18:18:37 UTC (rev 2408) @@ -0,0 +1,34 @@ +\documentclass[12pt,letterpaper,english]{article} +\usepackage{times} +\usepackage[T1]{fontec} +\IfFileExists{url.sty}{\usepackage{url}} + {\newcommand{\url}{\texttt}} + +\usepackage{babel} +\usepackage{noweb} +\usepackage{Rd} + +\usepackage{Sweave} +\SweaveOpts{engine=R,eps = FALSE} +%\VignetteIndexEntry{Probabilistic Sharpe Ratio} +%\VignetteDepends{PerformanceAnalytics} +%\VignetteKeywords{Probabilistic Sharpe Ratio,Minimum Track Record Length,risk,benchmark,portfolio} +%\VignettePackage{PerformanceAnalytics} + +\begin{document} + +\title{Probabilistic Sharpe Ratio} + +% \keywords{Probabilistic Sharpe Ratio,Minimum Track Record Length,risk,benchmark,portfolio} + +\makeatletter +\makeatother +\maketitle + +\begin{abstract} + + This vignette gives an overview of the Probabilistic Sharpe Ratio , Minimum Track Record Length and the Probabilistic Sharpe Ratio Optimization technique used to find the optimal portfolio that maximizes the Probabilistic Sharpe Ratio. 
It gives an overview of the usability of the functions and its application" + +\end{abstract} + + From noreply at r-forge.r-project.org Sun Jun 23 20:37:25 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Sun, 23 Jun 2013 20:37:25 +0200 (CEST) Subject: [Returnanalytics-commits] r2409 - pkg/PortfolioAnalytics/R Message-ID: <20130623183725.44C5E18424E@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-23 20:37:24 +0200 (Sun, 23 Jun 2013) New Revision: 2409 Modified: pkg/PortfolioAnalytics/R/constraints.R Log: added enabled as argument in constraint_v2 constructor in constraint functions Modified: pkg/PortfolioAnalytics/R/constraints.R =================================================================== --- pkg/PortfolioAnalytics/R/constraints.R 2013-06-23 18:18:37 UTC (rev 2408) +++ pkg/PortfolioAnalytics/R/constraints.R 2013-06-23 18:37:24 UTC (rev 2409) @@ -326,7 +326,7 @@ max[which(tmp_max < max)] <- tmp_max[which(tmp_max < max)] } - Constraint <- constraint_v2(type=type, ...) + Constraint <- constraint_v2(type=type, enabled=enabled, ...) Constraint$min <- min Constraint$max <- max return(Constraint) @@ -368,7 +368,7 @@ } if (length(group_max) != ngroups) stop(paste("length of group_max must be equal to 1 or the length of groups:", ngroups)) - Constraint <- constraint_v2(type, ...) + Constraint <- constraint_v2(type, enabled=enabled, ...) Constraint$groups <- groups Constraint$cLO <- group_min Constraint$cUP <- group_max @@ -388,7 +388,7 @@ #' @author Ross Bennett #' @export weight_sum_constraint <- function(type, min_sum=0.99, max_sum=1.01, enabled=FALSE, ...){ - Constraint <- constraint_v2(type, ...) + Constraint <- constraint_v2(type, enabled=enabled, ...) Constraint$min_sum <- min_sum Constraint$max_sum <- max_sum return(Constraint) @@ -483,7 +483,7 @@ #' @author Ross Bennett #' @export turnover_constraint <- function(type, max.turnover, enabled=FALSE, ...){ - Constraint <- constraint_v2(type, ...) + Constraint <- constraint_v2(type, enabled=enabled, ...) Constraint$toc <- max.turnover return(Constraint) } From noreply at r-forge.r-project.org Sun Jun 23 20:42:34 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Sun, 23 Jun 2013 20:42:34 +0200 (CEST) Subject: [Returnanalytics-commits] r2410 - pkg/PortfolioAnalytics/sandbox Message-ID: <20130623184234.C38991856E4@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-23 20:42:33 +0200 (Sun, 23 Jun 2013) New Revision: 2410 Modified: pkg/PortfolioAnalytics/sandbox/testing_portfolio_specification.R Log: added example for testing the portfolio spec Modified: pkg/PortfolioAnalytics/sandbox/testing_portfolio_specification.R =================================================================== --- pkg/PortfolioAnalytics/sandbox/testing_portfolio_specification.R 2013-06-23 18:37:24 UTC (rev 2409) +++ pkg/PortfolioAnalytics/sandbox/testing_portfolio_specification.R 2013-06-23 18:42:33 UTC (rev 2410) @@ -27,8 +27,14 @@ min_sum=0.99, max_sum=1.01) print(pspec) +# Forgot to enable the weight_sum constraint +pspec <- add.constraint(portfolio=pspec, type="weight_sum", + min_sum=0.99, max_sum=1.01, enabled=TRUE, + indexnum=1) +print(pspec) + # Add box constraints to the pspec object -pspec <- add.constraint(portfolio=pspec, type="box", min=0.1, max=0.4) +pspec <- add.constraint(portfolio=pspec, type="box", min=0.1, max=0.4, enabled=TRUE) print(pspec) # Update the box constraints to pass in a vector for min and max. 
Updates the From noreply at r-forge.r-project.org Mon Jun 24 00:20:50 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Mon, 24 Jun 2013 00:20:50 +0200 (CEST) Subject: [Returnanalytics-commits] r2411 - in pkg/PortfolioAnalytics: R sandbox Message-ID: <20130623222051.1FC3018596B@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-24 00:20:50 +0200 (Mon, 24 Jun 2013) New Revision: 2411 Added: pkg/PortfolioAnalytics/R/constraintsFUN.R pkg/PortfolioAnalytics/sandbox/testing_constrained_group.R Log: Adding function to impose group constraints on a vector of weights. Adding testing script for constrained_group in sandbox Added: pkg/PortfolioAnalytics/R/constraintsFUN.R =================================================================== --- pkg/PortfolioAnalytics/R/constraintsFUN.R (rev 0) +++ pkg/PortfolioAnalytics/R/constraintsFUN.R 2013-06-23 22:20:50 UTC (rev 2411) @@ -0,0 +1,64 @@ +#' Generic function to impose group constraints on a vector of weights +#' +#' This function gets group subsets of the weights vector and checks if the sum +#' of the weights in that group violates the minimum or maximum value. If the +#' sum of weights in a given group violates its maximum or minimum value, the +#' group of weights is normalized to be equal to the minimum or maximum value. +#' This group normalization causes the sum of weights to change. The weights +#' vector is then normalized so that the min_sum and max_sum constraints are +#' satisfied. This "re-normalization" of the weights vector may then cause the +#' group constraints to not be satisfied. +#' +#' Group constraints are implemented in ROI solvers, but this function could +#' be used in constrained_objective for random portfolios, DEoptim, pso, or +#' gensa solvers. +#' +#' @param groups vector to group assets +#' @param cLO vector of group weight minimums +#' @param cUP vector of group weight maximums +#' @param weights vector of weights +#' @param min_sum minimum sum of weights +#' @param max_sum maximum sum of weights +#' @param normalize TRUE/FALSE to normalize the weights vector to satisfy the min_sum and max_sum constraints +#' +#' @author Ross Bennett +#' @export +constrained_group_tmp <- function(groups, cLO, cUP, weights, min_sum, max_sum, normalize=TRUE){ + # Modify the args later to accept a portfolio or constraint object + n.groups <- length(groups) + + k <- 1 + l <- 0 + for(i in 1:n.groups){ + j <- groups[i] + tmp.w <- weights[k:(l+j)] + # normalize weights for a given group that sum to less than specified group min + grp.min <- cLO[i] + if(sum(tmp.w) < grp.min) { + weights[k:(l+j)] <- (grp.min / sum(tmp.w)) * tmp.w + } + # normalize weights for a given group that sum to greater than specified group max + grp.max <- cUP[i] + if(sum(tmp.w) > grp.max) { + weights[k:(l+j)] <- (grp.max / sum(tmp.w)) * tmp.w + } + # cat(sum(tmp.w), "\t", cLO[i], "\n") + # cat(k, " ", l+j, "\n") + k <- k + j + l <- k - 1 + } + # Normalizing the weights inside the groups changes the sum of the weights. + # Should normalizing the sum of weights take place here or somewhere else? + + if(normalize){ + # max_sum and min_sum normalization borrowed from constrained_objective + # Normalize to max_sum + if(sum(weights) > max_sum) { weights <- (max_sum / sum(weights)) * weights } + # Normalize to min_sum + if(sum(weights) < min_sum) { weights <- (min_sum / sum(weights)) * weights } + } + # "Re-normalizing" the weights causes some of the group constraints to + # be violated. 
Can this be addressed later with a penalty term for violating
+  # the group constraints? Or another way?
+  return(weights)
+}

Added: pkg/PortfolioAnalytics/sandbox/testing_constrained_group.R
===================================================================
--- pkg/PortfolioAnalytics/sandbox/testing_constrained_group.R	                        (rev 0)
+++ pkg/PortfolioAnalytics/sandbox/testing_constrained_group.R	2013-06-23 22:20:50 UTC (rev 2411)
@@ -0,0 +1,71 @@
+
+
+##### EX1 #####
+# first group exceeds cUP
+groups <- c(2, 1)
+cLO <- c(0.2, 0.10)
+cUP <- c(0.4, 0.55)
+
+weights <- c(0.15, 0.35, 0.50)
+sum(weights)
+
+(w <- constrained_group_tmp(groups, cLO, cUP, weights, 1, 1, TRUE))
+sum(w[1:2])
+sum(w)
+
+(w <- constrained_group_tmp(groups, cLO, cUP, weights, 1, 1, FALSE))
+# The group 1 cUP is met exactly, but the sum of the weights is not equal to 1
+sum(w[1:2])
+sum(w)
+
+
+##### EX2 #####
+# The assets are grouped into 3 groups of 2
+# The sum of the weights for the first group of assets must be between 0.05 and 0.4
+# The sum of the weights for the second group of assets must be between 0.10 and 0.45
+# The sum of the weights for the last group of assets must be between 0.05 and 0.25
+
+# first group exceeds cUP
+groups <- c(2, 2, 2)
+cLO <- c(0.05, 0.10, 0.05)
+cUP <- c(0.4, 0.45, 0.25)
+
+weights <- c(0.15, 0.30, 0.15, 0.25, 0.05, 0.10)
+sum(weights)
+
+(w <- constrained_group_tmp(groups, cLO, cUP, weights, 1, 1, TRUE))
+sum(w)
+
+##### Ex3 #####
+# The second group is below cLO
+groups <- c(2, 1, 3)
+cLO <- c(0.05, 0.10, 0.05)
+cUP <- c(0.4, 0.45, 0.65)
+
+weights <- c(0.15, 0.25, 0.08, 0.2, 0.22, 0.10)
+sum(weights)
+
+(w <- constrained_group_tmp(groups, cLO, cUP, weights, 1, 1, TRUE))
+sum(w)
+
+##### Ex4 #####
+# The second group is above cUP and the fourth group is below cLO
+groups <- c(2, 4, 3, 2)
+cLO <- c(0.05, 0.10, 0.05, 0.08)
+cUP <- c(0.4, 0.5, 0.65, 0.45)
+
+weights <- c(0.05, 0.1, 0.07, 0.2, 0.22, 0.10, 0.05, 0.08, 0.05, 0.04, 0.04)
+sum(weights[1:2])
+sum(weights[3:6])
+sum(weights[7:9])
+sum(weights[10:11])
+sum(weights)
+
+(w <- constrained_group_tmp(groups, cLO, cUP, weights, 1, 1, TRUE))
+sum(w[1:2])
+sum(w[3:6])
+sum(w[7:9])
+sum(w[10:11])
+
+# Group 2 cUP is being violated. It appears that normalizing at the end of the
+# function is causing some of the group constraints to be violated.

From noreply at r-forge.r-project.org  Mon Jun 24 00:25:07 2013
From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org)
Date: Mon, 24 Jun 2013 00:25:07 +0200 (CEST)
Subject: [Returnanalytics-commits] r2412 - in pkg/PortfolioAnalytics: .
man Message-ID: <20130623222508.176E818596B@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-24 00:25:07 +0200 (Mon, 24 Jun 2013) New Revision: 2412 Added: pkg/PortfolioAnalytics/man/constrained_group_tmp.Rd Modified: pkg/PortfolioAnalytics/DESCRIPTION pkg/PortfolioAnalytics/NAMESPACE Log: Adding updated documentation after running roxygenize on package Modified: pkg/PortfolioAnalytics/DESCRIPTION =================================================================== --- pkg/PortfolioAnalytics/DESCRIPTION 2013-06-23 22:20:50 UTC (rev 2411) +++ pkg/PortfolioAnalytics/DESCRIPTION 2013-06-23 22:25:07 UTC (rev 2412) @@ -43,3 +43,4 @@ 'trailingFUN.R' 'objectiveFUN.R' 'portfolio.R' + 'constraintsFUN.R' Modified: pkg/PortfolioAnalytics/NAMESPACE =================================================================== --- pkg/PortfolioAnalytics/NAMESPACE 2013-06-23 22:20:50 UTC (rev 2411) +++ pkg/PortfolioAnalytics/NAMESPACE 2013-06-23 22:25:07 UTC (rev 2412) @@ -9,6 +9,7 @@ export(chart.Weights.RP) export(charts.DE) export(charts.RP) +export(constrained_group_tmp) export(constrained_objective) export(constraint_ROI) export(constraint_v2) Added: pkg/PortfolioAnalytics/man/constrained_group_tmp.Rd =================================================================== --- pkg/PortfolioAnalytics/man/constrained_group_tmp.Rd (rev 0) +++ pkg/PortfolioAnalytics/man/constrained_group_tmp.Rd 2013-06-23 22:25:07 UTC (rev 2412) @@ -0,0 +1,45 @@ +\name{constrained_group_tmp} +\alias{constrained_group_tmp} +\title{Generic function to impose group constraints on a vector of weights} +\usage{ + constrained_group_tmp(groups, cLO, cUP, weights, min_sum, + max_sum, normalize = TRUE) +} +\arguments{ + \item{groups}{vector to group assets} + + \item{cLO}{vector of group weight minimums} + + \item{cUP}{vector of group weight maximums} + + \item{weights}{vector of weights} + + \item{min_sum}{minimum sum of weights} + + \item{max_sum}{maximum sum of weights} + + \item{normalize}{TRUE/FALSE to normalize the weights + vector to satisfy the min_sum and max_sum constraints} +} +\description{ + This function gets group subsets of the weights vector + and checks if the sum of the weights in that group + violates the minimum or maximum value. If the sum of + weights in a given group violates its maximum or minimum + value, the group of weights is normalized to be equal to + the minimum or maximum value. This group normalization + causes the sum of weights to change. The weights vector + is then normalized so that the min_sum and max_sum + constraints are satisfied. This "re-normalization" of the + weights vector may then cause the group constraints to + not be satisfied. +} +\details{ + Group constraints are implemented in ROI solvers, but + this function could be used in constrained_objective for + random portfolios, DEoptim, pso, or gensa solvers. 
+} +\author{ + Ross Bennett +} + From noreply at r-forge.r-project.org Mon Jun 24 11:44:05 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Mon, 24 Jun 2013 11:44:05 +0200 (CEST) Subject: [Returnanalytics-commits] r2413 - pkg/PerformanceAnalytics/sandbox/pulkit Message-ID: <20130624094405.91FE3184C10@r-forge.r-project.org> Author: pulkit Date: 2013-06-24 11:44:04 +0200 (Mon, 24 Jun 2013) New Revision: 2413 Modified: pkg/PerformanceAnalytics/sandbox/pulkit/ProbSharpe.Rnw Log: updated ProbSharpe vignette Modified: pkg/PerformanceAnalytics/sandbox/pulkit/ProbSharpe.Rnw =================================================================== --- pkg/PerformanceAnalytics/sandbox/pulkit/ProbSharpe.Rnw 2013-06-23 22:25:07 UTC (rev 2412) +++ pkg/PerformanceAnalytics/sandbox/pulkit/ProbSharpe.Rnw 2013-06-24 09:44:04 UTC (rev 2413) @@ -1,11 +1,10 @@ \documentclass[12pt,letterpaper,english]{article} \usepackage{times} -\usepackage[T1]{fontec} +\usepackage[T1]{fontenc} \IfFileExists{url.sty}{\usepackage{url}} {\newcommand{\url}{\texttt}} \usepackage{babel} -\usepackage{noweb} \usepackage{Rd} \usepackage{Sweave} @@ -16,8 +15,9 @@ %\VignettePackage{PerformanceAnalytics} \begin{document} +\SweaveOpts{concordance=TRUE} -\title{Probabilistic Sharpe Ratio} +\title{ Probabilistic Sharpe Ratio } % \keywords{Probabilistic Sharpe Ratio,Minimum Track Record Length,risk,benchmark,portfolio} @@ -31,4 +31,35 @@ \end{abstract} +<>= +library(PerformanceAnalytics) +@ +\section{Probabilistic Sharpe Ratio} + Given a predefined benchmark Sharpe ratio $SR^\ast$ , the observed Sharpe ratio $\hat{SR}$ can be expressed in probabilistic terms as + + \deqn{\hat{PSR}(SR^\ast) = Z\biggl[\frac{(\hat{SR}-SR^\ast)\sqrt{n-1}}{\sqrt{1-\hat{\gamma{_3}}SR^\ast + \frac{\hat{\gamma{_4}}-1}{4}\hat{SR^2}}}\biggr]} + + Here $n$ is the track record length or the number of data points. It can be daily,weekly or yearly depending on the input given + + $\hat{\gamma{_3}}$ and $\hat{\gamma{_4}}$ are the skewness and kurtosis respectively. + It is not unusual to find strategies with irregular trading frequencies, such as weekly strategies that may not trade for a month. This poses a problem when computing an annualized Sharpe ratio, and there is no consensus as how skill should be measured in the context of irregular bets. Because PSR measures skill in probabilistic terms, it is invariant to calendar conventions. All calculations are done in the original frequency +of the data, and there is no annualization. 
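For intuition, the statistic can be reproduced directly from the sample moments. A minimal sketch of the formula above (psr.by.hand is a hypothetical helper; Rf is taken as 0 and the moments are not annualized):

psr.by.hand <- function(x, refSR){
  x  <- as.numeric(na.omit(x))
  n  <- length(x)
  sr <- mean(x) / sd(x)                  # observed Sharpe ratio
  sk <- skewness(x)                      # gamma_3
  kr <- kurtosis(x, method = "moment")   # gamma_4 (non-excess)
  pnorm((sr - refSR) * sqrt(n - 1) /
        sqrt(1 - sk * refSR + (kr - 1) / 4 * sr^2))
}
data(edhec)
psr.by.hand(edhec[, 1], refSR = 0.28)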
+ +<<>>= +data(edhec) +ProbSharpeRatio(edhec,refSR = 0.28) +@ + + + + + + + + + + +\end{document} + + From noreply at r-forge.r-project.org Mon Jun 24 16:02:34 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Mon, 24 Jun 2013 16:02:34 +0200 (CEST) Subject: [Returnanalytics-commits] r2414 - pkg/PerformanceAnalytics/sandbox/pulkit Message-ID: <20130624140235.0F5841856CF@r-forge.r-project.org> Author: pulkit Date: 2013-06-24 16:02:34 +0200 (Mon, 24 Jun 2013) New Revision: 2414 Modified: pkg/PerformanceAnalytics/sandbox/pulkit/PSRopt.R pkg/PerformanceAnalytics/sandbox/pulkit/ProbSharpe.Rnw Log: changes in the PSRopt and the vignette Modified: pkg/PerformanceAnalytics/sandbox/pulkit/PSRopt.R =================================================================== --- pkg/PerformanceAnalytics/sandbox/pulkit/PSRopt.R 2013-06-24 09:44:04 UTC (rev 2413) +++ pkg/PerformanceAnalytics/sandbox/pulkit/PSRopt.R 2013-06-24 14:02:34 UTC (rev 2414) @@ -151,7 +151,9 @@ } weights = optimize() -return(weights) + result = matrix(weights,nrow = columns) + rownames(result) = columnnames +return(result) } Modified: pkg/PerformanceAnalytics/sandbox/pulkit/ProbSharpe.Rnw =================================================================== --- pkg/PerformanceAnalytics/sandbox/pulkit/ProbSharpe.Rnw 2013-06-24 09:44:04 UTC (rev 2413) +++ pkg/PerformanceAnalytics/sandbox/pulkit/ProbSharpe.Rnw 2013-06-24 14:02:34 UTC (rev 2414) @@ -35,10 +35,19 @@ library(PerformanceAnalytics) @ + +<>= +source("/home/pulkit/workspace/GSOC/PerformanceAnalytics/sandbox/pulkit/ProbSharpeRatio.R") +@ + +<>= +source("/home/pulkit/workspace/GSOC/PerformanceAnalytics/sandbox/pulkit/MinTRL.R") +@ + \section{Probabilistic Sharpe Ratio} Given a predefined benchmark Sharpe ratio $SR^\ast$ , the observed Sharpe ratio $\hat{SR}$ can be expressed in probabilistic terms as - \deqn{\hat{PSR}(SR^\ast) = Z\biggl[\frac{(\hat{SR}-SR^\ast)\sqrt{n-1}}{\sqrt{1-\hat{\gamma{_3}}SR^\ast + \frac{\hat{\gamma{_4}}-1}{4}\hat{SR^2}}}\biggr]} + \deqn{\hat{PSR}(SR^\ast) = Z\biggl[\frac{(\hat{SR}-SR^\ast)\sqrt{n-1}}{\sqrt{1-\hat{\gamma_3}SR^\ast + \frac{\hat{\gamma_4}-1}{4}\hat{SR^2}}}\biggr]} Here $n$ is the track record length or the number of data points. It can be daily,weekly or yearly depending on the input given @@ -51,15 +60,32 @@ ProbSharpeRatio(edhec,refSR = 0.28) @ +\section{Minimum Track Record Length} +If a track record is shorter than Minimum Track Record Length(MinTRL), we do +not have enough confidence that the observed $\hat{SR}$ is above the designated threshold +$SR^\ast$. Minimum Track Record Length is given by the following expression. +\deqn{MinTRL = n^\ast = 1+\biggl[1-\hat{\gamma_3}\hat{SR}+\frac{\hat{\gamma_4}}{4}\hat{SR^2}\biggr]\biggl(\frac{Z_\alpha}{\hat{SR}-SR^\ast}\biggr)^2} - - - +$\gamma{_3}$ and $\gamma{_4}$ are the skewness and kurtosis respectively. It is important to note that MinTRL is expressed in terms of number of observations, not annual or calendar terms. +<<>>= +data(edhec) +MinTrackRecord(edhec,refSR = 0.28) +@ +\section{Probabilistic Sharpe Ratio Optimal Portfolio} -\end{document} - +We would like to find the vector of weights that maximize the expression + \deqn{\hat{PSR}(SR^\ast) = Z\biggl[\frac{(\hat{SR}-SR^\ast)\sqrt{n-1}}{\sqrt{1-\hat{\gamma_3}SR^\ast + \frac{\hat{\gamma_4}-1}{4}\hat{SR^2}}}\biggr]} + +where +\eqn{\sigma = \sqrt{E[(r-\mu)^2]}} ,its standard deviation. 
+\eqn{\gamma_3=\frac{E\biggl[(r-\mu)^3\biggr]}{\sigma^3}} its skewness, \eqn{\gamma_4=\frac{E\biggl[(r-\mu)^4\biggr]}{\sigma^4}} its kurtosis, and \eqn{SR = \frac{\mu}{\sigma}} its Sharpe ratio.
+
+Because \eqn{\hat{PSR}(SR^\ast)=Z[\hat{Z^\ast}]} is a monotonically increasing function of \eqn{\hat{Z^\ast}}, the optimal vector is invariant to the value adopted by the parameter \eqn{SR^\ast}.
+
+\end{document}

From noreply at r-forge.r-project.org  Mon Jun 24 18:57:25 2013
From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org)
Date: Mon, 24 Jun 2013 18:57:25 +0200 (CEST)
Subject: [Returnanalytics-commits] r2415 - in pkg/FactorAnalytics: R man
Message-ID: <20130624165725.9CA321800CA@r-forge.r-project.org>

Author: chenyian
Date: 2013-06-24 18:57:24 +0200 (Mon, 24 Jun 2013)
New Revision: 2415

Added:
   pkg/FactorAnalytics/R/fitTimeSeriesFactorModel.R
   pkg/FactorAnalytics/man/fitTimeseriesFactorModel.Rd
Modified:
   pkg/FactorAnalytics/R/fitStatisticalFactorModel.R
   pkg/FactorAnalytics/man/fitStatisticalFactorModel.Rd
Log:
1. fitMacroeconomicFactorModel.R is renamed to fitTimeSeriesFactorModel.R, and the same for the .Rd file.
2. added checkData to fitStatisticalFactorModel.R

Modified: pkg/FactorAnalytics/R/fitStatisticalFactorModel.R
===================================================================
--- pkg/FactorAnalytics/R/fitStatisticalFactorModel.R	2013-06-24 14:02:34 UTC (rev 2414)
+++ pkg/FactorAnalytics/R/fitStatisticalFactorModel.R	2013-06-24 16:57:24 UTC (rev 2415)
@@ -4,7 +4,8 @@
 #' mainly adapted from S+FinMetric function mfactor.
 #'
 #'
-#' @param x T x N assets returns data which is saved as data.frame class.
+#' @param data a vector, matrix, data.frame, xts, timeSeries or zoo object of asset returns
+#' and factor returns
 #' @param k number of factors if it is a scalar, or the method of choosing the optimal
 #' number of factors. "bn" represents Bai and Ng (2002) method and "ck"
 #' represents Connor and Korajczyk (1993) method. Default is k = 1.
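The PCA branch of this estimator reduces to an eigen-decomposition of the sample covariance of returns, as in the mfactor.pca code further down. A minimal sketch on simulated data (all names hypothetical), assuming T > N:

set.seed(1)
R  <- matrix(rnorm(120 * 5), 120, 5)   # hypothetical T = 120 by N = 5 returns
Rc <- sweep(R, 2, colMeans(R))         # demean each column
ev <- eigen(crossprod(Rc) / nrow(Rc))  # eigen-decompose the sample covariance
k  <- 2
f  <- R %*% ev$vectors[, 1:k]          # estimated factor returns (T x k)
B  <- t(ev$vectors[, 1:k])             # factor loadings (k x N)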
@@ -29,7 +30,7 @@ #' \item{asset.ret}{asset returns} #' \item{asset.fit}{List of regression lm class of individual returns on #' factors.} -#' \item{residVars.vec}{vector of residual variances.} +#' \item{resid.variance}{vector of residual variances.} #' \item{mimic}{N x K matrix of factor mimicking portfolio returns.} #' @author Eric Zivot and Yi-An Chen #' @examples @@ -53,7 +54,7 @@ #' sfm.pca.fit$loadings #' sfm.pca.fit$r2 #' sfm.pca.fit$residuals -#' sfm.pca.fit$residVars.vec +#' sfm.pca.fit$resid.variance #' sfm.pca.fit$mimic #' # apca #' sfm.apca.fit <- fitStatisticalFactorModel(sfm.apca.dat,k=1) @@ -73,32 +74,41 @@ #' sfm.apca.fit.ck$mimic #' fitStatisticalFactorModel <- -function(x, k = 1, refine = TRUE, check = FALSE, max.k = NULL, sig = 0.05, na.rm = FALSE){ +function(data, k = 1, refine = TRUE, check = FALSE, max.k = NULL, sig = 0.05, na.rm = FALSE, + ckeckData.method = "xts" ){ # load package require(MASS) +require(PerformanceAnalytics) + + +# check data +data.xts <- checkData(data,method=ckeckData.method) + +# convert it to coredata + + - # function of test - mfactor.test <- function(x, method = "bn", refine = TRUE, check = FALSE, max.k = NULL, sig = 0.05){ + mfactor.test <- function(data, method = "bn", refine = TRUE, check = FALSE, max.k = NULL, sig = 0.05){ if(is.null(max.k)) { - max.k <- min(10, nrow(x) - 1) - } else if (max.k >= nrow(x)) { + max.k <- min(10, nrow(data) - 1) + } else if (max.k >= nrow(data)) { stop("max.k must be less than the number of observations.") } if(check) { - if(mfactor.check(x)) { + if(mfactor.check(data)) { warning("Some variables have identical observations.") return(list(factors = NA, loadings = NA, k = NA)) } } method <- casefold(method) if(method == "bn") { - ans <- mfactor.bn(x, max.k, refine = refine) + ans <- mfactor.bn(data, max.k, refine = refine) } else if(method == "ck") { - ans <- mfactor.ck(x, max.k, refine = refine, sig = sig) + ans <- mfactor.ck(data, max.k, refine = refine, sig = sig) } else { stop("Invalid choice for optional argument method.") @@ -109,25 +119,25 @@ # function of ck -mfactor.ck <- function(x, max.k, sig = 0.05, refine = TRUE) { +mfactor.ck <- function(data, max.k, sig = 0.05, refine = TRUE) { - n <- ncol(x) - m <- nrow(x) + n <- ncol(data) + m <- nrow(data) idx <- 2 * (1:(m/2)) # - f <- mfactor.apca(x, k = 1, refine = refine, check = FALSE) + f <- mfactor.apca(data, k = 1, refine = refine, check = FALSE) f1 <- cbind(1, f$factors) B <- backsolve(chol(crossprod(f1)), diag(2)) - eps <- x - f1 %*% crossprod(t(B)) %*% crossprod(f1, x) + eps <- data - f1 %*% crossprod(t(B)) %*% crossprod(f1, data) s <- eps^2/(1 - 2/m - 1/n) # for(i in 2:max.k) { f.old <- f s.old <- s - f <- mfactor.apca(x, k = i, refine = refine, check = FALSE) + f <- mfactor.apca(data, k = i, refine = refine, check = FALSE) f1 <- cbind(1, f$factors) B <- backsolve(chol(crossprod(f1)), diag(i + 1)) - eps <- x - f1 %*% crossprod(t(B)) %*% crossprod(f1, x) + eps <- data - f1 %*% crossprod(t(B)) %*% crossprod(f1, data) s <- eps^2/(1 - (i + 1)/m - i/n) delta <- rowMeans(s.old[idx - 1, , drop = FALSE]) - rowMeans( s[idx, , drop = FALSE]) @@ -139,8 +149,8 @@ } # funciton of check - mfactor.check <- function(x) { - temp <- apply(x, 2, range) + mfactor.check <- function(data) { + temp <- apply(data, 2, range) if(any(abs(temp[2, ] - temp[1, ]) < .Machine$single.eps)) { TRUE } @@ -150,21 +160,21 @@ } # function of bn - mfactor.bn <- function(x, max.k, refine = TRUE) { + mfactor.bn <- function(data, max.k, refine = TRUE) { # Parameters: - # x : T x N return 
matrix + # data : T x N return matrix # max.k : maxinum number of factors to be considered # Returns: # k : the optimum number of factors - n <- ncol(x) - m <- nrow(x) + n <- ncol(data) + m <- nrow(data) s <- vector("list", max.k) for(i in 1:max.k) { - f <- cbind(1, mfactor.apca(x, k = i, refine = refine, check = + f <- cbind(1, mfactor.apca(data, k = i, refine = refine, check = FALSE)$factors) B <- backsolve(chol(crossprod(f)), diag(i + 1)) - eps <- x - f %*% crossprod(t(B)) %*% crossprod(f, x) + eps <- data - f %*% crossprod(t(B)) %*% crossprod(f, data) sigma <- colSums(eps^2)/(m - i - 1) s[[i]] <- mean(sigma) } @@ -177,27 +187,27 @@ warning("Cp1 and Cp2 did not yield same result. The smaller one is used." ) } k <- min(order(Cp1)[1], order(Cp2)[1]) - f <- mfactor.apca(x, k = k, refine = refine, check = FALSE) + f <- mfactor.apca(data, k = k, refine = refine, check = FALSE) return(f) } # function of pca - mfactor.pca <- function(x, k, check = FALSE, ret.cov = NULL) { + mfactor.pca <- function(data, k, check = FALSE, ret.cov = NULL) { if(check) { - if(mfactor.check(x)) { + if(mfactor.check(data)) { warning("Some variables have identical observations.") return(list(factors = NA, loadings = NA, k = NA)) } } - n <- ncol(x) - m <- nrow(x) - if(is.null(dimnames(x))) { - dimnames(x) <- list(1:m, paste("V", 1:n, sep = ".")) + n <- ncol(data) + m <- nrow(data) + if(is.null(dimnames(data))) { + dimnames(data) <- list(1:m, paste("V", 1:n, sep = ".")) } - x.names <- dimnames(x)[[2]] - xc <- t(t(x) - colMeans(x)) + data.names <- dimnames(data)[[2]] + xc <- t(t(data) - colMeans(data)) if(is.null(ret.cov)) { ret.cov <- crossprod(xc)/m } @@ -205,29 +215,29 @@ # compute loadings beta B <- t(eigen.tmp$vectors[, 1:k, drop = FALSE]) # compute estimated factors - f <- x %*% eigen.tmp$vectors[, 1:k, drop = FALSE] - tmp <- x - f %*% B + f <- data %*% eigen.tmp$vectors[, 1:k, drop = FALSE] + tmp <- data - f %*% B alpha <- colMeans(tmp) # compute residuals tmp <- t(t(tmp) - alpha) r2 <- (1 - colSums(tmp^2)/colSums(xc^2)) ret.cov <- t(B) %*% var(f) %*% B diag(ret.cov) <- diag(ret.cov) + colSums(tmp^2)/(m - k - 1) - dimnames(B) <- list(paste("F", 1:k, sep = "."), x.names) - dimnames(f) <- list(dimnames(x)[[1]], paste("F", 1:k, sep = ".")) - dimnames(ret.cov) <- list(x.names, x.names) - names(alpha) <- x.names + dimnames(B) <- list(paste("F", 1:k, sep = "."), data.names) + dimnames(f) <- list(dimnames(data)[[1]], paste("F", 1:k, sep = ".")) + dimnames(ret.cov) <- list(data.names, data.names) + names(alpha) <- data.names # create lm list for plot reg.list = list() - for (i in x.names) { - reg.df = as.data.frame(cbind(x[,i],f)) + for (i in data.names) { + reg.df = as.data.frame(cbind(data[,i],f)) colnames(reg.df)[1] <- i fm.formula = as.formula(paste(i,"~", ".", sep=" ")) fm.fit = lm(fm.formula, data=reg.df) reg.list[[i]] = fm.fit } ans <- list(factors = f, loadings = B, k = k, alpha = alpha, ret.cov = ret.cov, - r2 = r2, eigen = eigen.tmp$values, residuals=tmp, asset.ret = x, + r2 = r2, eigen = eigen.tmp$values, residuals=tmp, asset.ret = data, asset.fit=reg.list) return(ans) @@ -235,21 +245,21 @@ } # funciont of apca - mfactor.apca <- function(x, k, refine = TRUE, check = FALSE, ret.cov = NULL) { + mfactor.apca <- function(data, k, refine = TRUE, check = FALSE, ret.cov = NULL) { if(check) { - if(mfactor.check(x)) { + if(mfactor.check(data)) { warning("Some variables have identical observations.") return(list(factors = NA, loadings = NA, k = NA)) } } - n <- ncol(x) - m <- nrow(x) - if(is.null(dimnames(x))) { - 
dimnames(x) <- list(1:m, paste("V", 1:n, sep = ".")) + n <- ncol(data) + m <- nrow(data) + if(is.null(dimnames(data))) { + dimnames(data) <- list(1:m, paste("V", 1:n, sep = ".")) } - x.names <- dimnames(x)[[2]] - xc <- t(t(x) - colMeans(x)) + data.names <- dimnames(data)[[2]] + xc <- t(t(data) - colMeans(data)) if(is.null(ret.cov)) { ret.cov <- crossprod(t(xc))/n } @@ -257,8 +267,8 @@ f <- eig.tmp$vectors[, 1:k, drop = FALSE] f1 <- cbind(1, f) B <- backsolve(chol(crossprod(f1)), diag(k + 1)) - B <- crossprod(t(B)) %*% crossprod(f1, x) - sigma <- colSums((x - f1 %*% B)^2)/(m - k - 1) + B <- crossprod(t(B)) %*% crossprod(f1, data) + sigma <- colSums((data - f1 %*% B)^2)/(m - k - 1) if(refine) { xs <- t(xc)/sqrt(sigma) ret.cov <- crossprod(xs)/n @@ -266,52 +276,52 @@ f <- eig.tmp$vectors[, 1:k, drop = FALSE] f1 <- cbind(1, f) B <- backsolve(chol(crossprod(f1)), diag(k + 1)) - B <- crossprod(t(B)) %*% crossprod(f1, x) - sigma <- colSums((x - f1 %*% B)^2)/(m - k - 1) + B <- crossprod(t(B)) %*% crossprod(f1, data) + sigma <- colSums((data - f1 %*% B)^2)/(m - k - 1) } alpha <- B[1, ] B <- B[-1, , drop = FALSE] ret.cov <- t(B) %*% var(f) %*% B diag(ret.cov) <- diag(ret.cov) + sigma - dimnames(B) <- list(paste("F", 1:k, sep = "."), x.names) - dimnames(f) <- list(dimnames(x)[[1]], paste("F", 1:k, sep = ".")) - names(alpha) <- x.names - res <- t(t(x) - alpha) - f %*% B + dimnames(B) <- list(paste("F", 1:k, sep = "."), data.names) + dimnames(f) <- list(dimnames(data)[[1]], paste("F", 1:k, sep = ".")) + names(alpha) <- data.names + res <- t(t(data) - alpha) - f %*% B r2 <- (1 - colSums(res^2)/colSums(xc^2)) ans <- list(factors = f, loadings = B, k = k, alpha = alpha, ret.cov = ret.cov, - r2 = r2, eigen = eig.tmp$values, residuals=res,asset.ret = x) + r2 = r2, eigen = eig.tmp$values, residuals=res,asset.ret = data) return(ans) } call <- match.call() - pos <- rownames(x) - x <- as.matrix(x) - if(any(is.na(x))) { + pos <- rownames(data) + data <- as.matrix(data) + if(any(is.na(data))) { if(na.rm) { - x <- na.omit(x) + data <- na.omit(data) } else { stop("Missing values are not allowed if na.rm=F.") } } # use PCA if T > N - if(ncol(x) < nrow(x)) { + if(ncol(data) < nrow(data)) { if(is.character(k)) { stop("k must be the number of factors for PCA.") } - if(k >= ncol(x)) { + if(k >= ncol(data)) { stop("Number of factors must be smaller than number of variables." ) } - ans <- mfactor.pca(x, k, check = check) + ans <- mfactor.pca(data, k, check = check) } else if(is.character(k)) { - ans <- mfactor.test(x, k, refine = refine, check = + ans <- mfactor.test(data, k, refine = refine, check = check, max.k = max.k, sig = sig) } else { # use aPCA if T <= N - if(k >= ncol(x)) { + if(k >= ncol(data)) { stop("Number of factors must be smaller than number of variables." 
) } - ans <- mfactor.apca(x, k, refine = refine, check = + ans <- mfactor.apca(data, k, refine = refine, check = check) } @@ -322,17 +332,17 @@ f <- as.matrix(f) } - if(nrow(x) < ncol(x)) { - mimic <- ginv(x) %*% f + if(nrow(data) < ncol(data)) { + mimic <- ginv(data) %*% f } else { - mimic <- qr.solve(x, f) + mimic <- qr.solve(data, f) } mimic <- t(t(mimic)/colSums(mimic)) - dimnames(mimic)[[1]] <- dimnames(x)[[2]] + dimnames(mimic)[[1]] <- dimnames(data)[[2]] ans$mimic <- mimic - ans$residVars.vec <- apply(ans$residuals,2,var) + ans$resid.variance <- apply(ans$residuals,2,var) ans$call <- call class(ans) <- "StatFactorModel" return(ans) Added: pkg/FactorAnalytics/R/fitTimeSeriesFactorModel.R =================================================================== --- pkg/FactorAnalytics/R/fitTimeSeriesFactorModel.R (rev 0) +++ pkg/FactorAnalytics/R/fitTimeSeriesFactorModel.R 2013-06-24 16:57:24 UTC (rev 2415) @@ -0,0 +1,372 @@ +#' Fit time series factor model by time series regression techniques. +#' +#' Fit time series factor model by time series regression techniques. It +#' creates the class of "TimeSeriesFactorModel". +#' +#' If \code{Robust} is chosen, there is no subsets but all factors will be +#' used. Cp is defined in +#' http://www-stat.stanford.edu/~hastie/Papers/LARS/LeastAngle_2002.pdf. p17. +#' +#' @param assets.names names of assets returns. +#' @param factors.names names of factors returns. +#' @param num.factor.subset scalar. Number of factors selected by all subsets. +#' @param data a vector, matrix, data.frame, xts, timeSeries or zoo object with asset returns +#' and factors retunrs rownames +#' @param fit.method "OLS" is ordinary least squares method, "DLS" is +#' discounted least squares method. Discounted least squares (DLS) estimation +#' is weighted least squares estimation with exponentially declining weights +#' that sum to unity. "Robust" +#' @param variable.selection "none" will not activate variables sellection. Default is "none". +#' "stepwise" is traditional forward/backward #' stepwise OLS regression, starting from the initial set of factors, that adds +#' factors only if the regression fit as measured by the Bayesian Information +#' Criteria (BIC) or Akaike Information Criteria (AIC) can be done using the R +#' function step() from the stats package. If "Robust" is chosen, the +#' function step.lmRob in Robust package will be used. "all subsets" is +#' Traditional all subsets regression can be done using the R function +#' regsubsets() from the package leaps. "lar" , "lasso" is based on package +#' "lars", linear angle regression. If "lar" or "lasso" is chose. fit.method will be ignored. +#' @param decay.factor for DLS. Default is 0.95. +#' @param nvmax control option for all subsets. maximum size of subsets to +#' examine +#' @param force.in control option for all subsets. The factors that should be +#' in all models. +#' @param subsets.method control option for all subsets. se exhaustive search, +#' forward selection, backward selection or sequential replacement to search. +#' @param lars.criteria either choose minimum "Cp": unbiased estimator of the +#' true rist or "cv" 10 folds cross-validation. See detail. +#' @return an S3 object containing +#' \item{asset.fit}{Fit objects for each asset. 
This is the class "lm" for +#' each object.} +#' \item{alpha}{N x 1 Vector of estimated alphas.} +#' \item{beta}{N x K Matrix of estimated betas.} +#' \item{r2}{N x 1 Vector of R-square values.} +#' \item{resid.variance}{N x 1 Vector of residual variances.} +#' \item{call}{function call.} +#' @author Eric Zivot and Yi-An Chen. +#' @references 1. Efron, Hastie, Johnstone and Tibshirani (2002) "Least Angle +#' Regression" (with discussion) Annals of Statistics; see also +#' http://www-stat.stanford.edu/~hastie/Papers/LARS/LeastAngle_2002.pdf. 2. +#' Hastie, Tibshirani and Friedman (2008) Elements of Statistical Learning 2nd +#' edition, Springer, NY. +#' @examples +#' \dontrun{ +#' # load data from the database +#' data(managers.df) +#' ret.assets = managers.df[,(1:6)] +#' factors = managers.df[,(7:9)] +#' # fit the factor model with OLS +#' fit <- fitTimeseriesFactorModel(ret.assets,factors,fit.method="OLS", +#' variable.selection="all subsets") +#' # summary of HAM1 +#' summary(fit$asset.fit$HAM1) +#' # plot actual vs. fitted over time for HAM1 +#' # use chart.TimeSeries() function from PerformanceAnalytics package +#' dataToPlot = cbind(fitted(fit$asset.fit$HAM1), na.omit(managers.df$HAM1)) +#' colnames(dataToPlot) = c("Fitted","Actual") +#' chart.TimeSeries(dataToPlot, main="FM fit for HAM1", +#' colorset=c("black","blue"), legend.loc="bottomleft") +#' } +fitTimeseriesFactorModel <- +function(assets.names, factors.names, data=data, num.factor.subset = 1, + fit.method=c("OLS","DLS","Robust"), + variable.selection="none", + decay.factor = 0.95,nvmax=8,force.in=NULL, + subsets.method = c("exhaustive", "backward", "forward", "seqrep"), + lars.criteria = c("Cp","cv")) { + + require(PerformanceAnalytics) + require(leaps) + require(lars) + require(robust) + require(MASS) + this.call <- match.call() + + # convert data into xts and hereafter compute in xts + data.xts <- checkData(data) + reg.xts <- merge(data.xts[,assets.names],data.xts[,factors.names]) + + # initialize list object to hold regression objects +reg.list = list() + + +# initialize matrices and vectors to hold estimated betas, +# residual variances, and R-square values from +# fitted factor models + +Alphas = ResidVars = R2values = rep(0, length(assets.names)) +names(Alphas) = names(ResidVars) = names(R2values) = assets.names +Betas = matrix(0, length(assets.names), length(factors.names)) +colnames(Betas) = factors.names +rownames(Betas) = assets.names + + +if (variable.selection == "none") { + if (fit.method == "OLS") { + for (i in assets.names) { + reg.df = na.omit(reg.xts[, c(i, factors.names)]) + fm.formula = as.formula(paste(i,"~", ".", sep=" ")) + fm.fit = lm(fm.formula, data=reg.df) + fm.summary = summary(fm.fit) + reg.list[[i]] = fm.fit + Alphas[i] = coef(fm.fit)[1] + Betas.names = names(coef(fm.fit)[-1]) + Betas[i,Betas.names] = coef(fm.fit)[-1] + ResidVars[i] = fm.summary$sigma^2 + R2values[i] = fm.summary$r.squared + } + } else if (fit.method == "DLS") { + for (i in assets.names) { + reg.df = na.omit(reg.xts[, c(i, factors.names)]) + t.length <- nrow(reg.df) + w <- rep(decay.factor^(t.length-1),t.length) + for (k in 2:t.length) { + w[k] = w[k-1]/decay.factor + } + # sum weigth to unitary + w <- w/sum(w) + fm.formula = as.formula(paste(i,"~", ".", sep="")) + fm.fit = lm(fm.formula, data=reg.df,weight=w) + fm.summary = summary(fm.fit) + reg.list[[i]] = fm.fit + Alphas[i] = coef(fm.fit)[1] + Betas.names = names(coef(fm.fit)[-1]) + Betas[i,Betas.names] = coef(fm.fit)[-1] + ResidVars[i] = fm.summary$sigma^2 + R2values[i] = 
fm.summary$r.squared + } + } else if (fit.method=="Robust") { + for (i in assets.names) { + reg.df = na.omit(reg.xts[, c(i, factors.names)]) + fm.formula = as.formula(paste(i,"~", ".", sep=" ")) + fm.fit = lmRob(fm.formula, data=reg.df) + fm.summary = summary(fm.fit) + reg.list[[i]] = fm.fit + Alphas[i] = coef(fm.fit)[1] + Betas[i, ] = coef(fm.fit)[-1] + ResidVars[i] = fm.summary$sigma^2 + R2values[i] = fm.summary$r.squared + } + + } else { + stop("invalid method") + } + + +} else if (variable.selection == "all subsets") { +# estimate multiple factor model using loop b/c of unequal histories for the hedge funds + + + +if (fit.method == "OLS") { + +if (num.factor.subset == length(force.in)) { + for (i in assets.names) { + reg.df = na.omit(reg.xts[, c(i, force.in)]) + fm.formula = as.formula(paste(i,"~", ".", sep=" ")) + fm.fit = lm(fm.formula, data=reg.df) + fm.summary = summary(fm.fit) + reg.list[[i]] = fm.fit + Alphas[i] = coef(fm.fit)[1] + Betas.names = names(coef(fm.fit)[-1]) + Betas[i,Betas.names] = coef(fm.fit)[-1] + ResidVars[i] = fm.summary$sigma^2 + R2values[i] = fm.summary$r.squared + } +} else if (num.factor.subset > length(force.in)) { + +for (i in assets.names) { + reg.df = na.omit(reg.xts[, c(i, factors.names)]) + fm.formula = as.formula(paste(i,"~", ".", sep=" ")) + fm.subsets <- regsubsets(fm.formula,data=reg.df,nvmax=nvmax,force.in=force.in, + method=subsets.method) + sum.sub <- summary(fm.subsets) + reg.df <- na.omit(reg.xts[,c(i,names(which(sum.sub$which[as.character(num.factor.subset),-1]==TRUE)) )]) + fm.fit = lm(fm.formula, data=reg.df) + fm.summary = summary(fm.fit) + reg.list[[i]] = fm.fit + Alphas[i] = coef(fm.fit)[1] + Betas.names = names(coef(fm.fit)[-1]) + Betas[i,Betas.names] = coef(fm.fit)[-1] + ResidVars[i] = fm.summary$sigma^2 + R2values[i] = fm.summary$r.squared + } +} else { + stop("ERROR! 
number of force.in should less or equal to num.factor.subset") +} + + + + +} else if (fit.method == "DLS"){ + + + if (num.factor.subset == length(force.in)) { + # define weight matrix +for (i in assets.names) { + reg.df = na.omit(reg.xts[, c(i, force.in)]) + t.length <- nrow(reg.df) + w <- rep(decay.factor^(t.length-1),t.length) + for (k in 2:t.length) { + w[k] = w[k-1]/decay.factor + } +# sum weigth to unitary + w <- w/sum(w) + fm.formula = as.formula(paste(i,"~", ".", sep="")) + fm.fit = lm(fm.formula, data=reg.df,weight=w) + fm.summary = summary(fm.fit) + reg.list[[i]] = fm.fit + Alphas[i] = coef(fm.fit)[1] + Betas.names = names(coef(fm.fit)[-1]) + Betas[i,Betas.names] = coef(fm.fit)[-1] + ResidVars[i] = fm.summary$sigma^2 + R2values[i] = fm.summary$r.squared + } +} else if (num.factor.subset > length(force.in)) { + for (i in assets.names) { + reg.df = na.omit(reg.xts[, c(i, factors.names)]) + t.length <- nrow(reg.df) + w <- rep(decay.factor^(t.length-1),t.length) + for (k in 2:t.length) { + w[k] = w[k-1]/decay.factor + } + w <- w/sum(w) + fm.formula = as.formula(paste(i,"~", ".", sep="")) + fm.subsets <- regsubsets(fm.formula,data=reg.df,nvmax=nvmax,force.in=force.in, + method=subsets.method,weights=w) # w is called from global envio + sum.sub <- summary(fm.subsets) + reg.df <- na.omit(reg.xts[,c(i,names(which(sum.sub$which[as.character(num.factor.subset),-1]==TRUE)) )]) + fm.fit = lm(fm.formula, data=reg.df,weight=w) + fm.summary = summary(fm.fit) + reg.list[[i]] = fm.fit + Alphas[i] = coef(fm.fit)[1] + Betas.names = names(coef(fm.fit)[-1]) + Betas[i,Betas.names] = coef(fm.fit)[-1] + ResidVars[i] = fm.summary$sigma^2 + R2values[i] = fm.summary$r.squared + } +} else { + stop("ERROR! number of force.in should less or equal to num.factor.subset") +} + + +} else if (fit.method=="Robust") { + for (i in assets.names) { + reg.df = na.omit(reg.xts[, c(i, factors.names)]) + fm.formula = as.formula(paste(i,"~", ".", sep=" ")) + fm.fit = lmRob(fm.formula, data=reg.df) + fm.summary = summary(fm.fit) + reg.list[[i]] = fm.fit + Alphas[i] = coef(fm.fit)[1] + Betas[i, ] = coef(fm.fit)[-1] + ResidVars[i] = fm.summary$sigma^2 + R2values[i] = fm.summary$r.squared + } + +} else { + stop("invalid method") +} + + +} else if (variable.selection == "stepwise") { + + + if (fit.method == "OLS") { +# loop over all assets and estimate time series regression +for (i in assets.names) { + reg.df = na.omit(reg.xts[, c(i, factors.names)]) + fm.formula = as.formula(paste(i,"~", ".", sep=" ")) + fm.fit = step(lm(fm.formula, data=reg.df),trace=0) + fm.summary = summary(fm.fit) + reg.list[[i]] = fm.fit + Alphas[i] = coef(fm.fit)[1] + Betas.names = names(coef(fm.fit)[-1]) + Betas[i,Betas.names] = coef(fm.fit)[-1] + ResidVars[i] = fm.summary$sigma^2 + R2values[i] = fm.summary$r.squared + } + + +} else if (fit.method == "DLS"){ + # define weight matrix +for (i in assets.names) { + reg.df = na.omit(reg.xts[, c(i, factors.names)]) + t.length <- nrow(reg.df) + w <- rep(decay.factor^(t.length-1),t.length) + for (k in 2:t.length) { + w[k] = w[k-1]/decay.factor + } +# sum weigth to unitary + w <- w/sum(w) + fm.formula = as.formula(paste(i,"~", ".", sep="")) + fm.fit = step(lm(fm.formula, data=reg.df,weight=w),trace=0) + fm.summary = summary(fm.fit) + reg.list[[i]] = fm.fit + Alphas[i] = coef(fm.fit)[1] + Betas.names = names(coef(fm.fit)[-1]) + Betas[i,Betas.names] = coef(fm.fit)[-1] + ResidVars[i] = fm.summary$sigma^2 + R2values[i] = fm.summary$r.squared + } + +} else if (fit.method=="Robust") { + for (i in assets.names) { + 
assign("reg.df" , na.omit(reg.xts[, c(i, factors.names)]),envir = .GlobalEnv ) + fm.formula = as.formula(paste(i,"~", ".", sep=" ")) + lmRob.obj <- lmRob(fm.formula, data=reg.df) + fm.fit = step.lmRob(lmRob.obj,trace=FALSE) + fm.summary = summary(fm.fit) + reg.list[[i]] = fm.fit + Alphas[i] = coef(fm.fit)[1] + Betas.names = names(coef(fm.fit)[-1]) + Betas[i,Betas.names] = coef(fm.fit)[-1] + ResidVars[i] = fm.summary$sigma^2 + R2values[i] = fm.summary$r.squared + } + +} + +} else if (variable.selection == "lar" | variable.selection == "lasso") { + # use min Cp as criteria to choose predictors + + for (i in assets.names) { + reg.df = na.omit(reg.xts[, c(i, factors.names)]) + reg.df = as.matrix(reg.df) + lars.fit = lars(reg.df[,factors.names],reg.df[,i],type=variable.selection,trace=FALSE) + sum.lars <- summary(lars.fit) + if (lars.criteria == "Cp") { + s<- which.min(sum.lars$Cp) + } else { + lars.cv <- cv.lars(reg.df[,factors.names],reg.df[,i],trace=FALSE, + type=variable.selection,mode="step",plot.it=FALSE) + s<- which.min(lars.cv$cv) + } + coef.lars <- predict(lars.fit,s=s,type="coef",mode="step") + reg.list[[i]] = lars.fit + fitted <- predict(lars.fit,reg.df[,factors.names],s=s,type="fit",mode="step") + Alphas[i] = (fitted$fit - reg.df[,factors.names]%*%coef.lars$coefficients)[1] + Betas.names = names(coef.lars$coefficients) + Betas[i,Betas.names] = coef.lars$coefficients + ResidVars[i] = sum.lars$Rss[s]/(nrow(reg.df)-s) + R2values[i] = lars.fit$R2[s] + } + + } else { + stop("wrong method") +} + + + + + + # return results +# add option to return list +ans = list (asset.fit = reg.list, + alpha = Alphas, + beta = Betas, + r2 = R2values, + resid.variance = ResidVars, + call = this.call ) +class(ans) = "TimeSeriesFactorModel" +return(ans) +} + Modified: pkg/FactorAnalytics/man/fitStatisticalFactorModel.Rd =================================================================== --- pkg/FactorAnalytics/man/fitStatisticalFactorModel.Rd 2013-06-24 14:02:34 UTC (rev 2414) +++ pkg/FactorAnalytics/man/fitStatisticalFactorModel.Rd 2013-06-24 16:57:24 UTC (rev 2415) @@ -1,80 +1,97 @@ -\name{fitStatisticalFactorModel} -\alias{fitStatisticalFactorModel} -\title{Fit statistical factor model using principle components} -\usage{ - fitStatisticalFactorModel(x, k = 1, refine = TRUE, - check = FALSE, max.k = NULL, sig = 0.05, na.rm = FALSE) -} -\arguments{ - \item{x}{T x N assets returns data which is saved as - data.frame class.} - - \item{k}{numbers of factors if it is scalar or method of - choosing optimal number of factors. "bn" represents Bai - and Ng (2002) method and "ck" represents Connor and - korajczyk (1993) method. Default is k = 1.} - - \item{refine}{\code{TRUE} By default, the APCA fit will - use the Connor-Korajczyk refinement.} - - \item{check}{check if some variables has identical - values. Default is FALSE.} - - \item{max.k}{scalar, select the number that maximum - number of factors to be considered.} - - \item{sig}{significant level when ck method uses.} - - \item{na.rm}{if allow missing values. Default is FALSE.} -} -\value{ - : -} -\description{ - Fit statistical factor model using principle components. - This function is mainly adapted from S+FinMetric function - mfactor. 
-} -\examples{ -# load data for fitStatisticalFactorModel.r -# data from finmetric berndt.dat and folio.dat - -data(stat.fm.data) -## -# sfm.dat is for pca -# sfm.apca.dat is for apca -class(sfm.dat) -class(sfm.apca.dat) - -# pca -args(fitStatisticalFactorModel) -sfm.pca.fit <- fitStatisticalFactorModel(sfm.dat,k=2) -class(sfm.pca.fit) -names(sfm.pca.fit) -sfm.pca.fit$factors -sfm.pca.fit$loadings -sfm.pca.fit$r2 -sfm.pca.fit$residuals -sfm.pca.fit$residVars.vec -sfm.pca.fit$mimic -# apca -sfm.apca.fit <- fitStatisticalFactorModel(sfm.apca.dat,k=1) -names(sfm.apca.fit) -sfm.apca.res <- sfm.apca.fit$residuals -sfm.apca.mimic <- sfm.apca.fit$mimic -# apca with bai and Ng method -sfm.apca.fit.bn <- fitStatisticalFactorModel(sfm.apca.dat,k="bn") -class(sfm.apca.fit.bn) -names(sfm.apca.fit.bn) -sfm.apca.fit.bn$mimic - -# apca with ck method -sfm.apca.fit.ck <- fitStatisticalFactorModel(sfm.apca.dat,k="ck") -class(sfm.apca.fit.ck) -names(sfm.apca.fit.ck) -sfm.apca.fit.ck$mimic -} -\author{ - Eric Zivot and Yi-An Chen -} - +\name{fitStatisticalFactorModel} +\alias{fitStatisticalFactorModel} +\title{Fit statistical factor model using principle components} +\usage{ + fitStatisticalFactorModel(data, k = 1, refine = TRUE, + check = FALSE, max.k = NULL, sig = 0.05, na.rm = FALSE, + ckeckData.method = "xts") +} +\arguments{ + \item{data}{a vector, matrix, data.frame, xts, timeSeries + or zoo object with asset returns and factors retunrs + rownames} + + \item{k}{numbers of factors if it is scalar or method of + choosing optimal number of factors. "bn" represents Bai + and Ng (2002) method and "ck" represents Connor and + korajczyk (1993) method. Default is k = 1.} + + \item{refine}{\code{TRUE} By default, the APCA fit will + use the Connor-Korajczyk refinement.} + + \item{check}{check if some variables has identical + values. Default is FALSE.} + + \item{max.k}{scalar, select the number that maximum + number of factors to be considered.} + + \item{sig}{significant level when ck method uses.} + + \item{na.rm}{if allow missing values. Default is FALSE.} +} +\value{ + \item{factors}{T x K the estimated factors.} + \item{loadings}{K x N the asset specific factor loadings + beta_i. estimated from regress the asset returns on + factors.} \item{alpha}{1 x N the estimated intercepts + alpha_i} \item{ret.cov}{N x N asset returns sample + variance covariance matrix.} \item{r2}{regression r + square value from regress the asset returns on factors.} + \item{k}{the number of the facotrs.} + \item{eigen}{eigenvalues from the sample covariance + matrix.} \item{residuals}{T x N matrix of residuals from + regression.} \item{asset.ret}{asset returns} + \item{asset.fit}{List of regression lm class of + individual returns on factors.} + \item{resid.variance}{vector of residual variances.} + \item{mimic}{N x K matrix of factor mimicking portfolio + returns.} +} +\description{ + Fit statistical factor model using principle components. + This function is mainly adapted from S+FinMetric function + mfactor. 
+} +\examples{ +# load data for fitStatisticalFactorModel.r +# data from finmetric berndt.dat and folio.dat + +data(stat.fm.data) +## +# sfm.dat is for pca +# sfm.apca.dat is for apca +class(sfm.dat) +class(sfm.apca.dat) + +# pca +args(fitStatisticalFactorModel) +sfm.pca.fit <- fitStatisticalFactorModel(sfm.dat,k=2) +class(sfm.pca.fit) +names(sfm.pca.fit) +sfm.pca.fit$factors +sfm.pca.fit$loadings +sfm.pca.fit$r2 +sfm.pca.fit$residuals +sfm.pca.fit$resid.variance +sfm.pca.fit$mimic +# apca +sfm.apca.fit <- fitStatisticalFactorModel(sfm.apca.dat,k=1) +names(sfm.apca.fit) +sfm.apca.res <- sfm.apca.fit$residuals +sfm.apca.mimic <- sfm.apca.fit$mimic +# apca with bai and Ng method +sfm.apca.fit.bn <- fitStatisticalFactorModel(sfm.apca.dat,k="bn") +class(sfm.apca.fit.bn) +names(sfm.apca.fit.bn) +sfm.apca.fit.bn$mimic + +# apca with ck method +sfm.apca.fit.ck <- fitStatisticalFactorModel(sfm.apca.dat,k="ck") +class(sfm.apca.fit.ck) +names(sfm.apca.fit.ck) +sfm.apca.fit.ck$mimic +} +\author{ + Eric Zivot and Yi-An Chen +} + Added: pkg/FactorAnalytics/man/fitTimeseriesFactorModel.Rd =================================================================== --- pkg/FactorAnalytics/man/fitTimeseriesFactorModel.Rd (rev 0) +++ pkg/FactorAnalytics/man/fitTimeseriesFactorModel.Rd 2013-06-24 16:57:24 UTC (rev 2415) @@ -0,0 +1,112 @@ +\name{fitTimeseriesFactorModel} +\alias{fitTimeseriesFactorModel} +\title{Fit time series factor model by time series regression techniques.} +\usage{ + fitTimeseriesFactorModel(assets.names, factors.names, + data = data, num.factor.subset = 1, + fit.method = c("OLS", "DLS", "Robust"), + variable.selection = "none", decay.factor = 0.95, + nvmax = 8, force.in = NULL, + subsets.method = c("exhaustive", "backward", "forward", "seqrep"), + lars.criteria = c("Cp", "cv")) +} +\arguments{ + \item{assets.names}{names of assets returns.} + + \item{factors.names}{names of factors returns.} + + \item{num.factor.subset}{scalar. Number of factors + selected by all subsets.} + + \item{data}{a vector, matrix, data.frame, xts, timeSeries + or zoo object with asset returns and factors retunrs + rownames} + + \item{fit.method}{"OLS" is ordinary least squares method, + "DLS" is discounted least squares method. Discounted + least squares (DLS) estimation is weighted least squares + estimation with exponentially declining weights that sum + to unity. "Robust"} + + \item{variable.selection}{"none" will not activate + variables sellection. Default is "none". "stepwise" is + traditional forward/backward #' stepwise OLS regression, + starting from the initial set of factors, that adds + factors only if the regression fit as measured by the + Bayesian Information Criteria (BIC) or Akaike Information + Criteria (AIC) can be done using the R function step() + from the stats package. If "Robust" is chosen, the + function step.lmRob in Robust package will be used. 
"all + subsets" is Traditional all subsets regression can be [TRUNCATED] To get the complete diff run: svnlook diff /svnroot/returnanalytics -r 2415 From noreply at r-forge.r-project.org Mon Jun 24 19:04:01 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Mon, 24 Jun 2013 19:04:01 +0200 (CEST) Subject: [Returnanalytics-commits] r2416 - in pkg/FactorAnalytics: R man Message-ID: <20130624170401.7D8AB183A54@r-forge.r-project.org> Author: chenyian Date: 2013-06-24 19:04:00 +0200 (Mon, 24 Jun 2013) New Revision: 2416 Removed: pkg/FactorAnalytics/R/covEWMA.R pkg/FactorAnalytics/R/fitMacroeconomicFactorModel.R pkg/FactorAnalytics/man/covEWMA.Rd Modified: pkg/FactorAnalytics/R/ pkg/FactorAnalytics/man/ Log: 1. ignore covEWMA.R and covEWMA.Rd Property changes on: pkg/FactorAnalytics/R ___________________________________________________________________ Added: svn:ignore + covEWMA.R Deleted: pkg/FactorAnalytics/R/covEWMA.R =================================================================== --- pkg/FactorAnalytics/R/covEWMA.R 2013-06-24 16:57:24 UTC (rev 2415) +++ pkg/FactorAnalytics/R/covEWMA.R 2013-06-24 17:04:00 UTC (rev 2416) @@ -1,79 +0,0 @@ -#' Compute RiskMetrics-type EWMA Covariance Matrix -#' -#' Compute time series of RiskMetrics-type EWMA covariance matrices of returns. -#' Initial covariance matrix is assumed to be the unconditional covariance -#' matrix. -#' -#' The EWMA covariance matrix at time \code{t} is compute as \cr \code{Sigma(t) -#' = lambda*Sigma(t-1) + (1-lambda)*R(t)t(R(t))} \cr where \code{R(t)} is the -#' \code{K x 1} vector of returns at time \code{t}. -#' -#' @param factors \code{T x K} data.frame containing asset returns, where -#' \code{T} is the number of time periods and \code{K} is the number of assets. -#' @param lambda Scalar exponential decay factor. Must lie between between 0 -#' and 1. -#' @param return.cor Logical, if TRUE then return EWMA correlation matrices. -#' @return \code{T x K x K} array giving the time series of EWMA covariance -#' matrices if \code{return.cor=FALSE} and EWMA correlation matrices if -#' \code{return.cor=TRUE}. -#' @author Eric Zivot and Yi-An Chen. -#' @references Zivot, E. and J. Wang (2006), \emph{Modeling Financial Time -#' Series with S-PLUS, Second Edition}, Springer-Verlag. -#' @examples -#' -#' # compute time vaying covariance of factors. -#' data(managers.df) -#' factors = managers.df[,(7:9)] -#' cov.f.ewma <- covEWMA(factors) -#' cov.f.ewma[120,,] -#' -covEWMA <- -function(factors, lambda=0.96, return.cor=FALSE) { -## Inputs: -## factors N x K numerical factors data. data is class data.frame -## N is the time length and K is the number of the factors. -## lambda scalar. exponetial decay factor between 0 and 1. -## return.cor Logical, if TRUE then return EWMA correlation matrices -## Output: -## cov.f.ewma array. dimension is N x K x K. -## comments: -## 1. add optional argument cov.start to specify initial covariance matrix -## 2. 
allow data input to be data class to be any rectangular data object - - -if (is.data.frame(factors)){ - factor.names = colnames(factors) - t.factor = nrow(factors) - k.factor = ncol(factors) - factors = as.matrix(factors) - t.names = rownames(factors) -} else { - stop("factor data should be saved in data.frame class.") -} -if (lambda>=1 || lambda <= 0){ - stop("exponential decay value lambda should be between 0 and 1.") -} else { - cov.f.ewma = array(,c(t.factor,k.factor,k.factor)) - cov.f = var(factors) # unconditional variance as EWMA at time = 0 - FF = (factors[1,]- mean(factors)) %*% t(factors[1,]- mean(factors)) - cov.f.ewma[1,,] = (1-lambda)*FF + lambda*cov.f - for (i in 2:t.factor) { - FF = (factors[i,]- mean(factors)) %*% t(factors[i,]- mean(factors)) - cov.f.ewma[i,,] = (1-lambda)*FF + lambda*cov.f.ewma[(i-1),,] - } - -} - # 9/15/11: add dimnames to array - dimnames(cov.f.ewma) = list(t.names, factor.names, factor.names) - - if(return.cor) { - cor.f.ewma = cov.f.ewma - for (i in 1:dim(cor.f.ewma)[1]) { - cor.f.ewma[i, , ] = cov2cor(cov.f.ewma[i, ,]) - } - return(cor.f.ewma) - } else{ - return(cov.f.ewma) - } -} - Deleted: pkg/FactorAnalytics/R/fitMacroeconomicFactorModel.R =================================================================== --- pkg/FactorAnalytics/R/fitMacroeconomicFactorModel.R 2013-06-24 16:57:24 UTC (rev 2415) +++ pkg/FactorAnalytics/R/fitMacroeconomicFactorModel.R 2013-06-24 17:04:00 UTC (rev 2416) @@ -1,379 +0,0 @@ -#' Fit macroeconomic factor model by time series regression techniques. -#' -#' Fit macroeconomic factor model by time series regression techniques. It -#' creates the class of "MacroFactorModel". -#' -#' If \code{Robust} is chosen, there is no subsets but all factors will be -#' used. Cp is defined in -#' http://www-stat.stanford.edu/~hastie/Papers/LARS/LeastAngle_2002.pdf. p17. -#' -#' @param assets.names names of assets returns. -#' @param factors.names names of factors returns. -#' @param num.factor.subset scalar. Number of factors selected by all subsets. -#' @param data a vector, matrix, data.frame, xts, timeSeries or zoo object with asset returns -#' and factors retunrs rownames -#' @param fit.method "OLS" is ordinary least squares method, "DLS" is -#' discounted least squares method. Discounted least squares (DLS) estimation -#' is weighted least squares estimation with exponentially declining weights -#' that sum to unity. "Robust" -#' @param variable.selection "none" will not activate variables sellection. Default is "none". -#' "stepwise" is traditional forward/backward #' stepwise OLS regression, starting from the initial set of factors, that adds -#' factors only if the regression fit as measured by the Bayesian Information -#' Criteria (BIC) or Akaike Information Criteria (AIC) can be done using the R -#' function step() from the stats package. If "Robust" is chosen, the -#' function step.lmRob in Robust package will be used. "all subsets" is -#' Traditional all subsets regression can be done using the R function -#' regsubsets() from the package leaps. "lar" , "lasso" is based on package -#' "lars", linear angle regression. If "lar" or "lasso" is chose. fit.method will be ignored. -#' @param decay.factor for DLS. Default is 0.95. -#' @param nvmax control option for all subsets. maximum size of subsets to -#' examine -#' @param force.in control option for all subsets. The factors that should be -#' in all models. -#' @param subsets.method control option for all subsets. 
se exhaustive search, -#' forward selection, backward selection or sequential replacement to search. -#' @param lars.criteria either choose minimum "Cp": unbiased estimator of the -#' true rist or "cv" 10 folds cross-validation. See detail. -#' @return an S3 object containing -#' \item{asset.fit}{Fit objects for each asset. This is the class "lm" for -#' each object.} -#' \item{alpha.vec}{N x 1 Vector of estimated alphas.} -#' \item{beta.mat}{N x K Matrix of estimated betas.} -#' \item{r2.vec}{N x 1 Vector of R-square values.} -#' \item{residVars.vec}{N x 1 Vector of residual variances.} -#' \item{call}{function call.} -#' \item{ret.assets}{Assets returns of input data.} -#' \item{factors Factors of input data.} -#' \item{variable.selection variables selected by the user.} -#' @author Eric Zivot and Yi-An Chen. -#' @references 1. Efron, Hastie, Johnstone and Tibshirani (2002) "Least Angle -#' Regression" (with discussion) Annals of Statistics; see also -#' http://www-stat.stanford.edu/~hastie/Papers/LARS/LeastAngle_2002.pdf. 2. -#' Hastie, Tibshirani and Friedman (2008) Elements of Statistical Learning 2nd -#' edition, Springer, NY. -#' @examples -#' \dontrun{ -#' # load data from the database -#' data(managers.df) -#' ret.assets = managers.df[,(1:6)] -#' factors = managers.df[,(7:9)] -#' # fit the factor model with OLS -#' fit <- fitMacroeconomicFactorModel(ret.assets,factors,fit.method="OLS", -#' variable.selection="all subsets") -#' # summary of HAM1 -#' summary(fit$asset.fit$HAM1) -#' # plot actual vs. fitted over time for HAM1 -#' # use chart.TimeSeries() function from PerformanceAnalytics package -#' dataToPlot = cbind(fitted(fit$asset.fit$HAM1), na.omit(managers.df$HAM1)) -#' colnames(dataToPlot) = c("Fitted","Actual") -#' chart.TimeSeries(dataToPlot, main="FM fit for HAM1", -#' colorset=c("black","blue"), legend.loc="bottomleft") -#' } -fitMacroeconomicFactorModel <- -function(assets.names, factors.names, data=data, num.factor.subset = 1, - fit.method=c("OLS","DLS","Robust"), - variable.selection="none", - decay.factor = 0.95,nvmax=8,force.in=NULL, - subsets.method = c("exhaustive", "backward", "forward", "seqrep"), - lars.criteria = c("Cp","cv")) { - - require(PerformanceAnalytics) - require(leaps) - require(lars) - require(robust) - require(MASS) - this.call <- match.call() - - # convert data into xts and hereafter compute in xts - data.xts <- checkData(data) - reg.xts <- merge(data.xts[,assets.names],data.xts[,factors.names]) - - # initialize list object to hold regression objects -reg.list = list() - - -# initialize matrices and vectors to hold estimated betas, -# residual variances, and R-square values from -# fitted factor models - -Alphas = ResidVars = R2values = rep(0, length(assets.names)) -names(Alphas) = names(ResidVars) = names(R2values) = assets.names -Betas = matrix(0, length(assets.names), length(factors.names)) -colnames(Betas) = factors.names -rownames(Betas) = assets.names - - -if (variable.selection == "none") { - if (fit.method == "OLS") { - for (i in assets.names) { - reg.df = na.omit(reg.xts[, c(i, factors.names)]) - fm.formula = as.formula(paste(i,"~", ".", sep=" ")) - fm.fit = lm(fm.formula, data=reg.df) - fm.summary = summary(fm.fit) - reg.list[[i]] = fm.fit - Alphas[i] = coef(fm.fit)[1] - Betas.names = names(coef(fm.fit)[-1]) - Betas[i,Betas.names] = coef(fm.fit)[-1] - ResidVars[i] = fm.summary$sigma^2 - R2values[i] = fm.summary$r.squared - } - } else if (fit.method == "DLS") { - for (i in assets.names) { - reg.df = na.omit(reg.xts[, c(i, factors.names)]) 
- t.length <- nrow(reg.df) - w <- rep(decay.factor^(t.length-1),t.length) - for (k in 2:t.length) { - w[k] = w[k-1]/decay.factor - } - # sum weigth to unitary - w <- w/sum(w) - fm.formula = as.formula(paste(i,"~", ".", sep="")) - fm.fit = lm(fm.formula, data=reg.df,weight=w) - fm.summary = summary(fm.fit) - reg.list[[i]] = fm.fit - Alphas[i] = coef(fm.fit)[1] - Betas.names = names(coef(fm.fit)[-1]) - Betas[i,Betas.names] = coef(fm.fit)[-1] - ResidVars[i] = fm.summary$sigma^2 - R2values[i] = fm.summary$r.squared - } - } else if (fit.method=="Robust") { - for (i in assets.names) { - reg.df = na.omit(reg.xts[, c(i, factors.names)]) - fm.formula = as.formula(paste(i,"~", ".", sep=" ")) - fm.fit = lmRob(fm.formula, data=reg.df) - fm.summary = summary(fm.fit) - reg.list[[i]] = fm.fit - Alphas[i] = coef(fm.fit)[1] - Betas[i, ] = coef(fm.fit)[-1] - ResidVars[i] = fm.summary$sigma^2 - R2values[i] = fm.summary$r.squared - } - - } else { - stop("invalid method") - } - - -} else if (variable.selection == "all subsets") { -# estimate multiple factor model using loop b/c of unequal histories for the hedge funds - - - -if (fit.method == "OLS") { - -if (num.factor.subset == length(force.in)) { - for (i in assets.names) { - reg.df = na.omit(reg.xts[, c(i, force.in)]) - fm.formula = as.formula(paste(i,"~", ".", sep=" ")) - fm.fit = lm(fm.formula, data=reg.df) - fm.summary = summary(fm.fit) - reg.list[[i]] = fm.fit - Alphas[i] = coef(fm.fit)[1] - Betas.names = names(coef(fm.fit)[-1]) - Betas[i,Betas.names] = coef(fm.fit)[-1] - ResidVars[i] = fm.summary$sigma^2 - R2values[i] = fm.summary$r.squared - } -} else if (num.factor.subset > length(force.in)) { - -for (i in assets.names) { - reg.df = na.omit(reg.xts[, c(i, factors.names)]) - fm.formula = as.formula(paste(i,"~", ".", sep=" ")) - fm.subsets <- regsubsets(fm.formula,data=reg.df,nvmax=nvmax,force.in=force.in, - method=subsets.method) - sum.sub <- summary(fm.subsets) - reg.df <- na.omit(reg.xts[,c(i,names(which(sum.sub$which[as.character(num.factor.subset),-1]==TRUE)) )]) - fm.fit = lm(fm.formula, data=reg.df) - fm.summary = summary(fm.fit) - reg.list[[i]] = fm.fit - Alphas[i] = coef(fm.fit)[1] - Betas.names = names(coef(fm.fit)[-1]) - Betas[i,Betas.names] = coef(fm.fit)[-1] - ResidVars[i] = fm.summary$sigma^2 - R2values[i] = fm.summary$r.squared - } -} else { - stop("ERROR! 
number of force.in should less or equal to num.factor.subset") -} - - - - -} else if (fit.method == "DLS"){ - - - if (num.factor.subset == length(force.in)) { - # define weight matrix -for (i in assets.names) { - reg.df = na.omit(reg.xts[, c(i, force.in)]) - t.length <- nrow(reg.df) - w <- rep(decay.factor^(t.length-1),t.length) - for (k in 2:t.length) { - w[k] = w[k-1]/decay.factor - } -# sum weigth to unitary - w <- w/sum(w) - fm.formula = as.formula(paste(i,"~", ".", sep="")) - fm.fit = lm(fm.formula, data=reg.df,weight=w) - fm.summary = summary(fm.fit) - reg.list[[i]] = fm.fit - Alphas[i] = coef(fm.fit)[1] - Betas.names = names(coef(fm.fit)[-1]) - Betas[i,Betas.names] = coef(fm.fit)[-1] - ResidVars[i] = fm.summary$sigma^2 - R2values[i] = fm.summary$r.squared - } -} else if (num.factor.subset > length(force.in)) { - for (i in assets.names) { - reg.df = na.omit(reg.xts[, c(i, factors.names)]) - t.length <- nrow(reg.df) - w <- rep(decay.factor^(t.length-1),t.length) - for (k in 2:t.length) { - w[k] = w[k-1]/decay.factor - } - w <- w/sum(w) - fm.formula = as.formula(paste(i,"~", ".", sep="")) - fm.subsets <- regsubsets(fm.formula,data=reg.df,nvmax=nvmax,force.in=force.in, - method=subsets.method,weights=w) # w is called from global envio - sum.sub <- summary(fm.subsets) - reg.df <- na.omit(reg.xts[,c(i,names(which(sum.sub$which[as.character(num.factor.subset),-1]==TRUE)) )]) - fm.fit = lm(fm.formula, data=reg.df,weight=w) - fm.summary = summary(fm.fit) - reg.list[[i]] = fm.fit - Alphas[i] = coef(fm.fit)[1] - Betas.names = names(coef(fm.fit)[-1]) - Betas[i,Betas.names] = coef(fm.fit)[-1] - ResidVars[i] = fm.summary$sigma^2 - R2values[i] = fm.summary$r.squared - } -} else { - stop("ERROR! number of force.in should less or equal to num.factor.subset") -} - - -} else if (fit.method=="Robust") { - for (i in assets.names) { - reg.df = na.omit(reg.xts[, c(i, factors.names)]) - fm.formula = as.formula(paste(i,"~", ".", sep=" ")) - fm.fit = lmRob(fm.formula, data=reg.df) - fm.summary = summary(fm.fit) - reg.list[[i]] = fm.fit - Alphas[i] = coef(fm.fit)[1] - Betas[i, ] = coef(fm.fit)[-1] - ResidVars[i] = fm.summary$sigma^2 - R2values[i] = fm.summary$r.squared - } - -} else { - stop("invalid method") -} - - -} else if (variable.selection == "stepwise") { - - - if (fit.method == "OLS") { -# loop over all assets and estimate time series regression -for (i in assets.names) { - reg.df = na.omit(reg.xts[, c(i, factors.names)]) - fm.formula = as.formula(paste(i,"~", ".", sep=" ")) - fm.fit = step(lm(fm.formula, data=reg.df),trace=0) - fm.summary = summary(fm.fit) - reg.list[[i]] = fm.fit - Alphas[i] = coef(fm.fit)[1] - Betas.names = names(coef(fm.fit)[-1]) - Betas[i,Betas.names] = coef(fm.fit)[-1] - ResidVars[i] = fm.summary$sigma^2 - R2values[i] = fm.summary$r.squared - } - - -} else if (fit.method == "DLS"){ - # define weight matrix -for (i in assets.names) { - reg.df = na.omit(reg.xts[, c(i, factors.names)]) - t.length <- nrow(reg.df) - w <- rep(decay.factor^(t.length-1),t.length) - for (k in 2:t.length) { - w[k] = w[k-1]/decay.factor - } -# sum weigth to unitary - w <- w/sum(w) - fm.formula = as.formula(paste(i,"~", ".", sep="")) - fm.fit = step(lm(fm.formula, data=reg.df,weight=w),trace=0) - fm.summary = summary(fm.fit) - reg.list[[i]] = fm.fit - Alphas[i] = coef(fm.fit)[1] - Betas.names = names(coef(fm.fit)[-1]) - Betas[i,Betas.names] = coef(fm.fit)[-1] - ResidVars[i] = fm.summary$sigma^2 - R2values[i] = fm.summary$r.squared - } - -} else if (fit.method=="Robust") { - for (i in assets.names) { - 
assign("reg.df" , na.omit(reg.xts[, c(i, factors.names)]),envir = .GlobalEnv ) - fm.formula = as.formula(paste(i,"~", ".", sep=" ")) - lmRob.obj <- lmRob(fm.formula, data=reg.df) - fm.fit = step.lmRob(lmRob.obj,trace=FALSE) - fm.summary = summary(fm.fit) - reg.list[[i]] = fm.fit - Alphas[i] = coef(fm.fit)[1] - Betas.names = names(coef(fm.fit)[-1]) - Betas[i,Betas.names] = coef(fm.fit)[-1] - ResidVars[i] = fm.summary$sigma^2 - R2values[i] = fm.summary$r.squared - } - -} - -} else if (variable.selection == "lar" | variable.selection == "lasso") { - # use min Cp as criteria to choose predictors - - for (i in assets.names) { - reg.df = na.omit(reg.xts[, c(i, factors.names)]) - reg.df = as.matrix(reg.df) - lars.fit = lars(reg.df[,factors.names],reg.df[,i],type=variable.selection,trace=FALSE) - sum.lars <- summary(lars.fit) - if (lars.criteria == "Cp") { - s<- which.min(sum.lars$Cp) - } else { - lars.cv <- cv.lars(reg.df[,factors.names],reg.df[,i],trace=FALSE, - type=variable.selection,mode="step",plot.it=FALSE) - s<- which.min(lars.cv$cv) - } - coef.lars <- predict(lars.fit,s=s,type="coef",mode="step") - reg.list[[i]] = lars.fit - fitted <- predict(lars.fit,reg.df[,factors.names],s=s,type="fit",mode="step") - Alphas[i] = (fitted$fit - reg.df[,factors.names]%*%coef.lars$coefficients)[1] - Betas.names = names(coef.lars$coefficients) - Betas[i,Betas.names] = coef.lars$coefficients - ResidVars[i] = sum.lars$Rss[s]/(nrow(reg.df)-s) - R2values[i] = lars.fit$R2[s] - } - - } else { - stop("wrong method") -} - - - - - - # return results -# add option to return list -ans = list (asset.fit = reg.list, - alpha.vec = Alphas, - beta.mat = Betas, - r2.vec = R2values, - residVars.vec = ResidVars, - call = this.call, - ret.assets = ret.assets, - factors = factors, - variable.selection = variable.selection - ) -class(ans) = "MacroFactorModel" -return(ans) -} - Property changes on: pkg/FactorAnalytics/man ___________________________________________________________________ Added: svn:ignore + covEWMA.Rd Deleted: pkg/FactorAnalytics/man/covEWMA.Rd =================================================================== --- pkg/FactorAnalytics/man/covEWMA.Rd 2013-06-24 16:57:24 UTC (rev 2415) +++ pkg/FactorAnalytics/man/covEWMA.Rd 2013-06-24 17:04:00 UTC (rev 2416) @@ -1,49 +0,0 @@ -\name{covEWMA} -\alias{covEWMA} -\title{Compute RiskMetrics-type EWMA Covariance Matrix} -\usage{ - covEWMA(factors, lambda = 0.96, return.cor = FALSE) -} -\arguments{ - \item{factors}{\code{T x K} data.frame containing asset - returns, where \code{T} is the number of time periods and - \code{K} is the number of assets.} - - \item{lambda}{Scalar exponential decay factor. Must lie - between between 0 and 1.} - - \item{return.cor}{Logical, if TRUE then return EWMA - correlation matrices.} -} -\value{ - \code{T x K x K} array giving the time series of EWMA - covariance matrices if \code{return.cor=FALSE} and EWMA - correlation matrices if \code{return.cor=TRUE}. -} -\description{ - Compute time series of RiskMetrics-type EWMA covariance - matrices of returns. Initial covariance matrix is assumed - to be the unconditional covariance matrix. -} -\details{ - The EWMA covariance matrix at time \code{t} is compute as - \cr \code{Sigma(t) = lambda*Sigma(t-1) + - (1-lambda)*R(t)t(R(t))} \cr where \code{R(t)} is the - \code{K x 1} vector of returns at time \code{t}. -} -\examples{ -# compute time vaying covariance of factors. 
-data(managers.df)
-factors = managers.df[,(7:9)]
-cov.f.ewma <- covEWMA(factors)
-cov.f.ewma[120,,]
-}
-\author{
-  Eric Zivot and Yi-An Chen.
-}
-\references{
-  Zivot, E. and J. Wang (2006), \emph{Modeling Financial
-  Time Series with S-PLUS, Second Edition},
-  Springer-Verlag.
-}
-

From noreply at r-forge.r-project.org  Mon Jun 24 20:48:29 2013
From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org)
Date: Mon, 24 Jun 2013 20:48:29 +0200 (CEST)
Subject: [Returnanalytics-commits] r2417 - in
 pkg/PerformanceAnalytics/sandbox/pulkit: . week1 week1/code week1/vignette
Message-ID: <20130624184829.B4E5A18569F@r-forge.r-project.org>

Author: pulkit
Date: 2013-06-24 20:48:28 +0200 (Mon, 24 Jun 2013)
New Revision: 2417

Added:
   pkg/PerformanceAnalytics/sandbox/pulkit/week1/
   pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/
   pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/MinTRL.R
   pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/PSRopt.R
   pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/ProbSharpeRatio.R
   pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/chart.PSR.R
   pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/chart.SharpeEfficient.R
   pkg/PerformanceAnalytics/sandbox/pulkit/week1/tests/
   pkg/PerformanceAnalytics/sandbox/pulkit/week1/vignette/
   pkg/PerformanceAnalytics/sandbox/pulkit/week1/vignette/ProbSharpe.Rnw
   pkg/PerformanceAnalytics/sandbox/pulkit/week1/vignette/ProbSharpe.pdf
Log:
created week1 file

Added: pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/MinTRL.R
===================================================================
--- pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/MinTRL.R	                        (rev 0)
+++ pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/MinTRL.R	2013-06-24 18:48:28 UTC (rev 2417)
@@ -0,0 +1,78 @@
+#'@title Minimum Track Record Length
+#'
+#'@description
+#'How long should a track record be in order to have statistical confidence
+#'that its Sharpe ratio is above a given threshold? If a track record is shorter
+#'than MinTRL, we do not have enough confidence that the observed Sharpe ratio
+#'is above the designated threshold.
+#'
+#'@aliases MinTrackRecord
+#'
+#'@param R the return series
+#'@param Rf the risk free rate of return
+#'@param refSR the reference Sharpe Ratio
+#'@param p the confidence level
+#'@param weights the weights for the portfolio
+#'@param sr Sharpe Ratio
+#'@param sk Skewness
+#'@param kr Kurtosis
+#'
+#'@references Bailey, David H. and Lopez de Prado, Marcos, \emph{The Sharpe Ratio
+#'Efficient Frontier} (July 1, 2012). Journal of Risk, Vol. 15, No. 2, Winter
+#' 2012/13
+#'@keywords ts multivariate distribution models
+#'@examples
+#'
+#'data(edhec)
+#'MinTrackRecord(edhec[,1],0.20)
+
+
+MinTrackRecord<-function(R = NULL, refSR,Rf=0,p = 0.95, weights = NULL,sr = NULL,sk = NULL, kr = NULL, ...){
+    columns = 1
+    columnnames = NULL
+    #Error handling if R is not NULL
+    if(!is.null(R)){
+        x = checkData(R)
+        columns = ncol(x)
+        n = nrow(x)
+        #Checking if the weights are provided or not
+        if(!is.null(weights)){
+            if(length(weights)!=columns){
+                stop("number of items in weights is not equal to the number of columns in R")
+            }
+            else{
+                # A portfolio is constructed by applying the weights
+                x = Return.portfolio(R,weights)
+                sr = SharpeRatio(x, Rf, p, "StdDev")
+                sk = skewness(x)
+                kr = kurtosis(x)
+            }
+        }
+        else{
+            sr = SharpeRatio(x, Rf, p, "StdDev")
+            sk = skewness(x)
+            kr = kurtosis(x)
+        }
+
+        columnnames = colnames(x)
+
+    }
+    # If R is NULL, check that the Sharpe ratio, skewness and kurtosis were passed
+    else{
+        if(is.null(sr) | is.null(sk) | is.null(kr)){
+            stop("You must either pass R or the Sharpe ratio, Skewness and Kurtosis")
+        }
+    }
+    #If weights are not taken into account a message is displayed
+    if(is.null(weights)){
+        message("no weights passed,will calculate Probability Sharpe Ratio for each column")
+    }
+
+    if(!is.null(dim(Rf)))
+        Rf = checkData(Rf)
+    result = 1 + (1 - sk*sr + ((kr-1)/4)*sr^2)*(qnorm(p)/(sr-refSR))^2
+    if(!is.null(dim(result))){
+        colnames(result) = columnnames
+        rownames(result) = paste("Minimum Track Record Length(p=",round(p*100,1),"%):")
+    }
+    return(result)
+}
+
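The MinTRL expression above depends only on the observed moments, so the
function can be sanity-checked without any return data. A minimal sketch,
assuming the file above has been sourced; the moment values are made up
purely for illustration:

    # hypothetical moments, for illustration only
    MinTrackRecord(refSR = 0, sr = 0.036, sk = -2.4, kr = 10.2)
    # the same value from the closed form used in the function body:
    1 + (1 - (-2.4)*0.036 + ((10.2 - 1)/4)*0.036^2) * (qnorm(0.95)/(0.036 - 0))^2
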
Added: pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/PSRopt.R
===================================================================
--- pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/PSRopt.R	                        (rev 0)
+++ pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/PSRopt.R	2013-06-24 18:48:28 UTC (rev 2417)
@@ -0,0 +1,179 @@
+#'@title Implementation of PSR Portfolio Optimization
+#'@description
+#'Maximizing for PSR leads to better diversified and more balanced hedge fund
+#'allocations compared to the concentrated outcomes of Sharpe ratio maximization.
+#'We would like to find the vector of weights that maximizes this expression.
+#'Gradient ascent logic is used to compute the weights in the function PsrPortfolio.
+#'
+#'@aliases PsrPortfolio
+#'
+#'@param R The return series
+#'@param refSR The benchmark Sharpe Ratio
+#'@param bounds The bounds for the weights
+#'@param MaxIter The Maximum number of iterations
+#'@param delta The value of delta Z
+#'
+#'@references Bailey, David H. and Lopez de Prado, Marcos, \emph{The Sharpe Ratio
+#'Efficient Frontier} (July 1, 2012). Journal of Risk, Vol. 15, No. 2, Winter
+#'2012/13
+#'
+#'@keywords ts multivariate distribution models
+#'
+#'@examples
+#'
+#'data(edhec)
+#'PsrPortfolio(edhec)
+
+PsrPortfolio<-function(R,refSR=0,bounds=NULL,MaxIter = 1000,delta = 0.005){
+
+    x = checkData(R)
+    columns = ncol(x)
+    n = nrow(x)
+    columnnames = colnames(x)
+
+
+    if(is.null(bounds)){
+        message("Bounds not given, assuming bounds to be (0,1) for each weight")
+        bounds = matrix(rep(c(0,1),columns),nrow = columns,byrow = TRUE)
+    }
+
+    #Optimization Function
+    optimize<-function(){
+        weights = rep(1,columns)/columns
+        d1z = 0
+        z = 0
+        iter = 0
+        mean = NULL
+        for(column in 1:columns){
+            mean = c(mean,mean(x[,column]))
+        }
+        while(TRUE){
+            if(iter == MaxIter) break
+            dZ = get_d1Zs(mean,weights)
+            #stop when the PSR estimate no longer improves or a bound is violated
+            if(dZ$z < z | checkBounds(weights) == FALSE) break
+            z = dZ$z
+            d1z = dZ$d1Z
+            iter = iter + 1
+            weights_new = stepSize(weights,d1z)
+            if(is.null(weights_new)) break
+            weights = weights_new
+        }
+        return(weights)
+    }
+
+    #Check that each weight lies within its bounds
+    checkBounds<-function(weights){
+        flag = TRUE
+        for(i in 1:columns){
+            if(weights[i] < bounds[i,1]) flag = FALSE
+            if(weights[i] > bounds[i,2]) flag = FALSE
+        }
+        return(flag)
+    }
+
+    #Calculate the step size to change the weights
+    stepSize<-function(weights,d1Z){
+        if(length(which(d1Z!=0)) == 0){
+            return(NULL)
+        }
+        weights[which(abs(d1Z)==max(abs(d1Z)))] = weights[which(abs(d1Z)==max(abs(d1Z)))]+(delta/d1Z[which(abs(d1Z)==max(abs(d1Z)))])
+        weights = weights/sum(weights)
+        return(weights)
+
+    }
+    #To get the first differentials
+    get_d1Zs<-function(mean,weights){
+        d1Z = NULL
+        m = NULL
+        x_portfolio = Return.portfolio(x,weights)
+        mu = mean(x_portfolio)
+        sd = StdDev(x_portfolio)
+        sk = skewness(x_portfolio)
+        kr = kurtosis(x_portfolio)
+        stats = c(mu,sd,sk,kr)
+        m = c(stats[1],stats[2]^2,stats[3]*(stats[2]^3),stats[4]*(stats[2]^4))
+        SR = get_SR(stats,n)
+        meanSR = SR$meanSR
+        sigmaSR = SR$sigmaSR
+
+        for(i in 1:columns){
+            d1Z = c(d1Z,get_d1Z(stats,m,meanSR,sigmaSR,mean,weights,i))
+        }
+        dZ = list("d1Z"=d1Z,"z"=meanSR/sigmaSR)
+
+        return(dZ)
+    }
+    #To get the dZ/dw for each weight
+    get_d1Z<-function(stats,m,meanSR,sigmaSR,mean,weights,index){
+        d1Mu = get_d1Mu(mean,index)
+        d1Sigma = get_d1Sigma(stats[2],mean,weights,index)
+        #m[3] and m[4] are the third and fourth central moments
+        d1Skew = get_d1Skew(d1Sigma,stats[2],mean,weights,index,m[3])
+        d1Kurt = get_d1Kurt(d1Sigma,stats[2],mean,weights,index,m[4])
+        d1meanSR = (d1Mu*stats[2]-d1Sigma*stats[1])/stats[2]^2
+        d1sigmaSR = (d1Kurt * meanSR^2+2*meanSR*d1meanSR*(stats[4]-1))/4
+        d1sigmaSR = d1sigmaSR - d1Skew*meanSR+d1meanSR*stats[3]
+        d1sigmaSR = d1sigmaSR/(2*sigmaSR*(n-1))
+        d1Z = (d1meanSR*sigmaSR-d1sigmaSR*(meanSR-refSR))/sigmaSR^2
+        return(d1Z)
+    }
+
+    get_d1Mu<-function(mean,index){
+        return(mean[index])
+    }
+
+    get_d1Sigma<-function(sigma,mean,weights,index){
+        return(get_dnMoments(mean,weights,2,1,index)/(2*sigma))
+    }
+
+    get_d1Skew<-function(d1Sigma,sigma,mean,weights,index,m3){
+        d1Skew = get_dnMoments(mean,weights,3,1,index)*sigma^3
+        d1Skew = d1Skew - 3*(sigma^2)*d1Sigma*m3
+        d1Skew = d1Skew/sigma^6
+        return(d1Skew)
+    }
+
+    get_d1Kurt<-function(d1Sigma,sigma,mean,weights,index,m4){
+        d1Kurt = get_dnMoments(mean,weights,4,1,index)*sigma^4
+        d1Kurt = d1Kurt - 4*(sigma^3)*d1Sigma*m4
+        d1Kurt = d1Kurt/sigma^8
+        return(d1Kurt)
+    }
+    #To get the differential of the moments
+    get_dnMoments<-function(mean,weights,mOrder,dOrder,index){
+        sum = 0
+        x0 = 1
+        for(i in 0:(dOrder-1)){
+            x0 = x0*(mOrder-i)
+        }
+        x_mat = as.matrix(na.omit(x))
+        for(i in 1:n){
+            x1 = 0
+            x2 = (x_mat[i,index]-mean[index])^dOrder
+            for(j in 1:columns){
+                x1 = x1 + weights[j]*(x_mat[i,j]-mean[j])
+            }
+            sum = sum + x2*x1^(mOrder-dOrder)
+        }
+        return(x0*sum/n)
+    }
+
+    #To get meanSR and sigmaSR
+    get_SR<-function(stats,n){
+        meanSR = stats[1]/stats[2]
+        sigmaSR = ((1-meanSR*stats[3]+(meanSR^2)*(stats[4]-1)/4)/(n-1))^0.5
+        SR<-list("meanSR"=meanSR,"sigmaSR"=sigmaSR)
+        return(SR)
+    }
+
+    weights = optimize()
+    result = matrix(weights,nrow = columns)
+    rownames(result) = columnnames
+    colnames(result) = "weight"
+    return(result)
+}
+
+
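When bounds is supplied, it is a matrix with one row per column of R and the
two columns giving the lower and upper weight limits, mirroring the default
construction above. A small usage sketch with hypothetical caps, assuming
PSRopt.R has been sourced, limiting each of the first four edhec strategies
to half the portfolio:

    data(edhec)
    # hypothetical bounds: one row per asset, columns are c(lower, upper)
    caps <- matrix(rep(c(0, 0.5), 4), nrow = 4, byrow = TRUE)
    PsrPortfolio(edhec[, 1:4], refSR = 0, bounds = caps)
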
Added: pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/ProbSharpeRatio.R
===================================================================
--- pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/ProbSharpeRatio.R	                        (rev 0)
+++ pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/ProbSharpeRatio.R	2013-06-24 18:48:28 UTC (rev 2417)
@@ -0,0 +1,85 @@
+#'@title Probabilistic Sharpe Ratio
+#'
+#'@description
+#'Given a predefined benchmark Sharpe ratio, the observed Sharpe Ratio
+#'can be expressed in probabilistic terms known as the Probabilistic Sharpe Ratio.
+#'PSR takes higher moments into account and delivers a corrected, atemporal
+#'measure of performance expressed in terms of probability of skill.
+#'
+#'@aliases ProbSharpeRatio
+#'
+#'@param R the return series
+#'@param Rf the risk free rate of return
+#'@param refSR the reference Sharpe Ratio
+#'@param p the confidence level
+#'@param weights the weights for the portfolio
+#'@param n the track record length (number of observations)
+#'@param sr Sharpe Ratio
+#'@param sk Skewness
+#'@param kr Kurtosis
+#'
+#'@references Bailey, David H. and Lopez de Prado, Marcos, \emph{The Sharpe Ratio
+#'Efficient Frontier} (July 1, 2012). Journal of Risk, Vol. 15, No. 2, Winter
+#' 2012/13
+#'
+#'@keywords ts multivariate distribution models
+#'
+#'@examples
+#'
+#'data(edhec)
+#'ProbSharpeRatio(edhec[,1],refSR = 0.28)
+#'ProbSharpeRatio(edhec,refSR = 0.28,Rf = 0.06)
+
+
+ProbSharpeRatio<-
+function(R = NULL, refSR,Rf=0,p = 0.95, weights = NULL,n = NULL,sr = NULL,sk = NULL, kr = NULL, ...){
+    columns = 1
+    columnnames = NULL
+    #Error handling if R is not NULL
+    if(!is.null(R)){
+        x = checkData(R)
+        columns = ncol(x)
+        n = nrow(x)
+        #Checking if the weights are provided or not
+        if(!is.null(weights)){
+            if(length(weights)!=columns){
+                stop("number of items in weights is not equal to the number of columns in R")
+            }
+            else{
+                # A portfolio is constructed by applying the weights
+                x = Return.portfolio(R,weights)
+                sr = SharpeRatio(x, Rf, p, "StdDev")
+                sk = skewness(x)
+                kr = kurtosis(x)
+            }
+        }
+        else{
+            sr = SharpeRatio(x, Rf, p, "StdDev")
+            sk = skewness(x)
+            kr = kurtosis(x)
+        }
+
+        columnnames = colnames(x)
+
+    }
+    # If R is NULL, check that the Sharpe ratio, skewness, kurtosis and n were passed
+    else{
+
+        if(is.null(sr) | is.null(sk) | is.null(kr) | is.null(n)){
+            stop("You must either pass R or the Sharpe ratio, Skewness, Kurtosis and n")
+        }
+    }
+    #If weights are not taken into account a message is displayed
+    if(is.null(weights)){
+        message("no weights passed, will calculate Probabilistic Sharpe Ratio for each column")
+    }
+
+    if(!is.null(dim(Rf)))
+        Rf = checkData(Rf)
+    result = pnorm(((sr - refSR)*(n-1)^(0.5))/(1-sr*sk+sr^2*(kr-1)/4)^(0.5))
+    if(!is.null(dim(result))){
+        colnames(result) = columnnames
+        rownames(result) = paste("Probabilistic Sharpe Ratio(p=",round(p*100,1),"%):")
+    }
+    return(result)
+
+}
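Because the function implements the closed form directly, its output is easy
to verify by hand. A sketch with made-up moments (not estimates from any data
set), assuming ProbSharpeRatio.R has been sourced:

    # hypothetical inputs, for illustration only
    sr <- 0.1; sk <- -1; kr <- 6; n <- 60
    pnorm(((sr - 0) * sqrt(n - 1)) / sqrt(1 - sr * sk + sr^2 * (kr - 1)/4))
    # should agree with
    ProbSharpeRatio(refSR = 0, sr = sr, sk = sk, kr = kr, n = n)
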
Added: pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/chart.PSR.R
===================================================================
--- pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/chart.PSR.R	                        (rev 0)
+++ pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/chart.PSR.R	2013-06-24 18:48:28 UTC (rev 2417)
@@ -0,0 +1,35 @@
+#'@title Probabilistic Sharpe Ratio
+#'@description
+#'Given a predefined benchmark Sharpe ratio, the observed Sharpe ratio
+#'can be expressed in probabilistic terms.
+#'
+#'@param x the return series
+#'@param Rf the risk free rate of return
+#'@param refSR the reference Sharpe Ratio
+#'@param p the confidence level
+#'@param weights the weights for the portfolio
+chart.PSR<-function(x,Rf,refSR,p=0.95,...){
+    for(column in 1:columns){
+      column.probsharpe <- psr(x[,column],Rf,p,refSR)
+      column.mintrack <- mintrl(x[,column],Rf,p,refSR)
+      if(column == 1){
+        probsharpe = column.probsharpe
+        mintrack = column.mintrack
+      }
+      else {
+        probsharpe = merge(probsharpe, column.probsharpe)
+        mintrack = merge(mintrack, column.mintrack)
+      }
+
+    }
+
+    probsharpe = rbind(probsharpe,mintrack)
+
+    colnames(probsharpe) = columnnames
+    probsharpe = reclass(probsharpe, x)
+    rownames(probsharpe)=c("PSR","MinTRL")
+    return(probsharpe)
+
+}

Added: pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/chart.SharpeEfficient.R
===================================================================
--- pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/chart.SharpeEfficient.R	                        (rev 0)
+++ pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/chart.SharpeEfficient.R	2013-06-24 18:48:28 UTC (rev 2417)
@@ -0,0 +1,5 @@
+chart.SharpeEfficientFrontier<-function(R){
+
+  x = checkData(R)
+  columns = ncol(x)
+  com

Added: pkg/PerformanceAnalytics/sandbox/pulkit/week1/vignette/ProbSharpe.Rnw
===================================================================
--- pkg/PerformanceAnalytics/sandbox/pulkit/week1/vignette/ProbSharpe.Rnw	                        (rev 0)
+++ pkg/PerformanceAnalytics/sandbox/pulkit/week1/vignette/ProbSharpe.Rnw	2013-06-24 18:48:28 UTC (rev 2417)
@@ -0,0 +1,100 @@
+\documentclass[12pt,letterpaper,english]{article}
+\usepackage{times}
+\usepackage[T1]{fontenc}
+\IfFileExists{url.sty}{\usepackage{url}}
+                      {\newcommand{\url}{\texttt}}
+
+\usepackage{babel}
+\usepackage{Rd}
+
+\usepackage{Sweave}
+\SweaveOpts{engine=R,eps = FALSE}
+%\VignetteIndexEntry{Probabilistic Sharpe Ratio}
+%\VignetteDepends{PerformanceAnalytics}
+%\VignetteKeywords{Probabilistic Sharpe Ratio,Minimum Track Record Length,risk,benchmark,portfolio}
+%\VignettePackage{PerformanceAnalytics}
+
+\begin{document}
+\SweaveOpts{concordance=TRUE}
+
+\title{ Probabilistic Sharpe Ratio }
+
+% \keywords{Probabilistic Sharpe Ratio,Minimum Track Record Length,risk,benchmark,portfolio}
+
+\makeatletter
+\makeatother
+\maketitle
+
+\begin{abstract}
+
+    This vignette gives an overview of the Probabilistic Sharpe Ratio, the Minimum Track Record Length and the Probabilistic Sharpe Ratio optimization technique used to find the optimal portfolio that maximizes the Probabilistic Sharpe Ratio. It gives an overview of the usability of the functions and their application.
+
+\end{abstract}
+
+<<echo=FALSE>>=
+library(PerformanceAnalytics)
+@
+
+
+<<echo=FALSE>>=
+source("/home/pulkit/workspace/GSOC/PerformanceAnalytics/sandbox/pulkit/ProbSharpeRatio.R")
+@
+
+<<echo=FALSE>>=
+source("/home/pulkit/workspace/GSOC/PerformanceAnalytics/sandbox/pulkit/MinTRL.R")
+@
+
+<<echo=FALSE>>=
+source("/home/pulkit/workspace/GSOC/PerformanceAnalytics/sandbox/pulkit/PSRopt.R")
+@
+
+\section{Probabilistic Sharpe Ratio}
+ Given a predefined benchmark Sharpe ratio $SR^\ast$, the observed Sharpe ratio $\hat{SR}$ can be expressed in probabilistic terms as
+
+ \deqn{\hat{PSR}(SR^\ast) = Z\biggl[\frac{(\hat{SR}-SR^\ast)\sqrt{n-1}}{\sqrt{1-\hat{\gamma_3}\hat{SR} + \frac{\hat{\gamma_4}-1}{4}\hat{SR}^2}}\biggr]}
+
+ Here $n$ is the track record length or the number of data points. It can be daily, weekly or yearly depending on the input given.
+
+ $\hat{\gamma{_3}}$ and $\hat{\gamma{_4}}$ are the skewness and kurtosis respectively.
+ It is not unusual to find strategies with irregular trading frequencies, such as weekly strategies that may not trade for a month. This poses a problem when computing an annualized Sharpe ratio, and there is no consensus as to how skill should be measured in the context of irregular bets. Because PSR measures skill in probabilistic terms, it is invariant to calendar conventions. All calculations are done in the original frequency
+of the data, and there is no annualization.
+
+<<>>=
+data(edhec)
+ProbSharpeRatio(edhec,refSR = 0.28)
+@
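+
+PSR can also be evaluated directly from a set of moments rather than a return
+series, which makes it easy to check the formula against hand-computed values.
+The moment values below are hypothetical and chosen only for illustration:
+
+<<>>=
+# hypothetical moments, for illustration only
+ProbSharpeRatio(refSR = 0, sr = 0.036, sk = -2.4, kr = 10.2, n = 1250)
+@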
+
+\section{Minimum Track Record Length}
+
+If a track record is shorter than the Minimum Track Record Length (MinTRL), we do
+not have enough confidence that the observed $\hat{SR}$ is above the designated threshold
+$SR^\ast$. The Minimum Track Record Length is given by the following expression.
+
+\deqn{MinTRL = n^\ast = 1+\biggl[1-\hat{\gamma_3}\hat{SR}+\frac{\hat{\gamma_4}-1}{4}\hat{SR}^2\biggr]\biggl(\frac{Z_\alpha}{\hat{SR}-SR^\ast}\biggr)^2}
+
+$\gamma{_3}$ and $\gamma{_4}$ are the skewness and kurtosis respectively. It is important to note that MinTRL is expressed in terms of number of observations, not annual or calendar terms.
+
+<<>>=
+data(edhec)
+MinTrackRecord(edhec,refSR = 0.28)
+@
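+
+Since MinTRL is reported in the same frequency as the input data, comparing it
+with the number of available observations shows directly whether each track
+record is long enough at the chosen confidence level:
+
+<<>>=
+# number of observations actually available
+nrow(edhec)
+@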
+
+\section{Probabilistic Sharpe Ratio Optimal Portfolio}
+
+We would like to find the vector of weights that maximizes the expression
+
+ \deqn{\hat{PSR}(SR^\ast) = Z\biggl[\frac{(\hat{SR}-SR^\ast)\sqrt{n-1}}{\sqrt{1-\hat{\gamma_3}\hat{SR} + \frac{\hat{\gamma_4}-1}{4}\hat{SR}^2}}\biggr]}
+
+where $\sigma = \sqrt{E[(r-\mu)^2]}$ is its standard deviation, $\gamma_3=\frac{E\biggl[(r-\mu)^3\biggr]}{\sigma^3}$ its skewness, $\gamma_4=\frac{E\biggl[(r-\mu)^4\biggr]}{\sigma^4}$ its kurtosis and $SR = \frac{\mu}{\sigma}$ its Sharpe Ratio.
+
+Because $\hat{PSR}(SR^\ast)=Z[\hat{Z^\ast}]$ is a monotonically increasing function of
+$\hat{Z^\ast}$, this optimal vector is invariant to the value adopted by the parameter $SR^\ast$.
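+
+Because of this invariance, the weights returned for different values of
+refSR should agree up to the tolerance of the gradient ascent. A quick check
+along these lines (not run here):
+
+<<eval=FALSE>>=
+# sketch: optimal weights should not depend on the benchmark refSR
+w0 <- PsrPortfolio(edhec, refSR = 0)
+w1 <- PsrPortfolio(edhec, refSR = 0.1)
+all.equal(w0, w1)
+@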
+ + +<<>>= +data(edhec) +PsrPortfolio(edhec) +@ + +\end{document} + Added: pkg/PerformanceAnalytics/sandbox/pulkit/week1/vignette/ProbSharpe.pdf =================================================================== (Binary files differ) Property changes on: pkg/PerformanceAnalytics/sandbox/pulkit/week1/vignette/ProbSharpe.pdf ___________________________________________________________________ Added: svn:mime-type + application/octet-stream From noreply at r-forge.r-project.org Mon Jun 24 21:12:11 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Mon, 24 Jun 2013 21:12:11 +0200 (CEST) Subject: [Returnanalytics-commits] r2418 - in pkg/PerformanceAnalytics/sandbox/pulkit/week1: code tests Message-ID: <20130624191211.982C818568C@r-forge.r-project.org> Author: pulkit Date: 2013-06-24 21:12:10 +0200 (Mon, 24 Jun 2013) New Revision: 2418 Added: pkg/PerformanceAnalytics/sandbox/pulkit/week1/tests/SEF.tests.R Modified: pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/MinTRL.R Log: Adding unit tests Modified: pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/MinTRL.R =================================================================== --- pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/MinTRL.R 2013-06-24 18:48:28 UTC (rev 2417) +++ pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/MinTRL.R 2013-06-24 19:12:10 UTC (rev 2418) @@ -63,7 +63,7 @@ } #If weights are not taken into account a message is displayed if(is.null(weights)){ - message("no weights passed,will calculate Probability Sharpe Ratio for each column") + message("no weights passed,will calculate Minimum Track Record Length for each column") } if(!is.null(dim(Rf))) Added: pkg/PerformanceAnalytics/sandbox/pulkit/week1/tests/SEF.tests.R =================================================================== --- pkg/PerformanceAnalytics/sandbox/pulkit/week1/tests/SEF.tests.R (rev 0) +++ pkg/PerformanceAnalytics/sandbox/pulkit/week1/tests/SEF.tests.R 2013-06-24 19:12:10 UTC (rev 2418) @@ -0,0 +1,11 @@ +library(RUnit) +library(PerformanceAnalytics) +data(edhec) + +test_PSR<-function(){ + checkEqualsNumeric(ProbSharpeRatio(edhec[,1],refSR = 0.28),0.6275225,tolerance = 1.0e-6) +} + +test_MinTRL<-function(){ + checkEqualsNumeric(MinTrackRecord(edhec[,1],refSR=0.28),3861.706,tolerance = 1.0e-3) +} From noreply at r-forge.r-project.org Mon Jun 24 21:50:17 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Mon, 24 Jun 2013 21:50:17 +0200 (CEST) Subject: [Returnanalytics-commits] r2419 - in pkg/FactorAnalytics: R man Message-ID: <20130624195017.BB030184E1E@r-forge.r-project.org> Author: chenyian Date: 2013-06-24 21:50:17 +0200 (Mon, 24 Jun 2013) New Revision: 2419 Added: pkg/FactorAnalytics/R/plot.TimeSeriesFactorModel.r pkg/FactorAnalytics/R/print.TimeSeriesFactorModel.r pkg/FactorAnalytics/R/summary.TimeSeriesFactorModel.r pkg/FactorAnalytics/man/plot.TimeSeriesFactorModel.Rd pkg/FactorAnalytics/man/print.TimeSeriesFactorModel.Rd pkg/FactorAnalytics/man/summary.TimeSeriesFactorModel.Rd pkg/FactorAnalytics/man/summary.TimeSeriesModel.Rd Modified: pkg/FactorAnalytics/R/fitFundamentalFactorModel.R pkg/FactorAnalytics/R/fitStatisticalFactorModel.R pkg/FactorAnalytics/R/fitTimeSeriesFactorModel.R pkg/FactorAnalytics/man/fitFundamentalFactorModel.Rd pkg/FactorAnalytics/man/fitStatisticalFactorModel.Rd pkg/FactorAnalytics/man/fitTimeseriesFactorModel.Rd Log: change "MacroFactorModel" to "TimeSeriesFactorModel" Modified: pkg/FactorAnalytics/R/fitFundamentalFactorModel.R 
=================================================================== --- pkg/FactorAnalytics/R/fitFundamentalFactorModel.R 2013-06-24 19:12:10 UTC (rev 2418) +++ pkg/FactorAnalytics/R/fitFundamentalFactorModel.R 2013-06-24 19:50:17 UTC (rev 2419) @@ -1,435 +1,399 @@ -#' fit fundamental factor model by classic OLS or Robust regression technique -#' -#' fit fundamental factor model or cross-sectional time series factor model by -#' classic OLS or Robust regression technique. Fundamental factor models use -#' observable asset specific characteristics (fundamentals) like industry -#' classification, market capitalization, style classification (value, growth) -#' etc. to determine the common risk factors. The function creates the class -#' "FundamentalFactorModel". -#' -#' The original function was designed by Doug Martin and originally implemented -#' in S-PLUS by a number of UW Ph.D. students:Christopher Green, Eric Aldrich, -#' and Yindeng Jiang. Guy Yullen re-implemented the function in R and requires -#' the following additional R libraries: zoo time series library, robust -#' Insightful robust library ported to R and robustbase Basic robust statistics -#' package for R -#' -#' @param fulldata data.frame, fulldata contains returns, dates, and exposures -#' which is stacked by dates. -#' @param timedates a vector of Dates specifying the date range for the model -#' fitting -#' @param exposures a character vector of exposure names for the factor model -#' @param assets a list of PERMNOs to be used for the factor model -#' @param wls logical flag, TRUE for weighted least squares, FALSE for ordinary -#' least squares -#' @param regression A character string, "robust" for regression via lmRob, -#' "classic" for regression with lm() -#' @param covariance A character string, "robust" for covariance matrix -#' computed with covRob(), "classic" for covariance matrix with ccov() -#' @param full.resid.cov logical flag, TRUE for full residual covariance matrix -#' calculation, FALSE for diagonal residual covarinace matrix -#' @param robust.scale logical flag, TRUE for exposure scaling via robust scale -#' and location, FALSE for scaling via mean and sd -#' @param returnsvar A character string giving the name of the return variable -#' in the data. -#' @param datevar A character string giving the name of the date variable in -#' the data. -#' @param assetvar A character string giving the name of the asset variable in -#' the data. -#' @param tickersvar A character string giving the name of the ticker variable -#' in the data. -#' @return an S3 object containing -#' @returnItem cov.returns A "list" object contains covariance information for -#' asset returns, includes covariance, mean and eigenvalus. -#' @returnItem cov.factor.rets An object of class "cov" or "covRob" which -#' contains the covariance matrix of the factor returns (including intercept). -#' @returnItem cov.resids An object of class "cov" or "covRob" which contains -#' the covariance matrix of the residuals, if "full.resid.cov" is TRUE. NULL -#' if "full.resid.cov" is FALSE. -#' @returnItem resid.vars A vector of variances estimated from the OLS -#' residuals for each asset. If "wls" is TRUE, these are the weights used in -#' the weighted least squares regressions. If "cov = robust" these values are -#' computed with "scale.tau". Otherwise they are computed with "var". -#' @returnItem factor.rets A "zoo" object containing the times series of -#' estimated factor returns and intercepts. 
-#' @returnItem resids A "zoo" object containing the time series of residuals -#' for each asset. -#' @returnItem tstats A "zoo" object containing the time series of t-statistics -#' for each exposure. -#' @returnItem returns.data A "data.frame" object containing the returns data -#' for the assets in the factor model, including RETURN, DATE,PERMNO. -#' @returnItem exposure.data A "data.frame" object containing the data for the -#' variables in the factor model, including DATE, PERMNO. -#' @returnItem assets A character vector of PERMNOs used in the model -#' @returnItem tickers A character vector of tickers used in the model -#' @returnItem call function call -#' @author Guy Yullen and Yi-An Chen -#' @examples -#' -#' \dontrun{ -#' # BARRA type factor model -#' data(stock) -#' # there are 447 assets -#' assets = unique(fulldata[,"PERMNO"]) -#' timedates = as.Date(unique(fulldata[,"DATE"])) -#' exposures <- exposures.names <- c("BOOK2MARKET", "LOG.MARKETCAP") -#' test.fit <- fitFundamentalFactorModel(fulldata=fulldata, timedates=timedates, exposures=exposures,covariance="classic", assets=assets,full.resid.cov=TRUE, -#' regression="classic",wls=TRUE) -#' names(test.fit) -#' test.fit$cov.returns -#' test.fit$cov.facrets -#' test.fit$facrets -#' -#' # BARRA type Industry Factor Model -#' exposures <- exposures.names <- c("GICS.SECTOR") -#' # the rest keep the same -#' test.fit <- fitFundamentalFactorModel(fulldata=fulldata, timedates=timedates, exposures=exposures, -#' covariance="classic", assets=assets,full.resid.cov=TRUE, -#' regression="classic",wls=TRUE) -#' } -#' -fitFundamentalFactorModel <- -function (fulldata, timedates, exposures, assets, wls = FALSE, regression = "classic", - covariance = "classic", full.resid.cov = TRUE, robust.scale = FALSE, - datevar = "DATE", assetvar = "PERMNO", returnsvar = "RETURN", - tickersvar = "TICKER.x") { - -require(zoo) -require(robust) -## input -## -## fulldata : data.frame. data stacked by dates -## timedates : a vector of Dates specifying the date range for the model -## fitting -## exposures : a character vector of exposure names for the factor model -## assets : a list of PERMNOs to be used for the factor model -## Optional parameters: -## wls : logical flag, TRUE for weighted least squares, FALSE for -## ordinary least squares -## regression : character string, "robust" for regression via lmRob, "classic" -## for regression via lm -## covariance : character string, "robust" for covariance matrix computed via -## covRob, "classic" for covariance matrix via ccov -## full.resid.cov : logical flag, TRUE for full residual covariance matrix -## calculation, FALSE for diagonal residual covarinace matrix -## robust.scale : logical flag, TRUE for exposure scaling via robust scale and -## location, FALSE for scaling via mean and sd -## datevar : character string giving the name of the date variable in the data. -## assetvar : character string giving the name of the asset variable in the data. -## returnsvar : character string giving the name of the return variable in the data. -## tickersvar : character string giving the name of the ticker variable in the data. 
- -## output -## -## cov.returns : covariance information for asset returns, includes -## covariance, mean, eigenvalus -## cov.factor.rets : covariance information for factor returns, includes -## covariance and mean -## cov.resids : covariance information for model residuals, includes -## covariance and mean -## resid.vars : list of information regarding model residuals variance -## factor.rets : zoo time series object of factor returns -## resids : zoo time series object of model residuals -## tstats : zoo time series object of model t-statistics -## returns.data : data.frame of return data including RETURN, DATE,PERMNO -## exposure.data : data.frame of exposure data including DATE, PERMNO -## assets : character vector of PERMNOs used in the model -## tickers : character vector of tickers used in the model -## call : function call - - # if (dim(dataArray)[1] < 2) - # stop("At least two time points, t and t-1, are needed for fitting the factor model.") - if (length(timedates) < 2) - stop("At least two time points, t and t-1, are needed for fitting the factor model.") - if (!is(exposures, "vector") || !is.character(exposures)) - stop("exposure argument invalid---must be character vector.") - if (!is(assets, "vector") || !is.character(assets)) - stop("assets argument invalid---must be character vector.") - - wls <- as.logical(wls) - full.resid.cov <- as.logical(full.resid.cov) - robust.scale <- as.logical(robust.scale) - - if (!match(regression, c("robust", "classic"), FALSE)) - stop("regression must one of 'robust', 'classic'.") - if (!match(covariance, c("robust", "classic"), FALSE)) - stop("covariance must one of 'robust', 'classic'.") - this.call <- match.call() - - if (match(returnsvar, exposures, FALSE)) - stop(paste(returnsvar, "cannot be used as an exposure.")) - - - numTimePoints <- length(timedates) - numExposures <- length(exposures) - numAssets <- length(assets) - tickers <- fulldata[1:numAssets,tickersvar] - # dim(fulldata) - # [1] 42912 117 - # dimnames(fulldata) - # PERMNO" "DATE" "RETURN" "TICKER.x" "BOOK2MARKET" "TICKER.y" - # check if exposures are numeric, if not, create exposures. 
factors by dummy variables - which.numeric <- sapply(fulldata[, exposures, drop = FALSE],is.numeric) - exposures.numeric <- exposures[which.numeric] - # industry factor model - exposures.factor <- exposures[!which.numeric] - if (length(exposures.factor) > 1) { - stop("Only one nonnumeric variable can be used at this time.") - } - - regression.formula <- paste("~", paste(exposures, collapse = "+")) - # "~ BOOK2MARKET" - if (length(exposures.factor)) { - regression.formula <- paste(regression.formula, "- 1") - fulldata[, exposures.factor] <- as.factor(fulldata[, - exposures.factor]) - exposuresToRecode <- names(fulldata[, exposures, drop = FALSE])[!which.numeric] - contrasts.list <- lapply(seq(length(exposuresToRecode)), - function(i) function(n, m) contr.treatment(n, contrasts = FALSE)) - names(contrasts.list) <- exposuresToRecode - } else { - contrasts.list <- NULL - } - # turn characters into formula - regression.formula <- eval(parse(text = paste(returnsvar,regression.formula))) - # RETURN ~ BOOK2MARKET - - ols.robust <- function(xdf, modelterms, conlist) { - if (length(exposures.factor)) { - zz <- xdf[[exposures.factor]] - xdf[[exposures.factor]] <- if (is.ordered(zz)) - ordered(zz, levels = sort(unique.default(zz))) - else factor(zz) - } - model <- lmRob(modelterms, data = xdf, contrasts = conlist, - control = lmRob.control(mxr = 200, mxf = 200, mxs = 200)) - sdest <- sqrt(diag(model$cov)) - names(sdest) <- names(model$coef) - coefnames <- names(model$coef) - alphaord <- order(coefnames) - model$coef <- model$coef[alphaord] - sdest <- sdest[alphaord] - c(length(model$coef), model$coef, model$coef/sdest, model$resid) - } - ols.classic <- function(xdf, modelterms, conlist) { - model <- try(lm(formula = modelterms, data = xdf, contrasts = conlist, - singular.ok = FALSE)) - if (is(model, "Error")) { - mess <- geterrmessage() - nn <- regexpr("computed fit is singular", mess) - if (nn > 0) { - cat("At time:", substring(mess, nn), "\n") - model <- lm(formula = modelterms, data = xdf, - contrasts = conlist, singular.ok = TRUE) - } else stop(mess) - } - tstat <- rep(NA, length(model$coef)) - tstat[!is.na(model$coef)] <- summary(model, cor = FALSE)$coef[,3] - alphaord <- order(names(model$coef)) - c(length(model$coef), model$coef[alphaord], tstat[alphaord], - model$resid) - } - wls.robust <- function(xdf, modelterms, conlist, w) { - assign("w", w, pos = 1) - if (length(exposures.factor)) { - zz <- xdf[[exposures.factor]] - xdf[[exposures.factor]] <- if (is.ordered(zz)) - ordered(zz, levels = sort(unique.default(zz))) - else factor(zz) - } - model <- lmRob(modelterms, data = xdf, weights = w, contrasts = conlist, - control = lmRob.control(mxr = 200, mxf = 200, mxs = 200)) - sdest <- sqrt(diag(model$cov)) - names(sdest) <- names(model$coef) - coefnames <- names(model$coef) - alphaord <- order(coefnames) - model$coef <- model$coef[alphaord] - sdest <- sdest[alphaord] - c(length(model$coef), model$coef, model$coef/sdest, model$resid) - } - wls.classic <- function(xdf, modelterms, conlist, w) { - assign("w", w, pos = 1) - model <- try(lm(formula = modelterms, data = xdf, contrasts = conlist, - weights = w, singular.ok = FALSE)) - if (is(model, "Error")) { - mess <- geterrmessage() - nn <- regexpr("computed fit is singular", mess) - if (nn > 0) { - cat("At time:", substring(mess, nn), "\n") - model <- lm(formula = modelterms, data = xdf, - contrasts = conlist, weights = w) - } - else stop(mess) - } - tstat <- rep(NA, length(model$coef)) - tstat[!is.na(model$coef)] <- summary(model, cor = FALSE)$coef[, 
- 3] - alphaord <- order(names(model$coef)) - c(length(model$coef), model$coef[alphaord], tstat[alphaord], - model$resid) - } - # FE.hat has T elements - # every element t contains - # 1. number of factors (intercept incl.) - # 2. estimated factors at time t - # 3. t value of estimated factors - # 4. residuals at time t - if (!wls) { - if (regression == "robust") { - # ols.robust - FE.hat <- by(data = fulldata, INDICES = as.numeric(fulldata[[datevar]]), - FUN = ols.robust, modelterms = regression.formula, - conlist = contrasts.list) - } else { - # ols.classic - FE.hat <- by(data = fulldata, INDICES = as.numeric(fulldata[[datevar]]), - FUN = ols.classic, modelterms = regression.formula, - conlist = contrasts.list) - } - } else { - if (regression == "robust") { - # wls.robust - E.hat <- by(data = fulldata, INDICES = as.numeric(fulldata[[datevar]]), - FUN = function(xdf, modelterms, conlist) { - lmRob(modelterms, data = xdf, contrasts = conlist, - control = lmRob.control(mxr = 200, mxf = 200, - mxs = 200))$resid - }, modelterms = regression.formula, conlist = contrasts.list) - E.hat <- apply(E.hat, 1, unlist) - weights <- if (covariance == "robust") - apply(E.hat, 1, scaleTau2)^2 - else apply(E.hat, 1, var) - FE.hat <- by(data = fulldata, INDICES = as.numeric(fulldata[[datevar]]), - FUN = wls.robust, modelterms = regression.formula, - conlist = contrasts.list, w = weights) - } else { - # wls.classic - E.hat <- by(data = fulldata, INDICES = as.numeric(fulldata[[datevar]]), - FUN = function(xdf, modelterms, conlist) { - lm(formula = modelterms, data = xdf, contrasts = conlist, - singular.ok = TRUE)$resid - }, - modelterms = regression.formula, conlist = contrasts.list) - E.hat <- apply(E.hat, 1, unlist) - weights <- if (covariance == "robust") - apply(E.hat, 1, scaleTau2)^2 - else apply(E.hat, 1, var) - FE.hat <- by(data = fulldata, INDICES = as.numeric(fulldata[[datevar]]), - FUN = wls.classic, modelterms = regression.formula, - conlist = contrasts.list, w = weights) - } - } - # if there is industry dummy variables - if (length(exposures.factor)) { - numCoefs <- length(exposures.numeric) + length(levels(fulldata[, - exposures.factor])) - ncols <- 1 + 2 * numCoefs + numAssets - fnames <- c(exposures.numeric, paste(exposures.factor, - levels(fulldata[, exposures.factor]), sep = "")) - cnames <- c("numCoefs", fnames, paste("t", fnames, sep = "."), - assets) - } else { - numCoefs <- 1 + length(exposures.numeric) - ncols <- 1 + 2 * numCoefs + numAssets - cnames <- c("numCoefs", "(Intercept)", exposures.numeric, - paste("t", c("(Intercept)", exposures.numeric), sep = "."), - assets) - } - FE.hat.mat <- matrix(NA, ncol = ncols, nrow = numTimePoints, - dimnames = list(as.character(as.Date(as.numeric(names(FE.hat)), origin = "1970-01-01")), - cnames)) - # give each element t names and PERMNO - for (i in 1:length(FE.hat)) { - names(FE.hat[[i]])[1] <- "numCoefs" - nc <- FE.hat[[i]][1] - names(FE.hat[[i]])[(2 + nc):(1 + 2 * nc)] <- paste("t", - names(FE.hat[[i]])[2:(1 + nc)], sep = ".") - if (length(FE.hat[[i]]) != (1 + 2 * nc + numAssets)) - stop(paste("bad count in row", i, "of FE.hat")) - names(FE.hat[[i]])[(2 + 2 * nc):(1 + 2 * nc + numAssets)] <- assets - idx <- match(names(FE.hat[[i]]), colnames(FE.hat.mat)) - FE.hat.mat[i, idx] <- FE.hat[[i]] - } - # give back the names of timedates - timedates <- as.Date(as.numeric(dimnames(FE.hat)[[1]]), origin = "1970-01-01") - coefs.names <- colnames(FE.hat.mat)[2:(1 + numCoefs)] - # estimated factors ordered by time - f.hat <- zoo(x = FE.hat.mat[, 2:(1 + 
numCoefs)], order.by = timedates) - # check for outlier - gomat <- apply(coredata(f.hat), 2, function(x) abs(x - median(x, - na.rm = TRUE)) > 4 * mad(x, na.rm = TRUE)) - if (any(gomat, na.rm = TRUE) ) { - cat("\n\n*** Possible outliers found in the factor returns:\n\n") - for (i in which(apply(gomat, 1, any, na.rm = TRUE))) print(f.hat[i, - gomat[i, ], drop = FALSE]) - } - tstats <- zoo(x = FE.hat.mat[, (2 + nc):(1 + 2 * nc)], order.by = timedates) - # residuals for every asset ordered by time - E.hat <- zoo(x = FE.hat.mat[, (2 + 2 * numCoefs):(1 + 2 * - numCoefs + numAssets)], order.by = timedates) - colnames(E.hat) <- tickers - if (covariance == "robust") { - if (kappa(na.exclude(coredata(f.hat))) < 1e+10) { - Cov.facrets <- covRob(coredata(f.hat), estim = "pairwiseGK", - distance = FALSE, na.action = na.omit) - } else { - cat("Covariance matrix of factor returns is singular.\n") - Cov.facrets <- covRob(coredata(f.hat), distance = FALSE, - na.action = na.omit) - } - resid.vars <- apply(coredata(E.hat), 2, scaleTau2, na.rm = T)^2 - D.hat <- if (full.resid.cov) - covOGK(coredata(E.hat), sigmamu = scaleTau2, n.iter = 1) - else - diag(resid.vars) - } else { - Cov.facrets <- ccov(coredata(f.hat), distance = FALSE,na.action = na.omit) - resid.vars <- apply(coredata(E.hat), 2, var, na.rm = TRUE) - D.hat <- if (full.resid.cov) - ccov(coredata(E.hat), distance = FALSE, na.action = na.omit) - else - diag(resid.vars) - } - # create betas from origial database - B.final <- matrix(0, nrow = numAssets, ncol = numCoefs) - colnames <- coefs.names - B.final[, match("(Intercept)", colnames, 0)] <- 1 - numeric.columns <- match(exposures.numeric, colnames, 0) - B.final[, numeric.columns] <- as.matrix(fulldata[as.numeric(fulldata[[datevar]]) == - timedates[numTimePoints], exposures.numeric]) - if (length(exposures.factor)) - B.final[, grep(exposures.factor, x = colnames)][cbind(seq(numAssets), - as.numeric(fulldata[fulldata[[datevar]] == timedates[numTimePoints], - exposures.factor]))] <- 1 - cov.returns <- B.final %*% Cov.facrets$cov %*% t(B.final) + - if (full.resid.cov) - D.hat$cov - else D.hat - dimnames(cov.returns) <- list(tickers, tickers) - mean.cov.returns = tapply(fulldata[[returnsvar]],fulldata[[assetvar]], mean) - dimnames(mean.cov.returns) = list(tickers) - Cov.returns <- list(cov = cov.returns, mean=mean.cov.returns, eigenvalues = eigen(cov.returns, - only.values = TRUE, symmetric = TRUE)$values) - if (full.resid.cov) { - Cov.resids <- D.hat - dimnames(Cov.resids$cov) <- list(tickers, tickers) - } - else { - Cov.resids <- NULL - } - output <- list(cov.returns = Cov.returns, - cov.factor.rets = Cov.facrets, - cov.resids = Cov.resids, - resid.vars = resid.vars, - factor.rets = f.hat, - resids = E.hat, - tstats = tstats, - returns.data = fulldata[,c(datevar, assetvar, returnsvar)], - exposure.data = fulldata[,c(datevar, assetvar, exposures)], - assets = assets, - tickers = tickers, - call = this.call) - class(output) <- "FundamentalFactorModel" - return(output) -} - +#' fit fundamental factor model by classic OLS or Robust regression technique +#' +#' fit fundamental factor model or cross-sectional time series factor model by +#' classic OLS or Robust regression technique. Fundamental factor models use +#' observable asset specific characteristics (fundamentals) like industry +#' classification, market capitalization, style classification (value, growth) +#' etc. to determine the common risk factors. The function creates the class +#' "FundamentalFactorModel". 
+#' +#' The original function was designed by Doug Martin and originally implemented +#' in S-PLUS by a number of UW Ph.D. students:Christopher Green, Eric Aldrich, +#' and Yindeng Jiang. Guy Yullen re-implemented the function in R and requires +#' the following additional R libraries: zoo time series library, robust +#' Insightful robust library ported to R and robustbase Basic robust statistics +#' package for R +#' +#' @param fulldata data.frame, fulldata contains returns, dates, and exposures +#' which is stacked by dates. +#' @param timedates a vector of Dates specifying the date range for the model +#' fitting +#' @param exposures a character vector of exposure names for the factor model +#' @param assets a list of PERMNOs to be used for the factor model +#' @param wls logical flag, TRUE for weighted least squares, FALSE for ordinary +#' least squares +#' @param regression A character string, "robust" for regression via lmRob, +#' "classic" for regression with lm() +#' @param covariance A character string, "robust" for covariance matrix +#' computed with covRob(), "classic" for covariance matrix with covClassic() in +#' robust package. +#' @param full.resid.cov logical flag, TRUE for full residual covariance matrix +#' calculation, FALSE for diagonal residual covarinace matrix +#' @param robust.scale logical flag, TRUE for exposure scaling via robust scale +#' and location, FALSE for scaling via mean and sd +#' @param returnsvar A character string giving the name of the return variable +#' in the data. +#' @param datevar A character string giving the name of the date variable in +#' the data. +#' @param assetvar A character string giving the name of the asset variable in +#' the data. +#' @param tickersvar A character string giving the name of the ticker variable +#' in the data. +#' @return an S3 object containing +#' \itemize{ +#' \item cov.returns A "list" object contains covariance information for +#' asset returns, includes covariance, mean and eigenvalus. +#' \item cov.factor.rets An object of class "cov" or "covRob" which +#' contains the covariance matrix of the factor returns (including intercept). +#' \item cov.resids An object of class "cov" or "covRob" which contains +#' the covariance matrix of the residuals, if "full.resid.cov" is TRUE. NULL +#' if "full.resid.cov" is FALSE. +#' \item resid.varianceb A vector of variances estimated from the OLS +#' residuals for each asset. If "wls" is TRUE, these are the weights used in +#' the weighted least squares regressions. If "cov = robust" these values are +#' computed with "scale.tau". Otherwise they are computed with "var". +#' \item factor.rets A "zoo" object containing the times series of +#' estimated factor returns and intercepts. +#' \item resids A "zoo" object containing the time series of residuals +#' for each asset. +#' \item tstats A "zoo" object containing the time series of t-statistics +#' for each exposure. +#' \item returns.data A "data.frame" object containing the returns data +#' for the assets in the factor model, including RETURN, DATE,PERMNO. +#' \item exposure.data A "data.frame" object containing the data for the +#' variables in the factor model, including DATE, PERMNO. 
+#' \item assets A character vector of PERMNOs used in the model +#' \item tickers A character vector of tickers used in the model +#' \item call function call +#' } +#' @author Guy Yullen and Yi-An Chen +#' @examples +#' +#' \dontrun{ +#' # BARRA type factor model +#' data(stock) +#' # there are 447 assets +#' assets = unique(fulldata[,"PERMNO"]) +#' timedates = as.Date(unique(fulldata[,"DATE"])) +#' exposures <- exposures.names <- c("BOOK2MARKET", "LOG.MARKETCAP") +#' test.fit <- fitFundamentalFactorModel(fulldata=fulldata, timedates=timedates, exposures=exposures,covariance="classic", assets=assets,full.resid.cov=TRUE, +#' regression="classic",wls=TRUE) +#' names(test.fit) +#' test.fit$cov.returns +#' test.fit$cov.factor.rets +#' test.fit$factor.rets +#' +#' # BARRA type Industry Factor Model +#' exposures <- exposures.names <- c("GICS.SECTOR") +#' # the rest keep the same +#' test.fit <- fitFundamentalFactorModel(fulldata=fulldata, timedates=timedates, exposures=exposures, +#' covariance="classic", assets=assets,full.resid.cov=TRUE, +#' regression="classic",wls=TRUE) +#' } +#' +fitFundamentalFactorModel <- +function (fulldata, timedates, exposures, assets, wls = FALSE, regression = "classic", + covariance = "classic", full.resid.cov = TRUE, robust.scale = FALSE, + datevar = "DATE", assetvar = "PERMNO", returnsvar = "RETURN", + tickersvar = "TICKER.x") { + +require(zoo) +require(robust) + + + # if (dim(dataArray)[1] < 2) + # stop("At least two time points, t and t-1, are needed for fitting the factor model.") + if (length(timedates) < 2) + stop("At least two time points, t and t-1, are needed for fitting the factor model.") + if (!is(exposures, "vector") || !is.character(exposures)) + stop("exposure argument invalid---must be character vector.") + if (!is(assets, "vector") || !is.character(assets)) + stop("assets argument invalid---must be character vector.") + + wls <- as.logical(wls) + full.resid.cov <- as.logical(full.resid.cov) + robust.scale <- as.logical(robust.scale) + + if (!match(regression, c("robust", "classic"), FALSE)) + stop("regression must one of 'robust', 'classic'.") + if (!match(covariance, c("robust", "classic"), FALSE)) + stop("covariance must one of 'robust', 'classic'.") + this.call <- match.call() + + if (match(returnsvar, exposures, FALSE)) + stop(paste(returnsvar, "cannot be used as an exposure.")) + + + numTimePoints <- length(timedates) + numExposures <- length(exposures) + numAssets <- length(assets) + tickers <- fulldata[1:numAssets,tickersvar] + # dim(fulldata) + # [1] 42912 117 + # dimnames(fulldata) + # PERMNO" "DATE" "RETURN" "TICKER.x" "BOOK2MARKET" "TICKER.y" + # check if exposures are numeric, if not, create exposures. 
factors by dummy variables + which.numeric <- sapply(fulldata[, exposures, drop = FALSE],is.numeric) + exposures.numeric <- exposures[which.numeric] + # industry factor model + exposures.factor <- exposures[!which.numeric] + if (length(exposures.factor) > 1) { + stop("Only one nonnumeric variable can be used at this time.") + } + + regression.formula <- paste("~", paste(exposures, collapse = "+")) + # "~ BOOK2MARKET" + if (length(exposures.factor)) { + regression.formula <- paste(regression.formula, "- 1") + fulldata[, exposures.factor] <- as.factor(fulldata[, + exposures.factor]) + exposuresToRecode <- names(fulldata[, exposures, drop = FALSE])[!which.numeric] + contrasts.list <- lapply(seq(length(exposuresToRecode)), + function(i) function(n, m) contr.treatment(n, contrasts = FALSE)) + names(contrasts.list) <- exposuresToRecode + } else { + contrasts.list <- NULL + } + # turn characters into formula + regression.formula <- eval(parse(text = paste(returnsvar,regression.formula))) + # RETURN ~ BOOK2MARKET + + ols.robust <- function(xdf, modelterms, conlist) { + if (length(exposures.factor)) { + zz <- xdf[[exposures.factor]] + xdf[[exposures.factor]] <- if (is.ordered(zz)) + ordered(zz, levels = sort(unique.default(zz))) + else factor(zz) + } + model <- lmRob(modelterms, data = xdf, contrasts = conlist, + control = lmRob.control(mxr = 200, mxf = 200, mxs = 200)) + sdest <- sqrt(diag(model$cov)) + names(sdest) <- names(model$coef) + coefnames <- names(model$coef) + alphaord <- order(coefnames) + model$coef <- model$coef[alphaord] + sdest <- sdest[alphaord] + c(length(model$coef), model$coef, model$coef/sdest, model$resid) + } + ols.classic <- function(xdf, modelterms, conlist) { + model <- try(lm(formula = modelterms, data = xdf, contrasts = conlist, + singular.ok = FALSE)) + if (is(model, "Error")) { + mess <- geterrmessage() + nn <- regexpr("computed fit is singular", mess) + if (nn > 0) { + cat("At time:", substring(mess, nn), "\n") + model <- lm(formula = modelterms, data = xdf, + contrasts = conlist, singular.ok = TRUE) + } else stop(mess) + } + tstat <- rep(NA, length(model$coef)) + tstat[!is.na(model$coef)] <- summary(model, cor = FALSE)$coef[,3] + alphaord <- order(names(model$coef)) + c(length(model$coef), model$coef[alphaord], tstat[alphaord], + model$resid) + } + wls.robust <- function(xdf, modelterms, conlist, w) { + assign("w", w, pos = 1) + if (length(exposures.factor)) { + zz <- xdf[[exposures.factor]] + xdf[[exposures.factor]] <- if (is.ordered(zz)) + ordered(zz, levels = sort(unique.default(zz))) + else factor(zz) + } + model <- lmRob(modelterms, data = xdf, weights = w, contrasts = conlist, + control = lmRob.control(mxr = 200, mxf = 200, mxs = 200)) + sdest <- sqrt(diag(model$cov)) + names(sdest) <- names(model$coef) + coefnames <- names(model$coef) + alphaord <- order(coefnames) + model$coef <- model$coef[alphaord] + sdest <- sdest[alphaord] + c(length(model$coef), model$coef, model$coef/sdest, model$resid) + } + wls.classic <- function(xdf, modelterms, conlist, w) { + assign("w", w, pos = 1) + model <- try(lm(formula = modelterms, data = xdf, contrasts = conlist, + weights = w, singular.ok = FALSE)) + if (is(model, "Error")) { + mess <- geterrmessage() + nn <- regexpr("computed fit is singular", mess) + if (nn > 0) { + cat("At time:", substring(mess, nn), "\n") + model <- lm(formula = modelterms, data = xdf, + contrasts = conlist, weights = w) + } + else stop(mess) + } + tstat <- rep(NA, length(model$coef)) [TRUNCATED] To get the complete diff run: svnlook diff 
/svnroot/returnanalytics -r 2419 From noreply at r-forge.r-project.org Mon Jun 24 21:54:05 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Mon, 24 Jun 2013 21:54:05 +0200 (CEST) Subject: [Returnanalytics-commits] r2420 - pkg/FactorAnalytics/man Message-ID: <20130624195406.17138184E1E@r-forge.r-project.org> Author: chenyian Date: 2013-06-24 21:54:05 +0200 (Mon, 24 Jun 2013) New Revision: 2420 Removed: pkg/FactorAnalytics/man/plot.MacroFactorModel.Rd pkg/FactorAnalytics/man/print.MacroFactorModel.Rd pkg/FactorAnalytics/man/summary.MacroFactorModel.Rd pkg/FactorAnalytics/man/summary.TimeSeriesModel.Rd Modified: pkg/FactorAnalytics/man/ Log: delete MacroFactorModel method files Property changes on: pkg/FactorAnalytics/man ___________________________________________________________________ Modified: svn:ignore - covEWMA.Rd + covEWMA.Rd plot.MacroFactorModel.Rd print.MacroFactorModel.Rd summary.MacroFactorModel.Rd summary.TimeSeriesModel.Rd Deleted: pkg/FactorAnalytics/man/plot.MacroFactorModel.Rd =================================================================== --- pkg/FactorAnalytics/man/plot.MacroFactorModel.Rd 2013-06-24 19:50:17 UTC (rev 2419) +++ pkg/FactorAnalytics/man/plot.MacroFactorModel.Rd 2013-06-24 19:54:05 UTC (rev 2420) @@ -1,71 +0,0 @@ -\name{plot.MacroFactorModel} -\alias{plot.MacroFactorModel} -\title{plot MacrofactorModel object.} -\usage{ - plot.MacroFactorModel(fit.macro, colorset = c(1:12), - legend.loc = NULL, - which.plot = c("none", "1L", "2L", "3L", "4L", "5L", "6L", "7L"), - max.show = 6, plot.single = FALSE, fundName, - which.plot.single = c("none", "1L", "2L", "3L", "4L", "5L", "6L", "7L", "8L", "9L", "10L", "11L", "12L", "13L")) -} -\arguments{ - \item{fit.macro}{fit object created by - fitMacroeconomicFactorModel.} - - \item{colorset}{Defualt colorset is c(1:12).} - - \item{legend.loc}{plot legend or not. Defualt is - \code{NULL}.} - - \item{which.plot}{integer indicating which plot to - create: "none" will create a menu to choose. Defualt is - none. 1 = "Fitted factor returns", 2 = "R square", 3 = - "Variance of Residuals", 4 = "FM Correlation", 5 = - "Factor Contributions to SD", 6 = "Factor Contributions - to ES", 7 = "Factor Contributions to VaR"} - - \item{max.show}{Maximum assets to plot. Default is 6.} - - \item{plot.single}{Plot a single asset of lm class. - Defualt is FALSE.} - - \item{fundName}{Name of the asset to be plotted.} - - \item{which.plot.single}{integer indicating which plot to - create: "none" will create a menu to choose. Defualt is - none. 1 = time series plot of actual and fitted values 2 - = time series plot of residuals with standard error bands - 3 = time series plot of squared residuals 4 = time series - plot of absolute residuals 5 = SACF and PACF of residuals - 6 = SACF and PACF of squared residuals 7 = SACF and PACF - of absolute residuals 8 = histogram of residuals with - normal curve overlayed 9 = normal qq-plot of residuals - 10= CUSUM plot of recursive residuals 11= CUSUM plot of - OLS residuals 12= CUSUM plot of recursive estimates - relative to full sample estimates 13= rolling estimates - over 24 month window} -} -\description{ - Generic function of plot method for - fitMacroeconomicFactorModel. Either plot all fit models - or choose a single asset to plot. 
-} -\examples{ -\dontrun{ -# load data from the database -data(managers.df) -ret.assets = managers.df[,(1:6)] -factors = managers.df[,(7:9)] -# fit the factor model with OLS -fit <- fitMacroeconomicFactorModel(ret.assets,factors,fit.method="OLS", - variable.selection="all subsets") -# plot of all assets and show only first 4 assets. -plot(fit.macro,max.show=4) -# single plot of HAM1 asset -plot(fit.macro, plot.single=TRUE, fundName="HAM1") -} -} -\author{ - Eric Zivot and Yi-An Chen. -} - Deleted: pkg/FactorAnalytics/man/print.MacroFactorModel.Rd =================================================================== --- pkg/FactorAnalytics/man/print.MacroFactorModel.Rd 2013-06-24 19:50:17 UTC (rev 2419) +++ pkg/FactorAnalytics/man/print.MacroFactorModel.Rd 2013-06-24 19:54:05 UTC (rev 2420) @@ -1,28 +0,0 @@ -\name{print.MacroFactorModel} -\alias{print.MacroFactorModel} -\title{print MacrofactorModel object} -\usage{ - print.MacroFactorModel(fit.macro) -} -\arguments{ - \item{fit.macro}{fit object created by - fitMacroeconomicFactorModel.} -} -\description{ - Generic function of print method for - fitMacroeconomicFactorModel. -} -\examples{ -# load data from the database -data(managers.df) -ret.assets = managers.df[,(1:6)] -factors = managers.df[,(7:9)] -# fit the factor model with OLS -fit.macro <- fitMacroeconomicFactorModel(ret.assets,factors,fit.method="OLS", - variable.selection="all subsets") -print(fit.macro) -} -\author{ - Eric Zivot and Yi-An Chen. -} - Deleted: pkg/FactorAnalytics/man/summary.MacroFactorModel.Rd =================================================================== --- pkg/FactorAnalytics/man/summary.MacroFactorModel.Rd 2013-06-24 19:50:17 UTC (rev 2419) +++ pkg/FactorAnalytics/man/summary.MacroFactorModel.Rd 2013-06-24 19:54:05 UTC (rev 2420) @@ -1,28 +0,0 @@ -\name{summary.MacroFactorModel} -\alias{summary.MacroFactorModel} -\title{summary MacrofactorModel object.} -\usage{ - summary.MacroFactorModel(fit.macro) -} -\arguments{ - \item{fit.macro}{fit object created by - fitMacroeconomicFactorModel.} -} -\description{ - Generic function of summary method for - fitMacroeconomicFactorModel. -} -\examples{ -# load data from the database -data(managers.df) -ret.assets = managers.df[,(1:6)] -factors = managers.df[,(7:9)] -# fit the factor model with OLS -fit.macro <- fitMacroeconomicFactorModel(ret.assets,factors,fit.method="OLS", - variable.selection="all subsets") -summary(fit.macro) -} -\author{ - Eric Zivot and Yi-An Chen. -} - Deleted: pkg/FactorAnalytics/man/summary.TimeSeriesModel.Rd =================================================================== --- pkg/FactorAnalytics/man/summary.TimeSeriesModel.Rd 2013-06-24 19:50:17 UTC (rev 2419) +++ pkg/FactorAnalytics/man/summary.TimeSeriesModel.Rd 2013-06-24 19:54:05 UTC (rev 2420) @@ -1,28 +0,0 @@ -\name{summary.TimeSeriesModel} -\alias{summary.TimeSeriesModel} -\title{summary TimeSeriesModel object.} -\usage{ - summary.TimeSeriesModel(fit.macro) -} -\arguments{ - \item{fit.macro}{fit object created by - fitMacroeconomicFactorModel.} -} -\description{ - Generic function of summary method for - fitMacroeconomicFactorModel. -} -\examples{ -# load data from the database -data(managers.df) -ret.assets = managers.df[,(1:6)] -factors = managers.df[,(7:9)] -# fit the factor model with OLS -fit.macro <- fitMacroeconomicFactorModel(ret.assets,factors,fit.method="OLS", - variable.selection="all subsets") -summary(fit.macro) -} -\author{ - Eric Zivot and Yi-An Chen. 
-} - From noreply at r-forge.r-project.org Mon Jun 24 23:14:39 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Mon, 24 Jun 2013 23:14:39 +0200 (CEST) Subject: [Returnanalytics-commits] r2421 - in pkg/Meucci: . R demo Message-ID: <20130624211439.ADD651858E1@r-forge.r-project.org> Author: xavierv Date: 2013-06-24 23:14:39 +0200 (Mon, 24 Jun 2013) New Revision: 2421 Added: pkg/Meucci/demo/S_OrderStatisticsPdfLognormal.R pkg/Meucci/demo/S_OrderStatisticsPdfStudentT.R pkg/Meucci/demo/S_WishartCorrelation.R Modified: pkg/Meucci/DESCRIPTION pkg/Meucci/R/logToArithmeticCovariance.R pkg/Meucci/demo/S_BivariateSample.R pkg/Meucci/demo/S_FxCopulaMarginal.R pkg/Meucci/demo/S_LognormalSample.R Log: - added three new demo files from chapter 2 and error fixing Modified: pkg/Meucci/DESCRIPTION =================================================================== --- pkg/Meucci/DESCRIPTION 2013-06-24 19:54:05 UTC (rev 2420) +++ pkg/Meucci/DESCRIPTION 2013-06-24 21:14:39 UTC (rev 2421) @@ -43,7 +43,9 @@ nloptr, ggplot2, expm, - latticeExtra + latticeExtra, + scatterplot3d, + psych License: GPL URL: http://r-forge.r-project.org/projects/returnanalytics/ Copyright: (c) 2012 Modified: pkg/Meucci/R/logToArithmeticCovariance.R =================================================================== --- pkg/Meucci/R/logToArithmeticCovariance.R 2013-06-24 19:54:05 UTC (rev 2420) +++ pkg/Meucci/R/logToArithmeticCovariance.R 2013-06-24 21:14:39 UTC (rev 2421) @@ -14,6 +14,7 @@ #' # formula (7) and (8) on page 5 of Appendix to "Meucci - A Common Pitfall in Mean-Variance Estimation" #' \url{http://www.wilmott.com/pdfs/011119_meucci.pdf} #' @export + linreturn <- function( mu , sigma ) { # each element of M represents the linear returns for the corresponding log-returns element in mu Modified: pkg/Meucci/demo/S_BivariateSample.R =================================================================== --- pkg/Meucci/demo/S_BivariateSample.R 2013-06-24 19:54:05 UTC (rev 2420) +++ pkg/Meucci/demo/S_BivariateSample.R 2013-06-24 21:14:39 UTC (rev 2421) @@ -56,7 +56,7 @@ # 3d histograms NumBins2D = round(sqrt(100 * log(nSim))); -Z_3 = table( cut (Z_1, NumBins2D ), cut ( Z_2, cloud )); +Z_3 = table( cut (Z_1, NumBins2D ), cut ( Z_2, NumBins2D)); cloud( Z_3, panel.3d.cloud = panel.3dbars, scales = list( arrows = FALSE, just = "right" ), xlab = "normal 1", ylab = "normal 2", zlab="", main = "pdf normal" ); Modified: pkg/Meucci/demo/S_FxCopulaMarginal.R =================================================================== --- pkg/Meucci/demo/S_FxCopulaMarginal.R 2013-06-24 19:54:05 UTC (rev 2420) +++ pkg/Meucci/demo/S_FxCopulaMarginal.R 2013-06-24 21:14:39 UTC (rev 2421) @@ -48,12 +48,15 @@ # marginals NumBins = round(10 * log(NumObs)); +layout( matrix(c(1,2,2,1,2,2,0,3,3), 3, 3, byrow = TRUE), heights=c(1,2,1)); -layout( matrix(c(1,2,3), 3, 1, byrow = TRUE), heights=c(1,2,1)); -hist( X[ , Display[ 2 ] ], NumBins, xlab = db_FX$Fields[[ Display[ 2 ] + 1 ]], ylab = "", main = ""); +#hist( X[ , Display[ 2 ] ], NumBins, xlab = db_FX$Fields[[ Display[ 2 ] + 1 ]], ylab = "", main = ""); +barplot( table( cut( X[ , Display[ 2 ] ], NumBins )), horiz=TRUE, yaxt="n") +axis( 2, at = seq(0, 100, 20), labels = seq( 0, 1, 0.2 ) ); + # scatter plot plot( Copula[ , Display[ 1 ] ], Copula[ , Display[ 2 ] ], main = "Copula", xlab = db_FX$Fields[[ Display[ 2 ] + 1 ]], ylab = db_FX$Fields[[ Display[ 1 ] + 1 ]] ); Modified: pkg/Meucci/demo/S_LognormalSample.R =================================================================== 
--- pkg/Meucci/demo/S_LognormalSample.R 2013-06-24 19:54:05 UTC (rev 2420) +++ pkg/Meucci/demo/S_LognormalSample.R 2013-06-24 21:14:39 UTC (rev 2421) @@ -8,8 +8,8 @@ #' @author Xavier Valls \email{flamejat@@gmail.com} #' @export +source("../R/LognormalMoments2Parameters.R"); - ################################################################################################################## ### Input parameters Added: pkg/Meucci/demo/S_OrderStatisticsPdfLognormal.R =================================================================== --- pkg/Meucci/demo/S_OrderStatisticsPdfLognormal.R (rev 0) +++ pkg/Meucci/demo/S_OrderStatisticsPdfLognormal.R 2013-06-24 21:14:39 UTC (rev 2421) @@ -0,0 +1,40 @@ +library(scatterplot3d); + +#' This script script shows that the pdf of the r-th order statistics of a lognormal random variable, +#' as described in A. Meucci, "Risk and Asset Allocation", Springer, 2005, Chapter 2. +#' +#' @references +#' \url{http://} +#' See Meucci's script for "S_OrderStatisticsPdfLognormal.m" +#' +#' @author Xavier Valls \email{flamejat@@gmail.com} +#' @export + +################################################################################################################# +### Input + +mu = 0.2; +s = 0.25; +T = 70; + +################################################################################################################# +### Pdf of r-th order statistic concentrated around the r/T quantile + +rs = 1 : T; +x = seq( 0 , 2.5 * exp(mu + s * s / 2), 0.01 ); + +F = plnorm( x, mu, s ); +f = dlnorm( x, mu, s ); + +#matrix to plot + +a = scatterplot3d( 0, 0 , 0, xlim=c(0,4), ylim=c(0,1), zlim=c(0,10), xlab = "x", ylab = "r/T", zlab = "pdf" ); + +for ( n in 1 : length( rs ) ) +{ + r = rs[ n ]; + pdf_rT = gamma( T + 1 ) / ( gamma( r ) * gamma( T - r + 1 )) * ( F ^ (r - 1) ) * (( 1 - F ) ^ ( T - r) ) * f; + q = qlnorm( r / T, mu, s ); + a$points3d( x, r / T + 0 * x, pdf_rT ); + a$points3d( q, r / T, 0 ); +} \ No newline at end of file Added: pkg/Meucci/demo/S_OrderStatisticsPdfStudentT.R =================================================================== --- pkg/Meucci/demo/S_OrderStatisticsPdfStudentT.R (rev 0) +++ pkg/Meucci/demo/S_OrderStatisticsPdfStudentT.R 2013-06-24 21:14:39 UTC (rev 2421) @@ -0,0 +1,38 @@ +library(scatterplot3d); + +#' This script script shows that the pdf of the r-th order statistics of a tudent t random variable, +#' as described in A. Meucci, "Risk and Asset Allocation", Springer, 2005, Chapter 2. 
+#' +#' @references +#' \url{http://} +#' See Meucci's script for "S_OrderStatisticsPdfLognormal.m" +#' +#' @author Xavier Valls \email{flamejat@@gmail.com} +#' @export + +################################################################################################################# +### Input +mu = 0; +s = 1; +nu = 10; +T = 70; + +################################################################################################################# +### Pdf of r-th order statistic concentrated around the r/T quantile + +rs = 1: T; +x = mu + s * seq( -4, 4, 0.01); + +F = pt((x - mu) / s, nu); +f = 1 / s * dt((x - mu) / s, nu); + +a = scatterplot3d( 0, 0 , 0, xlim = c(-4 , 4 ), ylim = c( 0, 1 ), zlim = c( 0, 3), xlab = "x", ylab = "r/T", zlab = "pdf" ); + +for ( n in 1 : length( rs ) ) +{ + r = rs[ n ]; + pdf_rT = gamma( T + 1 ) / ( gamma( r ) * gamma( T - r + 1 )) * ( F ^ (r - 1) ) * (( 1 - F ) ^ ( T - r) ) * f; + q = mu + s * qt( r / T, nu ); + a$points3d( x, r / T + 0 * x, pdf_rT, type = "l" ); + a$points3d( q, r / T, 0 ); +} \ No newline at end of file Added: pkg/Meucci/demo/S_WishartCorrelation.R =================================================================== --- pkg/Meucci/demo/S_WishartCorrelation.R (rev 0) +++ pkg/Meucci/demo/S_WishartCorrelation.R 2013-06-24 21:14:39 UTC (rev 2421) @@ -0,0 +1,58 @@ +#' This script computes the correlation of the first diagonal and off-diagonal elements +#' of a 2x2 Wishart distribution as a function of the inputs, as described in A. Meucci, +#' "Risk and Asset Allocation", Springer, 2005, Chapter 2. +#' +#' @references +#' \url{http://} +#' See Meucci's script for "S_WishartCorrelation.m" +#' +#' @author Xavier Valls \email{flamejat@@gmail.com} +#' @export + +################################################################################################################### +### Inputs + +s = c( 1, 1); +nu = 15; +rhos = seq( -0.99, 0.99, 0.01 ); +nrhos = length(rhos); + +################################################################################################################### +### Compute the correlation using simulation + +corrs2 = matrix( NaN, nrhos, 1); +for( uu in 1 : nrhos ) +{ + rho = rhos[ uu ]; + Sigma = diag( s ) %*% rbind( c( 1, rho ), c( rho, 1 ) ) %*% diag( s ); + + # compute expected values of W_xx and W_xy, see (2.227) in "Risk and Asset Allocation - Springer + E_xx = nu * Sigma[ 1, 1 ]; + E_xy = nu * Sigma[ 1, 2 ]; + + # compute covariance matrix of W_xx and W_xy, see (2.228) in "Risk and Asset Allocation - Springer + m = 1; n = 1; p = 1; q = 1; + var_Wxx = nu * ( Sigma[ m, p ] * Sigma[ n, q ] + Sigma[ m, q ] * Sigma[ n, p ] ); + + m = 1; n = 2; p = 1; q = 2; + var_Wxy = nu * ( Sigma[ m, p ] * Sigma[ n, q ] + Sigma[ m, q ] * Sigma[ n, p ] ); + + m = 1; n = 1; p = 1; q = 2; + cov_Wxx_Wxy = nu * ( Sigma[ m, p ] * Sigma[ n, q ] + Sigma[ m, q ] * Sigma[ n, p ] ); + + S_xx_xy = rbind( cbind( var_Wxx, cov_Wxx_Wxy ), cbind( cov_Wxx_Wxy, var_Wxy )); + + # compute covariance of X_1 and X_2 + S = diag( 1 / c( sqrt( var_Wxx ), sqrt( var_Wxy ))) %*% S_xx_xy %*% diag( 1 / c( sqrt( var_Wxx ), sqrt( var_Wxy ))); + + # correlation = covariance + corrs2[ uu ] = S[ 1, 2 ]; +} + +################################################################################################################### +### Analytical correlation +corrs = sqrt( 2 ) * rhos / sqrt( 1 + rhos ^ 2); + +figure(); +plot(rhos, corrs, xlab = expression( paste("input ", rho)), ylab = "Wishart correlation"); +lines( rhos, corrs2, col = "red" ); From noreply at 
r-forge.r-project.org Mon Jun 24 23:46:42 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Mon, 24 Jun 2013 23:46:42 +0200 (CEST) Subject: [Returnanalytics-commits] r2422 - pkg/Meucci/demo Message-ID: <20130624214643.2D2551850F1@r-forge.r-project.org> Author: xavierv Date: 2013-06-24 23:46:41 +0200 (Mon, 24 Jun 2013) New Revision: 2422 Modified: pkg/Meucci/demo/S_BivariateSample.R pkg/Meucci/demo/S_CovarianceEvolution.R pkg/Meucci/demo/S_DisplayLognormalCopulaPdf.R pkg/Meucci/demo/S_DisplayNormalCopulaCdf.R pkg/Meucci/demo/S_DisplayNormalCopulaPdf.R pkg/Meucci/demo/S_DisplayStudentTCopulaPdf.R pkg/Meucci/demo/S_EllipticalNDim.R pkg/Meucci/demo/S_LognormalSample.R pkg/Meucci/demo/S_NonAnalytical.R pkg/Meucci/demo/S_NormalSample.R pkg/Meucci/demo/S_StudentTSample.R pkg/Meucci/demo/S_WishartCorrelation.R Log: - changes to display multiple plots per demo Modified: pkg/Meucci/demo/S_BivariateSample.R =================================================================== --- pkg/Meucci/demo/S_BivariateSample.R 2013-06-24 21:14:39 UTC (rev 2421) +++ pkg/Meucci/demo/S_BivariateSample.R 2013-06-24 21:46:41 UTC (rev 2422) @@ -44,12 +44,13 @@ Z_2 = Z[, 2]; # display marginals: as expected, they are normal - +dev.new(); NumBins = round(10 * log(nSim)); par( mfrow = c( 2, 1) ); hist( Z_1, NumBins, xlab = "normal 1", ylab = "" ); hist( Z_2, NumBins, xlab = "normal 2", ylab = "" ); +dev.new(); plot( Z_1, Z_2, type = "p", xlab = "normal 1", ylab = "normal 2" ); @@ -57,7 +58,7 @@ NumBins2D = round(sqrt(100 * log(nSim))); Z_3 = table( cut (Z_1, NumBins2D ), cut ( Z_2, NumBins2D)); - +dev.new(); cloud( Z_3, panel.3d.cloud = panel.3dbars, scales = list( arrows = FALSE, just = "right" ), xlab = "normal 1", ylab = "normal 2", zlab="", main = "pdf normal" ); @@ -70,15 +71,18 @@ # plot copula NumBins = round(10 * log(nSim)); +dev.new(); par( mfrow = c( 2, 1) ); hist( U_1, NumBins, xlab = "grade 1", ylab = "", main = "" ); hist( U_2, NumBins, xlab = "grade 2", ylab = "", main = "" ); # joint sample +dev.new(); plot(U_1, U_2, xlab="grade 1", ylab="grade 2" ); # 3d histogram NumBins2D = round(sqrt(100 * log(nSim))); +dev.new(); U_3 = table( cut (U_1, NumBins2D ), cut ( U_2, NumBins2D )); cloud( U_3, panel.3d.cloud = panel.3dbars, scales = list( arrows = FALSE, just = "right" ), xlab = "grade 1", ylab = "grade 2", zlab="", main = "pdf copula" ); @@ -100,7 +104,7 @@ NumBins = round(10 * log(nSim)); - +dev.new(); par( mfrow = c( 2, 1) ); # Student t distribution hist( X_1, NumBins, xlab = "gamma", ylab = "", main = "" ); @@ -108,10 +112,12 @@ hist( X_2, NumBins, xlab = "lognormal", ylab = "", main = "" ); # joint sample +dev.new(); plot(X_1, X_2, xlab="gamma", ylab="lognormal" ); # 3d histogram NumBins2D = round(sqrt(100 * log(nSim))); +dev.new(); X_3 = table( cut (X_1, NumBins2D ), cut ( X_2, NumBins2D )); cloud( X_3, panel.3d.cloud = panel.3dbars, scales = list( arrows = FALSE, just = "right" ), xlab = "gamma", ylab = "lognormal", zlab="", main = "pdf joint distribution" ); \ No newline at end of file Modified: pkg/Meucci/demo/S_CovarianceEvolution.R =================================================================== --- pkg/Meucci/demo/S_CovarianceEvolution.R 2013-06-24 21:14:39 UTC (rev 2421) +++ pkg/Meucci/demo/S_CovarianceEvolution.R 2013-06-24 21:46:41 UTC (rev 2422) @@ -60,15 +60,18 @@ Pick = cbind( K + 2*J - 1, K + 2*J ) # horizon simulations +dev.new(); plot( OUstepResult$X_t[ , Pick[ 1 ] ] , OUstepResult$X_t[ , Pick[ 2 ] ] ) # horizon location +dev.new(); plot( 
OUstepResult$Mu_t[ Pick[ 1 ] ] , OUstepResult$Mu_t[ Pick[ 2 ] ] ) # horizon dispersion ellipsoid # TwoDimEllipsoid(MuHat_t1(Pick),SigmaHat_t1(Pick,Pick),2,0,0); # starting point +dev.new(); plot( x0[ Pick[ 1 ] ] , x0[ Pick[ 2 ] ] ) # starting generating dispersion ellipsoid Modified: pkg/Meucci/demo/S_DisplayLognormalCopulaPdf.R =================================================================== --- pkg/Meucci/demo/S_DisplayLognormalCopulaPdf.R 2013-06-24 21:14:39 UTC (rev 2421) +++ pkg/Meucci/demo/S_DisplayLognormalCopulaPdf.R 2013-06-24 21:46:41 UTC (rev 2422) @@ -1,3 +1,4 @@ + #'This script displays the pdf of the copula of a lognormal distribution, as described #' in A. Meucci, "Risk and Asset Allocation", Springer, 2005, Chapter 2. #' Modified: pkg/Meucci/demo/S_DisplayNormalCopulaCdf.R =================================================================== --- pkg/Meucci/demo/S_DisplayNormalCopulaCdf.R 2013-06-24 21:14:39 UTC (rev 2421) +++ pkg/Meucci/demo/S_DisplayNormalCopulaCdf.R 2013-06-24 21:46:41 UTC (rev 2422) @@ -1,3 +1,4 @@ +library(mvtnorm); #'This script displays the cdf of the copula of a normal distribution, as described #' in A. Meucci, "Risk and Asset Allocation", Springer, 2005, Chapter 2. #' @@ -13,7 +14,7 @@ Mu = c( 0, 0 ); r = -0.999; sigmas = c(1, 1 ); -Sigma = diag( c( sigmas ) ) %*% rbind( c( 1, r ), c( r, 1 ) ) %*% diag( c( sigmas ) ); +Sigma = diag( sigmas ) %*% rbind( c( 1, r ), c( r, 1 ) ) %*% diag( sigmas ); ############################################################################################################# ### Grid Modified: pkg/Meucci/demo/S_DisplayNormalCopulaPdf.R =================================================================== --- pkg/Meucci/demo/S_DisplayNormalCopulaPdf.R 2013-06-24 21:14:39 UTC (rev 2421) +++ pkg/Meucci/demo/S_DisplayNormalCopulaPdf.R 2013-06-24 21:46:41 UTC (rev 2422) @@ -12,8 +12,8 @@ ### input parameters Mu = rbind( 1, -1 ); r = 0.7; -sigmas = rbind( 1, 1 ); -Sigma = diag( c( sigmas ) ) %*% rbind( c( 1, r ), c( r, 1 ) ) %*% diag( c( sigmas ) ); +sigmas = c( 1, 1 ); +Sigma = diag( sigmas ) %*% rbind( c( 1, r ), c( r, 1 ) ) %*% diag( sigmas ); ############################################################################################################# ### Grid Modified: pkg/Meucci/demo/S_DisplayStudentTCopulaPdf.R =================================================================== --- pkg/Meucci/demo/S_DisplayStudentTCopulaPdf.R 2013-06-24 21:14:39 UTC (rev 2421) +++ pkg/Meucci/demo/S_DisplayStudentTCopulaPdf.R 2013-06-24 21:46:41 UTC (rev 2422) @@ -13,8 +13,8 @@ Mu = rbind( 0, 0 ); r = 0.5; -sigmas = rbind( 1, 2 ); -Sigma = diag( c( sigmas ) ) %*% rbind( c( 1, r ), c( r, 1 ) ) %*% diag( c( sigmas ) ); +sigmas = c( 1, 2 ); +Sigma = diag( sigmas ) %*% rbind( c( 1, r ), c( r, 1 ) ) %*% diag( sigmas ); #nu = 1; Sigma(1,2) = 0; Sigma(2,1) = 0; nu = 200; Modified: pkg/Meucci/demo/S_EllipticalNDim.R =================================================================== --- pkg/Meucci/demo/S_EllipticalNDim.R 2013-06-24 21:14:39 UTC (rev 2421) +++ pkg/Meucci/demo/S_EllipticalNDim.R 2013-06-24 21:46:41 UTC (rev 2422) @@ -48,11 +48,13 @@ n = 3; xlabel = paste( "X_" , m ); ylabel = paste( "X_", n ); +dev.new(); plot( X[ , m ], X[ , n ], xlab = xlabel, ylab = ylabel); # visualize n-th marginal n = 4; xlabel = paste( "X_", m ); NumBins = round(10 * log(nSim)); +dev.new(); hist( X[ , n ], NumBins, xlab = xlabel, main= "histogram"); Modified: pkg/Meucci/demo/S_LognormalSample.R 
=================================================================== --- pkg/Meucci/demo/S_LognormalSample.R 2013-06-24 21:14:39 UTC (rev 2421) +++ pkg/Meucci/demo/S_LognormalSample.R 2013-06-24 21:46:41 UTC (rev 2422) @@ -29,13 +29,16 @@ ### Plots # plot over time +dev.new(); plot( X, main = "lognormal sample vs observation time" ); # plot histogram +dev.new(); NumBins = round( 10 * log( nSim ) ); hist( X, NumBins, main = "histogram of lognormal sample" ); # plot empirical cdf +dev.new(); f = ecdf( X ); plot( f, col = "red", main = "cdf of lognormal distribution" ); @@ -47,6 +50,7 @@ ################################################################################################################## # plot empirical quantile +dev.new(); u= seq( 0.01, 0.99, 0.01 ); # range of quantiles (values between zero and one) q = quantile( X, u ); plot( u, q, type = "l", xlab="Grade", ylab="Quantile", lty = 1, col = "red", main = "quantile of lognormal distribution" ); Modified: pkg/Meucci/demo/S_NonAnalytical.R =================================================================== --- pkg/Meucci/demo/S_NonAnalytical.R 2013-06-24 21:14:39 UTC (rev 2421) +++ pkg/Meucci/demo/S_NonAnalytical.R 2013-06-24 21:46:41 UTC (rev 2422) @@ -35,20 +35,24 @@ ################################################################################################################## ### Plot the sample Z +dev.new(); plot( Z, xlab="simulations", ylab="Z", main = "sample vs observation time" ); ################################################################################################################## ### Plot the histogram of Z +dev.new(); NumBins = round( 10 * log( nSim ) ); hist( Z, NumBins, xlab="Z", main="sample histogram" ); ################################################################################################################## ### Plot the empirical cdf of Z +dev.new(); f = ecdf( Z ); plot( f, xlab="Z", main="empirical cdf" ); ################################################################################################################## ### Plot the empirical quantile of Z +dev.new(); u= seq( 0.01, 0.99, 0.01 ); # range of quantiles (values between zero and one) q = quantile( Z, u ); plot( u, q, type = "l", xlab="Grade", ylab="Quantile", lty = 1, main = "empirical quantile" ); Modified: pkg/Meucci/demo/S_NormalSample.R =================================================================== --- pkg/Meucci/demo/S_NormalSample.R 2013-06-24 21:14:39 UTC (rev 2421) +++ pkg/Meucci/demo/S_NormalSample.R 2013-06-24 21:46:41 UTC (rev 2422) @@ -21,11 +21,13 @@ ################################################################################################################## ### Plot the sample# plot over time +dev.new(); plot( X, main = "normal sample vs observation time" ); ################################################################################################################## ### Plot the histogram +dev.new(); NumBins = round( 10 * log( nSim ) ); hist( X, NumBins, main = "histogram of normal sample" ); @@ -33,6 +35,8 @@ ### Compare empirical with exact cdfs # plot empirical cdf +dev.new(); + f = ecdf( X ); plot( f, col = "red", main = "cdf of normal distribution" ); @@ -43,7 +47,9 @@ ################################################################################################################## ### Compare empirical and exact quantiles + # plot empirical quantile +dev.new(); u= seq( 0.01, 0.99, 0.01 ); # range of quantiles (values between zero and one) q = quantile( X, u ); plot( u, q, type = "l", 
xlab="Grade", ylab="Quantile", lty = 1, col = "red", main = "quantile of normal distribution" ); Modified: pkg/Meucci/demo/S_StudentTSample.R =================================================================== --- pkg/Meucci/demo/S_StudentTSample.R 2013-06-24 21:14:39 UTC (rev 2421) +++ pkg/Meucci/demo/S_StudentTSample.R 2013-06-24 21:46:41 UTC (rev 2422) @@ -45,6 +45,7 @@ ### Plot histograms NumBins = round(10 * log(nSim)); +dev.new(); par( mfrow = c( 3, 1) ); hist( X_a, NumBins, main = "built-in generator" ); @@ -66,7 +67,7 @@ ################################################################################################################## ### Superimpose the the plots of the empirical quantiles - +dev.new(); plot( u, q_a, type = "l", xlab="Grade", ylab="Quantile", lty = 1, col = "red", main = "quantile of Student-t distribution" ); lines( u, q_b, type = "l", lty = 1, col = "blue" ); lines( u, q_c, type = "l", lty = 1, col = "green" ); Modified: pkg/Meucci/demo/S_WishartCorrelation.R =================================================================== --- pkg/Meucci/demo/S_WishartCorrelation.R 2013-06-24 21:14:39 UTC (rev 2421) +++ pkg/Meucci/demo/S_WishartCorrelation.R 2013-06-24 21:46:41 UTC (rev 2422) @@ -53,6 +53,6 @@ ### Analytical correlation corrs = sqrt( 2 ) * rhos / sqrt( 1 + rhos ^ 2); -figure(); +dev.new(); plot(rhos, corrs, xlab = expression( paste("input ", rho)), ylab = "Wishart correlation"); lines( rhos, corrs2, col = "red" ); From noreply at r-forge.r-project.org Tue Jun 25 00:10:52 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Tue, 25 Jun 2013 00:10:52 +0200 (CEST) Subject: [Returnanalytics-commits] r2423 - in pkg/FactorAnalytics: R data man Message-ID: <20130624221053.3D3771858F6@r-forge.r-project.org> Author: chenyian Date: 2013-06-25 00:10:52 +0200 (Tue, 25 Jun 2013) New Revision: 2423 Removed: pkg/FactorAnalytics/R/plot.MacroFactorModel.r pkg/FactorAnalytics/R/print.MacroFactorModel.r pkg/FactorAnalytics/R/summary.MacroFactorModel.r Modified: pkg/FactorAnalytics/R/ pkg/FactorAnalytics/R/fitFundamentalFactorModel.R pkg/FactorAnalytics/data/stock.RDATA pkg/FactorAnalytics/man/fitFundamentalFactorModel.Rd Log: change input of fitFundamentalFactorModel.R, making it easier to use. The input now requires panel data. Once user defines asset variables, time variables, return variable and exposure variables. The function will take care of the rest. Property changes on: pkg/FactorAnalytics/R ___________________________________________________________________ Modified: svn:ignore - covEWMA.R + covEWMA.R plot.MacroFactorModel.r print.MacroFactorModel.r summary.MacroFactorModel.r Modified: pkg/FactorAnalytics/R/fitFundamentalFactorModel.R =================================================================== --- pkg/FactorAnalytics/R/fitFundamentalFactorModel.R 2013-06-24 21:46:41 UTC (rev 2422) +++ pkg/FactorAnalytics/R/fitFundamentalFactorModel.R 2013-06-24 22:10:52 UTC (rev 2423) @@ -14,12 +14,10 @@ #' Insightful robust library ported to R and robustbase Basic robust statistics #' package for R #' -#' @param fulldata data.frame, fulldata contains returns, dates, and exposures -#' which is stacked by dates. 
-#' @param timedates a vector of Dates specifying the date range for the model
-#' fitting
-#' @param exposures a character vector of exposure names for the factor model
-#' @param assets a list of PERMNOs to be used for the factor model
+#' @param data data.frame. It must contain the columns named by \emph{assetvar},
+#' \emph{returnsvar}, \emph{datevar}, and exposure.names; that is, stacked panel
+#' data with both firm and time variables.
+#' @param exposure.names a character vector of exposure names for the factor model
 #' @param wls logical flag, TRUE for weighted least squares, FALSE for ordinary
 #' least squares
 #' @param regression A character string, "robust" for regression via lmRob,
@@ -37,33 +35,25 @@
 #' the data.
 #' @param assetvar A character string giving the name of the asset variable in
 #' the data.
-#' @param tickersvar A character string giving the name of the ticker variable
-#' in the data.
 #' @return an S3 object containing
 #' \itemize{
 #' \item cov.returns A "list" object that contains covariance information for
 #' asset returns, including covariance, mean and eigenvalues.
-#' \item cov.factor.rets An object of class "cov" or "covRob" which
+#' \item cov.factor An object of class "cov" or "covRob" which
 #' contains the covariance matrix of the factor returns (including intercept).
 #' \item cov.resids An object of class "cov" or "covRob" which contains
 #' the covariance matrix of the residuals, if "full.resid.cov" is TRUE. NULL
 #' if "full.resid.cov" is FALSE.
-#' \item resid.varianceb A vector of variances estimated from the OLS
+#' \item resid.variance A vector of variances estimated from the OLS
 #' residuals for each asset. If "wls" is TRUE, these are the weights used in
 #' the weighted least squares regressions. If "cov = robust" these values are
 #' computed with "scale.tau". Otherwise they are computed with "var".
-#' \item factor.rets A "zoo" object containing the time series of
+#' \item factor.rets An "xts" object containing the time series of
 #' estimated factor returns and intercepts.
-#' \item resids A "zoo" object containing the time series of residuals
+#' \item resids An "xts" object containing the time series of residuals
 #' for each asset.
-#' \item tstats A "zoo" object containing the time series of t-statistics
+#' \item tstats An "xts" object containing the time series of t-statistics
 #' for each exposure.
-#' \item returns.data A "data.frame" object containing the returns data
-#' for the assets in the factor model, including RETURN, DATE, PERMNO.
-#' \item exposure.data A "data.frame" object containing the data for the
-#' variables in the factor model, including DATE, PERMNO.
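# A hedged aside on how these outputs combine: under the fitted model the
# implied asset-return covariance is B F B' + D, where B stacks the
# last-period exposures (with intercept), F is cov.factor$cov, and D holds
# the residual variances. Toy numbers, two assets, one exposure:
B <- cbind(1, c(0.45, 0.62))                   # exposure matrix with intercept
F <- matrix(c(4e-04, 1e-05, 1e-05, 9e-05), 2)  # factor-return covariance
D <- diag(c(2e-04, 3e-04))                     # diagonal residual covariance
B %*% F %*% t(B) + D                           # analogue of cov.returns$cov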
-#' \item assets A character vector of PERMNOs used in the model
-#' \item tickers A character vector of tickers used in the model
 #' \item call function call
 #' }
 #' @author Guy Yullen and Yi-An Chen
@@ -73,39 +63,68 @@
 #' # BARRA type factor model
 #' data(stock)
 #' # there are 447 assets
-#' assets = unique(fulldata[,"PERMNO"])
-#' timedates = as.Date(unique(fulldata[,"DATE"]))
-#' exposures <- exposures.names <- c("BOOK2MARKET", "LOG.MARKETCAP")
-#' test.fit <- fitFundamentalFactorModel(fulldata=fulldata, timedates=timedates, exposures=exposures,covariance="classic", assets=assets,full.resid.cov=TRUE,
-#' regression="classic",wls=TRUE)
-#' names(test.fit)
-#' test.fit$cov.returns
-#' test.fit$cov.factor.rets
-#' test.fit$factor.rets
+#' exposure.names <- c("BOOK2MARKET", "LOG.MARKETCAP")
+#' test.fit <- fitFundamentalFactorModel(data=data,exposure.names=exposure.names,
+#'                                       datevar = "DATE", returnsvar = "RETURN",
+#'                                       assetvar = "TICKER", wls = TRUE,
+#'                                       regression = "classic",
+#'                                       covariance = "classic", full.resid.cov = TRUE,
+#'                                       robust.scale = TRUE)
 #'
+#' names(test.fit)
+#' test.fit$cov.returns
+#' test.fit$cov.resids
+#' names(test.fit$cov.factor)
+#' test.fit$cov.factor$cov
+#' test.fit$factor
+#' test.fit$resid.variance
+#' test.fit$resids
+#' test.fit$tstats
+#' test.fit$call
+#'
 #' # BARRA type Industry Factor Model
-#' exposures <- exposures.names <- c("GICS.SECTOR")
+#' exposure.names <- c("GICS.SECTOR")
 #' # the rest stays the same
-#' test.fit <- fitFundamentalFactorModel(fulldata=fulldata, timedates=timedates, exposures=exposures,
-#' covariance="classic", assets=assets,full.resid.cov=TRUE,
-#' regression="classic",wls=TRUE)
+#' test.fit2 <- fitFundamentalFactorModel(data=data,exposure.names=exposure.names,
+#'                                        datevar = "DATE", returnsvar = "RETURN",
+#'                                        assetvar = "TICKER", wls = TRUE,
+#'                                        regression = "classic",
+#'                                        covariance = "classic", full.resid.cov = TRUE,
+#'                                        robust.scale = TRUE)
+#'
+#' names(test.fit2)
+#' test.fit2$cov.returns
+#' test.fit2$cov.resids
+#' names(test.fit2$cov.factor)
+#' test.fit2$cov.factor$cov
+#' test.fit2$factor
+#' test.fit2$resid.variance
+#' test.fit2$resids
+#' test.fit2$tstats
+#' test.fit2$call
+#'
 #' }
 #'

+
 fitFundamentalFactorModel <-
-function (fulldata, timedates, exposures, assets, wls = FALSE, regression = "classic",
-          covariance = "classic", full.resid.cov = TRUE, robust.scale = FALSE,
-          datevar = "DATE", assetvar = "PERMNO", returnsvar = "RETURN",
-          tickersvar = "TICKER.x") {
+function(data, exposure.names, datevar, returnsvar, assetvar,
+         wls = TRUE, regression = "classic",
+         covariance = "classic", full.resid.cov = TRUE, robust.scale = FALSE) {

-require(zoo)
+require(xts)
 require(robust)

-  # if (dim(dataArray)[1] < 2)
-  #   stop("At least two time points, t and t-1, are needed for fitting the factor model.")
+assets = unique(data[,assetvar])
+timedates = as.Date(unique(data[,datevar]))
+
+  if (length(timedates) < 2) stop("At least two time points, t and t-1, are needed for fitting the factor model.")
-  if (!is(exposures, "vector") || !is.character(exposures))
+  if (!is(exposure.names, "vector") || !is.character(exposure.names))
     stop("exposure argument invalid---must be character vector.")
   if (!is(assets, "vector") || !is.character(assets))
     stop("assets argument invalid---must be character vector.")
@@ -120,34 +139,34 @@
     stop("covariance must be one of 'robust', 'classic'.")
   this.call <- match.call()

-  if (match(returnsvar, exposures, FALSE))
+  if (match(returnsvar, exposure.names, FALSE))
     stop(paste(returnsvar, "cannot
be used as an exposure.")) - + assets = unique(data[,assetvar]) + timedates = as.Date(unique(data[,datevar])) numTimePoints <- length(timedates) - numExposures <- length(exposures) + numExposures <- length(exposure.names) numAssets <- length(assets) - tickers <- fulldata[1:numAssets,tickersvar] - # dim(fulldata) - # [1] 42912 117 - # dimnames(fulldata) - # PERMNO" "DATE" "RETURN" "TICKER.x" "BOOK2MARKET" "TICKER.y" - # check if exposures are numeric, if not, create exposures. factors by dummy variables - which.numeric <- sapply(fulldata[, exposures, drop = FALSE],is.numeric) - exposures.numeric <- exposures[which.numeric] + # tickers <- data[1:numAssets,tickersvar] + + + + # check if exposure.names are numeric, if not, create exposures. factors by dummy variables + which.numeric <- sapply(data[, exposure.names, drop = FALSE],is.numeric) + exposures.numeric <- exposure.names[which.numeric] # industry factor model - exposures.factor <- exposures[!which.numeric] + exposures.factor <- exposure.names[!which.numeric] if (length(exposures.factor) > 1) { stop("Only one nonnumeric variable can be used at this time.") } - regression.formula <- paste("~", paste(exposures, collapse = "+")) + regression.formula <- paste("~", paste(exposure.names, collapse = "+")) # "~ BOOK2MARKET" if (length(exposures.factor)) { regression.formula <- paste(regression.formula, "- 1") - fulldata[, exposures.factor] <- as.factor(fulldata[, + data[, exposures.factor] <- as.factor(data[, exposures.factor]) - exposuresToRecode <- names(fulldata[, exposures, drop = FALSE])[!which.numeric] + exposuresToRecode <- names(data[, exposure.names, drop = FALSE])[!which.numeric] contrasts.list <- lapply(seq(length(exposuresToRecode)), function(i) function(n, m) contr.treatment(n, contrasts = FALSE)) names(contrasts.list) <- exposuresToRecode @@ -241,55 +260,55 @@ if (!wls) { if (regression == "robust") { # ols.robust - FE.hat <- by(data = fulldata, INDICES = as.numeric(fulldata[[datevar]]), + FE.hat <- by(data = data, INDICES = as.numeric(data[[datevar]]), FUN = ols.robust, modelterms = regression.formula, conlist = contrasts.list) } else { # ols.classic - FE.hat <- by(data = fulldata, INDICES = as.numeric(fulldata[[datevar]]), + FE.hat <- by(data = data, INDICES = as.numeric(data[[datevar]]), FUN = ols.classic, modelterms = regression.formula, conlist = contrasts.list) } } else { if (regression == "robust") { # wls.robust - E.hat <- by(data = fulldata, INDICES = as.numeric(fulldata[[datevar]]), + resids <- by(data = data, INDICES = as.numeric(data[[datevar]]), FUN = function(xdf, modelterms, conlist) { lmRob(modelterms, data = xdf, contrasts = conlist, control = lmRob.control(mxr = 200, mxf = 200, mxs = 200))$resid }, modelterms = regression.formula, conlist = contrasts.list) - E.hat <- apply(E.hat, 1, unlist) + resids <- apply(resids, 1, unlist) weights <- if (covariance == "robust") - apply(E.hat, 1, scaleTau2)^2 - else apply(E.hat, 1, var) - FE.hat <- by(data = fulldata, INDICES = as.numeric(fulldata[[datevar]]), + apply(resids, 1, scaleTau2)^2 + else apply(resids, 1, var) + FE.hat <- by(data = data, INDICES = as.numeric(data[[datevar]]), FUN = wls.robust, modelterms = regression.formula, conlist = contrasts.list, w = weights) } else { # wls.classic - E.hat <- by(data = fulldata, INDICES = as.numeric(fulldata[[datevar]]), + resids <- by(data = data, INDICES = as.numeric(data[[datevar]]), FUN = function(xdf, modelterms, conlist) { lm(formula = modelterms, data = xdf, contrasts = conlist, singular.ok = TRUE)$resid }, modelterms = 
regression.formula, conlist = contrasts.list) - E.hat <- apply(E.hat, 1, unlist) + resids <- apply(resids, 1, unlist) weights <- if (covariance == "robust") - apply(E.hat, 1, scaleTau2)^2 - else apply(E.hat, 1, var) - FE.hat <- by(data = fulldata, INDICES = as.numeric(fulldata[[datevar]]), + apply(resids, 1, scaleTau2)^2 + else apply(resids, 1, var) + FE.hat <- by(data = data, INDICES = as.numeric(data[[datevar]]), FUN = wls.classic, modelterms = regression.formula, conlist = contrasts.list, w = weights) } } # if there is industry dummy variables if (length(exposures.factor)) { - numCoefs <- length(exposures.numeric) + length(levels(fulldata[, + numCoefs <- length(exposures.numeric) + length(levels(data[, exposures.factor])) ncols <- 1 + 2 * numCoefs + numAssets fnames <- c(exposures.numeric, paste(exposures.factor, - levels(fulldata[, exposures.factor]), sep = "")) + levels(data[, exposures.factor]), sep = "")) cnames <- c("numCoefs", fnames, paste("t", fnames, sep = "."), assets) } else { @@ -318,7 +337,7 @@ timedates <- as.Date(as.numeric(dimnames(FE.hat)[[1]]), origin = "1970-01-01") coefs.names <- colnames(FE.hat.mat)[2:(1 + numCoefs)] # estimated factors ordered by time - f.hat <- zoo(x = FE.hat.mat[, 2:(1 + numCoefs)], order.by = timedates) + f.hat <- xts(x = FE.hat.mat[, 2:(1 + numCoefs)], order.by = timedates) # check for outlier gomat <- apply(coredata(f.hat), 2, function(x) abs(x - median(x, na.rm = TRUE)) > 4 * mad(x, na.rm = TRUE)) @@ -327,30 +346,30 @@ for (i in which(apply(gomat, 1, any, na.rm = TRUE))) print(f.hat[i, gomat[i, ], drop = FALSE]) } - tstats <- zoo(x = FE.hat.mat[, (2 + nc):(1 + 2 * nc)], order.by = timedates) + tstats <- xts(x = FE.hat.mat[, (2 + nc):(1 + 2 * nc)], order.by = timedates) # residuals for every asset ordered by time - E.hat <- zoo(x = FE.hat.mat[, (2 + 2 * numCoefs):(1 + 2 * + resids <- xts(x = FE.hat.mat[, (2 + 2 * numCoefs):(1 + 2 * numCoefs + numAssets)], order.by = timedates) - colnames(E.hat) <- tickers - if (covariance == "robust") { + +if (covariance == "robust") { if (kappa(na.exclude(coredata(f.hat))) < 1e+10) { - Cov.facrets <- covRob(coredata(f.hat), estim = "pairwiseGK", + Cov.factors <- covRob(coredata(f.hat), estim = "pairwiseGK", distance = FALSE, na.action = na.omit) } else { cat("Covariance matrix of factor returns is singular.\n") - Cov.facrets <- covRob(coredata(f.hat), distance = FALSE, + Cov.factors <- covRob(coredata(f.hat), distance = FALSE, na.action = na.omit) } - resid.vars <- apply(coredata(E.hat), 2, scaleTau2, na.rm = T)^2 + resid.vars <- apply(coredata(resids), 2, scaleTau2, na.rm = T)^2 D.hat <- if (full.resid.cov) - covOGK(coredata(E.hat), sigmamu = scaleTau2, n.iter = 1) + covOGK(coredata(resids), sigmamu = scaleTau2, n.iter = 1) else diag(resid.vars) } else { - Cov.facrets <- covClassic(coredata(f.hat), distance = FALSE,na.action = na.omit) - resid.vars <- apply(coredata(E.hat), 2, var, na.rm = TRUE) + Cov.factors <- covClassic(coredata(f.hat), distance = FALSE,na.action = na.omit) + resid.vars <- apply(coredata(resids), 2, var, na.rm = TRUE) D.hat <- if (full.resid.cov) - covClassic(coredata(E.hat), distance = FALSE, na.action = na.omit) + covClassic(coredata(resids), distance = FALSE, na.action = na.omit) else diag(resid.vars) } @@ -359,39 +378,32 @@ colnames <- coefs.names B.final[, match("(Intercept)", colnames, 0)] <- 1 numeric.columns <- match(exposures.numeric, colnames, 0) - B.final[, numeric.columns] <- as.matrix(fulldata[as.numeric(fulldata[[datevar]]) == + B.final[, numeric.columns] <- 
as.matrix(data[as.numeric(data[[datevar]]) == timedates[numTimePoints], exposures.numeric]) if (length(exposures.factor)) B.final[, grep(exposures.factor, x = colnames)][cbind(seq(numAssets), - as.numeric(fulldata[fulldata[[datevar]] == timedates[numTimePoints], + as.numeric(data[data[[datevar]] == timedates[numTimePoints], exposures.factor]))] <- 1 - cov.returns <- B.final %*% Cov.facrets$cov %*% t(B.final) + + cov.returns <- B.final %*% Cov.factors$cov %*% t(B.final) + if (full.resid.cov) D.hat$cov else D.hat - dimnames(cov.returns) <- list(tickers, tickers) - mean.cov.returns = tapply(fulldata[[returnsvar]],fulldata[[assetvar]], mean) - dimnames(mean.cov.returns) = list(tickers) + mean.cov.returns = tapply(data[[returnsvar]],data[[assetvar]], mean) Cov.returns <- list(cov = cov.returns, mean=mean.cov.returns, eigenvalues = eigen(cov.returns, only.values = TRUE, symmetric = TRUE)$values) if (full.resid.cov) { Cov.resids <- D.hat - dimnames(Cov.resids$cov) <- list(tickers, tickers) } else { Cov.resids <- NULL } output <- list(cov.returns = Cov.returns, - cov.factor.rets = Cov.facrets, + cov.factor = Cov.factors, cov.resids = Cov.resids, resid.variance = resid.vars, factor.rets = f.hat, - resids = E.hat, - tstats = tstats, - returns.data = fulldata[,c(datevar, assetvar, returnsvar)], - exposure.data = fulldata[,c(datevar, assetvar, exposures)], - assets = assets, - tickers = tickers, + resids = resids, + tstats = tstats, call = this.call) class(output) <- "FundamentalFactorModel" return(output) Deleted: pkg/FactorAnalytics/R/plot.MacroFactorModel.r =================================================================== --- pkg/FactorAnalytics/R/plot.MacroFactorModel.r 2013-06-24 21:46:41 UTC (rev 2422) +++ pkg/FactorAnalytics/R/plot.MacroFactorModel.r 2013-06-24 22:10:52 UTC (rev 2423) @@ -1,393 +0,0 @@ -#' plot MacrofactorModel object. -#' -#' Generic function of plot method for fitMacroeconomicFactorModel. Either plot -#' all fit models or choose a single asset to plot. -#' -#' -#' @param fit.macro fit object created by fitMacroeconomicFactorModel. -#' @param colorset Defualt colorset is c(1:12). -#' @param legend.loc plot legend or not. Defualt is \code{NULL}. -#' @param which.plot integer indicating which plot to create: "none" will -#' create a menu to choose. Defualt is none. 1 = "Fitted factor returns", 2 = -#' "R square", 3 = "Variance of Residuals", 4 = "FM Correlation", 5 = "Factor -#' Contributions to SD", 6 = "Factor Contributions to ES", 7 = "Factor -#' Contributions to VaR" -#' @param max.show Maximum assets to plot. Default is 6. -#' @param plot.single Plot a single asset of lm class. Defualt is FALSE. -#' @param fundName Name of the asset to be plotted. -#' @param which.plot.single integer indicating which plot to create: "none" -#' will create a menu to choose. Defualt is none. 1 = time series plot of -#' actual and fitted values 2 = time series plot of residuals with standard -#' error bands 3 = time series plot of squared residuals 4 = time series plot -#' of absolute residuals 5 = SACF and PACF of residuals 6 = SACF and PACF of -#' squared residuals 7 = SACF and PACF of absolute residuals 8 = histogram of -#' residuals with normal curve overlayed 9 = normal qq-plot of residuals 10= -#' CUSUM plot of recursive residuals 11= CUSUM plot of OLS residuals 12= CUSUM -#' plot of recursive estimates relative to full sample estimates 13= rolling -#' estimates over 24 month window -#' @author Eric Zivot and Yi-An Chen. 
-#' @examples -#' -#' \dontrun{ -#' # load data from the database -#' data(managers.df) -#' ret.assets = managers.df[,(1:6)] -#' factors = managers.df[,(7:9)] -#' # fit the factor model with OLS -#' fit <- fitMacroeconomicFactorModel(ret.assets,factors,fit.method="OLS", -#' variable.selection="all subsets") -#' # plot of all assets and show only first 4 assets. -#' plot(fit.macro,max.show=4) -#' # single plot of HAM1 asset -#' plot(fit.macro, plot.single=TRUE, fundName="HAM1") -#' } -#' - plot.MacroFactorModel <- - function(fit.macro,colorset=c(1:12),legend.loc=NULL, - which.plot=c("none","1L","2L","3L","4L","5L","6L","7L"),max.show=6, - plot.single=FALSE, fundName,which.plot.single=c("none","1L","2L","3L","4L","5L","6L", - "7L","8L","9L","10L","11L","12L","13L")) { - require(zoo) - require(PerformanceAnalytics) - require(strucchange) - - if (plot.single==TRUE) { - ## inputs: - ## fit.macro lm object summarizing factor model fit. It is assumed that - ## time series date information is included in the names component - ## of the residuals, fitted and model components of the object. - ## fundName charater. The name of the single asset to be ploted. - ## which.plot.single integer indicating which plot to create: - ## 1 time series plot of actual and fitted values - ## 2 time series plot of residuals with standard error bands - ## 3 time series plot of squared residuals - ## 4 time series plot of absolute residuals - ## 5 SACF and PACF of residuals - ## 6 SACF and PACF of squared residuals - ## 7 SACF and PACF of absolute residuals - ## 8 histogram of residuals with normal curve overlayed - ## 9 normal qq-plot of residuals - ## 10 CUSUM plot of recursive residuals - ## 11 CUSUM plot of OLS residuals - ## 12 CUSUM plot of recursive estimates relative to full sample estimates - ## 13 rolling estimates over 24 month window - which.plot.single<-which.plot.single[1] - fit.lm = fit.macro$asset.fit[[fundName]] - - if (!(class(fit.lm) == "lm")) - stop("Must pass a valid lm object") - - ## extract information from lm object - - factorNames = colnames(fit.lm$model)[-1] - fit.formula = as.formula(paste(fundName,"~", paste(factorNames, collapse="+"), sep=" ")) - residuals.z = zoo(residuals(fit.lm), as.Date(names(residuals(fit.lm)))) - fitted.z = zoo(fitted(fit.lm), as.Date(names(fitted(fit.lm)))) - actual.z = zoo(fit.lm$model[,1], as.Date(rownames(fit.lm$model))) - tmp.summary = summary(fit.lm) - - - if (which.plot.single=="none") - which.plot.single<-menu(c("time series plot of actual and fitted values", - "time series plot of residuals with standard error bands", - "time series plot of squared residuals", - "time series plot of absolute residuals", - "SACF and PACF of residuals", - "SACF and PACF of squared residuals", - "SACF and PACF of absolute residuals", - "histogram of residuals with normal curve overlayed", - "normal qq-plot of residuals", - "CUSUM plot of recursive residuals", - "CUSUM plot of OLS residuals", - "CUSUM plot of recursive estimates relative to full sample estimates", - "rolling estimates over 24 month window"), - title="\nMake a plot selection (or 0 to exit):\n") - switch(which.plot.single, - "1L" = { - ## time series plot of actual and fitted values - plot(actual.z, main=fundName, ylab="Monthly performance", lwd=2, col="black") - lines(fitted.z, lwd=2, col="blue") - abline(h=0) - legend(x="bottomleft", legend=c("Actual", "Fitted"), lwd=2, col=c("black","blue")) - }, - - "2L" = { - ## time series plot of residuals with standard error bands - plot(residuals.z, main=fundName, 
ylab="Monthly performance", lwd=2, col="black") - abline(h=0) - abline(h=2*tmp.summary$sigma, lwd=2, lty="dotted", col="red") - abline(h=-2*tmp.summary$sigma, lwd=2, lty="dotted", col="red") - legend(x="bottomleft", legend=c("Residual", "+/ 2*SE"), lwd=2, - lty=c("solid","dotted"), col=c("black","red")) - }, - "3L" = { - ## time series plot of squared residuals - plot(residuals.z^2, main=fundName, ylab="Squared residual", lwd=2, col="black") - abline(h=0) - legend(x="topleft", legend="Squared Residuals", lwd=2, col="black") - }, - "4L" = { - ## time series plot of absolute residuals - plot(abs(residuals.z), main=fundName, ylab="Absolute residual", lwd=2, col="black") - abline(h=0) - legend(x="topleft", legend="Absolute Residuals", lwd=2, col="black") - }, - "5L" = { - ## SACF and PACF of residuals - chart.ACFplus(residuals.z, main=paste("Residuals: ", fundName, sep="")) - }, - "6L" = { - ## SACF and PACF of squared residuals - chart.ACFplus(residuals.z^2, main=paste("Residuals^2: ", fundName, sep="")) - }, - "7L" = { - ## SACF and PACF of absolute residuals - chart.ACFplus(abs(residuals.z), main=paste("|Residuals|: ", fundName, sep="")) - }, - "8L" = { - ## histogram of residuals with normal curve overlayed - chart.Histogram(residuals.z, methods="add.normal", main=paste("Residuals: ", fundName, sep="")) - }, - "9L" = { - ## normal qq-plot of residuals - chart.QQPlot(residuals.z, envelope=0.95, main=paste("Residuals: ", fundName, sep="")) - }, - "10L"= { - ## CUSUM plot of recursive residuals - if (as.character(fit.macro$call["fit.method"]) == "OLS") { - cusum.rec = efp(fit.formula, type="Rec-CUSUM", data=fit.lm$model) - plot(cusum.rec, sub=fundName) - } else - stop("CUMSUM applies only on OLS method") - }, - "11L"= { - ## CUSUM plot of OLS residuals - if (as.character(fit.macro$call["fit.method"]) == "OLS") { - cusum.ols = efp(fit.formula, type="OLS-CUSUM", data=fit.lm$model) - plot(cusum.ols, sub=fundName) - } else - stop("CUMSUM applies only on OLS method") - }, - "12L"= { - ## CUSUM plot of recursive estimates relative to full sample estimates - if (as.character(fit.macro$call["fit.method"]) == "OLS") { - cusum.est = efp(fit.formula, type="fluctuation", data=fit.lm$model) - plot(cusum.est, functional=NULL, sub=fundName) - } else - stop("CUMSUM applies only on OLS method") - }, - "13L"= { - ## rolling regression over 24 month window - if (as.character(fit.macro$call["fit.method"]) == "OLS") { - rollReg <- function(data.z, formula) { - coef(lm(formula, data = as.data.frame(data.z))) - } - reg.z = zoo(fit.lm$model, as.Date(rownames(fit.lm$model))) - rollReg.z = rollapply(reg.z, FUN=rollReg, fit.formula, width=24, by.column = FALSE, - align="right") - plot(rollReg.z, main=paste("24-month rolling regression estimates:", fundName, sep=" ")) - } else if (as.character(fit.macro$call["fit.method"]) == "DLS") { - decay.factor <- as.numeric(as.character(fit.macro$call["decay.factor"])) - t.length <- 24 - w <- rep(decay.factor^(t.length-1),t.length) - for (k in 2:t.length) { - w[k] = w[k-1]/decay.factor - } - w <- w/sum(w) - rollReg <- function(data.z, formula,w) { - coef(lm(formula,weight=w, data = as.data.frame(data.z))) - } - reg.z = zoo(fit.lm$model[-length(fit.lm$model)], as.Date(rownames(fit.lm$model))) - factorNames = colnames(fit.lm$model)[c(-1,-length(fit.lm$model))] - fit.formula = as.formula(paste(fundName,"~", paste(factorNames, collapse="+"), sep=" ")) - rollReg.z = rollapply(reg.z, FUN=rollReg, fit.formula,w, width=24, by.column = FALSE, - align="right") - plot(rollReg.z, 
main=paste("24-month rolling regression estimates:", fundName, sep=" ")) - } - }, - invisible() - ) - - - - } else { - which.plot<-which.plot[1] - - if(which.plot=='none') - which.plot<-menu(c("Fitted factor returns", - "R square", - "Variance of Residuals", - "FM Correlation", - "Factor Contributions to SD", - "Factor Contributions to ES", - "Factor Contributions to VaR"), - title="Factor Analytics Plot \nMake a plot selection (or 0 to exit):\n") - - variable.selection = fit.macro$variable.selection - manager.names = colnames(fit.macro$ret.assets) - factor.names = colnames(fit.macro$factors) - managers.df = cbind(fit.macro$ret.assets,fit.macro$factors) - cov.factors = var(fit.macro$factors) - n <- length(manager.names) - - switch(which.plot, - - "1L" = { - if (n >= max.show) { - cat(paste("numbers of assets are greater than",max.show,", show only first", - max.show,"assets",sep=" ")) - n <- max.show - } - par(mfrow=c(n/2,2)) - if (variable.selection == "lar" || variable.selection == "lasso") { - for (i in 1:n) { - alpha = fit.macro$alpha.vec[i] - beta = as.matrix(fit.macro$beta.mat[i,]) - fitted = alpha+as.matrix(fit.macro$factors)%*%beta - dataToPlot = cbind(fitted, na.omit(fit.macro$ret.assets[,i])) - colnames(dataToPlot) = c("Fitted","Actual") - main = paste("Factor Model fit for",manager.names[i],seq="") - chart.TimeSeries(dataToPlot,colorset = colorset, legend.loc = legend.loc,main=main) - } - } else { - for (i in 1:n) { - dataToPlot = cbind(fitted(fit.macro$asset.fit[[i]]), na.omit(fit.macro$ret.assets[,i])) - colnames(dataToPlot) = c("Fitted","Actual") - main = paste("Factor Model fit for",manager.names[i],seq="") - chart.TimeSeries(dataToPlot,colorset = colorset, legend.loc = legend.loc,main=main) - } - } - par(mfrow=c(1,1)) - }, - "2L" ={ - barplot(fit.macro$r2.vec) - }, - "3L" = { - barplot(fit.macro$residVars.vec) - }, - - "4L" = { - cov.fm<- factorModelCovariance(fit.macro$beta.mat,var(fit.macro$factors),fit.macro$residVars.vec) - cor.fm = cov2cor(cov.fm) - rownames(cor.fm) = colnames(cor.fm) - ord <- order(cor.fm[1,]) - ordered.cor.fm <- cor.fm[ord, ord] - plotcorr(ordered.cor.fm, col=cm.colors(11)[5*ordered.cor.fm + 6]) - }, - "5L" = { - factor.sd.decomp.list = list() - for (i in manager.names) { - factor.sd.decomp.list[[i]] = - factorModelSdDecomposition(fit.macro$beta.mat[i,], - cov.factors, fit.macro$residVars.vec[i]) - } - # function to extract contribution to sd from list - getCSD = function(x) { - x$cr.fm - } - # extract contributions to SD from list - cr.sd = sapply(factor.sd.decomp.list, getCSD) - rownames(cr.sd) = c(factor.names, "residual") - # create stacked barchart - barplot(cr.sd, main="Factor Contributions to SD", - legend.text=T, args.legend=list(x="topleft"), - col=c(1:50) ) - - }, - "6L"={ - factor.es.decomp.list = list() - if (variable.selection == "lar" || variable.selection == "lasso") { - - for (i in manager.names) { - idx = which(!is.na(managers.df[,i])) - alpha = fit.macro$alpha.vec[i] - beta = as.matrix(fit.macro$beta.mat[i,]) - fitted = alpha+as.matrix(fit.macro$factors)%*%beta - residual = fit.macro$ret.assets[,i]-fitted [TRUNCATED] To get the complete diff run: svnlook diff /svnroot/returnanalytics -r 2423 From noreply at r-forge.r-project.org Tue Jun 25 02:35:56 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Tue, 25 Jun 2013 02:35:56 +0200 (CEST) Subject: [Returnanalytics-commits] r2424 - pkg/PortfolioAnalytics/sandbox Message-ID: <20130625003556.785B51858E0@r-forge.r-project.org> Author: rossbennett34 Date: 
2013-06-25 02:35:55 +0200 (Tue, 25 Jun 2013) New Revision: 2424 Added: pkg/PortfolioAnalytics/sandbox/portfolio_vignette.pdf Log: adding precompiled portfolio_vignette pdf file Added: pkg/PortfolioAnalytics/sandbox/portfolio_vignette.pdf =================================================================== (Binary files differ) Property changes on: pkg/PortfolioAnalytics/sandbox/portfolio_vignette.pdf ___________________________________________________________________ Added: svn:mime-type + application/octet-stream From noreply at r-forge.r-project.org Tue Jun 25 04:59:58 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Tue, 25 Jun 2013 04:59:58 +0200 (CEST) Subject: [Returnanalytics-commits] r2425 - pkg/PortfolioAnalytics/R Message-ID: <20130625025959.14BE3185614@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-25 04:59:57 +0200 (Tue, 25 Jun 2013) New Revision: 2425 Modified: pkg/PortfolioAnalytics/R/constraintsFUN.R Log: adding function to compute diversification for implementing as a constraint Modified: pkg/PortfolioAnalytics/R/constraintsFUN.R =================================================================== --- pkg/PortfolioAnalytics/R/constraintsFUN.R 2013-06-25 00:35:55 UTC (rev 2424) +++ pkg/PortfolioAnalytics/R/constraintsFUN.R 2013-06-25 02:59:57 UTC (rev 2425) @@ -62,3 +62,16 @@ # the group constraints? Or another way? return(weights) } + +#' Function to compute diversification as a constraint +#' +#' Diversification is defined as 1 minus the sum of the squared weights +#' diversification <- 1 - sum(w^2) +#' +#' @param weights vector of asset weights +#' @author Ross Bennett +#' @export +diversification <- function(weights){ + div <- 1 - sum(weights^2) + return(div) +} From noreply at r-forge.r-project.org Tue Jun 25 05:19:25 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Tue, 25 Jun 2013 05:19:25 +0200 (CEST) Subject: [Returnanalytics-commits] r2426 - pkg/PortfolioAnalytics/R Message-ID: <20130625031926.4891B1851A9@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-25 05:19:25 +0200 (Tue, 25 Jun 2013) New Revision: 2426 Modified: pkg/PortfolioAnalytics/R/constraints.R Log: adding functionality to specify diversification constraint Modified: pkg/PortfolioAnalytics/R/constraints.R =================================================================== --- pkg/PortfolioAnalytics/R/constraints.R 2013-06-25 02:59:57 UTC (rev 2425) +++ pkg/PortfolioAnalytics/R/constraints.R 2013-06-25 03:19:25 UTC (rev 2426) @@ -221,6 +221,11 @@ enabled=enabled, ...=...) }, + # Diversification constraint + diversification = {tmp_constraint <- diversification_constraint(type=type, + enabled=enabled, + ...=...) + }, # Do nothing and return the portfolio object if type is NULL null = {return(portfolio)} ) @@ -488,6 +493,22 @@ return(Constraint) } +#' constructor for diversification_constraint +#' +#' This function is called by add.constraint when type="diversification" is specified, \code{\link{add.constraint}} +#' +#' @param type character type of the constraint +#' @param div.target diversification target value +#' @param enabled TRUE/FALSE +#' @param \dots any other passthru parameters to specify box and/or group constraints +#' @author Ross Bennett +#' @export +diversification_constraint <- function(type, div.target, enabled=FALSE, ...){ + Constraint <- constraint_v2(type, enabled=enabled, ...) 
+  Constraint$div <- div.target
+  return(Constraint)
+}
+
 #' function for updating constraints, not well tested, may be broken
 #'
 #' can we use the generic update.default function?

From noreply at r-forge.r-project.org  Tue Jun 25 05:44:37 2013
From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org)
Date: Tue, 25 Jun 2013 05:44:37 +0200 (CEST)
Subject: [Returnanalytics-commits] r2427 - pkg/PortfolioAnalytics/R
Message-ID: <20130625034437.CD611185897@r-forge.r-project.org>

Author: rossbennett34
Date: 2013-06-25 05:44:36 +0200 (Tue, 25 Jun 2013)
New Revision: 2427

Modified:
   pkg/PortfolioAnalytics/R/constraints.R
Log:
adding functionality to specify volatility constraint

Modified: pkg/PortfolioAnalytics/R/constraints.R
===================================================================
--- pkg/PortfolioAnalytics/R/constraints.R	2013-06-25 03:19:25 UTC (rev 2426)
+++ pkg/PortfolioAnalytics/R/constraints.R	2013-06-25 03:44:36 UTC (rev 2427)
@@ -226,6 +226,11 @@
                                                  enabled=enabled,
                                                  ...=...)
            },
+           # Volatility constraint
+           volatility = {tmp_constraint <- volatility_constraint(type=type,
+                                                                 enabled=enabled,
+                                                                 ...=...)
+           },
            # Do nothing and return the portfolio object if type is NULL
            null = {return(portfolio)}
            )
@@ -509,6 +514,26 @@
   return(Constraint)
 }

+#' constructor for volatility_constraint
+#'
+#' This function is called by add.constraint when type="volatility" is specified; see \code{\link{add.constraint}}.
+#' If the portfolio standard deviation is less than min.vol, a penalty is added to maximize it.
+#' If the portfolio standard deviation is greater than max.vol, a penalty is added to minimize it.
+#'
+#' @param type character type of the constraint
+#' @param min.vol minimum volatility constraint
+#' @param max.vol maximum volatility constraint
+#' @param enabled TRUE/FALSE
+#' @param \dots any other passthru parameters to specify box and/or group constraints
+#' @author Ross Bennett
+#' @export
+volatility_constraint <- function(type, min.vol, max.vol, enabled=FALSE, ...){
+  Constraint <- constraint_v2(type, enabled=enabled, ...)
+  Constraint$min.vol <- min.vol
+  Constraint$max.vol <- max.vol
+  return(Constraint)
+}
+
 #' function for updating constraints, not well tested, may be broken
 #'
 #' can we use the generic update.default function?

From noreply at r-forge.r-project.org  Tue Jun 25 10:14:14 2013
From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org)
Date: Tue, 25 Jun 2013 10:14:14 +0200 (CEST)
Subject: [Returnanalytics-commits] r2428 - in pkg/Meucci: .
R demo man Message-ID: <20130625081414.66C311856E7@r-forge.r-project.org> Author: xavierv Date: 2013-06-25 10:14:13 +0200 (Tue, 25 Jun 2013) New Revision: 2428 Added: pkg/Meucci/R/ConvertChangeInYield2Price.R pkg/Meucci/demo/S_AutocorrelatedProcess.R pkg/Meucci/demo/S_BondProjectionPricingNormal.R pkg/Meucci/man/ConvertChangeInYield2Price.Rd Modified: pkg/Meucci/DESCRIPTION pkg/Meucci/NAMESPACE Log: - added two scripts from chapter 3 and its related functions Modified: pkg/Meucci/DESCRIPTION =================================================================== --- pkg/Meucci/DESCRIPTION 2013-06-25 03:44:36 UTC (rev 2427) +++ pkg/Meucci/DESCRIPTION 2013-06-25 08:14:13 UTC (rev 2428) @@ -67,3 +67,4 @@ 'LognormalCopulaPdf.R' 'NormalCopulaPdf.R' 'StudentTCopulaPdf.R' + 'ConvertChangeInYield2Price.R' Modified: pkg/Meucci/NAMESPACE =================================================================== --- pkg/Meucci/NAMESPACE 2013-06-25 03:44:36 UTC (rev 2427) +++ pkg/Meucci/NAMESPACE 2013-06-25 08:14:13 UTC (rev 2428) @@ -4,6 +4,7 @@ export(ComputeMoments) export(ComputeMVE) export(CondProbViews) +export(ConvertChangeInYield2Price) export(Cumul2Raw) export(DetectOutliersViaMVE) export(EntropyProg) Added: pkg/Meucci/R/ConvertChangeInYield2Price.R =================================================================== --- pkg/Meucci/R/ConvertChangeInYield2Price.R (rev 0) +++ pkg/Meucci/R/ConvertChangeInYield2Price.R 2013-06-25 08:14:13 UTC (rev 2428) @@ -0,0 +1,30 @@ +#' Convert change in yield-to-maturity to price for fixed-income securities, as described in +#' A. Meucci "Risk and Asset Allocation", Springer, 2005 +#' +#' @param Exp_DY : [vector] (N x 1) expected value of change in yield to maturity +#' @param Cov_DY : [matrix] (N x N) covariance of change in yield to maturity +#' @param Times2Mat : [scalar] time to maturity +#' @param CurrentPrices : [vector] (N x 1) current prices +#' +#' @return Exp_Prices : [vector] (N x 1) expected prices +#' @return Cov_Prices : [matrix] (N x N) covariance of prices +#' +#' @references +#' \url{http://} +#' See (6.77)-(6.79) in "Risk and Asset Allocation"-Springer (2005), by A. Meucci +#' See Meucci's script for "ConvertChangeInYield2Price.m" +#' +#' @author Xavier Valls \email{flamejat@@gmail.com} +#' @export + + +ConvertChangeInYield2Price = function( Exp_DY, Cov_DY, Times2Mat, CurrentPrices ) +{ + Mu = log( CurrentPrices ) - Times2Mat * Exp_DY; + Sigma = diag( Times2Mat^2 ) %*% Cov_DY; + + Exp_Prices = exp(Mu + (1/2) * diag( Sigma )); + Cov_Prices = exp(Mu + (1/2) * diag( Sigma )) %*% t(exp(Mu + (1/2) * diag(Sigma))) * ( exp( Sigma ) - 1); + + return( list( Exp_Prices = Exp_Prices, Cov_Prices = Cov_Prices ) ); +} \ No newline at end of file Added: pkg/Meucci/demo/S_AutocorrelatedProcess.R =================================================================== --- pkg/Meucci/demo/S_AutocorrelatedProcess.R (rev 0) +++ pkg/Meucci/demo/S_AutocorrelatedProcess.R 2013-06-25 08:14:13 UTC (rev 2428) @@ -0,0 +1,35 @@ + +#' This script simulates a Ornstein-Uhlenbeck AR(1) process, as described in A. Meucci, " +#' Risk and Asset Allocation", Springer, 2005, Chapter 3. 
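# A hedged aside on the discretization used in the script below: sampling the
# OU process at interval tau gives the exact AR(1) recursion
#     x[t+1] = m + exp(-theta * tau) * (x[t] - m) + eps[t],
# with Var(eps) = sigma^2 / (2 * theta) * (1 - exp(-2 * theta * tau)),
# which is where the `var` line in the script comes from. A quick check one
# could run after the simulation (variable names as in the script):
# mean(x)                # should approach m = 0.05 after a burn-in
# var(x)                 # should approach the stationary variance
# sigma^2 / (2 * theta)  # theoretical stationary variance of the OU process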
+#' +#' @references +#' \url{http://} +#' See Meucci's script for "S_AutocorrelatedProcess.m" +#' +#' @author Xavier Valls \email{flamejat@@gmail.com} +#' @export + +################################################################################################################## +### Input parameters +theta = 0.1; # reversion speed +m = 0.05; # long term mean +sigma = 0.01; # volatility +T = 10^4; # number of steps +tau = 0.01; # discrete time interval + +################################################################################################################## +### Determine parameters +var = sigma^2 / 2 / theta * ( 1 - exp( -2 * theta * tau ) ); +sd = sqrt(var); +eps = rnorm( T, 0, sd ); + +x = matrix( NaN, T, 1); +x[ 1 ] = 0; + +for( t in 1 : (T - 1) ) +{ + x[ t + 1 ] = m + exp( -theta * tau ) * ( x[ t ] - m ) + eps[ t ]; +} + +dev.new() +plot( x, type="l", main = "AR(1) process vs. time" ); Added: pkg/Meucci/demo/S_BondProjectionPricingNormal.R =================================================================== --- pkg/Meucci/demo/S_BondProjectionPricingNormal.R (rev 0) +++ pkg/Meucci/demo/S_BondProjectionPricingNormal.R 2013-06-25 08:14:13 UTC (rev 2428) @@ -0,0 +1,61 @@ +################################################################################################################# +### This script projects the distribution of the market invariants for the bond markets +### (i.e. the changes in yield to maturity) from the estimation interval to the investment horizon +### Then it computes the distribution of prices at the investment horizon +### == Chapter 3 == +################################################################################################################## +clc; clear; close all; +run ../../LIBRARY/InitializeLibrary.m; # load library of functions + +################################################################################################################## +### Inputs +tau = 1/52; # time to horizon expressed in years +tau_tilde = 1/52; # estimation period expressed in years + +FlatCurve = 0.04; +TimesToMat = c( 1, 5, 10, 52, 520 ) / 52; # time to maturity of selected bonds expressed in years + +# parameters of the distribution of the changes in yield to maturity +u_minus_tau = TimesToMat - tau; +mus = 0 * u_minus_tau; +sigmas = ( 20 + 5 / 4 * u_minus_tau ) / 10000; + +nSim = 100000; + +################################################################################################################## +### Bond market projection to horizon and pricing +BondCurrent_Prices_Shifted = exp( -FlatCurve * u_minus_tau ); +BondCurrent_Prices = exp( -FlatCurve * TimesToMat ); + +# project bond market to horizon +N = length( TimesToMat ); # number of bonds +U = runif( nSim ); +BondMarket_Scenarios = matrix( 0, nSim, N ); +for( n in 1 : N ) +{ + # generate co-dependent changes in yield-to-maturity + DY_Scenarios = qnorm( U, mus[ n ] * tau / tau_tilde, sigmas[ n ] * sqrt( tau / tau_tilde ) ); + + # compute the horizon prices, (3.81) in "Risk and Asset Allocation" - Springer + X = -u_minus_tau[ n ] * DY_Scenarios; + BondMarket_Scenarios[ , n ] = BondCurrent_Prices_Shifted[ n ] * exp( X ); +} + +################################################################################################################## +### MV inputs - analytical +Exp_Hrzn_DY_Hat = mus * tau / tau_tilde; +SDev_Hrzn_DY_Hat = sigmas * sqrt( tau / tau_tilde ); +Corr_Hrzn_DY_Hat = matrix( 1, N, N ); # full co-dependence +Cov_Hrzn_DY_Hat = diag( SDev_Hrzn_DY_Hat ) %*% Corr_Hrzn_DY_Hat %*% diag( 
SDev_Hrzn_DY_Hat ); +Bond = ConvertChangeInYield2Price( Exp_Hrzn_DY_Hat, Cov_Hrzn_DY_Hat, u_minus_tau, BondCurrent_Prices_Shifted ); +print( Bond$Exp_Prices ); +print( Bond$Cov_Prices ); + +################################################################################################################## +### MV inputs - numerical +BondExp_Prices = t( apply(BondMarket_Scenarios, 2, mean) ); +BondCov_Prices = cov( BondMarket_Scenarios ); +print( BondExp_Prices ); +print( BondCov_Prices ); + +### EOF \ No newline at end of file Added: pkg/Meucci/man/ConvertChangeInYield2Price.Rd =================================================================== --- pkg/Meucci/man/ConvertChangeInYield2Price.Rd (rev 0) +++ pkg/Meucci/man/ConvertChangeInYield2Price.Rd 2013-06-25 08:14:13 UTC (rev 2428) @@ -0,0 +1,38 @@ +\name{ConvertChangeInYield2Price} +\alias{ConvertChangeInYield2Price} +\title{Convert change in yield-to-maturity to price for fixed-income securities, as described in +A. Meucci "Risk and Asset Allocation", Springer, 2005} +\usage{ + ConvertChangeInYield2Price(Exp_DY, Cov_DY, Times2Mat, + CurrentPrices) +} +\arguments{ + \item{Exp_DY}{: [vector] (N x 1) expected value of change + in yield to maturity} + + \item{Cov_DY}{: [matrix] (N x N) covariance of change in + yield to maturity} + + \item{Times2Mat}{: [scalar] time to maturity} + + \item{CurrentPrices}{: [vector] (N x 1) current prices} +} +\value{ + Exp_Prices : [vector] (N x 1) expected prices + + Cov_Prices : [matrix] (N x N) covariance of prices +} +\description{ + Convert change in yield-to-maturity to price for + fixed-income securities, as described in A. Meucci "Risk + and Asset Allocation", Springer, 2005 +} +\author{ + Xavier Valls \email{flamejat at gmail.com} +} +\references{ + \url{http://} See (6.77)-(6.79) in "Risk and Asset + Allocation"-Springer (2005), by A. Meucci See Meucci's + script for "ConvertChangeInYield2Price.m" +} + From noreply at r-forge.r-project.org Tue Jun 25 10:36:36 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Tue, 25 Jun 2013 10:36:36 +0200 (CEST) Subject: [Returnanalytics-commits] r2429 - pkg/Meucci/demo Message-ID: <20130625083636.6DAEE184685@r-forge.r-project.org> Author: xavierv Date: 2013-06-25 10:36:35 +0200 (Tue, 25 Jun 2013) New Revision: 2429 Modified: pkg/Meucci/demo/S_BivariateSample.R pkg/Meucci/demo/S_BondProjectionPricingNormal.R pkg/Meucci/demo/S_OrderStatisticsPdfStudentT.R Log: - Documentation fix Modified: pkg/Meucci/demo/S_BivariateSample.R =================================================================== --- pkg/Meucci/demo/S_BivariateSample.R 2013-06-25 08:14:13 UTC (rev 2428) +++ pkg/Meucci/demo/S_BivariateSample.R 2013-06-25 08:36:35 UTC (rev 2429) @@ -1,3 +1,6 @@ +library(mvtnorm); +library(latticeExtra); + #' This script generates draws from a bivariate distribution with different marginals, #' as described in A. Meucci, "Risk and Asset Allocation", Springer, 2005, Chapter 2. 
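# A hedged aside on why the analytical and Monte Carlo bond moments printed
# above agree: the pricing step makes log prices normal, so
# ConvertChangeInYield2Price relies on the lognormal moment identity
# E[P] = exp(mu + s2/2). A toy cross-check with invented numbers:
mu <- log(0.96); s2 <- 4e-04
exp(mu + s2 / 2)                       # analytical expected price
mean(exp(rnorm(1e5, mu, sqrt(s2))))    # Monte Carlo estimate; should agree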
#' @@ -3,12 +6,9 @@ #' @references #' \url{http://} -#' See Meucci's script for "S_AnalyzeLognormalCorrelation.m" +#' See Meucci's script for "S_BivariateSample.m" #' #' @author Xavier Valls \email{flamejat@@gmail.com} #' @export -library(mvtnorm); -library(latticeExtra); - ################################################################################################################### ### input parameters Modified: pkg/Meucci/demo/S_BondProjectionPricingNormal.R =================================================================== --- pkg/Meucci/demo/S_BondProjectionPricingNormal.R 2013-06-25 08:14:13 UTC (rev 2428) +++ pkg/Meucci/demo/S_BondProjectionPricingNormal.R 2013-06-25 08:36:35 UTC (rev 2429) @@ -1,12 +1,16 @@ -################################################################################################################# -### This script projects the distribution of the market invariants for the bond markets -### (i.e. the changes in yield to maturity) from the estimation interval to the investment horizon -### Then it computes the distribution of prices at the investment horizon -### == Chapter 3 == -################################################################################################################## -clc; clear; close all; -run ../../LIBRARY/InitializeLibrary.m; # load library of functions +#'This script projects the distribution of the market invariants for the bond markets +#'(i.e. the changes in yield to maturity) from the estimation interval to the investment horizon +#'Then it computes the distribution of prices at the investment horizon as described in A. Meucci, +#'"Risk and Asset Allocation", Springer, 2005, Chapter 3. +#' +#' @references +#' \url{http://} +#' See Meucci's script for "S_BondProjectionPricingNormal.m" +#' +#' @author Xavier Valls \email{flamejat@@gmail.com} +#' @export + ################################################################################################################## ### Inputs tau = 1/52; # time to horizon expressed in years Modified: pkg/Meucci/demo/S_OrderStatisticsPdfStudentT.R =================================================================== --- pkg/Meucci/demo/S_OrderStatisticsPdfStudentT.R 2013-06-25 08:14:13 UTC (rev 2428) +++ pkg/Meucci/demo/S_OrderStatisticsPdfStudentT.R 2013-06-25 08:36:35 UTC (rev 2429) @@ -5,7 +5,7 @@ #' #' @references #' \url{http://} -#' See Meucci's script for "S_OrderStatisticsPdfLognormal.m" +#' See Meucci's script for "S_OrderStatisticsPdfStudentT.m" #' #' @author Xavier Valls \email{flamejat@@gmail.com} #' @export From noreply at r-forge.r-project.org Tue Jun 25 12:05:33 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Tue, 25 Jun 2013 12:05:33 +0200 (CEST) Subject: [Returnanalytics-commits] r2430 - in pkg/Meucci: . 
R demo man Message-ID: <20130625100533.739911806EB@r-forge.r-project.org> Author: xavierv Date: 2013-06-25 12:05:32 +0200 (Tue, 25 Jun 2013) New Revision: 2430 Added: pkg/Meucci/R/ProjectionStudentT.R pkg/Meucci/demo/S_BondProjectionPricingStudentT.R pkg/Meucci/man/ProjectionStudentT.Rd Modified: pkg/Meucci/DESCRIPTION pkg/Meucci/NAMESPACE Log: - added demo file S_BondProjectionPricingStudentT and ProjectionStudentT function Modified: pkg/Meucci/DESCRIPTION =================================================================== --- pkg/Meucci/DESCRIPTION 2013-06-25 08:36:35 UTC (rev 2429) +++ pkg/Meucci/DESCRIPTION 2013-06-25 10:05:32 UTC (rev 2430) @@ -68,3 +68,4 @@ 'NormalCopulaPdf.R' 'StudentTCopulaPdf.R' 'ConvertChangeInYield2Price.R' + 'ProjectionStudentT.R' Modified: pkg/Meucci/NAMESPACE =================================================================== --- pkg/Meucci/NAMESPACE 2013-06-25 08:36:35 UTC (rev 2429) +++ pkg/Meucci/NAMESPACE 2013-06-25 10:05:32 UTC (rev 2430) @@ -22,6 +22,7 @@ export(PanicCopula) export(PartialConfidencePosterior) export(PlotDistributions) +export(ProjectionStudentT) export(Raw2Central) export(Raw2Cumul) export(RejectOutlier) Added: pkg/Meucci/R/ProjectionStudentT.R =================================================================== --- pkg/Meucci/R/ProjectionStudentT.R (rev 0) +++ pkg/Meucci/R/ProjectionStudentT.R 2013-06-25 10:05:32 UTC (rev 2430) @@ -0,0 +1,49 @@ +#' Perform the horizon projection of a Student t invariant, as described in +#' A. Meucci "Risk and Asset Allocation", Springer, 2005 +#' +#' @param nu : [scalar] degree of freedom +#' @param s : [scalar] scatter parameter +#' @param m : [scalar] location parameter +#' @param T : [scalar] multiple of the estimation period to the invesment horizon +#' +#' @return x_Hor : [scalar] +#' @return f_Hor : [scalar] +#' @return F_Hor : [scalar] +#' +#' @references +#' \url{http://} +#' See Meucci's script for "ProjectionStudentT.m" +#' +#' @author Xavier Valls \email{flamejat@@gmail.com} +#' @export + +ProjectionStudentT = function(nu, m, s, T) +{ + # set up grid + N = 2 ^ 14; # coarseness level + a = -qnorm( 10^(-15), 0, sqrt( T ) ); + h = 2 * a / N; + Xi = seq(-a+h, a, h ); + + # discretized initial pdf (standardized) + f = 1 / h * ( pt( Xi + h/2, nu ) - pt( Xi - h/2, nu ) ); + f[ N ] = 1 / h *( pt(-a + h/2, nu ) - pt( -a, nu ) + pt( a, nu )- pt( a-h/2, nu ) ); + + # discretized characteristic function + Phi = fft(f); + + # projection of discretized characteristic function + Signs = ( -1 )^((0:(N-1)) * ( T - 1)); + Phi_T = h ^ ( T - 1 ) * Signs * (Phi ^ T); + + # horizon discretized pdf (standardized) + f_T = as.numeric( ifft( Phi_T ) ); + + # horizon discretized pdf and cdf (non-standardized) + x_Hor = m * T + s * Xi; + f_Hor = f_T / s; + F_Hor = h * cumsum( f_Hor * s ); + + return( list( x = x_Hor, f = f_Hor, F = F_Hor ) ); + +} \ No newline at end of file Added: pkg/Meucci/demo/S_BondProjectionPricingStudentT.R =================================================================== --- pkg/Meucci/demo/S_BondProjectionPricingStudentT.R (rev 0) +++ pkg/Meucci/demo/S_BondProjectionPricingStudentT.R 2013-06-25 10:05:32 UTC (rev 2430) @@ -0,0 +1,59 @@ +#'This script projects the distribution of the market invariants for the bond markets +#'(i.e. the changes in yield to maturity) from the estimation interval (Student t assumption) +#'to the investment horizon. Then it computes the distribution of prices at the investment +#'horizon as described in A. 
Meucci,"Risk and Asset Allocation", Springer, 2005, Chapter 3. +#' +#' @references +#' \url{http://} +#' See Meucci's script for "S_BondProjectionPricingStudentT.m" +#' +#' @author Xavier Valls \email{flamejat@@gmail.com} +#' @export + +################################################################################################################## +### Inputs + +tau = 4/52; # time to horizon expressed in years +tau_tilde = 1/52; # estimation period expressed in years + +FlatCurve = 0.04; +TimesToMat = c( 4, 5, 10, 52, 520 ) / 52; # time to maturity of selected bonds expressed in years + +# determine the parameters of the distribution of the invariants (changes in yield to maturity) +Periods = tau / tau_tilde; # number of estimation periods until the investment horizon +u_minus_tau = TimesToMat - tau; + +nu = 8; +mus = 0 * u_minus_tau; +sigmas = ( 20 + 5 / 4 * u_minus_tau ) / 10000; +Num_Scenarios = 100000; + +################################################################################################################## +### Projection and pricing +BondCurrent_Prices_Shifted = exp(-FlatCurve * u_minus_tau); +BondCurrent_Prices = exp(-FlatCurve * TimesToMat); + +# generate common source of randomness +U = runif( Num_Scenarios ); + +N = length( TimesToMat ); # number of bonds +par( mfrow = c( N,1 )); +for( n in 1 : N ) +{ + # project bond market to horizon + Projection = ProjectionStudentT( nu, mus[ n ], sigmas[ n ], Periods); + + # generate co-dependent changes in yield-to-maturity + DY_Scenarios = interp1( Projection$F, Projection$x, U, method = "linear"); + + # compute the horizon prices, (3.81) in "Risk and Asset Allocation" - Springer + X = -u_minus_tau[ n ] * DY_Scenarios; + Z = BondCurrent_Prices_Shifted[ n ] * exp(X); + + # compute and plot linear returns + L = Z / BondCurrent_Prices[ n ] - 1; + + #for n=1 histogram represents the only bar (not empty) + hist(L, round(10 * log(Num_Scenarios)), xlab = paste( "Linear returns for bond", n ), main = "" ); + +} Added: pkg/Meucci/man/ProjectionStudentT.Rd =================================================================== --- pkg/Meucci/man/ProjectionStudentT.Rd (rev 0) +++ pkg/Meucci/man/ProjectionStudentT.Rd 2013-06-25 10:05:32 UTC (rev 2430) @@ -0,0 +1,37 @@ +\name{ProjectionStudentT} +\alias{ProjectionStudentT} +\title{Perform the horizon projection of a Student t invariant, as described in +A. Meucci "Risk and Asset Allocation", Springer, 2005} +\usage{ + ProjectionStudentT(nu, m, s, T) +} +\arguments{ + \item{nu}{: [scalar] degree of freedom} + + \item{s}{: [scalar] scatter parameter} + + \item{m}{: [scalar] location parameter} + + \item{T}{: [scalar] multiple of the estimation period to + the invesment horizon} +} +\value{ + x_Hor : [scalar] + + f_Hor : [scalar] + + F_Hor : [scalar] +} +\description{ + Perform the horizon projection of a Student t invariant, + as described in A. Meucci "Risk and Asset Allocation", + Springer, 2005 +} +\author{ + Xavier Valls \email{flamejat at gmail.com} +} +\references{ + \url{http://} See Meucci's script for + "ProjectionStudentT.m" +} + From noreply at r-forge.r-project.org Tue Jun 25 14:00:34 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Tue, 25 Jun 2013 14:00:34 +0200 (CEST) Subject: [Returnanalytics-commits] r2431 - in pkg/PortfolioAnalytics: . 
man Message-ID: <20130625120035.46C0A1851F6@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-25 14:00:34 +0200 (Tue, 25 Jun 2013) New Revision: 2431 Added: pkg/PortfolioAnalytics/man/diversification.Rd pkg/PortfolioAnalytics/man/diversification_constraint.Rd pkg/PortfolioAnalytics/man/volatility_constraint.Rd Modified: pkg/PortfolioAnalytics/NAMESPACE Log: adding documentation for diversification and volatility constraints Modified: pkg/PortfolioAnalytics/NAMESPACE =================================================================== --- pkg/PortfolioAnalytics/NAMESPACE 2013-06-25 10:05:32 UTC (rev 2430) +++ pkg/PortfolioAnalytics/NAMESPACE 2013-06-25 12:00:34 UTC (rev 2431) @@ -14,6 +14,8 @@ export(constraint_ROI) export(constraint_v2) export(constraint) +export(diversification_constraint) +export(diversification) export(extract.efficient.frontier) export(extractStats.optimize.portfolio.DEoptim) export(extractStats.optimize.portfolio.parallel) @@ -47,4 +49,5 @@ export(turnover_constraint) export(turnover_objective) export(update.constraint) +export(volatility_constraint) export(weight_sum_constraint) Added: pkg/PortfolioAnalytics/man/diversification.Rd =================================================================== --- pkg/PortfolioAnalytics/man/diversification.Rd (rev 0) +++ pkg/PortfolioAnalytics/man/diversification.Rd 2013-06-25 12:00:34 UTC (rev 2431) @@ -0,0 +1,17 @@ +\name{diversification} +\alias{diversification} +\title{Function to compute diversification as a constraint} +\usage{ + diversification(weights) +} +\arguments{ + \item{weights}{vector of asset weights} +} +\description{ + Diversification is defined as 1 minus the sum of the + squared weights diversification <- 1 - sum(w^2) +} +\author{ + Ross Bennett +} + Added: pkg/PortfolioAnalytics/man/diversification_constraint.Rd =================================================================== --- pkg/PortfolioAnalytics/man/diversification_constraint.Rd (rev 0) +++ pkg/PortfolioAnalytics/man/diversification_constraint.Rd 2013-06-25 12:00:34 UTC (rev 2431) @@ -0,0 +1,26 @@ +\name{diversification_constraint} +\alias{diversification_constraint} +\title{constructor for diversification_constraint} +\usage{ + diversification_constraint(type, div.target, + enabled = FALSE, ...) +} +\arguments{ + \item{type}{character type of the constraint} + + \item{div.target}{diversification target value} + + \item{enabled}{TRUE/FALSE} + + \item{\dots}{any other passthru parameters to specify box + and/or group constraints} +} +\description{ + This function is called by add.constraint when + type="diversification" is specified, + \code{\link{add.constraint}} +} +\author{ + Ross Bennett +} + Added: pkg/PortfolioAnalytics/man/volatility_constraint.Rd =================================================================== --- pkg/PortfolioAnalytics/man/volatility_constraint.Rd (rev 0) +++ pkg/PortfolioAnalytics/man/volatility_constraint.Rd 2013-06-25 12:00:34 UTC (rev 2431) @@ -0,0 +1,31 @@ +\name{volatility_constraint} +\alias{volatility_constraint} +\title{constructor for volatility_constraint} +\usage{ + volatility_constraint(type, min.vol, max.vol, + enabled = FALSE, ...) 
+}
+\arguments{
+  \item{type}{character type of the constraint}
+
+  \item{min.vol}{minimum volatility constraint}
+
+  \item{max.vol}{maximum volatility constraint}
+
+  \item{enabled}{TRUE/FALSE}
+
+  \item{\dots}{any other passthru parameters to specify box
+  and/or group constraints}
+}
+\description{
+  This function is called by add.constraint when
+  type="volatility" is specified; see
+  \code{\link{add.constraint}}. If the portfolio standard
+  deviation is less than min.vol, a penalty is added to
+  maximize it. If the portfolio standard deviation is
+  greater than max.vol, a penalty is added to minimize it.
+}
+\author{
+  Ross Bennett
+}
+

From noreply at r-forge.r-project.org  Tue Jun 25 16:19:19 2013
From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org)
Date: Tue, 25 Jun 2013 16:19:19 +0200 (CEST)
Subject: [Returnanalytics-commits] r2432 - pkg/PortfolioAnalytics/sandbox
Message-ID: <20130625141919.F2DED183D92@r-forge.r-project.org>

Author: rossbennett34
Date: 2013-06-25 16:19:19 +0200 (Tue, 25 Jun 2013)
New Revision: 2432

Added:
   pkg/PortfolioAnalytics/sandbox/constraints_vignette.Rnw
Log:
adding Sweave file for constraints vignette to demonstrate the supported constraints

Added: pkg/PortfolioAnalytics/sandbox/constraints_vignette.Rnw
===================================================================
--- pkg/PortfolioAnalytics/sandbox/constraints_vignette.Rnw	                        (rev 0)
+++ pkg/PortfolioAnalytics/sandbox/constraints_vignette.Rnw	2013-06-25 14:19:19 UTC (rev 2432)
@@ -0,0 +1,123 @@
+\documentclass[12pt,letterpaper,english]{article}
+\usepackage[OT1]{fontenc}
+\usepackage{Sweave}
+\usepackage{verbatim}
+\usepackage{Rd}
+
+\begin{document}
+
+\title{PortfolioAnalytics Constraints Functionality}
+\author{Ross Bennett}
+
+\maketitle
+
+\begin{abstract}
+The purpose of this vignette is to demonstrate the functionality of constraints in PortfolioAnalytics.
+\end{abstract}
+
+\tableofcontents
+
+\section{Constraints}
+The following constraints are currently supported:
+\begin{itemize}
+  \item[weight\_sum] The weight\_sum constraint is used to constrain the sum of weights. Common use cases are to apply a full investment, dollar neutral, or leverage constraint.
+  \item[box] Box constraints are used to constrain the minimum and maximum weights of assets. Standard box constraints with a single upper bound and a single lower bound, as well as per asset inequality constraints on weights, can be specified. A special case of box constraints is a long only constraint, where the minimum weight is 0 and the maximum weight is 1.
+  \item[group] Group constraints are used to specify the minimum and maximum weights of groups of assets. A common use case is to group assets by market cap or style. Note that group constraints are currently implemented only for the ROI solvers. Implementing the group constraints for other solvers should be possible in \code{constrained\_objective} using the \code{constrained\_group\_tmp} function.
+  \item[turnover] Turnover can be specified as a constraint, but is not currently implemented. A turnover constraint may not be implementable in the ROI glpk solver. It is implemented for the ROI quadprog solver in sandbox/testing\_turnover.gmv.R. Currently, turnover can be implemented as an objective function; the function has been added to the file \code{R/objectiveFUN.R}.
+  \item[diversification] Diversification can be specified as a constraint, but is not currently implemented in solvers.
This can be done in the mapping function in the next part or implemented inside \code{constrained\_objective}. Currently the user can only specify a diversification target value. The function will try to maximize diversification, penalizing a value below the target. + \item[volatility] Volatility can be specified as a constraint, but it is not currently implemented. This can be done in the mapping function in the next part or implemented inside \code{constrained\_objective}. See \code{constrained\_objective} for how volatility is handled as an objective. Currently the user can specify a minimum volatility and a maximum volatility. We'll penalize if the minimum or maximum is violated. +\end{itemize} + +Constraint TODO +\begin{itemize} + \item[Integer] Integer constraint for cardinality max position constraint. This may be able to be implemented in \code{randomize\_portfolio} by generating portfolios with the number of non-zero weights equal to \code{max.pos} and then fill in weights of zero so the length of the weights vector is equal to the number of assets. Then scramble the weights vector. The number of non-zero weights could also be random so that the number of non-zero weights is not always equal to \code{max.pos}. This could be implemented in the DEoptim solver with the mapping function. This might be do-able in Rglpk for max return and min ETL. Rglpk supports mixed integer types, but solve.QP does not. May be able to use brance-and-bound technique using solve.QP. + \item[Quadratic] Need more help on this. Note that the ROI solvers quadprog and glpk do not support quadratic constraints, they only support linear constraints. The ROI pluging for cplex does support quadratic constraints, but this is a commercial product. What are some use case examples? + \item[Diversification] Case of quadratic constraint. Could be implemented inside \code{constrained\_objective}. + \item[Volatility] See email from Peter Carl. Should be able to specify this as a constraint and then implement inside \code{constrained\_objective} +\end{itemize} + +<<>>= +library(PortfolioAnalytics) + +data(edhec) +ret <- edhec[, 1:4] +fund.names <- colnames(ret) + +pspec <- portfolio.spec(assets=fund.names) +@ + +Add full investment constraint +<<>>= +pspec <- add.constraint(portfolio=pspec, + type="weight_sum", + min_sum=1, + max_sum=1, + enabled=TRUE) +pspec$constraints[[1]] +@ + +Add box constraints for long only +<<>>= +pspec <- add.constraint(portfolio=pspec, + type="box", + min=0, + max=1, + enabled=TRUE) +pspec$constraints[[2]] +@ + +Update the box constraints to specify per asset weight constraints. +<<>>= +pspec <- add.constraint(portfolio=pspec, + type="box", + min=c(0.05, 0.02, 0.04, 0.06), + max=c(0.35, 0.55, 0.55, 0.65), + enabled=TRUE, + indexnum=2) +pspec$constraints[[2]] +@ + +Add group constraints +The assets are grouped in 2 groups of 2 assets +The asset weights of the first group must be greater than or equal to 0.15 and less than or equal to 0.65. +The asset weights of the second group must be greater than or equal to 0.25 and less than or equal to 0.55. +<<>>= +pspec <- add.constraint(portfolio=pspec, + type="group", + groups=c(2, 2), + group_min=c(0.15, 0.25), + group_max=c(0.65, 0.55), + enabled=TRUE) +pspec$constraints[[3]] +@ + +Add turnover constraint. We'll penalize if \code{max.turnover} value is exceeded. +<<>>= +pspec <- add.constraint(portfolio=pspec, + type="turnover", + max.turnover=0.6, + enabled=TRUE) +pspec$constraints[[4]] +@ + +Add diversification constraint. 
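(Before the last two constraint chunks are added, here is a compact standalone sketch of the band-penalty idea the turnover, diversification, and volatility paragraphs above describe: values inside the allowed band incur no penalty, values outside are penalized linearly. Function name and numbers are illustrative only, not committed code.)

constraint.penalty <- function(value, min.target = -Inf, max.target = Inf) {
  # zero inside [min.target, max.target]; grows linearly outside the band
  max(min.target - value, 0) + max(value - max.target, 0)
}
constraint.penalty(0.10, max.target = 0.12)  # turnover-style cap respected: 0
constraint.penalty(0.15, 0.07, 0.12)         # volatility above max.vol: 0.03
constraint.penalty(0.05, 0.07, 0.12)         # volatility below min.vol: 0.02
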
We will try to maximize diversification; a diversification value less than \code{div.target} will be penalized.
+<<>>=
+pspec <- add.constraint(portfolio=pspec,
+                        type="diversification",
+                        div.target=0.7,
+                        enabled=TRUE)
+pspec$constraints[[5]]
+@
+
+Add volatility constraint. A portfolio volatility less than \code{min.vol} or greater than \code{max.vol} will be penalized.
+<<>>=
+pspec <- add.constraint(portfolio=pspec,
+                        type="volatility",
+                        min.vol=0.07,
+                        max.vol=0.12,
+                        enabled=TRUE)
+pspec$constraints[[6]]
+@
+
+\end{document}
\ No newline at end of file

From noreply at r-forge.r-project.org  Tue Jun 25 20:26:01 2013
From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org)
Date: Tue, 25 Jun 2013 20:26:01 +0200 (CEST)
Subject: [Returnanalytics-commits] r2433 - in pkg/FactorAnalytics: R man
Message-ID: <20130625182601.51FE21857D5@r-forge.r-project.org>

Author: chenyian
Date: 2013-06-25 20:26:00 +0200 (Tue, 25 Jun 2013)
New Revision: 2433

Added:
   pkg/FactorAnalytics/R/predict.TimeSeriesFactorModel.r
   pkg/FactorAnalytics/man/predict.TimeSeriesFactorModel.Rd
Modified:
   pkg/FactorAnalytics/R/summary.TimeSeriesFactorModel.r
   pkg/FactorAnalytics/man/summary.TimeSeriesFactorModel.Rd
Log:
1. revise summary.TimeSeriesFactorModel.Rd
2. add predict.TimeSeriesFactorModel.Rd and predict.TimeSeriesFactorModel.r

Added: pkg/FactorAnalytics/R/predict.TimeSeriesFactorModel.r
===================================================================
--- pkg/FactorAnalytics/R/predict.TimeSeriesFactorModel.r	                        (rev 0)
+++ pkg/FactorAnalytics/R/predict.TimeSeriesFactorModel.r	2013-06-25 18:26:00 UTC (rev 2433)
@@ -0,0 +1,30 @@
+#' predict method for TimeSeriesModel object.
+#'
+#' Generic function of predict method for fitTimeSeriesFactorModel. It utilizes
+#' function \code{predict.lm}.
+#'
+#' @param fit "TimeSeriesFactorModel" object created by fitTimeSeriesFactorModel.
+#' @param newdata An optional data frame in which to look for variables with which to predict.
+#' If omitted, the fitted values are used.
+#' @param ... Any other arguments used in \code{predict.lm}
+#' @author Yi-An Chen.
+#'
+#' @examples
+#'
+#' # load data from the database
+#' data(managers.df)
+#' ret.assets = managers.df[,(1:6)]
+#' # fit the factor model with OLS
+#' fit <- fitTimeSeriesFactorModel(assets.names=colnames(managers.df[,(1:6)]),
+#'                                 factors.names=c("EDHEC.LS.EQ","SP500.TR"),
+#'                                 data=managers.df,fit.method="OLS")
+#'
+#' predict(fit)
+#' # with a data frame of new factor data supplied by the user:
+#' # predict(fit, newdata, interval="confidence")
+#'
+#' @export
+#'
+predict.TimeSeriesFactorModel <- function(fit,...){
+  lapply(fit[[1]], predict,...)
+}
\ No newline at end of file

Modified: pkg/FactorAnalytics/R/summary.TimeSeriesFactorModel.r
===================================================================
--- pkg/FactorAnalytics/R/summary.TimeSeriesFactorModel.r	2013-06-25 14:19:19 UTC (rev 2432)
+++ pkg/FactorAnalytics/R/summary.TimeSeriesFactorModel.r	2013-06-25 18:26:00 UTC (rev 2433)
@@ -1,10 +1,10 @@
-#' summary TimeSeriesModel object.
+#' summary method for TimeSeriesModel object.
 #'
-#' Generic function of summary method for fitMacroeconomicFactorModel.
+#' Generic function of summary method for fitTimeSeriesFactorModel.
 #'
 #'
-#' @param fit.macro fit object created by fitMacroeconomicFactorModel.
-#' @author Eric Zivot and Yi-An Chen.
+#' @param fit fit object created by fitTimeSeriesFactorModel.
+#' @author Yi-An Chen.
 #' @examples
 #'
 #' # load data from the database
@@ -12,13 +12,15 @@
 #' ret.assets = managers.df[,(1:6)]
 #' factors = managers.df[,(7:9)]
 #' # fit the factor model with OLS
-#' fit.macro <- fitTimeSeriesFactorModel(ret.assets,factors,fit.method="OLS",
-#'                                variable.selection="all subsets")
-#' summary(fit.macro)
+#' fit <- fitTimeSeriesFactorModel(assets.names=colnames(managers.df[,(1:6)]),
+#'                                 factors.names=c("EDHEC.LS.EQ","SP500.TR"),
+#'                                 data=managers.df,fit.method="OLS")
+#' summary(fit)
 #'
+#' @export
 #'
 summary.TimeSeriesFactorModel <-
-  function(fit.macro){
-    lapply(fit.macro[[1]], summary)
+  function(fit){
+    lapply(fit[[1]], summary)
 }

Added: pkg/FactorAnalytics/man/predict.TimeSeriesFactorModel.Rd
===================================================================
--- pkg/FactorAnalytics/man/predict.TimeSeriesFactorModel.Rd	                        (rev 0)
+++ pkg/FactorAnalytics/man/predict.TimeSeriesFactorModel.Rd	2013-06-25 18:26:00 UTC (rev 2433)
@@ -0,0 +1,37 @@
+\name{predict.TimeSeriesFactorModel}
+\alias{predict.TimeSeriesFactorModel}
+\title{predict method for TimeSeriesModel object.}
+\usage{
+  predict.TimeSeriesFactorModel(fit, ...)
+}
+\arguments{
+  \item{fit}{"TimeSeriesFactorModel" object created by
+  fitTimeSeriesFactorModel.}
+
+  \item{newdata}{An optional data frame in which to look
+  for variables with which to predict. If omitted, the
+  fitted values are used.}
+
+  \item{...}{Any other arguments used in \code{predict.lm}}
+}
+\description{
+  Generic function of predict method for
+  fitTimeSeriesFactorModel. It utilizes function
+  \code{predict.lm}.
+}
+\examples{
+# load data from the database
+data(managers.df)
+ret.assets = managers.df[,(1:6)]
+# fit the factor model with OLS
+fit <- fitTimeSeriesFactorModel(assets.names=colnames(managers.df[,(1:6)]),
+                                factors.names=c("EDHEC.LS.EQ","SP500.TR"),
+                                data=managers.df,fit.method="OLS")
+
+predict(fit)
+# with a data frame of new factor data supplied by the user:
+# predict(fit, newdata, interval="confidence")
+}
+\author{
+  Yi-An Chen.
+}

Modified: pkg/FactorAnalytics/man/summary.TimeSeriesFactorModel.Rd
===================================================================
--- pkg/FactorAnalytics/man/summary.TimeSeriesFactorModel.Rd	2013-06-25 14:19:19 UTC (rev 2432)
+++ pkg/FactorAnalytics/man/summary.TimeSeriesFactorModel.Rd	2013-06-25 18:26:00 UTC (rev 2433)
@@ -1,16 +1,16 @@
 \name{summary.TimeSeriesFactorModel}
 \alias{summary.TimeSeriesFactorModel}
-\title{summary TimeSeriesModel object.}
+\title{summary method for TimeSeriesModel object.}
 \usage{
-  summary.TimeSeriesFactorModel(fit.macro)
+  summary.TimeSeriesFactorModel(fit)
 }
 \arguments{
-  \item{fit.macro}{fit object created by
-  fitMacroeconomicFactorModel.}
+  \item{fit}{fit object created by
+  fitTimeSeriesFactorModel.}
 }
 \description{
   Generic function of summary method for
-  fitMacroeconomicFactorModel.
+  fitTimeSeriesFactorModel.
 }
 \examples{
 # load data from the database
@@ -18,11 +18,12 @@
 ret.assets = managers.df[,(1:6)]
 factors = managers.df[,(7:9)]
 # fit the factor model with OLS
-fit.macro <- fitTimeSeriesFactorModel(ret.assets,factors,fit.method="OLS",
-                       variable.selection="all subsets")
-summary(fit.macro)
+fit <- fitTimeSeriesFactorModel(assets.names=colnames(managers.df[,(1:6)]),
+                                factors.names=c("EDHEC.LS.EQ","SP500.TR"),
+                                data=managers.df,fit.method="OLS")
+summary(fit)
 }
 \author{
-  Eric Zivot and Yi-An Chen.
+  Yi-An Chen.
} From noreply at r-forge.r-project.org Tue Jun 25 22:22:57 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Tue, 25 Jun 2013 22:22:57 +0200 (CEST) Subject: [Returnanalytics-commits] r2434 - in pkg/FactorAnalytics: R man Message-ID: <20130625202257.82D97184690@r-forge.r-project.org> Author: chenyian Date: 2013-06-25 22:22:57 +0200 (Tue, 25 Jun 2013) New Revision: 2434 Added: pkg/FactorAnalytics/R/predict.StatFactorModel.r pkg/FactorAnalytics/man/predict.StatFactorModel.Rd Modified: pkg/FactorAnalytics/R/fitStatisticalFactorModel.R Log: 1. add predict.StatFactorModel.Rd and predict.StatFactorModel.r 2. debug fitStatisticalFactorModel.R Modified: pkg/FactorAnalytics/R/fitStatisticalFactorModel.R =================================================================== --- pkg/FactorAnalytics/R/fitStatisticalFactorModel.R 2013-06-25 18:26:00 UTC (rev 2433) +++ pkg/FactorAnalytics/R/fitStatisticalFactorModel.R 2013-06-25 20:22:57 UTC (rev 2434) @@ -86,10 +86,9 @@ # check data data.xts <- checkData(data,method=ckeckData.method) +data <- coredata(data.xts) -# convert it to coredata - # function of test mfactor.test <- function(data, method = "bn", refine = TRUE, check = FALSE, max.k = NULL, sig = 0.05){ @@ -209,6 +208,7 @@ dimnames(data) <- list(1:m, paste("V", 1:n, sep = ".")) } data.names <- dimnames(data)[[2]] + # demean xc <- t(t(data) - colMeans(data)) if(is.null(ret.cov)) { ret.cov <- crossprod(xc)/m @@ -221,25 +221,43 @@ tmp <- data - f %*% B alpha <- colMeans(tmp) # compute residuals - tmp <- t(t(tmp) - alpha) - r2 <- (1 - colSums(tmp^2)/colSums(xc^2)) + resid <- t(t(tmp) - alpha) + r2 <- (1 - colSums(resid^2)/colSums(xc^2)) ret.cov <- t(B) %*% var(f) %*% B diag(ret.cov) <- diag(ret.cov) + colSums(tmp^2)/(m - k - 1) dimnames(B) <- list(paste("F", 1:k, sep = "."), data.names) dimnames(f) <- list(dimnames(data)[[1]], paste("F", 1:k, sep = ".")) dimnames(ret.cov) <- list(data.names, data.names) names(alpha) <- data.names + + if (ckeckData.method == "xts" | ckeckData.method == "zoo" ) { + f <- xts(f,index(data.xts)) + resid <- xts(resid,index(data.xts)) + } + + # create lm list for plot reg.list = list() - for (i in data.names) { - reg.df = as.data.frame(cbind(data[,i],f)) + if (ckeckData.method == "xts" | ckeckData.method == "zoo" ) { + for (i in data.names) { + reg.xts = merge(data.xts[,i],f) + colnames(reg.xts)[1] <- i + fm.formula = as.formula(paste(i,"~", ".", sep=" ")) + fm.fit = lm(fm.formula, data=reg.xts) + reg.list[[i]] = fm.fit + } + } else { + for (i in data.names) { + reg.df = as.data.frame(cbind(data[,i],coredata(f))) colnames(reg.df)[1] <- i fm.formula = as.formula(paste(i,"~", ".", sep=" ")) fm.fit = lm(fm.formula, data=reg.df) reg.list[[i]] = fm.fit } + } + ans <- list(factors = f, loadings = B, k = k, alpha = alpha, ret.cov = ret.cov, - r2 = r2, eigen = eigen.tmp$values, residuals=tmp, asset.ret = data, + r2 = r2, eigen = eigen.tmp$values, residuals=resid, asset.ret = data, asset.fit=reg.list) return(ans) Added: pkg/FactorAnalytics/R/predict.StatFactorModel.r =================================================================== --- pkg/FactorAnalytics/R/predict.StatFactorModel.r (rev 0) +++ pkg/FactorAnalytics/R/predict.StatFactorModel.r 2013-06-25 20:22:57 UTC (rev 2434) @@ -0,0 +1,23 @@ +#' predict method for StatFactorModel object. +#' +#' Generic function of predict method for fitStatisticalFactorModel. It utilizes +#' function \code{predict.lm}. +#' +#' @param fit "StatFactorModel" object created by fitTimeSeiresFactorModel. 
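For context on what these predict and summary methods traverse: the changes in this commit store one \code{lm} fit per asset in the fitted object, so the generics simply lapply over that list. A self-contained toy version of the pattern follows (simulated data and illustrative names, not committed package code):

set.seed(42)
n <- 50
f <- matrix(rnorm(n * 2), n, 2, dimnames = list(NULL, c("F.1", "F.2")))  # factor scores
reg.df <- data.frame(asset1 = f %*% c(0.8, -0.3) + rnorm(n, sd = 0.1), f)
reg.list <- list(asset1 = lm(asset1 ~ ., data = reg.df))  # one lm per asset
lapply(reg.list, predict)   # the pattern predict.StatFactorModel uses
lapply(reg.list, summary)   # the pattern the summary methods use
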
+#' @param newdata An optional data frame in which to look for variables with which to predict. +#' If omitted, the fitted values are used. +#' @param ... Any other arguments used in \code{predict.lm} +#' @author Yi-An Chen. +#' ' +#' @examples +#' data(stat.fm.data) +#'.fit <- fitStatisticalFactorModel(sfm.dat,k=2, +# ckeckData.method="data.frame") +#' +#' predict(fit) +#' @export +#' + +predict.StatFactorModel <- function(fit,...){ + lapply(fit$asset.fit, predict,...) +} \ No newline at end of file Added: pkg/FactorAnalytics/man/predict.StatFactorModel.Rd =================================================================== --- pkg/FactorAnalytics/man/predict.StatFactorModel.Rd (rev 0) +++ pkg/FactorAnalytics/man/predict.StatFactorModel.Rd 2013-06-25 20:22:57 UTC (rev 2434) @@ -0,0 +1,31 @@ +\name{predict.StatFactorModel} +\alias{predict.StatFactorModel} +\title{predict method for StatFactorModel object.} +\usage{ + predict.StatFactorModel(fit, ...) +} +\arguments{ + \item{fit}{"StatFactorModel" object created by + fitTimeSeiresFactorModel.} + + \item{newdata}{An optional data frame in which to look + for variables with which to predict. If omitted, the + fitted values are used.} + + \item{...}{Any other arguments used in \code{predict.lm}} +} +\description{ + Generic function of predict method for + fitStatisticalFactorModel. It utilizes function + \code{predict.lm}. +} +\examples{ +data(stat.fm.data) +.fit <- fitStatisticalFactorModel(sfm.dat,k=2, + +predict(fit) +} +\author{ + Yi-An Chen. ' +} + From noreply at r-forge.r-project.org Tue Jun 25 23:57:59 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Tue, 25 Jun 2013 23:57:59 +0200 (CEST) Subject: [Returnanalytics-commits] r2435 - pkg/FactorAnalytics/R Message-ID: <20130625215759.84AEF183D92@r-forge.r-project.org> Author: chenyian Date: 2013-06-25 23:57:59 +0200 (Tue, 25 Jun 2013) New Revision: 2435 Modified: pkg/FactorAnalytics/R/fitStatisticalFactorModel.R pkg/FactorAnalytics/R/predict.StatFactorModel.r Log: create lm object for apca in fitStatisticalFactorModel.R Modified: pkg/FactorAnalytics/R/fitStatisticalFactorModel.R =================================================================== --- pkg/FactorAnalytics/R/fitStatisticalFactorModel.R 2013-06-25 20:22:57 UTC (rev 2434) +++ pkg/FactorAnalytics/R/fitStatisticalFactorModel.R 2013-06-25 21:57:59 UTC (rev 2435) @@ -84,11 +84,7 @@ require(PerformanceAnalytics) -# check data -data.xts <- checkData(data,method=ckeckData.method) -data <- coredata(data.xts) - # function of test mfactor.test <- function(data, method = "bn", refine = TRUE, check = FALSE, max.k = NULL, sig = 0.05){ @@ -306,13 +302,45 @@ dimnames(B) <- list(paste("F", 1:k, sep = "."), data.names) dimnames(f) <- list(dimnames(data)[[1]], paste("F", 1:k, sep = ".")) names(alpha) <- data.names - res <- t(t(data) - alpha) - f %*% B + resid <- t(t(data) - alpha) - f %*% B r2 <- (1 - colSums(res^2)/colSums(xc^2)) + + if (ckeckData.method == "xts" | ckeckData.method == "zoo" ) { + f <- xts(f,index(data.xts)) + resid <- xts(resid,index(data.xts)) + } + + # create lm list for plot + reg.list = list() + if (ckeckData.method == "xts" | ckeckData.method == "zoo" ) { + for (i in data.names) { + reg.xts = merge(data.xts[,i],f) + colnames(reg.xts)[1] <- i + fm.formula = as.formula(paste(i,"~", ".", sep=" ")) + fm.fit = lm(fm.formula, data=reg.xts) + reg.list[[i]] = fm.fit + } + } else { + for (i in data.names) { + reg.df = as.data.frame(cbind(data[,i],coredata(f))) + colnames(reg.df)[1] <- i + 
fm.formula = as.formula(paste(i,"~", ".", sep=" ")) + fm.fit = lm(fm.formula, data=reg.df) + reg.list[[i]] = fm.fit + } + } + + ans <- list(factors = f, loadings = B, k = k, alpha = alpha, ret.cov = ret.cov, - r2 = r2, eigen = eig.tmp$values, residuals=res,asset.ret = data) + r2 = r2, eigen = eig.tmp$values, residuals=resid,asset.ret = data, + asset.fit=reg.list) return(ans) } +# check data +data.xts <- checkData(data,method=ckeckData.method) +data <- coredata(data.xts) + call <- match.call() pos <- rownames(data) data <- as.matrix(data) Modified: pkg/FactorAnalytics/R/predict.StatFactorModel.r =================================================================== --- pkg/FactorAnalytics/R/predict.StatFactorModel.r 2013-06-25 20:22:57 UTC (rev 2434) +++ pkg/FactorAnalytics/R/predict.StatFactorModel.r 2013-06-25 21:57:59 UTC (rev 2435) @@ -18,6 +18,7 @@ #' @export #' + predict.StatFactorModel <- function(fit,...){ lapply(fit$asset.fit, predict,...) } \ No newline at end of file From noreply at r-forge.r-project.org Wed Jun 26 01:22:52 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 26 Jun 2013 01:22:52 +0200 (CEST) Subject: [Returnanalytics-commits] r2436 - in pkg/FactorAnalytics: R man Message-ID: <20130625232252.7189018512F@r-forge.r-project.org> Author: chenyian Date: 2013-06-26 01:22:51 +0200 (Wed, 26 Jun 2013) New Revision: 2436 Modified: pkg/FactorAnalytics/R/predict.StatFactorModel.r pkg/FactorAnalytics/R/summary.TimeSeriesFactorModel.r pkg/FactorAnalytics/man/predict.StatFactorModel.Rd pkg/FactorAnalytics/man/summary.TimeSeriesFactorModel.Rd Log: add summary.StatFactorModel.r and summary.StatFactorModel.Rd Modified: pkg/FactorAnalytics/R/predict.StatFactorModel.r =================================================================== --- pkg/FactorAnalytics/R/predict.StatFactorModel.r 2013-06-25 21:57:59 UTC (rev 2435) +++ pkg/FactorAnalytics/R/predict.StatFactorModel.r 2013-06-25 23:22:51 UTC (rev 2436) @@ -3,7 +3,7 @@ #' Generic function of predict method for fitStatisticalFactorModel. It utilizes #' function \code{predict.lm}. #' -#' @param fit "StatFactorModel" object created by fitTimeSeiresFactorModel. +#' @param fit "StatFactorModel" object created by fitStatisticalFactorModel. #' @param newdata An optional data frame in which to look for variables with which to predict. #' If omitted, the fitted values are used. #' @param ... Any other arguments used in \code{predict.lm} Modified: pkg/FactorAnalytics/R/summary.TimeSeriesFactorModel.r =================================================================== --- pkg/FactorAnalytics/R/summary.TimeSeriesFactorModel.r 2013-06-25 21:57:59 UTC (rev 2435) +++ pkg/FactorAnalytics/R/summary.TimeSeriesFactorModel.r 2013-06-25 23:22:51 UTC (rev 2436) @@ -1,9 +1,11 @@ #' summary method for TimeSeriesModel object. #' -#' Generic function of summary method for fitTimeSeriesFactorModel. +#' Generic function of summary method for fitTimeSeriesFactorModel. +#' It utilizes \code{summary.lm} #' #' #' @param fit fit object created by fitTimeSeiresFactorModel. +#' @param ... other option used in \code{summary.lm} #' @author Yi-An Chen. #' @examples #' @@ -19,8 +21,7 @@ #' #' @export #' -summary.TimeSeriesFactorModel <- - function(fit){ - lapply(fit[[1]], summary) +summary.TimeSeriesFactorModel <- function(fit,...){ + lapply(fit[[1]], summary,...) 
} Modified: pkg/FactorAnalytics/man/predict.StatFactorModel.Rd =================================================================== --- pkg/FactorAnalytics/man/predict.StatFactorModel.Rd 2013-06-25 21:57:59 UTC (rev 2435) +++ pkg/FactorAnalytics/man/predict.StatFactorModel.Rd 2013-06-25 23:22:51 UTC (rev 2436) @@ -6,7 +6,7 @@ } \arguments{ \item{fit}{"StatFactorModel" object created by - fitTimeSeiresFactorModel.} + fitStatisticalFactorModel.} \item{newdata}{An optional data frame in which to look for variables with which to predict. If omitted, the Modified: pkg/FactorAnalytics/man/summary.TimeSeriesFactorModel.Rd =================================================================== --- pkg/FactorAnalytics/man/summary.TimeSeriesFactorModel.Rd 2013-06-25 21:57:59 UTC (rev 2435) +++ pkg/FactorAnalytics/man/summary.TimeSeriesFactorModel.Rd 2013-06-25 23:22:51 UTC (rev 2436) @@ -2,15 +2,17 @@ \alias{summary.TimeSeriesFactorModel} \title{summary method for TimeSeriesModel object.} \usage{ - summary.TimeSeriesFactorModel(fit) + summary.TimeSeriesFactorModel(fit, ...) } \arguments{ \item{fit}{fit object created by fitTimeSeiresFactorModel.} + + \item{...}{other option used in \code{summary.lm}} } \description{ Generic function of summary method for - fitTimeSeriesFactorModel. + fitTimeSeriesFactorModel. It utilizes \code{summary.lm} } \examples{ # load data from the database From noreply at r-forge.r-project.org Wed Jun 26 03:25:55 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 26 Jun 2013 03:25:55 +0200 (CEST) Subject: [Returnanalytics-commits] r2437 - in pkg/FactorAnalytics: R man Message-ID: <20130626012555.B47BF184468@r-forge.r-project.org> Author: chenyian Date: 2013-06-26 03:25:55 +0200 (Wed, 26 Jun 2013) New Revision: 2437 Added: pkg/FactorAnalytics/R/predict.FundamentalFactorModel.r pkg/FactorAnalytics/R/summary.StatFactorModel.r pkg/FactorAnalytics/man/predict.FundamentalFactorModel.Rd pkg/FactorAnalytics/man/summary.StatFactorModel.Rd Modified: pkg/FactorAnalytics/R/fitFundamentalFactorModel.R pkg/FactorAnalytics/man/fitFundamentalFactorModel.Rd Log: 1. add summary.StatFactorModel.Rd and summary.StatFactorModel.r 2. add predict.FundamentalFactorModel.Rd and predict.FundamentalFactorModel.r 3. modify fitFundamentalFactorModel.R Modified: pkg/FactorAnalytics/R/fitFundamentalFactorModel.R =================================================================== --- pkg/FactorAnalytics/R/fitFundamentalFactorModel.R 2013-06-25 23:22:51 UTC (rev 2436) +++ pkg/FactorAnalytics/R/fitFundamentalFactorModel.R 2013-06-26 01:25:55 UTC (rev 2437) @@ -48,9 +48,9 @@ #' residuals for each asset. If "wls" is TRUE, these are the weights used in #' the weighted least squares regressions. If "cov = robust" these values are #' computed with "scale.tau". Otherwise they are computed with "var". -#' \item factor.rets A "xts" object containing the times series of +#' \item factors A "xts" object containing the times series of #' estimated factor returns and intercepts. -#' \item resids A "xts" object containing the time series of residuals +#' \item residuals A "xts" object containing the time series of residuals #' for each asset. #' \item tstats A "xts" object containing the time series of t-statistics #' for each exposure. 
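Before the remaining hunks, a numeric sketch of how these outputs combine: predict.FundamentalFactorModel, added later in this commit, computes per-period fitted returns as beta %*% t(f), where beta binds an intercept column to the asset exposures and f holds that period's estimated intercept and factor returns. The values and names below are simulated for illustration, not committed code:

set.seed(1)
exposures <- matrix(rnorm(6), 3, 2,
                    dimnames = list(paste0("asset", 1:3), c("SIZE", "VALUE")))
f.t <- c(intercept = 0.001, SIZE = 0.02, VALUE = -0.01)  # one row of fit$factors
beta <- cbind(intercept = 1, exposures)                  # prepend intercept column
beta %*% f.t                                             # fitted returns, 3 x 1
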
@@ -142,8 +142,7 @@ if (match(returnsvar, exposure.names, FALSE)) stop(paste(returnsvar, "cannot be used as an exposure.")) - assets = unique(data[,assetvar]) - timedates = as.Date(unique(data[,datevar])) + numTimePoints <- length(timedates) numExposures <- length(exposure.names) numAssets <- length(assets) @@ -318,10 +317,12 @@ paste("t", c("(Intercept)", exposures.numeric), sep = "."), assets) } + +# create matrix for fit FE.hat.mat <- matrix(NA, ncol = ncols, nrow = numTimePoints, dimnames = list(as.character(as.Date(as.numeric(names(FE.hat)), origin = "1970-01-01")), cnames)) - # give each element t names and PERMNO + # give each element t names for (i in 1:length(FE.hat)) { names(FE.hat[[i]])[1] <- "numCoefs" nc <- FE.hat[[i]][1] @@ -401,10 +402,11 @@ cov.factor = Cov.factors, cov.resids = Cov.resids, resid.variance = resid.vars, - factor.rets = f.hat, - resids = resids, + factors = f.hat, + residuals = resids, tstats = tstats, - call = this.call) + call = this.call, + data = data) class(output) <- "FundamentalFactorModel" return(output) } Added: pkg/FactorAnalytics/R/predict.FundamentalFactorModel.r =================================================================== --- pkg/FactorAnalytics/R/predict.FundamentalFactorModel.r (rev 0) +++ pkg/FactorAnalytics/R/predict.FundamentalFactorModel.r 2013-06-26 01:25:55 UTC (rev 2437) @@ -0,0 +1,57 @@ +#' predict method for FundamentalFactorModel object +#' +#' Generic function of predict method for fitFundamentalFactorModel. +#' +#' @param fit "FundamentalFactorModel" object +#' @export +#' @author Yi-An Chen +#' +predict.FundamentalFactorModel <- function(fit,newdata){ + + # if there is no newdata provided + # calculate fitted values + datevar <- as.character(fit$call)[4] + assetvar <- as.character(fit$call)[6] + assets = unique(data[,assetvar]) + timedates = as.Date(unique(data[,datevar])) + + numTimePoints <- length(timedates) + numExposures <- length(exposure.names) + numAssets <- length(assets) + + f <- fit$factors # T X 3 + exposure.names <- colnames(f)[-1] + beta.all <- data[,c(datevar,assetvar,exposure.names)] # (N * T ) X 4 + + if (missing(newdata) || is.null(newdata)) { + # + ### calculated fitted values + # + + fitted <- rep(NA,numAssets) + for (i in 1:numTimePoints) { + beta <- subset(beta.all, DATE == index(f)[i])[,exposure.names] + beta <- as.matrix(cbind(rep(1,numAssets),beta)) + fit.tmp <- beta %*% t(f[i,]) + fitted <- rbind(fitted,t(fit.tmp)) + } + fitted <- fitted[-1,] + colnames(fitted) <- assets + + } + + # predict returns by newdata + if (!missing(newdata) && !is.null(newdata)) { + # check if newdata has the same data points as beta + if (dim(newdata) != c(numAssets*numTimePoints,numExposures)) { + stop("Dimension of newdata has to match mAssets*numTimePoints,numExposures") + } else { + + + + } + + } + + +} \ No newline at end of file Added: pkg/FactorAnalytics/R/summary.StatFactorModel.r =================================================================== --- pkg/FactorAnalytics/R/summary.StatFactorModel.r (rev 0) +++ pkg/FactorAnalytics/R/summary.StatFactorModel.r 2013-06-26 01:25:55 UTC (rev 2437) @@ -0,0 +1,24 @@ +#' summary method for StatFactorModel object. +#' +#' Generic function of summary method for fitStatisticalFactorModel. It utilizes +#' function \code{summary.lm}. +#' +#' @param fit "StatFactorModel" object created by fitStatisticalFactorModel. +#' @param newdata An optional data frame in which to look for variables with which to predict. +#' If omitted, the fitted values are used. +#' @param ... 
Any other arguments used in \code{summary.lm} +#' @author Yi-An Chen. +#' ' +#' @examples +#' data(stat.fm.data) +#'.fit <- fitStatisticalFactorModel(sfm.dat,k=2, +# ckeckData.method="data.frame") +#' +#' summary(fit) +#' @export +#' + + +summary.StatFactorModel <- function(fit,...){ + lapply(fit$asset.fit, summary,...) +} \ No newline at end of file Modified: pkg/FactorAnalytics/man/fitFundamentalFactorModel.Rd =================================================================== --- pkg/FactorAnalytics/man/fitFundamentalFactorModel.Rd 2013-06-25 23:22:51 UTC (rev 2436) +++ pkg/FactorAnalytics/man/fitFundamentalFactorModel.Rd 2013-06-26 01:25:55 UTC (rev 2437) @@ -57,9 +57,9 @@ each asset. If "wls" is TRUE, these are the weights used in the weighted least squares regressions. If "cov = robust" these values are computed with "scale.tau". - Otherwise they are computed with "var". \item factor.rets - A "xts" object containing the times series of estimated - factor returns and intercepts. \item resids A "xts" + Otherwise they are computed with "var". \item factors A + "xts" object containing the times series of estimated + factor returns and intercepts. \item residuals A "xts" object containing the time series of residuals for each asset. \item tstats A "xts" object containing the time series of t-statistics for each exposure. \item call Added: pkg/FactorAnalytics/man/predict.FundamentalFactorModel.Rd =================================================================== --- pkg/FactorAnalytics/man/predict.FundamentalFactorModel.Rd (rev 0) +++ pkg/FactorAnalytics/man/predict.FundamentalFactorModel.Rd 2013-06-26 01:25:55 UTC (rev 2437) @@ -0,0 +1,17 @@ +\name{predict.FundamentalFactorModel} +\alias{predict.FundamentalFactorModel} +\title{predict method for FundamentalFactorModel object} +\usage{ + predict.FundamentalFactorModel(fit, newdata) +} +\arguments{ + \item{fit}{"FundamentalFactorModel" object} +} +\description{ + Generic function of predict method for + fitFundamentalFactorModel. +} +\author{ + Yi-An Chen +} + Added: pkg/FactorAnalytics/man/summary.StatFactorModel.Rd =================================================================== --- pkg/FactorAnalytics/man/summary.StatFactorModel.Rd (rev 0) +++ pkg/FactorAnalytics/man/summary.StatFactorModel.Rd 2013-06-26 01:25:55 UTC (rev 2437) @@ -0,0 +1,31 @@ +\name{summary.StatFactorModel} +\alias{summary.StatFactorModel} +\title{summary method for StatFactorModel object.} +\usage{ + summary.StatFactorModel(fit, ...) +} +\arguments{ + \item{fit}{"StatFactorModel" object created by + fitStatisticalFactorModel.} + + \item{newdata}{An optional data frame in which to look + for variables with which to predict. If omitted, the + fitted values are used.} + + \item{...}{Any other arguments used in \code{summary.lm}} +} +\description{ + Generic function of summary method for + fitStatisticalFactorModel. It utilizes function + \code{summary.lm}. +} +\examples{ +data(stat.fm.data) +.fit <- fitStatisticalFactorModel(sfm.dat,k=2, + +summary(fit) +} +\author{ + Yi-An Chen. 
' +} + From noreply at r-forge.r-project.org Wed Jun 26 05:04:31 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 26 Jun 2013 05:04:31 +0200 (CEST) Subject: [Returnanalytics-commits] r2438 - pkg/PortfolioAnalytics/R Message-ID: <20130626030431.4B9EA184B43@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-26 05:04:27 +0200 (Wed, 26 Jun 2013) New Revision: 2438 Modified: pkg/PortfolioAnalytics/R/objective.R Log: adding turnover objective function to add.objective for current framework Modified: pkg/PortfolioAnalytics/R/objective.R =================================================================== --- pkg/PortfolioAnalytics/R/objective.R 2013-06-26 01:25:55 UTC (rev 2437) +++ pkg/PortfolioAnalytics/R/objective.R 2013-06-26 03:04:27 UTC (rev 2438) @@ -108,6 +108,11 @@ ...=... ) }, + turnover = {tmp_objective = turnover_objective(name=name, + enabled=enabled, + arguments=arguments, + ...=...) + }, null = {return(constraints)} # got nothing, default to simply returning From noreply at r-forge.r-project.org Wed Jun 26 05:17:30 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 26 Jun 2013 05:17:30 +0200 (CEST) Subject: [Returnanalytics-commits] r2439 - pkg/PortfolioAnalytics/R Message-ID: <20130626031730.344B2184F6C@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-26 05:17:29 +0200 (Wed, 26 Jun 2013) New Revision: 2439 Modified: pkg/PortfolioAnalytics/R/objectiveFUN.R Log: fixing turnover function Modified: pkg/PortfolioAnalytics/R/objectiveFUN.R =================================================================== --- pkg/PortfolioAnalytics/R/objectiveFUN.R 2013-06-26 03:04:27 UTC (rev 2438) +++ pkg/PortfolioAnalytics/R/objectiveFUN.R 2013-06-26 03:17:29 UTC (rev 2439) @@ -7,13 +7,15 @@ turnover <- function(weights, wts.init=NULL) { # turnover function from https://r-forge.r-project.org/scm/viewvc.php/pkg/PortfolioAnalytics/sandbox/script.workshop2012.R?view=markup&root=returnanalytics - # Check that weights and wts.init are the same length - if(length(weights) != length(wts.init)) stop("weights and wts.init are not the same length") + N <- length(weights) # If wts.init is not given, then assume a vector of equal weights if(is.null(wts.init)) { - N <- length(weights) wts.init <- rep(1/N, N) } - return(sum(abs(wts.init-weights))/N) + + # Check that weights and wts.init are the same length + if(length(weights) != length(wts.init)) stop("weights and wts.init are not the same length") + + return(sum(abs(wts.init - weights)) / N) } \ No newline at end of file From noreply at r-forge.r-project.org Wed Jun 26 06:54:08 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 26 Jun 2013 06:54:08 +0200 (CEST) Subject: [Returnanalytics-commits] r2440 - pkg/PortfolioAnalytics/R Message-ID: <20130626045408.4F41B185188@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-26 06:54:07 +0200 (Wed, 26 Jun 2013) New Revision: 2440 Modified: pkg/PortfolioAnalytics/R/constrained_objective.R Log: adjusted penalty for turnover function to only penalize if turnover greater than target. 
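A standalone distillation of the behavior r2439 and r2440 converge on (mirroring, not reproducing, the package code; the objective multiplier is taken as 1 for illustration): turnover is the mean absolute deviation from the initial weights, and the penalty is one-sided, charged only when turnover exceeds the target.

turnover <- function(weights, wts.init = NULL) {
  N <- length(weights)
  # if wts.init is not given, assume a vector of equal weights
  if (is.null(wts.init)) wts.init <- rep(1 / N, N)
  if (length(weights) != length(wts.init))
    stop("weights and wts.init are not the same length")
  sum(abs(wts.init - weights)) / N
}
to <- turnover(c(0.4, 0.3, 0.2, 0.1))  # 0.10 away from equal weights
penalty <- 10000                        # large constant, as in constrained_objective
max.turnover <- 0.05
penalty * max(to - max.turnover, 0)     # 500 here; exactly 0 once to <= target
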
see comments in constrained_objective function Modified: pkg/PortfolioAnalytics/R/constrained_objective.R =================================================================== --- pkg/PortfolioAnalytics/R/constrained_objective.R 2013-06-26 03:17:29 UTC (rev 2439) +++ pkg/PortfolioAnalytics/R/constrained_objective.R 2013-06-26 04:54:07 UTC (rev 2440) @@ -249,8 +249,14 @@ if(inherits(objective,"turnover_objective")){ if (!is.null(objective$target) & is.numeric(objective$target)){ # we have a target - out = out + penalty*abs(objective$multiplier)*abs(tmp_measure-objective$target) - # Does this penalize for turnover below target + # out = out + penalty*abs(objective$multiplier)*abs(tmp_measure-objective$target) + # Does this penalize for turnover below target? + # I want to only penalize turnover above the given target. + # Seems to be giving correct results, but only if multiplier=0.01 + # I would expect this to be the same result when multiplier=0, but it is not. + # max(tmp_measure - objective$target, 0) should equal 0 when tmp_measure is less than objective$target + print(max(tmp_measure - objective$target, 0)) + out = out + penalty * objective$multiplier * max(tmp_measure - objective$target, 0) } # target is null or doesn't exist, just maximize, or minimize violation of constraint out = out + abs(objective$multiplier)*tmp_measure From noreply at r-forge.r-project.org Wed Jun 26 06:58:03 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 26 Jun 2013 06:58:03 +0200 (CEST) Subject: [Returnanalytics-commits] r2441 - pkg/PortfolioAnalytics/R Message-ID: <20130626045803.E6DD6185188@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-26 06:58:03 +0200 (Wed, 26 Jun 2013) New Revision: 2441 Modified: pkg/PortfolioAnalytics/R/constrained_objective.R Log: commenting out print statement I used for debugging Modified: pkg/PortfolioAnalytics/R/constrained_objective.R =================================================================== --- pkg/PortfolioAnalytics/R/constrained_objective.R 2013-06-26 04:54:07 UTC (rev 2440) +++ pkg/PortfolioAnalytics/R/constrained_objective.R 2013-06-26 04:58:03 UTC (rev 2441) @@ -255,7 +255,7 @@ # Seems to be giving correct results, but only if multiplier=0.01 # I would expect this to be the same result when multiplier=0, but it is not. # max(tmp_measure - objective$target, 0) should equal 0 when tmp_measure is less than objective$target - print(max(tmp_measure - objective$target, 0)) + # print(max(tmp_measure - objective$target, 0)) out = out + penalty * objective$multiplier * max(tmp_measure - objective$target, 0) } # target is null or doesn't exist, just maximize, or minimize violation of constraint From noreply at r-forge.r-project.org Wed Jun 26 14:40:38 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 26 Jun 2013 14:40:38 +0200 (CEST) Subject: [Returnanalytics-commits] r2442 - in pkg/PerformanceAnalytics/sandbox/pulkit: . 
week1/code week1/vignette week2 week2/code Message-ID: <20130626124038.A7DD3184B9B@r-forge.r-project.org> Author: pulkit Date: 2013-06-26 14:40:38 +0200 (Wed, 26 Jun 2013) New Revision: 2442 Added: pkg/PerformanceAnalytics/sandbox/pulkit/week2/ pkg/PerformanceAnalytics/sandbox/pulkit/week2/code/ pkg/PerformanceAnalytics/sandbox/pulkit/week2/code/BenchmarkSR.R pkg/PerformanceAnalytics/sandbox/pulkit/week2/tests/ pkg/PerformanceAnalytics/sandbox/pulkit/week2/vignette/ Removed: pkg/PerformanceAnalytics/sandbox/pulkit/ProbSharpeRatio.R Modified: pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/MinTRL.R pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/ProbSharpeRatio.R pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/chart.SharpeEfficient.R pkg/PerformanceAnalytics/sandbox/pulkit/week1/vignette/ProbSharpe.Rnw pkg/PerformanceAnalytics/sandbox/pulkit/week1/vignette/ProbSharpe.pdf Log: benchmarkSR file added Deleted: pkg/PerformanceAnalytics/sandbox/pulkit/ProbSharpeRatio.R =================================================================== --- pkg/PerformanceAnalytics/sandbox/pulkit/ProbSharpeRatio.R 2013-06-26 04:58:03 UTC (rev 2441) +++ pkg/PerformanceAnalytics/sandbox/pulkit/ProbSharpeRatio.R 2013-06-26 12:40:38 UTC (rev 2442) @@ -1,85 +0,0 @@ -#'@title Probabilistic Sharpe Ratio -#' -#'@description -#'Given a predefined benchmark Sharpe ratio ,the observed Sharpe Ratio -#'can be expressed in probabilistic terms known as the Probabilistic Sharpe Ratio -#'PSR takes higher moments into account and delivers a corrected, atemporal -#'measure of performance expressed in terms of probability of skill. -#' -#'@aliases ProbSharpeRatio -#' -#'@param R the return series -#'@param Rf the risk free rate of return -#'@param refSR the reference Sharpe Ratio -#'@param the confidence level -#'@param weights the weights for the portfolio -#'@param sr Sharpe Ratio -#'@param sk Skewness -#'@param kr Kurtosis -#' -#'@references Bailey, David H. and Lopez de Prado, Marcos, \emph{The Sharpe Ratio -#'Efficient Frontier} (July 1, 2012). Journal of Risk, Vol. 15, No. 
2, Winter -#' 2012/13 -#' -#'@keywords ts multivariate distribution models -#' -#'@examples -#' -#'data(edhec) -#'ProbSharpeRatio(edhec[,1],refSR = 0.28) -#'ProbSharpeRatio(edhec,reSR = 0.28,Rf = 0.06) - - -ProbSharpeRatio<- -function(R = NULL, refSR,Rf=0,p = 0.95, weights = NULL,n = NULL,sr = NULL,sk = NULL, kr = NULL, ...){ - columns = 1 - columnnames = NULL - #Error handling if R is not NULL - if(!is.null(R)){ - x = checkData(R) - columns = ncol(x) - n = nrow(x) - #Checking if the weights are provided or not - if(!is.null(weights)){ - if(length(weights)!=columns){ - stop("number of items in weights is not equal to the number of columns in R") - } - else{ - # A potfolio is constructed by applying the weights - x = Return.portfolio(R,weights) - sr = SharpeRatio(x, Rf, p, "StdDev") - sk = skewness(x) - kr = kurtosis(x) - } - } - else{ - sr = SharpeRatio(x, Rf, p, "StdDev") - sk = skewness(x) - kr = kurtosis(x) - } - - columnnames = colnames(x) - - } - # If R is passed as null checking for sharpe ratio , skewness and kurtosis - else{ - - if(is.null(sr) | is.null(sk) | is.null(kr) | is.null(n)){ - stop("You must either pass R or the Sharpe ratio, Skewness, Kurtosis,n etc") - } - } - #If weights are not taken into account a message is displayed - if(is.null(weights)){ - message("no weights passed,will calculate Probability Sharpe Ratio for each column") - } - - if(!is.null(dim(Rf))) - Rf = checkData(Rf) - result = pnorm(((sr - refSR)*(n-1)^(0.5))/(1-sr*sk+sr^2*(kr-1)/4)^(0.5)) - if(!is.null(dim(result))){ - colnames(result) = columnnames - rownames(result) = paste("Probabilistic Sharpe Ratio(p=",round(p*100,1),"%):") - } - return(result) - -} Modified: pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/MinTRL.R =================================================================== --- pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/MinTRL.R 2013-06-26 04:58:03 UTC (rev 2441) +++ pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/MinTRL.R 2013-06-26 12:40:38 UTC (rev 2442) @@ -62,9 +62,9 @@ } } #If weights are not taken into account a message is displayed - if(is.null(weights)){ - message("no weights passed,will calculate Minimum Track Record Length for each column") - } + #if(is.null(weights)){ + # message("no weights passed,will calculate Minimum Track Record Length for each column") + #} if(!is.null(dim(Rf))) Rf = checkData(Rf) Modified: pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/ProbSharpeRatio.R =================================================================== --- pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/ProbSharpeRatio.R 2013-06-26 04:58:03 UTC (rev 2441) +++ pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/ProbSharpeRatio.R 2013-06-26 12:40:38 UTC (rev 2442) @@ -69,9 +69,9 @@ } } #If weights are not taken into account a message is displayed - if(is.null(weights)){ - message("no weights passed,will calculate Probability Sharpe Ratio for each column") - } +# if(is.null(weights)){ + # message("no weights passed,will calculate Probability Sharpe Ratio for each column") + # } if(!is.null(dim(Rf))) Rf = checkData(Rf) Modified: pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/chart.SharpeEfficient.R =================================================================== --- pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/chart.SharpeEfficient.R 2013-06-26 04:58:03 UTC (rev 2441) +++ pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/chart.SharpeEfficient.R 2013-06-26 12:40:38 UTC (rev 2442) @@ -3,3 +3,17 @@ x = checkData(R) columns = ncol(x) com + permutations<-function 
(n, r, v = 1:n) + { + if (r == 1) + matrix(v, n, 1) + else if (n == 1) + matrix(v, 1, r) + else { + X <- NULL + for (i in 1:n) X <- rbind(X, cbind(v[i], fn_perm_list(n - + 1, r - 1, v[-i]))) + X + } + } + Modified: pkg/PerformanceAnalytics/sandbox/pulkit/week1/vignette/ProbSharpe.Rnw =================================================================== --- pkg/PerformanceAnalytics/sandbox/pulkit/week1/vignette/ProbSharpe.Rnw 2013-06-26 04:58:03 UTC (rev 2441) +++ pkg/PerformanceAnalytics/sandbox/pulkit/week1/vignette/ProbSharpe.Rnw 2013-06-26 12:40:38 UTC (rev 2442) @@ -37,15 +37,15 @@ <>= -source("/home/pulkit/workspace/GSOC/PerformanceAnalytics/sandbox/pulkit/ProbSharpeRatio.R") +source("/home/pulkit/workspace/GSOC/PerformanceAnalytics/sandbox/pulkit/week1/code/ProbSharpeRatio.R") @ <>= -source("/home/pulkit/workspace/GSOC/PerformanceAnalytics/sandbox/pulkit/MinTRL.R") +source("/home/pulkit/workspace/GSOC/PerformanceAnalytics/sandbox/pulkit/week1/code/MinTRL.R") @ <>= -source("/home/pulkit/workspace/GSOC/PerformanceAnalytics/sandbox/pulkit/PSRopt.R") +source("/home/pulkit/workspace/GSOC/PerformanceAnalytics/sandbox/pulkit/week1/code/PSRopt.R") @ \section{Probabilistic Sharpe Ratio} @@ -88,7 +88,8 @@ where $\sigma = \sqrt{E[(r-\mu)^2]}$ ,its standard deviation.$\gamma_3=\frac{E\biggl[(r-\mu)^3\biggr]}{\sigma^3}$ its skewness,$\gamma_4=\frac{E\biggl[(r-\mu)^4\biggr]}{\sigma^4}$ its kurtosis and $SR = \frac{\mu}{\sigma}$ its Sharpe Ratio. Because $\hat{PSR}(SR^\ast)=Z[\hat{Z^\ast}]$ is a monotonic increasing function of -$\hat{Z^\ast}$. This optimal vector is invariant of the value adopted by the parameter $SR^\ast$. +$\hat{Z^\ast}$ ,it suffices to compute the vector that maximizes $\hat{Z^\ast}$ + This optimal vector is invariant of the value adopted by the parameter $SR^\ast$. <<>>= Modified: pkg/PerformanceAnalytics/sandbox/pulkit/week1/vignette/ProbSharpe.pdf =================================================================== (Binary files differ) Added: pkg/PerformanceAnalytics/sandbox/pulkit/week2/code/BenchmarkSR.R =================================================================== --- pkg/PerformanceAnalytics/sandbox/pulkit/week2/code/BenchmarkSR.R (rev 0) +++ pkg/PerformanceAnalytics/sandbox/pulkit/week2/code/BenchmarkSR.R 2013-06-26 12:40:38 UTC (rev 2442) @@ -0,0 +1,33 @@ +#'@title +#'Benchmark Sharpe Ratio +#' +#'@description +#'The benchmark SR is a linear function of the average +#' SR of the individual strategies, and a decreasing +#' convex function of the number of strategies and the +#' average pairwise correlation. The Returns are given as +#' the input with the benchmark Sharpe Ratio as the output. 
+#' +#'@aliases BenchmarkSR +#' +#'@param R a vector, matrix, data frame,timeseries or zoo object of asset returns +#' +#'@references +#' +#'@export +BenchmanrkSR<-function(R){ + + x = checkData(R) + columns = ncol(x) + SR = SharpeRatio(x) + sr_avg = mean(SR) + corr = table.Correlation(edhec,edhec) + corr_avg = 0 + for(i in 1:columns){ + for(j in i:columns){ + corr_avg = corr_avg + corr[(i-1)*columns+j,] + } + } + SR_Benchmark = sr_avg*sqrt(columns/(1+(columns-1))*corr_avg[1,1]) + return(SR_Benchmark) +} \ No newline at end of file From noreply at r-forge.r-project.org Wed Jun 26 16:54:06 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 26 Jun 2013 16:54:06 +0200 (CEST) Subject: [Returnanalytics-commits] r2443 - in pkg/PerformanceAnalytics/sandbox/pulkit: week1/vignette week2/code Message-ID: <20130626145406.2E85B1851A9@r-forge.r-project.org> Author: pulkit Date: 2013-06-26 16:54:05 +0200 (Wed, 26 Jun 2013) New Revision: 2443 Modified: pkg/PerformanceAnalytics/sandbox/pulkit/week1/vignette/ProbSharpe.Rnw pkg/PerformanceAnalytics/sandbox/pulkit/week2/code/BenchmarkSR.R Log: Added vignette for Benchmark SR Modified: pkg/PerformanceAnalytics/sandbox/pulkit/week1/vignette/ProbSharpe.Rnw =================================================================== --- pkg/PerformanceAnalytics/sandbox/pulkit/week1/vignette/ProbSharpe.Rnw 2013-06-26 12:40:38 UTC (rev 2442) +++ pkg/PerformanceAnalytics/sandbox/pulkit/week1/vignette/ProbSharpe.Rnw 2013-06-26 14:54:05 UTC (rev 2443) @@ -33,6 +33,7 @@ <>= library(PerformanceAnalytics) +data(edhec) @ Modified: pkg/PerformanceAnalytics/sandbox/pulkit/week2/code/BenchmarkSR.R =================================================================== --- pkg/PerformanceAnalytics/sandbox/pulkit/week2/code/BenchmarkSR.R 2013-06-26 12:40:38 UTC (rev 2442) +++ pkg/PerformanceAnalytics/sandbox/pulkit/week2/code/BenchmarkSR.R 2013-06-26 14:54:05 UTC (rev 2443) @@ -13,21 +13,30 @@ #'@param R a vector, matrix, data frame,timeseries or zoo object of asset returns #' #'@references +#'Bailey, David H. and Lopez de Prado, Marcos, The Strategy Approval Decision: +#'A Sharpe Ratio Indifference Curve Approach (January 2013). Algorithmic Finance, +#'Vol. 2, No. 1 (2013). #' +#'@examples +#' +#'data(edhec) +#'BenchmarkSR(edhec) #expected 0.2019308 +#' #'@export +#' BenchmanrkSR<-function(R){ - x = checkData(R) columns = ncol(x) SR = SharpeRatio(x) sr_avg = mean(SR) corr = table.Correlation(edhec,edhec) corr_avg = 0 - for(i in 1:columns){ - for(j in i:columns){ + for(i in 1:(columns-1)){ + for(j in (i+1):columns){ corr_avg = corr_avg + corr[(i-1)*columns+j,] } } + corr_avg = corr_avg*2/(columns*(columns-1)) SR_Benchmark = sr_avg*sqrt(columns/(1+(columns-1))*corr_avg[1,1]) return(SR_Benchmark) } \ No newline at end of file From noreply at r-forge.r-project.org Wed Jun 26 17:05:42 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 26 Jun 2013 17:05:42 +0200 (CEST) Subject: [Returnanalytics-commits] r2444 - in pkg/PerformanceAnalytics/sandbox/pulkit: . 
week1/code week2/vignette Message-ID: <20130626150542.B1403184AFD@r-forge.r-project.org> Author: pulkit Date: 2013-06-26 17:05:42 +0200 (Wed, 26 Jun 2013) New Revision: 2444 Added: pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/table.PSR.R pkg/PerformanceAnalytics/sandbox/pulkit/week2/vignette/SharepRatioEfficientFrontier.Rnw Removed: pkg/PerformanceAnalytics/sandbox/pulkit/chart.PSR.R Log: vignette update Deleted: pkg/PerformanceAnalytics/sandbox/pulkit/chart.PSR.R =================================================================== --- pkg/PerformanceAnalytics/sandbox/pulkit/chart.PSR.R 2013-06-26 14:54:05 UTC (rev 2443) +++ pkg/PerformanceAnalytics/sandbox/pulkit/chart.PSR.R 2013-06-26 15:05:42 UTC (rev 2444) @@ -1,35 +0,0 @@ -#'@title Probabilistic Sharpe Ratio -#'@description -#'Given a predefined -#'benchmark4 Sharpe ratio (), the observed Sharpe Ratio? can be expressed -#' in probabilistic -#' -#'@param R the return series -#'@param Rf the risk free rate of return -#'@param refSR the reference Sharpe Ratio -#'@param the confidence level -#'@param weights the weights for the portfolio -chart.PSR<-function(x,Rf,refSR,p=0.95,...){ - for(column in 1:columns){ - column.probsharpe <- psr(x[,column],Rf,p,refSR) - column.mintrack <- mintrl(x[,column],Rf,p,refSR) - if(column == 1){ - probsharpe = column.probsharpe - mintrack = column.mintrack - } - else { - probsharpe = merge(probsharpe, column.probsharpe) - mintrack = merge(mintrack, column.mintrack) - } - - } - - probsharpe = rbind(probsharpe,mintrack) - - colnames(probsharpe) = columnnames - probsharpe = reclass(probsharpe, x) - rownames(probsharpe)=c("PSR","MinTRL") - return(probsharpe) - -} - Added: pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/table.PSR.R =================================================================== --- pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/table.PSR.R (rev 0) +++ pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/table.PSR.R 2013-06-26 15:05:42 UTC (rev 2444) @@ -0,0 +1,70 @@ +#'@title Probabilistic Sharpe Ratio +#' +#'@description +#'A table to display the Probabilistic Sharpe Ratio Along with +#'the Minimum Track Record Length for better assessment of the returns. +#' +#'@aliases table.PSR +#' +#'@param R the return series +#'@param Rf the risk free rate of return +#'@param refSR the reference Sharpe Ratio +#'@param the confidence level +#'@param weights the weights for the portfolio +#' +#'@reference Bailey, David H. and Lopez de Prado, Marcos, \emph{The Sharpe Ratio +#'Efficient Frontier} (July 1, 2012). Journal of Risk, Vol. 15, No. 
2, Winter +#' 2012/13 +#'@keywords ts multivariate distribution models +#'@examples +#' +#'data(edhec) +#'table.PSR(edhec[,1],0.20) +#' +table.PSR<-function(R=NULL,refSR,Rf=0,p=0.95,weights = NULL,...){ + + if(!is.null(R)){ + x = checkData(R) + columns = ncol(x) + n = nrow(x) + #Checking if the weights are provided or not + if(!is.null(weights)){ + if(length(weights)!=columns){ + stop("number of items in weights is not equal to the number of columns in R") + } + else{ + # A potfolio is constructed by applying the weights + x = Return.portfolio(R,weights) + } + } + + } + else{ + stop("Returns series not provided") + } + + columnnames = colnames(x) + + for(column in 1:columns){ + column.probsharpe <- ProbSharpeRatio(x[,column],refSR,Rf,p,weights) + column.mintrack <- MinTrackRecord(x[,column],refSR,Rf,p,weights) + if(column == 1){ + probsharpe = column.probsharpe + mintrack = column.mintrack + } + else { + probsharpe = merge(probsharpe, column.probsharpe) + mintrack = merge(mintrack, column.mintrack) + } + + } + + probsharpe = rbind(probsharpe,mintrack) + + colnames(probsharpe) = columnnames + probsharpe = reclass(probsharpe, x) + rownames(probsharpe)=c("PSR","MinTRL") + return(probsharpe) + +} + Added: pkg/PerformanceAnalytics/sandbox/pulkit/week2/vignette/SharepRatioEfficientFrontier.Rnw =================================================================== --- pkg/PerformanceAnalytics/sandbox/pulkit/week2/vignette/SharepRatioEfficientFrontier.Rnw (rev 0) +++ pkg/PerformanceAnalytics/sandbox/pulkit/week2/vignette/SharepRatioEfficientFrontier.Rnw 2013-06-26 15:05:42 UTC (rev 2444) @@ -0,0 +1,60 @@ +\documentclass[12pt,letterpaper,english]{article} +\usepackage{times} +\usepackage[T1]{fontenc} +\IfFileExists{url.sty}{\usepackage{url}} + {\newcommand{\url}{\texttt}} + +\usepackage{babel} +\usepackage{Rd} + +\usepackage{Sweave} +\SweaveOpts{engine=R,eps = FALSE} +%\VignetteIndexEntry{Sharpe Ratio Indifference Curve} +%\VignetteDepends{PerformanceAnalytics} +%\VignetteKeywords{Benchmark Sharpe Ratio,Sharpe Ratio Indifference Curve,Benchmark Sharpe Ratio Plots} +%\VignettePackage{PerformanceAnalytics} + +\begin{document} +\SweaveOpts{concordance=TRUE} + +\title{Sharpe Ratio Indifference Curve} +% \keywords{Sharpe Ratio Indifference Curve,Benchmark Sharpe Ratio,risk,benchmark,portfolio} + +\makeatletter +\makeatother +\maketitle + +\begin{abstract} + + This vignette gives an overview of the Benchmark Sharpe Ratio, Sharpe Ratio Indifference Curve and various plots associated with a Benchmark Sharpe Ratio.It gives an overview of the usability of the functions and its application. + + \end{abstract} + +<>= +library(PerformanceAnalytics) +data(edhec) +@ + +<>= +source("/home/pulkit/workspace/GSOC/PerformanceAnalytics/sandbox/pulkit/week2/code/BenchmarkSR.R") +@ + + \section{Benchmark Sharpe Ratio} + + The performance of an Equal Volatility Weights benchmark ($SR_B$) is fully characterized in terms of: + +1. Number of approved strategies (S). +2. Average SR among strategies (SR). +3. Average off-diagonal correlations among strategies($\bar{\rho}$)). + +The benchmark SR is a linear function of the average SR of the individual strategies, and a decreasing convex function of the number of strategies and the average pairwise correlation. + +The benchmark Sharpe Ratio is given by the following equation. 
+ +\deqn{SR_B = \bar{SR}\sqrt{\frac{S}{1+(S-1)\bar{\rho}}}} + +<<>>= +BenchmanrkSR(edhec) +@ + +\end{document} \ No newline at end of file From noreply at r-forge.r-project.org Wed Jun 26 18:59:14 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 26 Jun 2013 18:59:14 +0200 (CEST) Subject: [Returnanalytics-commits] r2445 - in pkg/PerformanceAnalytics/sandbox/pulkit: . week1/code week2/code week2/tests Message-ID: <20130626165914.5676F184F87@r-forge.r-project.org> Author: pulkit Date: 2013-06-26 18:59:14 +0200 (Wed, 26 Jun 2013) New Revision: 2445 Added: pkg/PerformanceAnalytics/sandbox/pulkit/week2/tests/test_SharpeIndifference.R Removed: pkg/PerformanceAnalytics/sandbox/pulkit/MinTRL.R pkg/PerformanceAnalytics/sandbox/pulkit/PSRopt.R pkg/PerformanceAnalytics/sandbox/pulkit/ProbSharpe.Rnw pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/chart.PSR.R Modified: pkg/PerformanceAnalytics/sandbox/pulkit/week2/code/BenchmarkSR.R Log: Added unit tests for Benchmark SR Deleted: pkg/PerformanceAnalytics/sandbox/pulkit/MinTRL.R =================================================================== --- pkg/PerformanceAnalytics/sandbox/pulkit/MinTRL.R 2013-06-26 15:05:42 UTC (rev 2444) +++ pkg/PerformanceAnalytics/sandbox/pulkit/MinTRL.R 2013-06-26 16:59:14 UTC (rev 2445) @@ -1,78 +0,0 @@ -#'@title Minimum Track Record Length -#' -#'@description -#'?How long should a track record be in order to have statistical confidence -#'that its Sharpe ratio is above a given threshold? . if a track record is shorter#' than MinTRL, we do not have enough confidence that the observed ? is above the designated threshold -#' -#'@aliases MinTrackRecord -#' -#'@param R the return series -#'@param Rf the risk free rate of return -#'@param refSR the reference Sharpe Ratio -#'@param p the confidence level -#'@param weights the weights for the portfolio -#'@param sr Sharpe Ratio -#'@param sk Skewness -#'@param kr Kurtosis -#' -#'@reference Bailey, David H. and Lopez de Prado, Marcos, \emph{The Sharpe Ratio -#'Efficient Frontier} (July 1, 2012). Journal of Risk, Vol. 15, No. 
2, Winter -#' 2012/13 -#'@keywords ts multivariate distribution models -#'@examples -#' -#'data(edhec) -#'MinTrackRecord(edhec[,1],0.20) - - -MinTrackRecord<-function(R = NULL, refSR,Rf=0,p = 0.95, weights = NULL,sr = NULL,sk = NULL, kr = NULL, ...){ - columns = 1 - columnnames = NULL - #Error handling if R is not NULL - if(!is.null(R)){ - x = checkData(R) - columns = ncol(x) - n = nrow(x) - #Checking if the weights are provided or not - if(!is.null(weights)){ - if(length(weights)!=columns){ - stop("number of items in weights is not equal to the number of columns in R") - } - else{ - # A potfolio is constructed by applying the weights - x = Return.portfolio(R,weights) - sr = SharpeRatio(x, Rf, p, "StdDev") - sk = skewness(x) - kr = kurtosis(x) - } - } - else{ - sr = SharpeRatio(x, Rf, p, "StdDev") - sk = skewness(x) - kr = kurtosis(x) - } - - columnnames = colnames(x) - - } - # If R is passed as null checking for sharpe ratio , skewness and kurtosis - else{ - if(is.null(sr) | is.null(sk) | is.null(kr)){ - stop("You must either pass R or the Sharpe ratio, Skewness, Kurtosis,n etc") - } - } - #If weights are not taken into account a message is displayed - if(is.null(weights)){ - message("no weights passed,will calculate Probability Sharpe Ratio for each column") - } - - if(!is.null(dim(Rf))) - Rf = checkData(Rf) - result = 1 + (1 - sk*sr + ((kr-1)/4)*sr^2)*(qnorm(p)/(sr-refSR))^2 - if(!is.null(dim(result))){ - colnames(result) = columnnames - rownames(result) = paste("Minimum Track Record Length(p=",round(p*100,1),"%):") - } - return(result) -} - Deleted: pkg/PerformanceAnalytics/sandbox/pulkit/PSRopt.R =================================================================== --- pkg/PerformanceAnalytics/sandbox/pulkit/PSRopt.R 2013-06-26 15:05:42 UTC (rev 2444) +++ pkg/PerformanceAnalytics/sandbox/pulkit/PSRopt.R 2013-06-26 16:59:14 UTC (rev 2445) @@ -1,162 +0,0 @@ -#'@title Implementation of PSR Portfolio Optimization -#'@description -#'Maximizing for PSR leads to better diversified and more balanced hedge fund allocations compared to the concentrated outcomes of Sharpe ratio maximization.We would like to find the vector of weights that maximize the expression.Gradient Ascent Logic is used to compute the weights using the Function PsrPortfolio -#'@param R The return series -#'@param bounds The bounds for the weights -#'@param MaxIter The Maximum number of iterations -#'@param delta The value of delta Z - -PsrPortfolio<-function(R,bounds=NULL,MaxIter = 1000,delta = 0.005){ - - x = checkData(R) - columns = ncol(x) - n = nrow(x) - columnnames = colnames(x) - - - if(is.null(bounds)){ - bounds = matrix(rep(c(0,1),columns),nrow = columns,byrow = TRUE) - } - - #Optimization Function - optimize<-function(){ - weights = rep(1,columns)/columns - d1z = 0 - z = 0 - iter = 0 - mean = NULL - for(column in 1:columns){ - mean = c(mean,mean(x[,column])) - } - flag = TRUE - while(flag){ - if(iter == MaxIter) break - dZ = get_d1Zs(mean,weights) - if(dZ$z bounds[i,1]) flag = FALSE - } - return(TRUE) - } - - #Calculate the step size to change the weights - stepSize<-function(weights,d1Z){ - if(length(which(d1Z!=0)) == 0){ - return(NULL) - } - weights[which(abs(d1Z)==max(abs(d1Z)))] = weights[which(abs(d1Z)==max(abs(d1Z)))]+delta/d1Z[which(abs(d1Z)==max(abs(d1Z)))] - # OR all the weights should be changed ? 
- #weights = weights + delta/d1Z - weights = weights/sum(weights) - return(weights) - - } - #To get the first differentials - get_d1Zs<-function(mean,weights){ - d1Z = NULL - m = NULL - x_portfolio = Return.portfolio(x,weights) - mu = mean(x_portfolio) - sd = StdDev(x_portfolio) - sk = skewness(x_portfolio) - kr = kurtosis(x_portfolio) - stats = c(mu,sd,sk,kr) - m = c(stats[1],stats[2]^2,stats[3]*(stats[2]^3),stats[4]*(stats[2]^2)) - SR = get_SR(stats,n) - meanSR = SR$meanSR - sigmaSR = SR$sigmaSR - for(i in 1:columns){ - d1Z = c(d1Z,get_d1Z(stats,m,meanSR,sigmaSR,mean,weights,i)) - } - dZ = list("d1Z"=d1Z,"z"=meanSR/sigmaSR) - - return(dZ) - } - - get_d1Z<-function(stats,m,meanSR,sigmaSR,mean,weights,index){ - d1Mu = get_d1Mu(mean,index) - d1Sigma = get_d1Sigma(stats[2],mean,weights,index) - d1Skew = get_d1Skew(d1Sigma,stats[2],mean,weights,index,m[2]) - d1Kurt = get_d1Kurt(d1Sigma,stats[2],mean,weights,index,m[3]) - d1meanSR = (d1Mu*stats[2]-d1Sigma*stats[1])/stats[2]^2 - d1sigmaSR = (d1Kurt * meanSR^2+2*meanSR*d1meanSR*(stats[4]-1))/4 - d1sigmaSR = d1sigmaSR - d1Skew*meanSR+d1meanSR*stats[3] - d1sigmaSR = (d1sigmaSR/2)*sigmaSR*(n-1) - d1Z = (d1meanSR*sigmaSR-d1sigmaSR*meanSR)/sigmaSR^2 - return(d1Z) - } - - get_d1Mu<-function(mean,index){ - return(mean[index]) - } - - get_d1Sigma<-function(sigma,mean,weights,index){ - return(get_dnMoments(mean,weights,2,1,index)/(2*sigma)) - } - - get_d1Skew<-function(d1Sigma,sigma,mean,weights,index,m3){ - d1Skew = get_dnMoments(mean,weights,3,1,index)*sigma^3 - d1Skew = d1Skew - 3*sigma^2*d1Sigma*m3 - d1Skew = d1Skew/sigma^6 - return(d1Skew) - } - - get_d1Kurt<-function(d1Sigma,sigma,mean,weights,index,m4){ - d1Kurt = get_dnMoments(mean,weights,4,1,index)*sigma^4 - d1Kurt = d1Kurt - 4*sigma^3*d1Sigma*m4 - d1Kurt = d1Kurt/sigma^8 - return(d1Kurt) - } - - get_dnMoments<-function(mean,weights,mOrder,dOrder,index){ - sum = 0 - x0 = 1 - for(i in 1:dOrder){ - x0 = x0*(mOrder-i) - } - x_mat = as.matrix(na.omit(x)) - for(i in 1:n){ - x1 = 0 - x2 = (x_mat[i,index]-mean[index])^dOrder - for(j in 1:columns){ - x1 = x1 + weights[j]*(x_mat[i,j]-mean[j]) - } - sum = sum + x2*x1^(mOrder-dOrder) - } - return(x0*sum/n) - } - - # TO get meanSR and sigmaSR - get_SR<-function(stats,n){ - meanSR = stats[1]/stats[2] - sigmaSR = ((1-meanSR*stats[3]+(meanSR^2)*(stats[4]-1)/4)/(n-1))^0.5 - SR<-list("meanSR"=meanSR,"sigmaSR"=sigmaSR) - return(SR) - } - -weights = optimize() - result = matrix(weights,nrow = columns) - rownames(result) = columnnames -return(result) -} - - - - - Deleted: pkg/PerformanceAnalytics/sandbox/pulkit/ProbSharpe.Rnw =================================================================== --- pkg/PerformanceAnalytics/sandbox/pulkit/ProbSharpe.Rnw 2013-06-26 15:05:42 UTC (rev 2444) +++ pkg/PerformanceAnalytics/sandbox/pulkit/ProbSharpe.Rnw 2013-06-26 16:59:14 UTC (rev 2445) @@ -1,91 +0,0 @@ -\documentclass[12pt,letterpaper,english]{article} -\usepackage{times} -\usepackage[T1]{fontenc} -\IfFileExists{url.sty}{\usepackage{url}} - {\newcommand{\url}{\texttt}} - -\usepackage{babel} -\usepackage{Rd} - -\usepackage{Sweave} -\SweaveOpts{engine=R,eps = FALSE} -%\VignetteIndexEntry{Probabilistic Sharpe Ratio} -%\VignetteDepends{PerformanceAnalytics} -%\VignetteKeywords{Probabilistic Sharpe Ratio,Minimum Track Record Length,risk,benchmark,portfolio} -%\VignettePackage{PerformanceAnalytics} - -\begin{document} -\SweaveOpts{concordance=TRUE} - -\title{ Probabilistic Sharpe Ratio } - -% \keywords{Probabilistic Sharpe Ratio,Minimum Track Record 
Length,risk,benchmark,portfolio} - -\makeatletter -\makeatother -\maketitle - -\begin{abstract} - - This vignette gives an overview of the Probabilistic Sharpe Ratio , Minimum Track Record Length and the Probabilistic Sharpe Ratio Optimization technique used to find the optimal portfolio that maximizes the Probabilistic Sharpe Ratio. It gives an overview of the usability of the functions and its application" - -\end{abstract} - -<>= -library(PerformanceAnalytics) -@ - - -<>= -source("/home/pulkit/workspace/GSOC/PerformanceAnalytics/sandbox/pulkit/ProbSharpeRatio.R") -@ - -<>= -source("/home/pulkit/workspace/GSOC/PerformanceAnalytics/sandbox/pulkit/MinTRL.R") -@ - -\section{Probabilistic Sharpe Ratio} - Given a predefined benchmark Sharpe ratio $SR^\ast$ , the observed Sharpe ratio $\hat{SR}$ can be expressed in probabilistic terms as - - \deqn{\hat{PSR}(SR^\ast) = Z\biggl[\frac{(\hat{SR}-SR^\ast)\sqrt{n-1}}{\sqrt{1-\hat{\gamma_3}SR^\ast + \frac{\hat{\gamma_4}-1}{4}\hat{SR^2}}}\biggr]} - - Here $n$ is the track record length or the number of data points. It can be daily,weekly or yearly depending on the input given - - $\hat{\gamma{_3}}$ and $\hat{\gamma{_4}}$ are the skewness and kurtosis respectively. - It is not unusual to find strategies with irregular trading frequencies, such as weekly strategies that may not trade for a month. This poses a problem when computing an annualized Sharpe ratio, and there is no consensus as how skill should be measured in the context of irregular bets. Because PSR measures skill in probabilistic terms, it is invariant to calendar conventions. All calculations are done in the original frequency -of the data, and there is no annualization. - -<<>>= -data(edhec) -ProbSharpeRatio(edhec,refSR = 0.28) -@ - -\section{Minimum Track Record Length} - -If a track record is shorter than Minimum Track Record Length(MinTRL), we do -not have enough confidence that the observed $\hat{SR}$ is above the designated threshold -$SR^\ast$. Minimum Track Record Length is given by the following expression. - -\deqn{MinTRL = n^\ast = 1+\biggl[1-\hat{\gamma_3}\hat{SR}+\frac{\hat{\gamma_4}}{4}\hat{SR^2}\biggr]\biggl(\frac{Z_\alpha}{\hat{SR}-SR^\ast}\biggr)^2} - -$\gamma{_3}$ and $\gamma{_4}$ are the skewness and kurtosis respectively. It is important to note that MinTRL is expressed in terms of number of observations, not annual or calendar terms. - -<<>>= -data(edhec) -MinTrackRecord(edhec,refSR = 0.28) -@ - -\section{Probabilistic Sharpe Ratio Optimal Portfolio} - -We would like to find the vector of weights that maximize the expression - - \deqn{\hat{PSR}(SR^\ast) = Z\biggl[\frac{(\hat{SR}-SR^\ast)\sqrt{n-1}}{\sqrt{1-\hat{\gamma_3}SR^\ast + \frac{\hat{\gamma_4}-1}{4}\hat{SR^2}}}\biggr]} - -where -\eqn{\sigma = \sqrt{E[(r-\mu)^2]}} ,its standard deviation. -\eqn{\gamma_3=\frac{E\biggl[(r-\mu)^3\biggr]}{\sigma^3}} its skewness,\eqn{\gamma_4=\frac{E\biggl[(r-\mu)^4\biggr]}{\sigma^4}} its kurtosis and \eqn{SR = \frac{\mu}{\sigma}} its Sharpe Ratio. - -Because \eqn{\hat{PSR}(SR^\ast)=Z[\hat{Z^\ast}]} is a monotonic increasing function of \eqn{\hat{Z^\ast}}. This optimal vector is invariant of the value adopted by the parameter \eqn{SR^\ast}. 
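A direct numerical check of $\hat{PSR}(SR^\ast)$ is a one-liner from sample moments. The following is only a minimal sketch: it assumes \code{edhec} is loaded as in the earlier chunks, takes a zero risk-free rate, and mirrors the pnorm-based computation used by these functions (in which $\hat{\gamma_3}$ multiplies $\hat{SR}$):

<<>>=
ret <- edhec[, 1]
n <- nrow(ret)
sr <- mean(ret)/sd(ret)                  # non-annualized Sharpe ratio, Rf = 0
g3 <- skewness(ret, method = "moment")   # sample skewness
g4 <- kurtosis(ret, method = "moment")   # sample (raw, non-excess) kurtosis
refSR <- 0.28
pnorm((sr - refSR)*sqrt(n - 1)/sqrt(1 - g3*sr + (g4 - 1)/4*sr^2))
@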
-
-\end{document}
-

Deleted: pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/chart.PSR.R
===================================================================
--- pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/chart.PSR.R	2013-06-26 15:05:42 UTC (rev 2444)
+++ pkg/PerformanceAnalytics/sandbox/pulkit/week1/code/chart.PSR.R	2013-06-26 16:59:14 UTC (rev 2445)
@@ -1,35 +0,0 @@
-#'@title Probabilistic Sharpe Ratio
-#'@description
-#'Given a predefined
-#'benchmark Sharpe ratio \eqn{SR^\ast}, the observed Sharpe ratio \eqn{\hat{SR}} can be expressed
-#' in probabilistic terms.
-#'
-#'@param R the return series
-#'@param Rf the risk free rate of return
-#'@param refSR the reference Sharpe Ratio
-#'@param p the confidence level
-#'@param weights the weights for the portfolio
-chart.PSR<-function(x,Rf,refSR,p=0.95,...){
-    for(column in 1:columns){
-      column.probsharpe <- psr(x[,column],Rf,p,refSR)
-      column.mintrack <- mintrl(x[,column],Rf,p,refSR)
-      if(column == 1){
-        probsharpe = column.probsharpe
-        mintrack = column.mintrack
-      }
-      else {
-        probsharpe = merge(probsharpe, column.probsharpe)
-        mintrack = merge(mintrack, column.mintrack)
-      }
-
-    }
-
-    probsharpe = rbind(probsharpe,mintrack)
-
-    colnames(probsharpe) = columnnames
-    probsharpe = reclass(probsharpe, x)
-    rownames(probsharpe)=c("PSR","MinTRL")
-    return(probsharpe)
-
-}
-

Modified: pkg/PerformanceAnalytics/sandbox/pulkit/week2/code/BenchmarkSR.R
===================================================================
--- pkg/PerformanceAnalytics/sandbox/pulkit/week2/code/BenchmarkSR.R	2013-06-26 15:05:42 UTC (rev 2444)
+++ pkg/PerformanceAnalytics/sandbox/pulkit/week2/code/BenchmarkSR.R	2013-06-26 16:59:14 UTC (rev 2445)
@@ -9,6 +9,7 @@
 #' the input with the benchmark Sharpe Ratio as the output.
 #'
 #'@aliases BenchmarkSR
+#'\deqn{SR_B = \bar{SR}\sqrt{\frac{S}{1+(S-1)\bar{\rho}}}}
 #'
 #'@param R a vector, matrix, data frame,timeseries or zoo object of asset returns
 #'

Added: pkg/PerformanceAnalytics/sandbox/pulkit/week2/tests/test_SharpeIndifference.R
===================================================================
--- pkg/PerformanceAnalytics/sandbox/pulkit/week2/tests/test_SharpeIndifference.R	                        (rev 0)
+++ pkg/PerformanceAnalytics/sandbox/pulkit/week2/tests/test_SharpeIndifference.R	2013-06-26 16:59:14 UTC (rev 2445)
@@ -0,0 +1,9 @@
+library(RUnit)
+library(PerformanceAnalytics)
+data(edhec)
+
+test_BenchmarkSR<-function(){
+
+  checkEqualsNumeric(BenchmanrkSR(edhec),0.170288,tolerance = 1.0e-6)
+
+}
\ No newline at end of file

From noreply at r-forge.r-project.org  Wed Jun 26 19:34:40 2013
From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org)
Date: Wed, 26 Jun 2013 19:34:40 +0200 (CEST)
Subject: [Returnanalytics-commits] r2446 - in pkg/Meucci: .
R demo man Message-ID: <20130626173440.9EF5418573F@r-forge.r-project.org> Author: xavierv Date: 2013-06-26 19:34:40 +0200 (Wed, 26 Jun 2013) New Revision: 2446 Added: pkg/Meucci/R/TwoDimEllipsoid.R pkg/Meucci/demo/S_WishartLocationDispersion.R pkg/Meucci/man/TwoDimEllipsoid.Rd Modified: pkg/Meucci/DESCRIPTION pkg/Meucci/NAMESPACE Log: - added S_WishartLocationDispersion demo file and TwoDimEllipsoid plotting function Modified: pkg/Meucci/DESCRIPTION =================================================================== --- pkg/Meucci/DESCRIPTION 2013-06-26 16:59:14 UTC (rev 2445) +++ pkg/Meucci/DESCRIPTION 2013-06-26 17:34:40 UTC (rev 2446) @@ -69,3 +69,4 @@ 'StudentTCopulaPdf.R' 'ConvertChangeInYield2Price.R' 'ProjectionStudentT.R' + 'TwoDimEllipsoid.R' Modified: pkg/Meucci/NAMESPACE =================================================================== --- pkg/Meucci/NAMESPACE 2013-06-26 16:59:14 UTC (rev 2445) +++ pkg/Meucci/NAMESPACE 2013-06-26 17:34:40 UTC (rev 2446) @@ -33,3 +33,4 @@ export(subIntervals) export(SummStats) export(Tweak) +export(TwoDimEllipsoid) Added: pkg/Meucci/R/TwoDimEllipsoid.R =================================================================== --- pkg/Meucci/R/TwoDimEllipsoid.R (rev 0) +++ pkg/Meucci/R/TwoDimEllipsoid.R 2013-06-26 17:34:40 UTC (rev 2446) @@ -0,0 +1,98 @@ +#' This script computes the location-dispersion ellipsoid of the normalized (unit variance, zero expectation) +#' first diagonal and off-diagonal elements of a 2x2 Wishart distribution as a function of the inputs, +#' as described in A. Meucci, "Risk and Asset Allocation", Springer, 2005, Chapter 2. +#' +#' @param Location : [vector] (2 x 1) location vector (typically the expected value +#' @param Square_Dispersion : [matrix] (2 x 2) scatter matrix Square_Dispersion (typically the covariance matrix) +#' @param Scale : [scalar] a scalar Scale, that specifies the scale (radius) of the ellipsoid +#' @param PlotEigVectors : [boolean] true then the eigenvectors (=principal axes) are plotted +#' @param PlotSquare : [boolean] true then the enshrouding box is plotted. 
If Square_Dispersion is the covariance +#' +#' @return E : [figure handle] +#' +#' @references +#' \url{http://} +#' See Meucci's script for "TwoDimEllipsoid.m" +#' +#' @author Xavier Valls \email{flamejat@@gmail.com} +#' @export + + +TwoDimEllipsoid = function( Location, Square_Dispersion, Scale = 1, PlotEigVectors = FALSE, PlotSquare = FALSE ) +{ + + ########################################################################################################## + ### compute the ellipsoid in the r plane, solution to ((R-Location)' * Dispersion^-1 * (R-Location) ) = Scale^2 + + Eigen = eigen(Square_Dispersion); + Centered_Ellipse = c(); + Angle = seq( 0, 2*pi, pi/500 ); + NumSteps = length(Angle); + + for( i in 1 : NumSteps ) + { + # normalized variables (parametric representation of the ellipsoid) + y = c( cos( Angle[ i ] ), sin( Angle[ i ] ) ); + Centered_Ellipse = c( Centered_Ellipse, Eigen$vectors %*% diag(sqrt(Eigen$values)) %*% y ); ##ok + } + + R = Location %*% array( 1, NumSteps ) + Scale * Centered_Ellipse; + + ########################################################################################################## + ### Plot the ellipsoid + + E = lines( R[1, ], R[2, ], col = "red", lwd = 2 ); + + ########################################################################################################## + ### Plot a rectangle centered in Location with semisides of lengths Dispersion[ 1] and Dispersion[ 2 ], respectively + + if( PlotSquare ) + { + Dispersion = sqrt( diag( Square_Dispersion ) ); + Vertex_LowRight_A = Location[ 1 ] + Scale * Dispersion[ 1 ]; + Vertex_LowRight_B = Location[ 2 ] - Scale * Dispersion[ 2 ]; + Vertex_LowLeft_A = Location[ 1 ] - Scale * Dispersion[ 1 ]; + Vertex_LowLeft_B = Location[ 2 ] - Scale * Dispersion[ 2 ]; + Vertex_UpRight_A = Location[ 1 ] + Scale * Dispersion[ 1 ]; + Vertex_UpRight_B = Location[ 2 ] + Scale * Dispersion[ 2 ]; + Vertex_UpLeft_A = Location[ 1 ] - Scale * Dispersion[ 1 ]; + Vertex_UpLeft_B = Location[ 2 ] + Scale * Dispersion[ 2 ]; + + Square = rbind( c( Vertex_LowRight_A, Vertex_LowRight_B ), + c( Vertex_LowLeft_A, Vertex_LowLeft_B ), + c( Vertex_UpLeft_A, Vertex_UpLeft_B ), + c( Vertex_UpRight_A, Vertex_UpRight_B ), + c( Vertex_LowRight_A, Vertex_LowRight_B ) ); + + h = lines(Square[ , 1 ], Square[ , 2 ], col = "red", lwd = 2 ); + + } + + ########################################################################################################## + ### Plot eigenvectors in the r plane (centered in Location) of length the square root of the eigenvalues (rescaled) + if( PlotEigVectors ) + { + L_1 = Scale * sqrt( Eigen$values[ 1 ] ); + L_2 = Scale * sqrt( Eigen$values[ 2 ] ); + + # deal with reflection: matlab chooses the wrong one + Sign = sign( Eigen$vectors[ 1, 1 ] ); + + # eigenvector 1 + Start_A = Location[ 1 ]; + End_A = Location[ 1 ] + Sign * (Eigen$vectors[ 1, 1 ]) * L_1; + Start_B = Location[ 2 ]; + End_B = Location[ 2 ] + Sign * (Eigen$vectors[ 1, 2 ]) * L_1; + + h = lines( c( Start_A, End_A ), c( Start_B, End_B ), col = "red", lwd = 2 ); + + # eigenvector 2 + Start_A = Location[ 1 ]; + End_A = Location[ 1 ] + ( Eigen$vectors[ 2, 1 ] * L_2); + Start_B = Location[ 2 ]; + End_B = Location[ 2 ] + ( Eigen$vectors[ 2, 2 ] * L_2); + + h = lines( c( Start_A, End_A ), c( Start_B, End_B ), col = "red", lwd = 2 ); + + } +} \ No newline at end of file Added: pkg/Meucci/demo/S_WishartLocationDispersion.R =================================================================== --- pkg/Meucci/demo/S_WishartLocationDispersion.R (rev 0) +++ 
pkg/Meucci/demo/S_WishartLocationDispersion.R 2013-06-26 17:34:40 UTC (rev 2446) @@ -0,0 +1,87 @@ +library(mvtnorm); +library(psych); + +#' This script computes the location-dispersion ellipsoid of the normalized (unit variance, zero expectation) +#' first diagonal and off-diagonal elements of a 2x2 Wishart distribution as a function of the inputs, +#' as described in A. Meucci, "Risk and Asset Allocation", Springer, 2005, Chapter 2. +#' +#' @references +#' \url{http://} +#' See Meucci's script for "S_WishartCorrelation.m" +#' +#' @author Xavier Valls \email{flamejat@@gmail.com} +#' @export + +################################################################################################################### +### Set input parameters +s = c( 1, 1 ); # variances +r = -0.9; # correlation +Sigma = diag( s ) %*% rbind( c( 1, r ), c( r, 1 ) ) %*% diag( s ); +nu = 5; # degrees of freedom +nSim = 10000; + +################################################################################################################### +### Set input parameters + +W_xx = matrix( NaN, nSim, 1 ); +W_yy = matrix( NaN, nSim, 1 ); +W_xy = matrix( NaN, nSim, 1 ); +Vec_W = matrix( NaN, nSim, 4 ); +Dets = matrix( NaN, nSim, 1 ); +Traces = matrix( NaN, nSim, 1 ); + + +for( j in 1 : nSim ) +{ + X = rmvnorm( nu, matrix( 0, 2, 1 ), Sigma); + W = t( X ) %*% X; + + Dets[ j ] = det( W ); + Traces[ j ] = tr( W ); + + W_xx[ j ] = W[ 1, 1 ]; + W_yy[ j ] = W[ 2, 2 ]; + W_xy[ j ] = W[ 1, 2 ]; + + Vec_W [ j, ] = as.vector( W ); +} + +# compute expected values of W_xx and W_xy, see (2.227) in "Risk and Asset Allocation - Springer +E_xx = nu * Sigma[ 1, 1 ]; +E_xy = nu * Sigma[ 1, 2 ]; + +# compute covariance matrix of W_xx and W_xy, see (2.228) in "Risk and Asset Allocation - Springer +m = 1; +n = 1; +p = 1; +q = 1; +var_Wxx = nu * ( Sigma[ m, p ] * Sigma[ n, q ] + Sigma[ m, q ] * Sigma[ n, p ] ); +m = 1; +n = 2; +p = 1; +q = 2; +var_Wxy = nu * ( Sigma[ m, p ] * Sigma[ n, q ] + Sigma[ m, q ] * Sigma[ n, p ] ); +m = 1; +n = 1; +p = 1; +q = 2; +cov_Wxx_Wxy = nu * ( Sigma[ m, p ] * Sigma[ n, q ] + Sigma[ m, q ] * Sigma[ n, p ] ); + +S_xx_xy = rbind( cbind( var_Wxx, cov_Wxx_Wxy ), cbind( cov_Wxx_Wxy, var_Wxy )); + +# compute X_1 and X_2, i.e. normalized version of W_xx and W_xy +X_1 = ( W_xx - E_xx ) / sqrt( var_Wxx ); +X_2 = ( W_xy - E_xy ) / sqrt( var_Wxy ); +X = cbind( X_1, X_2 ); + +# compute expected value and covariance of X_1 and X_2 +E = rbind( 0, 0 ); +E_hat = t( apply( X, 2, mean) ); + +S = diag( 1 / c( sqrt( var_Wxx ), sqrt( var_Wxy ))) %*% S_xx_xy %*% diag( 1 / c( sqrt( var_Wxx ), sqrt( var_Wxy ))); +S_hat = cov( X ); + +figure(); +plot( X_1, X_2, xlab = "X_1", ylab = "X_2"); + +TwoDimEllipsoid(E, S, 1, TRUE, FALSE); Added: pkg/Meucci/man/TwoDimEllipsoid.Rd =================================================================== --- pkg/Meucci/man/TwoDimEllipsoid.Rd (rev 0) +++ pkg/Meucci/man/TwoDimEllipsoid.Rd 2013-06-26 17:34:40 UTC (rev 2446) @@ -0,0 +1,44 @@ +\name{TwoDimEllipsoid} +\alias{TwoDimEllipsoid} +\title{This script computes the location-dispersion ellipsoid of the normalized (unit variance, zero expectation) +first diagonal and off-diagonal elements of a 2x2 Wishart distribution as a function of the inputs, +as described in A. 
Meucci, "Risk and Asset Allocation", Springer, 2005, Chapter 2.} +\usage{ + TwoDimEllipsoid(Location, Square_Dispersion, Scale = 1, + PlotEigVectors = FALSE, PlotSquare = FALSE) +} +\arguments{ + \item{Location}{: [vector] (2 x 1) location vector + (typically the expected value} + + \item{Square_Dispersion}{: [matrix] (2 x 2) scatter + matrix Square_Dispersion (typically the covariance + matrix)} + + \item{Scale}{: [scalar] a scalar Scale, that specifies + the scale (radius) of the ellipsoid} + + \item{PlotEigVectors}{: [boolean] true then the + eigenvectors (=principal axes) are plotted} + + \item{PlotSquare}{: [boolean] true then the enshrouding + box is plotted. If Square_Dispersion is the covariance} +} +\value{ + E : [figure handle] +} +\description{ + This script computes the location-dispersion ellipsoid of + the normalized (unit variance, zero expectation) first + diagonal and off-diagonal elements of a 2x2 Wishart + distribution as a function of the inputs, as described in + A. Meucci, "Risk and Asset Allocation", Springer, 2005, + Chapter 2. +} +\author{ + Xavier Valls \email{flamejat at gmail.com} +} +\references{ + \url{http://} See Meucci's script for "TwoDimEllipsoid.m" +} + From noreply at r-forge.r-project.org Wed Jun 26 19:37:39 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 26 Jun 2013 19:37:39 +0200 (CEST) Subject: [Returnanalytics-commits] r2447 - pkg/Meucci Message-ID: <20130626173739.757DA1854F2@r-forge.r-project.org> Author: braverock Date: 2013-06-26 19:37:39 +0200 (Wed, 26 Jun 2013) New Revision: 2447 Added: pkg/Meucci/THANKS Modified: pkg/Meucci/DESCRIPTION Log: - bump version, add THANKS file Modified: pkg/Meucci/DESCRIPTION =================================================================== --- pkg/Meucci/DESCRIPTION 2013-06-26 17:34:40 UTC (rev 2446) +++ pkg/Meucci/DESCRIPTION 2013-06-26 17:37:39 UTC (rev 2447) @@ -2,9 +2,9 @@ Type: Package Title: Collection of functionality ported from the MATLAB code of Attilio Meucci. -Version: 0.2.1 +Version: 0.2.2 Date: $Date: 2012-06-06 15:18:48 -0500 (Wed, 06 Jun 2012) $ -Author: Ram Ahluwalia, Manan Shah +Author: Ram Ahluwalia, Manan Shah, Xavier Vals Maintainer: Brian G. Peterson Description: Attilio Meucci is a thought leader in advanced risk and portfolio management. His innovations include Entropy Pooling (technique for fully @@ -17,9 +17,9 @@ regularly posts code along with his working papers. Unfortunately for those of us using R, he prefers to code in Matlab. Some of that code requires Matlab's additional Optimization Toolkit. This package is the - result of a Google Summer of Code project in 2012 that sought to convert a - subset of his Matlab code to R to make it more widely assessible to R - users. All of Meucci's original MATLAB source is available on + result of a Google Summer of Code project in 2012 and 2013 that seeks + to convert a subset of his Matlab code to R to make it more widely + accessible to R users. All of Meucci's original MATLAB source is available on www.symmys.com. That code should be considered the reference code that this package seeks to port to R. 
This package remains under development (and likely will as long as Attilio keeps publishing code), and

Added: pkg/Meucci/THANKS
===================================================================
--- pkg/Meucci/THANKS	                        (rev 0)
+++ pkg/Meucci/THANKS	2013-06-26 17:37:39 UTC (rev 2447)
@@ -0,0 +1,8 @@
+This package would not be possible without the work by Attilio Meucci not only
+to do the research, but to publish his code in an open format for others to use.
+
+Attilio has very graciously collaborated with the R port of his code, and supported
+the work and understanding that goes with any port. It is our hope that this
+package will be incorporated into later published versions of his work, so that
+language choice is not a restricting factor in using his research for teaching or
+live portfolios.
\ No newline at end of file

From noreply at r-forge.r-project.org  Wed Jun 26 20:38:20 2013
From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org)
Date: Wed, 26 Jun 2013 20:38:20 +0200 (CEST)
Subject: [Returnanalytics-commits] r2448 - in pkg/FactorAnalytics: R man
Message-ID: <20130626183820.576F81858A7@r-forge.r-project.org>

Author: chenyian
Date: 2013-06-26 20:38:19 +0200 (Wed, 26 Jun 2013)
New Revision: 2448

Modified:
   pkg/FactorAnalytics/R/predict.FundamentalFactorModel.r
   pkg/FactorAnalytics/man/predict.FundamentalFactorModel.Rd
Log:


Modified: pkg/FactorAnalytics/R/predict.FundamentalFactorModel.r
===================================================================
--- pkg/FactorAnalytics/R/predict.FundamentalFactorModel.r	2013-06-26 17:37:39 UTC (rev 2447)
+++ pkg/FactorAnalytics/R/predict.FundamentalFactorModel.r	2013-06-26 18:38:19 UTC (rev 2448)
@@ -1,12 +1,19 @@
 #' predict method for FundamentalFactorModel object
 #'
 #' Generic function of predict method for fitFundamentalFactorModel.
+#'
+#' newdata must be a data.frame and contains the date variable, asset variable and exactly
+#' the exposure names that are used in the fit object by \code{fitFundamentalFactorModel}
 #'
 #' @param fit "FundamentalFactorModel" object
+#' @param newdata An optional data frame in which to look for variables with which to predict.
+#' If omitted, the fitted values are used.
+#' @param new.assetvar specify the asset variable in newdata if newdata is provided.
+#' @param new.datevar specify the date variable in newdata if newdata is provided.
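# A hypothetical usage sketch (the fit object and the column names "DATE" and
# "TICKER" below are invented for illustration, not part of this commit):
#   fit <- fitFundamentalFactorModel(...)        # fitted elsewhere
#   predict(fit)                                 # fitted values from the stored data
#   predict(fit, newdata = new.df,               # new.df: data.frame holding the date,
#           new.datevar = "DATE",                # asset and exposure columns used in
#           new.assetvar = "TICKER")             # the original fit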
#' @export #' @author Yi-An Chen #' -predict.FundamentalFactorModel <- function(fit,newdata){ +predict.FundamentalFactorModel <- function(fit,newdata,new.assetvar,new.datevar){ # if there is no newdata provided # calculate fitted values @@ -21,37 +28,46 @@ f <- fit$factors # T X 3 exposure.names <- colnames(f)[-1] + + predictor <- function(data,datevar,assetvar) { + beta.all <- data[,c(datevar,assetvar,exposure.names)] # (N * T ) X 4 + names(beta.all)[1:2] <- c("time","assets.names") - if (missing(newdata) || is.null(newdata)) { - # ### calculated fitted values - # - + fitted <- rep(NA,numAssets) for (i in 1:numTimePoints) { - beta <- subset(beta.all, DATE == index(f)[i])[,exposure.names] - beta <- as.matrix(cbind(rep(1,numAssets),beta)) - fit.tmp <- beta %*% t(f[i,]) - fitted <- rbind(fitted,t(fit.tmp)) + beta <- subset(beta.all, time == index(f)[i] & assets.names == assets)[,exposure.names] + beta <- as.matrix(cbind(rep(1,numAssets),beta)) + fit.tmp <- beta %*% t(f[i,]) + fitted <- rbind(fitted,t(fit.tmp)) } fitted <- fitted[-1,] colnames(fitted) <- assets + return(fitted) + } + if (missing(newdata) || is.null(newdata)) { + ans <- predictor(fit$data,datevar,assetvar) } # predict returns by newdata if (!missing(newdata) && !is.null(newdata)) { - # check if newdata has the same data points as beta - if (dim(newdata) != c(numAssets*numTimePoints,numExposures)) { - stop("Dimension of newdata has to match mAssets*numTimePoints,numExposures") + # check if newdata has the same datevar and assetvar + if (class(newdata) != "data.frame"){ + stop("newdata has to be data.frame.") + } else if ( length(setdiff(unique(newdata$new.assetvar),assets))!= 0 ){ + stop("newAssetvar must have the same assets as assetvar") + } # check if newdata has the same data points as beta +else if (dim(newdata)[1] != numAssets*numTimePoints ) { + stop("length of newdata has to match numAssets*numTimePoints") + } else if( length(setdiff(intersect(names(newdata),exposure.names),exposure.names))!=0 ) { + stop("newdata must have exact the same exposure.names") } else { - - - + ans <- predictor(newdata,new.datevar,new.assetvar) } - } - +return(ans) } \ No newline at end of file Modified: pkg/FactorAnalytics/man/predict.FundamentalFactorModel.Rd =================================================================== --- pkg/FactorAnalytics/man/predict.FundamentalFactorModel.Rd 2013-06-26 17:37:39 UTC (rev 2447) +++ pkg/FactorAnalytics/man/predict.FundamentalFactorModel.Rd 2013-06-26 18:38:19 UTC (rev 2448) @@ -2,15 +2,31 @@ \alias{predict.FundamentalFactorModel} \title{predict method for FundamentalFactorModel object} \usage{ - predict.FundamentalFactorModel(fit, newdata) + predict.FundamentalFactorModel(fit, newdata, + new.assetvar, new.datevar) } \arguments{ \item{fit}{"FundamentalFactorModel" object} + + \item{newdata}{An optional data frame in which to look + for variables with which to predict. If omitted, the + fitted values are used.} + + \item{new.assetvar}{specify new asset variable in newdata + if newdata is provided.} + + \item{new.datevar}{speficy new date variable in newdata + if newdata is provided.} } \description{ Generic function of predict method for fitFundamentalFactorModel. 
} +\details{ + newdata must be data.frame and contians date variable, + asset variable and exact exposures names that are used in + fit object by \code{fitFundamentalFactorModel} +} \author{ Yi-An Chen } From noreply at r-forge.r-project.org Wed Jun 26 21:45:48 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 26 Jun 2013 21:45:48 +0200 (CEST) Subject: [Returnanalytics-commits] r2449 - pkg/FactorAnalytics/man Message-ID: <20130626194548.74610184EA2@r-forge.r-project.org> Author: chenyian Date: 2013-06-26 21:45:48 +0200 (Wed, 26 Jun 2013) New Revision: 2449 Removed: pkg/FactorAnalytics/man/fitMacroeconomicFactorModel.Rd Log: Deleted: pkg/FactorAnalytics/man/fitMacroeconomicFactorModel.Rd =================================================================== --- pkg/FactorAnalytics/man/fitMacroeconomicFactorModel.Rd 2013-06-26 18:38:19 UTC (rev 2448) +++ pkg/FactorAnalytics/man/fitMacroeconomicFactorModel.Rd 2013-06-26 19:45:48 UTC (rev 2449) @@ -1,111 +0,0 @@ -\name{fitMacroeconomicFactorModel} -\alias{fitMacroeconomicFactorModel} -\title{Fit macroeconomic factor model by time series regression techniques.} -\usage{ - fitMacroeconomicFactorModel(assets.names, factors.names, - data = data, factor.set = 3, - fit.method = c("OLS", "DLS", "Robust"), - variable.selection = c("stepwise", "all subsets", "lar", "lasso"), - decay.factor = 0.95, nvmax = 8, force.in = NULL, - subsets.method = c("exhaustive", "backward", "forward", "seqrep"), - lars.criteria = c("Cp", "cv")) -} -\arguments{ - \item{assets.names}{names of assets returns.} - - \item{factors.names}{names of factors returns.} - - \item{factor.set}{scalar, number of factors} - - \item{data}{a vector, matrix, data.frame, xts, timeSeries - or zoo object with asset returns and factors retunrs - rownames} - - \item{fit.method}{"OLS" is ordinary least squares method, - "DLS" is discounted least squares method. Discounted - least squares (DLS) estimation is weighted least squares - estimation with exponentially declining weights that sum - to unity. "Robust"} - - \item{variable.selection}{"stepwise" is traditional - forward/backward stepwise OLS regression, starting from - the initial set of factors, that adds factors only if the - regression fit as measured by the Bayesian Information - Criteria (BIC) or Akaike Information Criteria (AIC) can - be done using the R function step() from the stats - package. If \code{Robust} is chosen, the function - step.lmRob in Robust package will be used. "all subsets" - is Traditional all subsets regression can be done using - the R function regsubsets() from the package leaps. "lar" - , "lasso" is based on package "lars", linear angle - regression.} - - \item{decay.factor}{for DLS. Default is 0.95.} - - \item{nvmax}{control option for all subsets. maximum size - of subsets to examine} - - \item{force.in}{control option for all subsets. The - factors that should be in all models.} - - \item{subsets.method}{control option for all subsets. se - exhaustive search, forward selection, backward selection - or sequential replacement to search.} - - \item{lars.criteria}{either choose minimum "Cp": unbiased - estimator of the true rist or "cv" 10 folds - cross-validation. See detail.} -} -\value{ - an S3 object containing \item{asset.fit}{Fit objects for - each asset. 
This is the class "lm" for each object.} - \item{alpha.vec}{N x 1 Vector of estimated alphas.} - \item{beta.mat}{N x K Matrix of estimated betas.} - \item{r2.vec}{N x 1 Vector of R-square values.} - \item{residVars.vec}{N x 1 Vector of residual variances.} - \item{call}{function call.} \item{ret.assets}{Assets - returns of input data.} \item{factors Factors of input - data.} \item{variable.selection variables selected by the - user.} -} -\description{ - Fit macroeconomic factor model by time series regression - techniques. It creates the class of "MacroFactorModel". -} -\details{ - If \code{Robust} is chosen, there is no subsets but all - factors will be used. Cp is defined in - http://www-stat.stanford.edu/~hastie/Papers/LARS/LeastAngle_2002.pdf. - p17. -} -\examples{ -\dontrun{ -# load data from the database -data(managers.df) -ret.assets = managers.df[,(1:6)] -factors = managers.df[,(7:9)] -# fit the factor model with OLS -fit <- fitMacroeconomicFactorModel(ret.assets,factors,fit.method="OLS", - variable.selection="all subsets") -# summary of HAM1 -summary(fit$asset.fit$HAM1) -# plot actual vs. fitted over time for HAM1 -# use chart.TimeSeries() function from PerformanceAnalytics package -dataToPlot = cbind(fitted(fit$asset.fit$HAM1), na.omit(managers.df$HAM1)) -colnames(dataToPlot) = c("Fitted","Actual") -chart.TimeSeries(dataToPlot, main="FM fit for HAM1", - colorset=c("black","blue"), legend.loc="bottomleft") - } -} -\author{ - Eric Zivot and Yi-An Chen. -} -\references{ - 1. Efron, Hastie, Johnstone and Tibshirani (2002) "Least - Angle Regression" (with discussion) Annals of Statistics; - see also - http://www-stat.stanford.edu/~hastie/Papers/LARS/LeastAngle_2002.pdf. - 2. Hastie, Tibshirani and Friedman (2008) Elements of - Statistical Learning 2nd edition, Springer, NY. -} - From noreply at r-forge.r-project.org Wed Jun 26 22:19:19 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Wed, 26 Jun 2013 22:19:19 +0200 (CEST) Subject: [Returnanalytics-commits] r2450 - in pkg/Meucci: . 
R demo man Message-ID: <20130626201919.BF2CE185636@r-forge.r-project.org> Author: xavierv Date: 2013-06-26 22:19:19 +0200 (Wed, 26 Jun 2013) New Revision: 2450 Added: pkg/Meucci/R/PerformIidAnalysis.R pkg/Meucci/demo/S_DerivativesInvariants.R pkg/Meucci/man/PerformIidAnalysis.Rd Modified: pkg/Meucci/DESCRIPTION pkg/Meucci/NAMESPACE pkg/Meucci/R/LognormalCopulaPdf.R Log: - added S_DerivativesInvariants demo file and PerformIidAnalysis function Modified: pkg/Meucci/DESCRIPTION =================================================================== --- pkg/Meucci/DESCRIPTION 2013-06-26 19:45:48 UTC (rev 2449) +++ pkg/Meucci/DESCRIPTION 2013-06-26 20:19:19 UTC (rev 2450) @@ -70,3 +70,4 @@ 'ConvertChangeInYield2Price.R' 'ProjectionStudentT.R' 'TwoDimEllipsoid.R' + 'PerformIidAnalysis.R' Modified: pkg/Meucci/NAMESPACE =================================================================== --- pkg/Meucci/NAMESPACE 2013-06-26 19:45:48 UTC (rev 2449) +++ pkg/Meucci/NAMESPACE 2013-06-26 20:19:19 UTC (rev 2450) @@ -21,6 +21,7 @@ export(normalizeProb) export(PanicCopula) export(PartialConfidencePosterior) +export(PerformIidAnalysis) export(PlotDistributions) export(ProjectionStudentT) export(Raw2Central) Modified: pkg/Meucci/R/LognormalCopulaPdf.R =================================================================== --- pkg/Meucci/R/LognormalCopulaPdf.R 2013-06-26 19:45:48 UTC (rev 2449) +++ pkg/Meucci/R/LognormalCopulaPdf.R 2013-06-26 20:19:19 UTC (rev 2450) @@ -1,5 +1,3 @@ -library(pracma); - #' Computes the pdf of the copula of the lognormal distribution at the generic point u in the unit hypercube, #' as described in A. Meucci, "Risk and Asset Allocation", Springer, 2005. #' Added: pkg/Meucci/R/PerformIidAnalysis.R =================================================================== --- pkg/Meucci/R/PerformIidAnalysis.R (rev 0) +++ pkg/Meucci/R/PerformIidAnalysis.R 2013-06-26 20:19:19 UTC (rev 2450) @@ -0,0 +1,61 @@ +#' This function performs simple invariance (i.i.d.) tests on a time series, as described in +#' A. Meucci "Risk and Asset Allocation", Springer, 2005 +#' +#' @param Dates : [vector] (T x 1) dates +#' @param Data : [matrix] (T x N) data +#' @param Starting_Prices : [vector] (N x 1) +#' +#' @note it checks the evolution over time +# it checks that the variables are identically distributed by looking at the histogram of two subsamples +# it checks that the variables are independent by looking at the 1-lag scatter plot +# under i.i.d. the location-dispersion ellipsoid should be a circle +#' +#' @references +#' \url{http://} +#' See (6.77)-(6.79) in "Risk and Asset Allocation"-Springer (2005), by A. 
Meucci
+#' See Meucci's script for "ConvertCompoundedReturns2Price.m"
+#'
+#' @author Xavier Valls \email{flamejat@@gmail.com}
+#' @export

+# note: dim( Data, 1 ) is not valid R (dim takes a single argument), so the
+# dates default to the observation index
+PerformIidAnalysis = function( Dates = 1:length( Data ), Data, Str = "" )
+{
+
+	##########################################################################################################
+	### Time series over time
+	dev.new();
+	plot( Dates, Data, main = Str );
+	#datetick( 'x', 'mmmyy', 'keeplimits', 'keepticks' );
+
+
+	##########################################################################################################
+	### Test "identically distributed hypothesis": split observations into two sub-samples and plot histogram
+	Sample_1 = Data[ 1:round(length(Data) / 2) ];
+	# parentheses matter here: "a + 1:b" parses as "a + (1:b)" and would index out of range
+	Sample_2 = Data[ ( round(length(Data)/2) + 1 ):length(Data) ];
+	num_bins_1 = round(5 * log(length(Sample_1)));
+	num_bins_2 = round(5 * log(length(Sample_2)));
+	X_lim = c( min(Data) - .1 * (max(Data) - min(Data)), max(Data) + .1 * (max(Data) - min(Data)));
+
+	dev.new();
+
+	layout( matrix(c(1,1,2,2,0,3,3,0), 2, 4, byrow = TRUE), heights=c(1,1,1));
+	hist(Sample_1, num_bins_1, xlab = "", ylab = "", main = "first half" );
+	hist(Sample_2, num_bins_2, xlab = "", ylab = "", main = "second half" );
+
+	##########################################################################################################
+	### Test "independently distributed hypothesis": scatter plot of observations at lagged times
+
+	X = Data[ 1:( length(Data) - 1 ) ];
+	Y = Data[ 2:length(Data) ];
+	plot(X, Y, main = Str);  # use the passed title rather than a hardcoded one
+
+	m = cbind( apply( cbind( X, Y ), 2, mean ));
+	S = cov( cbind( X, Y ));
+	TwoDimEllipsoid( m, S, 2, FALSE, FALSE);
+	#axisLimits = axis;
+	#textX = axisLimits(1:2)*[-0.1,1.1]';
+	#textY = axisLimits(3:4)*[0.1,0.9]';
+	#text(textX, textY, Str);
+
+}
\ No newline at end of file

Added: pkg/Meucci/demo/S_DerivativesInvariants.R
===================================================================
--- pkg/Meucci/demo/S_DerivativesInvariants.R	                        (rev 0)
+++ pkg/Meucci/demo/S_DerivativesInvariants.R	2013-06-26 20:19:19 UTC (rev 2450)
@@ -0,0 +1,49 @@
+#' This script performs the quest for invariance in the derivatives market, as described
+#' in A. Meucci, "Risk and Asset Allocation", Springer, 2005, Chapter 3.
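# A quick synthetic sanity check (illustrative sketch only, not from the original
# script): with i.i.d. Gaussian draws the two half-sample histograms produced by
# PerformIidAnalysis() should look alike, and the 1-lag scatter ellipsoid should
# be roughly circular, e.g.
#   set.seed(10); z = rnorm(500); PerformIidAnalysis(1:length(z), z, "i.i.d. check")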
+#'
+#' @references
+#' \url{http://}
+#' See Meucci's script for "S_DerivativesInvariants.m"
+#'
+#' @author Xavier Valls \email{flamejat@@gmail.com}
+#' @export

+##################################################################################################################
+### Load implied vol for options on SPX for different time to maturity and moneyness
+# Variable name: derivatives
+load('../data/derivatives.Rda');

+##################################################################################################################
+### Simple univariate test
+# select the implied vol time series for a specific time to maturity and moneyness
+maturityIndex  = 1; # 1..6
+moneynessIndex = 4; # 1..7

+##################################################################################################################
+### Quest for invariance for changes in implied vol and changes in log implied vol

+# saving the sequence in a variable for legibility
+eachFiveRowsSeq = seq( 1, length(derivatives$impVol[ , 1, 1 ]), 5 );

+X = diff( derivatives$impVol[ eachFiveRowsSeq, maturityIndex, moneynessIndex ] );
+PerformIidAnalysis( 1:length(X), X, 'Changes in implied vol');

+Y = diff(log(derivatives$impVol[ eachFiveRowsSeq, maturityIndex, moneynessIndex ]));
+PerformIidAnalysis( 1:length(Y), Y, 'Changes in log of implied vol' );

+##################################################################################################################
+### Multivariate test with AR(1) structure
+# Dim holds the (T x Mat x Mon) dimensions; the MATLAB original unpacked them as [T, Mat, Mon]
+Dim = dim(derivatives$impVol[ eachFiveRowsSeq, , ]);
+Z = matrix(log(derivatives$impVol[ eachFiveRowsSeq, , ] ), Dim[ 1 ], Dim[ 2 ] * Dim[ 3 ]);
+# VAR(1) model by least squares: regress each observation on a constant and the previous one
+X = Z[ -1, ];
+F = cbind( matrix( 1, Dim[ 1 ] - 1, 1 ), Z[ -Dim[ 1 ], ] ); # drop the last row to lag the regressors
+E_XF = t( X ) %*% F / Dim[ 1 ];
+E_FF = t( F ) %*% F / Dim[ 1 ];
+B = E_XF %*% solve( E_FF ); # MATLAB's backslash solve translated to a matrix inverse
+Eps = X - F %*% t( B );     # residuals

+PerformIidAnalysis( 1:nrow(Eps), Eps[ , 3 ], 'VAR(1) residuals');

+### EOF
\ No newline at end of file

Added: pkg/Meucci/man/PerformIidAnalysis.Rd
===================================================================
--- pkg/Meucci/man/PerformIidAnalysis.Rd	                        (rev 0)
+++ pkg/Meucci/man/PerformIidAnalysis.Rd	2013-06-26 20:19:19 UTC (rev 2450)
@@ -0,0 +1,31 @@
+\name{PerformIidAnalysis}
+\alias{PerformIidAnalysis}
+\title{This function performs simple invariance (i.i.d.) tests on a time series, as described in
+A. Meucci "Risk and Asset Allocation", Springer, 2005}
+\usage{
+  PerformIidAnalysis(Dates = 1:length(Data), Data, Str = "")
+}
+\arguments{
+  \item{Dates}{: [vector] (T x 1) dates}

+  \item{Data}{: [matrix] (T x N) data}

+  \item{Starting_Prices}{: [vector] (N x 1)}
+}
+\description{
+  This function performs simple invariance (i.i.d.) tests
+  on a time series, as described in A. Meucci "Risk and
+  Asset Allocation", Springer, 2005
+}
+\note{
+  it checks the evolution over time
+}
+\author{
+  Xavier Valls \email{flamejat at gmail.com}
+}
+\references{
+  \url{http://} See (6.77)-(6.79) in "Risk and Asset
+  Allocation"-Springer (2005), by A.
Meucci See Meucci's + script for "ConvertCompoundedReturns2Price.m" +} + From noreply at r-forge.r-project.org Thu Jun 27 00:05:01 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Thu, 27 Jun 2013 00:05:01 +0200 (CEST) Subject: [Returnanalytics-commits] r2451 - pkg/FactorAnalytics/R Message-ID: <20130626220501.3ECC2181059@r-forge.r-project.org> Author: chenyian Date: 2013-06-27 00:05:00 +0200 (Thu, 27 Jun 2013) New Revision: 2451 Modified: pkg/FactorAnalytics/R/fitTimeSeriesFactorModel.R pkg/FactorAnalytics/R/impliedFactorReturns.R pkg/FactorAnalytics/R/predict.TimeSeriesFactorModel.r Log: 1. modify predict.TimeSeriesFactorModel.r to adapt impliedFactorReturns.R 2. edit examples of impliedFactorReturns.R 3. add output of fitTimeSeriesFactorModel.R Modified: pkg/FactorAnalytics/R/fitTimeSeriesFactorModel.R =================================================================== --- pkg/FactorAnalytics/R/fitTimeSeriesFactorModel.R 2013-06-26 20:19:19 UTC (rev 2450) +++ pkg/FactorAnalytics/R/fitTimeSeriesFactorModel.R 2013-06-26 22:05:00 UTC (rev 2451) @@ -367,7 +367,9 @@ beta = Betas, r2 = R2values, resid.variance = ResidVars, - call = this.call ) + call = this.call, + data = data, + factors.names = factors.names) class(ans) = "TimeSeriesFactorModel" return(ans) } Modified: pkg/FactorAnalytics/R/impliedFactorReturns.R =================================================================== --- pkg/FactorAnalytics/R/impliedFactorReturns.R 2013-06-26 20:19:19 UTC (rev 2450) +++ pkg/FactorAnalytics/R/impliedFactorReturns.R 2013-06-26 22:05:00 UTC (rev 2451) @@ -1,68 +1,71 @@ -#' Compute Implied Factor Returns Using Covariance Matrix Approach -#' -#' Compute risk factor conditional mean returns for a one group of risk factors -#' given specified returns for another group of risk factors based on the -#' assumption that all risk factor returns are multivariately normally -#' distributed. -#' -#' Let \code{y} denote the \code{m x 1} vector of factor scenarios and \code{x} -#' denote the \code{(n-m) x 1} vector of other factors. Assume that \code{(y', -#' x')'} has a multivariate normal distribution with mean \code{(mu.y', -#' mu.x')'} and covariance matrix partitioned as \code{(cov.yy, cov.yx, cov.xy, -#' cov.xx)}. Then the implied factor scenarios are computed as \code{E[x|y] = -#' mu.x + cov.xy*cov.xx^-1 * (y - mu.y)} -#' -#' @param factor.scenarios \code{m x 1} vector of factor mean returns of -#' scenario. m is a subset of the n, where n is risk factors and \code{n > m}. -#' @param mu.factors \code{n x 1} vector of factor mean returns. -#' @param cov.factors \code{n x n} factor covariance matrix. -#' @return \code{(n - m) x 1} vector of implied factor returns -#' @author Eric Zivot and Yi-An Chen. -#' @examples -#' -#' # get data -#' data(managers.df) -#' factors = managers.df[,(7:9)] -#' # make up a factor mean returns scenario for factor SP500.TR -#' factor.scenarios <- 0.001 -#' names(factor.scenarios) <- "SP500.TR" -#' mu.factors <- mean(factors) -#' cov.factors <- var(factors) -#' # implied factor returns -#' impliedFactorReturns(factor.scenarios,mu.factors,cov.factors) -#' -impliedFactorReturns <- -function(factor.scenarios, mu.factors, cov.factors) { -## inputs: -## factor.scenarios m x 1 vector of factor mean returns of scenario. m is a subset of the n, where n is -## risk factors and n > m. 
-## mu.factors n x 1 vector of factor mean returns -## cov.factors n x n factor covariance matrix -## outputs: -## (n - m) x 1 vector of implied factor returns -## details -## Let y denote the m x 1 vector of factor scenarios and x denote the (n-m) x 1 -## vector of other factors. Assume that (y', x')' has a multivariate normal -## distribution with mean (mu.y', mu.x')' and covariance matrix -## -## | cov.yy, cov.yx | -## | cov.xy, cov.xx | -## -## Then the implied factor scenarios are computed as -## -## E[x|y] = mu.x + cov.xy*cov.xx^-1 * (y - mu.y) -## - factor.names = colnames(cov.factors) - scenario.names = names(factor.scenarios) - non.scenario.names = setdiff(factor.names, scenario.names) - # m x m matrix - cov.scenarios = cov.factors[scenario.names, scenario.names] - # (n-m) x m matrix - cov.non.scenarios.scenarios = cov.factors[non.scenario.names, scenario.names] - # compute (n-m) x 1 vector of implied factor returns from conditional distribution - mu.non.scenarios = mu.factors[non.scenario.names] + cov.non.scenarios.scenarios %*% solve(cov.scenarios) %*% (factor.scenarios - mu.factors[scenario.names]) - mu.non.scenarios = as.numeric(mu.non.scenarios) - names(mu.non.scenarios) = non.scenario.names - return(mu.non.scenarios) -} - +#' Compute Implied Factor Returns Using Covariance Matrix Approach +#' +#' Compute risk factor conditional mean returns for a one group of risk factors +#' given specified returns for another group of risk factors based on the +#' assumption that all risk factor returns are multivariately normally +#' distributed. +#' +#' Let \code{y} denote the \code{m x 1} vector of factor scenarios and \code{x} +#' denote the \code{(n-m) x 1} vector of other factors. Assume that \code{(y', +#' x')'} has a multivariate normal distribution with mean \code{(mu.y', +#' mu.x')'} and covariance matrix partitioned as \code{(cov.yy, cov.yx, cov.xy, +#' cov.xx)}. Then the implied factor scenarios are computed as \code{E[x|y] = +#' mu.x + cov.xy*cov.xx^-1 * (y - mu.y)} +#' +#' @param factor.scenarios m x 1 vector of scenario values for a subset +#' of the n > m risk factors +#' @param mu.factors \code{n x 1} vector of factor mean returns. +#' @param cov.factors \code{n x n} factor covariance matrix. +#' @return \code{(n - m) x 1} vector of implied factor returns +#' @author Eric Zivot and Yi-An Chen. +#' @examples +#' +#' # get data +#' data(managers.df) +#' factors = managers.df[,(7:9)] +#' # make up a factor mean returns scenario for factor SP500.TR +#' factor.scenarios <- 0.1 +#' names(factor.scenarios) <- "SP500.TR" +#' mu.factors <- mean(factors) +#' cov.factors <- var(factors) +#' # implied factor returns +#' impliedFactorReturns(factor.scenarios,mu.factors,cov.factors) +#' +impliedFactorReturns <- +function(factor.scenarios, mu.factors, cov.factors) { +## inputs: +## factor.scenarios m x 1 vector of factor mean returns of scenario. +## m is a subset of the n, where n is +## risk factors and n > m. +## mu.factors n x 1 vector of factor mean returns +## cov.factors n x n factor covariance matrix +## outputs: +## (n - m) x 1 vector of implied factor returns +## details +## Let y denote the m x 1 vector of factor scenarios and x denote the (n-m) x 1 +## vector of other factors. 
Assume that (y', x')' has a multivariate normal +## distribution with mean (mu.y', mu.x')' and covariance matrix +## +## | cov.yy, cov.yx | +## | cov.xy, cov.xx | +## +## Then the implied factor scenarios are computed as +## +## E[x|y] = mu.x + cov.xy*cov.xx^-1 * (y - mu.y) +## + factor.names = colnames(cov.factors) + scenario.names = names(factor.scenarios) + non.scenario.names = setdiff(factor.names, scenario.names) + # m x m matrix + cov.scenarios = cov.factors[scenario.names, scenario.names] + # (n-m) x m matrix + cov.non.scenarios.scenarios = cov.factors[non.scenario.names, scenario.names] + # compute (n-m) x 1 vector of implied factor returns from conditional distribution + mu.non.scenarios = mu.factors[non.scenario.names] + + cov.non.scenarios.scenarios %*% solve(cov.scenarios) %*% + (factor.scenarios - mu.factors[scenario.names]) + mu.non.scenarios = as.numeric(mu.non.scenarios) + names(mu.non.scenarios) = non.scenario.names + return(mu.non.scenarios) +} + Modified: pkg/FactorAnalytics/R/predict.TimeSeriesFactorModel.r =================================================================== --- pkg/FactorAnalytics/R/predict.TimeSeriesFactorModel.r 2013-06-26 20:19:19 UTC (rev 2450) +++ pkg/FactorAnalytics/R/predict.TimeSeriesFactorModel.r 2013-06-26 22:05:00 UTC (rev 2451) @@ -25,6 +25,34 @@ #' @export #' -predict.TimeSeriesFactorModel <- function(fit,...){ - lapply(fit[[1]], predict,...) +predict.TimeSeriesFactorModel <- function(fit,newdata,...){ + if (missing(newdata) || is.null(newdata) ) { + lapply(fit$asset.fit, predict,...) + } + if ( !(missing(newdata) && !is.null(newdata) )) { + numAssets <- length(names(fit$asset.fit)) + + data <- fit$data + factors <- data[,fit$factors.names] + mu.factors <- apply(factors,2,mean) + cov.factors <- cov(factors) + + for (i in 1:numAssets) + if (dim(newdata)[1] < length(residuals(fit$asset.fit[[1]])) ){ + + + newdata <- data.frame(EDHEC.LS.EQ = rnorm(n=100), SP500.TR = rnorm(n=100) ) + newdata.mat <- as.matrix(newdata) + factor.scenarios <- 0.001 + names(factor.scenarios) <- "SP500.TR" + + impliedFactorReturns(factor.scenarios, mu.factors, cov.factors) + + } + + + + } + + } \ No newline at end of file From noreply at r-forge.r-project.org Thu Jun 27 08:49:17 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Thu, 27 Jun 2013 08:49:17 +0200 (CEST) Subject: [Returnanalytics-commits] r2452 - in pkg/Meucci: . R data man Message-ID: <20130627064917.C0F97180888@r-forge.r-project.org> Author: xavierv Date: 2013-06-27 08:49:17 +0200 (Thu, 27 Jun 2013) New Revision: 2452 Added: pkg/Meucci/data/derivatives.Rda pkg/Meucci/data/implVol.Rda Modified: pkg/Meucci/DESCRIPTION pkg/Meucci/R/PerformIidAnalysis.R pkg/Meucci/man/PerformIidAnalysis.Rd Log: - added two data files and fixed PerformIidAnalysis documentation Modified: pkg/Meucci/DESCRIPTION =================================================================== --- pkg/Meucci/DESCRIPTION 2013-06-26 22:05:00 UTC (rev 2451) +++ pkg/Meucci/DESCRIPTION 2013-06-27 06:49:17 UTC (rev 2452) @@ -17,13 +17,13 @@ regularly posts code along with his working papers. Unfortunately for those of us using R, he prefers to code in Matlab. Some of that code requires Matlab's additional Optimization Toolkit. This package is the - result of a Google Summer of Code project in 2012 and 2013 that seeks - to convert a subset of his Matlab code to R to make it more widely - accessible to R users. 
All of Meucci's original MATLAB source is available on + result of a Google Summer of Code project in 2012 and 2013 that seeks to + convert a subset of his Matlab code to R to make it more widely accessible + to R users. All of Meucci's original MATLAB source is available on www.symmys.com. That code should be considered the reference code that - this package seeks to port to R. This package remains under - development (and likely will as long as Attilio keeps publishing code), and - any and all feedback is appreciated. + this package seeks to port to R. This package remains under development + (and likely will as long as Attilio keeps publishing code), and any and all + feedback is appreciated. Depends: R (>= 2.14.0), zoo, Modified: pkg/Meucci/R/PerformIidAnalysis.R =================================================================== --- pkg/Meucci/R/PerformIidAnalysis.R 2013-06-26 22:05:00 UTC (rev 2451) +++ pkg/Meucci/R/PerformIidAnalysis.R 2013-06-27 06:49:17 UTC (rev 2452) @@ -12,8 +12,7 @@ #' #' @references #' \url{http://} -#' See (6.77)-(6.79) in "Risk and Asset Allocation"-Springer (2005), by A. Meucci -#' See Meucci's script for "ConvertCompoundedReturns2Price.m" +#' See Meucci's script for "PerformIidAnalysis.m" #' #' @author Xavier Valls \email{flamejat@@gmail.com} #' @export Added: pkg/Meucci/data/derivatives.Rda =================================================================== (Binary files differ) Property changes on: pkg/Meucci/data/derivatives.Rda ___________________________________________________________________ Added: svn:mime-type + application/octet-stream Added: pkg/Meucci/data/implVol.Rda =================================================================== (Binary files differ) Property changes on: pkg/Meucci/data/implVol.Rda ___________________________________________________________________ Added: svn:mime-type + application/octet-stream Modified: pkg/Meucci/man/PerformIidAnalysis.Rd =================================================================== --- pkg/Meucci/man/PerformIidAnalysis.Rd 2013-06-26 22:05:00 UTC (rev 2451) +++ pkg/Meucci/man/PerformIidAnalysis.Rd 2013-06-27 06:49:17 UTC (rev 2452) @@ -24,8 +24,7 @@ Xavier Valls \email{flamejat at gmail.com} } \references{ - \url{http://} See (6.77)-(6.79) in "Risk and Asset - Allocation"-Springer (2005), by A. 
Meucci See Meucci's
-  script for "ConvertCompoundedReturns2Price.m"
+  \url{http://} See Meucci's script for
+  "PerformIidAnalysis.m"
 }

From noreply at r-forge.r-project.org  Thu Jun 27 12:02:49 2013
From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org)
Date: Thu, 27 Jun 2013 12:02:49 +0200 (CEST)
Subject: [Returnanalytics-commits] r2453 -
	pkg/PerformanceAnalytics/sandbox/pulkit/week2/code
Message-ID: <20130627100249.192DA1806B2@r-forge.r-project.org>

Author: pulkit
Date: 2013-06-27 12:02:48 +0200 (Thu, 27 Jun 2013)
New Revision: 2453

Added:
   pkg/PerformanceAnalytics/sandbox/pulkit/week2/code/SRIndifferenceCurve.R
Log:
SR Indifference curve plot

Added: pkg/PerformanceAnalytics/sandbox/pulkit/week2/code/SRIndifferenceCurve.R
===================================================================
--- pkg/PerformanceAnalytics/sandbox/pulkit/week2/code/SRIndifferenceCurve.R	                        (rev 0)
+++ pkg/PerformanceAnalytics/sandbox/pulkit/week2/code/SRIndifferenceCurve.R	2013-06-27 10:02:48 UTC (rev 2453)
@@ -0,0 +1,45 @@
+#'@title
+#'Sharpe Ratio Indifference Curve
+#'
+#'@description
+#'The trade-off between a candidate's SR and its correlation
+#' to the existing set of strategies is given by the Sharpe
+#' ratio indifference curve. It is a plot between the candidate's
+#' Sharpe Ratio and the candidate's average correlation for a given
+#' portfolio Sharpe Ratio.
+#'
+#'The equation for the candidate's average correlation for a given
+#'Sharpe ratio is given by
+#'\deqn{\bar{\rho}_{s+1}=\frac{1}{2}\biggl[\frac{(\bar{SR} \cdot S+SR_{s+1})^2}{S \cdot SR_B^2}-\frac{S+1}{S}-\bar{\rho}(S-1)\biggr]}
+#'
+SRIndifference<-function(R, ylab = NULL,xlab = NULL,col=c(1,4),lwd = 2,pch = 1,cex = 1,...){
+
+  x = checkData(R)
+  columns = ncol(x)
+  SR = SharpeRatio(x)
+  sr_avg = mean(SR)
+  corr = table.Correlation(x,x)
+  corr_avg = 0
+  for(i in 1:(columns-1)){
+    for(j in (i+1):columns){
+      corr_avg = corr_avg + corr[(i-1)*columns+j,]
+    }
+  }
+  corr_avg = corr_avg*2/(columns*(columns-1))
+  SR_B = BenchmanrkSR(R)
+  corr_range = seq(-1,1,length.out = 30)
+  SR_i = NULL
+  for(i in corr_range){
+
+    SR_i = c(SR_i,sqrt((i*2+((columns+1)/columns)+corr_avg[1,1]*(columns-1))*columns*SR_B^2)-sr_avg*columns)
+  }
+  if(is.null(ylab)){
+    ylab = "Candidate Strategy's average correlation"
+  }
+  if(is.null(xlab)){
+    xlab = "Candidate Strategy's Sharpe Ratio"
+  }
+  plot(SR_i,corr_range,type="l",xlab = xlab,ylab = ylab,main="Sharpe Ratio Indifference Curve")
+  #OR we can use ggplot2 for much better plots
+  #qplot(SR_i,corr_range,geom="line",xlab=xlab,ylab=ylab,main="Sharpe Ratio IndifferenceCurve",margins=TRUE,facet="grid")+stat_summary()
+}
\ No newline at end of file

From noreply at r-forge.r-project.org  Thu Jun 27 18:55:34 2013
From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org)
Date: Thu, 27 Jun 2013 18:55:34 +0200 (CEST)
Subject: [Returnanalytics-commits] r2454 - in
	pkg/PerformanceAnalytics/sandbox/pulkit/week2: code vignette
Message-ID: <20130627165535.0E4F5185931@r-forge.r-project.org>

Author: pulkit
Date: 2013-06-27 18:55:34 +0200 (Thu, 27 Jun 2013)
New Revision: 2454

Modified:
   pkg/PerformanceAnalytics/sandbox/pulkit/week2/code/BenchmarkSR.R
   pkg/PerformanceAnalytics/sandbox/pulkit/week2/code/SRIndifferenceCurve.R
   pkg/PerformanceAnalytics/sandbox/pulkit/week2/vignette/SharepRatioEfficientFrontier.Rnw
Log:
updated the BenchmarkSR, SRIndifference and the vignette

Modified: pkg/PerformanceAnalytics/sandbox/pulkit/week2/code/BenchmarkSR.R
===================================================================
--- pkg/PerformanceAnalytics/sandbox/pulkit/week2/code/BenchmarkSR.R 2013-06-27 10:02:48 UTC (rev 2453) +++ pkg/PerformanceAnalytics/sandbox/pulkit/week2/code/BenchmarkSR.R 2013-06-27 16:55:34 UTC (rev 2454) @@ -28,6 +28,10 @@ BenchmanrkSR<-function(R){ x = checkData(R) columns = ncol(x) + #TODO : What to do if the number of columns is only one ? + if(columns == 1){ + stop("The number of return series should be greater than 1") + } SR = SharpeRatio(x) sr_avg = mean(SR) corr = table.Correlation(edhec,edhec) Modified: pkg/PerformanceAnalytics/sandbox/pulkit/week2/code/SRIndifferenceCurve.R =================================================================== --- pkg/PerformanceAnalytics/sandbox/pulkit/week2/code/SRIndifferenceCurve.R 2013-06-27 10:02:48 UTC (rev 2453) +++ pkg/PerformanceAnalytics/sandbox/pulkit/week2/code/SRIndifferenceCurve.R 2013-06-27 16:55:34 UTC (rev 2454) @@ -10,12 +10,31 @@ #' #'The equation for the candidate's average autocorrelation for a given #'sharpe Ratio is given by +#' #'\deqn{\bar{\rho{_s+1}}=\frac{1}{2}\biggl[\frac{\bar{SR}.S+SR_{s+1}^2}{S.SR_B^2}-\frac{S+1}{S}-\bar{rho}{S-1}\biggr]} #' -SRIndifference<-function(R, ylab = NULL,xlab = NULL,col=c(1,4),lwd = 2,pch = 1,cex = 1,...){ +#'@param R an xts, vector, matrix, data frame, timeSeries or zoo object of +#' asset returns +#'@param ylab set the y-axis label, as in \code{\link{plot}} +#'@param xlab set the x-axis label, as in \code{\link{plot}} +#'@param lwd set the width of the line, as in \code{\link{plot}} +#'@param pch set the pch value, as in \code{\link{plot}} +#'@param cex set the cex value, as in \code{\link{plot}} +#' +#'@references +#'Bailey, David H. and Lopez de Prado, Marcos, The Strategy Approval Decision: +#'A Sharpe Ratio Indifference Curve Approach (January 2013). Algorithmic Finance, +#'Vol. 2, No. 1 (2013). +#' +#' +SRIndifference<-function(R, ylab = NULL,xlab = NULL,lwd = 2,pch = 1,cex = 1,...){ x = checkData(R) columns = ncol(x) + #TODO: What to do when the number of columns is 1 ? 
+ if(columns == 1){ + stop("The number of return series should be greater than 1") + } SR = SharpeRatio(x) sr_avg = mean(SR) corr = table.Correlation(x,x) @@ -39,7 +58,7 @@ if(is.null(xlab)){ xlab = "Candidate Strategy's Sharpe Ratio" } - plot(SR_i,corr_range,type="l",xlab = xlab,ylab = ylab,main="Sharpe Ratio Indifference Curve") + #plot(SR_i,corr_range,type="l",xlab = xlab,ylab = ylab,main="Sharpe Ratio Indifference Curve") #OR we can use ggplot2 for much better plots - #qplot(SR_i,corr_range,geom="line",xlab=xlab,ylab=ylab,main="Sharpe Ratio Indifference Curve") + qplot(SR_i,corr_range,geom="line",xlab=xlab,ylab=ylab,main="Sharpe Ratio Indifference Curve") } \ No newline at end of file Modified: pkg/PerformanceAnalytics/sandbox/pulkit/week2/vignette/SharepRatioEfficientFrontier.Rnw =================================================================== --- pkg/PerformanceAnalytics/sandbox/pulkit/week2/vignette/SharepRatioEfficientFrontier.Rnw 2013-06-27 10:02:48 UTC (rev 2453) +++ pkg/PerformanceAnalytics/sandbox/pulkit/week2/vignette/SharepRatioEfficientFrontier.Rnw 2013-06-27 16:55:34 UTC (rev 2454) @@ -32,12 +32,17 @@ <<>>= library(PerformanceAnalytics) +library(ggplot2) data(edhec) @ <<>>= source("/home/pulkit/workspace/GSOC/PerformanceAnalytics/sandbox/pulkit/week2/code/BenchmarkSR.R") @ + +<<>>= +source("/home/pulkit/workspace/GSOC/PerformanceAnalytics/sandbox/pulkit/week2/code/SRIndifferenceCurve.R") +@ \section{Benchmark Sharpe Ratio} @@ -57,4 +62,21 @@ BenchmanrkSR(edhec) @ +\section{Sharpe Ratio Indifference Curve} + +The trade-off between a candidate's SR and its correlation +to the existing set of strategies is given by the Sharpe +ratio indifference curve. It is a plot of the candidate's +Sharpe Ratio against the candidate's average correlation for a given +portfolio Sharpe Ratio. + +The equation for the candidate's average correlation for a given +portfolio Sharpe ratio is given by + +\[\bar{\rho}_{s+1}=\frac{1}{2}\biggl[\frac{\bar{SR}\cdot S+SR_{s+1}^2}{S\cdot SR_B^2}-\frac{S+1}{S}-\bar{\rho}(S-1)\biggr]\] + +<<>>= +SRIndifference(edhec) +@ + \end{document} \ No newline at end of file From noreply at r-forge.r-project.org Fri Jun 28 00:14:45 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Fri, 28 Jun 2013 00:14:45 +0200 (CEST) Subject: [Returnanalytics-commits] r2455 - in pkg/FactorAnalytics: R man Message-ID: <20130627221445.682EA18599D@r-forge.r-project.org> Author: chenyian Date: 2013-06-28 00:14:44 +0200 (Fri, 28 Jun 2013) New Revision: 2455 Modified: pkg/FactorAnalytics/R/predict.TimeSeriesFactorModel.r pkg/FactorAnalytics/man/impliedFactorReturns.Rd pkg/FactorAnalytics/man/predict.TimeSeriesFactorModel.Rd Log: start skeleton of summary.FundamentalFactorModel.r and summary.FundamentalFactorModel.Rd Modified: pkg/FactorAnalytics/R/predict.TimeSeriesFactorModel.r =================================================================== --- pkg/FactorAnalytics/R/predict.TimeSeriesFactorModel.r 2013-06-27 16:55:34 UTC (rev 2454) +++ pkg/FactorAnalytics/R/predict.TimeSeriesFactorModel.r 2013-06-27 22:14:44 UTC (rev 2455) @@ -29,30 +29,30 @@ if (missing(newdata) || is.null(newdata) ) { lapply(fit$asset.fit, predict,...)
} - if ( !(missing(newdata) && !is.null(newdata) )) { - numAssets <- length(names(fit$asset.fit)) - - data <- fit$data - factors <- data[,fit$factors.names] - mu.factors <- apply(factors,2,mean) - cov.factors <- cov(factors) - - for (i in 1:numAssets) - if (dim(newdata)[1] < length(residuals(fit$asset.fit[[1]])) ){ - - - newdata <- data.frame(EDHEC.LS.EQ = rnorm(n=100), SP500.TR = rnorm(n=100) ) - newdata.mat <- as.matrix(newdata) - factor.scenarios <- 0.001 - names(factor.scenarios) <- "SP500.TR" - - impliedFactorReturns(factor.scenarios, mu.factors, cov.factors) - - } - - - - } +# if ( !(missing(newdata) && !is.null(newdata) )) { +# numAssets <- length(names(fit$asset.fit)) +# +# data <- fit$data +# factors <- data[,fit$factors.names] +# mu.factors <- apply(factors,2,mean) +# cov.factors <- cov(factors) +# +# for (i in 1:numAssets) +# if (dim(newdata)[1] < length(residuals(fit$asset.fit[[1]])) ){ +# +# +# newdata <- data.frame(EDHEC.LS.EQ = rnorm(n=100), SP500.TR = rnorm(n=100) ) +# newdata.mat <- as.matrix(newdata) +# factor.scenarios <- 0.001 +# names(factor.scenarios) <- "SP500.TR" +# +# impliedFactorReturns(factor.scenarios, mu.factors, cov.factors) +# +# } +# +# +# +# } +# - } \ No newline at end of file Modified: pkg/FactorAnalytics/man/impliedFactorReturns.Rd =================================================================== --- pkg/FactorAnalytics/man/impliedFactorReturns.Rd 2013-06-27 16:55:34 UTC (rev 2454) +++ pkg/FactorAnalytics/man/impliedFactorReturns.Rd 2013-06-27 22:14:44 UTC (rev 2455) @@ -1,54 +1,53 @@ -\name{impliedFactorReturns} -\alias{impliedFactorReturns} -\title{Compute Implied Factor Returns Using Covariance Matrix Approach} -\usage{ - impliedFactorReturns(factor.scenarios, mu.factors, - cov.factors) -} -\arguments{ - \item{factor.scenarios}{\code{m x 1} vector of factor - mean returns of scenario. m is a subset of the n, where n - is risk factors and \code{n > m}.} - - \item{mu.factors}{\code{n x 1} vector of factor mean - returns.} - - \item{cov.factors}{\code{n x n} factor covariance - matrix.} -} -\value{ - \code{(n - m) x 1} vector of implied factor returns -} -\description{ - Compute risk factor conditional mean returns for a one - group of risk factors given specified returns for another - group of risk factors based on the assumption that all - risk factor returns are multivariately normally - distributed. -} -\details{ - Let \code{y} denote the \code{m x 1} vector of factor - scenarios and \code{x} denote the \code{(n-m) x 1} vector - of other factors. Assume that \code{(y', x')'} has a - multivariate normal distribution with mean \code{(mu.y', - mu.x')'} and covariance matrix partitioned as - \code{(cov.yy, cov.yx, cov.xy, cov.xx)}. Then the implied - factor scenarios are computed as \code{E[x|y] = mu.x + - cov.xy*cov.xx^-1 * (y - mu.y)} -} -\examples{ -# get data -data(managers.df) -factors = managers.df[,(7:9)] -# make up a factor mean returns scenario for factor SP500.TR -factor.scenarios <- 0.001 -names(factor.scenarios) <- "SP500.TR" -mu.factors <- mean(factors) -cov.factors <- var(factors) -# implied factor returns -impliedFactorReturns(factor.scenarios,mu.factors,cov.factors) -} -\author{ - Eric Zivot and Yi-An Chen. 
-} - +\name{impliedFactorReturns} +\alias{impliedFactorReturns} +\title{Compute Implied Factor Returns Using Covariance Matrix Approach} +\usage{ + impliedFactorReturns(factor.scenarios, mu.factors, + cov.factors) +} +\arguments{ + \item{factor.scenarios}{m x 1 vector of scenario values + for a subset of the n > m risk factors} + + \item{mu.factors}{\code{n x 1} vector of factor mean + returns.} + + \item{cov.factors}{\code{n x n} factor covariance + matrix.} +} +\value{ + \code{(n - m) x 1} vector of implied factor returns +} +\description{ + Compute risk factor conditional mean returns for one + group of risk factors given specified returns for another + group of risk factors based on the assumption that all + risk factor returns are multivariately normally + distributed. +} +\details{ + Let \code{y} denote the \code{m x 1} vector of factor + scenarios and \code{x} denote the \code{(n-m) x 1} vector + of other factors. Assume that \code{(y', x')'} has a + multivariate normal distribution with mean \code{(mu.y', + mu.x')'} and covariance matrix partitioned as + \code{(cov.yy, cov.yx, cov.xy, cov.xx)}. Then the implied + factor scenarios are computed as \code{E[x|y] = mu.x + + cov.xy*cov.xx^-1 * (y - mu.y)} +} +\examples{ +# get data +data(managers.df) +factors = managers.df[,(7:9)] +# make up a factor mean returns scenario for factor SP500.TR +factor.scenarios <- 0.1 +names(factor.scenarios) <- "SP500.TR" +mu.factors <- mean(factors) +cov.factors <- var(factors) +# implied factor returns +impliedFactorReturns(factor.scenarios,mu.factors,cov.factors) +} +\author{ + Eric Zivot and Yi-An Chen. +} + Modified: pkg/FactorAnalytics/man/predict.TimeSeriesFactorModel.Rd =================================================================== --- pkg/FactorAnalytics/man/predict.TimeSeriesFactorModel.Rd 2013-06-27 16:55:34 UTC (rev 2454) +++ pkg/FactorAnalytics/man/predict.TimeSeriesFactorModel.Rd 2013-06-27 22:14:44 UTC (rev 2455) @@ -2,7 +2,7 @@ \alias{predict.TimeSeriesFactorModel} \title{predict method for TimeSeriesFactorModel object.} \usage{ - predict.TimeSeriesFactorModel(fit, ...) + predict.TimeSeriesFactorModel(fit, newdata, ...) } \arguments{ \item{fit}{"TimeSeriesFactorModel" object created by From noreply at r-forge.r-project.org Fri Jun 28 00:39:32 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Fri, 28 Jun 2013 00:39:32 +0200 (CEST) Subject: [Returnanalytics-commits] r2456 - in pkg/FactorAnalytics: R man Message-ID: <20130627223932.D2FE318560F@r-forge.r-project.org> Author: chenyian Date: 2013-06-28 00:39:32 +0200 (Fri, 28 Jun 2013) New Revision: 2456 Added: pkg/FactorAnalytics/R/summary.FundamentalFactorModel.r pkg/FactorAnalytics/man/summary.FundamentalFactorModel.Rd Modified: pkg/FactorAnalytics/R/factorModelCovariance.r pkg/FactorAnalytics/R/fitStatisticalFactorModel.R Log: 1. modify factorModelCovariance.r 2. create skeleton of summary.FundamentalFactorModel.Rd and summary.FundamentalFactorModel.r Modified: pkg/FactorAnalytics/R/factorModelCovariance.r =================================================================== --- pkg/FactorAnalytics/R/factorModelCovariance.r 2013-06-27 22:14:44 UTC (rev 2455) +++ pkg/FactorAnalytics/R/factorModelCovariance.r 2013-06-27 22:39:32 UTC (rev 2456) @@ -1,61 +1,75 @@ -#' Compute Factor Model Covariance Matrix. -#' -#' Compute asset return covariance matrix from factor model parameters.
-#' -#' The return on asset \code{i} (\code{i = 1,...,N}) is assumed to follow the -#' factor model \cr \code{R(i,t) = alpha + t(beta)*F(t) + e(i,t), e(i,t) ~ iid -#' (0, sig(i)^2)} \cr where \code{beta} is a \code{K x 1} vector of factor -#' exposures. The return variance is then \cr \code{var(R(i,t) = -#' t(beta)*var(F(t))*beta + sig(i)^2}, \cr and the \code{N x N} covariance -#' matrix of the return vector \code{R} is \cr \code{var(R) = B*var(F(t))*t(B) -#' + D} \cr where B is the \code{N x K} matrix of asset betas and \code{D} is a -#' diagonal matrix with \code{sig(i)^2} values along the diagonal. -#' -#' @param beta.mat \code{N x K} matrix of factor betas, where \code{N} is the -#' number of assets and \code{K} is the number of factors. -#' @param factor.cov \code{K x K} factor return covariance matrix. -#' @param residVars.vec \code{N x 1} vector of asset specific residual -#' variances from the factor model. -#' @return \code{N x N} return covariance matrix based on factor model -#' parameters. -#' @author Eric Zivot and Yi-An Chen. -#' @references Zivot, E. and J. Wang (2006), \emph{Modeling Financial Time -#' Series with S-PLUS, Second Edition}, Springer-Verlag. -#' @examples -#' -#' # factorModelCovariance -#' data(managers.df) -#' factors = managers.df[,(7:9)] -#' ret.assets = managers.df[,(1:6)] -#' fit <-fitMacroeconomicFactorModel(ret.assets,factors,fit.method="OLS", -#' variable.selection="all subsets", factor.set = 3) -#' factorModelCovariance(fit$beta.mat,var(factors),fit$residVars.vec) -#' -factorModelCovariance <- -function(beta.mat, factor.cov, residVars.vec) { -## Inputs: -## beta.mat n x k matrix of factor betas -## factor.cov k x k factor return covariance matrix -## residVars.vec n x 1 vector of residual variances from factor model -## Output: -## cov.fm n x n return covariance matrix based on -## estimated factor model. - beta.mat = as.matrix(beta.mat) - factor.cov = as.matrix(factor.cov) - sig.e = as.vector(residVars.vec) - if (length(sig.e) > 1) { - D.e = diag(as.vector(sig.e)) - } else { - D.e = as.matrix(sig.e) - } - if (ncol(beta.mat) != ncol(factor.cov)) - stop("beta.mat and factor.cov must have same number of columns") - - if (nrow(D.e) != nrow(beta.mat)) - stop("beta.mat and D.e must have same number of rows") - cov.fm = beta.mat %*% factor.cov %*% t(beta.mat) + D.e - if (any(diag(chol(cov.fm)) == 0)) - warning("Covariance matrix is not positive definite") - return(cov.fm) -} - +#' Compute Factor Model Covariance Matrix. +#' +#' Compute asset return covariance matrix from factor model parameters. +#' +#' The return on asset \code{i} (\code{i = 1,...,N}) is assumed to follow the +#' factor model \cr \code{R(i,t) = alpha + t(beta)*F(t) + e(i,t), e(i,t) ~ iid +#' (0, sig(i)^2)} \cr where \code{beta} is a \code{K x 1} vector of factor +#' exposures. The return variance is then \cr \code{var(R(i,t) = +#' t(beta)*var(F(t))*beta + sig(i)^2}, \cr and the \code{N x N} covariance +#' matrix of the return vector \code{R} is \cr \code{var(R) = B*var(F(t))*t(B) +#' + D} \cr where B is the \code{N x K} matrix of asset betas and \code{D} is a +#' diagonal matrix with \code{sig(i)^2} values along the diagonal. +#' +#' @param beta \code{N x K} matrix of factor betas, where \code{N} is the +#' number of assets and \code{K} is the number of factors. +#' @param factor.cov \code{K x K} factor return covariance matrix. +#' @param resid.variance \code{N x 1} vector of asset specific residual +#' variances from the factor model. 
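+#' A quick hand check of the formula above (made-up numbers, not package
+#' data): with betas B = matrix(c(1, 0.8), 2, 1), a single factor variance
+#' of 0.04 and residual variances c(0.01, 0.02), the model-implied
+#' covariance is B %*% 0.04 %*% t(B) + diag(c(0.01, 0.02)).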
+#' @return \code{N x N} return covariance matrix based on factor model +#' parameters. +#' @author Eric Zivot and Yi-An Chen. +#' @references Zivot, E. and J. Wang (2006), \emph{Modeling Financial Time +#' Series with S-PLUS, Second Edition}, Springer-Verlag. +#' @examples +#' +#' # Time Series model +#' +#' data(managers.df) +#' factors = managers.df[,(7:9)] +#' fit <- fitTimeseriesFactorModel(assets.names=colnames(managers.df[,(1:6)]), +#' factors.names=c("EDHEC.LS.EQ","SP500.TR"), +#' data=managers.df,fit.method="OLS") +#' factorModelCovariance(fit$beta,var(factors),fit$resid.variance) +#' +#' # Statistical Model +#' data(stat.fm.data) +#' sfm.pca.fit <- fitStatisticalFactorModel(sfm.dat,k=2, +#' ckeckData.method="data.frame") +#' +#' factorModelCovariance(t(sfm.pca.fit$loadings),var(sfm.pca.fit$factors),sfm.pca.fit$resid.variance) +#' +#' sfm.apca.fit <- fitStatisticalFactorModel(sfm.apca.dat,k=2 +#' ,ckeckData.method="data.frame") +#' +#' factorModelCovariance(t(sfm.apca.fit$loadings), +#' var(sfm.apca.fit$factors),sfm.apca.fit$resid.variance) +#' +factorModelCovariance <- +function(beta.mat, factor.cov, residVars.vec) { +## Inputs: +## beta.mat n x k matrix of factor betas +## factor.cov k x k factor return covariance matrix +## residVars.vec n x 1 vector of residual variances from factor model +## Output: +## cov.fm n x n return covariance matrix based on +## estimated factor model. + beta.mat = as.matrix(beta.mat) + factor.cov = as.matrix(factor.cov) + sig.e = as.vector(residVars.vec) + if (length(sig.e) > 1) { + D.e = diag(as.vector(sig.e)) + } else { + D.e = as.matrix(sig.e) + } + if (ncol(beta.mat) != ncol(factor.cov)) + stop("beta.mat and factor.cov must have same number of columns") + + if (nrow(D.e) != nrow(beta.mat)) + stop("beta.mat and D.e must have same number of rows") + cov.fm = beta.mat %*% factor.cov %*% t(beta.mat) + D.e + if (any(diag(chol(cov.fm)) == 0)) + warning("Covariance matrix is not positive definite") + return(cov.fm) +} + Modified: pkg/FactorAnalytics/R/fitStatisticalFactorModel.R =================================================================== --- pkg/FactorAnalytics/R/fitStatisticalFactorModel.R 2013-06-27 22:14:44 UTC (rev 2455) +++ pkg/FactorAnalytics/R/fitStatisticalFactorModel.R 2013-06-27 22:39:32 UTC (rev 2456) @@ -303,7 +303,7 @@ dimnames(f) <- list(dimnames(data)[[1]], paste("F", 1:k, sep = ".")) names(alpha) <- data.names resid <- t(t(data) - alpha) - f %*% B - r2 <- (1 - colSums(resid^2)/colSums(xc^2)) if (ckeckData.method == "xts" | ckeckData.method == "zoo" ) { f <- xts(f,index(data.xts)) Added: pkg/FactorAnalytics/R/summary.FundamentalFactorModel.r =================================================================== --- pkg/FactorAnalytics/R/summary.FundamentalFactorModel.r (rev 0) +++ pkg/FactorAnalytics/R/summary.FundamentalFactorModel.r 2013-06-27 22:39:32 UTC (rev 2456) @@ -0,0 +1,18 @@ +#' summary method for FundamentalFactorModel +#' +#' Generic function of summary method for fitFundamentalFactorModel. +#' +#' @param fit fit object created by fitFundamentalFactorModel.
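+#' @examples
+#' \dontrun{
+#' # usage sketch: fit.fund is a hypothetical object returned by
+#' # fitFundamentalFactorModel(); this skeleton just prints the factor returns
+#' summary.FundamentalFactorModel(fit.fund)
+#' }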
+#' +#' @author Yi-An Chen +#' +#' +#' +#' @export +#' + +summary.FundamentalFactorModel <- function(fit) { +dim(fit$factors) +print(fit$factors) + +} \ No newline at end of file Added: pkg/FactorAnalytics/man/summary.FundamentalFactorModel.Rd =================================================================== --- pkg/FactorAnalytics/man/summary.FundamentalFactorModel.Rd (rev 0) +++ pkg/FactorAnalytics/man/summary.FundamentalFactorModel.Rd 2013-06-27 22:39:32 UTC (rev 2456) @@ -0,0 +1,18 @@ +\name{summary.FundamentalFactorModel} +\alias{summary.FundamentalFactorModel} +\title{summary method for FundamentalFactorModel} +\usage{ + summary.FundamentalFactorModel(fit) +} +\arguments{ + \item{fit}{fit object created by + fitFundamentalFactorModel.} +} +\description{ + Generic function of summary method for + fitFundamentalFactorModel. +} +\author{ + Yi-An Chen +} + From noreply at r-forge.r-project.org Fri Jun 28 01:09:58 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Fri, 28 Jun 2013 01:09:58 +0200 (CEST) Subject: [Returnanalytics-commits] r2457 - in pkg/FactorAnalytics: R man Message-ID: <20130627230958.8E9441851D0@r-forge.r-project.org> Author: chenyian Date: 2013-06-28 01:09:58 +0200 (Fri, 28 Jun 2013) New Revision: 2457 Modified: pkg/FactorAnalytics/R/factorModelCovariance.r pkg/FactorAnalytics/man/factorModelCovariance.Rd Log: modify factorModelCovariance.Rd Modified: pkg/FactorAnalytics/R/factorModelCovariance.r =================================================================== --- pkg/FactorAnalytics/R/factorModelCovariance.r 2013-06-27 22:39:32 UTC (rev 2456) +++ pkg/FactorAnalytics/R/factorModelCovariance.r 2013-06-27 23:09:58 UTC (rev 2457) @@ -47,14 +47,8 @@ factorModelCovariance <- function(beta.mat, factor.cov, residVars.vec) { -## Inputs: -## beta.mat n x k matrix of factor betas -## factor.cov k x k factor return covariance matrix -## residVars.vec n x 1 vector of residual variances from factor model -## Output: -## cov.fm n x n return covariance matrix based on -## estimated factor model. - beta.mat = as.matrix(beta.mat) + + beta.mat = as.matrix(beta.mat) factor.cov = as.matrix(factor.cov) sig.e = as.vector(residVars.vec) Modified: pkg/FactorAnalytics/man/factorModelCovariance.Rd =================================================================== --- pkg/FactorAnalytics/man/factorModelCovariance.Rd 2013-06-27 22:39:32 UTC (rev 2456) +++ pkg/FactorAnalytics/man/factorModelCovariance.Rd 2013-06-27 23:09:58 UTC (rev 2457) @@ -1,57 +1,71 @@ -\name{factorModelCovariance} -\alias{factorModelCovariance} -\title{Compute Factor Model Covariance Matrix.} -\usage{ - factorModelCovariance(beta.mat, factor.cov, - residVars.vec) -} -\arguments{ - \item{beta.mat}{\code{N x K} matrix of factor betas, - where \code{N} is the number of assets and \code{K} is - the number of factors.} - - \item{factor.cov}{\code{K x K} factor return covariance - matrix.} - - \item{residVars.vec}{\code{N x 1} vector of asset - specific residual variances from the factor model.} -} -\value{ - \code{N x N} return covariance matrix based on factor - model parameters. -} -\description{ - Compute asset return covariance matrix from factor model - parameters. -} -\details{ - The return on asset \code{i} (\code{i = 1,...,N}) is - assumed to follow the factor model \cr \code{R(i,t) = - alpha + t(beta)*F(t) + e(i,t), e(i,t) ~ iid (0, - sig(i)^2)} \cr where \code{beta} is a \code{K x 1} vector - of factor exposures.
The return variance is then \cr - \code{var(R(i,t) = t(beta)*var(F(t))*beta + sig(i)^2}, - \cr and the \code{N x N} covariance matrix of the return - vector \code{R} is \cr \code{var(R) = B*var(F(t))*t(B) + - D} \cr where B is the \code{N x K} matrix of asset betas - and \code{D} is a diagonal matrix with \code{sig(i)^2} - values along the diagonal. -} -\examples{ -# factorModelCovariance -data(managers.df) -factors = managers.df[,(7:9)] -ret.assets = managers.df[,(1:6)] -fit <-fitMacroeconomicFactorModel(ret.assets,factors,fit.method="OLS", - variable.selection="all subsets", factor.set = 3) -factorModelCovariance(fit$beta.mat,var(factors),fit$residVars.vec) -} -\author{ - Eric Zivot and Yi-An Chen. -} - +\name{factorModelCovariance} +\alias{factorModelCovariance} +\title{Compute Factor Model Covariance Matrix.} +\usage{ + factorModelCovariance(beta.mat, factor.cov, + residVars.vec) +} +\arguments{ + \item{beta}{\code{N x K} matrix of factor betas, where + \code{N} is the number of assets and \code{K} is the + number of factors.} + + \item{factor.cov}{\code{K x K} factor return covariance + matrix.} + + \item{resid.variance}{\code{N x 1} vector of asset + specific residual variances from the factor model.} +} +\value{ + \code{N x N} return covariance matrix based on factor + model parameters. +} +\description{ + Compute asset return covariance matrix from factor model + parameters. +} +\details{ + The return on asset \code{i} (\code{i = 1,...,N}) is + assumed to follow the factor model \cr \code{R(i,t) = + alpha + t(beta)*F(t) + e(i,t), e(i,t) ~ iid (0, + sig(i)^2)} \cr where \code{beta} is a \code{K x 1} vector + of factor exposures. The return variance is then \cr + \code{var(R(i,t) = t(beta)*var(F(t))*beta + sig(i)^2}, + \cr and the \code{N x N} covariance matrix of the return + vector \code{R} is \cr \code{var(R) = B*var(F(t))*t(B) + + D} \cr where B is the \code{N x K} matrix of asset betas + and \code{D} is a diagonal matrix with \code{sig(i)^2} + values along the diagonal. +} +\examples{ +# Time Series model + +data(managers.df) +factors = managers.df[,(7:9)] +fit <- fitTimeseriesFactorModel(assets.names=colnames(managers.df[,(1:6)]), + factors.names=c("EDHEC.LS.EQ","SP500.TR"), + data=managers.df,fit.method="OLS") +factorModelCovariance(fit$beta,var(factors),fit$resid.variance) + +# Statistical Model +data(stat.fm.data) +sfm.pca.fit <- fitStatisticalFactorModel(sfm.dat,k=2, + ckeckData.method="data.frame") + +factorModelCovariance(t(sfm.pca.fit$loadings),var(sfm.pca.fit$factors),sfm.pca.fit$resid.variance) + +sfm.apca.fit <- fitStatisticalFactorModel(sfm.apca.dat,k=2 +,ckeckData.method="data.frame") + +factorModelCovariance(t(sfm.apca.fit$loadings), + var(sfm.apca.fit$factors),sfm.apca.fit$resid.variance) +} +\author{ + Eric Zivot and Yi-An Chen. +} +\references{ + Zivot, E. and J. Wang (2006), \emph{Modeling Financial + Time Series with S-PLUS, Second Edition}, + Springer-Verlag.
+} + From noreply at r-forge.r-project.org Fri Jun 28 05:43:04 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Fri, 28 Jun 2013 05:43:04 +0200 (CEST) Subject: [Returnanalytics-commits] r2458 - pkg/PortfolioAnalytics/R Message-ID: <20130628034304.BAD5D1859A3@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-28 05:43:03 +0200 (Fri, 28 Jun 2013) New Revision: 2458 Modified: pkg/PortfolioAnalytics/R/constrained_objective.R pkg/PortfolioAnalytics/R/constraints.R Log: Added names to constraint classes. Changed penalty for turnover_objective target. Modified: pkg/PortfolioAnalytics/R/constrained_objective.R =================================================================== --- pkg/PortfolioAnalytics/R/constrained_objective.R 2013-06-27 23:09:58 UTC (rev 2457) +++ pkg/PortfolioAnalytics/R/constrained_objective.R 2013-06-28 03:43:03 UTC (rev 2458) @@ -256,7 +256,13 @@ # I would expect this to be the same result when multiplier=0, but it is not. # max(tmp_measure - objective$target, 0) should equal 0 when tmp_measure is less than objective$target # print(max(tmp_measure - objective$target, 0)) - out = out + penalty * objective$multiplier * max(tmp_measure - objective$target, 0) + # Only penalize if tmp_measure violates target to the upside + # if(tmp_measure > objective$target) { + # print(tmp_measure) + # out = out + penalty*abs(objective$multiplier)*abs(tmp_measure-objective$target) + # } + # out = out + penalty * objective$multiplier * max(tmp_measure - objective$target, 0) + out = out + penalty*abs(objective$multiplier)*abs(tmp_measure-objective$target) } # target is null or doesn't exist, just maximize, or minimize violation of constraint out = out + abs(objective$multiplier)*tmp_measure Modified: pkg/PortfolioAnalytics/R/constraints.R =================================================================== --- pkg/PortfolioAnalytics/R/constraints.R 2013-06-27 23:09:58 UTC (rev 2457) +++ pkg/PortfolioAnalytics/R/constraints.R 2013-06-28 03:43:03 UTC (rev 2458) @@ -336,7 +336,7 @@ max[which(tmp_max < max)] <- tmp_max[which(tmp_max < max)] } - Constraint <- constraint_v2(type=type, enabled=enabled, ...) + Constraint <- constraint_v2(type=type, enabled=enabled, constrclass="box_constraint", ...) Constraint$min <- min Constraint$max <- max return(Constraint) @@ -378,7 +378,7 @@ } if (length(group_max) != ngroups) stop(paste("length of group_max must be equal to 1 or the length of groups:", ngroups)) - Constraint <- constraint_v2(type, enabled=enabled, ...) + Constraint <- constraint_v2(type, enabled=enabled, constrclass="group_constraint", ...) Constraint$groups <- groups Constraint$cLO <- group_min Constraint$cUP <- group_max @@ -398,7 +398,7 @@ #' @author Ross Bennett #' @export weight_sum_constraint <- function(type, min_sum=0.99, max_sum=1.01, enabled=FALSE, ...){ - Constraint <- constraint_v2(type, enabled=enabled, ...) + Constraint <- constraint_v2(type, enabled=enabled, constrclass="weight_sum_constraint", ...) Constraint$min_sum <- min_sum Constraint$max_sum <- max_sum return(Constraint) @@ -493,7 +493,7 @@ #' @author Ross Bennett #' @export turnover_constraint <- function(type, max.turnover, enabled=FALSE, ...){ - Constraint <- constraint_v2(type, enabled=enabled, ...) + Constraint <- constraint_v2(type, enabled=enabled, constrclass="turnover_constraint", ...) 
Constraint$toc <- max.turnover return(Constraint) } @@ -509,7 +509,7 @@ #' @author Ross Bennett #' @export diversification_constraint <- function(type, div.target, enabled=FALSE, ...){ - Constraint <- constraint_v2(type, enabled=enabled, ...) + Constraint <- constraint_v2(type, enabled=enabled, constrclass="diversification_constraint", ...) Constraint$div <- div.target return(Constraint) } @@ -528,7 +528,7 @@ #' @author Ross Bennett #' @export volatility_constraint <- function(type, min.vol, max.vol, enabled=FALSE, ...){ - Constraint <- constraint_v2(type, enabled=enabled, ...) + Constraint <- constraint_v2(type, enabled=enabled, constrclass="volatility_constraint", ...) Constraint$min.vol <- min.vol Constraint$max.vol <- max.vol return(Constraint) From noreply at r-forge.r-project.org Fri Jun 28 05:56:06 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Fri, 28 Jun 2013 05:56:06 +0200 (CEST) Subject: [Returnanalytics-commits] r2459 - in pkg/PortfolioAnalytics: R man Message-ID: <20130628035607.36D8F1850FA@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-28 05:56:06 +0200 (Fri, 28 Jun 2013) New Revision: 2459 Modified: pkg/PortfolioAnalytics/R/constraints.R pkg/PortfolioAnalytics/man/add.constraint.Rd Log: updating documentation for add.constraint function Modified: pkg/PortfolioAnalytics/R/constraints.R =================================================================== --- pkg/PortfolioAnalytics/R/constraints.R 2013-06-28 03:43:03 UTC (rev 2458) +++ pkg/PortfolioAnalytics/R/constraints.R 2013-06-28 03:56:06 UTC (rev 2459) @@ -177,15 +177,15 @@ #' #' This is the main function for adding and/or updating constraints in an object of type \code{\link{portfolio}}. #' -#' In general, you will define your constraints as one of three types: 'weight_sum', 'box', or 'group'. +#' In general, you will define your constraints as: 'weight_sum', 'box', 'group', 'turnover', 'diversification', or 'volatility'. 
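+#'
+#' A brief usage sketch (pspec here is a hypothetical portfolio object
+#' created with \code{portfolio.spec}):
+#' pspec <- add.constraint(pspec, type="weight_sum", min_sum=0.99, max_sum=1.01, enabled=TRUE)
+#' pspec <- add.constraint(pspec, type="box", min=0, max=0.65, enabled=TRUE)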
#' #' @param portfolio an object of class 'portfolio' to add the constraint to, specifying the constraints for the optimization, see \code{\link{portfolio.spec}} -#' @param type character type of the constraint to add or update, currently 'weight_sum', 'box', or 'group' +#' @param type character type of the constraint to add or update, currently 'weight_sum', 'box', 'group', 'turnover', 'diversification', or 'volatility' #' @param enabled TRUE/FALSE #' @param \dots any other passthru parameters to specify box and/or group constraints #' @param indexnum if you are updating a specific constraint, the index number in the $objectives list to update #' @author Ross Bennett -#' @seealso \code{\link{constraint}} +#' @seealso \code{\link{constraint_v2}}, \code{\link{weight_sum_constraint}}, \code{\link{box_constraint}}, \code{\link{group_constraint}}, \code{\link{turnover_constraint}}, \code{\link{diversification_constraint}}, \code{\link{volatility_constraint}} #' @export add.constraint <- function(portfolio, type, enabled=FALSE, ..., indexnum=NULL){ # Check to make sure that the portfolio passed in is a portfolio object Modified: pkg/PortfolioAnalytics/man/add.constraint.Rd =================================================================== --- pkg/PortfolioAnalytics/man/add.constraint.Rd 2013-06-28 03:43:03 UTC (rev 2458) +++ pkg/PortfolioAnalytics/man/add.constraint.Rd 2013-06-28 03:56:06 UTC (rev 2459) @@ -11,7 +11,8 @@ optimization, see \code{\link{portfolio.spec}}} \item{type}{character type of the constraint to add or - update, currently 'weight_sum', 'box', or 'group'} + update, currently 'weight_sum', 'box', 'group', + 'turnover', 'diversification', or 'volatility'} \item{enabled}{TRUE/FALSE} @@ -27,13 +28,20 @@ constraints in an object of type \code{\link{portfolio}}. } \details{ - In general, you will define your constraints as one of - three types: 'weight_sum', 'box', or 'group'. + In general, you will define your constraints as: + 'weight_sum', 'box', 'group', 'turnover', + 'diversification', or 'volatility'. 
} \author{ Ross Bennett } \seealso{ \code{\link{constraint_v2}}, \code{\link{weight_sum_constraint}}, \code{\link{box_constraint}}, \code{\link{group_constraint}}, \code{\link{turnover_constraint}}, \code{\link{diversification_constraint}}, \code{\link{volatility_constraint}} } From noreply at r-forge.r-project.org Fri Jun 28 16:33:44 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Fri, 28 Jun 2013 16:33:44 +0200 (CEST) Subject: [Returnanalytics-commits] r2460 - in pkg/PerformanceAnalytics/sandbox/pulkit/week2: code vignette Message-ID: <20130628143344.BAEAB1859D2@r-forge.r-project.org> Author: pulkit Date: 2013-06-28 16:33:44 +0200 (Fri, 28 Jun 2013) New Revision: 2460 Added: pkg/PerformanceAnalytics/sandbox/pulkit/week2/code/BenchmarkPlots.R Modified: pkg/PerformanceAnalytics/sandbox/pulkit/week2/vignette/SharepRatioEfficientFrontier.Rnw Log: Added BenchmarkPlots Added: pkg/PerformanceAnalytics/sandbox/pulkit/week2/code/BenchmarkPlots.R =================================================================== --- pkg/PerformanceAnalytics/sandbox/pulkit/week2/code/BenchmarkPlots.R (rev 0) +++ pkg/PerformanceAnalytics/sandbox/pulkit/week2/code/BenchmarkPlots.R 2013-06-28 14:33:44 UTC (rev 2460) @@ -0,0 +1,32 @@ +BenchmarkSRPlots<-function(R=NULL,ylab = NULL,xlab = NULL,lwd = 2,pch = 1,cex = 1,avgSR = NULL,columns = NULL,...){ + + + if(!is.null(R)){ + x = checkData(R) + columns = ncol(x) + avgSR = mean(SharpeRatio(R)) + } + else{ + if(is.null(avgSR) | is.null(columns)){ + stop("The average SR and the number of strategies should not be NULL") + } + + } + corr = table.Correlation(x,x) + corr_avg = 0 + for(i in 1:(columns-1)){ + for(j in (i+1):columns){ + corr_avg = corr_avg + corr[(i-1)*columns+j,] + } + } + corr_avg = corr_avg*2/(columns*(columns-1)) + + rho = seq(0,1,length.out=30) + SR_B = avgSR*sqrt(columns/(1+(columns-1)*rho)) + df1<-data.frame(x=rho,y=SR_B) + df1$model<-"A" + df2<-data.frame(x=corr_avg[1,1],y=BenchmanrkSR(R)) + df2$model<-"B" + dfc<-rbind(df1,df2) + ggplot(dfc,aes(x,y,group=model)) +geom_point()+geom_line()+xlab("Correlation")+ylab("Benchmark Sharpe Ratio")+ggtitle("Benchmark SR vs Correlation") +} Modified: pkg/PerformanceAnalytics/sandbox/pulkit/week2/vignette/SharepRatioEfficientFrontier.Rnw =================================================================== --- pkg/PerformanceAnalytics/sandbox/pulkit/week2/vignette/SharepRatioEfficientFrontier.Rnw 2013-06-28 03:56:06 UTC (rev 2459) +++ pkg/PerformanceAnalytics/sandbox/pulkit/week2/vignette/SharepRatioEfficientFrontier.Rnw 2013-06-28 14:33:44 UTC (rev 2460) @@ -73,7 +73,7 @@ The equation for the candidate's average correlation for a given portfolio Sharpe ratio is given by -\[\bar{\rho}_{s+1}=\frac{1}{2}\biggl[\frac{\bar{SR}\cdot S+SR_{s+1}^2}{S\cdot SR_B^2}-\frac{S+1}{S}-\bar{\rho}(S-1)\biggr]\] +\[\bar{\rho}_{s+1}=\frac{1}{2}\biggl[\frac{(\bar{SR}\cdot S+SR_{s+1})^2}{S\cdot SR_B^2}-\frac{S+1}{S}-\bar{\rho}(S-1)\biggr]\] <<>>= SRIndifference(edhec) @ From noreply at r-forge.r-project.org Fri Jun 28 19:19:33 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Fri, 28 Jun 2013 19:19:33 +0200 (CEST) Subject: [Returnanalytics-commits] r2461 - pkg/PortfolioAnalytics/R Message-ID: <20130628171933.3D16118090B@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-28 19:19:32 +0200 (Fri, 28 Jun 2013) New Revision: 2461 Modified: pkg/PortfolioAnalytics/R/constraints.R Log: revised constraints to specify target value instead of min
and max Modified: pkg/PortfolioAnalytics/R/constraints.R =================================================================== --- pkg/PortfolioAnalytics/R/constraints.R 2013-06-28 14:33:44 UTC (rev 2460) +++ pkg/PortfolioAnalytics/R/constraints.R 2013-06-28 17:19:32 UTC (rev 2461) @@ -482,19 +482,20 @@ #' constructor for turnover_constraint #' #' This function is called by add.constraint when type="turnover" is specified. see \code{\link{add.constraint}} -#' This function allows the user to specify a maximum turnover constraint +#' This function allows the user to specify a target turnover value #' -#' Note that turnover constraint is currently only supported for global minimum variance problem with solve.QP plugin +#' Note that turnover constraint is currently only supported for global minimum +#' variance problem with ROI quadprog plugin #' #' @param type character type of the constraint -#' @param max.turnover maximum turnover value +#' @param turnover.target target turnover value #' @param enabled TRUE/FALSE #' @param \dots any other passthru parameters to specify box and/or group constraints #' @author Ross Bennett #' @export -turnover_constraint <- function(type, max.turnover, enabled=FALSE, ...){ +turnover_constraint <- function(type, turnover.target, enabled=FALSE, ...){ Constraint <- constraint_v2(type, enabled=enabled, constrclass="turnover_constraint", ...) - Constraint$toc <- max.turnover + Constraint$toc <- turnover,target return(Constraint) } @@ -517,20 +518,19 @@ #' constructor for volatility_constraint #' #' This function is called by add.constraint when type="volatility" is specified, \code{\link{add.constraint}} -#' If portfolio standard deviation is less than min.vol, add penalty to maximize -#' If portfolio standard deviation is greater than max.vol, add penalty to minimize +#' Penalize if portfolio standard deviation deviates from volatility target #' #' @param type character type of the constraint -#' @param min.vol minimum volatility constraint -#' @param max.vol maximum volatility constraint +#' @param vol.target target volatility constraint #' @param enabled TRUE/FALSE #' @param \dots any other passthru parameters to specify box and/or group constraints #' @author Ross Bennett #' @export -volatility_constraint <- function(type, min.vol, max.vol, enabled=FALSE, ...){ +volatility_constraint <- function(type, vol.target, enabled=FALSE, ...){ Constraint <- constraint_v2(type, enabled=enabled, constrclass="volatility_constraint", ...)
- Constraint$min.vol <- min.vol - Constraint$max.vol <- max.vol + # Constraint$min.vol <- min.vol + # Constraint$max.vol <- max.vol + Constraint$vol.target <- vol.target return(Constraint) } From noreply at r-forge.r-project.org Fri Jun 28 19:29:39 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Fri, 28 Jun 2013 19:29:39 +0200 (CEST) Subject: [Returnanalytics-commits] r2462 - pkg/PortfolioAnalytics/R Message-ID: <20130628172939.253D918090B@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-28 19:29:38 +0200 (Fri, 28 Jun 2013) New Revision: 2462 Modified: pkg/PortfolioAnalytics/R/constrained_objective.R Log: cleaned up comments for turnover objective in constrained_objective Modified: pkg/PortfolioAnalytics/R/constrained_objective.R =================================================================== --- pkg/PortfolioAnalytics/R/constrained_objective.R 2013-06-28 17:19:32 UTC (rev 2461) +++ pkg/PortfolioAnalytics/R/constrained_objective.R 2013-06-28 17:29:38 UTC (rev 2462) @@ -249,19 +249,6 @@ if(inherits(objective,"turnover_objective")){ if (!is.null(objective$target) & is.numeric(objective$target)){ # we have a target - # out = out + penalty*abs(objective$multiplier)*abs(tmp_measure-objective$target) - # Does this penalize for turnover below target? - # I want to only penalize turnover above the given target. - # Seems to be giving correct results, but only if multiplier=0.01 - # I would expect this to be the same result when multiplier=0, but it is not. - # max(tmp_measure - objective$target, 0) should equal 0 when tmp_measure is less than objective$target - # print(max(tmp_measure - objective$target, 0)) - # Only penalize if tmp_measure violates target to the upside - # if(tmp_measure > objective$target) { - # print(tmp_measure) - # out = out + penalty*abs(objective$multiplier)*abs(tmp_measure-objective$target) - # } - # out = out + penalty * objective$multiplier * max(tmp_measure - objective$target, 0) out = out + penalty*abs(objective$multiplier)*abs(tmp_measure-objective$target) } # target is null or doesn't exist, just maximize, or minimize violation of constraint From noreply at r-forge.r-project.org Fri Jun 28 19:58:10 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Fri, 28 Jun 2013 19:58:10 +0200 (CEST) Subject: [Returnanalytics-commits] r2463 - pkg/PortfolioAnalytics/R Message-ID: <20130628175810.8586D1859F2@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-28 19:58:10 +0200 (Fri, 28 Jun 2013) New Revision: 2463 Modified: pkg/PortfolioAnalytics/R/constrained_objective.R pkg/PortfolioAnalytics/R/objective.R Log: adding a temporary minmax objective for testing to understand how the objective function responds to specifying min and max values instead of a target Modified: pkg/PortfolioAnalytics/R/constrained_objective.R =================================================================== --- pkg/PortfolioAnalytics/R/constrained_objective.R 2013-06-28 17:29:38 UTC (rev 2462) +++ pkg/PortfolioAnalytics/R/constrained_objective.R 2013-06-28 17:58:10 UTC (rev 2463) @@ -255,6 +255,12 @@ out = out + abs(objective$multiplier)*tmp_measure } # univariate turnover objectives + if(inherits(objective,"minmax_objective")){ + if (!is.null(objective$min) & !is.null(objective$max)){ # we have a min and max + out = out + penalty * objective$multiplier * ((tmp_measure - objective$max) + (objective$min - tmp_measure)) + } + } # temporary minmax objective + if(inherits(objective,"risk_budget_objective")){ # setup 
Modified: pkg/PortfolioAnalytics/R/objective.R =================================================================== --- pkg/PortfolioAnalytics/R/objective.R 2013-06-28 17:29:38 UTC (rev 2462) +++ pkg/PortfolioAnalytics/R/objective.R 2013-06-28 17:58:10 UTC (rev 2463) @@ -191,6 +191,11 @@ arguments=arguments, ...=...) }, + tmp_minmax = {tmp_objective = minmax_objective(name=name, + enabled=enabled, + arguments=arguments, + ...=...) + }, null = {return(portfolio)} # got nothing, default to simply returning @@ -333,3 +338,39 @@ if(!hasArg(multiplier)) multiplier=1 return(objective(name=name, target=target, arguments=arguments, enabled=enabled, multiplier=multiplier,objclass=c("turnover_objective","objective"), ... )) } # end turnover_objective constructor + +#' constructor for class tmp_minmax_objective +#' +#' I am adding this as a temporary objective allowing for a min and max to be specified. Testing +#' to understand how the objective function responds to a range of allowable values. I will +#' likely add this to the turnover, diversification, and volatility constraints +#' allowing the user to specify a range of values. +#' +#' if target is null, we'll try to minimize the metric +#' +#' if target is set, we'll try to meet the metric +#' +#' If max is violated to the upside, penalize the metric +#' If min is violated to the downside, penalize the metric +#' Try to meet the range between min and max +#'
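+#' Numeric illustration of the intended penalty (made-up values): with
+#' min=0.02, max=0.04, multiplier=1 and a penalty term of 10000, a measured
+#' value of 0.05 adds 10000*1*(0.05-0.04) = 100 to the objective being
+#' minimized, 0.01 adds 10000*1*(0.02-0.01) = 100, and anything inside
+#' [0.02, 0.04] adds nothing.
+#'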
- Constraint$toc <- turnover,target + Constraint$toc <- turnover.target return(Constraint) } From noreply at r-forge.r-project.org Fri Jun 28 20:35:17 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Fri, 28 Jun 2013 20:35:17 +0200 (CEST) Subject: [Returnanalytics-commits] r2465 - pkg/FactorAnalytics/R Message-ID: <20130628183517.D61101845ED@r-forge.r-project.org> Author: chenyian Date: 2013-06-28 20:35:17 +0200 (Fri, 28 Jun 2013) New Revision: 2465 Modified: pkg/FactorAnalytics/R/fitFundamentalFactorModel.R Log: add comment on fitFundamentalFactorModel.R Modified: pkg/FactorAnalytics/R/fitFundamentalFactorModel.R =================================================================== --- pkg/FactorAnalytics/R/fitFundamentalFactorModel.R 2013-06-28 18:02:53 UTC (rev 2464) +++ pkg/FactorAnalytics/R/fitFundamentalFactorModel.R 2013-06-28 18:35:17 UTC (rev 2465) @@ -112,7 +112,7 @@ fitFundamentalFactorModel <- function(data,exposure.names, datevar, returnsvar, assetvar, wls = TRUE, regression = "classic", - covariance = "classic", full.resid.cov = TRUE, robust.scale = FALSE) { + covariance = "classic", full.resid.cov = FALSE, robust.scale = FALSE) { require(xts) require(robust) @@ -369,30 +369,33 @@ } else { Cov.factors <- covClassic(coredata(f.hat), distance = FALSE,na.action = na.omit) resid.vars <- apply(coredata(resids), 2, var, na.rm = TRUE) - D.hat <- if (full.resid.cov) + D.hat <- if (full.resid.cov) { covClassic(coredata(resids), distance = FALSE, na.action = na.omit) - else - diag(resid.vars) + } else { diag(resid.vars) } } # create betas from origial database B.final <- matrix(0, nrow = numAssets, ncol = numCoefs) colnames <- coefs.names B.final[, match("(Intercept)", colnames, 0)] <- 1 numeric.columns <- match(exposures.numeric, colnames, 0) - B.final[, numeric.columns] <- as.matrix(data[as.numeric(data[[datevar]]) == - timedates[numTimePoints], exposures.numeric]) - if (length(exposures.factor)) +# only take the latest beta to compute FM covariance +# should we let user choose which beta to use ? + B.final[, numeric.columns] <- as.matrix(data[ (as.numeric(data[[datevar]]) == + timedates[numTimePoints]), exposures.numeric]) + if (length(exposures.factor)) { B.final[, grep(exposures.factor, x = colnames)][cbind(seq(numAssets), as.numeric(data[data[[datevar]] == timedates[numTimePoints], exposures.factor]))] <- 1 + } cov.returns <- B.final %*% Cov.factors$cov %*% t(B.final) + - if (full.resid.cov) - D.hat$cov - else D.hat + if (full.resid.cov) { D.hat$cov + } else { D.hat } mean.cov.returns = tapply(data[[returnsvar]],data[[assetvar]], mean) Cov.returns <- list(cov = cov.returns, mean=mean.cov.returns, eigenvalues = eigen(cov.returns, only.values = TRUE, symmetric = TRUE)$values) - if (full.resid.cov) { + +# report residual covaraince if full.resid.cov is true. 
+if (full.resid.cov) { Cov.resids <- D.hat } else { From noreply at r-forge.r-project.org Fri Jun 28 20:51:05 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Fri, 28 Jun 2013 20:51:05 +0200 (CEST) Subject: [Returnanalytics-commits] r2466 - pkg/PortfolioAnalytics/R Message-ID: <20130628185105.CDD8E184F96@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-28 20:51:05 +0200 (Fri, 28 Jun 2013) New Revision: 2466 Modified: pkg/PortfolioAnalytics/R/constrained_objective.R pkg/PortfolioAnalytics/R/objective.R Log: revised minmax_objective in constrained_objective to only penalize if max is violated to the upside or min is violated to the downside Modified: pkg/PortfolioAnalytics/R/constrained_objective.R =================================================================== --- pkg/PortfolioAnalytics/R/constrained_objective.R 2013-06-28 18:35:17 UTC (rev 2465) +++ pkg/PortfolioAnalytics/R/constrained_objective.R 2013-06-28 18:51:05 UTC (rev 2466) @@ -257,7 +257,12 @@ if(inherits(objective,"minmax_objective")){ if (!is.null(objective$min) & !is.null(objective$max)){ # we have a min and max - out = out + penalty * objective$multiplier * ((tmp_measure - objective$max) + (objective$min - tmp_measure)) + if(tmp_measure > objective$max){ + out = out + penalty * objective$multiplier * (tmp_measure - objective$max) + } + if(tmp_measure < objective$min){ + out = out + penalty * objective$multiplier * (objective$min - tmp_measure) + } } } # temporary minmax objective Modified: pkg/PortfolioAnalytics/R/objective.R =================================================================== --- pkg/PortfolioAnalytics/R/objective.R 2013-06-28 18:35:17 UTC (rev 2465) +++ pkg/PortfolioAnalytics/R/objective.R 2013-06-28 18:51:05 UTC (rev 2466) @@ -113,6 +113,11 @@ arguments=arguments, ...=...) }, + tmp_minmax = {tmp_objective = minmax_objective(name=name, + enabled=enabled, + arguments=arguments, + ...=...) + }, null = {return(constraints)} # got nothing, default to simply returning From noreply at r-forge.r-project.org Fri Jun 28 21:19:53 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Fri, 28 Jun 2013 21:19:53 +0200 (CEST) Subject: [Returnanalytics-commits] r2467 - pkg/PortfolioAnalytics/sandbox Message-ID: <20130628191953.C571B1859EE@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-28 21:19:53 +0200 (Fri, 28 Jun 2013) New Revision: 2467 Added: pkg/PortfolioAnalytics/sandbox/testing_minmax_sd.R Log: adding testing script to sandbox folder to test the minmax objective Added: pkg/PortfolioAnalytics/sandbox/testing_minmax_sd.R =================================================================== --- pkg/PortfolioAnalytics/sandbox/testing_minmax_sd.R (rev 0) +++ pkg/PortfolioAnalytics/sandbox/testing_minmax_sd.R 2013-06-28 19:19:53 UTC (rev 2467) @@ -0,0 +1,72 @@ +# Script to test specifying a min and max instead of a target for an objective + +library(PortfolioAnalytics) + +data(edhec) + +ret <- edhec[, 1:10] + +constraints=constraint(assets = colnames(ret), + min = 0, + max = 1, + min_sum=0.99, + max_sum=1.01, + weight_seq = generatesequence()) + +# add objective to maximize return +constraints <- add.objective(constraints, type="return", name="mean", multiplier=-1, enabled=TRUE) + +# add "tmp_minmax" type objective with "sd" as the name of the function. 
# This is matched to StdDev +# Penalize if sd is outside of the range of min and max +constraints <- add.objective(constraints, type="tmp_minmax", name="sd", + min=0.005, max=0.02, multiplier=0, enabled=TRUE) + +# Maximize return subject to a range of portfolio volatility values + +##### test 1: just calculate portfolio sd ##### +set.seed(123) +opt_out_rp1 <- optimize.portfolio(R=ret, constraints, optimize_method="random", search_size=2000, trace=FALSE) + +# sd = 0.01385472 when multiplier = 0 +# This calculates the portfolio sd, but does not use it in the objective function +opt_out_rp1$objective_measures$sd + +##### test 2: within range ##### +# Change the multiplier to 1 and set the min and max around the calculated +# portfolio sd. +constraints$objectives[[2]]$multiplier=1 +constraints$objectives[[2]]$min=0.011 +constraints$objectives[[2]]$max=0.015 + +set.seed(123) +opt_out_rp2 <- optimize.portfolio(R=ret, constraints, optimize_method="random", search_size=2000, trace=TRUE) + +# portfolio sd should be unchanged +opt_out_rp2$objective_measures$sd + +##### test 3: lower max ##### +# Lower the max below 0.01385472 +constraints$objectives[[2]]$min=0.005 +constraints$objectives[[2]]$max=0.011 + +set.seed(123) +opt_out_rp3 <- optimize.portfolio(R=ret, constraints, optimize_method="random", search_size=2000, trace=TRUE) + +# Portfolio sd should now be less than 0.011 +opt_out_rp3$objective_measures$sd + +##### test 4: raise min ##### +# Raise the min above 0.01385472 +constraints$objectives[[2]]$min=0.015 +constraints$objectives[[2]]$max=0.02 + +set.seed(123) +opt_out_rp4 <- optimize.portfolio(R=ret, constraints, optimize_method="random", search_size=2000, trace=TRUE) + +# Portfolio sd should now be greater than 0.015 +opt_out_rp4$objective_measures$sd + +# constraints <- add.objective(constraints, type="risk", name="sd", target=0.012, multiplier=1, enabled=TRUE) +# constraints$objectives[[2]]$enabled=FALSE +# opt_out_rp <- optimize.portfolio(R=ret, constraints, optimize_method="random", search_size=2000, trace=FALSE) +# opt_out_rp$objective_measures From noreply at r-forge.r-project.org Fri Jun 28 23:01:43 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Fri, 28 Jun 2013 23:01:43 +0200 (CEST) Subject: [Returnanalytics-commits] r2468 - pkg/PortfolioAnalytics/R Message-ID: <20130628210143.C11DF184BCF@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-28 23:01:42 +0200 (Fri, 28 Jun 2013) New Revision: 2468 Modified: pkg/PortfolioAnalytics/R/constrained_objective.R Log: remove abs() function for objective in return_objective so that return can be maximized with a negative multiplier Modified: pkg/PortfolioAnalytics/R/constrained_objective.R =================================================================== --- pkg/PortfolioAnalytics/R/constrained_objective.R 2013-06-28 19:19:53 UTC (rev 2467) +++ pkg/PortfolioAnalytics/R/constrained_objective.R 2013-06-28 21:01:42 UTC (rev 2468) @@ -233,7 +233,7 @@ out = out + penalty*abs(objective$multiplier)*abs(tmp_measure-objective$target) } # target is null or doesn't exist, just maximize, or minimize violation of constraint - out = out + abs(objective$multiplier)*tmp_measure + out = out + objective$multiplier*tmp_measure } # end handling for return objectives if(inherits(objective,"portfolio_risk_objective")){ From noreply at r-forge.r-project.org Fri Jun 28 23:31:09 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Fri, 28 Jun 2013 23:31:09 +0200 (CEST) Subject:
[Returnanalytics-commits] r2469 - pkg/PortfolioAnalytics/sandbox Message-ID: <20130628213109.297A118511C@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-28 23:31:08 +0200 (Fri, 28 Jun 2013) New Revision: 2469 Modified: pkg/PortfolioAnalytics/sandbox/testing_minmax_sd.R Log: updated minmax testing script to account for changes in maximizing return Modified: pkg/PortfolioAnalytics/sandbox/testing_minmax_sd.R =================================================================== --- pkg/PortfolioAnalytics/sandbox/testing_minmax_sd.R 2013-06-28 21:01:42 UTC (rev 2468) +++ pkg/PortfolioAnalytics/sandbox/testing_minmax_sd.R 2013-06-28 21:31:08 UTC (rev 2469) @@ -19,7 +19,7 @@ # add "tmp_minmax" type objective with "sd" as the name of the function. # This is matched to StdDev # Penalize if sd is outside of the range of min and max constraints <- add.objective(constraints, type="tmp_minmax", name="sd", - min=0.005, max=0.02, multiplier=0, enabled=TRUE) + min=0.02, max=0.04, multiplier=0, enabled=TRUE) # Maximize return subject to a range of portfolio volatility values @@ -27,7 +27,7 @@ set.seed(123) opt_out_rp1 <- optimize.portfolio(R=ret, constraints, optimize_method="random", search_size=2000, trace=FALSE) -# sd = 0.01385472 when multiplier = 0 +# sd = 0.03541473 when multiplier = 0 # This calculates the portfolio sd, but does not use it in the objective function opt_out_rp1$objective_measures$sd @@ -35,8 +35,8 @@ # Change the multiplier to 1 and set the min and max around the calculated # portfolio sd. constraints$objectives[[2]]$multiplier=1 -constraints$objectives[[2]]$min=0.011 -constraints$objectives[[2]]$max=0.015 +constraints$objectives[[2]]$min=0.02 +constraints$objectives[[2]]$max=0.04 set.seed(123) opt_out_rp2 <- optimize.portfolio(R=ret, constraints, optimize_method="random", search_size=2000, trace=TRUE) @@ -45,25 +45,25 @@ opt_out_rp2$objective_measures$sd ##### test 3: lower max ##### -# Lower the max below 0.01385472 -constraints$objectives[[2]]$min=0.005 -constraints$objectives[[2]]$max=0.011 +# Lower the max below 0.03541473 +constraints$objectives[[2]]$min=0.02 +constraints$objectives[[2]]$max=0.031 set.seed(123) opt_out_rp3 <- optimize.portfolio(R=ret, constraints, optimize_method="random", search_size=2000, trace=TRUE) -# Portfolio sd should now be less than 0.011 +# Portfolio sd should now be less opt_out_rp3$objective_measures$sd ##### test 4: raise min ##### -# Raise the min above 0.01385472 -constraints$objectives[[2]]$min=0.015 -constraints$objectives[[2]]$max=0.02 +# Raise the min above 0.03541473 +constraints$objectives[[2]]$min=0.037 +constraints$objectives[[2]]$max=0.04 set.seed(123) opt_out_rp4 <- optimize.portfolio(R=ret, constraints, optimize_method="random", search_size=2000, trace=TRUE) -# Portfolio sd should now be greater than 0.015 +# Portfolio sd should now be greater opt_out_rp4$objective_measures$sd # constraints <- add.objective(constraints, type="risk", name="sd", target=0.012, multiplier=1, enabled=TRUE) From noreply at r-forge.r-project.org Sat Jun 29 01:54:10 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Sat, 29 Jun 2013 01:54:10 +0200 (CEST) Subject: [Returnanalytics-commits] r2470 - in pkg/PortfolioAnalytics: .
R man Message-ID: <20130628235410.43B30185381@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-29 01:54:09 +0200 (Sat, 29 Jun 2013) New Revision: 2470 Modified: pkg/PortfolioAnalytics/NAMESPACE pkg/PortfolioAnalytics/R/constraints.R pkg/PortfolioAnalytics/man/add.constraint.Rd pkg/PortfolioAnalytics/man/turnover_constraint.Rd pkg/PortfolioAnalytics/man/volatility_constraint.Rd Log: adding position_limit constraint type and updating documentation for constraints Modified: pkg/PortfolioAnalytics/NAMESPACE =================================================================== --- pkg/PortfolioAnalytics/NAMESPACE 2013-06-28 21:31:08 UTC (rev 2469) +++ pkg/PortfolioAnalytics/NAMESPACE 2013-06-28 23:54:09 UTC (rev 2470) @@ -29,6 +29,7 @@ export(is.constraint) export(is.objective) export(is.portfolio) +export(minmax_objective) export(objective) export(optimize.portfolio.parallel) export(optimize.portfolio.rebalancing) @@ -38,6 +39,7 @@ export(plot.optimize.portfolio) export(portfolio_risk_objective) export(portfolio.spec) +export(position_limit_constraint) export(random_portfolios) export(random_walk_portfolios) export(randomize_portfolio) Modified: pkg/PortfolioAnalytics/R/constraints.R =================================================================== --- pkg/PortfolioAnalytics/R/constraints.R 2013-06-28 21:31:08 UTC (rev 2469) +++ pkg/PortfolioAnalytics/R/constraints.R 2013-06-28 23:54:09 UTC (rev 2470) @@ -173,11 +173,11 @@ ) } -#' General interface for adding and/or updating optimization constraints, currently supports weight, box and group constraints. +#' General interface for adding and/or updating optimization constraints. #' #' This is the main function for adding and/or updating constraints in an object of type \code{\link{portfolio}}. #' -#' In general, you will define your constraints as: 'weight_sum', 'box', 'group', 'turnover', 'diversification', or 'volatility'. +#' In general, you will define your constraints as: 'weight_sum', 'box', 'group', 'turnover', 'diversification', 'volatility', or 'position_limit'. #' #' @param portfolio an object of class 'portfolio' to add the constraint to, specifying the constraints for the optimization, see \code{\link{portfolio.spec}} #' @param type character type of the constraint to add or update, currently 'weight_sum', 'box', 'group', 'turnover', 'diversification', or 'volatility' @@ -185,7 +185,7 @@ #' @param \dots any other passthru parameters to specify box and/or group constraints #' @param indexnum if you are updating a specific constraint, the index number in the $objectives list to update #' @author Ross Bennett -#' @seealso \code{\link{constraint_v2}}, \code{\link{weight_sum_constraint}}, \code{\link{box_constraint}}, \code{\link{group_constraint}}, \code{\link{turnover_constraint}}, \code{\link{diversification_constraint}}, \code{\link{volatility_constraint}} +#' @seealso \code{\link{constraint_v2}}, \code{\link{weight_sum_constraint}}, \code{\link{box_constraint}}, \code{\link{group_constraint}}, \code{\link{turnover_constraint}}, \code{\link{diversification_constraint}}, \code{\link{volatility_constraint}}, \code{\link{position_limit_constraint}} #' @export add.constraint <- function(portfolio, type, enabled=FALSE, ..., indexnum=NULL){ # Check to make sure that the portfolio passed in is a portfolio object @@ -231,6 +231,11 @@ enabled=enabled, ...=...) }, + # Position limit constraint + position_limit = {tmp_constraint <- position_limit_constraint(type=type, + enabled=enabled, + ...=...) 
+ }, # Do nothing and return the portfolio object if type is NULL null = {return(portfolio)} ) @@ -534,6 +539,23 @@ return(Constraint) } +#' constructor for position_limit_constraint +#' +#' This function is called by add.constraint when type="position_limit" is specified, \code{\link{add.constraint}} +#' Allows the user to specify the maximum number of positions (i.e. number of assets with non-zero weights) +#' +#' @param type character type of the constraint +#' @param max.pos maximum number of positions +#' @param enabled TRUE/FALSE +#' @param \dots any other passthru parameters to specify box and/or group constraints +#' @author Ross Bennett +#' @export +position_limit_constraint <- function(type, max.pos, enabled=FALSE, ...){ + Constraint <- constraint_v2(type, enabled=enabled, constrclass="position_limit_constraint", ...) + Constraint$max.pos <- max.pos + return(Constraint) +} + #' function for updating constraints, not well tested, may be broken #' #' can we use the generic update.default function? Modified: pkg/PortfolioAnalytics/man/add.constraint.Rd =================================================================== --- pkg/PortfolioAnalytics/man/add.constraint.Rd 2013-06-28 21:31:08 UTC (rev 2469) +++ pkg/PortfolioAnalytics/man/add.constraint.Rd 2013-06-28 23:54:09 UTC (rev 2470) @@ -1,6 +1,6 @@ \name{add.constraint} \alias{add.constraint} -\title{General interface for adding and/or updating optimization constraints, currently supports weight, box and group constraints.} +\title{General interface for adding and/or updating optimization constraints.} \usage{ add.constraint(portfolio, type, enabled = FALSE, ..., indexnum = NULL) @@ -30,7 +30,7 @@ \details{ In general, you will define your constraints as: 'weight_sum', 'box', 'group', 'turnover', - 'diversification', or 'volatility'. + 'diversification', 'volatility', or 'position_limit'. } \author{ Ross Bennett @@ -42,6 +42,7 @@ \code{\link{group_constraint}}, \code{\link{turnover_constraint}}, \code{\link{diversification_constraint}}, - \code{\link{volatility_constraint}} + \code{\link{volatility_constraint}}, + \code{\link{position_limit_constraint}} } Modified: pkg/PortfolioAnalytics/man/turnover_constraint.Rd =================================================================== --- pkg/PortfolioAnalytics/man/turnover_constraint.Rd 2013-06-28 21:31:08 UTC (rev 2469) +++ pkg/PortfolioAnalytics/man/turnover_constraint.Rd 2013-06-28 23:54:09 UTC (rev 2470) @@ -2,13 +2,13 @@ \alias{turnover_constraint} \title{constructor for turnover_constraint} \usage{ - turnover_constraint(type, max.turnover, enabled = FALSE, - ...) + turnover_constraint(type, turnover.target, + enabled = FALSE, ...) } \arguments{ \item{type}{character type of the constraint} - \item{max.turnover}{maximum turnover value} + \item{turnover.target}{target turnover value} \item{enabled}{TRUE/FALSE} @@ -19,11 +19,12 @@ This function is called by add.constraint when type="turnover" is specified. 
see \code{\link{add.constraint}} This function allows the - user to specify a maximum turnover constraint + user to specify a target turnover value } \details{ Note that turnover constraint is currently only supported - for global minimum variance problem with solve.QP plugin + for the global minimum variance problem with the ROI + quadprog plugin } \author{ Ross Bennett Modified: pkg/PortfolioAnalytics/man/volatility_constraint.Rd =================================================================== --- pkg/PortfolioAnalytics/man/volatility_constraint.Rd 2013-06-28 21:31:08 UTC (rev 2469) +++ pkg/PortfolioAnalytics/man/volatility_constraint.Rd 2013-06-28 23:54:09 UTC (rev 2470) @@ -2,16 +2,14 @@ \alias{volatility_constraint} \title{constructor for volatility_constraint} \usage{ - volatility_constraint(type, min.vol, max.vol, - enabled = FALSE, ...) + volatility_constraint(type, vol.target, enabled = FALSE, + ...) } \arguments{ \item{type}{character type of the constraint} - \item{min.vol}{minimum volatility constraint} + \item{vol.target}{target volatility constraint} - \item{max.vol}{maximum volatility constraint} - \item{enabled}{TRUE/FALSE} \item{\dots}{any other passthru parameters to specify box @@ -20,10 +18,8 @@ \description{ This function is called by add.constraint when type="volatility" is specified, - \code{\link{add.constraint}} If portfolio standard - deviation is less than min.vol, add penalty to maximize - If portfolio standard deviation is greater than max.vol, - add penalty to minimize + \code{\link{add.constraint}} Penalize if portfolio + standard deviation deviates from the volatility target } \author{ Ross Bennett From noreply at r-forge.r-project.org Sat Jun 29 02:20:39 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Sat, 29 Jun 2013 02:20:39 +0200 (CEST) Subject: [Returnanalytics-commits] r2471 - pkg/PortfolioAnalytics/sandbox Message-ID: <20130629002039.78EB71852CB@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-29 02:20:39 +0200 (Sat, 29 Jun 2013) New Revision: 2471 Added: pkg/PortfolioAnalytics/sandbox/constraints_vignette.pdf Modified: pkg/PortfolioAnalytics/sandbox/constraints_vignette.Rnw Log: adding updated constraints_vignette Modified: pkg/PortfolioAnalytics/sandbox/constraints_vignette.Rnw =================================================================== --- pkg/PortfolioAnalytics/sandbox/constraints_vignette.Rnw 2013-06-28 23:54:09 UTC (rev 2470) +++ pkg/PortfolioAnalytics/sandbox/constraints_vignette.Rnw 2013-06-29 00:20:39 UTC (rev 2471) @@ -24,17 +24,15 @@ \item[weight\_sum] The weight\_sum constraint is used to constrain the sum of weights. Common use cases of this are to apply a full investment, dollar neutral, or leverage constraint. \item[box] Box constraints are used to constrain the minimum and maximum weights of assets. Standard box constraints with a single upper bound and single lower bound as well as per asset inequality constraints on weights can be specified. A special case of box constraints is a long only constraint where the minimum weight is 0 and maximum weight is 1. \item[group] Group constraints are used to specify the minimum and maximum weights of groups of assets. A common use case is to group assets by market cap or style. Note that group constraints are only implemented for the ROI solvers. Implementing the group constraints for other solvers should be possible in \code{constrained\_objective} using the \code{constrained\_group\_tmp} function. 
- \item[turnover] Turnover can be specified as a constraint, but is not currently implemented. Turnover constraint may not be able to be implemented in the ROI glpk solver. It is implemented for the ROI quadprog solver in sandbox/testing\_turnover.gmv.R. Currently, turnover can be implemented as an objective function and the function has been added to the file \code{R/objectiveFUN.R}. - \item[diversification] Diversification can be specified as a constraint, but is not currently implemented in solvers. This can be done in the mapping function in the next part or implemented inside \code{constrained\_objective}. Currently the user can only specify a diversification target value. The function will try to maximize diversification, penalizing a value below the target. - \item[volatility] Volatility can be specified as a constraint, but it is not currently implemented. This can be done in the mapping function in the next part or implemented inside \code{constrained\_objective}. See \code{constrained\_objective} for how volatility is handled as an objective. Currently the user can specify a minimum volatility and a maximum volatility. We'll penalize if the minimum or maximum is violated. + \item[turnover] Turnover can be specified as a constraint, but is not currently implemented in any solvers. Turnover constraint may not be able to be implemented in the ROI glpk solver. It is implemented for the ROI quadprog solver in sandbox/testing\_turnover.gmv.R. Currently, turnover can be implemented as an objective function and the function has been added to the file \code{R/objectiveFUN.R}. The user can specify a turnover target \code{turnover.target}. Any deviation from the target will be penalized. + \item[diversification] Diversification can be specified as a constraint, but is not currently implemented in any solvers. This can be done in the mapping function in the next part or implemented inside \code{constrained\_objective}. The user can specify a diversification target value \code{div.target}. Any deviation from the target will be penalized. + \item[volatility] Volatility can be specified as a constraint, but it is not currently implemented for any solvers. This can be done in the mapping function in the next part or implemented inside \code{constrained\_objective}. See \code{constrained\_objective} for how volatility is handled as an objective. The user can specify a volatility target value \code{vol.target}. Any deviation from the target will be penalized. + \item[position\_limit] Integer constraint for max position cardinality constraint. This may be able to be implemented in \code{randomize\_portfolio} by generating portfolios with the number of non-zero weights equal to \code{max.pos}, then fill in weights of zero so the length of the weights vector is equal to the number of assets, then scramble the weights vector. The number of non-zero weights could also be random so that the number of non-zero weights is not always equal to \code{max.pos}. This could be implemented in the DEoptim solver with the mapping function. This might be do-able in Rglpk for max return and min ETL. Rglpk supports mixed integer types, but solve.QP does not. May be able to use branch-and-bound technique using solve.QP. \end{itemize} Constraint TODO \begin{itemize} - \item[Integer] Integer constraint for cardinality max position constraint. 
This may be able to be implemented in \code{randomize\_portfolio} by generating portfolios with the number of non-zero weights equal to \code{max.pos} and then fill in weights of zero so the length of the weights vector is equal to the number of assets. Then scramble the weights vector. The number of non-zero weights could also be random so that the number of non-zero weights is not always equal to \code{max.pos}. This could be implemented in the DEoptim solver with the mapping function. This might be do-able in Rglpk for max return and min ETL. Rglpk supports mixed integer types, but solve.QP does not. May be able to use branch-and-bound technique using solve.QP. - \item[Quadratic] Need more help on this. Note that the ROI solvers quadprog and glpk do not support quadratic constraints, they only support linear constraints. The ROI plugin for cplex does support quadratic constraints, but this is a commercial product. What are some use case examples? - \item[Diversification] Case of quadratic constraint. Could be implemented inside \code{constrained\_objective}. - \item[Volatility] See email from Peter Carl. Should be able to specify this as a constraint and then implement inside \code{constrained\_objective} + \item[Quadratic] Need more help on this. Note that the ROI solvers quadprog and glpk do not support quadratic constraints, they only support linear constraints. The ROI plugin for cplex does support quadratic constraints, but this is a commercial product. What are some use case examples other than diversification and volatility? \end{itemize} <<>>= @@ -78,10 +76,10 @@ pspec$constraints[[2]] @ -Add group constraints -The assets are grouped in 2 groups of 2 assets -The asset weights of the first group must be greater than or equal to 0.15 and less than or equal to 0.65. -The asset weights of the second group must be greater than or equal to 0.25 and less than or equal to 0.55. +Add group constraints. +The assets are grouped in 2 groups of 2 assets. +The sum of asset weights of the first group must be greater than or equal to 0.15 and less than or equal to 0.65. +The sum of asset weights of the second group must be greater than or equal to 0.25 and less than or equal to 0.55. <<>>= pspec <- add.constraint(portfolio=pspec, type="group", @@ -92,16 +90,16 @@ pspec$constraints[[3]] @ -Add turnover constraint. We'll penalize if \code{max.turnover} value is exceeded. +Add turnover constraint. Any deviation from \code{turnover.target} is penalized. <<>>= pspec <- add.constraint(portfolio=pspec, type="turnover", - max.turnover=0.6, + turnover.target=0.6, enabled=TRUE) pspec$constraints[[4]] @ -Add diversification constraint. We will try to maximize diversification, a diversification value of less than the \code{div.target} will be penalized. +Add diversification constraint. Any deviation from \code{div.target} will be penalized. <<>>= pspec <- add.constraint(portfolio=pspec, type="diversification", @@ -110,14 +108,21 @@ pspec$constraints[[5]] @ -Add volatility constraint. A portfolio volatility less than \code{min.vol} will be penalized and a portfolio volatility greater than \code{max.vol} will be penalized. +Add volatility constraint. Any deviation from \code{vol.target} will be penalized. <<>>= pspec <- add.constraint(portfolio=pspec, type="volatility", - min.vol=0.07, - max.vol=0.12, + vol.target=0.035, enabled=TRUE) pspec$constraints[[6]] @ +Add position\_limit constraint. This constrains the maximum number of positions, i.e. the number of assets with non-zero weights. 
+<<>>= +pspec <- add.constraint(portfolio=pspec, + type="position_limit", + max.pos=3, + enabled=TRUE) +pspec$constraints[[7]] +@ \end{document} \ No newline at end of file Added: pkg/PortfolioAnalytics/sandbox/constraints_vignette.pdf =================================================================== (Binary files differ) Property changes on: pkg/PortfolioAnalytics/sandbox/constraints_vignette.pdf ___________________________________________________________________ Added: svn:mime-type + application/octet-stream From noreply at r-forge.r-project.org Sun Jun 30 01:06:17 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Sun, 30 Jun 2013 01:06:17 +0200 (CEST) Subject: [Returnanalytics-commits] r2472 - pkg/PortfolioAnalytics/R Message-ID: <20130629230617.315A4184EDC@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-30 01:06:16 +0200 (Sun, 30 Jun 2013) New Revision: 2472 Added: pkg/PortfolioAnalytics/R/constraint_fnMap.R Log: adding constraint mapping function. Still needs a lot of work Added: pkg/PortfolioAnalytics/R/constraint_fnMap.R =================================================================== --- pkg/PortfolioAnalytics/R/constraint_fnMap.R (rev 0) +++ pkg/PortfolioAnalytics/R/constraint_fnMap.R 2013-06-29 23:06:16 UTC (rev 2472) @@ -0,0 +1,102 @@ +#' Constraint mapping function +#' +#' The purpose of the mapping function is to transform a weights vector +#' that does not meet all the constraints into a weights vector that +#' does meet the constraints, if one exists, hopefully with a minimum +#' of transformation. + +#' I think our first step should be to test each constraint type, in +#' some sort of hierarchy, starting with box constraints (almost all +#' solvers support box constraints, of course), since some of the other +#' transformations will violate the box constraints, and we'll need to +#' transform back again. 
+#' +#' @param weights vector of weights +#' @param portfolio object of class portfolio +#' @author Ross Bennett +#' @export +constraint_fnMap <- function(weights, portfolio) { + + if (!is.portfolio(portfolio)) { + stop("Portfolio passed in is not of class portfolio") + } + + for(constraint in portfolio$constraints) { + # Check for enabled constraints + if(constraint$enabled){ + + ## box constraint + if(inherits(constraint, "box_constraint")){ + # TODO + } # box constraint + + ## weight_sum constraint + if(inherits(constraint, "weight_sum_constraint")){ + min_sum <- constraint$min_sum + max_sum <- constraint$max_sum + print(min_sum) + print(max_sum) + # normalize to max_sum + if(sum(weights) > max_sum) { weights <- (max_sum / sum(weights)) * weights } + # normalize to min_sum + if(sum(weights) < min_sum) { weights <- (min_sum / sum(weights)) * weights } + } # weight_sum constraint + + ## group constraint + if(inherits(constraint, "group_constraint")){ + groups <- constraint$groups + cLO <- constraint$cLO + cUP <- constraint$cUP + print(groups) + print(cLO) + print(cUP) + n.groups <- length(groups) + k <- 1 + l <- 0 + for(i in 1:n.groups){ + j <- groups[i] + tmp.w <- weights[k:(l+j)] + # normalize weights for a given group that sum to less than specified group min + grp.min <- cLO[i] + if(sum(tmp.w) < grp.min) { + weights[k:(l+j)] <- (grp.min / sum(tmp.w)) * tmp.w + } + # normalize weights for a given group that sum to greater than specified group max + grp.max <- cUP[i] + if(sum(tmp.w) > grp.max) { + weights[k:(l+j)] <- (grp.max / sum(tmp.w)) * tmp.w + } + k <- k + j + l <- k - 1 + } + # Normalizing the weights inside the groups changes the sum of the weights. + # Should normalizing the sum of weights take place here or somewhere else? + # Re-normalizing the weights will get us *close* to satisfying the group constraints. + # Maybe then add a penalty in constrained objective for violation of group constraints? + } # group constraint + + # Turnover constraints + # TODO + + # Diversification constraints + # TODO + } + } + return(weights) +} + +# library(PortfolioAnalytics) +# data(edhec) +# ret <- edhec[, 1:4] +# funds <- colnames(ret) +# +# pspec <- portfolio.spec(assets=funds) +# pspec <- add.constraint(portfolio=pspec, type="weight_sum", min_sum=0.99, max_sum=1.01, enabled=TRUE) +# pspec <- add.constraint(portfolio=pspec, type="box", enabled=TRUE) +# pspec <- add.constraint(portfolio=pspec, type="group", groups=c(2,2), group_min=c(0.1, 0.2), group_max=c(0.3, 0.8), enabled=TRUE) +# +# weights <- c(0.15, 0.2, 0.15, 0.5) +# sum(weights) +# +# (w <- constraint_fnMap(weights, pspec)) +# sum(w) From noreply at r-forge.r-project.org Sun Jun 30 18:56:53 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Sun, 30 Jun 2013 18:56:53 +0200 (CEST) Subject: [Returnanalytics-commits] r2473 - in pkg/PortfolioAnalytics: . 
R man sandbox Message-ID: <20130630165653.C75EB183B50@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-30 18:56:53 +0200 (Sun, 30 Jun 2013) New Revision: 2473 Added: pkg/PortfolioAnalytics/man/constraint_fnMap.Rd pkg/PortfolioAnalytics/man/minmax_objective.Rd pkg/PortfolioAnalytics/man/position_limit_constraint.Rd Modified: pkg/PortfolioAnalytics/DESCRIPTION pkg/PortfolioAnalytics/NAMESPACE pkg/PortfolioAnalytics/R/constraints.R pkg/PortfolioAnalytics/man/group_constraint.Rd pkg/PortfolioAnalytics/sandbox/constraints_vignette.Rnw Log: adding support for group labels and updating documentation Modified: pkg/PortfolioAnalytics/DESCRIPTION =================================================================== --- pkg/PortfolioAnalytics/DESCRIPTION 2013-06-29 23:06:16 UTC (rev 2472) +++ pkg/PortfolioAnalytics/DESCRIPTION 2013-06-30 16:56:53 UTC (rev 2473) @@ -44,3 +44,4 @@ 'objectiveFUN.R' 'portfolio.R' 'constraintsFUN.R' + 'constraint_fnMap.R' Modified: pkg/PortfolioAnalytics/NAMESPACE =================================================================== --- pkg/PortfolioAnalytics/NAMESPACE 2013-06-29 23:06:16 UTC (rev 2472) +++ pkg/PortfolioAnalytics/NAMESPACE 2013-06-30 16:56:53 UTC (rev 2473) @@ -11,6 +11,7 @@ export(charts.RP) export(constrained_group_tmp) export(constrained_objective) +export(constraint_fnMap) export(constraint_ROI) export(constraint_v2) export(constraint) Modified: pkg/PortfolioAnalytics/R/constraints.R =================================================================== --- pkg/PortfolioAnalytics/R/constraints.R 2013-06-29 23:06:16 UTC (rev 2472) +++ pkg/PortfolioAnalytics/R/constraints.R 2013-06-30 16:56:53 UTC (rev 2473) @@ -354,6 +354,7 @@ #' @param type character type of the constraint #' @param assets number of assets, or optionally a named vector of assets specifying seed weights #' @param groups vector specifying the groups of the assets +#' @param group_labels character vector to label the groups (e.g. size, asset class, style, etc.) #' @param group_min numeric or vector specifying minimum weight group constraints #' @param group_max numeric or vector specifying maximum weight group constraints #' @param enabled TRUE/FALSE @@ -361,7 +362,7 @@ #' @author Ross Bennett #' @seealso \code{\link{add.constraint}} #' @export -group_constraint <- function(type, assets, groups, group_min, group_max, enabled=FALSE, ...) { +group_constraint <- function(type, assets, groups, group_labels=NULL, group_min, group_max, enabled=FALSE, ...) { nassets <- length(assets) ngroups <- length(groups) @@ -383,8 +384,16 @@ } if (length(group_max) != ngroups) stop(paste("length of group_max must be equal to 1 or the length of groups:", ngroups)) + # Construct the group_label vector if it is not passed in + if(is.null(group_labels)){ + group_labels <- paste(rep("group", ngroups), 1:ngroups, sep="") + } + + if(length(group_labels) != length(groups)) stop("length of group_labels must be equal to the length of groups") + Constraint <- constraint_v2(type, enabled=enabled, constrclass="group_constraint", ...) 
Constraint$groups <- groups + Constraint$group_labels <- group_labels Constraint$cLO <- group_min Constraint$cUP <- group_max return(Constraint) } Added: pkg/PortfolioAnalytics/man/constraint_fnMap.Rd =================================================================== --- pkg/PortfolioAnalytics/man/constraint_fnMap.Rd (rev 0) +++ pkg/PortfolioAnalytics/man/constraint_fnMap.Rd 2013-06-30 16:56:53 UTC (rev 2473) @@ -0,0 +1,27 @@ +\name{constraint_fnMap} +\alias{constraint_fnMap} +\title{Constraint mapping function} +\usage{ + constraint_fnMap(weights, portfolio) +} +\arguments{ + \item{weights}{vector of weights} + + \item{portfolio}{object of class portfolio} +} +\description{ + The purpose of the mapping function is to transform a + weights vector that does not meet all the constraints + into a weights vector that does meet the constraints, if + one exists, hopefully with a minimum of transformation. I + think our first step should be to test each constraint + type, in some sort of hierarchy, starting with box + constraints (almost all solvers support box constraints, + of course), since some of the other transformations will + violate the box constraints, and we'll need to transform + back again. +} +\author{ + Ross Bennett +} + Modified: pkg/PortfolioAnalytics/man/group_constraint.Rd =================================================================== --- pkg/PortfolioAnalytics/man/group_constraint.Rd 2013-06-29 23:06:16 UTC (rev 2472) +++ pkg/PortfolioAnalytics/man/group_constraint.Rd 2013-06-30 16:56:53 UTC (rev 2473) @@ -2,8 +2,9 @@ \alias{group_constraint} \title{constructor for group_constraint} \usage{ - group_constraint(type, assets, groups, group_min, - group_max, enabled = FALSE, ...) + group_constraint(type, assets, groups, + group_labels = NULL, group_min, group_max, + enabled = FALSE, ...) } \arguments{ \item{type}{character type of the constraint} @@ -13,6 +14,9 @@ \item{groups}{vector specifying the groups of the assets} + \item{group_labels}{character vector to label the groups + (i.e. size, asset class, style, etc.)} + \item{group_min}{numeric or vector specifying minimum weight group constraints} Added: pkg/PortfolioAnalytics/man/minmax_objective.Rd =================================================================== --- pkg/PortfolioAnalytics/man/minmax_objective.Rd (rev 0) +++ pkg/PortfolioAnalytics/man/minmax_objective.Rd 2013-06-30 16:56:53 UTC (rev 2473) @@ -0,0 +1,48 @@ +\name{minmax_objective} +\alias{minmax_objective} +\title{constructor for class tmp_minmax_objective} +\usage{ + minmax_objective(name, target = NULL, arguments = NULL, + multiplier = 1, enabled = FALSE, ..., min, max) } +\arguments{ + \item{name}{name of the objective, should correspond to a + function, though we will try to make allowances} + + \item{target}{univariate target for the objective} + + \item{min}{minimum value} + + \item{max}{maximum value} + + \item{arguments}{default arguments to be passed to an + objective function when executed} + + \item{multiplier}{multiplier to apply to the objective, + usually 1 or -1} + + \item{enabled}{TRUE/FALSE} + + \item{\dots}{any other passthru parameters} +} +\description{ + I am adding this as a temporary objective allowing for a min + and max to be specified. Testing to understand how the + objective function responds to a range of allowable + values. I will likely add this to the turnover, + diversification, and volatility constraints allowing the + user to specify a range of values. 
+} +\details{ + if target is null, we'll try to minimize the metric + + if target is set, we'll try to meet the metric + + If max is violated to the upside, penalize the metric If + min is violated to the downside, penalize the metric Try + to meet the range between min and max +} +\author{ + Ross Bennett +} + Added: pkg/PortfolioAnalytics/man/position_limit_constraint.Rd =================================================================== --- pkg/PortfolioAnalytics/man/position_limit_constraint.Rd (rev 0) +++ pkg/PortfolioAnalytics/man/position_limit_constraint.Rd 2013-06-30 16:56:53 UTC (rev 2473) @@ -0,0 +1,28 @@ +\name{position_limit_constraint} +\alias{position_limit_constraint} +\title{constructor for position_limit_constraint} +\usage{ + position_limit_constraint(type, max.pos, enabled = FALSE, + ...) +} +\arguments{ + \item{type}{character type of the constraint} + + \item{max.pos}{maximum number of positions} + + \item{enabled}{TRUE/FALSE} + + \item{\dots}{any other passthru parameters to specify box + and/or group constraints} +} +\description{ + This function is called by add.constraint when + type="position_limit" is specified, + \code{\link{add.constraint}} Allows the user to specify + the maximum number of positions (i.e. number of assets + with non-zero weights) +} +\author{ + Ross Bennett +} + Modified: pkg/PortfolioAnalytics/sandbox/constraints_vignette.Rnw =================================================================== --- pkg/PortfolioAnalytics/sandbox/constraints_vignette.Rnw 2013-06-29 23:06:16 UTC (rev 2472) +++ pkg/PortfolioAnalytics/sandbox/constraints_vignette.Rnw 2013-06-30 16:56:53 UTC (rev 2473) @@ -80,10 +80,12 @@ The assets are grouped in 2 groups of 2 assets. The sum of asset weights of the first group must be greater than or equal to 0.15 and less than or equal to 0.65. The sum asset weights of the second group must be greater than or equal to 0.25 and less than or equal to 0.55. +Labels for the groups can be specified (e.g. size, asset class, style, etc.). By default, the group labels will be group1, group2, ..., groupN for N groups. <<>>= pspec <- add.constraint(portfolio=pspec, type="group", groups=c(2, 2), + group_labels=c("Style A", "Style B"), group_min=c(0.15, 0.25), group_max=c(0.65, 0.55), enabled=TRUE) From noreply at r-forge.r-project.org Sun Jun 30 19:28:24 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Sun, 30 Jun 2013 19:28:24 +0200 (CEST) Subject: [Returnanalytics-commits] r2474 - pkg/PortfolioAnalytics/R Message-ID: <20130630172824.9054C185291@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-30 19:28:24 +0200 (Sun, 30 Jun 2013) New Revision: 2474 Modified: pkg/PortfolioAnalytics/R/constraints.R Log: the weight_sum_constraint can now be specified with type=leverage Modified: pkg/PortfolioAnalytics/R/constraints.R =================================================================== --- pkg/PortfolioAnalytics/R/constraints.R 2013-06-30 16:56:53 UTC (rev 2473) +++ pkg/PortfolioAnalytics/R/constraints.R 2013-06-30 17:28:24 UTC (rev 2474) @@ -212,9 +212,9 @@ ...=...) }, # Sum of weights constraints - weight=, weight_sum = {tmp_constraint <- weight_sum_constraint(type=type, - enabled=enabled, - ...=...) + weight=, leverage=, weight_sum = {tmp_constraint <- weight_sum_constraint(type=type, + enabled=enabled, + ...=...) 
}, # Turnover constraint turnover = {tmp_constraint <- turnover_constraint(type=type, From noreply at r-forge.r-project.org Sun Jun 30 19:42:45 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Sun, 30 Jun 2013 19:42:45 +0200 (CEST) Subject: [Returnanalytics-commits] r2475 - pkg/PortfolioAnalytics/R Message-ID: <20130630174245.C4ED8184A10@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-30 19:42:45 +0200 (Sun, 30 Jun 2013) New Revision: 2475 Modified: pkg/PortfolioAnalytics/R/constraints.R Log: added support for special case of weight_sum constraint type for full_investment, dollar_neutral, and active Modified: pkg/PortfolioAnalytics/R/constraints.R =================================================================== --- pkg/PortfolioAnalytics/R/constraints.R 2013-06-30 17:28:24 UTC (rev 2474) +++ pkg/PortfolioAnalytics/R/constraints.R 2013-06-30 17:42:45 UTC (rev 2475) @@ -216,6 +216,20 @@ enabled=enabled, ...=...) }, + # Special case of weight_sum constraint for full investment + full_investment = {tmp_constraint <- weight_sum_constraint(type=type, + min_sum=1, + max_sum=1, + enabled=enabled, + ...=...) + }, + # Special case of weight_sum constraint for dollar neutral or active + dollar_neutral=, active= {tmp_constraint <- weight_sum_constraint(type=type, + min_sum=0, + max_sum=0, + enabled=enabled, + ...=...) + }, # Turnover constraint turnover = {tmp_constraint <- turnover_constraint(type=type, enabled=enabled, From noreply at r-forge.r-project.org Sun Jun 30 20:04:32 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Sun, 30 Jun 2013 20:04:32 +0200 (CEST) Subject: [Returnanalytics-commits] r2476 - in pkg/PortfolioAnalytics: R man Message-ID: <20130630180432.47BE21859D6@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-30 20:04:31 +0200 (Sun, 30 Jun 2013) New Revision: 2476 Modified: pkg/PortfolioAnalytics/R/constraints.R pkg/PortfolioAnalytics/man/add.constraint.Rd pkg/PortfolioAnalytics/man/box_constraint.Rd pkg/PortfolioAnalytics/man/group_constraint.Rd pkg/PortfolioAnalytics/man/weight_sum_constraint.Rd Log: updating documentation for constraints Modified: pkg/PortfolioAnalytics/R/constraints.R =================================================================== --- pkg/PortfolioAnalytics/R/constraints.R 2013-06-30 17:42:45 UTC (rev 2475) +++ pkg/PortfolioAnalytics/R/constraints.R 2013-06-30 18:04:31 UTC (rev 2476) @@ -177,12 +177,14 @@ #' #' This is the main function for adding and/or updating constraints in an object of type \code{\link{portfolio}}. #' -#' In general, you will define your constraints as: 'weight_sum', 'box', 'group', 'turnover', 'diversification', 'volatility', or 'position_limit'. +#' In general, you will define your constraints as: 'weight_sum', 'box', 'group', 'turnover', 'diversification', or 'position_limit'. #' +#' Special cases for the weight_sum constraint are "full_investment" and "dollar_neutral" or "active" with appropriate values set for min_sum and max_sum. 
see \code{\link{weight_sum_constraint}} +#' #' @param portfolio an object of class 'portfolio' to add the constraint to, specifying the constraints for the optimization, see \code{\link{portfolio.spec}} -#' @param type character type of the constraint to add or update, currently 'weight_sum', 'box', 'group', 'turnover', 'diversification', or 'volatility' +#' @param type character type of the constraint to add or update, currently 'weight_sum', 'box', 'group', 'turnover', 'diversification', or 'position_limit' #' @param enabled TRUE/FALSE -#' @param \dots any other passthru parameters to specify box and/or group constraints +#' @param \dots any other passthru parameters to specify constraints #' @param indexnum if you are updating a specific constraint, the index number in the $objectives list to update #' @author Ross Bennett #' @seealso \code{\link{constraint_v2}}, \code{\link{weight_sum_constraint}}, \code{\link{box_constraint}}, \code{\link{group_constraint}}, \code{\link{turnover_constraint}}, \code{\link{diversification_constraint}}, \code{\link{volatility_constraint}}, \code{\link{position_limit_constraint}} @@ -272,7 +274,7 @@ #' @param min_mult numeric or named vector specifying minimum multiplier box constraint from seed weight in \code{assets} #' @param max_mult numeric or named vector specifying maximum multiplier box constraint from seed weight in \code{assets} #' @param enabled TRUE/FALSE -#' @param \dots any other passthru parameters to specify box and/or group constraints +#' @param \dots any other passthru parameters to specify box constraints #' @author Ross Bennett #' @seealso \code{\link{add.constraint}} #' @export @@ -372,7 +374,7 @@ #' @param group_labels character vector to label the groups (e.g. size, asset class, style, etc.) #' @param group_min numeric or vector specifying minimum weight group constraints #' @param group_max numeric or vector specifying maximum weight group constraints #' @param enabled TRUE/FALSE -#' @param \dots any other passthru parameters to specify box and/or group constraints +#' @param \dots any other passthru parameters to specify group constraints #' @author Ross Bennett #' @seealso \code{\link{add.constraint}} #' @export @@ -415,14 +417,20 @@ #' constructor for weight_sum_constraint #' -#' This function is called by add.constraint when type="weight_sum" is specified. see \code{\link{add.constraint}} +#' This function is called by add.constraint when "weight_sum", "leverage", "full_investment", "dollar_neutral", or "active" is specified as the type. 
see \code{\link{add.constraint}} #' This function allows the user to specify the minimum and maximum that the weights sum to #' +#' Special cases for the weight_sum constraint are "full_investment" and "dollar_neutral" or "active" +#' +#' If type="full_investment", min_sum=1 and max_sum=1 +#' +#' If type="dollar_neutral" or type="active", min_sum=0, and max_sum=0 +#' #' @param type character type of the constraint #' @param min_sum minimum sum of all asset weights, default 0.99 #' @param max_sum maximum sum of all asset weights, default 1.01 #' @param enabled TRUE/FALSE -#' @param \dots any other passthru parameters to specify box and/or group constraints +#' @param \dots any other passthru parameters to specify weight_sum constraints #' @author Ross Bennett #' @export weight_sum_constraint <- function(type, min_sum=0.99, max_sum=1.01, enabled=FALSE, ...){ Modified: pkg/PortfolioAnalytics/man/add.constraint.Rd =================================================================== --- pkg/PortfolioAnalytics/man/add.constraint.Rd 2013-06-30 17:42:45 UTC (rev 2475) +++ pkg/PortfolioAnalytics/man/add.constraint.Rd 2013-06-30 18:04:31 UTC (rev 2476) @@ -12,12 +12,12 @@ \item{type}{character type of the constraint to add or update, currently 'weight_sum', 'box', 'group', - 'turnover', 'diversification', or 'volatility'} + 'turnover', 'diversification', or 'position_limit'} \item{enabled}{TRUE/FALSE} - \item{\dots}{any other passthru parameters to specify box - and/or group constraints} + \item{\dots}{any other passthru parameters to specify + constraints} \item{indexnum}{if you are updating a specific constraint, the index number in the $objectives list to @@ -30,7 +30,12 @@ \details{ In general, you will define your constraints as: 'weight_sum', 'box', 'group', 'turnover', - 'diversification', 'volatility', or 'position_limit'. + 'diversification', or 'position_limit'. + + Special cases for the weight_sum constraint are + "full_investment" and "dollar_neutral" or "active" with + appropriate values set for min_sum and max_sum. see + \code{\link{weight_sum_constraint}} } \author{ Ross Bennett Modified: pkg/PortfolioAnalytics/man/box_constraint.Rd =================================================================== --- pkg/PortfolioAnalytics/man/box_constraint.Rd 2013-06-30 17:42:45 UTC (rev 2475) +++ pkg/PortfolioAnalytics/man/box_constraint.Rd 2013-06-30 18:04:31 UTC (rev 2476) @@ -28,7 +28,7 @@ \item{enabled}{TRUE/FALSE} \item{\dots}{any other passthru parameters to specify box - and/or group constraints} + constraints} } \description{ This function is called by add.constraint when type="box" Modified: pkg/PortfolioAnalytics/man/group_constraint.Rd =================================================================== --- pkg/PortfolioAnalytics/man/group_constraint.Rd 2013-06-30 17:42:45 UTC (rev 2475) +++ pkg/PortfolioAnalytics/man/group_constraint.Rd 2013-06-30 18:04:31 UTC (rev 2476) @@ -15,7 +15,7 @@ \item{groups}{vector specifying the groups of the assets} \item{group_labels}{character vector to label the groups - (i.e. 
size, asset class, style, etc.)} \item{group_min}{numeric or vector specifying minimum weight group constraints} @@ -25,8 +25,8 @@ \item{enabled}{TRUE/FALSE} - \item{\dots}{any other passthru parameters to specify box - and/or group constraints} + \item{\dots}{any other passthru parameters to specify + group constraints} } \description{ This function is called by add.constraint when Modified: pkg/PortfolioAnalytics/man/weight_sum_constraint.Rd =================================================================== --- pkg/PortfolioAnalytics/man/weight_sum_constraint.Rd 2013-06-30 17:42:45 UTC (rev 2475) +++ pkg/PortfolioAnalytics/man/weight_sum_constraint.Rd 2013-06-30 18:04:31 UTC (rev 2476) @@ -16,16 +16,26 @@ \item{enabled}{TRUE/FALSE} - \item{\dots}{any other passthru parameters to specify box - and/or group constraints} + \item{\dots}{any other passthru parameters to specify + weight_sum constraints} } \description{ This function is called by add.constraint when - type="weight_sum" is specified. see - \code{\link{add.constraint}} This function allows the + "weight_sum", "leverage", "full_investment", + "dollar_neutral", or "active" is specified as the type. + see \code{\link{add.constraint}} This function allows the user to specify the minimum and maximum that the weights sum to } +\details{ + Special cases for the weight_sum constraint are + "full_investment" and "dollar_neutral" or "active" + + If type="full_investment", min_sum=1 and max_sum=1 + + If type="dollar_neutral" or type="active", min_sum=0, and + max_sum=0 +} \author{ Ross Bennett } From noreply at r-forge.r-project.org Sun Jun 30 20:38:16 2013 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Sun, 30 Jun 2013 20:38:16 +0200 (CEST) Subject: [Returnanalytics-commits] r2477 - in pkg/PortfolioAnalytics: R man Message-ID: <20130630183816.CC378183B50@r-forge.r-project.org> Author: rossbennett34 Date: 2013-06-30 20:38:16 +0200 (Sun, 30 Jun 2013) New Revision: 2477 Modified: pkg/PortfolioAnalytics/R/constraints.R pkg/PortfolioAnalytics/man/box_constraint.Rd pkg/PortfolioAnalytics/man/diversification_constraint.Rd pkg/PortfolioAnalytics/man/group_constraint.Rd pkg/PortfolioAnalytics/man/position_limit_constraint.Rd pkg/PortfolioAnalytics/man/turnover_constraint.Rd pkg/PortfolioAnalytics/man/weight_sum_constraint.Rd Log: adding examples in documentation files for all supported constraint types Modified: pkg/PortfolioAnalytics/R/constraints.R =================================================================== --- pkg/PortfolioAnalytics/R/constraints.R 2013-06-30 18:04:31 UTC (rev 2476) +++ pkg/PortfolioAnalytics/R/constraints.R 2013-06-30 18:38:16 UTC (rev 2477) @@ -277,6 +277,20 @@ #' @param \dots any other passthru parameters to specify box constraints #' @author Ross Bennett #' @seealso \code{\link{add.constraint}} +#' @examples +#' data(edhec) +#' ret <- edhec[, 1:4] +#' +#' pspec <- portfolio.spec(assets=colnames(ret)) +#' +#' # defaults to min=0 and max=1 +#' pspec <- add.constraint(pspec, type="box") +#' +#' # specify box constraints as a scalar +#' pspec <- add.constraint(pspec, type="box", min=0.05, max=0.45) +#' +#' # specify box constraints per asset +#' pspec <- add.constraint(pspec, type="box", min=c(0.05, 0.10, 0.08, 0.06), max=c(0.45, 0.55, 0.35, 0.65)) #' @export box_constraint <- function(type, assets, min, max, min_mult, max_mult, enabled=FALSE, ...){ # Based on the constraint function for object of class constraint_v1 that @@ -377,6 +391,18 @@ #' @param \dots any other passthru 
parameters to specify group constraints #' @author Ross Bennett #' @seealso \code{\link{add.constraint}} +#' @examples +#' data(edhec) +#' ret <- edhec[, 1:4] +#' +#' pspec <- portfolio.spec(assets=colnames(ret)) +#' +#' pspec <- add.constraint(portfolio=pspec, +#' type="group", +#' groups=c(2, 2), +#' group_labels=c("Style A", "Style B"), +#' group_min=c(0.15, 0.25), +#' group_max=c(0.65, 0.55)) #' @export group_constraint <- function(type, assets, groups, group_labels=NULL, group_min, group_max, enabled=FALSE, ...) { nassets <- length(assets) @@ -432,6 +458,21 @@ #' @param enabled TRUE/FALSE #' @param \dots any other passthru parameters to specify weight_sum constraints #' @author Ross Bennett +#' @examples +#' data(edhec) +#' ret <- edhec[, 1:4] +#' +#' pspec <- portfolio.spec(assets=colnames(ret)) +#' +#' # min_sum and max_sum can be specified with type="weight_sum" or type="leverage" +#' pspec <- add.constraint(pspec, type="weight_sum", min_sum=1, max_sum=1) +#' +#' # Specify type="full_investment" to set min_sum=1 and max_sum=1 +#' pspec <- add.constraint(pspec, type="full_investment") +#' +#' # Specify type="dollar_neutral" or type="active" to set min_sum=0 and max_sum=0 +#' pspec <- add.constraint(pspec, type="dollar_neutral") +#' pspec <- add.constraint(pspec, type="active") #' @export weight_sum_constraint <- function(type, min_sum=0.99, max_sum=1.01, enabled=FALSE, ...){ Constraint <- constraint_v2(type, enabled=enabled, constrclass="weight_sum_constraint", ...) @@ -528,6 +569,13 @@ #' @param enabled TRUE/FALSE #' @param \dots any other passthru parameters to specify box and/or group constraints #' @author Ross Bennett +#' @examples +#' data(edhec) +#' ret <- edhec[, 1:4] +#' +#' pspec <- portfolio.spec(assets=colnames(ret)) +#' +#' pspec <- add.constraint(portfolio=pspec, type="turnover", turnover.target=0.6) #' @export turnover_constraint <- function(type, turnover.target, enabled=FALSE, ...){ Constraint <- constraint_v2(type, enabled=enabled, constrclass="turnover_constraint", ...) @@ -544,6 +592,13 @@ #' @param enabled TRUE/FALSE #' @param \dots any other passthru parameters to specify box and/or group constraints #' @author Ross Bennett +#' @examples +#' data(edhec) +#' ret <- edhec[, 1:4] +#' +#' pspec <- portfolio.spec(assets=colnames(ret)) +#' +#' pspec <- add.constraint(portfolio=pspec, type="diversification", div.target=0.7) #' @export diversification_constraint <- function(type, div.target, enabled=FALSE, ...){ Constraint <- constraint_v2(type, enabled=enabled, constrclass="diversification_constraint", ...) @@ -580,6 +635,13 @@ #' @param enabled TRUE/FALSE #' @param \dots any other passthru parameters to specify box and/or group constraints #' @author Ross Bennett +#' @examples +#' data(edhec) +#' ret <- edhec[, 1:4] +#' +#' pspec <- portfolio.spec(assets=colnames(ret)) +#' +#' pspec <- add.constraint(portfolio=pspec, type="position_limit", max.pos=3) #' @export position_limit_constraint <- function(type, max.pos, enabled=FALSE, ...){ Constraint <- constraint_v2(type, enabled=enabled, constrclass="position_limit_constraint", ...) Modified: pkg/PortfolioAnalytics/man/box_constraint.Rd =================================================================== --- pkg/PortfolioAnalytics/man/box_constraint.Rd 2013-06-30 18:04:31 UTC (rev 2476) +++ pkg/PortfolioAnalytics/man/box_constraint.Rd 2013-06-30 18:38:16 UTC (rev 2477) @@ -34,6 +34,21 @@ This function is called by add.constraint when type="box" is specified. 
see \code{\link{add.constraint}} } +\examples{ +data(edhec) +ret <- edhec[, 1:4] + +pspec <- portfolio.spec(assets=colnames(ret)) + +# defaults to min=0 and max=1 +pspec <- add.constraint(pspec, type="box") + +# specify box constraints as a scalar +pspec <- add.constraint(pspec, type="box", min=0.05, max=0.45) + +# specify box constraints per asset +pspec <- add.constraint(pspec, type="box", min=c(0.05, 0.10, 0.08, 0.06), max=c(0.45, 0.55, 0.35, 0.65)) +} \author{ Ross Bennett } Modified: pkg/PortfolioAnalytics/man/diversification_constraint.Rd =================================================================== --- pkg/PortfolioAnalytics/man/diversification_constraint.Rd 2013-06-30 18:04:31 UTC (rev 2476) +++ pkg/PortfolioAnalytics/man/diversification_constraint.Rd 2013-06-30 18:38:16 UTC (rev 2477) @@ -20,6 +20,14 @@ type="diversification" is specified, \code{\link{add.constraint}} } +\examples{ +data(edhec) +ret <- edhec[, 1:4] + +pspec <- portfolio.spec(assets=colnames(ret)) + +pspec <- add.constraint(portfolio=pspec, type="diversification", div.target=0.7) +} \author{ Ross Bennett } Modified: pkg/PortfolioAnalytics/man/group_constraint.Rd =================================================================== --- pkg/PortfolioAnalytics/man/group_constraint.Rd 2013-06-30 18:04:31 UTC (rev 2476) +++ pkg/PortfolioAnalytics/man/group_constraint.Rd 2013-06-30 18:38:16 UTC (rev 2477) @@ -33,6 +33,19 @@ type="group" is specified. see \code{\link{add.constraint}} } +\examples{ +data(edhec) +ret <- edhec[, 1:4] + +pspec <- portfolio.spec(assets=colnames(ret)) + +pspec <- add.constraint(portfolio=pspec, + type="group", + groups=c(2, 2), + group_labels=c("Style A", "Style B"), + group_min=c(0.15, 0.25), + group_max=c(0.65, 0.55)) +} \author{ Ross Bennett } Modified: pkg/PortfolioAnalytics/man/position_limit_constraint.Rd =================================================================== --- pkg/PortfolioAnalytics/man/position_limit_constraint.Rd 2013-06-30 18:04:31 UTC (rev 2476) +++ pkg/PortfolioAnalytics/man/position_limit_constraint.Rd 2013-06-30 18:38:16 UTC (rev 2477) @@ -22,7 +22,15 @@ the maximum number of positions (i.e. 
number of assets with non-zero weights) } +\examples{ +data(edhec) +ret <- edhec[, 1:4] + +pspec <- portfolio.spec(assets=colnames(ret)) + +pspec <- add.constraint(portfolio=pspec, type="position_limit", max.pos=3) +} \author{ - Ross Bennett + Ross Bennett } Modified: pkg/PortfolioAnalytics/man/turnover_constraint.Rd =================================================================== --- pkg/PortfolioAnalytics/man/turnover_constraint.Rd 2013-06-30 18:04:31 UTC (rev 2476) +++ pkg/PortfolioAnalytics/man/turnover_constraint.Rd 2013-06-30 18:38:16 UTC (rev 2477) @@ -26,6 +26,14 @@ for global minimum variance problem with ROI quadprog plugin } +\examples{ +data(edhec) +ret <- edhec[, 1:4] + +pspec <- portfolio.spec(assets=colnames(ret)) + +pspec <- add.constraint(portfolio=pspec, type="turnover", turnover.target=0.6) +} \author{ Ross Bennett } Modified: pkg/PortfolioAnalytics/man/weight_sum_constraint.Rd =================================================================== --- pkg/PortfolioAnalytics/man/weight_sum_constraint.Rd 2013-06-30 18:04:31 UTC (rev 2476) +++ pkg/PortfolioAnalytics/man/weight_sum_constraint.Rd 2013-06-30 18:38:16 UTC (rev 2477) @@ -36,6 +36,22 @@ If type="dollar_neutral" or type="active", min_sum=0, and max_sum=0 } +\examples{ +data(edhec) +ret <- edhec[, 1:4] + +pspec <- portfolio.spec(assets=colnames(ret)) + +# min_sum and max_sum can be specified with type="weight_sum" or type="leverage" +pspec <- add.constraint(pspec, type="weight_sum", min_sum=1, max_sum=1) + +# Specify type="full_investment" to set min_sum=1 and max_sum=1 +pspec <- add.constraint(pspec, type="full_investment") + +# Specify type="dollar_neutral" or type="active" to set min_sum=0 and max_sum=0 +pspec <- add.constraint(pspec, type="dollar_neutral") +pspec <- add.constraint(pspec, type="active") +} \author{ Ross Bennett }
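Two small standalone sketches may help tie the commits above together. First, the tmp_minmax objective exercised by testing_minmax_sd.R penalizes a measure only when it falls outside the [min, max] range. The following is a toy illustration of that penalty logic; minmax_penalty is a hypothetical name for this sketch, not PortfolioAnalytics code:

minmax_penalty <- function(value, min, max, multiplier=1, penalty=10000){
  # hypothetical sketch: zero penalty inside [min, max],
  # proportional to the size of the violation outside it
  out <- 0
  if(value < min) out <- penalty * (min - value)
  if(value > max) out <- penalty * (value - max)
  multiplier * out
}

minmax_penalty(0.035, min=0.02, max=0.04)  # 0: value inside the range
minmax_penalty(0.045, min=0.02, max=0.04)  # 50: value above the max is penalized

Second, the position_limit constraint and the constraint mapping function both describe transforming a weights vector toward feasibility. A minimal sketch of one possible transformation, following the approach outlined in the constraints vignette (keep the max.pos largest weights, zero the rest, then renormalize toward the weight_sum bounds the way constraint_fnMap does); limit_positions is a hypothetical helper, not part of the package:

limit_positions <- function(weights, max.pos, min_sum=0.99, max_sum=1.01){
  if(sum(weights != 0) > max.pos){
    # keep the max.pos largest weights by absolute value, zero out the rest
    keep <- order(abs(weights), decreasing=TRUE)[seq_len(max.pos)]
    w <- rep(0, length(weights))
    w[keep] <- weights[keep]
    weights <- w
  }
  # renormalize so the sum of weights lands inside [min_sum, max_sum]
  if(sum(weights) > max_sum) weights <- (max_sum / sum(weights)) * weights
  if(sum(weights) < min_sum) weights <- (min_sum / sum(weights)) * weights
  weights
}

limit_positions(c(0.1, 0.4, 0.2, 0.3), max.pos=3)
# 0.00 0.44 0.22 0.33 -- three non-zero weights, rescaled up to min_sum

Note that zeroing small weights and rescaling can push individual weights outside their box constraints, which is exactly why the roxygen comments above suggest testing constraint types in a hierarchy and transforming back when needed.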