[Returnanalytics-commits] r1974 - pkg/PerformanceAnalytics/sandbox/Meucci

noreply at r-forge.r-project.org noreply at r-forge.r-project.org
Mon Jun 4 01:32:28 CEST 2012


Author: mkshah
Date: 2012-06-04 01:32:28 +0200 (Mon, 04 Jun 2012)
New Revision: 1974

Modified:
   pkg/PerformanceAnalytics/sandbox/Meucci/MeucciRobustBayesianAllocation.R
Log:
Checked the formula and results compared with Meucci's Matlab Program

Modified: pkg/PerformanceAnalytics/sandbox/Meucci/MeucciRobustBayesianAllocation.R
===================================================================
--- pkg/PerformanceAnalytics/sandbox/Meucci/MeucciRobustBayesianAllocation.R	2012-06-03 21:36:12 UTC (rev 1973)
+++ pkg/PerformanceAnalytics/sandbox/Meucci/MeucciRobustBayesianAllocation.R	2012-06-03 23:32:28 UTC (rev 1974)
@@ -1,5 +1,6 @@
 library( matlab )
 library( quadprog )
+library( ggplot2 )
 library( MASS )
 
 #' Construct the mean-variance efficient frontier using a quadratic solver
@@ -18,9 +19,9 @@
 #'      volatility          the variance of the portfolio along the frontier
 #'      weights             the weights of the portfolio components along the frontier
 #' @author Ram Ahluwalia \email{ram@@wingedfootcapital.com}
-#' To be tested by: Manan K. Shah
+#' Tested and Results Compared with Meucci's Matlab Code : Manan K. Shah
 efficientFrontier = function( discretizations , cov , mu , longonly = FALSE ) 
-  {    
+{    
   # setup quadratic program
   N = nrow( cov )
   firstDegree = zeros( N , 1 )
@@ -34,19 +35,26 @@
   else
     { Aqp = t( rbind( Aeq , A ) ) ; bqp = c( beq , b ) }
   
+  # determine return of minimum-risk portfolio
   minVolWeights = solve.QP( secondDegree , firstDegree , Aqp , bqp , length( beq ) )$solution
   minVolRet = minVolWeights %*% mu
   
+  # determine return of maximum-return portfolio
   maxRet = max( mu )
   
+  # slice efficient frontier in 'discretizations' number of equally thick horizontal sectors in the upper branch only
   step = ( maxRet - minVolRet ) / ( discretizations - 1 )
   targetReturns = seq( minVolRet , maxRet , step )
   
+  # compute the compositions and risk-return coordinates of the optimal allocations relative to each slice
+  
+  # start with min vol portfolio
   weights = minVolWeights
   volatility = sqrt( minVolWeights %*% cov %*% minVolWeights )
   returns = minVolRet
   
   for( i in 2:( discretizations - 1 ) ){
+    #  determine least risky portfolio for given expected return
     Aeq = ones( 1 , N )
     Aeq = rbind( Aeq , t( mu ) )
     
@@ -97,57 +105,57 @@
 #' @export
 robustBayesianPortfolioOptimization = function( mean_post , cov_post , nu_post , riskAversionMu = .1 , riskAversionSigma = .1 , discretizations = 10 , longonly = FALSE , volatility )
 {    
-    # parameter checks    
-    N = length( mean ) # number of assets    
-    if ( ( N < 2 ) == TRUE ) { stop( "Requires a minimum of two assets to perform optimization" ) }
-    if ( discretizations < 1 ) { stop( "Number of discretizations must be an integer greater than 1" ) }  
-    if ( volatility < 0 ) { stop( "Volatility cannot be a negative number" ) }
-    if ( nu_post < 3 ) { stop( "nu_post must be greater than 2 otherwise g_m is undefined " ) }
-    if ( riskAversionMu < 0 ) { stop( "riskAversionMu must be a positive number" ) }
-    if ( riskAversionSigma < 0 ) { stop( "riskAversionSigma must be a positive number" ) }
+  # parameter checks    
+  N = length( mean ) # number of assets    
+  if ( ( N < 2 ) == TRUE ) { stop( "Requires a minimum of two assets to perform optimization" ) }
+  if ( discretizations < 1 ) { stop( "Number of discretizations must be an integer greater than 1" ) }  
+  if ( volatility < 0 ) { stop( "Volatility cannot be a negative number" ) }
+  if ( nu_post < 3 ) { stop( "nu_post must be greater than 2 otherwise g_m is undefined " ) }
+  if ( riskAversionMu < 0 ) { stop( "riskAversionMu must be a positive number" ) }
+  if ( riskAversionSigma < 0 ) { stop( "riskAversionSigma must be a positive number" ) }
     
-    # construct Bayesian efficient frontier
-    bayesianFrontier = efficientFrontier( discretizations , cov_post , mean_post , longonly = TRUE ) # returns a list of returns, volatility, and assets weights along the posterior frontier. Each row represents a point on the frontier
+  # construct Bayesian efficient frontier
+  bayesianFrontier = efficientFrontier( discretizations , cov_post , mean_post , longonly = TRUE ) # returns a list of returns, volatility, and assets weights along the posterior frontier. Each row represents a point on the frontier
   
-    # measure gamma-m and gamma-s to identify which portfolios along the frontier are robust
-        quantileMeanSquared = qchisq( riskAversionMu , N ) # the value of q-u is typically set to the quantile of the chi-squared distribution with N degrees of freedom (formula 6)    
+  # measure gamma-m and gamma-s to identify which portfolios along the frontier are robust
+  quantileMeanSquared = qchisq( riskAversionMu , N ) # the value of q-u is typically set to the quantile of the chi-squared distribution with N degrees of freedom (formula 6)    
         
-        # g_m is defined as a constraint on the optimal robust portfolio such that the variance of the robust portfolio must be less than gamma-m
-        g_m = sqrt( quantileMeanSquared / time_post * nu_post / ( nu_post - 2 ) ) # gamma-m (formula 20)
+  # g_m is defined as a constraint on the optimal robust portfolio such that the variance of the robust portfolio must be less than gamma-m
+  g_m = sqrt( quantileMeanSquared / time_post * nu_post / ( nu_post - 2 ) ) # gamma-m (formula 20)
         
-        quantileCovSquared = qchisq( riskAversionMu , N * ( N + 1 ) / 2 ) # from formula 7. N*(N+1)/2 is the degrees of freedom in a symmetric matrix (number of unique elements)        
-        g_s = volatility / ( nu_post / ( nu_post + N + 1 ) + sqrt( 2 * nu_post * nu_post * quantileCovSquared / ( ( nu_post + N + 1 ) ^ 3 ) ) ) # gamma-sigma (formula 21) corresponding to the i'th portfolio along the sample efficient frontier
+  quantileCovSquared = qchisq( riskAversionSigma , N * ( N + 1 ) / 2 ) # from formula 7. N*(N+1)/2 is the degrees of freedom in a symmetric matrix (number of unique elements)        
+  g_s = volatility / ( nu_post / ( nu_post + N + 1 ) + sqrt( 2 * nu_post * nu_post * quantileCovSquared / ( ( nu_post + N + 1 ) ^ 3 ) ) ) # gamma-sigma (formula 21) corresponding to the i'th portfolio along the sample efficient frontier
     
-    # initialize parameters
-    target = NULL
+  # initialize parameters
+  target = NULL
 
-    # for each of the portfolios along the efficient Bayesian frontier identify the most robust portfolio
-    for( k in 1:( discretizations - 1 ) ) 
-        {                
-        weightsBay = bayesianFrontier[[ "weights" ]][ k , ]                
+  # for each of the portfolios along the efficient Bayesian frontier identify the most robust portfolio
+  for( k in 1:( discretizations - 1 ) ) 
+  {                
+    weightsBay = bayesianFrontier[[ "weights" ]][ k , ]                
         
-        # reject portfolios that do not satisfy the constraints of formula 19 (i.e. Bayesian portfolios that are not robust, for example, the portfolios at the limit -- 100% confidence in prior or 100% confidence in sample)        
-        # identify Robust Bayesian frontier which is a subset of the Bayesian frontier that is further shrunk to toward the global minimumm variance portfolio
-            # and even more closely tight to the right of the efficient frontier        
-            if ( weightsBay %*% cov_post %*% weightsBay <= g_s ) # constraint for formula 19
-                { target = c( target , weightsBay %*% mean_post - g_m * sqrt( weightsBay %*% cov_post %*% weightsBay )) } # formula 19
-                else { target = c( target , -999999999 ) } # if the Bayesian efficient portfolio does not satisfy the constraint we assign a large negative value (we will reject these portfolios in the next step)
-        }    
+    # reject portfolios that do not satisfy the constraints of formula 19 (i.e. Bayesian portfolios that are not robust, for example, the portfolios at the limit -- 100% confidence in prior or 100% confidence in sample)        
+    # identify Robust Bayesian frontier which is a subset of the Bayesian frontier that is further shrunk toward the global minimum variance portfolio
+    # and even more closely tight to the right of the efficient frontier        
+    if ( weightsBay %*% cov_post %*% weightsBay <= g_s ) # constraint for formula 19
+    { target = c( target , weightsBay %*% mean_post - g_m * sqrt( weightsBay %*% cov_post %*% weightsBay )) } # formula 19
+    else { target = c( target , -999999999 ) } # if the Bayesian efficient portfolio does not satisfy the constraint we assign a large negative value (we will reject these portfolios in the next step)
+  }    
 
-        maxTarget = max( target )    
-        if ( maxTarget == -999999999 ) { stop( "No robust portfolio found within credibility set. Try increasing volatility or adjusting risk aversion parameters." ) }
-        maxIndex = which( target == maxTarget , arr.ind = TRUE ) # identify most robust Bayesian portfolio
-        if ( length( maxIndex ) > 1 ) { stop( "The number of robust portfolios identified is greater than 1. Debug. " )}
+  maxTarget = max( target )    
+  if ( maxTarget == -999999999 ) { stop( "No robust portfolio found within credibility set. Try increasing volatility or adjusting risk aversion parameters." ) }
+  maxIndex = which( target == maxTarget , arr.ind = TRUE ) # identify most robust Bayesian portfolio
+  if ( length( maxIndex ) > 1 ) { stop( "The number of robust portfolios identified is greater than 1. Debug. " )}
     
-    # identify Robust portfolio as a subset of Bayesian frontier    
-    robustPortfolio = list( returns    = bayesianFrontier[[ "returns" ]][ maxIndex ] ,
-                            volatility = bayesianFrontier[[ "volatility" ]][ maxIndex ] ,
-                            weights    = bayesianFrontier[[ "weights" ]][ maxIndex , ] )    
+  # identify Robust portfolio as a subset of Bayesian frontier    
+  robustPortfolio = list( returns    = bayesianFrontier[[ "returns" ]][ maxIndex ] ,
+                          volatility = bayesianFrontier[[ "volatility" ]][ maxIndex ] ,
+                          weights    = bayesianFrontier[[ "weights" ]][ maxIndex , ] )    
     
-    return( list( bayesianFrontier = bayesianFrontier , robustPortfolio = robustPortfolio , g_m = g_m , g_s = g_s ) )
+  return( list( bayesianFrontier = bayesianFrontier , robustPortfolio = robustPortfolio , g_m = g_m , g_s = g_s ) )
     
-    # Test that the number of returns portfolios is <= number of discretizations
-    # Test that there are no NA's in the return results
+  # Test that the number of returns portfolios is <= number of discretizations
+  # Test that there are no NA's in the return results
 }
 
 # Example:
@@ -174,34 +182,34 @@
 #' @export
 PartialConfidencePosterior = function( mean_sample , cov_sample , mean_prior , cov_prior , relativeConfidenceInMeanPrior , relativeConfidenceInCovPrior , sampleSize )
 {
-    # parameter checks
-    if ( (length( mean_sample ) == nrow( cov_sample )) == FALSE ) { stop( "number of assets in mean must match number of assets in covariance matrix")}
-    if ( (length( mean_sample ) == length( mean_prior )) == FALSE ) { stop( "number of assets in mean must match number of assets in mean_prior")}
-    if ( ( nrow( cov_sample ) == nrow( cov_prior ) ) == FALSE ) { stop( "number of assets in sample covariance must match number of assets in prior covariance matrix")}
-    N = length( mean_sample ) # number of assets    
-    if ( ( N < 2 ) == TRUE ) { stop( "requires a minimum of two assets to perform optimization" ) }
-    if ( relativeConfidenceInMeanPrior < 0 ) { stop( "Confidence in mean prior must be a number greater than or equal to zero" ) }
-    if ( relativeConfidenceInCovPrior  < 0 ) { stop( "Confidence in covariance prior must be a number greater than or equal to zero" ) }
+  # parameter checks
+  if ( (length( mean_sample ) == nrow( cov_sample )) == FALSE ) { stop( "number of assets in mean must match number of assets in covariance matrix")}
+  if ( (length( mean_sample ) == length( mean_prior )) == FALSE ) { stop( "number of assets in mean must match number of assets in mean_prior")}
+  if ( ( nrow( cov_sample ) == nrow( cov_prior ) ) == FALSE ) { stop( "number of assets in sample covariance must match number of assets in prior covariance matrix")}
+  N = length( mean_sample ) # number of assets    
+  if ( ( N < 2 ) == TRUE ) { stop( "requires a minimum of two assets to perform optimization" ) }
+  if ( relativeConfidenceInMeanPrior < 0 ) { stop( "Confidence in mean prior must be a number greater than or equal to zero" ) }
+  if ( relativeConfidenceInCovPrior  < 0 ) { stop( "Confidence in covariance prior must be a number greater than or equal to zero" ) }
     
-    # Investor's experience and confidence is summarized by mean_prior, cov_prior, time_prior, and nu_prior
-        # nu_prior = confidence on the inverse of cov_prior (see 7.25 - Meucci Risk & Asset Allocation Text). A larger value of nu_prior corresponds to little uncertainty about the view on inverse of Sigma, and thus Sigma    
-        # confidenceInPrior = time_prior = T0 = confidence in the prior view mean_prior
-    confidenceInSample = sampleSize # typically the number of observations on which the mean_sample and cov_sample is based on
-    confidenceInMeanPrior = sampleSize * relativeConfidenceInMeanPrior
-    confidenceInCovPrior = sampleSize * relativeConfidenceInCovPrior
+  # Investor's experience and confidence is summarized by mean_prior, cov_prior, time_prior, and nu_prior
+  # nu_prior = confidence on the inverse of cov_prior (see 7.25 - Meucci Risk & Asset Allocation Text). A larger value of nu_prior corresponds to little uncertainty about the view on inverse of Sigma, and thus Sigma    
+  # confidenceInPrior = time_prior = T0 = confidence in the prior view mean_prior
+  confidenceInSample = sampleSize # typically the number of observations on which the mean_sample and cov_sample is based on
+  confidenceInMeanPrior = sampleSize * relativeConfidenceInMeanPrior
+  confidenceInCovPrior = sampleSize * relativeConfidenceInCovPrior
     
-    # blend prior and the sample data to construct posterior
-    time_post = confidenceInSample + confidenceInMeanPrior
-    nu_post = confidenceInSample + confidenceInCovPrior    
-    mean_post = 1/time_post * ( mean_sample * confidenceInSample + mean_prior * confidenceInMeanPrior )    
-    cov_post = 1/nu_post * (cov_sample * confidenceInSample + cov_prior * confidenceInCovPrior + ( mean_sample - mean_prior ) %*% t( ( mean_sample - mean_prior ) ) / ( 1 / confidenceInSample + 1 / confidenceInMeanPrior ) )
+  # blend prior and the sample data to construct posterior
+  time_post = confidenceInSample + confidenceInMeanPrior
+  nu_post = confidenceInSample + confidenceInCovPrior    
+  mean_post = 1/time_post * ( mean_sample * confidenceInSample + mean_prior * confidenceInMeanPrior )    
+  cov_post = 1/nu_post * (cov_sample * confidenceInSample + cov_prior * confidenceInCovPrior + ( mean_sample - mean_prior ) %*% t( ( mean_sample - mean_prior ) ) / ( 1 / confidenceInSample + 1 / confidenceInMeanPrior ) )
     
-    return( list( mean_post = mean_post , cov_post = cov_post , time_post = time_post , nu_post = nu_post ) )    
+  return( list( mean_post = mean_post , cov_post = cov_post , time_post = time_post , nu_post = nu_post ) )    
     
-    # TODO: Test expectations
-        # Test 1: If relative confidence in prior is 0, then returns mean_sample and cov_sample
-        # Test 2: If relative confidence in prior is 1, and sampleSize = 0 then returns mean_prior and cov_prior
-        # Test 3: As the number of sample size observations increase, the posterior mean and covariance shrinks toward mean_sample and cov_sample
+  # TODO: Test expectations
+    # Test 1: If relative confidence in prior is 0, then returns mean_sample and cov_sample
+    # Test 2: If relative confidence in prior is 1, and sampleSize = 0 then returns mean_prior and cov_prior
+    # Test 3: As the number of sample size observations increase, the posterior mean and covariance shrinks toward mean_sample and cov_sample
 }
 
 ####################################################################
@@ -233,7 +241,7 @@
 
 # Note the means are defined in such a way that a mean-variance optimization would yield an equally weighted portfolio
 # fake mean matrix : mus = 2.5 * Sigma / N
-    M = 2.5 * S %*% ones( N , 1 ) / N
+M = 2.5 * S %*% ones( N , 1 ) / N
 
 ####################################################################
 # conduct Monte carlo simulation
@@ -257,21 +265,21 @@
   trueFrontier = efficientFrontier( NumPortf , S , M , TRUE ) 
   
   # Bayesian prior for covariance and mu's (an arbitrary prior model of covariance and returns)
-    # the covariance prior is equal to the sample covariance on the principal diagonal
-    cov_prior  = diag( diag( cov ) ) 
+  # the covariance prior is equal to the sample covariance on the principal diagonal
+  cov_prior  = diag( diag( cov ) ) 
   
-    # set the prior expected returns for each asset to : mus = .5 * Sigma(1/N). Incidentally, this ensures there is a perfect positive linear relationship between asset variance and asset expected  return
-    mean_prior = .5 * cov_prior %*% rep( 1/N , N ) 
+  # set the prior expected returns for each asset to : mus = .5 * Sigma(1/N). Incidentally, this ensures there is a perfect positive linear relationship between asset variance and asset expected return
+  mean_prior = .5 * cov_prior %*% rep( 1/N , N ) 
   
-    # set the confidence in the prior as twice the confidence in the sample and blend the prior with the sample data
-    posterior = PartialConfidencePosterior( mean_sample = mean , cov_sample = cov , mean_prior = mean_prior , cov_prior = cov_prior , 
+  # set the confidence in the prior as twice the confidence in the sample and blend the prior with the sample data
+  posterior = PartialConfidencePosterior( mean_sample = mean , cov_sample = cov , mean_prior = mean_prior , cov_prior = cov_prior , 
                                             relativeConfidenceInMeanPrior = 2 , relativeConfidenceInCovPrior = 2 , sampleSize = nrow( rets ) )
 
-    cov_post = posterior$cov_post ; mean_post = posterior$mean_post ; time_post = posterior$time_post ; nu_post = posterior$nu_post ; rm( posterior )
+  cov_post = posterior$cov_post ; mean_post = posterior$mean_post ; time_post = posterior$time_post ; nu_post = posterior$nu_post ; rm( posterior )
   
   # construct Bayesian frontier using blended mu and Sigma, and identify robust portfolio
   # returns a set of Bayesian efficient portfolios: a list of returns, volatility, and assets weights along the posterior frontier. Each row represents a point on the frontier
-        # and the returns, volatility, and assets of the most robust portfolio in the set
+  # and the returns, volatility, and assets of the most robust portfolio in the set
   
   pickVol = round( .8 * NumPortf ) # Picks the 80% highest volatility ( a parameter )...
   volatility = ( sampleFrontier[[ "volatility" ]][ pickVol ] ) ^ 2 # ...on the sample *efficient* frontier. On the efficient *sample* frontier. This is why the problem is a second-order cone programming problem. TODO: why use sample frontier?
@@ -308,39 +316,38 @@
     weightsBay = bayesianFrontier[[ "weights" ]][ k , ]
     mubay = c( mubay , weightsBay %*% M ) # given the optimal weights from Bayesian estimates of mu and Sigma, measure the actual return using the true asset means
     volbay = c( volbay , ( weightsBay %*% S %*% weightsBay ) ) # given the optimal weights from the Bayesian estimates of mu and Sigma, measure the actual variance of the portfolio
-   }   
+  }   
     
-    # measure the actual performance of the most Robust portfolio along the Bayesian efficient frontier
-    weightsRob = robustPortfolio$weights
-    murob = weightsRob %*% M
-    volrob = weightsRob %*% S %*% weightsRob
+  # measure the actual performance of the most Robust portfolio along the Bayesian efficient frontier
+  weightsRob = robustPortfolio$weights
+  murob = weightsRob %*% M
+  volrob = weightsRob %*% S %*% weightsRob
      
-    # collect actual return and actual variances results for each portfolio in each simulation
-    meanVarMus[[ j ]] = mumv # list of actual returns along efficient frontier for each simulation based on portfolio constructed using sample mean and sample co-variance
-    meanVarVols[[ j ]] = volmv # ...and the list of actual variances along efficient frontier
-    bayesianMus[[ j ]] = mubay # list of actual returns based on bayesian mixing of prior and data sampled from true distribution
-    bayesianVols[[ j ]] = volbay # ...and the list of associated actual variances
-    robustMus[[ j ]] = murob # list of actual return based on robust allocation... Note only one robust portfolio per simulation j is identified.
-    robustVols[[ j ]] = volrob # ...and the list of associated actual variances. Note only one robust portfolio per simulation j is identified.
-    trueMus[[ j ]] = mutrue # list of actual return based on optimizing with the true mus...
-    trueVols[[ j ]] = voltrue # ...and the list of associated actual variances  
+  # collect actual return and actual variances results for each portfolio in each simulation
+  meanVarMus[[ j ]] = mumv # list of actual returns along efficient frontier for each simulation based on portfolio constructed using sample mean and sample co-variance
+  meanVarVols[[ j ]] = volmv # ...and the list of actual variances along efficient frontier
+  bayesianMus[[ j ]] = mubay # list of actual returns based on bayesian mixing of prior and data sampled from true distribution
+  bayesianVols[[ j ]] = volbay # ...and the list of associated actual variances
+  robustMus[[ j ]] = murob # list of actual return based on robust allocation... Note only one robust portfolio per simulation j is identified.
+  robustVols[[ j ]] = volrob # ...and the list of associated actual variances. Note only one robust portfolio per simulation j is identified.
+  trueMus[[ j ]] = mutrue # list of actual return based on optimizing with the true mus...
+  trueVols[[ j ]] = voltrue # ...and the list of associated actual variances  
 }
 
 # Plot sample, bayesian, and robust mean/variance portfolios
-    library( ggplot2 )
 
-    # create dataframe consisting of actual returns, actual variance, and sample indicator    
-    actualReturns = unlist( meanVarMus ) ; actualVariance = unlist( meanVarVols )
-    plotData1 = data.frame( actualReturns = actualReturns, actualVariance = actualVariance , type = "Sample" )
-    actualReturns = unlist( bayesianMus ) ; actualVariance = unlist( bayesianVols )
-    plotData2 = data.frame( actualReturns = actualReturns, actualVariance = actualVariance , type = "Bayesian" )
-    actualReturns = unlist( robustMus ) ; actualVariance = unlist( robustVols )
-    plotData3 = data.frame( actualReturns = actualReturns, actualVariance = actualVariance , type = "Robust Bayesian" )
-    actualReturns = unlist( trueMus ) ; actualVariance = unlist( trueVols )
-    plotData4 = data.frame( actualReturns = actualReturns, actualVariance = actualVariance , type = "True frontier" )
-    plotData = rbind( plotData1 , plotData2 , plotData3 , plotData4 ) ; rm( plotData1 , plotData2 , plotData3 , actualReturns , actualVariance )
+# create dataframe consisting of actual returns, actual variance, and sample indicator    
+actualReturns = unlist( meanVarMus ) ; actualVariance = unlist( meanVarVols )
+plotData1 = data.frame( actualReturns = actualReturns, actualVariance = actualVariance , type = "Sample" )
+actualReturns = unlist( bayesianMus ) ; actualVariance = unlist( bayesianVols )
+plotData2 = data.frame( actualReturns = actualReturns, actualVariance = actualVariance , type = "Bayesian" )
+actualReturns = unlist( robustMus ) ; actualVariance = unlist( robustVols )
+plotData3 = data.frame( actualReturns = actualReturns, actualVariance = actualVariance , type = "Robust Bayesian" )
+actualReturns = unlist( trueMus ) ; actualVariance = unlist( trueVols )
+plotData4 = data.frame( actualReturns = actualReturns, actualVariance = actualVariance , type = "True frontier" )
+plotData = rbind( plotData1 , plotData2 , plotData3 , plotData4 ) ; rm( plotData1 , plotData2 , plotData3 , actualReturns , actualVariance )
 
-    # build plot with overlays    
-    # Notice when plotting the the Bayesian portfolios are shrunk toward the prior. Therefore they 
-    # are less scattered and more efficient, although the prior differs significantly from the true market parameters.
-    ggplot( data = plotData ) + geom_point( aes_string( x = "actualVariance" , y = "actualReturns" , color = "type"  ) )
+# build plot with overlays    
+# Notice when plotting that the Bayesian portfolios are shrunk toward the prior. Therefore they 
+# are less scattered and more efficient, although the prior differs significantly from the true market parameters.
+ggplot( data = plotData ) + geom_point( aes_string( x = "actualVariance" , y = "actualReturns" , color = "type"  ) )



More information about the Returnanalytics-commits mailing list