[Returnanalytics-commits] r3735 - in pkg/Meucci: . R demo

noreply at r-forge.r-project.org
Wed Jun 24 09:37:27 CEST 2015


Author: xavierv
Date: 2015-06-24 09:37:27 +0200 (Wed, 24 Jun 2015)
New Revision: 3735

Modified:
   pkg/Meucci/NAMESPACE
   pkg/Meucci/R/HermiteGrid.R
   pkg/Meucci/R/data.R
   pkg/Meucci/demo/HermiteGrid_CaseStudy.R
   pkg/Meucci/demo/HermiteGrid_demo.R
Log:
- fixed the HermiteGrid_CaseStudy demo script, ported the data it needs, and exported the kernel functions it uses
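
A quick way to exercise the fixed demo from this revision (a sketch, assuming the
package is installed from r3735; demo() finds the script only if it is listed in
demo/00Index, otherwise source() the file directly):

    library(Meucci)
    demo("HermiteGrid_CaseStudy", package = "Meucci")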

Modified: pkg/Meucci/NAMESPACE
===================================================================
--- pkg/Meucci/NAMESPACE	2015-06-23 22:00:51 UTC (rev 3734)
+++ pkg/Meucci/NAMESPACE	2015-06-24 07:37:27 UTC (rev 3735)
@@ -73,9 +73,14 @@
 export(garch2f8)
 export(hermitePolynomial)
 export(integrateSubIntervals)
+export(kernelbw)
+export(kernelcdf)
+export(kernelinv)
+export(kernelpdf)
 export(linreturn)
 export(normalizeProb)
 export(pHistPriorPosterior)
+export(private_fun)
 export(robustBayesianPortfolioOptimization)
 export(std)
 export(subIntervals)

Modified: pkg/Meucci/R/HermiteGrid.R
===================================================================
--- pkg/Meucci/R/HermiteGrid.R	2015-06-23 22:00:51 UTC (rev 3734)
+++ pkg/Meucci/R/HermiteGrid.R	2015-06-24 07:37:27 UTC (rev 3735)
@@ -146,6 +146,7 @@
 #' @return bw a numeric signifying the bandwidth
 #'
 #' @author Manan Shah \email{mkshah@@tepper.cmu.edu}
+#' @export
 
 kernelbw <- function(xi) {
   N      <- length(xi)
@@ -174,6 +175,7 @@
 #'            value of length equal to x
 #'
 #' @author Manan Shah \email{mkshah@@tepper.cmu.edu}
+#' @export
 
 kernelcdf <- function(x, xi, bw, wi) {
   n <- length(xi)
@@ -205,6 +207,7 @@
 #'            length equal to x
 #'
 #' @author Manan Shah \email{mkshah@@tepper.cmu.edu}
+#' @export
 
 kernelpdf <- function(x, xi, bw, wi) {
   n <- length(xi)
@@ -237,6 +240,7 @@
 #'            to p
 #'
 #' @author Manan Shah \email{mkshah@@tepper.cmu.edu}
+#' @export
 
 kernelinv <- function(p, xi, bw, wi) {
   nargin <- length(as.list(match.call())) - 1
@@ -289,6 +293,7 @@
 #'            length equal to x or p
 #'
 #' @author Manan Shah \email{mkshah@@tepper.cmu.edu}
+#' @export
 
 private_fun <- function(x, xi, bw, wi, p) {
   f <- kernelcdf(x, xi, bw, wi) - p
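
A minimal usage sketch for the kernel helpers exported above (the synthetic sample
and the equal weights wi are assumptions made purely for illustration; the
case-study demo derives xi and wi from its own inputs):

    xi <- as.matrix(rnorm(500))              # illustrative sample, not package data
    bw <- kernelbw(xi)                       # bandwidth estimated from the sample
    wi <- rep(1 / length(xi), length(xi))    # equal observation weights (assumption)
    kernelpdf(0, xi, bw, wi)                 # smoothed density evaluated at x = 0
    kernelcdf(0, xi, bw, wi)                 # smoothed CDF evaluated at x = 0
    kernelinv(0.05, xi, bw, wi)              # 5% quantile of the smoothed CDF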

Modified: pkg/Meucci/R/data.R
===================================================================
--- pkg/Meucci/R/data.R	2015-06-23 22:00:51 UTC (rev 3734)
+++ pkg/Meucci/R/data.R	2015-06-24 07:37:27 UTC (rev 3735)
@@ -283,4 +283,13 @@
 #' @author Xavier Valls\email{xaviervallspla@@gmail.com}
 #' @references A. Meucci, D. Ardia, S. Keel - "Fully Flexible Extreme Views"
 #' @keywords data
-NULL
\ No newline at end of file
+NULL
+
+#' @title Pseudo data: Gauss-Hermite grid case study input data
+#'
+#' @name pseudodata
+#' @docType data
+#' @author Xavier Valls\email{xaviervallspla@@gmail.com}
+#' @references A. Meucci, D. Ardia, S. Keel - "Fully Flexible Extreme Views"
+#' @keywords data
+NULL

Modified: pkg/Meucci/demo/HermiteGrid_CaseStudy.R
===================================================================
--- pkg/Meucci/demo/HermiteGrid_CaseStudy.R	2015-06-23 22:00:51 UTC (rev 3734)
+++ pkg/Meucci/demo/HermiteGrid_CaseStudy.R	2015-06-24 07:37:27 UTC (rev 3735)
@@ -10,7 +10,10 @@
 # IMPORTANT - This script is about the methodology, not the input data, 
 # which has been modified
 
-xi <- as.matrix(100 * data[, 2])
+data(pseudodata)
+data(ghqx)
+
+xi <- as.matrix(100 * pseudodata[, 2])
 n <- nrow(xi)
 
 # bandwidth
@@ -23,31 +26,32 @@
                 
 # Prior market model
 # kernel density
+market <- list()
+market$mu  <- mean(xi)
+market$pdf <- function (x) kernelpdf(x, xi, bw, wi)
+market$cdf <- function (x) kernelcdf(x, xi, bw, wi)
+market$inv <- function (x) kernelinv(x, xi, bw, wi)
+market$VaR95 <- market$inv(c(0.05))
+market$CVaR95 <- integrate(function(x) x * market$pdf(x), -100,
+                                                 market$VaR95)$val / 0.05
 
-market.mu  <- mean(xi)
-market.pdf <- function (x) kernelpdf(x, xi, bw, wi)
-market.cdf <- function (x) kernelcdf(x, xi, bw, wi)
-market.inv <- function (x) kernelinv(x, xi, bw, wi)
-market.VaR95 <- market.inv(c(0.05))
-market.CVaR95 <- integrate(function(x) x * market.pdf(x), -100,
-                                                 market.VaR95)$val / 0.05
-
 # numerical (Gauss-Hermite grid) prior
 
 # rescale GH zeros so they belong to [0,1]
 tmp <- (ghqx - min(ghqx)) / (max(ghqx) - min(ghqx))
 epsilon <- 1e-10
-Lower <- market.inv(epsilon)
-Upper <- market.inv(1 - epsilon)
+Lower <- market$inv(epsilon)
+Upper <- market$inv(1 - epsilon)
 X  <- Lower + tmp * (Upper - Lower) # rescale mesh
                 
-p <- integrateSubIntervals(X, market.cdf)
+p <- integrateSubIntervals(X, market$cdf)
 p <- normalizeProb(p)
 J <- nrow(X)
                 
 # Entropy posterior from extreme view on mean and CVaR
-view.mu   <- mean(xi) - 1.0
-view.CVaR95 <- market.CVaR95 - 1.0
+view <- list()
+view$mu   <- mean(xi) - 1.0
+view$CVaR95 <- market$CVaR95 - 1.0
                 
 # Newton-Raphson
 emptyMatrix <- matrix(,nrow = 0, ncol = 0)
@@ -55,8 +59,8 @@
 idx <- as.matrix(cumsum(p) <= 0.05)
 s[1] <- sum(idx)
 posteriorEntropy <- EntropyProg(p, t(idx), as.matrix(0.05), rbind(rep(1, J),
-                               t(X), t(idx * X)), rbind(1, view.mu,
-                               0.05 * view.CVaR95))
+                               t(X), t(idx * X)), rbind(1, view$mu,
+                               0.05 * view$CVaR95))
 KLdiv <- as.matrix(posteriorEntropy$optimizationPerformance$ml)
 p_ <- posteriorEntropy$p_
 
@@ -68,27 +72,18 @@
   idx <- cbind(matrix(1, 1, s[i - 1]), matrix(0, 1, J - s[i - 1]))
   posteriorEntropy1 <- EntropyProg(p, idx, as.matrix(0.05),
                                   rbind(matrix(1, 1, J), t(X), t(t(idx) * X)),
-                                  rbind(1, view.mu, 0.05 * view.CVaR95))
-  # [dummy, KLdiv_s]  = optimizeEntropy(p, [idx'; (idx .* X)'],
-  #                                     [0.05; 0.05 * view.CVaR95],
-  #                                     [ones(1, J); X'], [1; view.mu]);
-
+                                  rbind(1, view$mu, 0.05 * view$CVaR95))
+  
   idx <- cbind(matrix(1, 1, s[i - 1] + 1), matrix(0, 1, J - s[i - 1] - 1))
   posteriorEntropy2 <- EntropyProg(p, idx, as.matrix(0.05),
                                   rbind(matrix(1, 1, J), t(X), t(t(idx) * X)),
-                                  rbind(1, view.mu, 0.05 * view.CVaR95))
-  # [dummy, KLdiv_s1] = optimizeEntropy(p, [idx'; (idx .* X)'],
-  #                                    [0.05; 0.05 * view.CVaR95],
-  #                                    [ones(1, J); X'], [1; view.mu]);
-
+                                  rbind(1, view$mu, 0.05 * view$CVaR95))
+  
   idx <- cbind(matrix(1, 1, s[i - 1] + 2), matrix(0, 1, J - s[i - 1] - 2))
   posteriorEntropy3 <- EntropyProg(p, idx, as.matrix(0.05),
                                   rbind(matrix(1, 1, J), t(X), t(t(idx) * X)),
-                                  rbind(1, view.mu, 0.05 * view.CVaR95))
-  # [dummy, KLdiv_s2] = optimizeEntropy(p, [idx'; (idx .* X)'],
-  #                                     [0.05; 0.05 * view.CVaR95],
-  #                                     [ones(1, J); X'], [1; view.mu]);
-
+                                  rbind(1, view$mu, 0.05 * view$CVaR95))
+  
   # first difference
   DE  <- posteriorEntropy2$optimizationPerformance$ml -
         posteriorEntropy1$optimizationPerformance$ml
@@ -105,17 +100,22 @@
   idx  <- cbind(matrix(1, 1, s[i]), matrix(0, 1, J - s[i]))
   tempEntropy <- EntropyProg(p, idx, as.matrix(0.05), rbind(matrix(1, 1, J),
                             t(X), t(t(idx) * X)),
-                            rbind(1, view.mu, 0.05 * view.CVaR95))
-  # [tmp.p_, tmp.KLdiv] = optimizeEntropy(p, [idx'; (idx .* X)'],
-  #                                       [0.05; 0.05 * view.CVaR95],
-  #                                       [ones(1, J); X'], [1; view.mu]);
+                            rbind(1, view$mu, 0.05 * view$CVaR95))
   p_ <- cbind(p_, tempEntropy$p_)
-  KLdiv <- rbind(KLdiv, tempEntropy$optimizationPerformance$ml) #ok<*AGROW>
+  KLdiv <- rbind(KLdiv, tempEntropy$optimizationPerformance$ml)
 
   # if change in KLdiv less than one percent, stop
   if(abs((KLdiv[i] - KLdiv[i - 1]) / KLdiv[i - 1])  < 0.01)
     doStop <- 1
 }
 
-plot(t(X), p)
-plot(t(X), p_[,ncol(p_)])
+plot(t(X), p, type = "l", col = "blue", lwd = 1.5, xlab="Returns %",
+     ylab = "")
+lines(t(X), p_[, ncol(p_)], type = "l", col = "red", lwd = 1.5)
+lines(market$mu, 0.0,  type = "p", pch = 16, col = "blue")
+lines(market$CVaR95, 0.0,  type = "p", pch = 17, col = "blue")
+lines(view$mu, 0.0,  type = "p", pch = 16, col = "red")
+lines(view$CVaR95, 0.0,  type = "p", pch = 17, col = "red")
+legend("topright", 1.9, c("Prior", "Posterior"),
+       col = c("blue", "red"), lty = 1)
+

Modified: pkg/Meucci/demo/HermiteGrid_demo.R
===================================================================
--- pkg/Meucci/demo/HermiteGrid_demo.R	2015-06-23 22:00:51 UTC (rev 3734)
+++ pkg/Meucci/demo/HermiteGrid_demo.R	2015-06-24 07:37:27 UTC (rev 3735)
@@ -23,13 +23,13 @@
 market$inv <- function(x) qnorm(x, mean = market$mu, sd = sqrt(market$sig2))
 
 # numerical (Monte Carlo) prior
-monteCarlo <- emptyMatrix
+monteCarlo <- list()
 monteCarlo$J <- 100000
 monteCarlo$X <- market$rnd(monteCarlo$J)
 monteCarlo$p <- normalizeProb(1 / monteCarlo$J * ones(monteCarlo$J, 1))
 
 # numerical (Gauss-Hermite grid) prior
-ghqMesh <- emptyMatrix
+ghqMesh <- list()
 
 # rescale GH zeros so they belong to [0,1]
 tmp <- (ghqx - min(ghqx)) / (max(ghqx) - min(ghqx))
@@ -46,11 +46,11 @@
 # Entropy posterior from extreme view on expectation
 ################################################################################
 # view of the analyst
-view <- emptyMatrix
+view <- list()
 view$mu <- -3.0
 
 # analytical (known since normal model has analytical solution)
-truePosterior <- emptyMatrix
+truePosterior <- list()
 truePosterior <- Prior2Posterior(market$mu, 1, view$mu, market$sig2, 0)
 truePosterior$pdf <- function(x) dnorm(x, truePosterior$M_,
        								   sqrt(truePosterior$S_))
@@ -58,8 +58,8 @@
 # numerical (Monte Carlo)
 Aeq <- rbind(ones(1, monteCarlo$J), t(monteCarlo$X))
 beq <- rbind(1, view$mu)
-monteCarloOptimResult <- EntropyProg(monteCarlo$p, emptyMatrix, emptyMatrix, Aeq,
-                                     beq)
+monteCarloOptimResult <- EntropyProg(monteCarlo$p, emptyMatrix, emptyMatrix,
+ 									 Aeq, beq)
 
 monteCarlo$p_ <- monteCarloOptimResult$p_
 monteCarlo$KLdiv <- monteCarloOptimResult$optimizationPerformance$ml
@@ -83,17 +83,17 @@
 # Monte Carlo
 dev.new()
 plotDataMC <- PHist(monteCarlo$X, monteCarlo$p_, 50, main = "Monte Carlo",
-					xlim = c(xmin, xmax), ylim = c(0, ymax))
+    				xlim = c(xmin, xmax), ylim = c(0, ymax))
 lines(xmesh, market$pdf(xmesh), type = "l", col = "blue")
 lines(xmesh, truePosterior$pdf(xmesh),  type = "l", col = "red")
 lines(0.0, 0.0,  type = "p", pch = 17, col = "blue")
-lines(view.mu, 0.0,  type = "p", pch = 17, col = "red")
+lines(view$mu, 0.0,  type = "p", pch = 17, col = "red")
 
 # Gauss Hermite Grid
 dev.new()
 plotDataGHQ <- PHist(data.matrix(ghqMesh$X), ghqMesh$p_, 50,
                      main = "Gauss-Hermite grid",
-					 xlim = c(xmin, xmax), ylim = c(0, ymax))
+    				 xlim = c(xmin, xmax), ylim = c(0, ymax))
 lines(xmesh, market$pdf(xmesh), type = "l", col = "blue")
 lines(xmesh, truePosterior$pdf(xmesh),  type = "l", col = "red")
 lines(0.0, 0.0,  type = "p", pch = 17, col = "blue")


