[Yuima-commits] r375 - in pkg/yuima: . R man src
noreply at r-forge.r-project.org
Tue Apr 21 12:51:29 CEST 2015
Author: kyuta
Date: 2015-04-21 12:51:29 +0200 (Tue, 21 Apr 2015)
New Revision: 375
Added:
pkg/yuima/R/hyavar.R
pkg/yuima/man/hyavar.Rd
Modified:
pkg/yuima/DESCRIPTION
pkg/yuima/NAMESPACE
pkg/yuima/NEWS
pkg/yuima/R/llag.R
pkg/yuima/src/cce_functions.c
Log:
added hyavar.R, hyavar.Rd
modified llag.R, cce_functions.c
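The only new user-facing function in this revision is hyavar(); the llag.R and cce_functions.c changes adjust how observation times are compared inside llag(). A minimal call sketch in R, assuming the package at this revision is installed and that psample is a yuima object holding nonsynchronous data as in the examples of hyavar.Rd further below:

result <- hyavar(psample)
names(result)           # "covmat" "cormat" "avar.cov" "avar.cor"
result$covmat           # Hayashi-Yoshida covariance estimates (computed via cce)
sqrt(result$avar.cov)   # elementwise: estimated standard errors of those estimates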
Modified: pkg/yuima/DESCRIPTION
===================================================================
--- pkg/yuima/DESCRIPTION 2015-04-21 10:34:54 UTC (rev 374)
+++ pkg/yuima/DESCRIPTION 2015-04-21 10:51:29 UTC (rev 375)
@@ -1,8 +1,8 @@
Package: yuima
Type: Package
Title: The YUIMA Project package for SDEs
-Version: 1.0.63
-Date: 2015-04-19
+Version: 1.0.64
+Date: 2015-04-21
Depends: methods, zoo, stats4, utils, expm, cubature, mvtnorm
Author: YUIMA Project Team
Maintainer: Stefano M. Iacus <stefano.iacus at unimi.it>
Modified: pkg/yuima/NAMESPACE
===================================================================
--- pkg/yuima/NAMESPACE 2015-04-21 10:34:54 UTC (rev 374)
+++ pkg/yuima/NAMESPACE 2015-04-21 10:51:29 UTC (rev 375)
@@ -1,145 +1,146 @@
-import("methods")
-
-##importFrom("stats", "end", "time", "start")
-importFrom("graphics", "plot")
-import("zoo")
-importFrom(stats, confint)
-import("stats4")
-import("expm")
-import("mvtnorm")
-import("cubature")
-
-importFrom(utils, toLatex)
-
-
-
-
-
-
-exportClasses("yuima",
- "yuima.data",
- "yuima.sampling",
- "yuima.characteristic",
- "yuima.model",
- "model.parameter",
- "yuima.carma",
- "carma.info",
- "yuima.carma.qmle",
- "yuima.poisson",
- "yuima.qmle",
- "yuima.CP.qmle",
- "cogarch.info",
- "yuima.cogarch"
- )
-
-exportMethods(
- "dim",
- "length",
- ## "start",
- "plot",
- ## "time",
- ## "end",
- "simulate",
- "cce",
- "llag",
- "poisson.random.sampling",
- "get.zoo.data",
- "initialize",
-## "ql",
-## "rql",
-## "ml.ql",
- "adaBayes",
- "limiting.gamma",
- "getF",
- "getf",
- "getxinit",
- "gete",
- "simFunctional",
- "F0",
- "Fnorm",
- "asymptotic_term",
- "cbind.yuima"
- )
-
-## function which we want to expose to the user
-## all other functions are used internally by the
-## package
-export(setYuima)
-export(setModel) ## builds sde model
-export(setData)
-export(setSampling)
-export(setCharacteristic)
-export(setCarma)
-export(setPoisson)
-export(dconst)
-export(rconst)
-
-export(setCogarch)
-
-export(dim)
-export(length)
-#export(start)
-export(plot)
-#export(time)
-#export(end)
-
-export(simulate) # simulates couple of processes
-export(subsampling)
-export(cce)
-export(llag)
-export(poisson.random.sampling)
-export(noisy.sampling)
-export(mpv)
-export(bns.test)
-
-export(get.zoo.data)
-
-##export(ql,rql,ml.ql)
-##export(rql)
-export(adaBayes)
-export(rIG, rNIG, rbgamma, rngamma, rstable) ##:: random number generator for Inverse Gaussian
-export(limiting.gamma)
-
-export(setFunctional)
-export(getF)
-export(getf)
-export(getxinit)
-export(gete)
-
-export(simFunctional)
-export(F0)
-export(Fnorm)
-export(asymptotic_term)
-
-##export(LSE)
-export(lse)
-
-export(qmle)
-export(quasilogl)
-export(phi.test)
-export(lasso)
-export(CPoint)
-export(qmleR)
-export(qmleL)
-
-export(CarmaNoise) # Estimates the Levy in carma model
-export(gmm) # Estimation COGARCH(P,Q) using Method Of Moments
-
-
-export(qgv)
-export(mmfrac)
-
-export(cbind.yuima)
-
-S3method(print, phitest)
-S3method(print, qgv)
-S3method(print, mmfrac)
-S3method(print, yuima.lasso)
-
-S3method(toLatex, yuima)
-S3method(toLatex, yuima.model)
-S3method(toLatex, yuima.carma)
-S3method(toLatex, yuima.cogarch)
-
-useDynLib(yuima)
-
+import("methods")
+
+##importFrom("stats", "end", "time", "start")
+importFrom("graphics", "plot")
+import("zoo")
+importFrom(stats, confint)
+import("stats4")
+import("expm")
+import("mvtnorm")
+import("cubature")
+
+importFrom(utils, toLatex)
+
+
+
+
+
+
+exportClasses("yuima",
+ "yuima.data",
+ "yuima.sampling",
+ "yuima.characteristic",
+ "yuima.model",
+ "model.parameter",
+ "yuima.carma",
+ "carma.info",
+ "yuima.carma.qmle",
+ "yuima.poisson",
+ "yuima.qmle",
+ "yuima.CP.qmle",
+ "cogarch.info",
+ "yuima.cogarch"
+ )
+
+exportMethods(
+ "dim",
+ "length",
+ ## "start",
+ "plot",
+ ## "time",
+ ## "end",
+ "simulate",
+ "cce",
+ "llag",
+ "poisson.random.sampling",
+ "get.zoo.data",
+ "initialize",
+## "ql",
+## "rql",
+## "ml.ql",
+ "adaBayes",
+ "limiting.gamma",
+ "getF",
+ "getf",
+ "getxinit",
+ "gete",
+ "simFunctional",
+ "F0",
+ "Fnorm",
+ "asymptotic_term",
+ "cbind.yuima"
+ )
+
+## function which we want to expose to the user
+## all other functions are used internally by the
+## package
+export(setYuima)
+export(setModel) ## builds sde model
+export(setData)
+export(setSampling)
+export(setCharacteristic)
+export(setCarma)
+export(setPoisson)
+export(dconst)
+export(rconst)
+
+export(setCogarch)
+
+export(dim)
+export(length)
+#export(start)
+export(plot)
+#export(time)
+#export(end)
+
+export(simulate) # simulates couple of processes
+export(subsampling)
+export(cce)
+export(llag)
+export(poisson.random.sampling)
+export(noisy.sampling)
+export(mpv)
+export(bns.test)
+export(hyavar) # asymptotic variance estimator for the Hayashi-Yoshida estimator
+
+export(get.zoo.data)
+
+##export(ql,rql,ml.ql)
+##export(rql)
+export(adaBayes)
+export(rIG, rNIG, rbgamma, rngamma, rstable) ##:: random number generator for Inverse Gaussian
+export(limiting.gamma)
+
+export(setFunctional)
+export(getF)
+export(getf)
+export(getxinit)
+export(gete)
+
+export(simFunctional)
+export(F0)
+export(Fnorm)
+export(asymptotic_term)
+
+##export(LSE)
+export(lse)
+
+export(qmle)
+export(quasilogl)
+export(phi.test)
+export(lasso)
+export(CPoint)
+export(qmleR)
+export(qmleL)
+
+export(CarmaNoise) # Estimates the Levy in carma model
+export(gmm) # Estimation COGARCH(P,Q) using Method Of Moments
+
+
+export(qgv)
+export(mmfrac)
+
+export(cbind.yuima)
+
+S3method(print, phitest)
+S3method(print, qgv)
+S3method(print, mmfrac)
+S3method(print, yuima.lasso)
+
+S3method(toLatex, yuima)
+S3method(toLatex, yuima.model)
+S3method(toLatex, yuima.carma)
+S3method(toLatex, yuima.cogarch)
+
+useDynLib(yuima)
+
Modified: pkg/yuima/NEWS
===================================================================
--- pkg/yuima/NEWS 2015-04-21 10:34:54 UTC (rev 374)
+++ pkg/yuima/NEWS 2015-04-21 10:51:29 UTC (rev 375)
@@ -28,4 +28,7 @@
modified cce.Rd
2014/11/11: fixed a bug in cce.R (method "GME")
modified the example of cce.Rd
-2015/04/02: fixed a bug in rng.R (function "rstable")
+2015/04/02: fixed a bug in rng.R (function "rstable")
+2015/04/21: added hyavar.R, hyavar.Rd (asymptotic variance estimator for HY)
+ fixed a bug in llag.R
+ modified cce.Rd, cce_functions.c
Added: pkg/yuima/R/hyavar.R
===================================================================
--- pkg/yuima/R/hyavar.R (rev 0)
+++ pkg/yuima/R/hyavar.R 2015-04-21 10:51:29 UTC (rev 375)
@@ -0,0 +1,111 @@
+
+hyavar <- function(yuima, bw, nonneg = TRUE, psd = TRUE){
+
+ x <- get.zoo.data(yuima)
+ n.series <- length(x)
+
+ # allocate memory
+ ser.X <- vector(n.series, mode="list") # data in 'x'
+ ser.times <- vector(n.series, mode="list") # time index in 'x'
+ ser.diffX <- vector(n.series, mode="list")
+ ser.num <- integer(n.series) # number of observations
+ avar.cov <- diag(n.series) # asymptotic variances of cce(yuima)$covmat
+ avar.cor <- diag(0, n.series) # asymptotic variances of cce(yuima)$cormat
+
+ for(i in 1:n.series){
+
+ # NA data must be skipped
+ idt <- which(is.na(x[[i]]))
+ if(length(idt) > 0){
+ x[[i]] <- x[[i]][-idt]
+ }
+ if(length(x[[i]])<2) {
+ stop("length of data (w/o NA) must be more than 1")
+ }
+
+ # set data and time index
+ ser.X[[i]] <- as.numeric(x[[i]]) # we need to transform data into numeric to avoid problem with duplicated indexes below
+ ser.times[[i]] <- as.numeric(time(x[[i]]))
+ ser.diffX[[i]] <- diff(ser.X[[i]])
+ # set number of observations
+ ser.num[i] <- length(ser.X[[i]])
+
+ }
+
+ # Computing the Hayashi-Yoshida estimator
+ result <- cce(yuima, psd = psd)
+ cmat <- result$covmat
+
+ if(n.series > 1){
+
+ if(missing(bw)){
+
+ bw <- matrix(0, n.series, n.series)
+
+ for(i in 1:(n.series - 1)){
+
+ for(j in (i + 1):n.series){
+
+ Init <- min(ser.times[[i]][1], ser.times[[j]][1])
+ Term <- max(ser.times[[i]][ser.num[i]], ser.times[[j]][ser.num[j]])
+ bw[i, j] <- (Term - Init) * min(ser.num[c(i,j)])^(-0.45)
+ bw[j, i] <- bw[i, j]
+
+ }
+
+ }
+
+ }
+
+ }
+
+ bw <- matrix(bw, n.series, n.series)
+
+ for(i in 1:n.series){
+
+ avar.cov[i, i] <- (2/3) * sum(ser.diffX[[i]]^4)
+
+ for(j in 1:i){
+
+ if(i > j){
+
+ N.max <- max(ser.num[i],ser.num[j])
+
+ avar <- .C("hyavar",
+ as.double(ser.times[[i]]),
+ as.double(ser.times[[j]]),
+ as.integer(ser.num[i]),
+ as.integer(ser.num[j]),
+ as.double(ser.X[[i]]),
+ as.double(ser.X[[j]]),
+ as.integer(N.max),
+ as.double(bw[i, j]),
+ avar = double(4))$avar
+ ## avar[1] = var(HYij), avar[2] = cov(HYii, HYij), avar[3] = cov(HYij, HYjj), avar[4] = cov(HYii, HYjj)
+
+ avar.cov[i, j] <- avar[1]
+ avar.cov[j, i] <- avar[1]
+
+ Pi <- matrix(c(avar.cov[i,i], avar[2], avar[4],
+ avar[2], avar[1], avar[3],
+ avar[4], avar[3], avar.cov[j,j]), 3, 3)
+
+ if(nonneg){ # Taking the spectral absolute value of Pi
+ tmp <- eigen(Pi, symmetric = TRUE)
+ Pi <- tmp$vectors %*% diag(abs(tmp$values)) %*% t(tmp$vectors)
+ }
+
+ d <- c(-0.5 * cmat[i,j]/cmat[i,i], 1, -0.5 * cmat[i,j]/cmat[j,j])
+
+ avar.cor[i, j] <- d %*% Pi %*% d / (cmat[i,i] * cmat[j,j])
+ avar.cor[j, i] <- avar.cor[i, j]
+
+ }
+
+ }
+
+ }
+
+ return(list(covmat = cmat, cormat = result$cormat, avar.cov = avar.cov, avar.cor = avar.cor))
+}
+
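The nonneg branch above is what keeps the delta-method variance of a correlation non-negative: the estimated 3x3 asymptotic covariance matrix Pi of (HYii, HYij, HYjj) is replaced by its spectral absolute value before the quadratic form with the gradient d is evaluated. A self-contained R sketch of just that step, with made-up numbers standing in for the estimator output:

## hypothetical symmetric "asymptotic covariance" matrix with a negative eigenvalue
Pi <- matrix(c(1.0,  0.9,  0.9,
               0.9,  1.0, -0.9,
               0.9, -0.9,  1.0), 3, 3)
eigen(Pi, symmetric = TRUE)$values              # one eigenvalue is negative

tmp <- eigen(Pi, symmetric = TRUE)              # spectral absolute value of Pi
Pi.abs <- tmp$vectors %*% diag(abs(tmp$values)) %*% t(tmp$vectors)

HYii <- 1.2; HYij <- 0.5; HYjj <- 0.9           # hypothetical HY estimates
d <- c(-0.5 * HYij/HYii, 1, -0.5 * HYij/HYjj)   # same gradient as in hyavar.R
drop(d %*% Pi.abs %*% d) / (HYii * HYjj)        # avar.cor entry, always >= 0

Since Pi.abs is positive semidefinite by construction, the quadratic form cannot be negative, which is exactly what nonneg = TRUE guarantees.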
Modified: pkg/yuima/R/llag.R
===================================================================
--- pkg/yuima/R/llag.R 2015-04-21 10:34:54 UTC (rev 374)
+++ pkg/yuima/R/llag.R 2015-04-21 10:51:29 UTC (rev 375)
@@ -41,6 +41,9 @@
ser.diffX <- vector(d, mode="list") # difference of data
vol <- double(d)
+ # Set the tolerance to avoid numerical errors in comparisons
+ tol <- 1e-6
+
for(i in 1:d){
# NA data must be skipped
@@ -144,9 +147,9 @@
as.integer(n-2),
as.integer(length(time1)),
as.integer(length(time2)),
- as.double(y),
- as.double(time1),
- as.double(time2),
+ as.double(y/tol),
+ as.double(time1/tol),
+ as.double(time2/tol),
double(length(time2)),
as.double(ser.diffX[[i]]),
as.double(ser.diffX[[j]]),
@@ -190,9 +193,9 @@
as.integer(length(grid[[num]])),
as.integer(length(time1)),
as.integer(length(time2)),
- as.double(grid[[num]]),
- as.double(time1),
- as.double(time2),
+ as.double(grid[[num]]/tol),
+ as.double(time1/tol),
+ as.double(time2/tol),
double(length(time2)),
as.double(ser.diffX[[i]]),
as.double(ser.diffX[[j]]),
@@ -296,13 +299,13 @@
as.integer(n-2),
as.integer(length(time1)),
as.integer(length(time2)),
- as.double(y),
- as.double(time1),
- as.double(time2),
+ as.double(y/tol),
+ as.double(time1/tol),
+ as.double(time2/tol),
double(length(time2)),
as.double(ser.diffX[[i]]),
as.double(ser.diffX[[j]]),
- value=double(n-2),PACKAGE="yuima")$value
+ value=double(n-2))$value
idx <- which.max(abs(tmp))
mlag <- -y[idx] # make the first timing of max or min
@@ -340,13 +343,13 @@
as.integer(length(grid[[num]])),
as.integer(length(time1)),
as.integer(length(time2)),
- as.double(grid[[num]]),
- as.double(time1),
- as.double(time2),
+ as.double(grid[[num]]/tol),
+ as.double(time1/tol),
+ as.double(time2/tol),
double(length(time2)),
as.double(ser.diffX[[i]]),
as.double(ser.diffX[[j]]),
- value=double(length(grid[[num]])),PACKAGE="yuima")$value
+ value=double(length(grid[[num]])))$value
idx <- which.max(abs(tmp))
mlag <- -grid[[num]][idx] # make the first timing of max or min
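The llag.R changes above address floating-point comparisons: the lag grid and the observation times are rescaled by tol = 1e-6 on the R side and rounded on the C side, so the HYcrosscov routines compare integer-valued quantities instead of raw doubles. A small base-R illustration of the failure mode being avoided (the times are made up):

t1 <- 0.1 + 0.2                        # stored as 0.30000000000000004
t2 <- 0.3
t1 == t2                               # FALSE: the raw doubles disagree
tol <- 1e-6                            # same tolerance as in llag.R
round(t1/tol) == round(t2/tol)         # TRUE: both map to 300000 on the rescaled grid

After rescaling and rounding, observation times that differ only by floating-point noise are treated as equal, which is what the overlap checks in HYcrosscov require.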
Added: pkg/yuima/man/hyavar.Rd
===================================================================
--- pkg/yuima/man/hyavar.Rd (rev 0)
+++ pkg/yuima/man/hyavar.Rd 2015-04-21 10:51:29 UTC (rev 375)
@@ -0,0 +1,180 @@
+\name{hyavar}
+\alias{hyavar}
+%- Also NEED an '\alias' for EACH other topic documented here.
+\title{
+%% ~~function to do ... ~~
+Asymptotic Variance Estimator for the Hayashi-Yoshida Estimator
+}
+\description{
+%% ~~ A concise (1-5 lines) description of what the function does. ~~
+This function estimates the asymptotic variances of the covariance and correlation estimates obtained by the Hayashi-Yoshida estimator.
+}
+\usage{
+hyavar(yuima, bw, nonneg = TRUE, psd = TRUE)
+}
+%- maybe also 'usage' for other objects documented here.
+\arguments{
+ \item{yuima}{
+%% ~~Describe \code{yuima} here~~
+an object of yuima-class or yuima.data-class.
+}
+ \item{bw}{
+%% ~~Describe \code{bw} here~~
+a positive number or a numeric matrix. If it is a matrix, each component indicates the bandwidth parameter for the kernel estimators used to estimate the asymptotic variance of the corresponding component (necessary only for off-diagonal components). If it is a number, it is converted to a matrix as \code{matrix(bw,d,d)}, where \code{d=dim(x)}. The default value is the matrix whose \eqn{(i,j)}-th component is given by \eqn{(T-t_0)min(n_i,n_j)^{-0.45}}, where \eqn{T-t_0} is the length of the time interval spanned by the observations of the \eqn{i}-th and \eqn{j}-th components and \eqn{n_i} denotes the number of observations for the \eqn{i}-th component of the data.
+}
+ \item{nonneg}{
+%% ~~Describe \code{psd} here~~
+logical. If \code{TRUE}, the asymptotic variance estimates for correlations are always ensured to be non-negative. See `Details'.
+}
+
+\item{psd}{
+%% ~~Describe \code{psd} here~~
+passed to \code{\link{cce}}.
+}
+}
+\details{
+%% ~~ If necessary, more details than the description above ~~
+The method used to estimate the asymptotic variances is as follows.
+The diagonal components are estimated by the realized quarticity multiplied by \code{2/3}; see e.g. Hayashi et al. (2011) for the theoretical justification.
+The off-diagonal components are estimated by the naive kernel approach described in Section 8.2 of Hayashi and Yoshida (2011). Note that the asymptotic covariance between a diagonal component and another component, which is needed to evaluate the asymptotic variances of the correlation estimates, is not provided in Hayashi and Yoshida (2011), but it can be derived in a manner similar to that paper.
+\cr
+\cr
+If \code{nonneg} is \code{TRUE}, negative values of the asymptotic variances of the correlations are avoided in the following way: the computed asymptotic variance-covariance matrix of the vector \eqn{(HYii,HYij,HYjj)} is replaced by its spectral absolute value. Here, \eqn{HYij} denotes the Hayashi-Yoshida estimator for the \eqn{(i,j)}-th component.
+\cr
+\cr
+The function also returns the covariance and correlation matrices calculated by the Hayashi-Yoshida estimator (using \code{\link{cce}}).
+}
+\value{
+%% ~Describe the value returned
+%% If it is a LIST, use
+%% \item{comp1 }{Description of 'comp1'}
+%% \item{comp2 }{Description of 'comp2'}
+%% ...
+A list with components:
+\item{covmat}{the estimated covariance matrix}
+\item{cormat}{the estimated correlation matrix}
+\item{avar.cov}{the estimated asymptotic variances for covariances}
+\item{avar.cor}{the estimated asymptotic variances for correlations}
+}
+\references{
+%% ~put references to the literature/web site here ~
+Barndorff-Nielsen, O. E. and Shephard, N. (2004)
+ Econometric analysis of realized covariation: High frequency based covariance, regression, and correlation in financial economics,
+ \emph{Econometrica}, \bold{72}, no. 3, 885--925.
+
+Bibinger, M. (2011)
+ Asymptotics of Asynchronicity,
+ technical report, Available at arXiv:1106.4222.
+
+Hayashi, T., Jacod, J. and Yoshida, N. (2011)
+ Irregular sampling and central limit theorems for power variations: The continuous case,
+ \emph{Annales de l'Institut Henri Poincare - Probabilites et Statistiques}, \bold{47}, no. 4, 1197--1218.
+
+Hayashi, T. and Yoshida, N. (2011)
+ Nonsynchronous covariation process and limit theorems,
+ \emph{Stochastic processes and their applications}, \bold{121}, 2416--2454.
+}
+\author{
+%% ~~who you are~~
+The YUIMA Project Team
+}
+\note{
+%% ~~further notes~~
+Construction of the kernel-type estimators for the off-diagonal components follows the pseudo-aggregation approach described in Bibinger (2011).
+}
+
+%% ~Make other sections like Warning with \section{Warning }{....} ~
+
+\seealso{
+%% ~~objects to See Also as \code{\link{help}}, ~~~
+\code{\link{setData}}, \code{\link{cce}}
+}
+\examples{
+## Set a model
+diff.coef.1 <- function(t, x1 = 0, x2 = 0) sqrt(1+t)
+diff.coef.2 <- function(t, x1 = 0, x2 = 0) sqrt(1+t^2)
+cor.rho <- function(t, x1 = 0, x2 = 0) sqrt(1/2)
+diff.coef.matrix <- matrix(c("diff.coef.1(t,x1,x2)",
+"diff.coef.2(t,x1,x2) * cor.rho(t,x1,x2)",
+"", "diff.coef.2(t,x1,x2) * sqrt(1-cor.rho(t,x1,x2)^2)"), 2, 2)
+cor.mod <- setModel(drift = c("", ""),
+diffusion = diff.coef.matrix,solve.variable = c("x1", "x2"))
+
+set.seed(111)
+
+## We use the function poisson.random.sampling to obtain observations by Poisson sampling.
+yuima.samp <- setSampling(Terminal = 1, n = 1200)
+yuima <- setYuima(model = cor.mod, sampling = yuima.samp)
+yuima <- simulate(yuima)
+psample<- poisson.random.sampling(yuima, rate = c(0.2,0.3), n = 1000)
+
+## Constructing a 95\% confidence interval for the quadratic covariation from psample
+result <- hyavar(psample)
+thetahat <- result$covmat[1,2] # estimate of the quadratic covariation
+se <- sqrt(result$avar.cov[1,2]) # estimated standard error
+c(lower = thetahat + qnorm(0.025) * se, upper = thetahat + qnorm(0.975) * se)
+
+## True value of the quadratic covariation.
+cc.theta <- function(T, sigma1, sigma2, rho) {
+ tmp <- function(t) return(sigma1(t) * sigma2(t) * rho(t))
+ integrate(tmp, 0, T)
+}
+
+cc.theta(T = 1, diff.coef.1, diff.coef.2, cor.rho)$value # contained in the constructed confidence interval
+
+# Example. A stochastic differential equation with nonlinear feedback.
+
+## Set a model
+drift.coef.1 <- function(x1,x2) x2
+drift.coef.2 <- function(x1,x2) -x1
+drift.coef.vector <- c("drift.coef.1","drift.coef.2")
+diff.coef.1 <- function(t,x1,x2) sqrt(abs(x1))*sqrt(1+t)
+diff.coef.2 <- function(t,x1,x2) sqrt(abs(x2))
+cor.rho <- function(t,x1,x2) 1/(1+x1^2)
+diff.coef.matrix <- matrix(c("diff.coef.1(t,x1,x2)",
+"diff.coef.2(t,x1,x2) * cor.rho(t,x1,x2)","",
+"diff.coef.2(t,x1,x2) * sqrt(1-cor.rho(t,x1,x2)^2)"), 2, 2)
+cor.mod <- setModel(drift = drift.coef.vector,
+ diffusion = diff.coef.matrix,solve.variable = c("x1", "x2"))
+
+## Generate a path of the process
+set.seed(111)
+yuima.samp <- setSampling(Terminal = 1, n = 10000)
+yuima <- setYuima(model = cor.mod, sampling = yuima.samp)
+yuima <- simulate(yuima, xinit=c(2,3))
+plot(yuima)
+
+
+## The "true" values of the covariance and correlation.
+result.full <- cce(yuima)
+(cov.true <- result.full$covmat[1,2]) # covariance
+(cor.true <- result.full$cormat[1,2]) # correlation
+
+## We use the function poisson.random.sampling to generate nonsynchronous
+## observations by Poisson sampling.
+psample<- poisson.random.sampling(yuima, rate = c(0.2,0.3), n = 3000)
+
+## Constructing 95\% confidence intervals for the covariation from psample
+result <- hyavar(psample)
+cov.est <- result$covmat[1,2] # estimate of covariance
+cor.est <- result$cormat[1,2] # estimate of correlation
+se.cov <- sqrt(result$avar.cov[1,2]) # estimated standard error of covariance
+se.cor <- sqrt(result$avar.cor[1,2]) # estimated standard error of correlation
+
+## 95\% confidence interval for covariance
+c(lower = cov.est + qnorm(0.025) * se.cov, upper = cov.est + qnorm(0.975) * se.cov) # contains cov.true
+
+## 95\% confidence interval for correlation
+c(lower = cor.est + qnorm(0.025) * se.cor, upper = cor.est + qnorm(0.975) * se.cor) # contains cor.true
+
+## We can also use the Fisher z transformation to construct a 95\% confidence interval for correlation
+## It often improves the finite sample behavior of the asymptotic theory (cf. Section 4.2.3 of Barndorff-Nielsen and Shephard (2004))
+z <- atanh(cor.est) # the Fisher z transformation of the estimated correlation
+se.z <- se.cor/(1 - cor.est^2) # standard error for z (calculated by the delta method)
+## 95\% confidence interval for correlation via the Fisher z transformation
+c(lower = tanh(z + qnorm(0.025) * se.z), upper = tanh(z + qnorm(0.975) * se.z))
+
+}
+% Add one or more standard keywords, see file 'KEYWORDS' in the
+% R documentation directory.
+\keyword{ts}
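As the \details section notes, the diagonal entries of avar.cov are two thirds of the realized quarticity. For equidistant data from a Brownian motion with constant volatility sigma, this quantity should be close to 2*sigma^4*T^2/n, the variance of the realized variance, which gives a quick plain-R sanity check (the simulation parameters below are arbitrary):

set.seed(1)
n <- 10000; T <- 1; sigma <- 2
dX <- rnorm(n, sd = sigma * sqrt(T/n))   # increments of sigma times a Brownian motion
(2/3) * sum(dX^4)                        # the estimator used for avar.cov[i, i]
2 * sigma^4 * T^2 / n                    # 0.0032; the estimate above should be close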
Modified: pkg/yuima/src/cce_functions.c
===================================================================
--- pkg/yuima/src/cce_functions.c 2015-04-21 10:34:54 UTC (rev 374)
+++ pkg/yuima/src/cce_functions.c 2015-04-21 10:51:29 UTC (rev 375)
@@ -285,41 +285,41 @@
void HYcrosscov(int *gridL, int *xL, int *yL, double *grid, double *xtime,
double *ytime, double *tmptime, double *dX, double *dY, double *value)
{
- int i, j, I, J;
-
- for(i = 0; i < *gridL; i++){
+ int i, j, I, J;
- for(j = 0; j < *yL; j++){
- tmptime[j] = ytime[j] + grid[i];
- }
-
- I = 0;
- J = 0;
-
- /* Checking Starting Point */
- while((I < (*xL-1)) && (J < (*yL-1))){
- if(xtime[I] >= tmptime[J + 1]){
- J++;
- }else if(xtime[I + 1] <= tmptime[J]){
- I++;
- }else{
- break;
+ for(i = 0; i < *gridL; i++){
+
+ for(j = 0; j < *yL; j++){
+ tmptime[j] = round(ytime[j] + grid[i]);
}
- }
-
- /* Main Component */
- while((I < (*xL-1)) && (J < (*yL-1))) {
- value[i] += dX[I] * dY[J];
- if(xtime[I + 1] > tmptime[J + 1]){
- J++;
- }else if(xtime[I + 1] < tmptime[J + 1]){
- I++;
- }else{
- I++;
- J++;
+
+ I = 0;
+ J = 0;
+
+ /* Checking Starting Point */
+ while((I < (*xL-1)) && (J < (*yL-1))){
+ if(round(xtime[I]) >= tmptime[J + 1]){
+ J++;
+ }else if(round(xtime[I + 1]) <= tmptime[J]){
+ I++;
+ }else{
+ break;
+ }
}
+
+ /* Main Component */
+ while((I < (*xL-1)) && (J < (*yL-1))) {
+ value[i] += dX[I] * dY[J];
+ if(round(xtime[I + 1]) > tmptime[J + 1]){
+ J++;
+ }else if(round(xtime[I + 1]) < tmptime[J + 1]){
+ I++;
+ }else{
+ I++;
+ J++;
+ }
+ }
}
- }
}
@@ -333,7 +333,7 @@
for(i = 0; i < *gridL; i++){
for(j = 0; j < *yL; j++){
- tmptime[j] = ytime[j] + grid[i];
+ tmptime[j] = round(ytime[j] + grid[i]);
}
I = 0;
@@ -341,9 +341,9 @@
/* Checking Starting Point */
while((I < (*xL-1)) && (J < (*yL-1))){
- if(xtime[I] >= tmptime[J + 1]){
+ if(round(xtime[I]) >= tmptime[J + 1]){
J++;
- }else if(xtime[I + 1] <= tmptime[J]){
+ }else if(round(xtime[I + 1]) <= tmptime[J]){
I++;
}else{
break;
@@ -353,9 +353,9 @@
/* Main Component */
while((I < (*xL-1)) && (J < (*yL-1))) {
value[i] += dX[I] * dY[J];
- if(xtime[I + 1] > tmptime[J + 1]){
+ if(round(xtime[I + 1]) > tmptime[J + 1]){
J++;
- }else if(xtime[I + 1] < tmptime[J + 1]){
+ }else if(round(xtime[I + 1]) < tmptime[J + 1]){
I++;
}else{
I++;
@@ -373,4 +373,154 @@
value[i] = B/sqrt((A + s)*(C + s));
}
-}
\ No newline at end of file
+}
+
+
+void hyavar(double *xtime, double *ytime, int *xlength, int *ylength,
+ double *x, double *y, int *N, double *h, double *result)
+{
+ int I, mu0, w0, i, L, m;
+ double dSigma11, dSigma12, dSigma22;
+ int mu[*N], w[*N], q[*N], r[*N];
+ double rtimes[*N], Sigma11[*N], Sigma12[*N], Sigma22[*N], H1[*N], H2[*N], H12[*N], H3[*N], dS2[*N], dxdy[*N];
+
+ Sigma11[0] = 0; Sigma12[0] = 0; Sigma22[0] = 0;
+
+ if(xtime[0] < ytime[0]){
+ rtimes[0] = ytime[0];
+ I = 0;
+ while(xtime[I] < ytime[0]){
+ I++;
+ if(I >= (*xlength-1)){
+ break;
+ }
+ }
+ mu0 = I;
+ if(ytime[0] < xtime[mu0]){
+ q[0] = mu0;
+ }else{
+ q[0] = mu0 + 1;
+ }
+ r[0] = 1;
+ }else if(xtime[0] > ytime[0]){
+ rtimes[0] = xtime[0];
+ I = 0;
+ while(xtime[0] > ytime[I]){
+ I++;
+ if(I >= (*ylength-1)){
+ break;
+ }
+ }
+ w0 = I;
+ q[0] = 1;
+ if(xtime[0] < ytime[w0]){
+ r[0] = w0;
+ }else{
+ r[0] = w0 + 1;
+ }
+ }else{
+ rtimes[0] = xtime[0];
+ q[0] = 1;
+ r[0] = 1;
+ }
+
+ for(i = 0; (q[i] < (*xlength - 1)) && (r[i] < (*ylength - 1)); i++) {
+ H1[i] = 0; H2[i] = 0; H12[i] = 0; H3[i] = 0;
+ if(xtime[q[i]] < ytime[r[i]]){
+ /*if(xtime[*xlength - 1] < ytime[r[i]]){*/
+ if(xtime[*xlength - 2] < ytime[r[i]]){
+ break;
+ }
+ Sigma22[i] += pow(y[r[i]] - y[r[i] - 1], 2);
+ H2[i] = pow(ytime[r[i]] - ytime[r[i] - 1], 2);
+ I = q[i];
+ while(xtime[I] < ytime[r[i]]){
+ Sigma11[i] += pow(x[I] - x[I - 1], 2);
+ H1[i] += pow(xtime[I] - xtime[I - 1], 2);
+ I++;
+ }
+ mu[i] = I;
+ w[i] = r[i];
+ r[i+1] = r[i] + 1;
+ if(ytime[r[i]] < xtime[mu[i]]){
+ q[i+1] = mu[i];
+ }else{
+ q[i+1] = mu[i] + 1;
+ Sigma11[i] += pow(x[mu[i]] - x[mu[i] - 1], 2);
+ H1[i] += pow(xtime[mu[i]] - xtime[mu[i] - 1], 2);
+ }
+ H12[i] = H1[i] - pow(xtime[q[i]] - xtime[q[i] - 1], 2) + pow(xtime[q[i]] - rtimes[i], 2);
+ }else if(xtime[q[i]] > ytime[r[i]]){
+ /*if(xtime[q[i]] > ytime[*ylength - 1]){*/
+ if(xtime[q[i]] > ytime[*ylength - 2]){
+ break;
+ }
+ Sigma11[i] += pow(x[q[i]] - x[q[i] - 1], 2);
+ H1[i] = pow(xtime[q[i]] - xtime[q[i] - 1], 2);
+ mu[i] = q[i];
+ I = r[i];
+ while(xtime[q[i]] > ytime[I]){
+ Sigma22[i] += pow(y[I] - y[I - 1], 2);
+ H2[i] += pow(ytime[I] - ytime[I - 1], 2);
+ I++;
+ }
+ w[i] = I;
+ q[i+1] = q[i] + 1;
+ if(xtime[q[i]] < ytime[w[i]]){
+ r[i+1] = w[i];
+ }else{
+ r[i+1] = w[i] + 1;
+ Sigma22[i] += pow(y[w[i]] - y[w[i] - 1], 2);
+ H2[i] += pow(ytime[w[i]] - ytime[w[i] - 1], 2);
+ }
+ H12[i] = H2[i] - pow(ytime[r[i]] - ytime[r[i] - 1], 2) + pow(ytime[r[i]] - rtimes[i], 2);
+ }else{
+ mu[i] = q[i];
+ w[i] = r[i];
+ q[i+1] = q[i] + 1;
+ r[i+1] = r[i] + 1;
+ Sigma11[i] += pow(x[q[i]] - x[q[i] - 1], 2);
+ Sigma22[i] += pow(y[r[i]] - y[r[i] - 1], 2);
+ H1[i] = pow(xtime[q[i]] - xtime[q[i] - 1], 2);
+ H2[i] = pow(ytime[r[i]] - ytime[r[i] - 1], 2);
+ H12[i] = pow(xtime[q[i]] - rtimes[i], 2);
+ }
+
+ dxdy[i] = (x[mu[i]] - x[q[i] - 1]) * (y[w[i]] - y[r[i] - 1]);
+ Sigma12[i] += (x[mu[i]] - x[q[i] - 1]) * (y[w[i]] - y[r[i] - 1]);
+ H3[i] = (xtime[mu[i]] - xtime[q[i] - 1]) * (ytime[w[i]] - ytime[r[i] - 1]);
+ rtimes[i + 1] = fmin(xtime[mu[i]], ytime[w[i]]);
+ dS2[i] = pow(rtimes[i + 1] - rtimes[i], 2);
+
+ Sigma11[i + 1] = Sigma11[i];
+ Sigma22[i + 1] = Sigma22[i];
+ Sigma12[i + 1] = Sigma12[i];
+
+ }
+
+ mu[i] = q[i];
+ w[i] = r[i];
+ dxdy[i] = (x[mu[i]] - x[q[i] - 1]) * (y[w[i]] - y[r[i] - 1]);
+
+ L = 0;
+
+ for(m = 0; m < (i - 1); m++){
+
+ while(rtimes[L + 1] <= (rtimes[m + 1] - *h)){
+ L++;
+ }
+
+ dSigma11 = (Sigma11[m] - Sigma11[L])/(*h);
+ dSigma12 = (Sigma12[m] - Sigma12[L])/(*h);
+ dSigma22 = (Sigma22[m] - Sigma22[L])/(*h);
+
+ /*result[0] += dxdy[m] * (dxdy[m] + 2 * dxdy[m + 1]) - 2 * dSigma12 * dSigma12 * dS2[m];*/
+ result[0] += dSigma11 * dSigma22 * H3[m] + dSigma12 * dSigma12 * (H1[m] + H2[m] - H12[m]);
+ result[1] += 2 * dSigma11 * dSigma12 * H1[m];
+ result[2] += 2 * dSigma12 * dSigma22 * H2[m];
+ result[3] += 2 * dSigma12 * dSigma12 * H12[m];
+
+ }
+
+}
+