[Pomp-commits] r54 - in pkg: . R data inst inst/doc man tests

noreply at r-forge.r-project.org
Fri Jan 16 23:30:38 CET 2009


Author: kingaa
Date: 2009-01-16 23:30:37 +0100 (Fri, 16 Jan 2009)
New Revision: 54

Added:
   pkg/R/mvnorm.R
   pkg/R/nlf-funcs.R
   pkg/R/nlf-guts.R
   pkg/R/nlf-lql.R
   pkg/R/nlf-objfun.R
   pkg/R/nlf.R
   pkg/man/nlf.Rd
   pkg/tests/ou2-nlf.R
   pkg/tests/ou2-nlf.Rout
Modified:
   pkg/NAMESPACE
   pkg/data/ou2.rda
   pkg/inst/ChangeLog
   pkg/inst/doc/compiled_code_in_pomp.pdf
   pkg/inst/doc/intro_to_pomp.pdf
Log:
add functionality for parameter estimation via nonlinear forecasting (NLF), a method due to Stephen P. Ellner and Bruce E. Kendall

Modified: pkg/NAMESPACE
===================================================================
--- pkg/NAMESPACE	2009-01-16 17:03:27 UTC (rev 53)
+++ pkg/NAMESPACE	2009-01-16 22:30:37 UTC (rev 54)
@@ -39,5 +39,6 @@
        sobol,
        bspline.basis,
        periodic.bspline.basis,
-       compare.mif
+       compare.mif,
+       nlf
        )

Added: pkg/R/mvnorm.R
===================================================================
--- pkg/R/mvnorm.R	                        (rev 0)
+++ pkg/R/mvnorm.R	2009-01-16 22:30:37 UTC (rev 54)
@@ -0,0 +1,71 @@
+## multivariate normal density
+## stolen from package 'mvtnorm' version 0.9-0
+
+rmvnorm <- function (n, mean = rep(0, nrow(sigma)), sigma = diag(length(mean)),
+                     method=c("eigen", "svd", "chol"))
+{    
+  if (nrow(sigma) != ncol(sigma)) {
+    stop("sigma must be a square matrix")
+  }
+  if (length(mean) != nrow(sigma)) {
+    stop("mean and sigma have non-conforming size")
+  }
+  sigma1 <- sigma
+  dimnames(sigma1) <- NULL
+  if(!isTRUE(all.equal(sigma1, t(sigma1)))){
+    warning("sigma is numerically not symmetric")
+  }
+
+  method <- match.arg(method)
+  
+  if(method == "eigen"){
+    ev <- eigen(sigma, symmetric = TRUE)
+    if (!all(ev$values >= -sqrt(.Machine$double.eps) * abs(ev$values[1]))){
+      warning("sigma is numerically not positive definite")
+    }    
+    retval <- ev$vectors %*%  diag(sqrt(ev$values), 
+                                   length(ev$values)) %*% t(ev$vectors)
+  }
+  else if(method == "svd"){
+    sigsvd <- svd(sigma)
+    if (!all(sigsvd$d >= -sqrt(.Machine$double.eps) * abs(sigsvd$d[1]))){
+      warning("sigma is numerically not positive definite")
+    }    
+    retval <- t(sigsvd$v %*% (t(sigsvd$u) * sqrt(sigsvd$d)))
+  }    
+  else if(method == "chol"){
+    retval <- chol(sigma, pivot = TRUE)
+    o <- order(attr(retval, "pivot"))
+    retval <- retval[,o]
+  }
+  
+  retval <- matrix(rnorm(n * ncol(sigma)), nrow = n) %*%  retval
+  retval <- sweep(retval, 2, mean, "+")
+  retval
+}
+
+dmvnorm <- function (x, mean, sigma, log = FALSE) {
+  if (is.vector(x)) {
+    x <- matrix(x, ncol = length(x))
+  }
+  if (missing(mean)) {
+    mean <- rep(0, length = ncol(x))
+  }
+  if (missing(sigma)) {
+    sigma <- diag(ncol(x))
+  }
+  if (NCOL(x) != NCOL(sigma)) {
+    stop("x and sigma have non-conforming size")
+  }
+  if (NROW(sigma) != NCOL(sigma)) {
+    stop("sigma must be a square matrix")
+  }
+  if (length(mean) != NROW(sigma)) {
+    stop("mean and sigma have non-conforming size")
+  }
+  distval <- mahalanobis(x, center = mean, cov = sigma)
+  logdet <- sum(log(eigen(sigma, symmetric=TRUE, only.values=TRUE)$values))
+  logretval <- -(ncol(x)*log(2*pi) + logdet + distval)/2
+  if(log) return(logretval)
+  exp(logretval)
+}
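
A quick interactive check of the two functions above (a minimal sketch, assuming the definitions have been sourced; the numbers are illustrative only):

    ## draw 1000 samples from a bivariate normal and evaluate their log density
    sigma <- matrix(c(4,2,2,3),nrow=2)   # covariance matrix
    mu <- c(1,2)                         # mean vector
    x <- rmvnorm(n=1000,mean=mu,sigma=sigma,method="eigen")
    colMeans(x)                          # should be close to mu
    ld <- dmvnorm(x,mean=mu,sigma=sigma,log=TRUE)
    sum(ld)                              # joint log density of the sample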

Added: pkg/R/nlf-funcs.R
===================================================================
--- pkg/R/nlf-funcs.R	                        (rev 0)
+++ pkg/R/nlf-funcs.R	2009-01-16 22:30:37 UTC (rev 54)
@@ -0,0 +1,90 @@
+make.lags.NLF <- function(x, lags, cov = NULL, nobs = 10000) {
+  x <- as.matrix(x)
+  xd <- ncol(x)
+  m <- length(lags)
+  N <- nrow(x)-max(lags)
+  n <- min(nobs,N)
+  if (N > nobs)
+    warning("series length truncated to 'nobs' in 'make.lags.NLF'")
+  start <- max(lags)+1
+  temp <- matrix(0,ncol=xd*length(lags),nrow=n)
+  for (k in 1:length(lags)) {
+    a <- start-lags[k]
+    b <- a + n - 1
+    temp[,(1:xd)+(k-1)*xd] <- x[(a:b),]
+  }
+  a <- start
+  b <- a + n - 1
+  if(xd == 1)
+    lab <- format(paste("lag", rep(lags, rep(xd, length(lags))), sep = ""))
+  else
+    lab <- format(paste( rep(1:xd, length(lags)), "lag", rep(lags,rep(xd, length(lags))), sep = ""))
+  dimnames(temp) <- list(NULL,lab)
+  skip <- NA
+  if (!is.null(cov)) {
+    cov <- as.matrix(cov)
+    cov <- cov[a:b,,drop=FALSE]
+    ##temp <- cbind(temp, cov[a:b,  ])
+    ##cat(a, b)
+    skip <- (1:ncol(cov))+m*xd
+  }
+  if(xd == 1)
+    y <- c(x[a:b])
+  else
+    y <- x[a:b,]
+  list(
+       x=temp,
+       y=y,
+       nvar=m,
+       cov=cov,
+       lags=lags,
+       skip=skip,
+       start=a,
+       end=b
+       )
+}
+
+make.rbfbasis <- function (X, knots, fac) {
+  X1 <- X-knots[1]
+  nknots <- length(knots)
+  if (nknots>1) {
+    for (j in 2:nknots) {
+      X1 <- cbind(X1,X-knots[j])
+    }
+  }
+  exp(fac*(X1^2))
+}	 
+
+## GAUSS trimr function: trims n1 rows from the start, n2 rows from the end of a matrix or vector 
+trimr <- function (a,n1,n2) {
+  da <- dim(a)
+  if (is.null(da)) {
+    a[(n1+1):(length(a)-n2)]
+  } else {
+    a[(n1+1):(da[1]-n2),]
+  }
+}
+
+Newey.West <- function (x, y, maxlag) {
+  w <- 1-(1:maxlag)/(maxlag+1)
+  out <- mean(x*y,na.rm=TRUE)
+  for (i in 1:maxlag) {
+    out <- out+w[i]*mean(trimr(x,i,0)*trimr(y,0,i),na.rm=TRUE)+w[i]*mean(trimr(y,i,0)*trimr(x,0,i),na.rm=TRUE)
+  }
+  out
+}
+
+
+make.tensorbasis.NLF <- function (A, B) {
+  if (nrow(A)!=nrow(B)) stop("incompatible matrices in 'make.tensorbasis.NLF'")
+  ncol.A <- ncol(A)
+  ncol.B <- ncol(B)
+  Tmat <- matrix(0,nrow(A),ncol.A*ncol.B)
+  for (i in 1:ncol.A) {
+    start <- (i-1)*ncol.B
+    for (j in 1:ncol.B) {
+      Tmat[,start+j] <- A[,i]*B[,j]
+    }
+  }
+  Tmat
+}
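
A small sketch of how these helpers fit together, using a made-up scalar series (illustrative only, not taken from the package tests; assumes the functions above have been sourced):

    x1 <- sin(2*pi*(1:200)/20)+rnorm(200,sd=0.1)     # hypothetical observation series
    emb <- make.lags.NLF(x1,lags=c(1,2))             # lag-embed at lags 1 and 2
    dim(emb$x)      # 198 x 2, columns "lag1" and "lag2"
    length(emb$y)   # 198 predictees, aligned with the lagged predictors
    knots <- min(x1)+seq(-0.1,1.1,length=4)*diff(range(x1))
    fac <- -1/(2*(0.3*diff(range(x1)))^2)
    B <- make.rbfbasis(emb$x,knots=knots,fac=fac)    # Gaussian radial basis functions
    dim(B)          # 198 x 8: one bump per knot and lagged column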

Added: pkg/R/nlf-guts.R
===================================================================
--- pkg/R/nlf-guts.R	                        (rev 0)
+++ pkg/R/nlf-guts.R	2009-01-16 22:30:37 UTC (rev 54)
@@ -0,0 +1,218 @@
+NLF.guts <- function (data.mat, data.times, model.mat, model.times, lags, period, tensor, 
+		      nrbf = 4, verbose = FALSE, plotfit = FALSE,
+                      bootstrap = FALSE, bootsamp = NULL) {
+
+  ## Version 1.0, 4 December 2007, S.P. Ellner and Bruce E. Kendall
+  ## Version 1.1, 19 June 2008, S.P. Ellner and A.A. King.
+
+  ## Peculiarities of the code
+  ## 1. No basis functions involving cross terms of state variables 
+  ## 2. Model and data time series are assumed to be matrices with the same number of 
+  ##    rows (= number of observation vars in data set = number of obs. vars. simulated by model)
+  ##    This is formally a requirement of pomp objects, and here it is absolutely required.
+  
+
+#####################################################################################
+  ## ARGUMENTS:
+  ## data.mat = matrix of data time series, nobs x ntimes.data 
+  ## model.mat = matrix of model time series, nobs x ntimes.sim 
+  ## lags = vector of lag times for forecasting y(t) = f(y(t-lag1),y(t-lag2),....)+error
+  ## nrbf = number of radial basis functions
+  ## verbose: logical, print stuff out as it runs?
+  ## period: period=NA means the model is nonseasonal. period=integer>0 is the period of
+  ##         seasonal forcing in 'real time' (the units of model.times). 
+  ## tensor: logical. if FALSE, the fitted model is a gam with time(mod period) as
+  ##         one of the predictors, i.e. a gam with time-varying intercept. 
+  ##         if TRUE, the fitted model is a gam with lagged state variables as
+  ##         predictors and time-periodic coefficients, constructed using tensor
+  ##         products of basis functions of state variables with basis functions of time. 
+  ##
+  ##       NOTE: periodic models are constructed so that the time variable on the
+  ##       right hand side corresponds to the observation time of the predictee,
+  ##       not of the predictors 
+  ##
+  ## 
+  ## VALUE: the NLF approximation to the log likelihood of the data series
+  ##        under the forecasting model based on model.mat. The approximation used
+  ##        here is a generalized additive model for each observation variable, conditional
+  ##        on lagged values of all observation variables, with multivariate normal errors.     
+  ##        The return from this function is the vector of log quasi-likelihood values at 
+  ##        the data points; this must be summed to get the log quasi-likelihood.
+  ##
+  ## IMPORTANT NOTE TO FUTURE PROGRAMMERS: It may appear at first glance that basis
+  ## functions for the data series, and other things related to the data series, could be
+  ## constructed once and for all rather than rebuilt on each call to this function. 
+  ## THIS IS NOT TRUE. The basis functions are constructed anew on each call using
+  ## information from the model-simulated time series, and this feature is important 
+  ## for reliable NLF parameter estimation because it rules out spurious good fits
+  ## that really are due to extrapolation far from the model-simulated time series.   
+#######################################################################################
+
+
+  FAILED <- -999999999
+  if (verbose) print("NLF multivariate version 1.0")
+
+  nvar <- nrow(data.mat)
+  multivar <- (nvar>1)
+
+  ## do a lagged embedding for observation variable 1 
+  data.ts <- data.mat[1,]
+  model.ts <- model.mat[1,]
+
+  ## Set up the RBF knots
+  xm <- diff(range(model.ts))
+  rbf.knots <- min(model.ts)+seq(-0.1,1.1,length=nrbf)*xm
+  s <- 0.3*xm
+  fac <- -1/(2*s^2)
+  if (!is.finite(fac)) return(FAILED)
+  if (fac==0) return(FAILED)
+
+  seas <- (!is.na(period)&&abs(period)>0)
+
+  if (seas) {
+    seas.sim <- model.times%%abs(period)
+    seas.data <- data.times%%abs(period)
+  } else {
+    seas.sim <- NULL
+    seas.data <- NULL
+  }
+
+  ## Lag the data and set up the predicted values & seasonal indices
+  Lags.model <- make.lags.NLF(model.ts,lags=lags,cov=seas.sim)
+  Lags.data <- make.lags.NLF(data.ts,lags=lags,cov=seas.data)
+
+  if (bootstrap) {
+    Lags.data$x <- Lags.data$x[bootsamp,]
+    Lags.data$y <- Lags.data$y[bootsamp]
+    if (seas) 
+      Lags.data$cov <- Lags.data$cov[bootsamp]
+  }	
+
+  data.pred <- matrix(Lags.data$y,ncol=1)
+  model.pred <- matrix(Lags.model$y,ncol=1)
+
+  if (verbose) {
+    print("calculating ridge functions")
+    print(date())
+  }
+
+  rbfbasis.model <- make.rbfbasis(Lags.model$x,knots=rbf.knots,fac=fac)
+  rbfbasis.data <- make.rbfbasis(Lags.data$x,knots=rbf.knots,fac=fac)
+
+  if (seas) {
+    ## Set up the RBF knots
+    rbf.cov.knots <- seq(-0.1,1.1,length=nrbf)*period
+    s <- 0.3*period
+    fac.cov <- -1/(2*s^2)
+    if (!is.finite(fac.cov)) return(FAILED)
+    if (fac.cov==0) return(FAILED)
+    
+    rbfbasis.cov.model <- make.rbfbasis(Lags.model$cov,knots=rbf.cov.knots,fac=fac.cov)
+    rbfbasis.cov.data <- make.rbfbasis(Lags.data$cov,knots=rbf.cov.knots,fac=fac.cov)
+  }
+
+  if (multivar) {
+    for (jvar in 2:nvar) {
+      data.ts <- data.mat[jvar,]
+      model.ts <- model.mat[jvar,]
+
+      ## Set up the RBF knots
+      xm <- diff(range(model.ts))
+      rbf.knots <- min(model.ts)+seq(-0.1,1.1,length=nrbf)*xm
+      s <- 0.3*xm
+      fac <- -1/(2*s^2)
+      if (fac==0) return(FAILED)
+   
+      ## Lag the data and set up the predicted values & seasonal indices
+      Lags.model <- make.lags.NLF(model.ts,lags=lags,cov=seas.sim)
+      Lags.data <- make.lags.NLF(data.ts,lags=lags,cov=seas.data)
+  
+      if (bootstrap) {
+        Lags.data$x <- Lags.data$x[bootsamp,]
+        Lags.data$y <- Lags.data$y[bootsamp]
+      }
+
+      data.pred <- cbind(data.pred,Lags.data$y)
+      model.pred <- cbind(model.pred,Lags.model$y)
+      rbfbasis.model <- cbind(rbfbasis.model,make.rbfbasis(Lags.model$x,knots=rbf.knots,fac=fac))
+      rbfbasis.data <- cbind(rbfbasis.data,make.rbfbasis(Lags.data$x,knots=rbf.knots,fac=fac))
+ 
+      if (verbose) {
+        print("done with ridge functions")
+        print(date())
+      }
+    }
+  }
+
+  if (seas) {
+    if (tensor) {
+      ## make gam coefficients time-dependent
+      rbfbasis.model <- make.tensorbasis.NLF(rbfbasis.model,rbfbasis.cov.model)
+      rbfbasis.data <- make.tensorbasis.NLF(rbfbasis.data,rbfbasis.cov.data)
+    } else {
+      ## add time-varying intercept
+      rbfbasis.model <- cbind(rbfbasis.model,rbfbasis.cov.model)
+      rbfbasis.data <- cbind(rbfbasis.data,rbfbasis.cov.data)
+    }
+  }
+  
+  prediction.errors <- matrix(0,dim(data.pred)[1],nvar)
+  model.residuals <- matrix(0,dim(model.pred)[1],nvar)
+
+  for (jvar in 1:nvar) {
+    model.lm <- lm(model.pred[,jvar]~rbfbasis.model-1)
+    model.residuals[,jvar] <- residuals(model.lm)
+    ck <- as.vector(coef(model.lm))
+    if (verbose) {
+      print(ck)
+      print(summary(model.lm))
+    }  
+
+    fitted.data <- rbfbasis.data%*%matrix(ck,ncol=1)
+    prediction.errors[,jvar] <- data.pred[,jvar]-fitted.data
+    if (plotfit) {
+      par(mfrow=c(1,2))
+      plot(model.pred,fitted(model.lm),pch='.')
+      abline(0,1)
+      plot(data.pred,fitted.data)
+      abline(0,1)
+      par(mfrow=c(1,1))
+    }
+  }
+
+  if (nvar==1) {
+    sigma.model <- sd(model.residuals[,1])
+    LQL <- dnorm(prediction.errors[,1],mean=0,sd=sigma.model,log=TRUE)
+  } else {
+    sigma.model <- cov(model.residuals)
+    LQL <- dmvnorm(prediction.errors,sigma=sigma.model,log=TRUE) ## NOTE: This could be improved using GLS.
+  }  
+  
+  LQL
+}
+
+dmvnorm <- function (x, mean, sigma, log = FALSE) {
+    if (is.vector(x)) {
+        x <- matrix(x, ncol = length(x))
+    }
+    if (missing(mean)) {
+        mean <- rep(0, length = ncol(x))
+    }
+    if (missing(sigma)) {
+        sigma <- diag(ncol(x))
+    }
+    if (NCOL(x) != NCOL(sigma)) {
+        stop("x and sigma have non-conforming size")
+    }
+    if (NROW(sigma) != NCOL(sigma)) {
+        stop("sigma must be a square matrix")
+    }
+    if (length(mean) != NROW(sigma)) {
+        stop("mean and sigma have non-conforming size")
+    }
+    distval <- mahalanobis(x, center = mean, cov = sigma)
+    logdet <- sum(log(eigen(sigma, symmetric=TRUE, only.values=TRUE)$values))
+    logretval <- -(ncol(x)*log(2*pi) + logdet + distval)/2
+    if(log) return(logretval)
+    exp(logretval)
+}
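
The long comment block in NLF.guts above fixes the calling convention; here is a minimal, self-contained sketch with made-up model and data matrices (requires the helpers in nlf-funcs.R; the AR(1) series and the lags are assumptions for illustration, not from the package tests):

    set.seed(1)
    model.times <- 1:1000
    data.times <- 1:100
    model.mat <- matrix(arima.sim(list(ar=0.8),n=1000),nrow=1)  # long model simulation
    data.mat <- matrix(arima.sim(list(ar=0.8),n=100),nrow=1)    # short "data" series
    lql <- NLF.guts(data.mat,data.times,model.mat,model.times,
                    lags=c(1,2),period=NA,tensor=FALSE,nrbf=4)
    sum(lql)   # NLF log quasi-likelihood of the data under this model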

Added: pkg/R/nlf-lql.R
===================================================================
--- pkg/R/nlf-lql.R	                        (rev 0)
+++ pkg/R/nlf-lql.R	2009-01-16 22:30:37 UTC (rev 54)
@@ -0,0 +1,75 @@
+NLF.LQL <- function (params.fitted, object, params, par.index,
+                     times, lags, period, tensor, seed, nrbf = 4, verbose = FALSE,
+                     bootstrap = FALSE, bootsamp = NULL) {
+
+#>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
+# Computes the vector of log quasi-likelihood values at the observations  
+# Note that the log QL itself is returned, not the negative log QL,
+# so a large NEGATIVE value is used to flag bad parameters 
+#>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
+
+  FAILED <- -99999999999
+  params[par.index] <- params.fitted
+  
+  params <- as.matrix(params)
+  xstart <- init.state(object,params)
+
+  ## Need to extract number of state variables (nvar) from pomp object
+  ## Need to include simulation times in problem specification
+  ## Evaluates the NLF objective function given a POMP object.
+  ## Version 0.1, 3 Dec. 2007, Bruce E. Kendall & Stephen P. Ellner
+  ## Version 0.2, May 2008, Stephen P. Ellner  
+
+  data.ts <- data.array(object)
+
+  ## set the random seed (be very careful about this)
+  if (!is.null(seed)) {
+    if (!exists('.Random.seed',envir=.GlobalEnv)) runif(1)
+    save.seed <- get('.Random.seed',envir=.GlobalEnv)
+    set.seed(seed)
+  }
+
+  x <- try(
+           rprocess(object,xstart=xstart,times=times,params=params), # simulate the process model
+           silent=FALSE
+           )
+  if (inherits(x,'try-error'))
+    stop("'NLF.LQL' reports: error in user 'rprocess'")
+
+  if (!all(is.finite(x))) return(FAILED)
+
+  y <- try(
+           rmeasure(object,x=x[,,-1,drop=F],times=times[-1],params=params),
+           silent=FALSE
+           )
+  if (inherits(y,'try-error'))
+    stop("'NLF.LQL' reports: error in user 'rmeasure'")
+
+  if (!is.null(seed)) assign('.Random.seed',save.seed,envir=.GlobalEnv)  # restore the RNG state
+
+  ## Test whether the model time series is valid
+  model.ts <- array(dim=c(nrow(data.ts),length(times)-1),dimnames=list(rownames(data.ts),NULL))
+  if (!all(is.finite(y))) return(FAILED)
+
+  model.ts[,] <- y[,1,]
+  LQL <- try(
+             NLF.guts(
+                      data.mat=data.ts,
+                      data.times=time(object),
+                      model.mat=model.ts,
+                      model.times=times[-1],
+                      lags=lags,
+                      period=period, tensor=tensor, 
+                      nrbf=nrbf,
+                      verbose=FALSE,
+                      bootstrap=bootstrap,
+                      bootsamp=bootsamp,
+                      plotfit=FALSE
+                      ),
+             silent=FALSE
+             )
+  if (inherits(LQL,"try-error"))
+    stop("'NLF.LQL' reports: error in 'NLF.guts'")
+  LQL 
+ }
+
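
The seed handling above guarantees that every evaluation of the objective sees the same random-number stream and that the caller's RNG state is left untouched. A stand-alone sketch of that idiom (rnorm stands in for the calls to rprocess and rmeasure):

    fixed.seed.sim <- function (seed, n) {
      if (!exists('.Random.seed',envir=.GlobalEnv)) runif(1)    # make sure a seed exists
      save.seed <- get('.Random.seed',envir=.GlobalEnv)         # save the caller's RNG state
      set.seed(seed)
      y <- rnorm(n)                                             # stand-in for the simulators
      assign('.Random.seed',save.seed,envir=.GlobalEnv)         # restore the caller's RNG state
      y
    }
    identical(fixed.seed.sim(1066,5),fixed.seed.sim(1066,5))    # TRUE: same draws every call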

Added: pkg/R/nlf-objfun.R
===================================================================
--- pkg/R/nlf-objfun.R	                        (rev 0)
+++ pkg/R/nlf-objfun.R	2009-01-16 22:30:37 UTC (rev 54)
@@ -0,0 +1,22 @@
+nlf.objfun <- function (params.fitted, object, params, par.index,
+                        times, lags, period, tensor, seed, nrbf = 4,
+                        verbose = FALSE, bootstrap = FALSE, bootsamp = NULL) 
+{
+  -sum(
+       NLF.LQL(
+               params.fitted,
+               object,
+               params,
+               par.index,
+               times,
+               lags,
+               period, tensor,
+               seed,
+               nrbf,
+               verbose,
+               bootstrap,
+               bootsamp
+               ),
+       na.rm=T
+       )
+} 
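
The wrapper simply negates and sums the vector of log quasi-likelihood values so that a minimizer can be applied. The same pattern on a toy problem, not specific to pomp:

    set.seed(1)
    y <- rnorm(50,mean=2)
    toy.lql <- function (theta) dnorm(y,mean=theta,log=TRUE)    # plays the role of NLF.LQL
    toy.objfun <- function (theta) -sum(toy.lql(theta),na.rm=TRUE)
    optim(par=0,fn=toy.objfun,method="BFGS")$par                # close to 2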

Added: pkg/R/nlf.R
===================================================================
--- pkg/R/nlf.R	                        (rev 0)
+++ pkg/R/nlf.R	2009-01-16 22:30:37 UTC (rev 54)
@@ -0,0 +1,262 @@
+nlf <- function(object, start, est, lags, period = NA, tensor = FALSE, nconverge = 1000, nasymp = 1000, 
+                seed = 1066, nrbf = 4, method = 'subplex', skip.se = FALSE, verbose = FALSE, gr = NULL, 
+		bootstrap = FALSE, bootsamp = NULL, lql.frac = 0.1, se.par.frac = 0.1, eval.only = FALSE, ...) {
+  ## Fit a POMP object using NLF
+  ## v. 0.1, 3 Dec. 2007 
+  ## by Bruce Kendall & Steve Ellner
+  ## 
+  ## v. 0.2, 30 May 2008, by Steve Ellner 
+  ## Adds automatic Wald asymptotic standard errors and the 
+  ## capability for moving-blocks bootstrap standard errors.  
+  ## Quadratic regression near optimum used to select increments
+  ## for finite-difference approximations to gradient and Hessian 
+  ##
+  ## v 1.0, 19 June 2008 by Steve Ellner and Aaron King
+  ## adds capacity to fit models with periodically time-varying parameters of known period
+  ## and improves the compatibility with the standard for pomp objects
+
+
+  if (!is(object,'pomp'))
+    stop("'object' must be a 'pomp' object")
+
+  if (eval.only) est <- 1
+
+  if (is.character(est)) {
+    if (!all(est%in%names(start)))
+      stop(sQuote("nlf")," error: parameters named in ",sQuote("est")," must exist in ",sQuote("start"))
+
+    par.index <- which(names(start)%in%est)
+
+  } else if (is.numeric(est)) {
+    est <- as.integer(est)
+    if (any((est<1)|(est>length(start))))
+      stop(sQuote("nlf")," error: trying to estimate parameters that don't exist!")
+    par.index <- as.integer(est)
+  }
+
+  if (!(is.numeric(lql.frac)&&(lql.frac>0)&&(lql.frac<1)))
+    stop(sQuote("nlf")," error: ",sQuote("lql.frac")," must be in (0,1)")
+
+  if (!(is.numeric(se.par.frac)&&(se.par.frac>0)&&(se.par.frac<1)))
+    stop(sQuote("nlf")," error: ",sQuote("se.par.frac")," must be in (0,1)")
+
+  params <- start
+  guess <- params[par.index]
+
+  dt.tol <- 1e-3
+  times <- time(object,t0=TRUE)
+  dt <- diff(times[-1])
+  if (diff(range(dt))>dt.tol*mean(dt))
+    stop(sQuote("nlf")," requires evenly spaced sampling times")
+  dt <- dt[1]
+  
+  t0 <- times[1]
+                                        # Vector of times to output the simulation
+  times <- seq(
+               from=t0,
+               to=t0+(nconverge+nasymp)*dt,
+               by=dt
+               )
+
+
+  if (eval.only) {
+    opt <- optim(
+                 par=guess,
+                 fn=nlf.objfun,
+                 gr=gr,
+                 method="SANN",
+                 object=object,
+                 params=params,
+                 par.index=par.index, 
+                 times=times,
+                 lags=lags,
+                 period=period,
+                 tensor=tensor,
+                 seed=seed,
+                 nrbf=nrbf, 
+                 verbose=verbose,
+                 bootstrap=bootstrap,
+                 bootsamp=bootsamp,
+                 control=list(maxit=0)
+                 ) 
+    return(-opt$value) 
+  }
+
+  if (method == 'subplex') {
+    opt <- subplex(
+                   par=guess,
+                   fn=nlf.objfun,
+                   object=object,
+                   params=params,
+                   par.index=par.index, 
+                   times=times,
+                   lags=lags,
+                   period=period,
+                   tensor=tensor,
+                   seed=seed,
+                   nrbf=nrbf, 
+                   verbose=verbose,
+                   bootstrap=bootstrap,
+                   bootsamp=bootsamp,
+                   control=list(...)
+                   )
+  } else {
+    opt <- optim(
+                 par=guess,
+                 fn=nlf.objfun,
+                 gr=gr,
+                 method=method, 
+                 object=object,
+                 params=params,
+                 par.index=par.index, 
+                 times=times,
+                 lags=lags,
+                 period=period,
+                 tensor=tensor,
+                 seed=seed,
+                 nrbf=nrbf, 
+                 verbose=verbose,
+                 bootstrap=bootstrap,
+                 bootsamp=bootsamp,
+                 control=list(...)
+                 )  
+  }
+
+  opt$value <- -opt$value
+  params[par.index] <- opt$par
+  opt$params <- params
+  opt$par <- NULL
+
+  if (!skip.se) { ## compute estimated Variance-Covariance matrix of fitted parameters 
+    fitted <- params[par.index]
+    nfitted <- length(fitted)
+    Jhat <- matrix(0,nfitted,nfitted)
+    Ihat <- Jhat
+    f0 <- NLF.LQL(fitted,object=object, params=params, par.index=par.index, 
+                  times=times, lags=lags, period=period, tensor=tensor, seed=seed, nrbf=4, 
+                  verbose=FALSE)
+    F0 <- mean(f0,na.rm=T)
+
+    npts <- length(f0)
+    nlags <- round(5*npts^0.2) ## Number of lags to use in Newey-West covariance estimator 
+
+    ## find a good epsilon 
+    h <- se.par.frac
+    if (verbose) cat("h in NLF = ", h, "\n")
+    eps <- rep(h,nfitted)
+
+    for (i in 1:nfitted) {
+      Fvals <- rep(0,5)
+      Fvals[3] <- F0  
+      guess <- fitted
+      guess[i] <- fitted[i]-sqrt(2)*h*abs(fitted[i])  
+      Fvals[1] <- mean(NLF.LQL(guess,object=object, params=params, par.index=par.index, 
+                               times=times, lags=lags, period=period, tensor=tensor, seed=seed, nrbf=4, 
+                               verbose=FALSE),na.rm=T)
+      guess <- fitted
+      guess[i] <- fitted[i]-h*abs(fitted[i])
+      Fvals[2] <- mean(NLF.LQL(guess,object=object, params=params, par.index=par.index, 
+                               times=times, lags=lags, period=period, tensor=tensor, seed=seed, nrbf=4, 
+                               verbose=FALSE),na.rm=T); 
+      guess <- fitted
+      guess[i] <- fitted[i]+h*abs(fitted[i])
+      Fvals[4] <- mean(NLF.LQL(guess,object=object, params=params, par.index=par.index, 
+                               times=times, lags=lags, period=period, tensor=tensor, seed=seed, nrbf=4, 
+                               verbose=FALSE),na.rm=T)
+      guess <- fitted
+      guess[i] <- fitted[i]+sqrt(2)*h*abs(fitted[i])
+      Fvals[5] <- mean(NLF.LQL(guess,object=object, params=params, par.index=par.index, 
+                               times=times, lags=lags, period=period, tensor=tensor, seed=seed, nrbf=4, 
+                               verbose=FALSE),na.rm=T);
+      FAILED <- -999999
+      Fvals[Fvals < FAILED+10] <- NA
+      xvals <- c(sqrt(2),1,0,1,sqrt(2))*h*fitted[i]
+      c2 <- lm(Fvals~I(xvals^2))$coef[2]
+      eps[i] <- sqrt(abs(lql.frac/c2))
+    }
+
+    if (verbose) cat("epsilon in NLF =",t(eps), "\n")
+
+    Imat <- matrix(0,npts,nfitted)
+    for (i in 1:nfitted) {
+      guess.up <- fitted
+      guess.up[i] <- guess.up[i]+eps[i]
+      f.up <- NLF.LQL(guess.up,object=object, params=params, par.index=par.index, 
+                      times=times, lags=lags, period=period, tensor=tensor, seed=seed, nrbf=4, 
+                      verbose=FALSE)
+      F.up <- mean(f.up,na.rm=T)
+
+      f.up2 <- NLF.LQL(guess.up,object=object, params=params, par.index=par.index, 
+                       times=times, lags=lags, period=period, tensor=tensor, seed=seed, nrbf=4, 
+                       verbose=FALSE)
+
+      if (verbose) cat("Fitted param ", i, F.up, mean(f.up2,na.rm=T)," up in ",sQuote("nlf"),"\n")
+
+      guess.down <- fitted
+      guess.down[i] <- guess.down[i]-eps[i]
+      f.down <- NLF.LQL(guess.down,object=object, params=params, par.index=par.index, 
+                        times=times, lags=lags, period=period, tensor=tensor, seed=seed, nrbf=4, 
+                        verbose=FALSE)
+      F.down <- mean(f.down,na.rm=T)
+
+      if (verbose) cat("Fitted param ",i, F.down," down in ",sQuote("NLF"),"\n")
+
+      Jhat[i,i] <- (F.up + F.down-2*F0)/(eps[i]*eps[i])
+      Imat[,i] <- (f.up-f.down)/(2*eps[i])
+      Ihat[i,i] <- Newey.West(Imat[,i],Imat[,i],nlags)
+    }
+
+    for (i in 1:(nfitted-1)) {
+      for (j in (i+1):nfitted) {
+        guess.uu <- fitted
+        guess.uu[i] <- guess.uu[i]+eps[i]
+        guess.uu[j] <- guess.uu[j]+eps[j]
+        F.uu <- mean(NLF.LQL(guess.uu,object=object, params=params, par.index=par.index, 
+                             times=times, lags=lags, period=period, tensor=tensor, seed=seed, nrbf=4, 
+                             verbose=FALSE),na.rm=T)
+
+        guess.ud <- fitted 
+        guess.ud[i] <- guess.ud[i]+eps[i]
+        guess.ud[j] <- guess.ud[j]-eps[j]
+        F.ud <- mean(NLF.LQL(guess.ud,object=object, params=params, par.index=par.index, 
+                             times=times, lags=lags, period=period, tensor=tensor, seed=seed, nrbf=4, 
+                             verbose=FALSE),na.rm=T) 
+
+        guess.du <- fitted 
+        guess.du[i] <- guess.du[i]-eps[i]
+        guess.du[j] <- guess.du[j]+eps[j]
+        F.du <- mean(NLF.LQL(guess.du,object=object, params=params, par.index=par.index, 
+                             times=times, lags=lags, period=period, tensor=tensor, seed=seed, nrbf=4, 
+                             verbose=FALSE),na.rm=T) 
+
+        guess.dd <- fitted 
+        guess.dd[i] <- guess.dd[i]-eps[i]
+        guess.dd[j] <- guess.dd[j]-eps[j] 
+        F.dd <- mean(NLF.LQL(guess.dd,object=object, params=params, par.index=par.index, 
+                             times=times, lags=lags, period=period, tensor=tensor, seed=seed, nrbf=4, 
+                             verbose=FALSE),na.rm=T) 
+
+        dij <- (F.uu+F.dd)-(F.ud+F.du)
+        dij <- dij/(4*eps[i]*eps[j]) 
+        Jhat[i,j] <- dij
+        Jhat[j,i] <- dij
+        Ihat[i,j] <- Newey.West(Imat[,i],Imat[,j],nlags)
+        Ihat[j,i] <- Ihat[i,j]  
+      }
+    }
+    opt$Jhat <- Jhat
+    opt$Ihat <- Ihat
+    negJinv <- -solve(Jhat)
+    Qhat <- negJinv%*%Ihat%*%negJinv
+    opt$Qhat <- Qhat
+    opt$se <- sqrt(diag(Qhat))/sqrt(npts)
+    names(opt$se) <- names(start)[par.index]
+    opt$npts <- npts
+  }
+  
+  opt
+}
+
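
An illustrative call in the spirit of the new tests/ou2-nlf.R; the coef() accessor, the parameter names, the lags, and the optimizer settings below are assumptions made for the sake of the example, not the values used in the actual test:

    data(ou2)
    fit <- nlf(
               ou2,
               start=coef(ou2),
               est=c("alpha.2","alpha.3"),
               lags=c(4,6),
               seed=5669,
               skip.se=TRUE,
               method="Nelder-Mead",
               maxit=100
               )
    fit$value    # maximized log quasi-likelihood
    fit$params   # full parameter vector with the fitted values filled in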

Modified: pkg/data/ou2.rda
===================================================================
(Binary files differ)

Modified: pkg/inst/ChangeLog
===================================================================
--- pkg/inst/ChangeLog	2009-01-16 17:03:27 UTC (rev 53)
+++ pkg/inst/ChangeLog	2009-01-16 22:30:37 UTC (rev 54)
@@ -1,5 +1,12 @@
+2009-01-16  kingaa
+
+	* [r53] R/pfilter.R, man/pfilter.Rd: add 'verbose' option to
+	  'pfilter'
+
 2008-08-25  kingaa
 
+	* [r52] data/ou2.rda, inst/ChangeLog,
+	  inst/doc/compiled_code_in_pomp.pdf, inst/doc/intro_to_pomp.pdf:
 	* [r51] R/mif.R, man/mif.Rd, tests/ou2-mif.Rout.save: error trap in
 	  'mif': 'rw.sd' must have names
 	* [r50] DESCRIPTION, inst/NEWS, man/euler.Rd: better documentation
@@ -153,6 +160,5 @@
 
 2007-06-25  stefan7th
 
-	* [r1] README, ., R, man, www, www/index.php: Email in Readme
-	  changed
+	* [r1] ., R, man: Email in Readme changed
 

Modified: pkg/inst/doc/compiled_code_in_pomp.pdf
===================================================================
(Binary files differ)

Modified: pkg/inst/doc/intro_to_pomp.pdf
===================================================================
(Binary files differ)

Added: pkg/man/nlf.Rd
===================================================================
--- pkg/man/nlf.Rd	                        (rev 0)
+++ pkg/man/nlf.Rd	2009-01-16 22:30:37 UTC (rev 54)
@@ -0,0 +1,99 @@
+\name{nlf}
+\alias{nlf}
+\title{Fit Model to Data Using Nonlinear Forecasting (NLF)}
+\description{
+ Calls an optimizer to maximize the nonlinear forecasting (NLF) goodness of fit by
+ simulating data from a model, fitting a nonlinear autoregressive
+ model to the simulated time series (which may be multivariate), and
+ using the fitted model to predict some or all variables in the data time series.
+ NLF is an 'indirect inference' method using a quasi-likelihood as the objective
+ function.
+}
+\usage{
+ nlf(object, start, est, lags, period = NA, tensor = FALSE, nconverge=1000, nasymp=1000,
+     seed = 1066, nrbf = 4, method = 'subplex', skip.se = FALSE, verbose = FALSE, gr = NULL,
+     bootstrap=FALSE, bootsamp = NULL, lql.frac = 0.1, se.par.frac = 0.1, eval.only = FALSE, ...)
+}
+\arguments{
+  \item{object}{
+    A \code{pomp} object, with the data and model to fit to it.
+  }
+  \item{start}{
+    Named numeric vector with guessed parameters.
+  }
+  \item{est}{
+    Vector containing the names or indices of parameters to be estimated.
+  }
+  \item{lags}{
+    A vector specifying the lags to use when constructing the nonlinear autoregressive prediction model.
+    The first lag is the prediction interval.
+  }
+ \item{period}{
+   numeric; \code{period=NA} means the model is nonseasonal.
+   \code{period>0} is the period of seasonal forcing in 'real time' (the time units of the \code{pomp} object).
+ }
+ \item{tensor}{
+   logical.
+   If \code{FALSE}, the fitted model is a generalized additive model (gam) with time mod \code{period} as one of the predictors, i.e., a gam with time-varying intercept.
+   If \code{TRUE}, the fitted model is a gam with lagged state variables as predictors and time-periodic coefficients, constructed using tensor products of basis functions of the state variables with basis functions of time.
+   }
+  \item{nconverge}{Number of convergence timesteps to be discarded from the model simulation}
+  \item{nasymp}{Number of asymptotic timesteps to be recorded from the
+    model simulation.}
+  \item{seed}{
+    Integer specifying the random number seed to use.
+    When fitting, it is usually best to always run the simulations with the same sequence of random numbers, which is accomplished by setting \code{seed} to an integer.
+    If you want a truly random simulation, set \code{seed=NULL}.
+  }
+  \item{nrbf}{A scalar specifying the number of radial basis functions to be used at each lag.}
+  \item{method}{Optimization method.  Choices are \code{\link[subplex]{subplex}} and
+    any of the methods used by \code{\link{optim}}.}
+  \item{skip.se}{Logical; if \code{TRUE}, skip the computation of standard errors.}
+  \item{verbose}{Logical; if \code{TRUE}, the negative log quasilikelihood and parameter
+    values are printed at each iteration of the optimizer.}
+  \item{gr}{A function to compute the gradient of the objective, passed to \code{optim}; see \code{\link{optim}}.}
+  \item{bootstrap}{
+    Logical; if \code{TRUE}, the indices in \code{bootsamp} determine which
+    of the conditional likelihood values are used in computing the quasi-loglikelihood.
+  }
+  \item{bootsamp}{
+    Vector of integers; used to have the quasi-loglikelihood evaluated on a bootstrap re-sampling of the data set.
+  }
+  \item{lql.frac}{
+    Target fractional change in the log quasi-likelihood for the quadratic standard error estimate.
+  }
+  \item{se.par.frac}{
+    Initial parameter-change fraction for the quadratic standard error estimate.
+  }
+  \item{eval.only}{
+    Logical; if \code{TRUE}, no optimization is attempted and the quasi-loglikelihood is evaluated at the \code{start} parameters.
+  }
+  \item{\dots}{
+    Arguments that will be passed to \code{optim} in the \code{control} list.
+  }
+}
+\details{
+  This is functionally a wrapper for \code{nlf.objfun}, which does the statistical heavy lifting and should be consulted for details.
+}
+\value{
+  A list corresponding to the output of the optimizer, except that the full parameter vector is returned (not just the fitted elements), the log quasi-likelihood (not its negative) is reported, and, unless \code{skip.se=TRUE}, asymptotic Wald standard errors based on M-estimator theory are returned for each fitted parameter.
+}
+\references{
+ The following papers describe and motivate the NLF approach to model fitting:
+
+ Ellner, S. P., Bailey, B. A., Bobashev, G. V., Gallant, A. R., Grenfell, B. T. and Nychka D. W. (1998)
+ Noise and nonlinearity in measles epidemics: combining mechanistic and statistical approaches to population modeling.
+ \emph{American Naturalist} \bold{151}, 425--440.
+
[TRUNCATED]

To get the complete diff run:
    svnlook diff /svnroot/pomp -r 54

