[Gsdesign-commits] r355 - in pkg/gsDesign: . R man

noreply at r-forge.r-project.org
Mon Sep 2 13:58:38 CEST 2013


Author: keaven
Date: 2013-09-02 13:58:37 +0200 (Mon, 02 Sep 2013)
New Revision: 355

Modified:
   pkg/gsDesign/DESCRIPTION
   pkg/gsDesign/NAMESPACE
   pkg/gsDesign/R/gsMethods.R
   pkg/gsDesign/R/gsSurv.R
   pkg/gsDesign/R/gsUtilities.R
   pkg/gsDesign/R/ssrCP.R
   pkg/gsDesign/man/gsBoundSummary.Rd
   pkg/gsDesign/man/gsDesign.Rd
   pkg/gsDesign/man/nSurv.Rd
   pkg/gsDesign/man/nSurvival.Rd
   pkg/gsDesign/man/ssrCP.Rd
Log:
Added experimental ssrCP (conditional-power-based sample size re-estimation) and rewrote gsBoundSummary
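
A minimal usage sketch of the revised reporting functions (function names are
taken from the NAMESPACE changes below; the design and argument values are
illustrative assumptions, not part of this commit):

    library(gsDesign)
    x <- gsDesign(k = 3, test.type = 4, beta = 0.1)  # example 3-look design
    summary(x)               # one-paragraph text description (summary.gsDesign)
    b <- gsBoundSummary(x)   # data frame of boundary properties by analysis
    b                        # printed via print.gsBoundSummary
    xprint(b)                # LaTeX table via xtable (assumes the xtable package is available)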

Modified: pkg/gsDesign/DESCRIPTION
===================================================================
--- pkg/gsDesign/DESCRIPTION	2013-06-05 20:14:06 UTC (rev 354)
+++ pkg/gsDesign/DESCRIPTION	2013-09-02 11:58:37 UTC (rev 355)
@@ -1,5 +1,5 @@
 Package: gsDesign
-Version: 2.8-01
+Version: 2.8-2
 Title: Group Sequential Design
 Author: Keaven Anderson 
 Maintainer: Keaven Anderson <keaven_anderson at merck.com>

Modified: pkg/gsDesign/NAMESPACE
===================================================================
--- pkg/gsDesign/NAMESPACE	2013-06-05 20:14:06 UTC (rev 354)
+++ pkg/gsDesign/NAMESPACE	2013-09-02 11:58:37 UTC (rev 355)
@@ -4,9 +4,10 @@
 export(gsBound, gsBound1, gsDesign, gsProbability)
 export(ciBinomial, nBinomial, simBinomial, testBinomial, gsBinomialExact, varBinomial)
 export(binomialSPRT, plot.binomialSPRT, plot.gsBinomialExact, nSurvival, nEvents, nNormal)
-export(normalGrid,ssrCP)
+export(normalGrid,ssrCP,condPower,Power.ssrCP,z2Z,z2Fisher,z2NC,plot.ssrCP)
 export(plot.gsDesign, plot.gsProbability, print.gsProbability, print.gsDesign)
-export(print.nSurvival, xtable.gsDesign, gsBoundSummary, xtable.gsSurv)
+export(print.nSurvival, gsBoundSummary, xtable.gsSurv)
+export(summary.gsDesign, xprint, print.gsBoundSummary)
 export(sfPower, sfHSD, sfExponential, sfBetaDist, sfLDOF, sfLDPocock, sfPoints, sfLogistic, sfExtremeValue, sfExtremeValue2, sfLinear, sfTruncated)
 export(sfCauchy, sfNormal, sfTDist, spendingFunction)
 export(checkScalar, checkVector, checkRange, checkLengths, isInteger)

Modified: pkg/gsDesign/R/gsMethods.R
===================================================================
--- pkg/gsDesign/R/gsMethods.R	2013-06-05 20:14:06 UTC (rev 354)
+++ pkg/gsDesign/R/gsMethods.R	2013-09-02 11:58:37 UTC (rev 355)
@@ -3,23 +3,16 @@
 #
 #  Exported Functions:
 #                   
+#    summary.gsDesign
 #    print.gsDesign
 #    print.gsProbability
 #    print.nSurvival
+#    print.gsBoundSummary
 #    gsBoundSummary
-#    xtable.gsDesign
 #
 #  Hidden Functions:
 #
 #    gsLegendText
-#    gsPlotName
-#    plotgsZ
-#    plotBval
-#    plotreleffect
-#    plotgsCP
-#    sfplot
-#    plotASN
-#    plotgsPower
 #    sfprint
 #
 #  Author(s): Keaven Anderson, PhD.
@@ -28,6 +21,7 @@
 #
 #  R Version: 2.7.2
 #
+#  Rewrite of gsBoundSummary and addition of print.gsBoundSummary, xprint
 ##################################################################################
 
 ###
@@ -92,7 +86,56 @@
     print(y)
   invisible(x)
 }
-
+"summary.gsDesign" <- function(object, information=FALSE, ...){
+  out <- NULL
+  if (object$test.type == 1){
+    out<- paste(out,"One-sided group sequential design with ",sep="")
+  }else if (object$test.type == 2){
+    out <- paste(out, "Symmetric two-sided group sequential design with ",sep="")
+  }else{
+    out <- paste(out, "Asymmetric two-sided group sequential design with ",sep="")
+    if(object$test.type %in% c(2,3,5)) out <- paste(out, "binding futility bound, ",sep="")
+    else out <- paste(out, "non-binding futility bound, ",sep="")
+  }
+  out <- paste(out, object$k," analyses, ",sep="")
+  if (object$nFixSurv > 0)
+  {   out <- paste(out, "time-to-event outcome with sample size ", ceiling(object$nSurv),
+              " and ", ceiling(object$n.I[object$k]), " events required, ", sep="")
+  }else if ("gsSurv" %in% class(object)){
+      out <- paste(out, "time-to-event outcome with sample size ", 
+                   ceiling(object$eNC+object$eNE)[object$k], 
+                   " and ", ceiling(object$n.I[object$k]), " events required, ", sep="")
+  }else if(information){out <- paste(out," total information ",round(object$n.I[object$k],2),", ",sep="")
+  }else out <- paste(out, "sample size ", ceiling(object$n.I[object$k]), ", ",sep="")
+  out <- paste(out, 100 * (1 - object$beta), "% power, ", 100 * object$alpha, "% (1-sided) Type I error",sep="")
+  if(is.character(object$upper$sf)){
+    out <- paste(out, " and ",sep="")
+    if(object$upper$sf=="WT"){
+      out <- paste(out, "Wang-Tsiatis bounds with Delta=",object$upper$param,sep="")
+    }else if(object$upper$sf=="Pocock"){
+      out <- paste(out, "Pocock bounds")
+    }else out <- paste(out, "O'Brien-Fleming bounds",sep="")
+  }else{
+    out <- paste(out, ". ",sep="")
+    if(object$test.type < 3){
+      out <- paste(out, "Bounds derived using a ", object$upper$name," spending function",sep="")
+      if(length(object$upper$param)==1){
+        out <- paste(out, " with ", object$upper$parname,"=",object$upper$param,sep="")
+      }
+    }else{
+      out <- paste(out, "Efficacy bounds derived using a ", object$upper$name," spending function",sep="")
+      if(length(object$upper$param)==1){
+        out <- paste(out, " with ", object$upper$parname,"=",object$upper$param,sep="")
+      }
+      out <- paste(out, ". Futility bounds derived using a ", object$lower$name," spending function",sep="")
+      if(length(object$lower$param)==1){
+        out <- paste(out, " with ", object$lower$parname,"=",object$lower$param,sep="")
+      }
+    }
+  }
+  out <- paste(out,".",sep="")
+  return(out)
+}
 "print.gsDesign" <- function(x, ...)
 {    
     if (x$nFixSurv > 0)
@@ -282,6 +325,150 @@
    cat("Events required (computed): nEvents=", ceiling(x$nEvents), "\n",sep="")
 	invisible(x)
 }
+gsBoundSummary <- function(x, deltaname=NULL, logdelta=FALSE, Nname=NULL, digits=4, ddigits=2, tdigits=0, timename="Month", 
+                           prior=normalGrid(mu=x$delta/2, sigma=10/x$n.I[x$k]), 
+                           POS=FALSE, ratio=NULL,exclude=c("B-value","Spending","CP","CP H1","PP"), r=18,...){
+  k <- x$k
+  if (is.null(Nname)){
+    if(x$n.fix==1){
+      Nname <- "N/Fixed design N"
+    }else if ("gsSurv" %in% class(x) || x$nFixSurv > 0){
+      Nname <-"Events"
+    }else Nname="N"
+  }
+  # delta values corresponding to x$theta
+  delta <- x$delta0 + (x$delta1-x$delta0)*x$theta/x$delta
+  if (logdelta) delta <- exp(delta)
+  # ratio is only used for RR and HR calculations at boundaries
+  if("gsSurv" %in% class(x)){
+    ratio <- x$ratio
+  }else if (is.null(ratio)) ratio <- 1
+  # delta values at bounds
+  # note that RR and HR are treated specially
+  if (x$test.type > 1){
+    if (x$nFixSurv > 0 || "gsSurv" %in% class(x) ||Nname=="HR"){
+      deltafutility <- gsHR(x=x,i=1:x$k,z=x$lower$bound[1:x$k],ratio=ratio)
+    }else if (tolower(Nname) =="rr"){
+      deltafutility <- gsRR(x=x,i=1:x$k,z=x$lower$bound[1:x$k],ratio=ratio)
+    }else{
+      deltafutility <- gsDelta(x=x,i=1:x$k,z=x$lower$bound[1:x$k])
+      if (logdelta==TRUE || "gsSurv" %in% class(x)) deltafutility <- exp(deltafutility)
+    }
+  }
+  if (x$nFixSurv > 0 || "gsSurv" %in% class(x) ||Nname=="HR"){
+    deltaefficacy <- gsHR(x=x,i=1:x$k,z=x$upper$bound[1:x$k],ratio=ratio)
+  }else if (tolower(Nname) =="rr"){
+    deltaefficacy <- gsRR(x=x,i=1:x$k,z=x$upper$bound[1:x$k],ratio=ratio)
+  }else{
+    deltaefficacy <- gsDelta(x=x,i=1:x$k,z=x$upper$bound[1:x$k])
+    if (logdelta==TRUE || "gsSurv" %in% class(x)) deltaefficacy <- exp(deltaefficacy)
+  }
+  if(is.null(deltaname)){
+    if ("gsSurv" %in% class(x) || x$nFixSurv>0){deltaname="HR"}else{deltaname="delta"}
+  }
+  # create delta names for boundary crossing probabilities
+  deltanames <- paste("P{Cross} if ",deltaname,"=",round(delta,ddigits),sep="")
+  pframe <- NULL
+  for(i in 1:length(x$theta)) pframe <- rbind(pframe, data.frame("Value"=deltanames[i],"Efficacy"=cumsum(x$upper$prob[,i]),i=1:x$k))
+  if(x$test.type>1){
+    pframe2 <- NULL
+    for(i in 1:length(x$theta)) pframe2 <- rbind(pframe2, data.frame("Futility"=cumsum(x$lower$prob[,i])))
+    pframe <- data.frame(cbind("Value"=pframe[,1],pframe2,pframe[,-1]))
+  }
+  # conditional power at bound, theta=hat(theta)
+  cp <- data.frame(gsBoundCP(x, r=r))
+  # conditional power at bound, theta=theta[1]
+  cp1<- data.frame(gsBoundCP(x, theta=x$delta, r=r))
+  if (x$test.type>1){
+    colnames(cp) <- c("Futility","Efficacy")
+    colnames(cp1)<- c("Futility","Efficacy")
+  }else{
+    colnames(cp) <- "Efficacy"
+    colnames(cp1)<- "Efficacy"
+  }
+  cp <- data.frame(cp,"Value"="CP",i=1:(x$k-1))
+  cp1 <- data.frame(cp1,"Value"="CP H1",i=1:(x$k-1))
+  if ("PP" %in% exclude){
+    pp<-NULL
+  }else{
+    # predictive probability
+    Efficacy <- as.vector(1:(x$k-1))
+    for(i in 1:(x$k-1)) Efficacy[i] <- gsPP(x=x,i=i, zi=x$upper$bound[i], theta=prior$z, wgts=prior$wgts, r=r, total=TRUE)
+    if (x$test.type>1){
+      Futility <- Efficacy
+      for(i in 1:(x$k-1)) Futility[i] <- gsPP(x=x,i=i, zi=x$lower$bound[i], theta=prior$z, wgts=prior$wgts, r=r, total=TRUE)
+    }else Futility<- NULL
+    pp <- data.frame(cbind(Efficacy,Futility,i=1:(x$k-1)))
+    pp$Value="PP"
+  }
+  # start a frame for other statistics
+  # z at bounds
+  statframe <- data.frame("Value"="Z","Efficacy"=x$upper$bound,i=1:x$k)
+  if (x$test.type>1) statframe<-data.frame(cbind(statframe,"Futility"=x$lower$bound))
+  # add nominal p-values at each bound
+  tem <- data.frame("Value"="p (1-sided)","Efficacy"=pnorm(x$upper$bound,lower.tail=FALSE),i=1:x$k)
+  if(x$test.type==2)tem <- data.frame(cbind(tem,"Futility"=pnorm(x$lower$bound,lower.tail=TRUE)))
+  if(x$test.type>2)tem <- data.frame(cbind(tem,"Futility"=pnorm(x$lower$bound,lower.tail=FALSE)))
+  statframe <- rbind(statframe,tem)                 
+  # delta values at bounds                 
+  tem <- data.frame("Value"=paste(deltaname,"at bound"),"Efficacy"=deltaefficacy,i=1:x$k)
+  if(x$test.type>1) tem$Futility <- deltafutility
+  statframe <- rbind(statframe,tem)                 
+  
+  # spending
+  tem <- data.frame("Value"="Spending",i=1:x$k,"Efficacy"=x$upper$spend)
+  if (x$test.type>1) tem$Futility=x$lower$spend
+  statframe<-rbind(statframe,tem)
+  # B-values
+  tem <- data.frame("Value"="B-value",i=1:x$k,"Efficacy"=gsBValue(x=x,z=x$upper$bound,i=1:x$k))
+  if (x$test.type>1) tem$Futility<-gsBValue(x=x,i=1:x$k,z=x$lower$bound)
+  statframe<-rbind(statframe,tem)
+  # put it all together
+  statframe <- rbind(statframe,cp,cp1,pp,pframe)
+  # exclude rows not wanted                 
+  statframe <- statframe[!(statframe$Value %in% exclude),]
+  # sort by analysis
+  statframe <- statframe[order(statframe$i),]
+  # add analysis and timing
+  statframe$Analysis <- ""
+  aname <- paste("IA ",1:x$k,": ",round(100*x$timing,0),"%",sep="")
+  aname[x$k]<-"Final"
+  statframe[statframe$Value==statframe$Value[1],]$Analysis <- aname
+  # sample size, events or information at analyses
+  if (!("gsSurv" %in% class(x))){
+    if(x$n.fix > 1) N <- ceiling(x$n.I) else N<-round(x$n.I,2)
+    if (Nname == "Information") N <- round(x$n.I,2)
+    nstat <- 2
+  }else{
+    nstat <- 4
+    statframe[statframe$Value==statframe$Value[3],]$Analysis <- paste("Events:",ceiling(x$eDC+x$eDE))
+    if (x$ratio==1) N <- 2*ceiling(x$eNE) else N <- ceiling(x$eNE+x$eNC)
+    Time <- round(x$T,tdigits)
+    statframe[statframe$Value==statframe$Value[4],]$Analysis <- paste(timename,": ",as.character(Time),sep="")
+  }
+  statframe[statframe$Value==statframe$Value[2],]$Analysis <- paste(Nname,": ",N,sep="")
+  # add POS and predictive POS, if requested
+  if (POS){
+    ppos <- array("",x$k)
+    for(i in 1:(x$k-1)) ppos[i] <- paste("Post IA POS: ",as.character(round(100*gsCPOS(i=i, x=x, theta=prior$z, wgts=prior$wgts),1)),"%",sep="")
+    statframe[statframe$Value==statframe$Value[nstat+1],]$Analysis <-ppos 
+    statframe[nstat+2,]$Analysis <- ppos[1]
+    statframe[nstat+1,]$Analysis <- paste("Trial POS: ",as.character(round(100*gsPOS(x=x,theta=prior$z,wgts=prior$wgts),1)),"%",sep="")
+  }
+  # add futility column to data frame
+  scol <- c(1,2,if(x$test.type>1){4}else{NULL})
+  rval<-statframe[c(ncol(statframe),scol)]
+  rval$Efficacy <- round(rval$Efficacy,digits)
+  if(x$test.type>1) rval$Futility <- round(rval$Futility,digits)
+  class(rval)<-c("gsBoundSummary","data.frame")
+  return(rval)
+}
+xprint <- function(x,include.rownames=FALSE,hline.after=c(-1,which(x$Value==x[1,]$Value)-1,nrow(x)),...){
+  print.xtable(xtable(x), hline.after=hline.after, include.rownames=include.rownames,...)
+}
+print.gsBoundSummary <- function(x,row.names=FALSE,digits=4,...){
+  print.data.frame(x,row.names=row.names,digits=digits,...)
+}
 
 ###
 # Hidden Functions
@@ -294,7 +481,6 @@
             "2" = c(expression(paste("Reject ", H[0])), "Continue", expression(paste("Reject ", H[0]))),
             c(expression(paste("Reject ", H[0])), "Continue", expression(paste("Reject ", H[1]))))            
 }
-
 "sfprint" <- function(x)
 {   
     # print spending function information    
@@ -327,80 +513,3 @@
     }
     cat("\n")
 }
-
-gsBoundSummary <- function(x, upper=TRUE, ratio=1)
-{   if (upper)
-    {   sn <- -1
-        bnd <- x$upper
-    }else
-    {   sn <- 1
-        bnd <- x$lower
-    }
-    p0 <- cumsum(bnd$prob[,1])
-    p1 <- cumsum(bnd$prob[,x$theta==x$delta])
-    CP <- p0
-    CP[x$k] <- NA
-    CPhat <- CP
-    for(i in 1:x$k)
-    {   if (i < x$k)
-        {   CPtem <- gsCP(zi=bnd$bound[i], i=i, x=x, theta=NULL)$upper$prob
-            CP[i] <- sum(CPtem[,3])
-            CPhat[i] <- sum(CPtem[,1])
-        }
-    }
-    if (is.null(x$endpoint) || tolower(x$endpoint)!="survival") 
-    {    effect <- gsDelta(bnd$bound, i=1:x$k, x=x)
-         ename <- "delta"
-    }else
-    {    effect <- gsHR(z=bnd$bound, i=1:x$k, x=x, ratio=1)
-         ename <- "HR"
-    }
-    nval <- x$n.I
-    if (x$n.fix != 1) nval <- ceiling(nval/2)*2 
-    tab <- cbind(100*x$timing, nval, bnd$bound, pnorm(sn * bnd$bound), p0, p1, CP, CPhat,
-              effect,
-                 gsBValue(bnd$bound, i=1:x$k, x=x))
-    rnames <- c(paste("IA",1:(x$k-1)), "Final")
-    cnames <- c("Timing (%)", ifelse(x$n.fix==1,"r","N"), "Z", "Nominal p", "H0 spend", "H1 spend",
-                "CP theta1", "CP thetahat", ename , "B-value")
-    dimnames(tab) <- list(rnames, cnames)
-    gss <- list(upper=upper, tab=t(tab))
-    class(gss) <- "gsBoundSummary"
-    gss
-}
-xtable.gsDesign <- function(x, caption=NULL, label=NULL, align=NULL, digits=c(0,0,3,4,4,4,3,3,3,3),
-                             display=NULL, upper=TRUE, rnames=NULL, cnames=NULL, ratio=1,
-                             sanitize.text.function=function(x){x}, 
-                             sanitize.rownames.function=function(x){x},...)
-{  bnd <- round(gsBoundSummary(x, upper, ratio)$tab, digits)
-   if (is.null(cnames)) cnames <- colnames(bnd)
-   if (is.null(rnames))
-   {   rnames <- rownames(bnd)
-       if (rnames[9] == "delta") ename <- "$\\hat{\\delta}$"
-       else if (rnames[9] == "HR") ename <- "HR"
-
-       if (x$n.fix==1) nname <- "r"
-       else if (!is.null(x$endpoint) && tolower(x$endpoint)=="survival") nname <- "Events"  
-       else nname <- "N"
-       rnames <- c("Timing (\\%)", nname, "Z", "Nominal p", "H$_0$-spend", "$\\beta$-spend",
-                   "$\\hbox{CP}(\\theta_1,\\hbox{Z})$", "$\\hbox{CP}(\\hat{\\theta}, \\hbox{Z})$",
-                   ename, "B-value")
-   }
-   b <- matrix(as.character(bnd), nrow=nrow(bnd), ncol=ncol(bnd),
-               dimnames=list(rnames,cnames))
-   b
-   print(xtable(b, caption=caption, label=label, display=display, align=align,
-                   sanitize.text.function=sanitize.text.function,...),
-                   sanitize.rownames.function=sanitize.rownames.function)
-}
-print.gsBoundSummary <- function(x, digits=c(0,0,3,4,4,4,3,3,3,3),cnames=NULL,
-   rnames=c("Timing (%)","N","Z","Nominal p","H0 spend","H1 spend",
-"CP theta1","CP thetahat","delta","B-value"),quote=F){
-	bnd <- round(x$tab,digits)
-	if (is.null(cnames)) cnames <- colnames(bnd)
-	b <- matrix(as.character(bnd), nrow=nrow(bnd), ncol=ncol(bnd),
-               dimnames=list(rnames,cnames))
-	print(ifelse(x$upper,"Upper bound","Lower bound"))
-	print(b,quote=quote)
-}
-

Modified: pkg/gsDesign/R/gsSurv.R
===================================================================
--- pkg/gsDesign/R/gsSurv.R	2013-06-05 20:14:06 UTC (rev 354)
+++ pkg/gsDesign/R/gsSurv.R	2013-09-02 11:58:37 UTC (rev 355)
@@ -577,7 +577,8 @@
   alpha = alpha, beta = beta,  sided = sided, tol = tol)  
   y<-gsDesign(k=k,test.type=test.type,alpha=alpha/sided,
       beta=beta, astar=astar, n.fix=x$d, timing=timing,
-      sfu=sfu, sfupar=sfupar, sfl=sfl, sflpar=sflpar, tol=tol)
+      sfu=sfu, sfupar=sfupar, sfl=sfl, sflpar=sflpar, tol=tol,
+              delta1=log(hr), delta0=log(hr0))
   z<-gsnSurv(x,y$n.I[k])
   eDC <- NULL
   eDE <- NULL

Modified: pkg/gsDesign/R/gsUtilities.R
===================================================================
--- pkg/gsDesign/R/gsUtilities.R	2013-06-05 20:14:06 UTC (rev 354)
+++ pkg/gsDesign/R/gsUtilities.R	2013-09-02 11:58:37 UTC (rev 355)
@@ -16,9 +16,9 @@
 #  Author(s): William Constantine, Ph.D.
 # 
 #  Reviewer(s): REvolution Computing 19DEC2008 v.2.0 - William Constantine, Kellie Wills 
+#  Updated: Keaven Anderson, 16JUN2013 to fix error messages
+#  R Version: 3.0
 #
-#  R Version: 2.7.2
-#
 ##################################################################################
 
 ###
@@ -65,14 +65,14 @@
     if (!(left && right))
     {
         stop(paste(varname, " not on interval ", if (inclusion[1]) "[" else "(", interval[1], ", ", 
-                        interval[2], if (inclusion[2]) "]" else ")", sep=""))
+                        interval[2], if (inclusion[2]) "]" else ")", sep=""), call.=TRUE)
     }
     
     invisible(NULL)
 }
 
 "checkScalar" <- function(x, isType = "numeric", ...) 
-{        
+{
     # check inputs
     if (!is.character(isType))
     {
@@ -92,13 +92,13 @@
         # create error message    
         parent <- as.character(sys.call(-1)[[1]])
         varstr <- paste(if (length(parent) > 0) paste("In function", parent, ": variable") else "", deparse(substitute(x)))
-    stop(varstr, "must be scalar of class", isType)
+        stop(varstr, " must be scalar of class ", isType)
     }
     
     # check if input is on specified interval    
     if (length(list(...)) > 0)
     {
-        checkRange(x, ..., varname=varstr)
+        checkRange(x, ..., varname=deparse(substitute(x)))
     }
     
     invisible(NULL)
@@ -131,18 +131,18 @@
         # create error message
         parent <- as.character(sys.call(-1)[[1]])
         varstr <- paste(if (length(parent) > 0) paste("In function", parent, ": variable") else "", deparse(substitute(x))) 
-        stop(paste(varstr, "must be vector of class", isType))
+        stop(paste(varstr, " must be vector of class ", isType, sep=""))
     }
     # check vector length
     if (!is.null(length) && (length(x) != length))
     {
-        stop(paste(varstr, "is a vector of length", length(x), "but should be of length", length))
+        stop(paste(varstr, " is a vector of length ", length(x), " but should be of length ", length, sep=""))
     }
     
     # check if input is on specified interval
     if (length(list(...)) > 0)
     {
-        checkRange(x, ..., varname=varstr)
+        checkRange(x, ..., varname=deparse(substitute(x)))
     }
     
     invisible(NULL)
@@ -227,7 +227,7 @@
     # create error message
     parent <- as.character(sys.call(-1)[[1]])
     varstr <- paste(if (length(parent) > 0) paste("In function", parent, ": variable") else "", deparse(substitute(x))) 
-    stop(paste(varstr, "must be matrix of class", isType))
+    stop(paste(varstr, " must be matrix of class ", isType, sep=""))
   }
   # check matrix dimensions
   if (!is.null(nrows) && (NROW(x) != nrows))
@@ -242,7 +242,7 @@
   # check if input is on specified interval
   if (length(list(...)) > 0)
   {
-    checkRange(x, ..., varname=varstr)
+    checkRange(x, ..., varname=deparse(substitute(x)))
   }
   
   invisible(NULL)

Modified: pkg/gsDesign/R/ssrCP.R
===================================================================
--- pkg/gsDesign/R/ssrCP.R	2013-06-05 20:14:06 UTC (rev 354)
+++ pkg/gsDesign/R/ssrCP.R	2013-09-02 11:58:37 UTC (rev 355)
@@ -1,29 +1,292 @@
-ssrCP <- function(z, theta=NULL, maxinc=2, overrun=0, beta = 0.1, cpadj=c(.5,1-beta), x=gsDesign(k=2, timing=.5, beta=beta)){
-  if (class(x)!="gsDesign") stop("x must be passed as an object of class gsDesign")
-  if (2 != x$k) stop("input group sequential design must have 2 stages (k=2)")
-  w <- x$timing[1]
-  if (is.null(theta)) theta <- z / sqrt(x$n.I[1])
-  CP <- pnorm(theta*sqrt(x$n.I[2]*(1-w))-(x$upper$bound[2]-z*sqrt(w))/sqrt(1-w))
-  n2 <- array(x$n.I[1]+overrun,length(z))
-  indx <- ((z>x$lower$bound[1])&(z<x$upper$bound[1]))
-  indx2 <- indx & ((CP < cpadj[1])|(CP>cpadj[2]))
+# conditional power function
+"condPower" <- function(z1, n2, z2=z2NC, theta=NULL, 
+                x=gsDesign(k=2, timing=.5, beta=beta),
+                ...){
+  if (is.null(theta)) theta <- z1 / sqrt(x$n.I[1])
+  return(as.numeric(pnorm(sqrt(n2)*theta-
+                            z2(z1=z1,x=x,n2=n2))))
+}
+
+
+
+"condPowerDiff" <- function(z1, target, n2, z2=z2NC,
+                            theta=NULL,
+                            x=gsDesign(k=2,timing=.5)){
+  if (is.null(theta)) theta <- z1 / sqrt(x$n.I[1])
+  return(as.numeric(pnorm(sqrt(n2)*theta-
+                            z2(z1=z1,x=x,n2=n2))-target))
+}
+
+
+
+"n2sizediff" <- function(z1, target, beta=.1, z2=z2NC, 
+                         theta=NULL, 
+                         x=gsDesign(k=2, timing=.5, beta=beta)){
+  if (is.null(theta)) theta <- z1 / sqrt(x$n.I[1])
+  return(as.numeric(((z2(z1=z1,x=x,n2=target-x$n.I[1]) - 
+                        qnorm(beta))/theta)^2 + x$n.I[1] - 
+                      target))
+}
+
+
+
+ssrCP <- function(z1, theta=NULL, maxinc=2, overrun=0, 
+                  beta = x$beta, cpadj=c(.5,1-beta), 
+                  x=gsDesign(k=2, timing=.5, beta=beta),
+                  z2=z2NC,...){
+  if (class(x)!="gsDesign") 
+    stop("x must be passed as an object of class gsDesign")
+  if (2 != x$k) 
+    stop("input group sequential design must have 2 stages (k=2)")
+  if (overrun <0 | overrun + x$n.I[1]>x$n.I[2])
+    stop(paste("overrun  must be >= 0 and",
+               " <= 2nd stage sample size (",
+          round(x$n.I[2]-x$n.I[1],2),")",sep=""))
+  checkVector(x=z1,interval=c(-Inf,Inf), 
+              inclusion=c(FALSE,FALSE))
+  checkScalar(maxinc,interval=c(1,Inf), 
+              inclusion=c(FALSE,TRUE))
+  checkVector(cpadj,interval=c(0,1), 
+              inclusion=c(FALSE,FALSE),length=2)
+  if (cpadj[2]<=cpadj[1]) 
+    stop(paste("cpadj must be an increasing pair", 
+               "of numbers between 0 and 1"))
+  n2 <- array(x$n.I[1]+overrun,length(z1))
+  temtheta <- theta
+  if (is.null(theta))theta <- z1 / sqrt(x$n.I[1])
+  # compute conditional power for group sequential design
+  # for given z1
+  CP <- condPower(z1, n2=x$n.I[2]-x$n.I[1], z2=z2, 
+                  theta=theta, x=x, ...)
+  # continuation range
+  indx <- ((z1 > x$lower$bound[1]) & 
+           (z1 < x$upper$bound[1]))
+  # where final sample size will not be adapted
+  indx2 <- indx & ((CP < cpadj[1]) | (CP>cpadj[2]))
+  # in continuation region with no adaptation, use planned final n
   n2[indx2] <- x$n.I[2]
-  indx <- indx & !indx2
-  n2[indx] <- (((x$upper$bound[2] - z[indx] * sqrt(w)) / sqrt(1 - w) - qnorm(beta))/theta[indx])^2 + x$n.I[1]
-  n2[n2>maxinc*x$n.I[2]]<-maxinc*x$n.I[2]
-  return(n2)
+  # now set where you will adapt
+  indx2 <- indx & !indx2
+  # update sample size based on combination function  
+  # assuming original stage 2 sample size
+  z2i <- z2(z1=z1[indx2], x=x, n2=x$n.I[2]-x$n.I[1],...)
+  if (length(theta)==1){
+    n2i <- ((z2i - qnorm(beta)) / theta)^2 + x$n.I[1]
+  }else{
+    n2i <- ((z2i - qnorm(beta)) / theta[indx2])^2 + x$n.I[1]
+  }
+  n2i[n2i/x$n.I[2] > maxinc] <- x$n.I[2]*maxinc
+  # if conditional error depends on stage 2 sample size,
+  # do fixed point iteration to get conditional error
+  # and stage 2 sample size to correspond
+  if (class(z2i)[1] == "n2dependent"){
+    for(i in 1:6){
+      z2i <- z2(z1=z1[indx2], x=x, n2=n2i-x$n.I[1], ...)
+      if (length(theta)==1){
+        n2i <- ((z2i - qnorm(beta))/theta)^2 + x$n.I[1]
+      }else{
+        n2i <- ((z2i - qnorm(beta))/theta[indx2])^2 + 
+          x$n.I[1]
+      }
+      n2i[n2i/x$n.I[2] > maxinc] <- x$n.I[2]*maxinc
+    }
+  }
+  n2[indx2] <- n2i
+  if (length(theta)==1) theta <- array(theta,length(n2))
+  rval <- list(x=x, z2fn=z2, theta=temtheta, maxinc=maxinc, 
+               overrun=overrun, beta=beta, cpadj=cpadj, 
+               dat=data.frame(z1=z1, 
+                        z2=z2(z1=z1, x=x, n2=x$n.I[2], ...), 
+                        n2=n2, CP=CP, theta=theta,
+                        delta=x$delta0+theta*(x$delta1-x$delta0)))
+  class(rval) <- "ssrCP"
+  return(rval)
 }
-# Type I error if sufficient statistic used instead of
-# combination test
 
-#unadjTypeIErr <- function(maxinc=2, beta = 0.1, cpadj=c(.5,1-beta), 
-#                          x=gsDesign(k=2, timing=.5, beta=beta)){
-#  z <- normalGrid() # grid for interim z
-#  B <- sqrt(x$timing[1])*z # interim B-value
-#  thetahat <- z/sqrt(n.I[1]) # interim standardized effect size
-#  cp <-
-  # stage 2 sample size
-#  n2 <- ssrCP(z=z,maxinc=maxinc,beta=beta,cpadj=cpadj,x=x)
-  # Type I error for non-promising zone
-  # assuming non-binding futility
-  
\ No newline at end of file
+
+
+plot.ssrCP <- function(x, z1ticks=NULL, mar=c(7, 4, 4, 4)+.1, ylab="Adapted sample size", xlaboffset=-.2, lty=1, col=1,...){
+  par(mar=mar)
+  plot(x$dat$z1, x$dat$n2,type="l", axes=FALSE, xlab="", ylab="", lty=lty, col=col,...)
+  xlim <- par("usr")[1:2] # get x-axis range
+  axis(side=2, ...)
+  mtext(side=2, ylab, line=2,...)
+  w <- x$x$timing[1]
+  if (is.null(z1ticks)) z1ticks <- seq(ceiling(2*xlim[1])/2, floor(2*xlim[2])/2, by=.5)
+  theta <- z1ticks / sqrt(x$x$n.I[1])
+  CP <- pnorm(theta*sqrt(x$x$n.I[2]*(1-w))-(x$x$upper$bound[2]-z1ticks*sqrt(w))/sqrt(1-w))
+  CP <- condPower(z1=z1ticks,x=x$x,cpadj=x$cpadj,n2=x$x$n.I[2]-x$x$n.I[1])
+  axis(side=1,line=3,at=z1ticks,labels=as.character(round(CP,2)), ...)
+  mtext(side=1,"CP",line=3.5,at=xlim[1]+xlaboffset,...)
+  axis(side=1,line=1,at=z1ticks, labels=as.character(z1ticks),...)
+  mtext(side=1,expression(z[1]),line=.75,at=xlim[1]+xlaboffset,...)
+  axis(side=1,line=5,at=z1ticks, labels=as.character(round(theta/x$x$delta,2)),...)
+  mtext(side=1,expression(hat(theta)/theta[1]),line=5.5,at=xlim[1]+xlaboffset,...)
+}
+
+
+
+# normal combination test cutoff for z2
+"z2NC" <- function(z1,x,...){
+  z2 <- (x$upper$bound[2] - z1*sqrt(x$timing[1])) / 
+    sqrt(1-x$timing[1])
+  class(z2) <- c("n2independent","numeric")
+  return(z2)
+}
+# sufficient statistic cutoff for z2
+"z2Z" <- function(z1,x,n2=x$n.I[2]-x$n.I[1],...){
+  N2 <- x$n.I[1] + n2
+  z2 <- (x$upper$bound[2]-z1*sqrt(x$n.I[1]/N2)) / 
+    sqrt(n2/N2)
+  class(z2) <- c("n2dependent","numeric")
+  return(z2)
+}
+# Fisher's combination test
+"z2Fisher" <- function(z1,x,...){
+  z2 <- z1
+  indx <- pchisq(-2*log(pnorm(-z1)), 4, lower.tail=F) <= 
+    pnorm(-x$upper$bound[2])
+  z2[indx] <- -Inf
+  z2[!indx] <- qnorm(exp(-qchisq(pnorm(-x$upper$bound[2]), 
+                                 df=4, lower.tail=FALSE) /
+                           2-log(pnorm(-z1[!indx]))), 
+                     lower.tail=FALSE)
+  class(z2) <- c("n2independent","numeric")
+  return(z2)
+}
+"Power.ssrCP" <- function(x, theta=NULL, delta=NULL, r=18){
+  if (class(x) != "ssrCP") 
+    stop("Power.ssrCP must be called with x of class ssrCP")
+  if (is.null(theta) & is.null(delta)){
+    theta <- (0:80)/40*x$x$delta
+    delta <- (x$x$delta1-x$x$delta0)/x$x$delta*theta + x$x$delta0
+  }else if(!is.null(theta)){delta <- (x$x$delta1-x$x$delta0)/x$x$delta*theta + x$x$delta0
+  }else theta <- (delta-x$x$delta0)/(x$x$delta1-x$x$delta0)*x$x$delta
+  en <- theta
+  Power <- en
+  mu <- sqrt(x$x$n.I[1])*theta
+  # probability of stopping at 1st interim
+  Power <- pnorm(x$x$upper$bound[1]-mu,lower.tail=FALSE)
+  en <- (x$x$n.I[1]+x$overrun)*(Power + 
+                                  pnorm(x$x$lower$bound[1]-mu))
+  # get minimum and maximum conditional power at bounds
+  cpmin <- condPower(z1=x$x$lower$bound[1], 
+                     n2=x$x$n.I[2] - x$x$n.I[1], 
+                     z2=x$z2fn, theta=x$theta, 
+                     x=x$x, beta=x$beta)
+  cpmax <- condPower(z1=x$x$upper$bound[1], 
+                     n2=x$x$n.I[2] - x$x$n.I[1], 
+                     z2=x$z2fn, theta=x$theta, 
+                     x=x$x, beta=x$beta)
+  # if max conditional power <= lower cp for which adjustment 
+  # is done or min conditional power >= upper cp for which 
+  # adjustment is done, no adjustment required
+  if (cpmax <= x$cpadj[1] || cpmin >=x$cpadj[2]){
+    en <- en + x$x$n.I[2] * (pnorm(x$x$upper$bound[1]-mu) - 
+                               pnorm(x$x$lower$bound[1]-mu))
+    a <- x$x$lower$bound[1]
+    b <- x$x$upper$bound[2]
+    n2 <- x$x$n.I[2]-x$x$n.I[1]
+    grid <- normalGrid(mu=(a+b)/2,bounds=c(a,b),r=r)
+    for(i in 1:length(theta)) Power[i] <- Power[i] +
+      sum(dnorm(grid$z-sqrt(x$x$n.I[1])*theta[i]) * grid$gridwgts*
+            pnorm(x$z2fn(grid$z, x=x$x, n2=n2)-theta[i]*sqrt(n2), 
+                  lower.tail=FALSE))
+    return(data.frame(theta=theta,delta=delta,Power=Power,en=en))
+  }
+  # check if minimum conditional power met at interim lower bound,
+  # if not, compute probability of no SSR and accumulate
+  if (cpmin<x$cpadj[1]){
+    changepoint <- uniroot(condPowerDiff, 
+                           interval=c(x$x$lower$bound[1], 
+                                      x$x$upper$bound[1]), 
+                           target=x$cpadj[1],
+                           n2=x$x$n.I[2]-x$x$n.I[1], 
+                           z2=x$z2fn, theta=x$theta, 
+                           x=x$x)$root
+    en <- en + x$x$n.I[2]*(pnorm(changepoint-mu) - 
+                             pnorm(x$x$lower$bound[1]-mu))
+    a <- x$x$lower$bound[1]
+    b <- changepoint
+    n2 <- x$x$n.I[2]-x$x$n.I[1]
+    grid <- normalGrid(mu=(a+b)/2,
+                       bounds=c(a,b),r=r)
+    for(i in 1:length(theta)) Power[i] <- Power[i] +
+      sum(dnorm(grid$z-sqrt(x$x$n.I[1]) * theta[i])*grid$gridwgts*
+            pnorm(x$z2fn(grid$z, x=x$x, n2=n2)-theta[i]*
+                    sqrt(n2), lower.tail=FALSE))
+  }else changepoint <- x$x$lower$bound[1]
+  # check if max cp exceeded at interim upper bound
+  # if it is, compute probability of no final sample 
+  # size increase just below upper bound
+  if (cpmax >x$cpadj[2]){
+    changepoint2 <- uniroot(condPowerDiff, 
+                            interval=c(changepoint, x$x$upper$bound[1]), 
+                            target=x$cpadj[2], n2=x$x$n.I[2]-x$x$n.I[1], 
+                            z2=x$z2fn, theta=x$theta, x=x$x)$root
+    en <- en + x$x$n.I[2]*(pnorm(x$x$upper$bound[1]-mu)-
+                             pnorm(changepoint2-mu))
+    a <- changepoint2
+    b <- x$x$upper$bound[1]
+    n2 <- x$x$n.I[2]-x$x$n.I[1]
+    grid <- normalGrid(mu=(a+b)/2,bounds=c(a,b),r=r)
+    for(i in 1:length(theta)) Power[i] <-  Power[i] +
+      sum(dnorm(grid$z-sqrt(x$x$n.I[1])*theta[i]) * grid$gridwgts*
+            pnorm(x$z2fn(grid$z, x=x$x, n2=n2)-theta[i]*sqrt(n2), 
+                  lower.tail=FALSE))
+  }else changepoint2 <- x$x$upper$bound[1]
+  # next look if max sample size based on CP is greater than 
+  # allowed if it is, compute en for interval at max sample size
+  if (n2sizediff(z1=changepoint, target=x$maxinc*x$x$n.I[2], 
+                 beta=x$beta, z2=x$z2fn, theta=x$theta, x=x$x)>0){
+    # find point at which sample size based on cp 
+    # is same as max allowed
+    changepoint3 <- uniroot(condPowerDiff, 
+                            interval=c(changepoint,15), 
+                            target=1-x$beta, x=x$x, 
+                            n2=x$maxinc*x$x$n.I[2]-x$x$n.I[1],
+                            z2=x$z2fn, theta=x$theta)$root
+    if (changepoint3 >= changepoint2){
+      en <- en + x$maxinc*x$x$n.I[2]*(pnorm(changepoint2-mu)-
+                                        pnorm(changepoint-mu))
+      a <- changepoint
+      b <- changepoint2
+      n2 <- x$x$n.I[2]-x$x$n.I[1]
+      grid <- normalGrid(mu=(a+b)/2,bounds=c(a,b),r=r)
+      for(i in 1:length(theta)) Power[i] <-  Power[i] +
+        sum(dnorm(grid$z-sqrt(x$x$n.I[1])*theta[i]) * 
+              grid$gridwgts *
+              pnorm(x$z2fn(grid$z, x=x$x, n2=n2)-
+                      theta[i] * sqrt(n2), lower.tail=FALSE))
+      return(data.frame(theta=theta, delta=delta, 
+                        Power=Power, en=en))
+    }
+    en <- en + x$maxinc*x$x$n.I[2]*(pnorm(changepoint3-mu) - 
+                                      pnorm(changepoint-mu))
+    a <- changepoint
+    b <- changepoint3
+    n2 <- x$maxinc*x$x$n.I[2]-x$x$n.I[1]
+    grid <- normalGrid(mu=(a+b)/2,bounds=c(a,b),r=r)
+    for(i in 1:length(theta)) Power[i] <-  Power[i] +
+      sum(dnorm(grid$z-sqrt(x$x$n.I[1])*theta[i]) * 
+            grid$gridwgts *
+            pnorm(x$z2fn(grid$z, x=x$x, n2=n2) - 
+                    theta[i] * sqrt(n2), lower.tail=FALSE))
+  }else changepoint3 <- x$x$lower$bound[1]
+  # finally, integrate en over area where conditional power is in  
+  # range where we wish to increase to desired conditional power
+  grid <- normalGrid(mu=(changepoint3+changepoint2)/2, 
+                     bounds=c(changepoint3, changepoint2), r=r)    
+  y <- ssrCP(z1=grid$z, theta=x$theta, maxinc=x$maxinc*2, 
+             beta=x$beta, x=x$x, cpadj=c(.05,.9999), 
+             z2=x$z2fn)$dat
+  for(i in 1:length(theta)){
[TRUNCATED]

To get the complete diff run:
    svnlook diff /svnroot/gsdesign -r 355
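
A minimal sketch of how the experimental ssrCP interface added in this revision
might be exercised (the design and argument values are illustrative assumptions,
not taken from the commit):

    library(gsDesign)
    x  <- gsDesign(k = 2, timing = 0.5, delta = 0.2)  # 2-stage design, as ssrCP requires
    z1 <- seq(0.5, 3, by = 0.05)                      # grid of interim z-values
    y  <- ssrCP(z1 = z1, x = x, z2 = z2NC, maxinc = 2, cpadj = c(0.5, 0.9))
    plot(y)          # adapted stage-2 sample size versus z1 (plot.ssrCP)
    Power.ssrCP(y)   # power and expected sample size over a grid of theta values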

