[Gsdesign-commits] r150 - pkg/man

noreply at r-forge.r-project.org noreply at r-forge.r-project.org
Tue May 12 15:37:42 CEST 2009


Author: keaven
Date: 2009-05-12 15:37:42 +0200 (Tue, 12 May 2009)
New Revision: 150

Modified:
   pkg/man/Wang-Tsiatis-bounds.Rd
   pkg/man/binomial.Rd
   pkg/man/gsDesign-package.Rd
   pkg/man/gsDesign.Rd
   pkg/man/gsProbability.Rd
   pkg/man/gsbound.Rd
   pkg/man/normalGrid.Rd
   pkg/man/plot.gsDesign.Rd
   pkg/man/sfHSD.Rd
   pkg/man/sfLDPocock.Rd
   pkg/man/sfTDist.Rd
   pkg/man/sfexp.Rd
   pkg/man/sflogistic.Rd
   pkg/man/sfpoints.Rd
   pkg/man/sfpower.Rd
   pkg/man/spendingfunctions.Rd
   pkg/man/testutils.Rd
Log:
.Rd format updates

Modified: pkg/man/Wang-Tsiatis-bounds.Rd
===================================================================
--- pkg/man/Wang-Tsiatis-bounds.Rd	2009-05-06 15:49:22 UTC (rev 149)
+++ pkg/man/Wang-Tsiatis-bounds.Rd	2009-05-12 13:37:42 UTC (rev 150)
@@ -38,11 +38,13 @@
 # Pocock design
 gsDesign(test.type=2, sfu="Pocock")
 
-# alternate call to get Pocock design specified using Wang-Tsiatis option and Delta=0.5
+# alternate call to get Pocock design specified using 
+# Wang-Tsiatis option and Delta=0.5
 gsDesign(test.type=2, sfu="WT", sfupar=0.5)
 
 # this is how this might work with a spending function approach
-# Hwang-Shih-DeCani spending function with gamma=1 is often used to approximate Pocock design
+# Hwang-Shih-DeCani spending function with gamma=1 is often used 
+# to approximate Pocock design
 gsDesign(test.type=2, sfu=sfHSD, sfupar=1)
 
 # unequal spacing works,  but may not be desirable 
@@ -55,7 +57,8 @@
 # One-sided O'Brien-Fleming design
 gsDesign(test.type=1, sfu="OF")
 
-# alternate call to get O'Brien-Fleming design specified using Wang-Tsiatis option and Delta=0
+# alternate call to get O'Brien-Fleming design specified using 
+# Wang-Tsiatis option and Delta=0
 gsDesign(test.type=1, sfu="WT", sfupar=0)
 }
 

Modified: pkg/man/binomial.Rd
===================================================================
--- pkg/man/binomial.Rd	2009-05-06 15:49:22 UTC (rev 149)
+++ pkg/man/binomial.Rd	2009-05-12 13:37:42 UTC (rev 150)
@@ -34,8 +34,8 @@
 }
 
 \usage{
-nBinomial(p1, p2, alpha=.025, beta=0.1, delta0=0, ratio=1, sided=1, outtype=1,
-          scale="Difference") 
+nBinomial(p1, p2, alpha=.025, beta=0.1, delta0=0, ratio=1,
+          sided=1, outtype=1, scale="Difference") 
 testBinomial(x1, x2, n1, n2, delta0=0, chisq=0, adj=0,
              scale="Difference", tol=.1e-10)
 ciBinomial(x1, x2, n1, n2, alpha=.05, adj=0, scale="Difference")
@@ -44,8 +44,11 @@
 }
 \arguments{
 For \code{simBinomial()} and \code{ciBinomial()} all arguments must have length 1.
+
 For \code{testBinomial()}, \code{x1, x2, n1, n2, delta0, chisq,} and \code{adj} may be vectors.
+
 For \code{nBinomial()}, \code{p1, p2, beta, delta0} and \code{ratio} may be vectors.
+
 For \code{nBinomial()} or \code{testBinomial()}, when one or more arguments is a vector, the routines return a vector of sample sizes and powers, respectively.
 Where vector arguments are allowed, there may be a mix of scalar and vector arguments. 
 All arguments specified using vectors must have the same length.  
@@ -92,9 +95,9 @@
 Fleiss, JL, Tytun, A and Ury (1980), A simple approximation for calculating sample sizes for comparing independent proportions.
 \emph{Biometrics};36:343-346.
 
-Gordon, I and Watson R (1985), The myth of continuity-corrected sample size formulae. \emph{Biometrics};52:71-76.
+Gordon, I and Watson R (1985), The myth of continuity-corrected sample size formulae. \emph{Biometrics}; 52: 71-76.
 
-Miettinin, O and Nurminen, M (1980), Comparative analysis of two rates. \emph{Statistics in Medicine};4:213-226.
+Miettinen, O and Nurminen, M (1980), Comparative analysis of two rates. \emph{Statistics in Medicine}; 4: 213-226.
 }
 
 \details{
@@ -117,7 +120,7 @@
 
 \value{
   \code{testBinomial()} and \code{simBinomial()} each return a vector of either Chi-square or Z test statistics. 
-  These may be compared to an appropriate cutoff point (e.g., \code{qnorm(.975)} or \code{qchisq(.95,1)}).
+  These may be compared to an appropriate cutoff point (e.g., \code{qnorm(.975)} for normal or \code{qchisq(.95,1)} for chi-square).
  
   With the default \code{outtype=2}, \code{nBinomial()} returns a list containing two vectors \code{n1} and \code{n2} containing
   sample sizes for groups 1 and 2, respectively.
@@ -153,7 +156,8 @@
 x0
 pnorm(x0, lower.tail=FALSE)
 
-# Perform 50k simulations to test validity of the above asymptotic p-values 
+# Perform 50k simulations to test validity of the above
+# asymptotic p-values 
 # (you may want to perform more to reduce standard error of estimate)
 sum(as.real(x0) <= 
     simBinomial(p1=.078, p2=.078, n1=500, n2=500, nsim=50000)) / 50000
@@ -166,22 +170,26 @@
 x
 pnorm(x, lower.tail=FALSE)
 
-# since chi-square tests equivalence (a 2-sided test) rather than non-inferiority (a 1-sided test), 
+# since chi-square tests equivalence (a 2-sided test) rather than
+# non-inferiority (a 1-sided test), 
 # the result is quite different
-pchisq(testBinomial(x1=410, x2=400, n1=500, n2=500, delta0= -.05, chisq=1, 
-                    adj=1), 1, lower.tail=FALSE)
+pchisq(testBinomial(x1=410, x2=400, n1=500, n2=500, delta0= -.05, 
+                    chisq=1, adj=1), 1, lower.tail=FALSE)
 
 # now simulate the z-statistic without continuity corrected variance
 sum(qnorm(.975) <= 
     simBinomial(p1=.8, p2=.8, n1=500, n2=500, nsim=100000)) / 100000
 
-# compute a sample size to show non-inferiority with 5% margin, 90% power
+# compute a sample size to show non-inferiority
+# with 5\% margin, 90\% power
 nBinomial(p1=.2, p2=.2, delta0=.05, alpha=.025, sided=1, beta=.1)
 
-# assuming a slight advantage in the experimental group lowers sample size requirement
+# assuming a slight advantage in the experimental group lowers
+# sample size requirement
 nBinomial(p1=.2, p2=.19, delta0=.05, alpha=.025, sided=1, beta=.1)
 
-# compute a sample size for comparing 15\% vs 10\% event rates with 1 to 2 randomization
+# compute a sample size for comparing 15\% vs 10\% event rates
+# with 1 to 2 randomization
 nBinomial(p1=.15, p2=.1, beta=.2, ratio=2, alpha=.05)
 
 # now look at total sample size using 1-1 randomization
@@ -198,15 +206,15 @@
 y3 <- nBinomial(p1, p2, beta=.2, outtype=1, alpha=.025, sided=1)
 p2 <- p1 * .5
 y4 <- nBinomial(p1, p2, beta=.2, outtype=1, alpha=.025, sided=1)
-plot(p1, y1, type="l", ylab="Sample size", xlab="Control group event rate",
-     ylim=c(0, 6000), lwd=2)
+plot(p1, y1, type="l", ylab="Sample size",
+     xlab="Control group event rate", ylim=c(0, 6000), lwd=2)
 title(main="Binomial sample size computation for 80 pct power")
 lines(p1, y2, lty=2, lwd=2)
 lines(p1, y3, lty=3, lwd=2)
 lines(p1, y4, lty=4, lwd=2)
 legend(x=c(.15, .2),y=c(4500, 6000),lty=c(2, 1, 3, 4), lwd=2,
-       legend=c("25 pct reduction", "33 pct reduction", "40 pct reduction",
-                "50 pct reduction"))
+       legend=c("25 pct reduction", "33 pct reduction",
+                "40 pct reduction", "50 pct reduction"))
 }
 
 \keyword{design}

Modified: pkg/man/gsDesign-package.Rd
===================================================================
--- pkg/man/gsDesign-package.Rd	2009-05-06 15:49:22 UTC (rev 149)
+++ pkg/man/gsDesign-package.Rd	2009-05-12 13:37:42 UTC (rev 150)
@@ -71,8 +71,8 @@
 \seealso{\code{\link{gsDesign}}, \code{\link{gsProbability}}
 }
 \examples{
-# assume a fixed design (no interim) trial with the same endpoint requires 200 subjects
-# for 90\% power at alpha=.025, one-sided
+# assume a fixed design (no interim) trial with the same endpoint
+# requires 200 subjects for 90\% power at alpha=.025, one-sided
 x <- gsDesign(n.fix=200)
 plot(x)
 }

Modified: pkg/man/gsDesign.Rd
===================================================================
--- pkg/man/gsDesign.Rd	2009-05-06 15:49:22 UTC (rev 149)
+++ pkg/man/gsDesign.Rd	2009-05-12 13:37:42 UTC (rev 150)
@@ -6,7 +6,8 @@
 \usage{
 gsDesign(k=3, test.type=4, alpha=0.025, beta=0.1, astar=0,  
          delta=0, n.fix=1, timing=1, sfu=sfHSD, sfupar=-4,
-         sfl=sfHSD, sflpar=-2, tol=0.000001, r=18, n.I = 0, maxn.IPlan = 0) 
+         sfl=sfHSD, sflpar=-2, tol=0.000001, r=18, n.I = 0,
+         maxn.IPlan = 0) 
 
 print.gsDesign(x,...)}
 
@@ -29,10 +30,10 @@
 	See details and examples.}
 	\item{timing}{Sets relative timing of interim analyses. Default of 1 produces equally spaced analyses. 
 	Otherwise, this is a vector of length \code{k} or \code{k-1}.
-	The values should satisfy \code{0 < timing[1] < timing[2] < ... < timing[k-1]< timing[k]=1}.}
+	The values should satisfy \code{0 < timing[1] < timing[2] < ... < timing[k-1] < 
+timing[k]=1}.}
 	\item{sfu}{A spending function or a character string indicating a boundary type (that is, \dQuote{WT} for Wang-Tsiatis bounds, \dQuote{OF} for O'Brien-Fleming bounds and \dQuote{Pocock} for Pocock bounds). 
-	For one-sided and symmetric two-sided testing (\code{test.type=1, 2}), 
-	\code{sfu} is used to completely specify spending. 
+	For one-sided and symmetric two-sided testing (\code{test.type=1, 2}), \code{sfu} is used to completely specify spending. 
 	The default value is \code{sfHSD} which is a Hwang-Shih-DeCani spending function.
 	See details, \link{Spending function overview}, manual and examples.}
 	\item{sfupar}{Real value, default is \eqn{-4} which is an O'Brien-Fleming-like conservative bound when used with the default Hwang-Shih-DeCani spending function. This is a real-vector for many spending functions.
@@ -70,7 +71,7 @@
 unless it was input as 0; in that case, value will be computed to give desired power for fixed design with input
 sample size \code{n.fix}.}
 \item{n.fix}{Sample size required to obtain desired power when effect size is \code{delta}.}
-\item{timing}{A vector of length \code{k} containing the portion of the total planned information/sample size at each analysis.}
+\item{timing}{A vector of length \code{k} containing the portion of the total planned information or sample size at each analysis.}
 \item{tol}{As input.}
 \item{r}{As input.}
 \item{upper}{Upper bound spending function, boundary and boundary crossing probabilities under the NULL and

Modified: pkg/man/gsProbability.Rd
===================================================================
--- pkg/man/gsProbability.Rd	2009-05-06 15:49:22 UTC (rev 149)
+++ pkg/man/gsProbability.Rd	2009-05-12 13:37:42 UTC (rev 150)
@@ -73,16 +73,17 @@
 y <- gsProbability(d=x, theta=x$delta*seq(0, 2, .25))
 class(y)
 
-# note that "y" below is equivalent to print(y) and print.gsProbability(y)
+# note that "y" below is equivalent to print(y) and
+# print.gsProbability(y)
 y
 
-# the plot does not change from before since this is a gsDesign object
-# note that theta/delta is on x axis
+# the plot does not change from before since this is a
+# gsDesign object; note that theta/delta is on x axis
 plot(y, plottype=2)
 
 # now let's see what happens with a gsProbability object
-z <- gsProbability(k=3, a=x$lower$bound, b=x$upper$bound, n.I=x$n.I, 
-    theta=x$delta*seq(0, 2, .25))
+z <- gsProbability(k=3, a=x$lower$bound, b=x$upper$bound, 
+    n.I=x$n.I, theta=x$delta*seq(0, 2, .25))
 
 # with the above form,  the results is a gsProbability object
 class(z)

Modified: pkg/man/gsbound.Rd
===================================================================
--- pkg/man/gsbound.Rd	2009-05-06 15:49:22 UTC (rev 149)
+++ pkg/man/gsbound.Rd	2009-05-12 13:37:42 UTC (rev 150)
@@ -67,13 +67,16 @@
 }
 
 \examples{
-#  set boundaries so that probability is .01 of first crossing each upper boundary
-#  and .02 of crossing each lower boundary under null hypothesis
-x <- gsBound(I=c(1, 2, 3)/3, trueneg=array(.02, 3), falsepos=array(.01, 3))
+# set boundaries so that probability is .01 of first crossing
+# each upper boundary and .02 of crossing each lower boundary
+# under the null hypothesis
+x <- gsBound(I=c(1, 2, 3)/3, trueneg=array(.02, 3),
+             falsepos=array(.01, 3))
 x
 
 #  use gsBound1 to set up boundary for a 1-sided test
-x <- gsBound1(theta= 0, I=c(1, 2, 3) / 3, a=array(-20, 3), probhi=c(.001, .009, .015))
+x <- gsBound1(theta= 0, I=c(1, 2, 3) / 3, a=array(-20, 3),
+              probhi=c(.001, .009, .015))
 x$b
 
 # check boundary crossing probabilities with gsProbability 
@@ -84,14 +87,17 @@
 #      use minus the upper bound as a lower bound
 #      replace theta with -theta
 #      set probhi as desired lower boundary crossing probabilities 
-#  Here we let set lower boundary crossing at 0.05 at each analysis assuming theta=2.2 
-y <- gsBound1(theta=-2.2,  I=c(1,  2,  3)/3,  a= -x$b,  probhi=array(.05,  3))
+#  Here we set lower boundary crossing at 0.05 at each analysis
+#  assuming theta=2.2 
+y <- gsBound1(theta=-2.2, I=c(1, 2, 3)/3, a= -x$b, 
+              probhi=array(.05, 3))
 y$b
 
 #  Now use gsProbability to look at design
-#  Note that lower boundary crossing probabilities are as specified for theta=2.2,  
-#  but for theta=0 the upper boundary crossing probabilities are smaller
-#  than originally specified above after first interim analysis
+#  Note that lower boundary crossing probabilities are as
+#  specified for theta=2.2, but for theta=0 the upper boundary
+#  crossing probabilities are smaller than originally specified
+#  above after first interim analysis
 gsProbability(k=length(x$b), theta=c(0, 2.2), n.I=x$I, b=x$b, a= -y$b)
 }
 \keyword{design}

Modified: pkg/man/normalGrid.Rd
===================================================================
--- pkg/man/normalGrid.Rd	2009-05-06 15:49:22 UTC (rev 149)
+++ pkg/man/normalGrid.Rd	2009-05-12 13:37:42 UTC (rev 150)
@@ -60,10 +60,13 @@
 x
 
 y <- normalGrid(r=3, mu=x$theta[2], sigma=x$theta[2] / 1.5)
-z <- gsProbability(k=3, theta=y$z, n.I=x$n.I, a=x$lower$bound, b=x$upper$bound)
+z <- gsProbability(k=3, theta=y$z, n.I=x$n.I, a=x$lower$bound,
+                   b=x$upper$bound)
 z <- gsProbability(d=x, theta=y$z)
-cat("Expected sample size averaged over normal prior distribution for theta with mu=", 
-   x$theta[2], "sigma=", x$theta[2]/1.5, ":", round(sum(z$en*y$wgt), 1), "\n")
+cat("Expected sample size averaged over normal ")
+cat("prior distribution for theta with mu=", 
+   x$theta[2], "sigma=", x$theta[2]/1.5, ":",
+   round(sum(z$en*y$wgt), 1), "\n")
 plot(y$z, z$en, xlab="theta", ylab="E{N}", 
    main="Expected sample size for different theta values")
 }

Modified: pkg/man/plot.gsDesign.Rd
===================================================================
--- pkg/man/plot.gsDesign.Rd	2009-05-06 15:49:22 UTC (rev 149)
+++ pkg/man/plot.gsDesign.Rd	2009-05-12 13:37:42 UTC (rev 150)
@@ -111,8 +111,8 @@
 x <- gsDesign(k=5, test.type=2, n.fix=100)
 x
 
-# the following translate to calls to plot.gsDesign since x was returned by gsDesign
-# run these commands one at a time
+# the following translate to calls to plot.gsDesign since x was
+# returned by gsDesign; run these commands one at a time
 plot(x)
 plot(x, plottype=2)
 plot(x, plottype=3)
@@ -123,9 +123,11 @@
 
 #  choose different parameter values for power plot
 #  start with design in x from above
-y <- gsProbability(k=5, theta=seq(0, .5, .025), x$n.I, x$lower$bound, x$upper$bound)
+y <- gsProbability(k=5, theta=seq(0, .5, .025), x$n.I,
+                   x$lower$bound, x$upper$bound)
 
-# the following translates to a call to plot.gsProbability since y has that type
+# the following translates to a call to plot.gsProbability since
+# y has that type
 plot(y)
 }
 \keyword{design}

Modified: pkg/man/sfHSD.Rd
===================================================================
--- pkg/man/sfHSD.Rd	2009-05-06 15:49:22 UTC (rev 149)
+++ pkg/man/sfHSD.Rd	2009-05-12 13:37:42 UTC (rev 150)
@@ -23,15 +23,14 @@
 \value{An object of type \code{spendfn}. See \link{Spending function overview} for further details.}
 \details{
 A Hwang-Shih-DeCani spending function takes the form
-\deqn{f(t;\alpha, \gamma)=\alpha(1-e^{-\gamma t})/(1-e^{-\gamma})} 
-{f(t; alpha, gamma) = alpha * (1-exp(-gamma * t))/(1 - exp(-gamma))}
+\deqn{f(t;\alpha, \gamma)=\alpha(1-e^{-\gamma t})/(1-e^{-\gamma})}{f(t; alpha, gamma) = alpha * (1-exp(-gamma * t))/(1 - exp(-gamma))}
 where \eqn{\gamma}{gamma} is the value passed in \code{param}.
-A value of \eqn{\gamma=-4}{gamma=-4) is used to approximate an O'Brien-Fleming design (see \code{\link{sfExponential}} for a better fit), 
+A value of \eqn{\gamma=-4}{gamma=-4} is used to approximate an O'Brien-Fleming design (see \code{\link{sfExponential}} for a better fit), 
 while a value of \eqn{\gamma=1}{gamma=1} approximates a Pocock design well.
 }
 \seealso{\link{Spending function overview}, \code{\link{gsDesign}}, \link{gsDesign package overview}}
 \note{The manual is not linked to this help file, but is available in library/gsdesign/doc/gsDesignManual.pdf
-in the directory where R is installed.}}
+in the directory where R is installed.}
 
 \author{Keaven Anderson \email{keaven\_anderson at merck.com}}
 \references{
@@ -47,7 +46,8 @@
 # print the design
 x
 
-# since sfHSD is the default for both sfu and sfl,  this could have been written as
+# since sfHSD is the default for both sfu and sfl,
+# this could have been written as
 x <- gsDesign(k=4, sfupar=-2, sflpar=1)
 
 # print again
@@ -57,7 +57,8 @@
 # show default values of gamma to see how the spending function changes
 # also show gamma=1 which is supposed to approximate a Pocock design
 t <- 0:100/100
-plot(t,  sfHSD(0.025, t, -4)$spend, xlab="Proportion of final sample size", 
+plot(t,  sfHSD(0.025, t, -4)$spend,
+   xlab="Proportion of final sample size", 
    ylab="Cumulative Type I error spending", 
    main="Hwang-Shih-DeCani Spending Function Example", type="l")
 lines(t, sfHSD(0.025, t, -2)$spend, lty=2)

Modified: pkg/man/sfLDPocock.Rd
===================================================================
--- pkg/man/sfLDPocock.Rd	2009-05-06 15:49:22 UTC (rev 149)
+++ pkg/man/sfLDPocock.Rd	2009-05-12 13:37:42 UTC (rev 150)
@@ -30,7 +30,7 @@
 O'Brien-Fleming bound is implemented in the function (\code{sfLDOF()}):
 \deqn{f(t; \alpha)=2-2\Phi\left(\Phi^{-1}(1-\alpha/2)/ t^{1/2}\right).}{%
 f(t; alpha)=2-2*Phi(Phi^(-1)(1-alpha/2)/t^(1/2)\right).}
-The Lan-DeMets (1983) spending function to approximate a Pocock design is implemented in the function \code{sfLDPocock()}:
+The Lan-DeMets (1983) spending function to approximate a Pocock design is implemented in the function \code{sfLDPocock()}:
 \deqn{f(t;\alpha)=ln(1+(e-1)t).}{f(t;alpha)=ln(1+(e-1)t).}
 As shown in examples below, other spending functions can be used to get as good or better approximations to Pocock and
 O'Brien-Fleming bounds. In particular, O'Brien-Fleming bounds can be closely approximated using \code{\link{sfExponential}}.
@@ -48,19 +48,23 @@
 }
 
 \examples{
-# 2-sided,  symmetric 6-analysis trial Pocock spending function approximation 
+# 2-sided,  symmetric 6-analysis trial Pocock
+# spending function approximation 
 gsDesign(k=6, sfu=sfLDPocock, test.type=2)$upper$bound
 
 # show actual Pocock design
 gsDesign(k=6, sfu="Pocock", test.type=2)$upper$bound
 
-# approximate Pocock again using a standard Hwang-Shih-DeCani approximation
+# approximate Pocock again using a standard
+# Hwang-Shih-DeCani approximation
 gsDesign(k=6, sfu=sfHSD, sfupar=1, test.type=2)$upper$bound
 
-# use 'best' Hwang-Shih-DeCani approximation for Pocock,  k=6 (see manual for details)
+# use 'best' Hwang-Shih-DeCani approximation for Pocock,  k=6;
+# see manual for details
 gsDesign(k=6, sfu=sfHSD, sfupar=1.3354376, test.type=2)$upper$bound
 
-# 2-sided, symmetric 6-analysis trial O'Brien-Fleming spending function approximation 
+# 2-sided, symmetric 6-analysis trial
+# O'Brien-Fleming spending function approximation 
 gsDesign(k=6, sfu=sfLDOF, test.type=2)$upper$bound
 
 # show actual O'Brien-Fleming bound
@@ -72,7 +76,8 @@
 x$upper$bound
 x$upper$param
 
-# use 'best' exponential approximation for k=6 (see manual for details)
-gsDesign(k=6, sfu=sfExponential, sfupar=0.7849295, test.type=2)$upper$bound
+# use 'best' exponential approximation for k=6; see manual for details
+gsDesign(k=6, sfu=sfExponential, sfupar=0.7849295,
+         test.type=2)$upper$bound
 }
 \keyword{design}

Modified: pkg/man/sfTDist.Rd
===================================================================
--- pkg/man/sfTDist.Rd	2009-05-06 15:49:22 UTC (rev 149)
+++ pkg/man/sfTDist.Rd	2009-05-12 13:37:42 UTC (rev 150)
@@ -60,11 +60,16 @@
 # 3-parameter specification: a,  b,  df
 sfTDist(1, 1:5/6, c(-1, 1.5, 4))$spend
 
-# 5-parameter specification fits 2 points,  in this case the 1st 2 interims
+# 5-parameter specification fits 2 points,  in this case
+# the 1st 2 interims are at 25\% and 50\% of observations with
+# cumulative error spending of 10\% and 20\%, respectively
 # final parameter is df
 sfTDist(1, 1:3/4, c(.25, .5, .1, .2, 4))$spend
 
-# 6-parameter specification fits 3 points,  in this case all of the interims
+# 6-parameter specification fits 3 points
+# Interims are at 25\%, 50\% and 75\% of observations
+# with cumulative spending of 10\%, 20\% and 50\%, respectively
+# Note: not all 3 point combinations can be fit
 sfTDist(1, 1:3/4, c(.25, .5, .75, .1, .2, .5))$spend
 
 # Example of error message when the 3-points specified 
@@ -77,7 +82,8 @@
 sfNormal(1, 1:3/4, c(.25, .5, .1, .2))$spend[3]
 sfCauchy(1, 1:3/4, c(.25, .5, .1, .2))$spend[3]
 
-# plot a few t-distribution spending functions fitting t=0.25,  5 and u=0.1,  0.2
+# plot a few t-distribution spending functions fitting
+# t=0.25, .5 and u=0.1, 0.2
 # to demonstrate the range of flexibility
 t <- 0:100/100
 plot(t, sfTDist(0.025, t, c(.25, .5, .1, .2, 1))$spend, 

Modified: pkg/man/sfexp.Rd
===================================================================
--- pkg/man/sfexp.Rd	2009-05-06 15:49:22 UTC (rev 149)
+++ pkg/man/sfexp.Rd	2009-05-12 13:37:42 UTC (rev 150)
@@ -34,7 +34,7 @@
 cumulative distribution function defined for \eqn{x>0} and is defined as
 \deqn{f(t;\alpha, \nu)=1-F\left(F^{-1}(1-\alpha)/ t^\nu\right).}{%
 f(t; alpha, nu)=1-F(F^(-1)(1-alpha)/ t^nu).}
-The exponential spending function can be derived by letting \eqn{F(x)=1-exp(-x)}, the exponential cumulative distribution function.
+The exponential spending function can be derived by letting \eqn{F(x)=1-\exp(-x)}, the exponential cumulative distribution function.
 This function was derived as a generalization of the Lan-DeMets (1983) spending function used to approximate an
 O'Brien-Fleming spending function (\code{sfLDOF()}),
 \deqn{f(t; \alpha)=2-2\Phi \left( \Phi^{-1}(1-\alpha/2)/ t^{1/2} \right).}{%
@@ -57,17 +57,20 @@
 \examples{
 # use 'best' exponential approximation for k=6 to O'Brien-Fleming design
 # (see manual for details)
-gsDesign(k=6, sfu=sfExponential, sfupar=0.7849295, test.type=2)$upper$bound
+gsDesign(k=6, sfu=sfExponential, sfupar=0.7849295,
+         test.type=2)$upper$bound
 
 # show actual O'Brien-Fleming bound
 gsDesign(k=6, sfu="OF", test.type=2)$upper$bound
 
-# show Lan-DeMets approximation (not as close as sfExponential approximation)
+# show Lan-DeMets approximation
+# (not as close as sfExponential approximation)
 gsDesign(k=6, sfu=sfLDOF, test.type=2)$upper$bound
 
 # plot exponential spending function across a range of values of interest
 t <- 0:100/100
-plot(t, sfExponential(0.025, t, 0.8)$spend, xlab="Proportion of final sample size", 
+plot(t, sfExponential(0.025, t, 0.8)$spend,
+   xlab="Proportion of final sample size", 
    ylab="Cumulative Type I error spending", 
    main="Exponential Spending Function Example", type="l")
 lines(t, sfExponential(0.025, t, 0.5)$spend, lty=2)
@@ -75,7 +78,8 @@
 lines(t, sfExponential(0.025, t, 0.2)$spend, lty=4)
 lines(t, sfExponential(0.025, t, 0.15)$spend, lty=5)
 legend(x=c(.0, .3), y=.025*c(.7, 1), lty=1:5, 
-    legend=c("nu = 0.8", "nu = 0.5", "nu = 0.3", "nu = 0.2", "nu = 0.15"))
+    legend=c("nu = 0.8", "nu = 0.5", "nu = 0.3", "nu = 0.2",
+             "nu = 0.15"))
 text(x=.59, y=.95*.025, labels="<--approximates O'Brien-Fleming")
 }
 \keyword{design}

Modified: pkg/man/sflogistic.Rd
===================================================================
--- pkg/man/sflogistic.Rd	2009-05-06 15:49:22 UTC (rev 149)
+++ pkg/man/sflogistic.Rd	2009-05-12 13:37:42 UTC (rev 150)
@@ -36,7 +36,9 @@
 	\item{t}{A vector of points with increasing values from 0 to 1, inclusive. Values of the proportion of 
 	sample size or information for which the spending function will be computed.}
 	\item{param}{In the two-parameter specification, \code{sfBetaDist()} requires 2 positive values, while
-	\code{sfLogistic()}, \code{sfNormal()}, \code{sfExtremeValue()}, \code{sfExtremeValue2()} and \code{sfCauchy()} require the first parameter 
+	\code{sfLogistic()}, \code{sfNormal()}, \code{sfExtremeValue()},
+
+ \code{sfExtremeValue2()} and \code{sfCauchy()} require the first parameter 
 	to be any real value and the second to be a positive value. 
 	The four parameter specification is \code{c(t1,t2,u1,u2)}
 	where the objective is that \code{sf(t1)=alpha*u1} and \code{sf(t2)=alpha*u2}.
@@ -100,24 +102,29 @@
 # now just give a=0 and b=1 as 3rd parameters for sfLogistic 
 lines(t, sfLogistic(1, t, c(0, 1))$spend, lty=3)
 
-# try a couple with unconventional shapes again using the xy form in the 3rd parameter
+# try a couple with unconventional shapes again using
+# the xy form in the 3rd parameter
 lines(t, sfLogistic(1, t, c(.4, .6, .1, .7))$spend, lty=4)
 lines(t, sfLogistic(1, t, c(.1, .7, .4, .6))$spend, lty=5)
 legend(x=c(.0, .475), y=c(.76, 1.03), lty=1:5, 
 legend=c("Fit (.1, 01) and (.4, .1)", "Fit (.01, .1) and (.1, .4)", 
-    "a=0,  b=1", "Fit (.4, .1) and (.6, .7)", "Fit (.1, .4) and (.7, .6)"))
+    "a=0,  b=1", "Fit (.4, .1) and (.6, .7)",
+     "Fit (.1, .4) and (.7, .6)"))
 
-# set up a function to plot comparsons of all 2-parameter spending functions
+# set up a function to plot comparisons of all
+# 2-parameter spending functions
 plotsf <- function(alpha, t, param)
 {   
-    plot(t, sfCauchy(alpha, t, param)$spend, xlab="Proportion of enrollment", 
+    plot(t, sfCauchy(alpha, t, param)$spend,
+    xlab="Proportion of enrollment", 
     ylab="Cumulative spending", type="l", lty=2)
     lines(t, sfExtremeValue(alpha, t, param)$spend, lty=5)
     lines(t, sfLogistic(alpha, t, param)$spend, lty=1)
     lines(t, sfNormal(alpha, t, param)$spend, lty=3)
     lines(t, sfExtremeValue2(alpha, t, param)$spend, lty=6, col=2)
     lines(t, sfBetaDist(alpha, t, param)$spend, lty=7, col=3)
-    legend(x=c(.05, .475), y=.025*c(.55, .9), lty=c(1, 2, 3, 5, 6, 7), 
+    legend(x=c(.05, .475), y=.025*c(.55, .9),
+             lty=c(1, 2, 3, 5, 6, 7), 
              col=c(1, 1, 1, 1, 2, 3), 
         legend=c("Logistic", "Cauchy", "Normal", "Extreme value", 
         "Extreme value 2", "Beta distribution"))

Modified: pkg/man/sfpoints.Rd
===================================================================
--- pkg/man/sfpoints.Rd	2009-05-06 15:49:22 UTC (rev 149)
+++ pkg/man/sfpoints.Rd	2009-05-12 13:37:42 UTC (rev 150)
@@ -37,10 +37,12 @@
 
 \examples{
 # example to specify spending on a pointwise basis
-x <- gsDesign(k=6, sfu=sfPoints, sfupar=c(.01, .05, .1, .25, .5, 1), test.type=2)
+x <- gsDesign(k=6, sfu=sfPoints, sfupar=c(.01, .05, .1, .25, .5, 1),
+              test.type=2)
 x
 
-# get proportion of upper spending under null hypothesis at each analysis
+# get proportion of upper spending under null hypothesis
+# at each analysis
 y <- x$upper$prob[, 1] / .025
 
 # change to cumulative proportion of spending
@@ -56,8 +58,8 @@
     ylab="Cumulative proportion of spending", 
     type="p")
 
-# following lines produce error when compiling package on some systems
-# approximate this with a t-distribution spending function by fitting 3 points
+# approximate this with a t-distribution spending function
+# by fitting 3 points
 tx <- 0:100/100
 lines(tx, sfTDist(1, tx, c(c(1, 3, 5)/6, .01, .1, .5))$spend)
 text(x=.6, y=.9, labels="Pointwise Spending Approximated by")

Modified: pkg/man/sfpower.Rd
===================================================================
--- pkg/man/sfpower.Rd	2009-05-06 15:49:22 UTC (rev 149)
+++ pkg/man/sfpower.Rd	2009-05-12 13:37:42 UTC (rev 150)
@@ -41,14 +41,16 @@
 }
 
 \examples{
-# design a 4-analysis trial using a Kim-DeMets spending function for both lower and upper bounds 
+# design a 4-analysis trial using a Kim-DeMets spending function 
+# for both lower and upper bounds 
 x <- gsDesign(k=4, sfu=sfPower, sfupar=3, sfl=sfPower, sflpar=1.5)
 
 # print the design
 x
 
 # plot the spending function using many points to obtain a smooth curve
-# show rho=3 for approximation to O'Brien-Fleming and rho=.75 for approximation to Pocock design.
+# show rho=3 for approximation to O'Brien-Fleming and rho=.75 for 
+# approximation to Pocock design.
 # Also show rho=2 for an intermediate spending.
 # Compare these to Hwang-Shih-DeCani spending with gamma=-4,  -2,  1
 t <- 0:100/100
@@ -61,9 +63,10 @@
 lines(t, sfHSD(0.025, t, 1)$spend, lty=3, col=2)
 lines(t, sfHSD(0.025, t, -2)$spend, lty=2, col=2)
 lines(t, sfHSD(0.025, t, -4)$spend, lty=1, col=2)
-legend(x=c(.0, .375), y=.025*c(.65, 1), lty=1:3, legend=c("rho= 3", "rho= 2", "rho= 0.75"))
+legend(x=c(.0, .375), y=.025*c(.65, 1), lty=1:3, 
+       legend=c("rho= 3", "rho= 2", "rho= 0.75"))
 legend(x=c(.0, .357), y=.025*c(.65, .85), lty=1:3, bty="n", col=2, 
-    legend=c("gamma= -4", "gamma= -2", "gamma=1"))
+       legend=c("gamma= -4", "gamma= -2", "gamma=1"))
 }
 \keyword{design}
 

Modified: pkg/man/spendingfunctions.Rd
===================================================================
--- pkg/man/spendingfunctions.Rd	2009-05-06 15:49:22 UTC (rev 149)
+++ pkg/man/spendingfunctions.Rd	2009-05-12 13:37:42 UTC (rev 150)
@@ -76,18 +76,20 @@
 # Example 2: advance example: writing a new spending function  
 # Most users may ignore this!
 
-# implementation of 2-parameter version of beta distribution spending function
+# implementation of 2-parameter version of
+# beta distribution spending function
 # assumes t and alpha are appropriately specified (does not check!) 
 sfbdist <- function(alpha,  t,  param)
 {  
    # check inputs
    checkVector(param, "numeric", c(0, Inf), c(FALSE, TRUE))
-   if (length(param) !=2) 
-       stop("b-dist example spending function parameter must be of length 2")
+   if (length(param) !=2) stop(
+   "b-dist example spending function parameter must be of length 2")
 
-   # set spending using cumulative beta distribution function and return
-   x <- list(name="B-dist example", param=param, parname=c("a", "b"), sf=sfbdist, 
-           spend=alpha * pbeta(t, param[1], param[2]), bound=NULL, prob=NULL)  
+   # set spending using cumulative beta distribution and return
+   x <- list(name="B-dist example", param=param, parname=c("a", "b"), 
+             sf=sfbdist, spend=alpha * 
+           pbeta(t, param[1], param[2]), bound=NULL, prob=NULL)  
            
    class(x) <- "spendfn"
    

Modified: pkg/man/testutils.Rd
===================================================================
--- pkg/man/testutils.Rd	2009-05-06 15:49:22 UTC (rev 149)
+++ pkg/man/testutils.Rd	2009-05-12 13:37:42 UTC (rev 150)
@@ -12,7 +12,8 @@
 isInteger(x)
 checkScalar(x, isType = "numeric", ...)
 checkVector(x, isType = "numeric", ..., length=NULL) 
-checkRange(x, interval = 0:1, inclusion = c(TRUE, TRUE), varname = deparse(substitute(x)), tol=0)
+checkRange(x, interval = 0:1, inclusion = c(TRUE, TRUE), 
+   varname = deparse(substitute(x)), tol=0)
 checkLengths(..., allowSingle=FALSE)
 }
 \arguments{
@@ -68,8 +69,8 @@
 x <- c(3, pi, exp(1))
 checkVector(x, "numeric", c(1, 10), c(TRUE, FALSE), length=3)
 
-# do the same but change the expected length
-try(checkVector(x, "numeric", c(1, 10), c(TRUE, FALSE), length=2)) # expect error
+# do the same but change the expected length; expect error
+try(checkVector(x, "numeric", c(1, 10), c(TRUE, FALSE), length=2))
 
 # create faux function to check input variable
 foo <- function(moo) checkVector(moo, "character")



More information about the Gsdesign-commits mailing list