[Zooimage-commits] r248 - in pkg/zooimage: . R inst/etc man
noreply at r-forge.r-project.org
Tue Dec 2 09:13:10 CET 2014
Author: phgrosjean
Date: 2014-12-02 09:13:10 +0100 (Tue, 02 Dec 2014)
New Revision: 248
Modified:
pkg/zooimage/DESCRIPTION
pkg/zooimage/NEWS
pkg/zooimage/R/correction.R
pkg/zooimage/R/import.R
pkg/zooimage/R/planktonSorter.R
pkg/zooimage/R/utilities.R
pkg/zooimage/inst/etc/ZooImageManual.pdf
pkg/zooimage/man/correctError.Rd
pkg/zooimage/man/import.Rd
pkg/zooimage/man/zooimage.package.Rd
Log:
Upgrade to version 5.1-0
Modified: pkg/zooimage/DESCRIPTION
===================================================================
--- pkg/zooimage/DESCRIPTION 2014-03-02 12:32:42 UTC (rev 247)
+++ pkg/zooimage/DESCRIPTION 2014-12-02 08:13:10 UTC (rev 248)
@@ -1,8 +1,8 @@
Package: zooimage
Type: Package
Title: Analysis of numerical zooplankton images
-Version: 4.0-0
-Date: 2014-02-23
+Version: 5.1-0
+Date: 2014-12-02
Author: Philippe Grosjean [aut, cre],
Kevin Denis [aut]
Authors at R: c(person("Philippe", "Grosjean", role = c("aut", "cre"),
Modified: pkg/zooimage/NEWS
===================================================================
--- pkg/zooimage/NEWS 2014-03-02 12:32:42 UTC (rev 247)
+++ pkg/zooimage/NEWS 2014-12-02 08:13:10 UTC (rev 248)
@@ -1,5 +1,37 @@
= zooimage News
+== Changes in zooimage 5.1-0
+
+* calcVars()/calcVarsVIS() and dropVars() are reworked to use only FIT_xxx
+ variables when the FlowCAM is used (and to calculate many derived vars
+ from them). This way, a second image analysis in ImageJ is no longer needed.
+
+
+== Changes in zooimage 5.0-0
+
+* importFlowCAM() and readFlowCAMlst() are reworked to create complete .zidb
+ files using all metadata from various versions of Fluid Imaging's Visual
+ Spreadsheet software.
+
+
+== Changes in zooimage 4.0-2
+
+* correctError() now has a mode argument allowing the analysis to be run in
+ 'demo' and 'stat' modes, in addition to the default 'validation' mode.
+
+* The internal errorCorrection() function did not initialize ntrusted and
+ nsuspect in 'demo' mode. Corrected.
+
+* New version of the user manual (explanations of the new functions).
+
+
+== Changes in zooimage 4.0-1
+
+* A bug (the confusion matrix was not initialized) prevented the use of
+ errorCorrection() in demo or stat mode. Corrected.
+
+
== Changes in zooimage 4.0-0
* Error correction functions added: correctError().
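A minimal sketch of the upgraded workflow these NEWS entries describe (paths and the classifier object are hypothetical; per the import.R changes below, importFlowCAM() writes a .zidb file named after the sample directory, next to it):

    ## Build a .zidb file directly from Visual Spreadsheet output (zooimage >= 5.0-0)
    importFlowCAM("Sample01/Sample01.lst", rgb.vigs = FALSE)
    ## Read the data back and open the default validation interface
    dat <- zidbDatRead("Sample01.zidb")
    correctError("Sample01.zidb", classifier = myClassifier, data = dat)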
Modified: pkg/zooimage/R/correction.R
===================================================================
--- pkg/zooimage/R/correction.R 2014-03-02 12:32:42 UTC (rev 247)
+++ pkg/zooimage/R/correction.R 2014-12-02 08:13:10 UTC (rev 248)
@@ -357,6 +357,10 @@
## data -- the dataset to study
if (missing(data) || !inherits(data, "ZIDat"))
stop("data must be a ZIdat object")
+## Temporary hack to eliminate possible unwanted columns!
+data$Id.1 <- NULL
+data$X.Item.1 <- NULL
+
## classifier -- the classifier to use to classify particles
if (missing(classifier) || !inherits(classifier, "ZIClass"))
stop("classifier must be a ZIClass object")
@@ -451,8 +455,7 @@
if (is.null(testdir))
testdir <<- file.path(tempdir(),
noExtension(zidb))
- if (file.exists(testdir)) {
-
+ if (file.exists(testdir)) {
res <- dlgMessage(paste("Temporary validation directory already",
"exists. Do we erase old data there?"), type = "okcancel")$res
if (res == "cancel")
@@ -462,6 +465,7 @@
dir.create(testdir, recursive = TRUE)
if (!file.exists(testdir))
stop("cannot create 'testdir'!")
+ testdir <<- normalizePath(testdir)
## Put required files there: create the planktonSorter directory
plSort <- file.path(testdir, "planktonSorter")
dir.create(plSort)
@@ -593,6 +597,8 @@
if (step < 1) {
## At first time, take a random subsample
## Same as considering everything as suspect
+#PhG nsuspect <<- nobs
+#PhG ntrusted <<- 0
sample.ids <- sample(1:nobs, size = sample.size)
corr$Step[sample.ids] <<- step
corr$RdValidated[sample.ids] <<- step
@@ -792,7 +798,8 @@
}
## Error in the different fractions
- if (mode != "validation") {
+#PhG if (mode != "validation") {
+ if (mode == "stat") {
error <- validated != corr$Predicted
errInFract <- .errorInFractions(suspect = corr$Suspect,
error = error, validated = corr$Validated,
@@ -890,8 +897,10 @@
step.manual <<- TRUE
} else if (testset.validated) {
step.manual <<- FALSE
- #getTest()
- #correct()
+if (mode == "stat") {
+ getTest()
+ correct()
+}
cat(paste("Step", step + 1, "finished \n"))
step <<- step + 1
} else warning("You have to complete the validation first \n")
@@ -915,9 +924,9 @@
processDemo <- function () {
if (sample.size > 0) {
+#PhG process()
+#PhG validate()
process()
- validate()
- process()
} else cat("Correction done!\n")
}
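The temporary hack near the top of this file's changes drops two columns that some Visual Spreadsheet exports add to the data. A sketch of the same clean-up applied by hand to a ZIDat object (path hypothetical):

    dat <- zidbDatRead("Sample01.zidb")
    dat$Id.1 <- NULL      ## duplicated identifier column, when present
    dat$X.Item.1 <- NULL  ## spurious item column, when present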
Modified: pkg/zooimage/R/import.R
===================================================================
--- pkg/zooimage/R/import.R 2014-03-02 12:32:42 UTC (rev 247)
+++ pkg/zooimage/R/import.R 2014-12-02 08:13:10 UTC (rev 248)
@@ -1,7 +1,22 @@
## ZooImage >= 3 importation routines
+## TODO:
+## - Import data with replicates as subdirs of one common dir
+## - Import grayscale data from a "grey" subdir of the common dir, or in a first
+## stage, do not use any subdir data that do not contain a .lst file (cf. data
+## are in subdirs of the "grey" dir)
+## - Warning: do not allow mixing, say, 10x and 4x in the same sample! => check this!
+## - Replicates are AR.B25.2014-05-19.300A4X.01, .02, .03, ... => correct label from there?
+## - Use jpeg format for non color vignettes + check the difference in weight and
+## speed of loading in R
+## - Calculate default concentration values, using $Fluid$TotalVolumeML assuming
+## no dilution of the sample... SubPart is TotalVolumeML, SubCell = 1, VolIni = 1
+## - Add a size scale to the vignettes
+## - Note: using jpeg instead of png: 10 sec instead of 14 sec, and 4.9 MB
+## instead of 14.7 MB; loading time for 25 vignettes is faster too.
#### Importation of FlowCAM data without image reanalysis ######################
## Read a FlowCAM .ctx file
+## TODO: add label everywhere in front of each table
readFlowCAMctx <- function (ctx, stop.it = TRUE)
{
## Check arguments
@@ -11,6 +26,10 @@
if (stop.it)
stop("'ctx' must be an existing (.ctx) file") else return(NULL)
+ ## Get the label from the directory containing the data
+ label <- basename(dirname(ctx))
+ if (label == ".") label <- basename(getwd())
+
## Read .ctx data
dat <- scan(ctx, what = character(), sep = "\t", skip = 0,
blank.lines.skip = TRUE, flush = TRUE, quiet = TRUE, comment.char = "")
@@ -39,10 +58,12 @@
## We need these keys that may not be present in old .ctx files
if (is.null(V$Fluid$TotalVolumeML)) {
## Volume calculation
+ cst <- V$Fluid$CalibConstant
+ if (is.null(cst)) cst <- V$Fluid$CalibrationConstant
Height <- (V$CaptureRegion$AcceptableBottom -
- V$CaptureRegion$AcceptableTop) * V$Fluid$CalibConstant
+ V$CaptureRegion$AcceptableTop) * cst
Width <- (V$CaptureRegion$AcceptableRight -
- V$CaptureRegion$AcceptableLeft) * V$Fluid$CalibConstant
+ V$CaptureRegion$AcceptableLeft) * cst
Area <- Height * Width
## Volume of one image
Volume <- (Area / (1e8)) * (V$Fluid$FlowCellDepth / 10000) # mL
@@ -72,6 +93,36 @@
if (length(secs) == 0) secs <- 0
V$RunTermination$MaxRunTime <- mins * 60 + secs
+ ## Possibly also read data from _notes.txt
+ notes <- sub("\\.ctx$", "_notes.txt", ctx)
+ if (file.exists(notes)) {
+ ## TODO: parse key=value items
+ notesData <- readLines(notes, warn = FALSE)
+ notesData <- paste(notesData, collapse = "\n")
+ } else notesData <- ""
+
+ ## TODO: check there is no Fraction, Process and Subsample entries yet!
+
+ ## Add Fraction data
+ V$Fraction <- data.frame(Code = "", Min = -1, Max = -1)
+
+ ## Add Process information
+ useESD <- V$CaptureParameters$UseESDForCapture
+ if (is.null(useESD)) useECD <- FALSE else useECD <- useESD != 1
+ V$Process <- data.frame(Version = "1.0-0", Method = "Direct VS import",
+ MinSize = as.numeric(V$CaptureParameters$MinESD)/1000, # In mm
+ MaxSize = as.numeric(V$CaptureParameters$MaxESD)/1000, # In mm
+ UseECD = useECD)
+
+ ## Add Subsample information
+ ## TODO: get this from _notes.txt... Here, assume using 10mL / 1L
+
+ V$Subsample <- data.frame(SubPart = 0.01, SubMethod = 1,
+ CellPart = 1, Replicates = 1, VolIni = 1, VolPrec = 0.1)
+
+ ## Add Label in front of each table
+ V <- lapply(V, function (x) cbind(data.frame(Label = label), x))
+
## Return the resulting list
V
}
@@ -125,7 +176,7 @@
cnames <- sub("Esd", "ESD", cnames)
cnames <- sub("FIT_Ch([1-9])_Width", "FIT_Ch\\1_TOF", cnames)
## We need to replace names by their zooimage equivalent
- cnames[cnames == "FIT_Id"] <- "Id" # The only one not starting woth FIT_
+ cnames[cnames == "FIT_Id"] <- "Id" # The only one not starting with FIT_
cnames[cnames == "FIT_ABD_Area"] <- "FIT_Area_ABD"
cnames[cnames == "FIT_ABD_Diameter"] <- "FIT_Diameter_ABD"
cnames[cnames == "FIT_ESD_Diameter"] <- "FIT_Diameter_ESD"
@@ -144,6 +195,9 @@
## Note: in comparison to old format, we have in addition:
#"FIT_Camera", "FIT_Fringe_Size", "FIT_Circle_Fit", "FIT_Ch1_Area",
#"FIT_Ch2_Area", "FIT_Ch3_Area"
+ #
+ # Plus "FIT_Symmetry", "FIT_Circularity_Hu", "FIT_Intensity_Calimage",
+ # "FIT_Raw_Convex_Hull_Area", "FIT_Raw_Filled_Area"
## Read the data in
tab <- read.table(lst, header = FALSE, sep = "|", dec = ".",
@@ -221,15 +275,38 @@
tab$FIT_Blue_Green_Ratio <- tab$FIT_Avg_Blue / tab$FIT_Avg_Green
tab$FIT_Red_Blue_Ratio <- tab$FIT_Avg_Red / tab$FIT_Avg_Blue
tab$FIT_Ch2_Ch1_Ratio <- tab$FIT_Ch2_Peak / tab$FIT_Ch1_Peak
-
- ## Try to extract metadata from .ctx file, if it exists
+
+ ## Need label
+ label <- basename(dirname(lst))
+ if (label == ".") label <- basename(getwd())
+
+ ## Try to extract metadata from .ctx file, if it exists
ctx <- sub("\\.lst$", ".ctx", lst)
if (read.ctx && file.exists(ctx)) {
ctxData <- readFlowCAMctx(ctx)
- ## TODO: return data in correct ZooImage format directly
- attr(tab, "FlowCAM.metadata") <- ctxData
- }
+ } else { # Use minimum default metadata
+ ctxData <- list(
+ Fraction = data.frame(Label = label, Code = "", Min = -1, Max = -1),
+ Process = data.frame(Label = label, Version = "1.0-0",
+ Method = "Direct VS import", MinSize = NA, MaxSize = NA, UseECD = NA),
+ Subsample = data.frame(Label = label, SubPart = 0.01, SubMethod = 1,
+ CellPart = 1, Replicates = 1, VolIni = 1, VolPrec = 0.1)
+ )
+ }
+ Sub <- ctxData$Subsample
+ ## Rework the table: rename Id to Item, prepend Label, Item and ECD,
+ ## and append Dil
+ n <- nrow(tab)
+ items <- tab$Id
+ tab$Id <- NULL
+ dil <- 1/(Sub$SubPart * Sub$CellPart * Sub$Replicates * Sub$VolIni)
+ tab <- cbind(data.frame(Label = rep(label, n), Item = items,
+ ECD = ecd(tab$FIT_Raw_Area)), tab, data.frame(Dil = rep(dil, n)))
+
+ ## Add metadata and change class of the object
+ attr(tab, "metadata") <- ctxData
+ class(tab) <- c("ZI3Dat", "ZIDat", "data.frame")
tab
}
@@ -240,44 +317,67 @@
#res1 <- readFlowCAMlst(lstFile1)
## Temporary name!
-importFlowCAM <- function (lst, rgb.vigs = TRUE)
+importFlowCAM <- function (lst, rgb.vigs = TRUE, type = "ZI3", replace = FALSE)
{
+ ## Check arguments
+ rgb.vigs <- isTRUE(as.logical(rgb.vigs))
+ if (type != "ZI3") {
+ warning("only 'ZI3' is currently supported for 'type'")
+ return(invisible(FALSE))
+ }
+
+ ## Read metadata
dat <- readFlowCAMlst(lst, skip = 2, read.ctx = TRUE)
## Check results
if (!is.data.frame(dat) && NROW(dat) < 1)
stop("Problem while importing FlowCAM data, or empty series")
- if (is.null(attr(dat, "FlowCAM.metadata")))
+ if (is.null(attr(dat, "metadata")))
stop("Problem while importing FlowCAM metadata from the .ctx file")
- ## Create metadata from FlowCAM.metadatata
- ## TODO...
+ ## Change dir to sample's parent directory
+ sampledir <- dirname(lst)
+ if (sampledir == ".") sampledir <- getwd()
+ label <- basename(sampledir)
+ parentdir <- dirname(sampledir)
+ #odir <- setwd(sampledir)
+ odir <- setwd(parentdir)
+ on.exit(setwd(odir))
- ## ImportVignettes
- #require(tiff)
- #require(png)
+ ## Compute the .zidb file name and check whether the file already exists
+ zidbfile <- paste(sampledir, "zidb", sep = ".")
+ if (!isTRUE(as.logical(replace)) && file.exists(zidbfile)) {
+ return(invisible(TRUE))
+ }
- ## List all tiff files in the directory (but exclude masks with _bin.tif)
- sampledir <- dirname(lst)
- odir <- setwd(sampledir)
- on.exit(setwd(odir))
+ ## Create the .zidb file
+ message("Creating the ZIDB file...")
+ filehashOption(defaultType = "DB1")
+ unlink(zidbfile)
+ dbCreate(zidbfile)
+ db <- dbInit(zidbfile)
+ dbInsert(db, ".ZI", 3)
+ if (isTRUE(rgb.vigs)) {
+ dbInsert(db, ".ImageType", "png")
+ } else {
+ dbInsert(db, ".ImageType", "jpeg")
+ }
+
+ ## Add vignettes to the .zidb file
+ message("Adding vignettes to ZIDB file...")
- ## Make sure zidbdir exists and is empty
- ## TODO: use a fresh dir, or erase existing one with user's acceptation
- zidbdir <- file.path(dirname(sampledir), "_import", basename(sampledir))
- if (file.exists(zidbdir) && dir(zidbdir) != 0)
- stop("The destination dir already exists and is not empty!")
- dir.create(zidbdir, recursive = TRUE, showWarnings = FALSE)
-
- tif <- dir(sampledir, pattern = "[0-9]\\.tif$", full.names = FALSE)
- ## Separate the list into collages and background calibration images
- isCal <- grepl("^.*cal_image_[0-9]+\\.tif$", tif)
- calFiles <- tif[isCal]
- colFiles <- tif[!isCal]
- ## Check we have at least one image for each set
- if (length(calFiles) == 0)
- stop("No background calibration image found")
- if (length(colFiles) == 0)
- stop("No collages found")
+# ## TODO: change this: do not use _import dir
+# zidbdir <- file.path(dirname(sampledir), "_import", basename(sampledir))
+# if (file.exists(zidbdir) && dir(zidbdir) != 0)
+# stop("The destination dir already exists and is not empty!")
+# dir.create(zidbdir, recursive = TRUE, showWarnings = FALSE)
+ tif <- dir(sampledir, pattern = "[0-9]\\.tif$", full.names = TRUE)
+ isCal <- grepl("^.*cal_image_[0-9]+\\.tif$", tif)
+ calFiles <- tif[isCal]
+ colFiles <- tif[!isCal]
+ if (length(calFiles) == 0)
+ stop("No background calibration image found")
+ if (length(colFiles) == 0)
+ stop("No collages found")
## Read all background calibration images into a list
cals <- list()
@@ -312,7 +412,7 @@
mat[coords[2]:coords[4], coords[1]:coords[3]]
## Determine best gray level for background after substraction
- gray <- attr(dat, "FlowCAM.metadata")$CaptureParameters$ThresholdLight
+ gray <- attr(dat, "metadata")$CaptureParameters$ThresholdLight
if (!length(gray)) {
warning("Unknown threshold gray level; using 40")
gray <- 40 # Target something like 40
@@ -327,7 +427,7 @@
## Do we need to load the next collage?
if (as.character(d$FIT_Filename) != colFile) {
filename <- as.character(d$FIT_Filename)
- collage <- readTIFF(source = filename)
+ collage <- readTIFF(source = file.path(sampledir, filename))
colFile <- d$FIT_Filename
colFiles <- colFiles[colFiles != filename]
## If the image is RGB, we got three dimensions to reduce to two
@@ -373,14 +473,59 @@
}
## Write this vignette
- vigFile <- file.path(zidbdir,
- sub("\\.tif$", paste0("_", i, ".png"), filename))
- writePNG(image = vig2, target = vigFile)
+# vigFile <- file.path(zidbdir,
+# sub("\\.tif$", paste0("_", i, ".png"), filename))
+# writePNG(image = vig2, target = vigFile)
+ #VigName <- sub("\\.tif$", paste0("_", i), filename)
+ VigName <- paste(label, i, sep = "_")
+
+ ## In case we use grayscale vignettes, use jpeg, otherwise, use png
+ if (isTRUE(rgb.vigs)) {
+ dbInsert(db, VigName, writePNG(image = vig2, target = raw()))
+ } else {
+ dbInsert(db, VigName, writeJPEG(image = vig2, target = raw(),
+ quality = 0.95))
+ }
}
## Create zidb
## TODO...
- dat
+ #dat
+ message("Adding data from ZIM files to ZIDB file...")
+# for (i in 1:length(Zims)) {
+# Zim <- Zims[i]
+# ZimName <- sub("\\.zim$", "", basename(Zim))
+# ZimSize <- file.info(Zim)$size
+# if (is.na(ZimSize)) {
+# warning("file '", Zim, "' not found or of null length")
+# return(invisible(FALSE))
+# }
+# dbInsert(db, ZimName, readBin(Zim, "raw", ZimSize + 100))
+# }
+
+ ## Adding metadata and particles' attributes to the .zidb file
+ ## TODO: SampleData should come from a DESCRIPTION.zis file???
+ ## Here, use a default format
+ smpdat <- data.frame(Label = label, Station = NA, Date = NA, Time = NA,
+ TimeZone = NA, Latitude = NA, Longitude = NA, CoordsPrec = NA,
+ Operator = NA, Note = NA) # TODO: add note from FlowCAM data!!!
+ class(smpdat) <- c("ZIDesc", "data.frame")
+
+ message("Adding sample data to ZIDB file...")
+ dbInsert(db, ".SampleData", smpdat)
+
+ message("Adding R data to ZIDB file...")
+# zidat <- file.path(zidir, paste0(basename(zidir), "_dat1.RData"))
+# obj <- load(zidat)
+# if (length(obj) != 1) {
+# warning("Error loading ", zidat)
+# return(invisible(FALSE))
+# }
+ dbInsert(db, ".Data", dat)
+# if (isTRUE(as.logical(delete.source)))
+# unlink(zidir, recursive = TRUE)
+ message("-- Done! --")
+ invisible(TRUE)
}
## Example
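Following the "## Example" marker above, a hedged sketch of the new import path implemented in this file (file names hypothetical; grayscale vignettes are stored as jpeg, color ones as png):

    ## replace = TRUE overwrites an existing .zidb file
    importFlowCAM("Sample01/Sample01.lst", rgb.vigs = FALSE, type = "ZI3",
        replace = TRUE)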
Modified: pkg/zooimage/R/planktonSorter.R
===================================================================
--- pkg/zooimage/R/planktonSorter.R 2014-03-02 12:32:42 UTC (rev 247)
+++ pkg/zooimage/R/planktonSorter.R 2014-12-02 08:13:10 UTC (rev 248)
@@ -205,7 +205,7 @@
planktonSorterValidate <- function (path, query, body, ...) {
if (!length(body)) return()
-
+
## Special cases "iterate>>> " or "done>>> "
if (substring(body, 1, 11) == "iterate>>> ") {
res <- substring(body, 12)
@@ -282,7 +282,7 @@
#</body>
#</html>', url, url)
-correctError <- function(zidb, classifier, data = zidbDatRead(zidb),
+correctError <- function(zidb, classifier, data = zidbDatRead(zidb), mode = "validation",
fraction = 0.05, sample.min = 100, grp.min = 2, random.sample = 0.1,
algorithm = "rf", diff.max = 0.2, prop.bio = NULL, reset = TRUE,
result = NULL) {
@@ -333,13 +333,16 @@
}
## Create this object in TempEnv()
- ec <- errorCorrection (data, classifier, zidb = zidb, mode = "validation",
+ ec <- errorCorrection (data, classifier, zidb = zidb, mode = mode,
fraction = fraction, sample.min = sample.min, grp.min = grp.min,
random.sample = random.sample, algorithm = algorithm,
diff.max = diff.max, prop.bio = prop.bio, testdir = testdir, id = Name,
result = result, envir = parent.frame())
- assignTemp(Name, ec)
+ if (mode != "stat") assignTemp(Name, ec)
## Start its first iteration...
ec$iterate()
+
+ ## Return the object
+ ec
}
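Since correctError() now returns the errorCorrection object, and no longer assigns it in TempEnv() when mode = "stat", a stat-mode run should keep the returned value; a sketch with hypothetical objects:

    ## Run all steps without the user interface and keep the result
    ec <- correctError("Sample01.zidb", classifier = myClassifier, mode = "stat")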
Modified: pkg/zooimage/R/utilities.R
===================================================================
--- pkg/zooimage/R/utilities.R 2014-03-02 12:32:42 UTC (rev 247)
+++ pkg/zooimage/R/utilities.R 2014-12-02 08:13:10 UTC (rev 248)
@@ -107,7 +107,7 @@
"FIT_Avg_Blue", "FIT_PPC", "FIT_Ch1_Peak", "FIT_Ch1_TOF",
"FIT_Ch2_Peak", "FIT_Ch2_TOF", "FIT_Ch3_Peak", "FIT_Ch3_TOF",
"FIT_SaveX", "FIT_SaveY", "FIT_PixelW", "FIT_PixelH",
- "FIT_CaptureX", "FIT_CaptureY", "FIT_Edge_Gradient",
+ "FIT_CaptureX", "FIT_CaptureY", # Keep this one?"FIT_Edge_Gradient",
"FIT_Source_Image", "FIT_Calibration_Image", "FIT_High_U32",
"FIT_Low_U32", "FIT_Total", "FIT_Red_Green_Ratio",
"FIT_Blue_Green_Ratio", "FIT_Red_Blue_Ratio",
@@ -116,7 +116,21 @@
"FIT_Ch1_Area", "FIT_Ch2_Area", "FIT_Ch3_Area",
"FIT_TimeStamp1", "FIT_Source_Image.1",
"X.Item.1", "FeretAngle", "Count",
- "Skew", "Kurt", "Solidity")) # Last 3: NAs with multiple ROIs
+ "Skew", "Kurt", "Solidity", # Last 3: NAs with multiple ROIs
+
+ ## Added in zooimage v.5:
+ "FIT_Filename", "FIT_Feret_Min_Angle", "FIT_Feret_Max_Angle",
+
+ ## This is somehow redundant with other variables
+ "FIT_Raw_Area", "FIT_Raw_Perim", "FIT_Raw_Convex_Perim",
+ "FIT_Raw_Feret_Max", "FIT_Raw_Feret_Min", "FIT_Raw_Feret_Mean",
+ "FIT_Diameter_ABD", # This one is indeed ECD
+
+ ## Found in format 17 of a color FlowCAM (from KAUST)
+ ## and not used yet
+ "FIT_Symmetry", "FIT_Circularity_Hu", "FIT_Intensity_Calimage",
+ "FIT_Raw_Convex_Hull_Area", "FIT_Raw_Filled_Area"
+ ))
as.character(res)
}
@@ -127,8 +141,105 @@
## This is the calculation of derived variables
## Note that you can make your own version of this function for more
## calculated variables!
+
+## calcVarsVIS() also included here to keep track of it in the ZIClass object!
+## Calculate derived variables... FlowCAM's Visual Spreadsheet
+calcVarsVIS <- function (x, drop.vars = NULL, drop.vars.def = dropVars())
+{
+ ## Use only FIT_xxx vars, and derived attributes (26 attributes in total):
+ ## ECD, FIT_Area_ABD, FIT_Length, FIT_Width, FIT_Diameter_ESD,
+ ## FIT_Perimeter, FIT_Convex_Perimeter, FIT_Intensity, FIT_Sigma_Intensity,
+ ## FIT_Compactness, FIT_Elongation, FIT_Sum_Intensity, FIT_Roughness,
+ ## FIT_Edge_Gradient, FIT_Volume_ABD, FIT_Volume_ESD, FIT_Aspect_Ratio,
+ ## FIT_Transparency, EdgeRange, CV, MeanFDia, Transp2, FeretRoundness,
+ ## EdgeCV, EdgeSDNorm & Perim_Ratio
## A small hack to correct some 0 (which can be problematic in further calcs)
+ noZero <- function(x) {
+ x[x == 0] <- 1e-09
+ x
+ }
+
+ ## Euclidean distance between two points
+ distance <- function (x, y)
+ sqrt(x^2 + y^2)
+
+ ## All FIT_Raw_xxx vars have their counterpart resized in um:
+ ## FIT_Raw_Area -> FIT_Diameter_ABD
+ ## FIT_Raw_Feret_Max -> FIT_Length
+ ## FIT_Raw_Feret_Min -> FIT_Width
+ ## FIT_Raw_Feret_Mean -> FIT_Diameter_ESD
+ ## FIT_Raw_Perim -> FIT_Perimeter
+ ## FIT_Raw_Convex_Perim -> FIT_Convex_Perimeter
+ ## (=> all FIT_Raw_xxx should be eliminated in dropVars()!)
+
+ ## (re)calculate ECD from FIT_Diameter_ABD (was once calculated from FIT_Raw_Area)
+ x$ECD <- noZero(x$FIT_Diameter_ABD)
+ x$FIT_Area_ABD <- noZero(x$FIT_Area_ABD)
+ x$FIT_Length <- noZero(x$FIT_Length)
+ x$FIT_Width <- noZero(x$FIT_Width)
+ x$FIT_Diameter_ESD <- noZero(x$FIT_Diameter_ESD)
+ x$FIT_Perimeter <- noZero(x$FIT_Perimeter)
+ x$FIT_Convex_Perimeter <- noZero(x$FIT_Convex_Perimeter)
+ x$FIT_Intensity <- noZero(x$FIT_Intensity)
+ x$FIT_Sigma_Intensity <- noZero(x$FIT_Sigma_Intensity)
+ x$FIT_Sum_Intensity <- noZero(x$FIT_Sum_Intensity)
+ x$FIT_Compactness <- noZero(x$FIT_Compactness)
+ x$FIT_Elongation <- noZero(x$FIT_Elongation)
+ x$FIT_Roughness <- noZero(x$FIT_Roughness)
+ x$FIT_Aspect_Ratio <- noZero(x$FIT_Aspect_Ratio)
+ x$FIT_Volume_ABD <- noZero(x$FIT_Volume_ABD)
+ x$FIT_Volume_ESD <- noZero(x$FIT_Volume_ESD)
+ x$FIT_Transparency <- noZero(x$FIT_Transparency)
+ x$FIT_Edge_Gradient <- noZero(x$FIT_Edge_Gradient)
+
+
+ ## Additional calculated variables
+ # This is FIT_Aspect_Ratio! x$ARFeret <- x$FIT_Width/x$FIT_Length
+ x$EdgeRange <- abs(x$FIT_Intensity - x$FIT_Edge_Gradient)
+ x$CV <- x$FIT_Sigma_Intensity/x$FIT_Intensity * 100
+ x$MeanFDia <- (x$FIT_Length + x$FIT_Width) / 2
+ x$Transp2 <- 1 - (x$FIT_Diameter_ABD/x$MeanFDia)
+ x$Transp2[x$Transp2 < 0] <- 0
+ x$FeretRoundness <- 4 * x$FIT_Area_ABD/(pi * sqrt(x$FIT_Length))
+ x$Circ. <- 4 * pi * x$FIT_Area_ABD / sqrt(x$FIT_Perimeter) # ImageJ calculation
+ x$EdgeCV <- x$FIT_Sigma_Intensity/x$FIT_Edge_Gradient * 100
+ x$EdgeSDNorm <- x$FIT_Intensity/x$EdgeRange
+ x$Perim_Ratio <- x$FIT_Convex_Perimeter / x$FIT_Perimeter
+
+ ## Eliminate variables that are not predictors... and use Id as rownames
+ Id <- x$Id
+ if (length(Id)) rownames(x) <- Id
+
+ ## Variables to drop
+ ## For those samples treated with FIT_VIS in ImageJ, we need to get rid of
+ ## the ImageJ variables
+ x$Area <- NULL
+ x$Mean <- NULL
+ x$StdDev <- NULL
+ x$Mode <- NULL
+ x$Min <- NULL
+ x$Max <- NULL
+ x$Perim. <- NULL
+ x$Major <- NULL
+ x$Minor <- NULL
+ x$Circ. <- NULL
+ x$Feret <- NULL
+ x$IntDen <- NULL
+ x$Median <- NULL
+
+ dropAll <- unique(as.character(c(drop.vars, drop.vars.def)))
+ for (dropVar in dropAll) x[[dropVar]] <- NULL
+
+ ## Return the recalculated data frame
+ x
+}
+
+ ## For data from the FlowCAM, we use a specific function
+ if (any(names(x) == "FIT_Length"))
+ return(calcVarsVIS(x, drop.vars = drop.vars, drop.vars.def = drop.vars.def))
+
+ ## A small hack to correct some 0 (which can be problematic in further calcs)
noZero <- function (x) {
x[x == 0] <- 0.000000001
x
@@ -184,7 +295,15 @@
## Calculate derived variables... FlowCAM's Visual Spreadsheet
calcVarsVIS <- function (x, drop.vars = NULL, drop.vars.def = dropVars())
{
- ## A small hack to correct some 0 (which can be problematic in further calcs)
+ ## Use only FIT_xxx vars, and derived attributes (26 attributes in total):
+ ## ECD, FIT_Area_ABD, FIT_Length, FIT_Width, FIT_Diameter_ESD,
+ ## FIT_Perimeter, FIT_Convex_Perimeter, FIT_Intensity, FIT_Sigma_Intensity,
+ ## FIT_Compactness, FIT_Elongation, FIT_Sum_Intensity, FIT_Roughness,
+ ## FIT_Edge_Gradient, FIT_Volume_ABD, FIT_Volume_ESD, FIT_Aspect_Ratio,
+ ## FIT_Transparency, EdgeRange, CV, MeanFDia, Transp2, FeretRoundness,
+ ## EdgeCV, EdgeSDNorm & Perim_Ratio
+
+ ## A small hack to correct some 0 (which can be problematic in further calcs)
noZero <- function(x) {
x[x == 0] <- 1e-09
x
@@ -194,11 +313,38 @@
distance <- function (x, y)
sqrt(x^2 + y^2)
- x$FIT_Area_ABD <- noZero(x$FIT_Area_ABD)
- x$FIT_Perimeter <- noZero(x$FIT_Perimeter)
+ ## All FIT_Raw_xxx vars have their counterpart resized in um:
+ ## FIT_Raw_Area -> FIT_Diameter_ABD
+ ## FIT_Raw_Feret_Max -> FIT_Length
+ ## FIT_Raw_Feret_Min -> FIT_Width
+ ## FIT_Raw_Feret_Mean -> FIT_Diameter_ESD
+ ## FIT_Raw_Perim -> FIT_Perimeter
+ ## FIT_Raw_Convex_Perim -> FIT_Convex_Perimeter
+ ## (=> all FIT_Raw_xxx should be eliminated in dropVars()!)
+
+ ## (re)calculate ECD from FIT_Diameter_ABD (was once calculated from FIT_Raw_Area)
+ x$ECD <- noZero(x$FIT_Diameter_ABD)
+ x$FIT_Area_ABD <- noZero(x$FIT_Area_ABD)
x$FIT_Length <- noZero(x$FIT_Length)
x$FIT_Width <- noZero(x$FIT_Width)
- x$ARFeret <- x$FIT_Width/x$FIT_Length
+ x$FIT_Diameter_ESD <- noZero(x$FIT_Diameter_ESD)
+ x$FIT_Perimeter <- noZero(x$FIT_Perimeter)
+ x$FIT_Convex_Perimeter <- noZero(x$FIT_Convex_Perimeter)
+ x$FIT_Intensity <- noZero(x$FIT_Intensity)
+ x$FIT_Sigma_Intensity <- noZero(x$FIT_Sigma_Intensity)
+ x$FIT_Sum_Intensity <- noZero(x$FIT_Sum_Intensity)
+ x$FIT_Compactness <- noZero(x$FIT_Compactness)
+ x$FIT_Elongation <- noZero(x$FIT_Elongation)
+ x$FIT_Roughness <- noZero(x$FIT_Roughness)
+ x$FIT_Aspect_Ratio <- noZero(x$FIT_Aspect_Ratio)
+ x$FIT_Volume_ABD <- noZero(x$FIT_Volume_ABD)
+ x$FIT_Volume_ESD <- noZero(x$FIT_Volume_ESD)
+ x$FIT_Transparency <- noZero(x$FIT_Transparency)
+ x$FIT_Edge_Gradient <- noZero(x$FIT_Edge_Gradient)
+
+
+ ## Additional calculated variables
+ # This is FIT_Aspect_Ratio! x$ARFeret <- x$FIT_Width/x$FIT_Length
x$EdgeRange <- abs(x$FIT_Intensity - x$FIT_Edge_Gradient)
x$CV <- x$FIT_Sigma_Intensity/x$FIT_Intensity * 100
x$MeanFDia <- (x$FIT_Length + x$FIT_Width) / 2
@@ -215,6 +361,22 @@
if (length(Id)) rownames(x) <- Id
## Variables to drop
+ ## For those samples treated with FIT_VIS in ImageJ, we need to get rid of
+ ## the ImageJ variables
+ x$Area <- NULL
+ x$Mean <- NULL
+ x$StdDev <- NULL
+ x$Mode <- NULL
+ x$Min <- NULL
+ x$Max <- NULL
+ x$Perim. <- NULL
+ x$Major <- NULL
+ x$Minor <- NULL
+ x$Circ. <- NULL
+ x$Feret <- NULL
+ x$IntDen <- NULL
+ x$Median <- NULL
+
dropAll <- unique(as.character(c(drop.vars, drop.vars.def)))
for (dropVar in dropAll) x[[dropVar]] <- NULL
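With these changes, calcVars() routes FlowCAM data (detected by the presence of FIT_Length) to calcVarsVIS(), which now works from FIT_xxx variables alone; a sketch (path hypothetical):

    dat <- readFlowCAMlst("Sample01/Sample01.lst")
    vars <- calcVarsVIS(dat)  ## drops dropVars() defaults, adds derived vars
    head(vars$EdgeRange)      ## one of the derived variables computed above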
Modified: pkg/zooimage/inst/etc/ZooImageManual.pdf
===================================================================
(Binary files differ)
Modified: pkg/zooimage/man/correctError.Rd
===================================================================
--- pkg/zooimage/man/correctError.Rd 2014-03-02 12:32:42 UTC (rev 247)
+++ pkg/zooimage/man/correctError.Rd 2014-12-02 08:13:10 UTC (rev 248)
@@ -6,7 +6,7 @@
Open a web page for manual validation and error correction of predicted abundances in samples.
}
\usage{
-correctError(zidb, classifier, data = zidbDatRead(zidb),
+correctError(zidb, classifier, data = zidbDatRead(zidb), mode = "validation",
fraction = 0.05, sample.min = 100, grp.min = 2, random.sample = 0.1,
algorithm = "rf", diff.max = 0.2, prop.bio = NULL, reset = TRUE,
result = NULL)
@@ -16,6 +16,12 @@
\item{zidb}{ Path to a Zidb file. }
\item{classifier}{ A ZIClass object appropriate for this sample and the desired classification. }
\item{data}{ A ZIDat or a ZITest object matching that sample (by default, it is the ZIDat object contained in the zidb file). }
+ \item{mode}{ The mode to use for error correction. By default, \code{mode = "validation"},
+ where particles are manually validated. \code{mode = "demo"} behaves the same, but it sorts
+ particles according to the Class variable in data, ignoring changes made in the user interface
+ (so that one can explain the logic of the process without worrying about how particles are
+ manually resorted). Finally, \code{mode = "stat"} does not display the user interface at all
+ and calculates all steps directly to show the gain from the process from 0 to 100\% of the
+ particles validated. }
\item{fraction}{ The fraction of items to validate at each step (1/20th by default). }
\item{sample.min}{ Minimal number of items to take at each step. }
\item{grp.min}{ Minimal number of items to take for each group, on average. }
Modified: pkg/zooimage/man/import.Rd
===================================================================
--- pkg/zooimage/man/import.Rd 2014-03-02 12:32:42 UTC (rev 247)
+++ pkg/zooimage/man/import.Rd 2014-12-02 08:13:10 UTC (rev 248)
@@ -12,7 +12,7 @@
\usage{
readFlowCAMctx(ctx, stop.it = TRUE)
readFlowCAMlst(lst, skip = 2, read.ctx = TRUE)
-importFlowCAM(lst, rgb.vigs = TRUE)
+importFlowCAM(lst, rgb.vigs = TRUE, type = "ZI3", replace = FALSE)
}
\arguments{
@@ -26,6 +26,10 @@
\item{read.ctx}{ should we also read the .ctx file with \code{readFlowCAMctx()}? }
\item{rgb.vigs}{ do we build color vignettes that mix OD, visual and mask in the
three RGB channels? }
+ \item{type}{ the type of \code{.zidb} file to create. Currently, only
+ \code{type = "ZI3"} (the default value) is supported. }
+ \item{replace}{ a boolean indicating whether an existing \code{.zidb} file
+ should be replaced by a new one. }
}
\value{
Modified: pkg/zooimage/man/zooimage.package.Rd
===================================================================
--- pkg/zooimage/man/zooimage.package.Rd 2014-03-02 12:32:42 UTC (rev 247)
+++ pkg/zooimage/man/zooimage.package.Rd 2014-12-02 08:13:10 UTC (rev 248)
@@ -16,8 +16,8 @@
\tabular{ll}{
Package: \tab zooimage\cr
Type: \tab Package\cr
- Version: \tab 4.0-0\cr
- Date: \tab 2014-02-25\cr
+ Version: \tab 5.1-0\cr
+ Date: \tab 2014-12-02\cr
License: \tab GPL 2 or above at your convenience.\cr
}
Everytime you publish results that use ZooImage, you must place a reference