From noreply at r-forge.r-project.org Tue Dec 2 09:13:10 2014 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Tue, 2 Dec 2014 09:13:10 +0100 (CET) Subject: [Zooimage-commits] r248 - in pkg/zooimage: . R inst/etc man Message-ID: <20141202081310.E6EA3183D55@r-forge.r-project.org> Author: phgrosjean Date: 2014-12-02 09:13:10 +0100 (Tue, 02 Dec 2014) New Revision: 248 Modified: pkg/zooimage/DESCRIPTION pkg/zooimage/NEWS pkg/zooimage/R/correction.R pkg/zooimage/R/import.R pkg/zooimage/R/planktonSorter.R pkg/zooimage/R/utilities.R pkg/zooimage/inst/etc/ZooImageManual.pdf pkg/zooimage/man/correctError.Rd pkg/zooimage/man/import.Rd pkg/zooimage/man/zooimage.package.Rd Log: Upgrade to version 5.1.0 Modified: pkg/zooimage/DESCRIPTION =================================================================== --- pkg/zooimage/DESCRIPTION 2014-03-02 12:32:42 UTC (rev 247) +++ pkg/zooimage/DESCRIPTION 2014-12-02 08:13:10 UTC (rev 248) @@ -1,8 +1,8 @@ Package: zooimage Type: Package Title: Analysis of numerical zooplankton images -Version: 4.0-0 -Date: 2014-02-23 +Version: 5.1-0 +Date: 2014-12-02 Author: Philippe Grosjean [aut, cre], Kevin Denis [aut] Authors at R: c(person("Philippe", "Grosjean", role = c("aut", "cre"), Modified: pkg/zooimage/NEWS =================================================================== --- pkg/zooimage/NEWS 2014-03-02 12:32:42 UTC (rev 247) +++ pkg/zooimage/NEWS 2014-12-02 08:13:10 UTC (rev 248) @@ -1,5 +1,37 @@ = zooimage News +== Changes in zooimage 5.1-0 + +* calcVars()/calcVarsVIS() and dropVars() are reworked to used only FIT_xxx + variables in case of use of the FlowCAM (and to calculated many derived vars + from there). This way, there is no need any more of a second image analysis + in ImageJ. + + +== Changes in zooimage 5.0-0 + +* importFlowCAM() and readFlowCAMlst() are reworked to create complete .zidb + files using all metadata from various version of Fluid Imaging's Visual + Spreadsheet software. + + +== Changes in zooimage 4.0-2 + +* correctError() has now a mode argument allowing to run the analysis in 'demo' + and 'stat' mode, in addition to the default 'validation' mode + +* The internal errorCorrection() function did not intialized ntrusted and + nsuspect in 'demo' mode + +* New version of the user manual (explanations of the new functions). + + +== Changes in zooimage 4.0-1 + +* A bug (non initialisation of the confusion matrix) prevented to use + errorCorrection() in demo or stat mode. Corrected. + + == Changes in zooimage 4.0-0 * Error correction functions added: correctError(). Modified: pkg/zooimage/R/correction.R =================================================================== --- pkg/zooimage/R/correction.R 2014-03-02 12:32:42 UTC (rev 247) +++ pkg/zooimage/R/correction.R 2014-12-02 08:13:10 UTC (rev 248) @@ -357,6 +357,10 @@ ## data -- the dataset to study if (missing(data) || !inherits(data, "ZIDat")) stop("data must be a ZIdat object") +## Temporary hack to eliminate possible unwanted columns! +data$Id.1 <- NULL +data$X.Item.1 <- NULL + ## classifier -- the classifier to use to classify particles if (missing(classifier) || !inherits(classifier, "ZIClass")) stop("classifier must be a ZIClass object") @@ -451,8 +455,7 @@ if (is.null(testdir)) testdir <<- file.path(tempdir(), noExtension(zidb)) - if (file.exists(testdir)) { - + if (file.exists(testdir)) { res <- dlgMessage(paste("Temporary validation directory already", "exists. 
Do we erase old data there?"), type = "okcancel")$res if (res == "cancel") @@ -462,6 +465,7 @@ dir.create(testdir, recursive = TRUE) if (!file.exists(testdir)) stop("cannot create 'testdir'!") + testdir <<- normalizePath(testdir) ## Put required files there: create the planktonSorter directory plSort <- file.path(testdir, "planktonSorter") dir.create(plSort) @@ -593,6 +597,8 @@ if (step < 1) { ## At first time, take a random subsample ## Same as considering everything as suspect +#PhG nsuspect <<- nobs +#PhG ntrusted <<- 0 sample.ids <- sample(1:nobs, size = sample.size) corr$Step[sample.ids] <<- step corr$RdValidated[sample.ids] <<- step @@ -792,7 +798,8 @@ } ## Error in the different fractions - if (mode != "validation") { +#PhG if (mode != "validation") { + if (mode == "stat") { error <- validated != corr$Predicted errInFract <- .errorInFractions(suspect = corr$Suspect, error = error, validated = corr$Validated, @@ -890,8 +897,10 @@ step.manual <<- TRUE } else if (testset.validated) { step.manual <<- FALSE - #getTest() - #correct() +if (mode == "stat") { + getTest() + correct() +} cat(paste("Step", step + 1, "finished \n")) step <<- step + 1 } else warning("You have to complete the validation first \n") @@ -915,9 +924,9 @@ processDemo <- function () { if (sample.size > 0) { +#PhG process() +#PhG validate() process() - validate() - process() } else cat("Correction done!\n") } Modified: pkg/zooimage/R/import.R =================================================================== --- pkg/zooimage/R/import.R 2014-03-02 12:32:42 UTC (rev 247) +++ pkg/zooimage/R/import.R 2014-12-02 08:13:10 UTC (rev 248) @@ -1,7 +1,22 @@ ## ZooImage >= 3 importation routines +## TODO: +## - Import data with replicates as subdirs of one common dir +## - Import grayscale data from a "grey" subdir of common dir, or in a first stage, +## do not use any subdir data that do not contain a .lst file (cf, data are +## in subdirs of "grey" dir) +## - Warning: do not allow to mix, say 10x and 4x in the same sample! => check this! +## - Replicates are AR.B25.2014-05-19.300A4X.01, .02, .03, ... => correct label from there? +## - Use jpeg format for non color vignettes + check the difference in weight and +## speed of loading in R +## - Calculate default concentration values, using $Fluid$TotalVolumeML assuming +## no dilution of the sample... SubPart is TotalVolumeML, SubCell = 1, VolIni = 1 +## - rajouter ?chelle de taille dans les vignettes +## - Note: using jpeg instead of png: 10sec instead of 14sec, and 4.9Mb instead of 14.7Mb +## loading time for 25 vignettes faster too. 
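The jpeg-versus-png figures quoted in the note above are easy to re-check on a given sample; the snippet below is only a rough sketch (the vigs list of vignette matrices is a placeholder for vignettes already loaded in memory, and the 0.95 quality mirrors the setting used further down in importFlowCAM()):

library(jpeg); library(png)
## Total size of all vignettes once encoded in memory with each format (bytes)
sizeJpeg <- sum(sapply(vigs, function (v)
    length(writeJPEG(v, target = raw(), quality = 0.95))))
sizePng <- sum(sapply(vigs, function (v)
    length(writePNG(v, target = raw()))))
round(c(jpeg = sizeJpeg, png = sizePng) / 1024^2, 1)  # totals in Mb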
#### Importation of FlowCAM data without image reanalysis ###################### ## Read a FlowCAM .ctx file +## TODO: add label everywhere in front of each table readFlowCAMctx <- function (ctx, stop.it = TRUE) { ## Check arguments @@ -11,6 +26,10 @@ if (stop.it) stop("'ctx' must be an existing (.ctx) file") else return(NULL) + ## Get the label from the directory containing the data + label <- basename(dirname(ctx)) + if (label == ".") label <- basename(getwd()) + ## Read .ctx data dat <- scan(ctx, what = character(), sep = "\t", skip = 0, blank.lines.skip = TRUE, flush = TRUE, quiet = TRUE, comment.char = "") @@ -39,10 +58,12 @@ ## We need these keys that may not be present in old .ctx files if (is.null(V$Fluid$TotalVolumeML)) { ## Volume calculation + cst <- V$Fluid$CalibConstant + if (is.null(cst)) cst <- V$Fluid$CalibrationConstant Height <- (V$CaptureRegion$AcceptableBottom - - V$CaptureRegion$AcceptableTop) * V$Fluid$CalibConstant + V$CaptureRegion$AcceptableTop) * cst Width <- (V$CaptureRegion$AcceptableRight - - V$CaptureRegion$AcceptableLeft) * V$Fluid$CalibConstant + V$CaptureRegion$AcceptableLeft) * cst Area <- Height * Width ## Volume of one image Volume <- (Area / (1e8)) * (V$Fluid$FlowCellDepth / 10000) # mL @@ -72,6 +93,36 @@ if (length(secs) == 0) secs <- 0 V$RunTermination$MaxRunTime <- mins * 60 + secs + ## Possibly read also data from _notes.txt + notes <- sub("\\.ctx$", "_notes.txt", ctx) + if (file.exists(notes)) { + ## TODO: parse key=value items + notesData <- readLines(notes, warn = FALSE) + notesData <- paste(notesData, collapse = "\n") + } else noteData <- "" + + ## TODO: check there is no Fraction, Process and Subsample entries yet! + + ## Add Fraction data + V$Fraction <- data.frame(Code = "", Min = -1, Max = -1) + + ## Add Process information + useESD <- V$CaptureParameters$UseESDForCapture + if (is.null(useESD)) useECD <- FALSE else useECD <- useESD != 1 + V$Process <- data.frame(Version = "1.0-0", Method = "Direct VS import", + MinSize = as.numeric(V$CaptureParameters$MinESD)/1000, # In mm + MaxSize = as.numeric(V$CaptureParameters$MaxESD)/1000, # In mm + UseECD = useECD) + + ## Add Subsample information + ## TODO: get this from _notes.txt... 
Here, assume using 10mL / 1L + + V$Subsample <- data.frame(SubPart = 0.01, SubMethod = 1, + CellPart = 1, Replicates = 1, VolIni = 1, VolPrec = 0.1) + + ## Add Label in front of each table + V <- lapply(V, function (x) cbind(data.frame(Label = label), x)) + ## Return the resulting list V } @@ -125,7 +176,7 @@ cnames <- sub("Esd", "ESD", cnames) cnames <- sub("FIT_Ch([1-9])_Width", "FIT_Ch\\1_TOF", cnames) ## We need to replace names by their zooimage equivalent - cnames[cnames == "FIT_Id"] <- "Id" # The only one not starting woth FIT_ + cnames[cnames == "FIT_Id"] <- "Id" # The only one not starting with FIT_ cnames[cnames == "FIT_ABD_Area"] <- "FIT_Area_ABD" cnames[cnames == "FIT_ABD_Diameter"] <- "FIT_Diameter_ABD" cnames[cnames == "FIT_ESD_Diameter"] <- "FIT_Diameter_ESD" @@ -144,6 +195,9 @@ ## Note: in comparison to old format, we have in addition: #"FIT_Camera", "FIT_Fringe_Size", "FIT_Circle_Fit", "FIT_Ch1_Area", #"FIT_Ch2_Area", "FIT_Ch3_Area" + # + # Plus "FIT_Symmetry", "FIT_Circularity_Hu", "FIT_Intensity_Calimage", + # "FIT_Raw_Convex_Hull_Area", "FIT_Raw_Filled_Area" ## Read the data in tab <- read.table(lst, header = FALSE, sep = "|", dec = ".", @@ -221,15 +275,38 @@ tab$FIT_Blue_Green_Ratio <- tab$FIT_Avg_Blue / tab$FIT_Avg_Green tab$FIT_Red_Blue_Ratio <- tab$FIT_Avg_Red / tab$FIT_Avg_Blue tab$FIT_Ch2_Ch1_Ratio <- tab$FIT_Ch2_Peak / tab$FIT_Ch1_Peak - - ## Try to extract metadata from .ctx file, if it exists + + ## Need label + label <- basename(dirname(lst)) + if (label == ".") label <- basename(getwd()) + + ## Try to extract metadata from .ctx file, if it exists ctx <- sub("\\.lst$", ".ctx", lst) if (read.ctx && file.exists(ctx)) { ctxData <- readFlowCAMctx(ctx) - ## TODO: return data in correct ZooImage format directly - attr(tab, "FlowCAM.metadata") <- ctxData - } + } else { # Use minimum default metadata + ctxData <- list( + Fraction = data.frame(Label = label, Code = "", Min = -1, Max = -1), + Process = data.frame(Label = label, Version = "1.0-0", + Method = "Direct VS import", MinSize = NA, MaxSize = NA, UseECD = NA), + Subsample = data.frame(Label = label, SubPart = 0.01, SubMethod = 1, + CellPart = 1, Replicates = 1, VolIni = 1, VolPrec = 0.1) + ) + } + Sub <- ctxData$Subsample + ## Rework the table by renaming Id by Item, and prepending it with + ## Label, Item and ECD and postpending it with Dil + n <- nrow(tab) + items <- tab$Id + tab$Id <- NULL + dil <- 1/(Sub$SubPart * Sub$CellPart * Sub$Replicates * Sub$VolIni) + tab <- cbind(data.frame(Label = rep(label, n), Item = items, + ECD = ecd(tab$FIT_Raw_Area)), tab, data.frame(Dil = rep(dil, n))) + + ## Add metadata and change class of the object + attr(tab, "metadata") <- ctxData + class(tab) <- c("ZI3Dat", "ZIDat", "data.frame") tab } @@ -240,44 +317,67 @@ #res1 <- readFlowCAMlst(lstFile1) ## Temporary name! -importFlowCAM <- function (lst, rgb.vigs = TRUE) +importFlowCAM <- function (lst, rgb.vigs = TRUE, type = "ZI3", replace = FALSE) { + ## Check arguments + rgb.vigs <- isTRUE(as.logical(rgb.vigs)) + if (type != "ZI3") { + warning("only 'ZI3' is currently supported for 'type'") + return(invisible(FALSE)) + } + + ## Read metadata dat <- readFlowCAMlst(lst, skip = 2, read.ctx = TRUE) ## Check results if (!is.data.frame(dat) && NROW(dat) < 1) stop("Problem while importing FlowCAM data, or empty series") - if (is.null(attr(dat, "FlowCAM.metadata"))) + if (is.null(attr(dat, "metadata"))) stop("Problem while importing FlowCAM metadata from the .ctx file") - ## Create metadata from FlowCAM.metadatata - ## TODO... 
+ ## Change dir to sample's parent directory + sampledir <- dirname(lst) + if (sampledir == ".") sampledir <- getwd() + label <- basename(sampledir) + parentdir <- dirname(sampledir) + #odir <- setwd(sampledir) + odir <- setwd(parentdir) + on.exit(setwd(odir)) - ## ImportVignettes - #require(tiff) - #require(png) + ## .zidb file is computed, and check if file already exists + zidbfile <- paste(sampledir, "zidb", sep = ".") + if (!isTRUE(as.logical(replace)) && file.exists(zidbfile)) { + return(invisible(TRUE)) + } - ## List all tiff files in the directory (but exclude masks with _bin.tif) - sampledir <- dirname(lst) - odir <- setwd(sampledir) - on.exit(setwd(odir)) + ## Create the .zidb file + message("Creating the ZIDB file...") + filehashOption(defaultType = "DB1") + unlink(zidbfile) + dbCreate(zidbfile) + db <- dbInit(zidbfile) + dbInsert(db, ".ZI", 3) + if (isTRUE(rgb.vigs)) { + dbInsert(db, ".ImageType", "png") + } else { + dbInsert(db, ".ImageType", "jpeg") + } + + ## Add vignettes to the .zidb file + message("Adding vignettes to ZIDB file...") - ## Make sure zidbdir exists and is empty - ## TODO: use a fresh dir, or erase existing one with user's acceptation - zidbdir <- file.path(dirname(sampledir), "_import", basename(sampledir)) - if (file.exists(zidbdir) && dir(zidbdir) != 0) - stop("The destination dir already exists and is not empty!") - dir.create(zidbdir, recursive = TRUE, showWarnings = FALSE) - - tif <- dir(sampledir, pattern = "[0-9]\\.tif$", full.names = FALSE) - ## Separate the list into collages and background calibration images - isCal <- grepl("^.*cal_image_[0-9]+\\.tif$", tif) - calFiles <- tif[isCal] - colFiles <- tif[!isCal] - ## Check we have at least one image for each set - if (length(calFiles) == 0) - stop("No background calibration image found") - if (length(colFiles) == 0) - stop("No collages found") +# ## TODO: change this: do not use _import dir +# zidbdir <- file.path(dirname(sampledir), "_import", basename(sampledir)) +# if (file.exists(zidbdir) && dir(zidbdir) != 0) +# stop("The destination dir already exists and is not empty!") +# dir.create(zidbdir, recursive = TRUE, showWarnings = FALSE) + tif <- dir(sampledir, pattern = "[0-9]\\.tif$", full.names = TRUE) + isCal <- grepl("^.*cal_image_[0-9]+\\.tif$", tif) + calFiles <- tif[isCal] + colFiles <- tif[!isCal] + if (length(calFiles) == 0) + stop("No background calibration image found") + if (length(colFiles) == 0) + stop("No collages found") ## Read all background calibration images into a list cals <- list() @@ -312,7 +412,7 @@ mat[coords[2]:coords[4], coords[1]:coords[3]] ## Determine best gray level for background after substraction - gray <- attr(dat, "FlowCAM.metadata")$CaptureParameters$ThresholdLight + gray <- attr(dat, "metadata")$CaptureParameters$ThresholdLight if (!length(gray)) { warning("Unknown threshold gray level; using 40") gray <- 40 # Target something like 40 @@ -327,7 +427,7 @@ ## Do we need to load the next collage? 
if (as.character(d$FIT_Filename) != colFile) { filename <- as.character(d$FIT_Filename) - collage <- readTIFF(source = filename) + collage <- readTIFF(source = file.path(sampledir, filename)) colFile <- d$FIT_Filename colFiles <- colFiles[colFiles != filename] ## If the image is RGB, we got three dimensions to reduce to two @@ -373,14 +473,59 @@ } ## Write this vignette - vigFile <- file.path(zidbdir, - sub("\\.tif$", paste0("_", i, ".png"), filename)) - writePNG(image = vig2, target = vigFile) +# vigFile <- file.path(zidbdir, +# sub("\\.tif$", paste0("_", i, ".png"), filename)) +# writePNG(image = vig2, target = vigFile) + #VigName <- sub("\\.tif$", paste0("_", i), filename) + VigName <- paste(label, i, sep = "_") + + ## In case we use grayscale vignettes, use jpeg, otherwise, use png + if (isTRUE(rgb.vigs)) { + dbInsert(db, VigName, writePNG(image = vig2, target = raw())) + } else { + dbInsert(db, VigName, writeJPEG(image = vig2, target = raw(), + quality = 0.95)) + } } ## Create zidb ## TODO... - dat + #dat + message("Adding data from ZIM files to ZIDB file...") +# for (i in 1:length(Zims)) { +# Zim <- Zims[i] +# ZimName <- sub("\\.zim$", "", basename(Zim)) +# ZimSize <- file.info(Zim)$size +# if (is.na(ZimSize)) { +# warning("file '", Zim, "' not found or of null length") +# return(invisible(FALSE)) +# } +# dbInsert(db, ZimName, readBin(Zim, "raw", ZimSize + 100)) +# } + + ## Adding metadata and particles' attributes to the .zidb file + ## TODO: SampleData come from a DESCRIPTION. zis file??? + ## Here, use a default format + smpdat <- data.frame(Label = label, Station = NA, Data = NA, Time = NA, + TimeZone = NA, Latitude = NA, Longitude = NA, CorrdsPrec = NA, + Operator = NA, Note = NA) # TODO: add note from FlowCAM data!!! + class(smpdat) <- c("ZIDesc", "data.frame") + + message("Adding sample data to ZIDB file...") + dbInsert(db, ".SampleData", smpdat) + + message("Adding R data to ZIDB file...") +# zidat <- file.path(zidir, paste0(basename(zidir), "_dat1.RData")) +# obj <- load(zidat) +# if (length(obj) != 1) { +# warning("Error loading ", zidat) +# return(invisible(FALSE)) +# } + dbInsert(db, ".Data", dat) +# if (isTRUE(as.logical(delete.source))) +# unlink(zidir, recursive = TRUE) + message("-- Done! --") + invisible(TRUE) } ## Example Modified: pkg/zooimage/R/planktonSorter.R =================================================================== --- pkg/zooimage/R/planktonSorter.R 2014-03-02 12:32:42 UTC (rev 247) +++ pkg/zooimage/R/planktonSorter.R 2014-12-02 08:13:10 UTC (rev 248) @@ -205,7 +205,7 @@ planktonSorterValidate <- function (path, query, body, ...) 
{ if (!length(body)) return() - + ## Special cases "iterate>>> " or "done>>> " if (substring(body, 1, 11) == "iterate>>> ") { res <- substring(body, 12) @@ -282,7 +282,7 @@ # #', url, url) -correctError <- function(zidb, classifier, data = zidbDatRead(zidb), +correctError <- function(zidb, classifier, data = zidbDatRead(zidb), mode = "validation", fraction = 0.05, sample.min = 100, grp.min = 2, random.sample = 0.1, algorithm = "rf", diff.max = 0.2, prop.bio = NULL, reset = TRUE, result = NULL) { @@ -333,13 +333,16 @@ } ## Create this object in TempEnv() - ec <- errorCorrection (data, classifier, zidb = zidb, mode = "validation", + ec <- errorCorrection (data, classifier, zidb = zidb, mode = mode, fraction = fraction, sample.min = sample.min, grp.min = grp.min, random.sample = random.sample, algorithm = algorithm, diff.max = diff.max, prop.bio = prop.bio, testdir = testdir, id = Name, result = result, envir = parent.frame()) - assignTemp(Name, ec) + if (mode != "stat") assignTemp(Name, ec) ## Start its first iteration... ec$iterate() + + ## Return the object + ec } Modified: pkg/zooimage/R/utilities.R =================================================================== --- pkg/zooimage/R/utilities.R 2014-03-02 12:32:42 UTC (rev 247) +++ pkg/zooimage/R/utilities.R 2014-12-02 08:13:10 UTC (rev 248) @@ -107,7 +107,7 @@ "FIT_Avg_Blue", "FIT_PPC", "FIT_Ch1_Peak", "FIT_Ch1_TOF", "FIT_Ch2_Peak", "FIT_Ch2_TOF", "FIT_Ch3_Peak", "FIT_Ch3_TOF", "FIT_SaveX", "FIT_SaveY", "FIT_PixelW", "FIT_PixelH", - "FIT_CaptureX", "FIT_CaptureY", "FIT_Edge_Gradient", + "FIT_CaptureX", "FIT_CaptureY", # Keep this one?"FIT_Edge_Gradient", "FIT_Source_Image", "FIT_Calibration_Image", "FIT_High_U32", "FIT_Low_U32", "FIT_Total", "FIT_Red_Green_Ratio", "FIT_Blue_Green_Ratio", "FIT_Red_Blue_Ratio", @@ -116,7 +116,21 @@ "FIT_Ch1_Area", "FIT_Ch2_Area", "FIT_Ch3_Area", "FIT_TimeStamp1", "FIT_Source_Image.1", "X.Item.1", "FeretAngle", "Count", - "Skew", "Kurt", "Solidity")) # Last 3: NAs with multiple ROIs + "Skew", "Kurt", "Solidity", # Last 3: NAs with multiple ROIs + + ## Added in zooimage v.5: + "FIT_Filename", "FIT_Feret_Min_Angle", "FIT_Feret_Max_Angle", + + ## This is somehow redundant with other variables + "FIT_Raw_Area", "FIT_Raw_Perim", "FIT_Raw_Convex_Perim", + "FIT_Raw_Feret_Max", "FIT_Raw_Feret_Min", "FIT_Raw_Feret_Mean", + "FIT_Diameter_ABD", # This one is indeed ECD + + ## Found in format 17 of a color FlowCAM (from KAUST) + ## and not used yet + "FIT_Symmetry", "FIT_Circularity_Hu", "FIT_Intensity_Calimage", + "FIT_Raw_Convex_Hull_Area", "FIT_Raw_Filled_Area" + )) as.character(res) } @@ -127,8 +141,105 @@ ## This is the calculation of derived variables ## Note that you can make your own version of this function for more ## calculated variables! + +## calcVarsVIS() also included here to keep track of it in the ZIClass object! +## Calculate derived variables... 
FlowCAM's Visual Spreadsheet +calcVarsVIS <- function (x, drop.vars = NULL, drop.vars.def = dropVars()) +{ + ## Use only FIT_xxx vars, andderived attributes (26 attributes in total): + ## ECD, FIT_Area_ABD, FIT_Length, FIT_Width, FIT_Diameter_ESD, + ## FIT_Perimeter, FIT_Convex_Perimeter, FIT_Intensity, FIT_Sigma_Intensity, + ## FIT_Compactness, FIT_Elongation, FIT_Sum_Intensity, FIT_Roughness, + ## FIT_Edge_Gradient, FIT_Volume_ABD, FIT_Volume_ESD, FIT_Aspect_Ratio, + ## FIT_Transparency, EdgeRange, CV, MeanFDia, Transp2, FeretRoundness, + ## EdgeCV, EdgeSDNorm & Perim_Ratio ## A small hack to correct some 0 (which can be problematic in further calcs) + noZero <- function(x) { + x[x == 0] <- 1e-09 + x + } + + ## Euclidean distance between two points + distance <- function (x, y) + sqrt(x^2 + y^2) + + ## All FIT_Raw_xxx vars have their counterpart resized in um: + ## FIT_Raw_Area -> FIT_Diameter_ABD + ## FIT_Raw_Feret_Max -> FIT_Length + ## FIT_Raw_Feret_Min -> FIT_Width + ## FIT_Raw_Feret_Mean -> FIT_Diameter_ESD + ## FIT_Raw_Perim -> FIt_Perimeter + ## FIT_Raw_Convex_Perim -> FIt_Convex_Perimeter + ## (=> all FIT_Raw_xxx should be eliminated in dropVars()!) + + ## (re)calculate ECD from FIT_DIameter_ABD (was once calc from FIT_Raw_Area) + x$ECD <- noZero(x$FIT_Diameter_ABD) + x$FIT_Area_ABD <- noZero(x$FIT_Area_ABD) + x$FIT_Length <- noZero(x$FIT_Length) + x$FIT_Width <- noZero(x$FIT_Width) + x$FIT_Diameter_ESD <- noZero(x$FIT_Diameter_ESD) + x$FIT_Perimeter <- noZero(x$FIT_Perimeter) + x$FIT_Convex_Perimeter <- noZero(x$FIT_Convex_Perimeter) + x$FIT_Intensity <- noZero(x$FIT_Intensity) + x$FIT_Sigma_Intensity <- noZero(x$FIT_Sigma_Intensity) + x$FIT_Sum_Intensity <- noZero(x$FIT_Sum_Intensity) + x$FIT_Compactness <- noZero(x$FIT_Compactness) + x$FIT_Elongation <- noZero(x$FIT_Elongation) + x$FIT_Roughness <- noZero(x$FIT_Roughness) + x$FIT_Aspect_Ratio <- noZero(x$FIT_Aspect_Ratio) + x$FIT_Volume_ABD <- noZero(x$FIT_Volume_ABD) + x$FIT_Volume_ESD <- noZero(x$FIT_Volume_ESD) + x$FIT_Transparency <- noZero(x$FIT_Transparency) + x$FIT_Edge_Gradient <- noZero(x$FIT_Edge_Gradient) + + + ## Additional calculated variables + # This is FIT_Aspect_Ratio! x$ARFeret <- x$FIT_Width/x$FIT_Length + x$EdgeRange <- abs(x$FIT_Intensity - x$FIT_Edge_Gradient) + x$CV <- x$FIT_Sigma_Intensity/x$FIT_Intensity * 100 + x$MeanFDia <- (x$FIT_Length + x$FIT_Width) / 2 + x$Transp2 <- 1 - (x$FIT_Diameter_ABD/x$MeanFDia) + x$Transp2[x$Transp2 < 0] <- 0 + x$FeretRoundness <- 4 * x$FIT_Area_ABD/(pi * sqrt(x$FIT_Length)) + x$Circ. <- 4 * pi * x$FIT_Area_ABD / sqrt(x$FIT_Perimeter) # ImageJ calculation + x$EdgeCV <- x$FIT_Sigma_Intensity/x$FIT_Edge_Gradient * 100 + x$EdgeSDNorm <- x$FIT_Intensity/x$EdgeRange + x$Perim_Ratio <- x$FIT_Convex_Perimeter / x$FIT_Perimeter + + ## Eliminate variables that are not predictors... and use Id as rownames + Id <- x$Id + if (length(Id)) rownames(x) <- Id + + ## Variables to drop + ## For those samples treated with FIT_VIS in ImageJ, we need to get rid of + ## the ImageJ variables + x$Area <- NULL + x$Mean <- NULL + x$StdDev <- NULL + x$Mode <- NULL + x$Min <- NULL + x$Max <- NULL + x$Perim. <- NULL + x$Major <- NULL + x$Minor <- NULL + x$Circ. 
<- NULL + x$Feret <- NULL + x$IntDen <- NULL + x$Median <- NULL + + dropAll <- unique(as.character(c(drop.vars, drop.vars.def))) + for (dropVar in dropAll) x[[dropVar]] <- NULL + + ## Return the recalculated data frame + x +} + + ## For data from the FlowCAM, we use a specific function + if (any(names(x) == "FIT_Length")) + return(calcVarsVIS(x, drop.vars = drop.vars, drop.vars.def = drop.vars.def)) + + ## A small hack to correct some 0 (which can be problematic in further calcs) noZero <- function (x) { x[x == 0] <- 0.000000001 x @@ -184,7 +295,15 @@ ## Calculate derived variables... FlowCAM's Visual Spreadsheet calcVarsVIS <- function (x, drop.vars = NULL, drop.vars.def = dropVars()) { - ## A small hack to correct some 0 (which can be problematic in further calcs) + ## Use only FIT_xxx vars, andderived attributes (26 attributes in total): + ## ECD, FIT_Area_ABD, FIT_Length, FIT_Width, FIT_Diameter_ESD, + ## FIT_Perimeter, FIT_Convex_Perimeter, FIT_Intensity, FIT_Sigma_Intensity, + ## FIT_Compactness, FIT_Elongation, FIT_Sum_Intensity, FIT_Roughness, + ## FIT_Edge_Gradient, FIT_Volume_ABD, FIT_Volume_ESD, FIT_Aspect_Ratio, + ## FIT_Transparency, EdgeRange, CV, MeanFDia, Transp2, FeretRoundness, + ## EdgeCV, EdgeSDNorm & Perim_Ratio + + ## A small hack to correct some 0 (which can be problematic in further calcs) noZero <- function(x) { x[x == 0] <- 1e-09 x @@ -194,11 +313,38 @@ distance <- function (x, y) sqrt(x^2 + y^2) - x$FIT_Area_ABD <- noZero(x$FIT_Area_ABD) - x$FIT_Perimeter <- noZero(x$FIT_Perimeter) + ## All FIT_Raw_xxx vars have their counterpart resized in um: + ## FIT_Raw_Area -> FIT_Diameter_ABD + ## FIT_Raw_Feret_Max -> FIT_Length + ## FIT_Raw_Feret_Min -> FIT_Width + ## FIT_Raw_Feret_Mean -> FIT_Diameter_ESD + ## FIT_Raw_Perim -> FIt_Perimeter + ## FIT_Raw_Convex_Perim -> FIt_Convex_Perimeter + ## (=> all FIT_Raw_xxx should be eliminated in dropVars()!) + + ## (re)calculate ECD from FIT_DIameter_ABD (was once calc from FIT_Raw_Area) + x$ECD <- noZero(x$FIT_Diameter_ABD) + x$FIT_Area_ABD <- noZero(x$FIT_Area_ABD) x$FIT_Length <- noZero(x$FIT_Length) x$FIT_Width <- noZero(x$FIT_Width) - x$ARFeret <- x$FIT_Width/x$FIT_Length + x$FIT_Diameter_ESD <- noZero(x$FIT_Diameter_ESD) + x$FIT_Perimeter <- noZero(x$FIT_Perimeter) + x$FIT_Convex_Perimeter <- noZero(x$FIT_Convex_Perimeter) + x$FIT_Intensity <- noZero(x$FIT_Intensity) + x$FIT_Sigma_Intensity <- noZero(x$FIT_Sigma_Intensity) + x$FIT_Sum_Intensity <- noZero(x$FIT_Sum_Intensity) + x$FIT_Compactness <- noZero(x$FIT_Compactness) + x$FIT_Elongation <- noZero(x$FIT_Elongation) + x$FIT_Roughness <- noZero(x$FIT_Roughness) + x$FIT_Aspect_Ratio <- noZero(x$FIT_Aspect_Ratio) + x$FIT_Volume_ABD <- noZero(x$FIT_Volume_ABD) + x$FIT_Volume_ESD <- noZero(x$FIT_Volume_ESD) + x$FIT_Transparency <- noZero(x$FIT_Transparency) + x$FIT_Edge_Gradient <- noZero(x$FIT_Edge_Gradient) + + + ## Additional calculated variables + # This is FIT_Aspect_Ratio! x$ARFeret <- x$FIT_Width/x$FIT_Length x$EdgeRange <- abs(x$FIT_Intensity - x$FIT_Edge_Gradient) x$CV <- x$FIT_Sigma_Intensity/x$FIT_Intensity * 100 x$MeanFDia <- (x$FIT_Length + x$FIT_Width) / 2 @@ -215,6 +361,22 @@ if (length(Id)) rownames(x) <- Id ## Variables to drop + ## For those samples treated with FIT_VIS in ImageJ, we need to get rid of + ## the ImageJ variables + x$Area <- NULL + x$Mean <- NULL + x$StdDev <- NULL + x$Mode <- NULL + x$Min <- NULL + x$Max <- NULL + x$Perim. <- NULL + x$Major <- NULL + x$Minor <- NULL + x$Circ. 
<- NULL + x$Feret <- NULL + x$IntDen <- NULL + x$Median <- NULL + dropAll <- unique(as.character(c(drop.vars, drop.vars.def))) for (dropVar in dropAll) x[[dropVar]] <- NULL Modified: pkg/zooimage/inst/etc/ZooImageManual.pdf =================================================================== (Binary files differ) Modified: pkg/zooimage/man/correctError.Rd =================================================================== --- pkg/zooimage/man/correctError.Rd 2014-03-02 12:32:42 UTC (rev 247) +++ pkg/zooimage/man/correctError.Rd 2014-12-02 08:13:10 UTC (rev 248) @@ -6,7 +6,7 @@ Open a web page for manual validation and error correction of predicted abundances in samples. } \usage{ -correctError(zidb, classifier, data = zidbDatRead(zidb), +correctError(zidb, classifier, data = zidbDatRead(zidb), mode = "validation", fraction = 0.05, sample.min = 100, grp.min = 2, random.sample = 0.1, algorithm = "rf", diff.max = 0.2, prop.bio = NULL, reset = TRUE, result = NULL) @@ -16,6 +16,12 @@ \item{zidb}{ Path to a Zidb file. } \item{classifier}{ A ZIClass object appropriate for this sample and the desired classification. } \item{data}{ A ZIDat or a ZITest object matching that sample (by default, it is the ZIDat object contained in the zidb file). } + \item{mode}{ The mode to use for error correction. By default, \code{mode = "validation"}, + where particles are manually validated. \code{mode = "demo"} is the same one, but it sorts particles + according to the Class variable in data, ignoring changes made in the user interface (so that one + can explain the logic of the process without care about how particles are manually resorted). + Finally, \code{mode = "stat"} do not display the user interface at all and calculates all steps + directly to show gain from the process from 0 to 100\% of the particles validated. } \item{fraction}{ The fraction of items to validate at each step (1/20th by default). } \item{sample.min}{ Minimal number of items to take at each step. } \item{grp.min}{ Minimal number of items to take for each group, on average. } Modified: pkg/zooimage/man/import.Rd =================================================================== --- pkg/zooimage/man/import.Rd 2014-03-02 12:32:42 UTC (rev 247) +++ pkg/zooimage/man/import.Rd 2014-12-02 08:13:10 UTC (rev 248) @@ -12,7 +12,7 @@ \usage{ readFlowCAMctx(ctx, stop.it = TRUE) readFlowCAMlst(lst, skip = 2, read.ctx = TRUE) -importFlowCAM(lst, rgb.vigs = TRUE) +importFlowCAM(lst, rgb.vigs = TRUE, type = "ZI3", replace = FALSE) } \arguments{ @@ -26,6 +26,10 @@ \item{read.ctx}{ should we also read the .ctx file with \code{readFlowCAMctx()}? } \item{rgb.vigs}{ do we build color vignettes that mix OD, visual and mask in the three RGB channels? } + \item{type}{ the type of \code{.zidb} file to create. Currently, only supports + \code{type = "ZI3"} (defaulf value). } + \item{replace}{ a boolean indicating if an existing \code{.zidb} file should + be replaced by a new one. 
} } \value{ Modified: pkg/zooimage/man/zooimage.package.Rd =================================================================== --- pkg/zooimage/man/zooimage.package.Rd 2014-03-02 12:32:42 UTC (rev 247) +++ pkg/zooimage/man/zooimage.package.Rd 2014-12-02 08:13:10 UTC (rev 248) @@ -16,8 +16,8 @@ \tabular{ll}{ Package: \tab zooimage\cr Type: \tab Package\cr - Version: \tab 4.0-0\cr - Date: \tab 2014-02-25\cr + Version: \tab 5.1-0\cr + Date: \tab 2014-12-02\cr License: \tab GPL 2 or above at your convenience.\cr } Everytime you publish results that use ZooImage, you must place a reference From noreply at r-forge.r-project.org Sun Dec 7 08:40:32 2014 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Sun, 7 Dec 2014 08:40:32 +0100 (CET) Subject: [Zooimage-commits] r249 - in pkg: . zooimage zooimage/R zooimage/inst/gui zooimage/inst/gui/errorcorrection zooimage/man Message-ID: <20141207074033.0A8BA1805B3@r-forge.r-project.org> Author: phgrosjean Date: 2014-12-07 08:40:31 +0100 (Sun, 07 Dec 2014) New Revision: 249 Added: pkg/zooimage/inst/gui/errorcorrection/ pkg/zooimage/inst/gui/errorcorrection/global.R pkg/zooimage/inst/gui/errorcorrection/server.R pkg/zooimage/inst/gui/errorcorrection/ui.R Removed: pkg/zooimage.Rcheck/ Modified: pkg/zooimage/DESCRIPTION pkg/zooimage/NAMESPACE pkg/zooimage/NEWS pkg/zooimage/R/ZIRes.R pkg/zooimage/R/ZITrain.R pkg/zooimage/R/gui.R pkg/zooimage/R/import.R pkg/zooimage/R/utilities.R pkg/zooimage/R/zid.R pkg/zooimage/R/zidb.R pkg/zooimage/man/gui.Rd Log: Changes in the GUI Modified: pkg/zooimage/DESCRIPTION =================================================================== --- pkg/zooimage/DESCRIPTION 2014-12-02 08:13:10 UTC (rev 248) +++ pkg/zooimage/DESCRIPTION 2014-12-07 07:40:31 UTC (rev 249) @@ -11,7 +11,7 @@ email = "kevin.denis at umons.ac.be")) Maintainer: Philippe Grosjean Depends: R (>= 2.14.0), svMisc (>= 0.9-67), svDialogs (>= 0.9-53), mlearning -Imports: filehash, jpeg, png, tiff, utils, digest, tools +Imports: filehash, jpeg, png, tiff, utils, digest, tools, shiny Suggests: rJava, mlbench Description: ZooImage is a free (open source) solution for analyzing digital images of zooplankton. In combination with ImageJ, a free image analysis Modified: pkg/zooimage/NAMESPACE =================================================================== --- pkg/zooimage/NAMESPACE 2014-12-02 08:13:10 UTC (rev 248) +++ pkg/zooimage/NAMESPACE 2014-12-07 07:40:31 UTC (rev 249) @@ -21,6 +21,7 @@ #import(RWeka) import(mlearning) #import(party) +import(shiny) # planktonSorter export(correctError) @@ -164,6 +165,7 @@ export(ZIDlg) # Not in menus yet! #export(subpartZIDat) +export(ZIUI) # GUI-Utilities export(selectGroups) Modified: pkg/zooimage/NEWS =================================================================== --- pkg/zooimage/NEWS 2014-12-02 08:13:10 UTC (rev 248) +++ pkg/zooimage/NEWS 2014-12-07 07:40:31 UTC (rev 249) @@ -7,6 +7,8 @@ from there). This way, there is no need any more of a second image analysis in ImageJ. +* A new UI for error correction using shiny. 
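As a rough illustration of how this new UI is reached (a sketch only, not part of the commit; the method script is whatever file the user points to, and it must define the .ZI object that the app's global.R checks for):

library(zooimage)
## Via the new "Interactive UI" menu entry, or directly from the console:
ZIUI()   # prompts for a method .R script and sources it (chdir = TRUE)
## The shiny app itself ships in inst/gui/errorcorrection; launching it
## standalone, e.g. shiny::runApp(system.file("gui", "errorcorrection",
## package = "zooimage")), stops early because the required .ZI object
## (normally created by the method script) is not defined.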
+ == Changes in zooimage 5.0-0 Modified: pkg/zooimage/R/ZIRes.R =================================================================== --- pkg/zooimage/R/ZIRes.R 2014-12-02 08:13:10 UTC (rev 248) +++ pkg/zooimage/R/ZIRes.R 2014-12-07 07:40:31 UTC (rev 249) @@ -106,6 +106,9 @@ processSample <- function (x, sample, keep = NULL, detail = NULL, classes = "both", header = c("Abd", "Bio"), biomass = NULL, breaks = NULL) { + ## Fix ECD in case of FIT_VIS data + if ("FIT_Area_ABD" %in% names(x)) x$ECD <- ecd(x$FIT_Area_ABD) + ## Check arguments if (missing(sample)) { sample <- unique(sampleInfo(x$Label, type = "sample", ext = "")) @@ -208,7 +211,7 @@ x$P2 <- biomass[2] x$P3 <- biomass[3] } else stop("wrong 'biomass', must be NULL, a vector of 3 values or a data frame with Class, P1, P2 and P3") - if(!is.numeric(x$ECD)) stop("'ECD' required for biomasses") + if (!is.numeric(x$ECD)) stop("'ECD' required for biomasses") x$BioWeight <- (x$P1 * x$ECD^x$P3 + x$P2) * x$Dil } Modified: pkg/zooimage/R/ZITrain.R =================================================================== --- pkg/zooimage/R/ZITrain.R 2014-12-02 08:13:10 UTC (rev 248) +++ pkg/zooimage/R/ZITrain.R 2014-12-07 07:40:31 UTC (rev 249) @@ -380,6 +380,8 @@ ## Rename Dat in df df <- Dat + ## Fix ECD in case of FIT_VIS data + if ("FIT_Area_ABD" %in% names(df)) df$ECD <- ecd(df$FIT_Area_ABD) ## Problem if there is no remaining row in the data frame if (nrow(df) == 0) { warning("No valid item found (no vignettes with valid measurement data)") Modified: pkg/zooimage/R/gui.R =================================================================== --- pkg/zooimage/R/gui.R 2014-12-02 08:13:10 UTC (rev 248) +++ pkg/zooimage/R/gui.R 2014-12-07 07:40:31 UTC (rev 249) @@ -27,6 +27,8 @@ menuAddItem(ZIname, "List objects", "listObjects()") menuAddItem(ZIname, "Remove objects", "removeObjects()") menuAddItem(ZIname, "-", "") + menuAddItem(ZIname, "Interactive UI", "ZIUI()") + menuAddItem(ZIname, "--", "") menuAddItem(ZIname, "Online help", 'help("zooimage")') menuAddItem(ZIname, "Manual", "viewManual()") menuAddItem(ZIname, @@ -1345,3 +1347,16 @@ # res <- subpartThreshold(ZIDat = zid, Filter = threshold) # return(res) #} + + +################################################################################ +## New User Interface using Shiny for error correction +ZIUI <- function () { + #appdir <- system.file("gui", "errorcorrection", package = "zooimage") + #runApp(appdir) + res <- dlgOpen(title = "Select one R method file", + filters = dlgFilters[c("R", "All"), ])$res + if (length(res)) { + source(res, chdir = TRUE) + } +} \ No newline at end of file Modified: pkg/zooimage/R/import.R =================================================================== --- pkg/zooimage/R/import.R 2014-12-02 08:13:10 UTC (rev 248) +++ pkg/zooimage/R/import.R 2014-12-07 07:40:31 UTC (rev 249) @@ -302,7 +302,7 @@ tab$Id <- NULL dil <- 1/(Sub$SubPart * Sub$CellPart * Sub$Replicates * Sub$VolIni) tab <- cbind(data.frame(Label = rep(label, n), Item = items, - ECD = ecd(tab$FIT_Raw_Area)), tab, data.frame(Dil = rep(dil, n))) + ECD = ecd(tab$FIT_Area_ABD)), tab, data.frame(Dil = rep(dil, n))) ## Add metadata and change class of the object attr(tab, "metadata") <- ctxData Modified: pkg/zooimage/R/utilities.R =================================================================== --- pkg/zooimage/R/utilities.R 2014-12-02 08:13:10 UTC (rev 248) +++ pkg/zooimage/R/utilities.R 2014-12-07 07:40:31 UTC (rev 249) @@ -112,7 +112,7 @@ "FIT_Low_U32", "FIT_Total", "FIT_Red_Green_Ratio", 
"FIT_Blue_Green_Ratio", "FIT_Red_Blue_Ratio", "FIT_Ch2_Ch1_Ratio", "FIT_Ch4_Peak", "FIT_Ch4_TOF", "FIT_Timestamp1", - "FIT_Timestamp2", "FIT_Camera", "FIT_FringSize", "FIT_CircleFit", + "FIT_Timestamp2", "FIT_Camera", "FIT_FringSize", "FIT_Ch1_Area", "FIT_Ch2_Area", "FIT_Ch3_Area", "FIT_TimeStamp1", "FIT_Source_Image.1", "X.Item.1", "FeretAngle", "Count", @@ -129,7 +129,8 @@ ## Found in format 17 of a color FlowCAM (from KAUST) ## and not used yet "FIT_Symmetry", "FIT_Circularity_Hu", "FIT_Intensity_Calimage", - "FIT_Raw_Convex_Hull_Area", "FIT_Raw_Filled_Area" + "FIT_Raw_Convex_Hull_Area", "FIT_Raw_Filled_Area", + "FIT_CircleFit", "FIT_Edge_Gradient" )) as.character(res) } @@ -150,9 +151,8 @@ ## ECD, FIT_Area_ABD, FIT_Length, FIT_Width, FIT_Diameter_ESD, ## FIT_Perimeter, FIT_Convex_Perimeter, FIT_Intensity, FIT_Sigma_Intensity, ## FIT_Compactness, FIT_Elongation, FIT_Sum_Intensity, FIT_Roughness, - ## FIT_Edge_Gradient, FIT_Volume_ABD, FIT_Volume_ESD, FIT_Aspect_Ratio, - ## FIT_Transparency, EdgeRange, CV, MeanFDia, Transp2, FeretRoundness, - ## EdgeCV, EdgeSDNorm & Perim_Ratio + ## FIT_Volume_ABD, FIT_Volume_ESD, FIT_Aspect_Ratio, FIT_Transparency, + ## CV, MeanFDia, Transp2, FeretRoundness & Perim_Ratio ## A small hack to correct some 0 (which can be problematic in further calcs) noZero <- function(x) { @@ -174,7 +174,7 @@ ## (=> all FIT_Raw_xxx should be eliminated in dropVars()!) ## (re)calculate ECD from FIT_DIameter_ABD (was once calc from FIT_Raw_Area) - x$ECD <- noZero(x$FIT_Diameter_ABD) + x$ECD <- noZero(ecd(x$FIT_Area_ABD)) x$FIT_Area_ABD <- noZero(x$FIT_Area_ABD) x$FIT_Length <- noZero(x$FIT_Length) x$FIT_Width <- noZero(x$FIT_Width) @@ -191,20 +191,20 @@ x$FIT_Volume_ABD <- noZero(x$FIT_Volume_ABD) x$FIT_Volume_ESD <- noZero(x$FIT_Volume_ESD) x$FIT_Transparency <- noZero(x$FIT_Transparency) - x$FIT_Edge_Gradient <- noZero(x$FIT_Edge_Gradient) - ## Additional calculated variables # This is FIT_Aspect_Ratio! x$ARFeret <- x$FIT_Width/x$FIT_Length - x$EdgeRange <- abs(x$FIT_Intensity - x$FIT_Edge_Gradient) + ## For later on: + #x$EdgeRange <- abs(x$FIT_Intensity - x$FIT_Edge_Gradient) x$CV <- x$FIT_Sigma_Intensity/x$FIT_Intensity * 100 x$MeanFDia <- (x$FIT_Length + x$FIT_Width) / 2 x$Transp2 <- 1 - (x$FIT_Diameter_ABD/x$MeanFDia) x$Transp2[x$Transp2 < 0] <- 0 x$FeretRoundness <- 4 * x$FIT_Area_ABD/(pi * sqrt(x$FIT_Length)) x$Circ. <- 4 * pi * x$FIT_Area_ABD / sqrt(x$FIT_Perimeter) # ImageJ calculation - x$EdgeCV <- x$FIT_Sigma_Intensity/x$FIT_Edge_Gradient * 100 - x$EdgeSDNorm <- x$FIT_Intensity/x$EdgeRange + ## For later on: + #x$EdgeCV <- x$FIT_Sigma_Intensity/x$FIT_Edge_Gradient * 100 + #x$EdgeSDNorm <- x$FIT_Intensity/x$EdgeRange x$Perim_Ratio <- x$FIT_Convex_Perimeter / x$FIT_Perimeter ## Eliminate variables that are not predictors... and use Id as rownames Modified: pkg/zooimage/R/zid.R =================================================================== --- pkg/zooimage/R/zid.R 2014-12-02 08:13:10 UTC (rev 248) +++ pkg/zooimage/R/zid.R 2014-12-07 07:40:31 UTC (rev 249) @@ -521,11 +521,18 @@ rownames(allmes) <- 1:nrow(allmes) Names <- names(allmes) - ## Calculate an ECD from Area if there is not one yet - if (!"ECD" %in% Names && "Area" %in% Names) { - ECD <- ecd(allmes$Area) - allmes <- data.frame(allmes[, 1:2], ECD = ECD, allmes[, 3:ncol(allmes)]) - } + ## Calculate an ECD from Area (or FIT_Area_ABD) if there is not one yet + if (!"ECD" %in% Names) { + if ("FIT_Area_ABD" %in% Names) { # This is FlowCAM data! 
+ ECD <- ecd(allmes$FIT_Area_ABD) + allmes <- data.frame(allmes[, 1:2], ECD = ECD, + allmes[, 3:ncol(allmes)]) + } else if ("Area" %in% Names) { # All other cases + ECD <- ecd(allmes$Area) + allmes <- data.frame(allmes[, 1:2], ECD = ECD, + allmes[, 3:ncol(allmes)]) + } + } attr(allmes, "metadata") <- allmeta class(allmes) <- c("ZI3Dat", "ZIDat", "data.frame") ZI.sample <- allmes @@ -585,6 +592,10 @@ ZI.sample <- NULL load(rdata) + ## Fix ECD in case of FIT_VIS data + if ("FIT_Area_ABD" %in% names(ZI.sample)) + ZI.sample$ECD <- ecd(ZI.sample$FIT_Area_ABD) + ## Delete the file if (deletefile) { unlink(rdata) Modified: pkg/zooimage/R/zidb.R =================================================================== --- pkg/zooimage/R/zidb.R 2014-12-02 08:13:10 UTC (rev 248) +++ pkg/zooimage/R/zidb.R 2014-12-07 07:40:31 UTC (rev 249) @@ -125,7 +125,10 @@ warning("Error loading ", zidat) return(invisible(FALSE)) } - dbInsert(db, ".Data", get(obj)) + dat <- get(obj) + ## Fix ECD in case of FIT_VIS data + if ("FIT_Area_ABD" %in% names(dat)) dat$ECD <- ecd(dat$FIT_Area_ABD) + dbInsert(db, ".Data", dat) ## Do we delete sources? if (isTRUE(as.logical(delete.source))) @@ -429,8 +432,12 @@ db2env(dbInit(zidbfile)) ## Read only Rdata file from a .zidb database -zidbDatRead <- function (zidbfile) - zidbLink(zidbfile)$.Data +zidbDatRead <- function (zidbfile) { + res <- zidbLink(zidbfile)$.Data + ## Fix ECD in case of FIT_VIS data + if ("FIT_Area_ABD" %in% names(res)) res$ECD <- ecd(res$FIT_Area_ABD) + res +} ## Read only the sample data zidbSampleRead <- function (zidbfile) Added: pkg/zooimage/inst/gui/errorcorrection/global.R =================================================================== --- pkg/zooimage/inst/gui/errorcorrection/global.R (rev 0) +++ pkg/zooimage/inst/gui/errorcorrection/global.R 2014-12-07 07:40:31 UTC (rev 249) @@ -0,0 +1,128 @@ +## Zoo/PhytoImage simplified analysis UI (run the application) +## Copyright (c) 2014, Philippe Grosjean (Philippe.Grosjean at umons.ac.be) +## TODO: allow for placing samples in subdirs + use tree view + +## Get the working directory +if (!exists(".ZI")) + stop("You must run this app from within a method script!") +inidir <- dirname(.ZI$wdir) +cat("Directory:", inidir, "\n") + +## Used to print a report after exiting the shiny app +print.reportObj <- function (x, ...) 
{ + line <- paste0(c("\n", rep('-', getOption("width")), "\n")) + cat(line, paste0(x, collapse = "\n"), line, sep = "") + invisible(x) +} + +## Additional functions required by the UI +## Same a headerPanel, but taking less space, using h5 instead of h1 +smallHeaderPanel <- function (title, windowTitle = title) { + tagList(tags$head(tags$title(windowTitle)), div(class = "span12", + style = "padding: 2px 0px;", strong(title))) +} + +#smallTitlePanel <- function (title, windowTitle = title) { +# tagList(tags$head(tags$title(windowTitle)), h5(style = "padding: 2px 0px;", +# title)) +#} + +## Define UI for default process using a config .R script in zooimage +## TODO: change the title according to actual name and version of the software +## TODO: translate UI strings (English and French interfaces) +uiTitle <- paste0("Zoo/PhytoImage version 5.1-0 (UMONS/IFREMER rephy release) - ", + .ZI$method, " - ", .ZI$user) + + +### List all available methods +#Methods <- dir(file.path(inidir, "_analyses"), pattern = "\\.R$") +#if (!length(Methods)) stop("No methods defined in that directory") +### Eliminate .R +#Methods <- sub("\\.R$", "", Methods) +#Methods <- .ZI$method + +### Prepare for first method +#source(paste(file.path(inidir, "_analyses", .ZI$method), "R", sep = "."), chdir = TRUE) + +## List all samples currently available +listSamples <- function (path, method, unanalyzed.only = FALSE) { + res <- dir(path) + if (!length(res)) return(character(0)) + ## Eliminate hidden dirs and files (starting with "_") + res <- res[substr(res, 1, 1) != "_"] + if (!length(res)) return(character(0)) + ## Keep only dirs or .zidb files + res <- res[grepl("\\.zidb$", res) | file.info(file.path(inidir, res))$isdir] + if (!length(res)) return(character(0)) + ## Copy res to files, and eliminate .zidb extensions from res + files <- rev(res) + res <- rev(sub("\\.zidb$", "", res)) + ## Where there is a dir and a .zidb file for the same sample, eliminate dir + keep <- !duplicated(res) + ## Select files and dir, rereverting rev and files + res <- rev(res[keep]) + if (!length(res)) return(character(0)) + files <- rev(files[keep]) + ## Determine which sample is imported (has a .zidb file) + imp <- grepl("\\.zidb$", files) + ## Determine if some of these files are already processed + proc <- dir(file.path(path, "_analyses", method), + pattern = "\\_valid.RData$") + if (length(proc)) { + ## Keep only those items that are in res + procsmp <- sub("_valid\\.RData$", "", proc) + proc <- (res %in% procsmp) + } else proc <- rep(FALSE, length(res)) + ## Create names with smp [ ]/[I]/[A] + status <- rep("[ ]", length(res)) + status[imp] <- "[I]" + status[proc] <- "[A]" + nms <- paste(status, res) + + ## If keep unanalyzed only, select corresponding items + # if (isTRUE(as.logical(unanalyzed.only))) { + # res <- res[!proc] + # nms <- nms[!proc] + # files <- files[!proc] + # imp <- imp[!proc] + # } + + ## Create a list with samples, files and processed + list(samples = res, names = nms, files = files, imported = imp, + analyzed = proc) +} +AllSamples <- listSamples(inidir, method = .ZI$method) + + + +calcSample <- function (Sample, input, output, session) +{ + ## Is this sample already imported? 
+ ## Try to import it anyway with replace = FALSE + if (file.exists(file.path(inidir, Sample))) { + ## Get .lst file first + Lst <- dir(file.path(inidir, Sample), pattern = "\\.lst$", + full.names = TRUE)[1] + if (length(Lst)) { + res <- try(importFlowCAM(Lst, rgb.vigs = FALSE, replace = FALSE), + silent = TRUE) + if (inherits(res, "try-error")) { + stop("Error importing sample", Sample) + } else { # Update list + Method <- .ZI$method #input$method + AllSamples <- listSamples(inidir, method = Method) + #, input$newonlyCheck) + ## Is this sample validated? + ## TODO: if reimported => backup validation data and clear it now! + if (file.exists(file.path(inidir, "_analyses", Method, + paste(Sample, "valid.RData", sep = "_")))) { + tag <- "[A]" + } else tag <- "[I]" + + updateSelectInput(session, "sample", choices = AllSamples$names, + selected = paste(tag, Sample)) + } + } + } +} + Added: pkg/zooimage/inst/gui/errorcorrection/server.R =================================================================== --- pkg/zooimage/inst/gui/errorcorrection/server.R (rev 0) +++ pkg/zooimage/inst/gui/errorcorrection/server.R 2014-12-07 07:40:31 UTC (rev 249) @@ -0,0 +1,397 @@ +## Zoo/PhytoImage simplified analysis UI (server code) +## Copyright (c) 2014, Philippe Grosjean (Philippe.Grosjean at umons.ac.be) +## TODO: allow for placing samples in subdirs + use tree view +## TODO: add "Stat" button for fully validated samples +## TODO: translate server messages (English and French interfaces) +## TODO: allow downloading the data with something like: +## In server.R: +#output$downloadData <- downloadHandler( +# filename = function() { +# paste('data-', Sys.Date(), '.csv', sep='') +# }, +# content = function(file) { +# write.csv(data, file) +# } +#) +# +## In ui.R: +#downloadLink('downloadData', 'Download') +## +## - Use includeMarkdown() +## +## - Use renderDataTable(), e.g., +## Pass a callback function to DataTables using I() +#renderDataTable(iris, options = list( +# iDisplayLength = 5, +# fnInitComplete = I("function(oSettings, json) {alert('Done.');}") +#)) + +shinyServer(function (input, output, session) { + + doAnalysis <- reactive({ + generalMessage <- function(message) { + paste0("______________________________________________________________________", + #"\n?chantillons totaux: ", length(AllSamples$names), + "\n?chantillons ? traiter: ", sum(!AllSamples$analyzed), + "\n?chantillons analys?s: ", sum(AllSamples$analyzed), + "\n\n", message, "\n", + "______________________________________________________________________\n") + } + + if (input$goButton == 0) + return(generalMessage("(Auncun ?chantillon n'a encore ?t? analys? au cours de cette session).")) + isolate({ + Sample <- substring(input$sample, 5) + ZIDB <- file.path(inidir, paste(Sample, "zidb", sep = ".")) + ## Determine if we already got some data... 
+ ## First look at "demo" data _valid0.RData + DemoFile <- file.path(inidir, "_analyses", .ZI$method, #input$method, + paste(Sample, "valid0.RData", sep = "_")) + SampleFile <- file.path(inidir, "_analyses", .ZI$method, #input$method, + paste(Sample, "valid.RData", sep = "_")) + MetaFile <- file.path(inidir, "_analyses", .ZI$method, #input$method, + paste(Sample, "valid.txt", sep = "_")) + ResFile <- file.path(inidir, "_analyses", .ZI$method, #input$method, + paste(Sample, "res.RData", sep = "_")) + + ValidData <- paste(Sample, "valid", sep = "_") + ResData <- paste(Sample, "res", sep = "_") + if (exists(ValidData, inherits = FALSE)) rm(list = ValidData) + if (file.exists(DemoFile)) { # Run in demo mode + res <- load(DemoFile) + DemoData <- get(res) + rm(list = res) + ce <- correctError(zidb = ZIDB, classifier = .ZIClass, + data = DemoData, mode = "demo") + ## Note: we save just nothing, because we are in demo mode? + ## or do we save data? + } else { + ## Are there some data already available? + if (file.exists(SampleFile)) { # Reanalyze the sample + res <- load(SampleFile) + SampleData <- get(res) + rm(list = res) + ce <- correctError(zidb = ZIDB, classifier = .ZIClass, + data = SampleData) + } else { # Nothing available: start from scratch + ce <- correctError(zidb = ZIDB, classifier = .ZIClass) + } + + } #x <- "Demo found" else x <- "Demo not found" + + + ## Backup sample and metadata files if they exist + if (file.exists(SampleFile)) + file.copy(SampleFile, paste(SampleFile, "bak", sep = ".")) + unlink(SampleFile) + if (file.exists(MetaFile)) + file.copy(MetaFile, paste(MetaFile, "bak", sep = ".")) + unlink(MetaFile) + if (file.exists(ResFile)) + file.copy(ResFile, paste(ResFile, "bak", sep = ".")) + unlink(ResFile) + + ## The following code fails while we are still validating items... 
+ ## TODO: associate name of validator + date + res <- try(save(list = ValidData, file = SampleFile), silent = TRUE) + while (inherits(res, "try-error")) { + Sys.sleep(0.5) # Wait 1/2 sec + res <- try(save(list = ValidData, file = SampleFile), + silent = TRUE) + } + ## Save associated metadata + cat("zooimage version: 5.1.0\n", file = MetaFile) + cat("method: ", .ZI$method, "\n", sep = "", + file = MetaFile, append = TRUE) + cat("user: ", .ZI$user, "\n", sep = "", + file = MetaFile, append = TRUE) + cat("date: ", as.character(Sys.time()), "\n", sep = "", + file = MetaFile, append = TRUE) + cat("training set: ", .ZI$train, "\n", sep = "", + file = MetaFile, append = TRUE) + ## should be../ more + #cat("training file: ", .ZI$trainfile, "\n", sep = "", + # file = MetaFile, append = TRUE) + cat("classifier: ", .ZI$classif, "\n", sep = "", + file = MetaFile, append = TRUE) + ## should be../ more + #cat("classifier file: ", .ZI$classifile, "\n", sep = "", + # file = MetaFile, append = TRUE) + cat("classifier cmd: ", .ZI$classifcmd, "\n", sep = "", + file = MetaFile, append = TRUE) + cat("size breaks: ", paste(.ZI$breaks, collapse = "-"), "\n", sep = "", + file = MetaFile, append = TRUE) + cat("biovolume conversion: \n", sep = "", + file = MetaFile, append = TRUE) + write.table(.ZI$biovolume, sep = "\t", dec = ".", row.names = FALSE, + col.names = TRUE, file = MetaFile, append = TRUE) + + ## Calculate results for this sample + dat2 <- get(ValidData) + cl <- levels(dat2$Class) # All classes + ## We used first uppercase for classes of interest, thus: + cl <- cl[grepl("^[A-Z]", cl)] + ## Now, we also want to calculate separate abundances for most abundant classes + ## i.e., those with at least 50 individuals measured + detail <- cl[cl %in% levels(dat2$Class)[table(dat2$Class) >= 50]] + ## Calculate results for this sample + ## TODO: correct the bug with keep = cl => replacement has different number of rows + #assign(ResData, processSample(dat2, keep = cl, detail = detail, + # biomass = .ZI$biovolume, breaks = .ZI$breaks, classes = "Class")) + assign(ResData, processSample(dat2, keep = NULL, detail = detail, + biomass = .ZI$biovolume, breaks = .ZI$breaks, classes = "Class")) + ## Save it + save(list = ResData, file = ResFile) + + ## Report success + x <- paste("(L'?chantillon", Sample, "vient d'?tre analys?).") + + Method <- .ZI$method #input$method + AllSamples <- listSamples(inidir, method = Method) + + if (file.exists(file.path(inidir, "_analyses", Method, + paste(Sample, "valid.RData", sep = "_")))) { + tag <- "[A]" + } else tag <- "[I]" + + updateSelectInput(session, "sample", choices = AllSamples$names, + selected = paste(tag, Sample)) + + return(generalMessage(x)) + }) + }) + + #output$generalSummary <- renderText({ + # if (input$stopButton) { # Manage clean closing of the page + # ## R?activer R + # ## TODO: change this code to get the name of R application under Mac OS X + # GUI <- .Platform$GUI + # if (GUI == "Rgui") { # Code for RGui under Windows + # try(bringToTop(-1), silent = TRUE) + # } else if (GUI == "AQUA") { # Code for R/R64/SciViews R64.app + # ## This works from Mac OS X 10.5 Leopard: + # try(system("osascript -e 'tell application id \"Rgui\" to activate'", + # ignore.stdout = TRUE, ignore.stderr = TRUE), silent = TRUE) + # #try(system("osascript -e 'tell application \"R\" to activate'", + # # ignore.stdout = TRUE, ignore.stderr = TRUE), silent = TRUE) + # #try(system("osascript -e 'tell application \"R64\" to activate'", + # # ignore.stdout = TRUE, ignore.stderr = TRUE), silent 
= TRUE) + # #try(system("osascript -e 'tell application \"SciViews R64\" to activate'", + # # ignore.stdout = TRUE, ignore.stderr = TRUE), silent = TRUE) + # } else if (grepl("^mac", .Platform$pkgType)) { # Try code for Terminal.app + # try(system("osascript -e 'tell application \"Terminal\" to activate'", + # ignore.stdout = TRUE, ignore.stderr = TRUE), silent = TRUE) + # } + # + # ## Stop the application, returning a short report of what was done + # report <- structure("Content of my report here...", class = "reportObj") + # stopApp(report) + # + # ## Indicate the app is disconnected + # paste(strong(em("Application d?connect?e!"))) + # + # } else { # Indicate number of samples to process and number analyzed + # ## TODO: make this reactive to the change to the list of samples + # paste(em("A traiter:"), strong(em(sum(!AllSamples$analyzed))), + # em(" - analys?s:"), strong(em(sum(AllSamples$analyzed)))) + # } + #}) + + output$sampleSummary <- renderPrint(width = 80, { + if (input$stopButton) { + #updateTabsetPanel(session, "mainTabset", selected = "R?sum?") + } else { + ## Also update the list of samples, depending on both method and newonlyCheck + # AllSamples <- listSamples(inidir, method = .ZI$method, input$newonlyCheck) + # updateSelectInput(session, "sample", choices = AllSamples$names) + Sample <- substring(input$sample, 5) + calcSample(Sample, input, output, session) + ## Link to the .zidb file and provide a summary of this sample + cat("===", Sample, "===\n") + ZIDB <- file.path(inidir, paste(Sample, "zidb", sep = ".")) + Dat <- zidbDatRead(ZIDB) + cat("?chantillon contenant", nrow(Dat), "particules num?ris?es.\n") + if (substr(input$sample, 1, 3) == "[A]") { + ## Get analysis statistics about this sample + #if (!exists("SampleData")) { + ## Download the data! + SampleFile <- file.path(inidir, "_analyses", .ZI$method, #input$method, + paste(Sample, "valid.RData", sep = "_")) + if (file.exists(SampleFile)) { + res <- load(SampleFile) + SampleData <- get(res) + rm(list = res) + } + #} + res <- try(print(table(SampleData$Class)), silent = TRUE) + if (inherits(res, "try-error")) + cat("\nStatistiques d'analyse pour l'?chantillon non disponibles\n") + } else cat("\nCet ?chantillon n'est pas encore analys? avec la m?thode '", .ZI$method, "'.", sep = "") + #head(Dat) + #print(summary(Dat[, c("ECD")])) + #print(attr(Dat, "metadata")) + #plot(Dat$Area, Dat$Perim.) + #cat("Ici, le r?sum? de", Sample) + cat("\n", doAnalysis()) + } + }) + + output$sampleTable <- renderDataTable(options = list(pageLength = 50), { #renderTable({ + if (input$stopButton) { + updateTabsetPanel(session, "mainTabset", selected = "R?sum?") + } else { + doAnalysis() + Sample <- substring(input$sample, 5) + calcSample(Sample, input, output, session) + ## Link to the .zidb file and provide a summary of this sample + #cat("===", Sample, "===\n") + ZIDB <- file.path(inidir, paste(Sample, "zidb", sep = ".")) + + ## Depending if the file is analyzed or not, we look at the + ## ZITest or ZIDat object + if (substr(input$sample, 1, 3) == "[A]") { + ## Get analysis statistics about this sample + #if (!exists("SampleData")) { + ## Download the data! 
+ SampleFile <- file.path(inidir, "_analyses", .ZI$method, #input$method, + paste(Sample, "valid.RData", sep = "_")) + if (file.exists(SampleFile)) { + res <- load(SampleFile) + SampleData <- get(res) + rm(list = res) + } + #} + res <- try(Dat50 <- head(SampleData, n = 50), silent = TRUE) + if (inherits(res, "try-error")) { + Dat <- zidbDatRead(ZIDB) + Dat50 <- head(Dat, n = 50) + Dat50b <- Dat50 + Dat50b$Label <- NULL + Dat50b$Item <- NULL + Dat50b$ECD <- NULL + data.frame(Label = Dat50$Label, Item = Dat50$Item, + ECD = Dat50$ECD, Dat50b) + } else { + Dat50b <- Dat50 + Dat50b$Label <- NULL + Dat50b$Item <- NULL + Dat50b$ECD <- NULL + Dat50b$Class <- NULL + Dat50b$Predicted <- NULL + Dat50b$Id <- NULL + Dat50b$Id.1 <- NULL + data.frame(Label = Dat50$Label, Item = Dat50$Item, + ECD = Dat50$ECD, Class = Dat50$Class, Dat50b) + #Dat50$Predicted,Dat50$ECD, Dat50b) + } + } else { + Dat <- zidbDatRead(ZIDB) + Dat50 <- head(Dat, n = 50) + Dat50b <- Dat50 + Dat50b$Label <- NULL + Dat50b$Item <- NULL + Dat50b$ECD <- NULL + data.frame(Label = Dat50$Label, Item = Dat50$Item, + ECD = Dat50$ECD, Dat50b) + } + } + }) + + output$samplePlot <- renderPlot({ + if (input$stopButton) { + updateTabsetPanel(session, "mainTabset", selected = "R?sum?") + } else { + + ## This is only in shiny 0.10.2!! + #withProgress(message = 'Calculation in progress', + # detail = '...', value = 0, { + # for (i in 1:15) { + # incProgress(1/15, detail = paste0("...", i, "/15")) + # Sys.sleep(0.25) + # } + #}) + + Sample <- substring(input$sample, 5) + calcSample(Sample, input, output, session) + ## Link to the .zidb file and provide a summary of this sample + #cat("===", Sample, "===\n") + ZIDB <- file.path(inidir, paste(Sample, "zidb", sep = ".")) + Dat <- zidbDatRead(ZIDB) + hist(Dat$ECD, col = "cornsilk", breaks = "FD", + main = "Distribution de la taille des particules", + xlab = "ECD", ylab = "Fr?quences") + } + }) + + output$vignettesPlot <- renderPlot({ + if (input$stopButton) { + updateTabsetPanel(session, "mainTabset", selected = "R?sum?") + } else { + Sample <- substring(input$sample, 5) + calcSample(Sample, input, output, session) + ## Link to the .zidb file and provide a summary of this sample + #cat("===", Sample, "===\n") + ZIDB <- file.path(inidir, paste(Sample, "zidb", sep = ".")) + DB <- zidbLink(ZIDB) + Items <- ls(DB) # Contains data in *_dat1 and vignettes in *_nn + ## Eliminate items that are not vignettes + noVig <- grep("_dat1", Items) + if (length(noVig)) Vigs <- Items[-noVig] else Vigs <- Items + ## Display a 5*5 thumbnail of the first 25 vignettes + zidbPlotNew(Sample) + ImgType <- DB$.ImageType + for (i in 1:30) + zidbDrawVignette(DB[[Vigs[i]]], type = ImgType, item = i, + nx = 6, ny = 5) + } + }) + + output$sampleResults <- renderPrint({ + if (input$stopButton) { + updateTabsetPanel(session, "mainTabset", selected = "R?sum?") + } else { + ## Also update the list of samples, depending on both method and newonlyCheck [TRUNCATED] To get the complete diff run: svnlook diff /svnroot/zooimage -r 249 From noreply at r-forge.r-project.org Tue Dec 9 09:35:23 2014 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Tue, 9 Dec 2014 09:35:23 +0100 (CET) Subject: [Zooimage-commits] r250 - pkg/zooimage Message-ID: <20141209083523.D256C1876DE@r-forge.r-project.org> Author: marmayer Date: 2014-12-09 09:35:23 +0100 (Tue, 09 Dec 2014) New Revision: 250 Modified: pkg/zooimage/DESCRIPTION Log: Triggering a rebuild after it has been stuck for a few days. 
Modified: pkg/zooimage/DESCRIPTION =================================================================== --- pkg/zooimage/DESCRIPTION 2014-12-07 07:40:31 UTC (rev 249) +++ pkg/zooimage/DESCRIPTION 2014-12-09 08:35:23 UTC (rev 250) @@ -1,7 +1,7 @@ Package: zooimage Type: Package Title: Analysis of numerical zooplankton images -Version: 5.1-0 +Version: 5.1-1 Date: 2014-12-02 Author: Philippe Grosjean [aut, cre], Kevin Denis [aut] From noreply at r-forge.r-project.org Tue Dec 9 10:03:08 2014 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Tue, 9 Dec 2014 10:03:08 +0100 (CET) Subject: [Zooimage-commits] r251 - pkg/zooimage Message-ID: <20141209090308.755311875E7@r-forge.r-project.org> Author: marmayer Date: 2014-12-09 10:03:08 +0100 (Tue, 09 Dec 2014) New Revision: 251 Modified: pkg/zooimage/DESCRIPTION Log: Test to see if it rebuilds now Modified: pkg/zooimage/DESCRIPTION =================================================================== --- pkg/zooimage/DESCRIPTION 2014-12-09 08:35:23 UTC (rev 250) +++ pkg/zooimage/DESCRIPTION 2014-12-09 09:03:08 UTC (rev 251) @@ -1,7 +1,7 @@ Package: zooimage Type: Package Title: Analysis of numerical zooplankton images -Version: 5.1-1 +Version: 5.1-2 Date: 2014-12-02 Author: Philippe Grosjean [aut, cre], Kevin Denis [aut] From noreply at r-forge.r-project.org Tue Dec 9 10:07:58 2014 From: noreply at r-forge.r-project.org (noreply at r-forge.r-project.org) Date: Tue, 9 Dec 2014 10:07:58 +0100 (CET) Subject: [Zooimage-commits] r252 - pkg/zooimage Message-ID: <20141209090758.D8A3018767B@r-forge.r-project.org> Author: marmayer Date: 2014-12-09 10:07:58 +0100 (Tue, 09 Dec 2014) New Revision: 252 Modified: pkg/zooimage/DESCRIPTION Log: rebuild Modified: pkg/zooimage/DESCRIPTION =================================================================== --- pkg/zooimage/DESCRIPTION 2014-12-09 09:03:08 UTC (rev 251) +++ pkg/zooimage/DESCRIPTION 2014-12-09 09:07:58 UTC (rev 252) @@ -2,7 +2,7 @@ Type: Package Title: Analysis of numerical zooplankton images Version: 5.1-2 -Date: 2014-12-02 +Date: 2014-12-09 Author: Philippe Grosjean [aut, cre], Kevin Denis [aut] Authors at R: c(person("Philippe", "Grosjean", role = c("aut", "cre"),
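Taken together, r248 and r249 amount to a FlowCAM workflow that skips the second ImageJ analysis entirely. A minimal sketch of that workflow is given below; the file paths and the zic classifier object are placeholders, not something defined in these commits:

library(zooimage)

## Import a sample straight from Visual Spreadsheet output; this creates
## "mysample.zidb" next to the sample directory (replace = FALSE keeps an
## existing .zidb file untouched).
importFlowCAM("mysample/mysample.lst", rgb.vigs = FALSE, replace = FALSE)

## Read the particle data back; for FlowCAM data the ECD column is
## recomputed from FIT_Area_ABD (see r249).
dat <- zidbDatRead("mysample.zidb")

## Manual validation / error correction against a trained ZIClass object
## (zic); mode = "demo" and mode = "stat" are the alternatives documented
## in correctError.Rd.
correctError("mysample.zidb", classifier = zic, data = dat,
    mode = "validation")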