[Sciviews-commits] r64 - in pkg/svTools: R data
noreply at r-forge.r-project.org
Wed Oct 29 14:45:41 CET 2008
Author: romain
Date: 2008-10-29 14:45:40 +0100 (Wed, 29 Oct 2008)
New Revision: 64
Added:
pkg/svTools/R/checkUsage.R
pkg/svTools/R/check_description.R
pkg/svTools/R/check_namespace.R
pkg/svTools/R/complete_description.R
pkg/svTools/R/complete_namespace.R
pkg/svTools/R/completion.R
pkg/svTools/R/errorlist.R
pkg/svTools/R/packages.R
pkg/svTools/R/parseError.R
pkg/svTools/R/rdparse.R
pkg/svTools/R/roxygen.R
pkg/svTools/R/search_engines.R
pkg/svTools/R/sidekick.R
pkg/svTools/R/tryParse.R
pkg/svTools/R/trySource.R
pkg/svTools/R/zzz.R
pkg/svTools/data/descriptionFields.rda
Log:
Added: pkg/svTools/R/checkUsage.R
===================================================================
--- pkg/svTools/R/checkUsage.R (rev 0)
+++ pkg/svTools/R/checkUsage.R 2008-10-29 13:45:40 UTC (rev 64)
@@ -0,0 +1,117 @@
+#' Wrapper for the checkUsage function in codetools
+#'
+#' Wrapper for the checkUsage function in codetools.
+#' This one parses a file, calls checkUsage on every function defined in the
+#' file and identifies where each finding of checkUsage is located
+#' @export
+#' @param file file to analyse
+#' @return A data frame containing information about errors
+#' @author Romain Francois \email{francoisromain@@free.fr}
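+#' @examples
+#' # minimal usage sketch: check a small file containing one function
+#' \dontrun{
+#' tf <- tempfile( fileext = ".R" )
+#' writeLines( "f <- function(x) { unused <- 1; x + 1 }", tf )
+#' checkUsageFile( tf )
+#' unlink( tf )
+#' }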
+checkUsageFile <- function( file ){
+
+ ### first parse for errors
+ p.out <- tryParse( file, action = .addError )
+ if( p.out %of% "data.frame" ){
+ return( getErrors( file = file ) )
+ }
+
+ resetErrors( file = file )
+
+ # silly hack to retrieve information from codetools
+ report <- function( x ){
+ findings <<- c( findings, x )
+ }
+
+ ..addError <- function( line, msg ){
+ addError( line = line, message = msg %-~% "(\\\n|^: )" , file = file, type = "warning" )
+ }
+
+
+ finding <- function( txt, p, i, rx, rx2 ){
+ param <- sub( rx, "\\1", txt )
+ param <- gsub( "\\.", "\\\\.", param )
+ exprs <- p[[i]][[3]][[3]]
+ srcref <- do.call( rbind, lapply( attr( exprs, "srcref" ), as.integer ) )
+ for( j in 1:length( exprs ) ){
+ src <- as.character( attr( exprs, "srcref" )[[j]] )
+ matchingLines <- grep( sprintf(rx2, param), src )
+ if( length( matchingLines ) ){
+ return( matchingLines + as.integer( srcref[j,1] ) - 1 )
+ }
+ }
+ }
+
+ find.parameter_changed_by_assignment <- function( txt, p, i ){
+ finding( txt, p, i,
+ rx = "^.*: parameter .(.*). changed by assignment\\\n",
+ rx2 = "[^.a-zA-Z0-9_]*%s[[:space:]]*(=|<-|<<-)" )
+ }
+
+ find.local_assigned_but_not_used <- function( txt, p, i ){
+ finding( txt, p, i,
+ rx = "^.*: local variable .(.*). assigned but may not be used\\\n",
+ rx2 = "^[^.a-zA-Z0-9_(,]*%s[[:space:]]*(=|<-|<<-)" )
+ }
+
+ find.no_global_def <- function( txt, p, i ){
+ finding( txt, p, i,
+ rx = "^.*: no visible global function definition for .(.*).\\\n",
+ rx2 = "[^.a-zA-Z0-9_]*%s[[:space:]]*\\(" )
+ }
+
+ find.no_local_def_as_function <- function( txt, p, i ){
+ finding( txt, p, i,
+ rx = "^.*: local variable .(.*). used as function with no apparent local function definition\\\n",
+ rx2 = "[^.a-zA-Z0-9_]*%s[[:space:]]*\\(" )
+ }
+
+ find.multiple_local_def <- function( txt, p, i ){
+ finding( txt, p, i,
+ rx = "^.*: multiple local function definitions for .(.*). with different formal arguments\\\n",
+ rx2 = "[^.a-zA-Z0-9_]*%s[[:space:]]*(=|<-|<<-)[[:space:]]*function" )
+ }
+
+
+ searchAndReport <- function( regex, fun ){
+ if( length( test.match <- grep( regex, findings ) ) ){
+ for( j in test.match ){
+ out <- fun( findings[j], p.out, i)
+ if( length( out) ){
+ ..addError( out, findings[j] )
+ }
+ }
+ }
+ }
+
+
+ for( i in 1:length(p.out) ){
+ if( looksLikeAFunction( p.out[[i]] ) ){
+ env <- new.env()
+ eval( p.out[[i]], envir = env )
+ fname <- ls( env )
+ if( length(fname) == 1){
+ findings <- NULL
+ codetools:::checkUsage( env[[fname]], all = TRUE, report = report, name = "" )
+ if( length(findings) ){
+
+				searchAndReport( "changed by assignment"                , find.parameter_changed_by_assignment )
+				searchAndReport( "assigned but may not be used"         , find.local_assigned_but_not_used )
+				searchAndReport( "no visible global function definition", find.no_global_def )
+				searchAndReport( "no apparent local function definition", find.no_local_def_as_function )
+				searchAndReport( "multiple local function definitions"  , find.multiple_local_def )
+
+ # TODO :this needs to be improved to deal with nested functions
+ if( length( test.assign <- grep( "’ may not be used", findings ) ) ){
+ for( j in test.assign ){
+ ..addError( attr(p.out, "srcref")[[i]][1] , findings[j] )
+ }
+ }
+
+ }
+ }
+ }
+ }
+ getErrors( file = file )
+
+}
+
Added: pkg/svTools/R/check_description.R
===================================================================
--- pkg/svTools/R/check_description.R (rev 0)
+++ pkg/svTools/R/check_description.R 2008-10-29 13:45:40 UTC (rev 64)
@@ -0,0 +1,121 @@
+
+check_description <- function( descfile, txt = readLines( descfile ) ){
+
+ txt <- txt %/~% "\\\n"
+ resetErrors( file = descfile )
+ ..addError <- function( file = descfile, line, message, type = "error" ) {
+ addError(file=file,line=line, message = message, type = type )
+ }
+
+ ### check mandatory fields
+ for( mandatory in c("Package", "Version", "License", "Description", "Title", "Author", "Maintainer" )){
+ if( !any( txt %~% sprintf("^%s", mandatory) ) ){
+ ..addError( line = 1, message = sprintf("field `%s` is mandatory", mandatory ) )
+ }
+ }
+
+ ### check the fields
+ fields <- txt %~|% "^[^:]+:" %-~% "[[:space:]]*:.*$"
+ if( ! all( test <- fields %in% descriptionFields[,1] ) ){
+ wrongFields <- fields[!test]
+ lapply( wrongFields, function(x){
+ rx.out <- regexpr( sprintf("^%s *:", x), txt )
+ line <- which( rx.out != -1 )
+ ..addError( line = line, message = sprintf("Wrong field : `%s`", x ) )
+ })
+ }
+
+ ### check the package name
+ package <- grep("^Package[[:space:]]*:", txt )
+ if( length(package )) {
+ packageName <- txt[package] %-~% "(^[^:]*:| )"
+ if( packageName %!~% "^[a-zA-Z][\\.0-9a-zA-Z]*$" ){
+ ..addError( line = package, message = "wrong package name")
+ }
+ }
+
+ ### check the version
+ version <- grep("^Version:", txt )
+ if( length(version) ){
+ versionNumber <- txt[version] %-~% "(^[^:]*:| )"
+ # TODO: handle translation packages
+ if( versionNumber %~% "[^0-9\\.-]" ){
+ ..addError( line = version, message = "Wrong format for the version number", type = "warning" )
+ }
+ nfields <- length(versionNumber %/~% "[-\\.]" )
+ if( nfields < 2){
+ ..addError( line = version, message = "Wrong version number, need at least two fields" , type = "warning")
+ }
+ if( nfields > 3 ){
+ ..addError( line = version, message = "Wrong version number, too many fields", type = "warning" )
+ }
+ }
+
+ ### check maintainer
+ maintainer <- grep( "^Maintainer:", txt )
+ if(length(maintainer)){
+		maintainerLine <- txt[ maintainer ] %-~% "^[^:]*: *"
+		if( maintainerLine %~% "[\\.,]$" ){
+			..addError( line = maintainer, message = "the Maintainer field should not end with a period or a comma" )
+		}
+		if( length(maintainerLine %/~% "@") != 2 ){
+			..addError( line = maintainer, 
+				message = "there should be exactly one email address in the Maintainer field" )
+ }
+ email <- maintainerLine %-~% "(^[^<]*<|>[^>]*$)"
+ if( email %!~% "[^@]+@[^@]+" | email %~% "[[:space:]]" ){
+ ..addError( line = maintainer,
+				message = paste("wrong email address: '", email, "'", sep = "" ) )
+ }
+ }
+
+ ### check date
+ date <- grep("^Date", txt )
+ if(length(date)){
+ dateLine <- txt[date] %-~% "(^[^:]*:| )"
+ if( dateLine %!~% "^[0-9]{4}-[0-9]{1,2}-[0-9]{1,2}$" ){
+ ..addError( line = date, message = "the date should be in format yyyy-mm-dd" )
+ }
+ }
+
+ ### check the dependencies
+ # FIXME : all the stuff below comes from tools, I need to figure out what to do with it
+ db <- tools:::.read_description(descfile)
+ depends <- tools:::.get_requires_from_package_db(db, "Depends")
+ imports <- tools:::.get_requires_from_package_db(db, "Imports")
+ suggests <- tools:::.get_requires_from_package_db(db, "Suggests")
+ standard_package_names <- tools:::.get_standard_package_names()
+ bad_depends <- list()
+ reqs <- unique(c(depends, imports, if (!identical(as.logical(Sys.getenv("_R_CHECK_FORCE_SUGGESTS_")),
+ FALSE)) suggests))
+ installed <- character(0)
+ for (lib in .libPaths()) {
+ pkgs <- list.files(lib)
+ pkgs <- pkgs[file.access(file.path(lib, pkgs, "DESCRIPTION"),
+ 4) == 0]
+ installed <- c(pkgs, installed)
+ }
+ installed <- sub("_.*", "", installed)
+ reqs <- reqs %without% installed
+ m <- reqs %in% standard_package_names$stubs
+ if (length(reqs[!m]))
+ bad_depends$required_but_not_installed <- reqs[!m]
+ if (length(reqs[m]))
+ bad_depends$required_but_stub <- reqs[m]
+
+ if( length(bad <- bad_depends$required_but_not_installed) ) {
+ ..addError( line = grep("^(Depends|Suggests|Enhances)", txt),
+ message = paste("package `",bad,"` required but not installed", sep = "") )
+ }
+
+ if( length(bad <- bad_depends$required_but_stub) ) {
+ ..addError( line = grep("^(Depends|Suggests|Enhances)", txt),
+ message = paste("package `",bad,"` required but stub", sep = "") )
+ }
+
+ invisible( getErrors( file = descfile ) )
+
+}
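+
+## Usage sketch: check a minimal DESCRIPTION written to a temporary file.
+## desc <- tempfile()
+## writeLines( c( "Package: foo", "Version: 1.0-0", "Title: Foo",
+## 	"Description: An example package.", "Author: A. B.",
+## 	"Maintainer: A. B. <a.b@example.org>", "License: GPL-2" ), desc )
+## check_description( desc )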
+
+
+
Added: pkg/svTools/R/check_namespace.R
===================================================================
--- pkg/svTools/R/check_namespace.R (rev 0)
+++ pkg/svTools/R/check_namespace.R 2008-10-29 13:45:40 UTC (rev 64)
@@ -0,0 +1,161 @@
+
+namespaceDirectives <- c("export",
+ "exportPattern", "exportClass", "exportClasses",
+ "exportMethods", "import", "importFrom",
+ "importClassFrom", "importClassesFrom",
+ "importMethodsFrom", "useDynLib", "S3method", "if" )
+
+namespaceParser <- function( NAMESPACE, checkPackages = TRUE ){
+ resetErrors( file = NAMESPACE )
+ if( checkPackages ) allpacks <- .packages( all.available = TRUE )
+ ### look for the 'object is not subsettable' error
+ test <- try( tools:::.check_namespace(
+ dirname( tools:::file_path_as_absolute( NAMESPACE ) ) ),
+ silent = TRUE )
+	if( test %of% "try-error" ){
+		if( test %~% "object is not subsettable" ){
+			# the parsed NAMESPACE is needed here to locate the offending directive
+			p <- suppressWarnings( parse( NAMESPACE ) )
+			lengths <- sapply( p, length )
+ if( any( lengths == 1 ) ){
+ line <- attr( p, "srcref" )[[ which( lengths == 1 )[1] ]] [1]
+ addError(file=NAMESPACE, line=line, message = "object is not subsettable" )
+ }
+ } else {
+ addError( parseError( test ) )
+ }
+ }
+
+ ### look for unexpected namespace directives
+ p <- suppressWarnings( parse( NAMESPACE ) )
+
+ directives <- sapply( p, function(x) as.character(x[[1]]) )
+ if( any( test <- ! directives %in% namespaceDirectives ) ){
+ problemLine <- sapply( attr( p, "srcref" )[ test ], function(x) as.integer(x[1] ) )
+ addError(file=NAMESPACE, line=problemLine,
+ message = paste( "`", directives[test] , "` : Wrong NAMESPACE directive", sep = ""),
+ type = "warning" )
+ }
+
+
+	nS3 <- 0
+	dynlibs <- character( 0 )
+
+ ### parse the directives and look for the unexpected
+ parseDirective <- function(e, srcref, p, i) {
+ asChar <- function(cc) {
+ r <- as.character(cc)
+ if (any(r == "")){
+ addError( file = NAMESPACE, type = "error",
+ message = gettextf("empty name in directive '%s' in NAMESPACE file", as.character(e[[1]]) ),
+ line = srcref[1] )
+ }
+ r
+ }
+ switch(as.character(e[[1]]),
+			"if" = if (eval(e[[2]], .GlobalEnv)) parseDirective(e[[3]], srcref, p, i) else if (length(e) == 4) parseDirective(e[[4]], srcref, p, i), 
+			"{" =  for (ee in as.list(e[-1])) parseDirective(ee, srcref, p, i),
+			"=" = ,
+			"<-" = {
+				parseDirective(e[[3]], srcref, p, i)
+				if (as.character(e[[3]][[1]]) == "useDynLib")
+					names(dynlibs)[length(dynlibs)] <<- asChar(e[[2]])
+			}, export = {
+ exp <- e[-1]
+ exp <- structure(asChar(exp), names = names(exp))
+ if( !length( exp ) ){
+ addError( file = NAMESPACE, line = srcref[1],
+ message = "empty export", type = "warning" )
+ }
+ # TODO: check that the object exists
+ }, exportPattern = {
+ pat <- asChar(e[-1])
+ if( !length( pat ) ){
+ addError( file = NAMESPACE, line = srcref[1],
+ message = "empty pattern", type = "warning" )
+ }
+ if( asChar( attr( p, "srcref") [[i]]) %~% "[^\\\\]\\\\[^\\\\]" ){
+ addError( file= NAMESPACE, line = srcref[1],
+ message = "wrong pattern, need to double escape",
+ type = "warning" )
+ }
+ # TODO: try to match the regex against object names
+ # and warn if there is no match
+ }, exportClass = , exportClasses = {
+ # TODO: check that the class is defined
+ }, exportMethods = {
+ # TODO: check that the methods are defined
+ }, import = {
+ packages <- asChar(e[-1])
+ if( !length( packages) ){
+ addError( file = NAMESPACE, line = srcref[1],
+ message = "empty import directive", type = "warning" )
+ }
+ test <- packages %in% allpacks
+ if( any(!test) ){
+ addError( line = srcref[1], file = NAMESPACE, type = "error",
+ message = sprintf( "package `%s` is set to be imported but is not available", packages[!test] ) )
+ }
+ },
+ importFrom = {
+ imp <- asChar( e[-1] )
+ if( length( imp) < 2 ){
+ addError( file = NAMESPACE, line = srcref[1],
+ message = "Not enough information in importFrom directive",
+ type = "error" )
+ } else{
+ if( ! require( imp[1], character.only = TRUE ) ){
+ addError( line = srcref[1], file = NAMESPACE, type = "error",
+ message = sprintf( "package `%s` is set to be imported but is not available", imp[1] ) )
+ } else if( any( test <- !imp[-1] %in% ls( sprintf("package:%s", imp[1]) ) ) ){
+ addError( line = srcref[1], file = NAMESPACE, type = "error",
+ message = sprintf("object `%s` not exported from %s", imp[-1][test], imp[1]) )
+ }
+ # TODO: check if the variables are exported from the package
+ }
+
+ }, importClassFrom = , importClassesFrom = {
+ imp <- asChar( e[-1] )
+ if( length( imp) < 2 ){
+ addError( file = NAMESPACE, line = srcref[1],
+					message = "Not enough information in importClassesFrom directive",
+ type = "error" )
+ } else{
+ if( ! require( imp[1], character.only = TRUE ) ){
+ addError( line = srcref[1], file = NAMESPACE, type = "error",
+ message = sprintf( "package `%s` is set to be imported but is not available", imp[1] ) )
+
+ }
+ # TODO: check if the classes are exported from the package
+ }
+ }, importMethodsFrom = {
+ imp <- asChar( e[-1] )
+ if( length( imp) < 2 ){
+ addError( file = NAMESPACE, line = srcref[1],
+					message = "Not enough information in importMethodsFrom directive",
+ type = "error" )
+ } else{
+ if( ! require( imp[1], character.only = TRUE ) ){
+ addError( line = srcref[1], file = NAMESPACE, type = "error",
+ message = sprintf( "package `%s` is set to be imported but is not available", imp[1] ) )
+
+ }
+ # TODO: check if the methods are exported from the package
+ }
+ }, useDynLib = {
+ # TODO: do something about it
+ }, S3method = {
+ spec <- e[-1]
+ if (length(spec) != 2 && length(spec) != 3)
+ addError( message = gettextf("bad 'S3method' directive: %s", deparse(e)),
+ file = NAMESPACE, line = srcref[1], type = "error" )
+ nS3 <<- nS3 + 1
+ if (nS3 > 500)
+ addError( message= "too many 'S3method' directives",
+ file = NAMESPACE, line = srcref[1], type = "error" )
+ } )
+ }
+ for (i in 1:length(p) ) {
+ srcref <- attr( p, "srcref" )
+ parseDirective( p[[i]], as.integer( srcref[[i]] ), p, i )
+ }
+ invisible( getErrors( file = NAMESPACE ) )
+
+}
+
Added: pkg/svTools/R/complete_description.R
===================================================================
--- pkg/svTools/R/complete_description.R (rev 0)
+++ pkg/svTools/R/complete_description.R 2008-10-29 13:45:40 UTC (rev 64)
@@ -0,0 +1,94 @@
+
+completeDescription <- function( file, row, col, text = readLines(file),
+ author = getOption( "svTools.description.author" ) ){
+
+ if( missing(text) ){
+ n <- if( missing(row) ) -1 else row
+ rl <- readLines( file, n = n )
+ row <- length(rl)
+ if(missing(col)) col <- nchar(rl[row])
+ } else{
+ rl <- text %/~% "\\\n"
+ row <- length(rl )
+ col <- nchar( rl[row] )
+ }
+ lastLine <- rl[ row ] <- substring( rl[row], 1, col )
+
+ if( lastLine %~% "^( +|[^:]+:)" ){
+ # extract the last field
+ lastField <- tail( which( rl %~% "^[^:]+:" ), 1 )
+ field <- rl[ lastField ] %-~% "(:.*$|[[:space:]]+)"
+
+ ### complete package names
+ if( field %in% c("Depends","Suggests", "Enhances", "Imports") ){
+ start <- lastLine %-~% ".*[,[:space:]]"
+ packages <- installedPackages( pattern = start )[, c("Package", "Title"), drop = FALSE ]
+ return( list( data = packages, token = start, ok = 1, type = "package" ) )
+ }
+
+ ### use the "svTools.description.author" option to complete
+ if( field %in% c("Author", "Maintainer" ) ){
+ if( !is.null( author) ){
+ return( list( ok = 1, data = cbind( author, "" ), token = lastLine %-~% ".*: *", type = "other" ) )
+ } else return( list( ok = 0 ) )
+ }
+
+ ### possible licenses
+ if( field == "License"){
+ possibleLicenses <- rbind(
+ c( "GPL-2", 'The "GNU General Public License" version 2 ' ),
+ c( "GPL-3", 'The "GNU General Public License" version 3 ' ),
+ c( "LGPL-2", 'The "GNU Library General Public License" version 2 ' ),
+ c( "LGPL-2.1", 'The "GNU Lesser General Public License" version 2.1 ' ),
+ c( "LGPL-3", 'The "GNU Lesser General Public License" version 3 ' ),
+ c( "AGPL-3", 'The "GNU Affero General Public License" version 3 ' ),
+ c( "Artistic-1.0", 'The "Artistic License" version 1.0 ' ),
+ c( "Artistic-2.0", 'The "Artistic License" version 2.0 ' ) )
+ return( list( ok = 1, data = possibleLicenses, token = lastLine %-~% ".*: *", type = "other" ) )
+ }
+
+ ### propose today's date
+ if( field == "Date"){
+ data <- cbind( format( Sys.time( ) , "%Y-%m-%d" ), "Today" )
+ return( list( ok = 1, data = data, token = lastLine %-~% ".*: *" , type = "other" ) )
+ }
+
+ if( field %in% c("LazyLoad", "LazyData", "ZipData") ){
+ data <- rbind( c("yes", ""), c("no", "" ) )
+ return( list( ok = 1, data = data, token = lastLine %-~% ".*: *" , type = "other" ) )
+ }
+
+ if( field == "Encoding" ){
+ data <- rbind(
+ c("latin1" , "" ),
+ c("latin2" , "" ),
+ c("UTF-8" , "" ) )
+ return( list( ok = 1, data = data, token = lastLine %-~% ".*: *" , type = "other" ) )
+ }
+
+ if( field == "Type" ){
+ data <- rbind(
+ c("Package" , "Usual package" ),
+ c("Translation" , "Translation package" ),
+ c("Frontend" , "Frontend package" ) )
+ return( list( ok = 1, data = data, token = lastLine %-~% ".*: *" , type = "other" ) )
+ }
+
+ ### give up
+ return( list( ok = 0 ) )
+
+ } else{
+ if( lastLine %~% "[^[:alpha:]]" ){
+ return( list( ok = 0 ) )
+ } else{
+ keep <- descriptionFields[,1] %~% lastLine | descriptionFields[,3] %~% lastLine
+ data <- as.matrix( descriptionFields[ keep, c(1, 3), drop = FALSE ] )
+
+ data[,1] <- paste( data[,1], ": ", sep = "")
+ return( list( data = data, ok = 1, token = lastLine, type = "fields" ) )
+ }
+ }
+
+
+}
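+
+## Usage sketch: completion candidates from a partially typed DESCRIPTION
+## (the `text` argument stands in for the file content, so the file itself is not read).
+## completeDescription( file = "DESCRIPTION", text = "Lic" )        # field names
+## completeDescription( file = "DESCRIPTION", text = "License: " )  # license values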
+
Added: pkg/svTools/R/complete_namespace.R
===================================================================
--- pkg/svTools/R/complete_namespace.R (rev 0)
+++ pkg/svTools/R/complete_namespace.R 2008-10-29 13:45:40 UTC (rev 64)
@@ -0,0 +1,25 @@
+
+namespaceComplete <- function( line ){
+
+ ### export
+ if( line %~% "^[[:space:]]*export[[:space:]]*\\(" ){
+ ex <- line %-~% ".*[(,][[:space:]]*"
+ # TODO: parse the source files for functions
+ }
+
+ ### import
+ if( line %~% "^[[:space:]]*import[[:space:]]*\\(" ){
+ im <- line %-~% ".*[(,][[:space:]]*"
+ allpacks <- installedPackages( pattern = im )[,c("Package","Title")]
+ return( list( data = allpacks, type = "package" ) )
+ }
+
+ ### importFrom
+ if( line %~% "^[[:space:]]*importFrom[[:space:]]*\\([^,]*$" ){
+ im <- line %-~% ".*[(][[:space:]]*"
+ allpacks <- installedPackages( pattern = im )[,c("Package","Title")]
+ return( list( data = allpacks, type = "package" ) )
+ }
+
+}
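+
+## Usage sketch: completion candidates for a partially typed NAMESPACE directive.
+## namespaceComplete( "import(MA" )
+## namespaceComplete( "importFrom(stats" )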
+
Added: pkg/svTools/R/completion.R
===================================================================
--- pkg/svTools/R/completion.R (rev 0)
+++ pkg/svTools/R/completion.R 2008-10-29 13:45:40 UTC (rev 64)
@@ -0,0 +1,81 @@
+
+CompletePlusWrap <- function( ... ){
+ out <- CompletePlus( ..., minlength = 1 )
+ types <- rep( "function" , nrow(out ) )
+ completions <- out[,1]
+ types[ completions %~% "= *$" ] <- "argument"
+ types[ completions %~% ":: *$" ] <- "package"
+ # arguments first, then functions, then packages
+ out <- cbind( out, types ) [ order(types),, drop = FALSE ]
+
+ token <- utils:::.guessTokenFromLine( )
+ fun <- utils:::inFunction()
+ if(length(fun) && !is.na(fun)){
+ tooltip <- CallTip( fun )
+ } else {
+ tooltip <- NULL
+ fun <- ""
+ }
+
+ list( data = out, token = token,
+ fun = fun, tooltip = tooltip )
+}
+
+pchComplete <- function( line ){
+ allPch <- 1:25
+ if( line %~% "pch *= *[^,)]*$" ){
+ start <- line %-~% "^.*= *"
+ if( start %!~% "^ *[0-9]+ *$" ){
+ completions <- allPch
+ } else{
+ int <- try(as.integer(start), silent = TRUE)
+ if( int %of% "try-error" ) completions <- allPch
+ out <- as.integer( grep( start, allPch, value = TRUE ) )
+ completions <- if( length(out ) ) out else allPch
+ }
+ return( list( completions = completions, token = start ) )
+ }
+}
+
+colComplete <- function( line ){
+
+ token <- sub( "^.*=[[:space:]]*", "", line )
+ start <- token %-~% "[[:space:]]+"
+
+ if( start %~% "['\"]" ){
+ ### look at named colors
+ start <- start %-~% "['\"]"
+
+ cols <- ( allColors <- colors() ) %~|% start
+ if( !length( cols ) ) cols <- allColors
+ rgb <- t( col2rgb( cols ) )
+ cols <- paste( '"', cols, '"', sep = "" )
+ } else {
+ ### look at colors in the palette
+ pal <- palette()
+ if( nchar(start) ) {
+ cols <- 1:length(pal) %~|% start
+ if( !length( cols ) ){
+ cols <- 1:length(pal)
+ }
+ } else cols <- 1:length(pal)
+ rgb <- t( col2rgb( pal[cols] ) )
+ }
+ list( token = token, names = as.character(cols), rgb = rgb )
+
+}
+
+ltyComplete <- function( line ){
+
+ ltys <- c("blank", "dashed", "solid", "dotted", "longdash", "twodash" )
+
+ token <- line %-~% "^.*=[[:space:]]*"
+ start <- token %-~% "([[:space:]]+|'|\")"
+
+ matches <- ltys %~|% start
+ if( !length( matches ) ) matches <- ltys
+
+ list( lty = matches , token = token )
+
+}
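+
+## Usage sketch: completion of graphical parameters from a partial call line.
+## pchComplete( "plot(x, y, pch = 1" )
+## colComplete( "plot(x, y, col = 're" )
+## ltyComplete( "plot(x, y, lty = 'do" )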
+
Added: pkg/svTools/R/errorlist.R
===================================================================
--- pkg/svTools/R/errorlist.R (rev 0)
+++ pkg/svTools/R/errorlist.R 2008-10-29 13:45:40 UTC (rev 64)
@@ -0,0 +1,72 @@
+#' Reset the errors
+#'
+#' Removes errors concerning a file or a set of files from the list of errors
+#' @export
+#' @param file file for which errors should be removed
+#' @param pattern Files matching this regular expression will be removed from the error list.
+#' The matching is performed on the \code{\link{basename}} of the file
+#' @return Nothing, only used for its side effect
+#' @author Romain Francois \email{francoisromain@@free.fr}
+resetErrors <- function( file = NULL, pattern = NULL ){
+ if( !is.null(file) ){
+ svTools.env$.errors <- svTools.env$.errors[ ! svTools.env$.errors$file %in% sapply( file, tools:::file_path_as_absolute ) , , drop = FALSE]
+ } else if( !is.null(pattern) && any( basename( svTools.env$.errors$file ) %~% pattern) ){
+		svTools.env$.errors <- svTools.env$.errors[ basename( svTools.env$.errors$file ) %!~% pattern , , drop = FALSE]
+ } else{
+ svTools.env$.errors <- emptyError( )
+ }
+ invisible( NULL )
+}
+
+#' Add an error to the list of errors
+#'
+#' Adds an error to the list of errors
+#' @export
+#' @param file file in which the error is observed.
+#' (this argument can also be a structured error, see \code{\link{parseError}}, in which case all other arguments are ignored)
+#' @param line line of the file in which the error happens
+#' @param message error message
+#' @param type type of error (typically error or warning)
+#' @return Nothing. Only used for side effects
+#' @author Romain Francois \email{francoisromain@@free.fr}
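+#' @examples
+#' # minimal sketch: record, retrieve and clear an error for a temporary file
+#' \dontrun{
+#' tf <- tempfile( fileext = ".R" )
+#' file.create( tf )
+#' addError( file = tf, line = 1, message = "example problem", type = "warning" )
+#' getErrors( file = tf )
+#' resetErrors( file = tf )
+#' unlink( tf )
+#' }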
+addError <- function( file, line=1, message="", type = "error" ){
+ d <- if( file %of% "data.frame" && all( c("file", "line", "message", "type") %in% colnames( file) ) ){
+ file
+ } else{
+ data.frame( file = tools:::file_path_as_absolute(file), line = line,
+ message = message, type = type, stringsAsFactors = FALSE)
+ }
+ svTools.env$.errors <- rbind( svTools.env$.errors, d )
+}
+
+#' Retrieve errors stored in the error list
+#'
+#' Facility to retrieve the errors stored in the error list
+#' @export
+#' @param file if specified, only errors about this file will be returned
+#' @param pattern if specified, only errors of files matching the pattern will be returned
+#' @return A data frame with columns file, line, message and type
+#' @author Romain Francois \email{francoisromain@@free.fr}
+getErrors <- function( file = NULL , pattern = NULL){
+ out <- svTools.env$.errors
+	if( ! is.null( pattern) && any( basename( out$file ) %~% pattern) ) {
+		out <- out[ basename( out$file ) %~% pattern, ]
+ }
+ if( ! is.null(file) ){
+ out <- out[ out$file == tools:::file_path_as_absolute(file) , ]
+ }
+ if( nrow( out ) ) out[ order( out$file, out$line) , ] else emptyError()
+}
+
+#' Creates an empty structured error
+#'
+#' Creates an empty structured error, a data frame with all the columns needed for the
+#' structured errors, but with no lines
+#' @export
+#' @return A data frame with columns file, message, type and line
+#' @author Romain Francois \email{francoisromain@@free.fr}
+emptyError <- function(){
+ data.frame( file = character(0), line = integer(0), message = character(0),
+ type = character(0), stringsAsFactors = FALSE)
+}
+
Added: pkg/svTools/R/packages.R
===================================================================
--- pkg/svTools/R/packages.R (rev 0)
+++ pkg/svTools/R/packages.R 2008-10-29 13:45:40 UTC (rev 64)
@@ -0,0 +1,74 @@
+
+loadedPackages <- function(){
+ s <- grep( "^package:", search(), value = TRUE )
+ sub("^package:", "", s )
+}
+
+installedPackages <- function( pattern = NULL){
+ ip <- installed.packages( fields = "Title" )
+ if( !is.null(pattern) ){
+ keep <- suppressWarnings( union(
+ grep( pattern , ip [,"Package"], ignore.case = TRUE ),
+ grep( pattern , ip [,"Title"], ignore.case = TRUE ) ) )
+ ip <- ip[ keep, , drop = FALSE]
+ }
+ lp <- loadedPackages()
+ def <- c( getOption("defaultPackages"), "base")
+ ip <- cbind( ip,
+ "Loaded" = ifelse( ip[,'Package'] %in% lp , 1, 0 ),
+ "Default" = ifelse( ip[,'Package'] %in% def, 1, 0 )
+ )
+ ip
+}
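+
+## Usage sketch: list installed packages whose name or Title matches a pattern,
+## with Loaded and Default indicator columns appended.
+## installedPackages( pattern = "^sv" )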
+
+
+packdesc <- function (pkg, lib.loc = NULL, fields = NULL, drop = TRUE, encoding = "")
+{
+ retval <- list()
+ if (!is.null(fields)) {
+ fields <- as.character(fields)
+ retval[fields] <- NA
+ }
+ pkgpath <- ""
+ if (is.null(lib.loc)) {
+ if (pkg == "base")
+ pkgpath <- file.path(.Library, "base")
+ else if ((envname <- paste("package:", pkg, sep = "")) %in%
+ search()) {
+ pkgpath <- attr(as.environment(envname), "path")
+ if (is.null(pkgpath))
+ pkgpath <- ""
+ }
+ }
+ if (pkgpath == "") {
+ libs <- if (is.null(lib.loc))
+ .libPaths()
+ else lib.loc
+ for (lib in libs) if (file.access(file.path(lib, pkg),
+ 5) == 0) {
+ pkgpath <- file.path(lib, pkg)
+ break
+ }
+ }
+ if (pkgpath == "") {
+ pkgpath <- system.file(package = pkg, lib.loc = lib.loc)
+ if (pkgpath == "") {
+ warning(gettextf("no package '%s' was found", pkg),
+ domain = NA)
+ return(NA)
+ }
+ }
+ file <- file.path(pkgpath, "DESCRIPTION")
+ readLines(file)
+}
+
+packwebdesc <- function(pack, repos, width = 60){
+ temp <- tempfile(); on.exit(unlink(temp))
+ txt <- suppressWarnings( try({
+ download.file( sprintf("%s/Descriptions/%s.DESCRIPTION",repos,pack), destfile=temp, quiet = TRUE)
+ readLines(temp)
+ }, silent = TRUE) )
+ if( inherits(txt, "try-error") ) txt <- ""
+ txt
+}
+
Added: pkg/svTools/R/parseError.R
===================================================================
--- pkg/svTools/R/parseError.R (rev 0)
+++ pkg/svTools/R/parseError.R 2008-10-29 13:45:40 UTC (rev 64)
@@ -0,0 +1,24 @@
+#' Parses the content of an error generated by parse
+#'
+#' Makes sense of a \code{parse} error and structures the information
+#' @export
+#' @param err An error as generated by \code{parse}
+#' @param only.last Should only the last line of the error be output into the structure?
+#' @return A data frame with the following columns: file, line, message and type
+#' @author Romain Francois \email{francoisromain@@free.fr}
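+#' @examples
+#' # minimal sketch: structure the message of a deliberate syntax error
+#' \dontrun{
+#' tf <- tempfile( fileext = ".R" )
+#' writeLines( "f <- function(x { x }", tf )
+#' err <- try( parse( tf ), silent = TRUE )
+#' if( inherits( err, "try-error" ) ) parseError( err )
+#' unlink( tf )
+#' }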
+parseError <- function( err, only.last = TRUE ){
+ msg <- strsplit( err, "\\\n" )[[1]]
+ line.nb <- as.integer(msg %~|% "^[[:digit:]]" %!~|% "^[[:digit:]]+:[[:space:]]*$" %-~|% ":.*" )
+ if( only.last ){
+ line.nb <- tail( line.nb, 1 )
+ }
+ msg.line <- paste(msg %!~|% "^[[:digit:]]+", collapse=" " ) %/~% " *: *"
+ file <- msg.line[2]
+ err.msg <- msg.line[3]
+ data.frame( file = rep(file, length(line.nb)),
+ line = line.nb,
+ message = rep(err.msg, length(line.nb) ),
+ type = rep( "error", length(line.nb) ),
+ stringsAsFactors = FALSE )
+}
+
Added: pkg/svTools/R/rdparse.R
===================================================================
--- pkg/svTools/R/rdparse.R (rev 0)
+++ pkg/svTools/R/rdparse.R 2008-10-29 13:45:40 UTC (rev 64)
@@ -0,0 +1,18 @@
+
+rdparse <- function( file ){
+ rdfile <- readLines( file )
+ index <- cumsum( rdfile %~% "^\\\\[[:alpha:]]" )
+
+ chunks <- lapply( unique( index ) , function(i) rdfile[index==i] )
+ names. <- sapply( chunks, function(x) x[1] %-~% "(^\\\\|{.*$)" )
+
+ cs <- cumsum( c(0, nchar( rdfile ) ) )
+ offset.start <- cs[ sapply( 1:max(index), function(i) which(index==i)[1] ) ]
+ offset.end <- cs[ 1 + sapply( 1:max(index), function(i) tail( which(index==i), 1 ) ) ]
+
+ list( offset.start = offset.start,
+ offset.end = offset.end,
+ chunks = chunks, names= names. )
+}
+
+
Added: pkg/svTools/R/roxygen.R
===================================================================
--- pkg/svTools/R/roxygen.R (rev 0)
+++ pkg/svTools/R/roxygen.R 2008-10-29 13:45:40 UTC (rev 64)
@@ -0,0 +1,197 @@
+
+roxygenComplete <- function( line = "#'" ){
+
+ roxygenTags <- rbind(
+ c( "author", "\\author" , "Author of the function" ) ,
+ c( "aliases", "\\alias, ..." , "" ) ,
+ c( "concept", "\\concept" , "" ) ,
+ c( "examples", "\\examples" , "" ) ,
+ c( "keywords", "\\keyword, ..." , "" ) ,
+ c( "method", "\\method" , "" ) ,
+ c( "name", "\\name" , "" ) ,
+ c( "note", "\\note" , "" ) ,
+ c( "param", "\\arguments{\\item, ...}" , "" ) ,
+ c( "references", "\\references" , "" ) ,
+ c( "return", "\\value" , "" ) ,
+ c( "seealso", "\\seealso" , "" ) ,
+ c( "title", "\\title" , "" ) ,
+ c( "TODO", "" , "" ) ,
+ c( "usage", "\\usage" , "" ) ,
+ c( "callGraph", "" , "Create a call graph of the default depth, excluding primitive functions" ) ,
+ c( "callGraphPrimitives", "" , "Create a call graph of the default depth, including primitive functions" ) ,
+ c( "callGraphDepth", "" , "Change the depth of the callgraph from the default of 2" ) ,
+ c( "include", "" , "See ?make.collate.roclet" ) ,
+ c( "export", "export" , "" ) ,
+ c( "exportClass", "exportClass" , "" ) ,
+ c( "exportMethod", "exportMethod" , "" ) ,
+ c( "exportPattern", "exportPattern" , "" ) ,
+ c( "S3method", "S3method" , "" ) ,
+ c( "import", "import" , "" ) ,
+ c( "importFrom", "importFrom" , "" ) ,
+ c( "importClassesFrom", "importClassesFrom" , "" ) ,
+ c( "importMethodsFrom", "importMethodsFrom" , "" ) )
+
+ tag <- line %-~% "^#' *@"
+ matchingKeywords <- unique( c(
+ grep( tag, roxygenTags[,1], ignore.case = TRUE ),
+ grep( tag, roxygenTags[,3], ignore.case = TRUE ) ) )
+ completions <- if( !length(matchingKeywords) ) roxygenTags else roxygenTags[ matchingKeywords, , drop = FALSE ]
+ token <- tag
+
+ list( completions = sprintf( "%s ", completions[,1]), token = token, title = completions[,3] )
+
+}
+
+roxygenParamComplete <- function( file, row, line="#' @param " ){
+ potential <- paste( argsfunafter( file, row, allArguments = FALSE ), " ", sep = "" )
+
+ line <- line %-~% "^#' *@param"
+ if( line %~% "^ +$" ){
+ return( list( completions = potential, token = "" ) )
+ }
+
+ start <- line %-~% "^[[:space:]]+"
+ if( start %~% "[[:space:]]+" ) {
+ return( list( completions = character(0), token = "" ) )
+ }
+
+ completions <- grep( start, potential, value = TRUE )
+ if( length( completions ) ){
+ return( list( completions = completions, token = start ) )
+ } else{
+ return( list( completions = character(0), token = "" ) )
+ }
+
+
+}
+
+
+#' Get arguments of the function defined after a given point
+#' @param file
+#' @param row
+argsfunafter <- function( file, row, allArguments = FALSE, p.out = parse(file), target = NULL ){
+ positions <- sapply( attr( p.out, "srcref" ), function(x) as.numeric(x)[1] )
+ target <- if( !is.null(target) ) target else which( positions >= row )[1]
+ funstart <- positions[target]
+
+ ### find the arguments that have already been documented
+ definedPars <- try( if( !allArguments && funstart > 1){
+ rl <- readLines( file, n = funstart - 1)
+ test <- rl %~% "#'"
+ definedPars <- if( tail( test, 1 ) ){
+ pos <- which( ! rev(test) )[1] - 1
+ last <- if( is.na(pos) ) funstart else funstart - pos
+ roxygenBlock <- rl[ last : (funstart-1) ]
+ roxygenBlock %-~|% "^.*@param[[:space:]]+" %-~% "[[:space:]].*$" %~|% "."
+ }
+ } , silent = TRUE )
+ if( definedPars %of% "try-error" ) definedPars <- NULL
+
+ ### find the arguments of the function
+ chunk <- p.out[[target]]
+ env <- new.env()
+ if( length(chunk) == 3 ){
+ if( chunk[[1]] == "<-" || chunk[[1]] == "=" ){
+ eval( chunk, env = env )
+ contents <- ls( env )
+ if( length( contents ) ){
+ object <- env[[ contents ]]
+ if( class( object ) == "function" ){
+ allpars <- names( formals( object) )
+ return( setdiff( allpars, definedPars ) )
+ }
+ }
+ }
+ }
+ invisible( NULL )
+}
+
+
+isAfter <- function( p.out, row, col){
+ rows <- sapply( attr( p.out, "srcref" ) , "[", 3 )
+ cols <- sapply( attr( p.out, "srcref" ) , "[", 4 )
+ row > rows | ( rows == row & col > cols )
+}
+
+isBefore <- function( p.out, row, col){
+ rows <- sapply( attr( p.out, "srcref" ) , "[", 1 )
+ cols <- sapply( attr( p.out, "srcref" ) , "[", 2 )
+ row < rows | ( rows == row & col < cols )
+}
+
+isInside <- function( p.out, row, col){
+ srcref <- attr( p.out, "srcref" )
+ startRows <- sapply( srcref , "[", 1 )
+ startCols <- sapply( srcref , "[", 2 )
+ endRows <- sapply( srcref , "[", 3 )
+ endCols <- sapply( srcref , "[", 4 )
+
+ ( ( row == startRows & col >= startCols ) | ( row >= startRows ) ) & ( ( row == endRows & col <= endCols ) | ( row <= endRows ) )
+
+}
+
+isFunction <- function( chunk ){
+ env <- new.env()
+ out <- try( {
+ eval( chunk, env = env )
+ name <- ls( env )
+ }, silent = TRUE )
+ test <- !( out %of% "try-error" ) && length(name) == 1 && env[[name]] %of% "function"
+ if( test ){
+ attr( test, "fun" ) <- name
+ }
+ test
+}
+
+
+
+generateRoxygenTemplate <- function( file, row, col,
+ author = getOption("svTools.roxygen.author"),
+ type = c("verbatim", "supperabbrev" ) ){
+
+ p.out <- parse( file )
+
+ where <- if( any(inside <- isInside(p.out, row, col)) ){
+ which( inside )
+ } else if( any( before <- isBefore(p.out, row, col) ) ) {
+ which( before ) [1]
+ } else length( p.out )
+
+ if( !( isfun <- isFunction(p.out[[where]]) ) ){
+ return( list( ok = 0 ) )
+ }
+ funname <- attr( isfun, "fun" )
+
+ startPos <- as.numeric( attr( p.out, "srcref" )[[where]][1:2] )
+
+ arguments <- argsfunafter( file = file, allArguments = T,
+ p.out = p.out, row = startPos[1], target = where )
+
+
+ template <- "#' ${1:Title (short) }\n#' \n#' ${2:Description (2-3 lines)}\n#' @export"
+ if( length(arguments) ){
+ template <- paste( template, paste( "#' @param ", arguments, " ${",2+1:length(arguments),": define `",arguments,"` }", sep = "", collapse = "\n" ), sep = "\n" )
+ }
+ index <- 2 + length( arguments ) + 1
+ template <- paste( template, paste("#' @return ${",index,": What does the function return}", sep = "" ), sep = "\n" )
+ index <- index + 1
+ if( !is.null(author) ){
+ author <- gsub( "([^@])@([^@])","\\1@@\\2", author )
+ template <- paste( template, paste( "#' @author ${",index,":", author, "}", sep = "" ), sep = "\n" )
+ }
+ index <- index + 1
+ template <- paste( template, paste( "#' @callGraph\n#' @examples\n#' ${",index,":# executable code for `",funname,"`}", sep = "" ), sep = "\n" )
+
+ type <- match.arg( type )
+
+ ### remove the supper abbrev stuff
+ if( type == "verbatim" ){
+ template <- gsub( "(?s)\\$\\{[[:digit:]]+: *([^}]+)\\}","\\1", template, perl = TRUE )
+ template <- paste( template, "\n" , sep = "")
+ }
+
+ list( template = template,
+ row = attr( p.out, "srcref" )[[where]][1],
+ ok = 1)
+}
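+
+## Usage sketch: generate a roxygen skeleton for a function defined in a file.
+## tf <- tempfile( fileext = ".R" )
+## writeLines( "myfun <- function(x, y = 1) x + y", tf )
+## generateRoxygenTemplate( tf, row = 1, col = 1, author = "A. B." )
+## unlink( tf )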
+
Added: pkg/svTools/R/search_engines.R
===================================================================
--- pkg/svTools/R/search_engines.R (rev 0)
+++ pkg/svTools/R/search_engines.R 2008-10-29 13:45:40 UTC (rev 64)
@@ -0,0 +1,226 @@
+rsitesearch <- function( query ){
+
+ ### build the string to submit to the website
+ if(missing(query)) return(NULL)
+ paste0 <- function(...) paste(..., sep = "")
+ string <- paste0("http://search.r-project.org/cgi-bin/namazu.cgi?query=", gsub(" ", "+", query))
+ mpp <- "max=100"
+ restr <- "idxname=functions"
+ lang <- "lang=firefox"
+ qstring <- paste(string, mpp, lang, restr, sep = "&")
+
+ ### download the result of the query and read it into R
+ temp <- tempfile()
+ download.file( qstring, temp, quiet = TRUE )
+ result <- readLines( temp )
+ if( result %~+% "No document matching your query" ) return(NULL)
+
+ ### process the webpage to extract relevant information
+ result <- grep( "^http", result, value = TRUE )
+ result <- gsub( ".*/R/library/", "", result )
+ result <- strsplit( result, "(/html/| )")
+ result <- as.data.frame( do.call( rbind, result ) )
+ colnames( result ) <- c("package", "page", "score")
+ result$score <- as.numeric( result$score )
+ result$page <- gsub( "\\.html$", "", result$page )
+
+ ### structure the result by package
+ out <- by( result[,2:3], list( package = result$package), function(x){
+ list( data = x[ order(x$score, decreasing = TRUE) , ,drop = FALSE],
+ score = sum( x$score ) )
+ })
+ out <- out[ order( sapply(out, "[[", "score"), decreasing = TRUE ) ]
+
+ structure( out, class = "rsitesearch", call = NULL, query = query, link = qstring )
+}
+
+print.rsitesearch <- function( x, only.summary = FALSE, ...){
+ ### print header
+ cat( "r site search query for '", attr( x, "query" ), "'\n", sep = "", ... )
+ cat( attr(x, "link"), "\n\n", ... )
+ packages <- names(x)
+ scores <- sapply( x, "[[", "score")
+ for( i in seq(along = x)){
+ cat( sprintf("%s (%d)\n", packages[i], scores[i]) )
+ if(!only.summary) {
+ cat( paste( " |-- ",
+ sprintf( "%-30s %5d", x[[i]]$data$page, x[[i]]$data$score ) ,
+ sep = " ", collapse = "\n" ), ... )
+ cat("\n\n", ...)
+ }
+ }
+}
+
+head.rsitesearch <- function( x, n = 2, ...){
+ h <- head( unclass(x), n = n, ... )
+ structure( h, class = "rsitesearch", query = attr(x, "query"),
+ link = attr(x, "link" ) )
+}
+
+tail.rsitesearch <- function( x, n = 2, ...){
+ h <- tail( unclass(x), n = n, ... )
+ structure( h, class = "rsitesearch", query = attr(x, "query"),
+ link = attr(x, "link" ) )
+}
+
+`[.rsitesearch` <- function(x, i, ...){
+ structure( NextMethod("["), class = "rsitesearch",
+ query = attr(x, "query"), link = attr(x, "link") )
+}
+
+summary.rsitesearch <- function( object, ... ){
+ print( object, only.summary = TRUE, ... )
+}
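+
+## Usage sketch for the R site search helpers above (requires network access):
+## res <- rsitesearch( "mixed models" )
+## summary( res )
+## head( res, n = 3 )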
+
+### search the gmane mailing list
+gmane <- function( query, groups = "*", prefix = "gmane.comp.lang.r" ) {
+
+ ### building the search url
+ url <- sprintf( "http://search.gmane.org/?query=%s&group=%s.%s&sort=relevance",
+ gsub(" +", "+", query), prefix, groups )
+
+ ### make the search
+ tmp <- tempfile()
+ on.exit( unlink(tmp) )
+ download.file( url, destfile=tmp)
+
+ ### read the html file, and deduce things
+ rl <- readLines( tmp )
+ firstLines <- rl %~|% "^<A HREF"
+ links <- gsub('^<A HREF="([^"]+)".*','\\1', firstLines )
+ # links <- gsub("//article\\.","//thread\\.",links)
+ links <- links %-~% "/match=.*$"
+ group <- gsub(
+ sprintf("http://article.gmane.org/%s.([^/]+)/.*", prefix ),
+ "\\1", links )
+ titles <- gsub( '.*>([^<]+)<.*', '\\1', firstLines )
+ relevance <- gsub( '.*\\((.*)%\\)$', '\\1', firstLines )
+
+ by( data.frame( links = links, titles = titles, relevance = relevance, stringsAsFactors=FALSE),
+ group, function(x) x )
+}
+
+### convert r news bib database into a data.frame
+.rnews <- function( query, file = "Rnews.bib" ){
+
+ biblines <- readLines( file )
+
+ ### read the pdf urls
+ strings <- biblines %~|% "@String\\{"
+ strings <- strings %-~% '(@String\\{|\\}|")'
+ x <- strings %/~% "[[:space:]]*=[[:space:]]*"
+ refs <- sapply( x, "[", 1 )
+ urls <- sapply( x, "[", 2 )
+ names(urls) <- refs
+
+ begin <- grep("^@Article", biblines )
+ end <- grep("^\\}", biblines )
+ articles <- mapply( function(x, y) {
+ lines <- biblines[x:y]
+ eqLines <- c( grep("=", lines ), length(lines)+1)
+ txt <- mapply( function(x1,x2){
+ paste( lines[x1:x2], collapse = " ")
+ } , head( eqLines,-1), tail( eqLines - 1,-1) )
+ txt <- txt %/~% "[[:space:]]*=[[:space:]]*"
+ fields <- sapply( txt, "[", 1) %-~% "[[:space:]]+"
+ content <- sapply( txt, "[", 2) %-~% "(^\\{|\\}?,$)"
+ content <- gsub(" +", " ",content)
+ names( content ) <- fields
+ content["url"] <- urls[ "http" ]
+ if( any( fields == "pdf") && content["pdf"] %in% refs ){
+ content["pdf"] <- urls[ content["pdf"] ]
+ }
+ content
+ } , begin+1, end-1 )
+ uNames <- unique( unlist( sapply( articles, names )) )
+ articles <- sapply( articles, function(x){
+ x[ uNames %wo% names(x) ] <- NA
+ x[ uNames ]
+ })
+
+ out <- as.data.frame( t(articles), stringsAsFactors=FALSE )
+ out$year <- as.numeric( out$year )
+ out$volume <- as.numeric( out$volume )
+ out$number <- as.numeric( out$number )
+ colnames( out ) <- uNames
+ out$issue <- paste( "Volume ", out$volume, "/", out$number, " (", out$month, " ",out$year,")", sep="" )
+ out
+}
+
+rnewssearch <- function( query, ... ){
+ matches <- agrep( query, rnews$title, ignore.case = TRUE,...)
+ subs <- rnews[ matches, ]
+ by( subs, subs$issue, function(x){
+ x$title
+ })
+}
+
+rwiki <- function( query ){
+ tmp <- tempfile()
+ on.exit( unlink(tmp) )
+
+ url <- sprintf( "http://wiki.r-project.org/rwiki/doku.php?do=search&id=%s",
+ gsub( " +", "+", query ) )
+ download.file( url, tmp )
+
+ results <- readLines( tmp ) %~|% "search_result"
+ results <- tail( results %/~% "search_result", -1 )
+
+ ids <- results %-~% '&.*'
+ ids <- ids %-~% '^.*id='
+
+ hits <- as.numeric( gsub('^.*class=\\"search_cnt\\">(.*) Hits</span>.*', '\\1', results ) )
+
+ snippets <- results %-~% '.*\\"search_snippet\\">'
+ snippets <- snippets %-~% '</?[^>]*>'
+ snippets <- snippets %-~% '\\[[^\\]*]\\]'
+ snippets <- snippets %-~% '<div.*'
+ snippets <- snippets %-~% 'hideLoadBar\\([^\\)]*\\)'
+
+ structure( list( id =ids, snippet = snippets, hit = hits, n = length(results) ),
+ class = "wikisearch")
+}
+
+print.wikisearch <- function(x, ...){
+ for( i in 1:x$n){
+ cat( x$id[i], " (", x$hit[i], " Hits)\n", sep = "", ... )
+ cat( strwrap(x$snippet[i], prefix = " ", width = 60 ), sep = "\n", ... )
+ cat( "\n", ... )
+ }
+}
+
+rgraphicalmanuals <- function(query){
+ url <- sprintf("http://cged.genes.nig.ac.jp/RGM2/index.php?query=%s&scope=all",
+ gsub(" +", "+", query) )
+
+ # TODO: follow
+ tmp <- tempfile()
+ on.exit( unlink(tmp) )
+ download.file( url, tmp )
+
+ results <- readLines( tmp )
+ results <- results %~|% '^<td><a href=".*/library/'
+ results <- results %-~% "^.*/library/"
+ results <- results %-~% '\\.html".*'
+
+ packages <- results %-~% "/.*"
+ pages <- results %-~% ".*/"
+ out <- by( pages, packages, function(x) as.character(unique(x)) )
+
+ structure( out, class = "graphicalmanuals" )
+}
+
+rgraphgallery <- function(query){
+ url <- sprintf("http://addictedtor.free.fr/graphiques/simplesearch.php?q=%s",
+ gsub(" +", "+", query ) )
+ tmp <- tempfile()
+ on.exit( unlink(tmp) )
+ download.file( url, tmp )
+
+ results <- readLines(tmp)
+ ids <- results %-~% ',.*'
+ titles <- results %-~% '^[0-9]*,'
+ structure( list(id = ids, title = titles, n = length(titles)),
+ class="graphgallery" )
+}
+
Added: pkg/svTools/R/sidekick.R
===================================================================
--- pkg/svTools/R/sidekick.R (rev 0)
+++ pkg/svTools/R/sidekick.R 2008-10-29 13:45:40 UTC (rev 64)
@@ -0,0 +1,134 @@
+#' Code sidekickParse
+#'
+#' Translate the tree representation generated by
+#' the R parser (parse) into a rectangular representation
+#' @export
+#' @param file File to parse
+#' @return A data frame with the following columns:
+#' - id: identifier of the current node
+#' - srcref1, ..., srcref4: position of the current node in the file
+#' - description: textual representation of the current node
+#' - parent: identifier of the parent node
+#' - mode: mode of the node (e.g. "function" or "call")
+#' @author Romain Francois \email{francoisromain@@free.fr}
+#' @examples
+#' tf <- tempfile()
+#' cat( "jitter <- " , deparse( jitter ), sep = "\n", file = tf )
+#' sidekick( tf )
+#' unlink( tf )
+sidekick <- function( file ){
+ ### try to parse and return an error if failed
+ p <- try( parse( file ), silent = TRUE )
+ if( p %of% "try-error" ){
+ return( list( type = "error", data = parseError( p ) ) )
+ }
+
+ ### calls the actual sidekick function
+ sidekickParse( p )
+}
+
+
+sidekickParse <- function( p = try( parse(file), silent = TRUE) , top = TRUE, env = new.env(), parent = 0, file ){
+
+ if( top ) {
+
+ env[["data"]] <- data.frame(
+ id = numeric(0),
+ srcref1 = numeric(0),
+ srcref2 = numeric(0),
+ srcref3 = numeric(0),
+ srcref4 = numeric(0),
+ description = character(0),
+ parent = numeric(0),
+ mode = character(0), stringsAsFactors = FALSE )
+ if( p %of% "try-error" ){
+ return( env[["data"]] )
+ }
+
+ maxId <- 0
+ } else {
+ maxId <- max( env[["data"]][, "id"] )
+ }
+
+ atts <- attributes( p )
+ descriptions <-as.character( p )
+ hasAttrs <- "srcref" %in% names(atts)
+ if( hasAttrs ){
+ srcrefs <- t( sapply( attr(p, "srcref"), as.integer ) )
+ colnames( srcrefs ) <- paste("srcref", 1:4 , sep = "")
+ srcrefs <- as.data.frame( srcrefs )
+ ids <- maxId + 1:length(p)
+ modes <- sapply( p, mode )
+ data <- data.frame( id = ids,
+ srcrefs, description = descriptions,
+ parent = rep( parent, length(p) ),
+ mode = modes,
+ stringsAsFactors = FALSE)
+ env[["data"]] <- rbind( env[["data"]], data )
+
+ }
+
+ calls <- sapply( p, mode ) %in% c("call","function")
+ for( i in 1:length(p)){
+ if( !is.null(p) && calls[i] ){
+ test <- try( looksLikeAFunction( p[[i]] ), silent = TRUE )
+ if( test ){
+ env[["data"]][ ids[i], "mode" ] <- "function"
+ try( sidekickParse( p[[i]], top = FALSE, env = env, parent = if( hasAttrs) ids[i] else parent ), silent = TRUE )
+ } else {
+ test <- try( looksLikeAnIf( p[[i]] ), silent = TRUE )
+ if( ! test %of% "try-error" && test ){
+ pa <- try( addIfNode( TRUE, env = env, parent = if( hasAttrs ) ids[i] else parent, p[[i]][[3]] ), silent = TRUE )
+ sidekickParse( p[[i]][[3]], top = FALSE, env = env, parent = pa )
+ if( length(p[[i]]) == 4){
+ pa <- try( addIfNode( FALSE, env = env, parent = if( hasAttrs ) ids[i] else parent, p[[i]][[4]] ), silent = TRUE )
+ sidekickParse( p[[i]][[4]], top = FALSE, env = env, parent = pa )
+ }
+ } else{
+ sidekickParse( p[[i]], top = FALSE, env = env, parent = if( hasAttrs) ids[i] else parent )
+ }
+ }
+ }
+ }
+
+ if( top ){
+ env[["data"]]
+ }
+
+}
+
+addIfNode <- function( value = T, env = env, parent, nextnode ){
+
+ data <- env[["data"]]
+ if( !is.null( srcref <- attr(nextnode, "srcref") ) ){
+ id <- max(data$id) + 1
+ lap.out <- lapply( srcref, as.integer )
+ srcref <- t(c(
+ head( lap.out ,1)[[1]][1:2],
+ tail( lap.out ,1)[[1]][3:4] ) )
+ colnames( srcref ) <- paste( "srcref", 1:4, sep = "")
+ mode <- paste( "if", value, sep = ":" )
+ description <- mode
+ env[["data"]] <- rbind( env[["data"]], data.frame( id = id, srcref, description = description, mode = mode, parent = parent ) )
+ id
+ } else{
+ parent
+ }
+
+}
+
+looksLikeAFunction <- function( p ) {
+ if( length( p[[1]]) != 1 ) return(FALSE)
+ if( ! as.character( p[[1]] ) %in% c("<-", "<<-", "=" ) ) return(FALSE)
+ if( length( p ) <= 2 ) return(FALSE)
+ if( is.null( p[[3]] ) ) return(FALSE)
+ if( length( p[[3]] ) == 1 ) return(FALSE)
+ asc <- as.character( p[[3]][[1]] )
+ if( length( asc ) > 1 || asc != "function" ) return(FALSE)
+ TRUE
+}
+
+looksLikeAnIf <- function(p){
+ if( length( p[[1]] ) != 1 ) return(FALSE)
+ as.character(p[[1]]) == "if"
+}
+
Added: pkg/svTools/R/tryParse.R
===================================================================
--- pkg/svTools/R/tryParse.R (rev 0)
+++ pkg/svTools/R/tryParse.R 2008-10-29 13:45:40 UTC (rev 64)
@@ -0,0 +1,17 @@
+#' Tries to parse a file
+#'
+#' Tries to parse a file and structures the error if it cannot be parsed
+#' @param file A file to \link{parse}
+#' @param action A function to call on the structured error
+#' @return If the parsing is successful, the output of the standard parse function is returned;
+#' otherwise, the error generated by parse is structured by \code{\link{parseError}}
+#' @author Romain Francois \email{francoisromain@@free.fr}
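+#' @examples
+#' # minimal sketch: the action is only called when parsing fails
+#' \dontrun{
+#' tf <- tempfile( fileext = ".R" )
+#' writeLines( "x <- 1 +", tf )
+#' tryParse( tf, action = print )
+#' unlink( tf )
+#' }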
+tryParse <- function(file, action){
+ out <- try( parse(file) , silent = TRUE)
+ if( inherits(out, "try-error") ) {
+ err <- parseError( out )
+ if( !missing(action) ) action( err )
+ invisible( err )
+ } else invisible(out)
+}
+
Added: pkg/svTools/R/trySource.R
===================================================================
--- pkg/svTools/R/trySource.R (rev 0)
+++ pkg/svTools/R/trySource.R 2008-10-29 13:45:40 UTC (rev 64)
@@ -0,0 +1,11 @@
+
+#' Tries to source a script file and returns a structured error if it fails
+#' @export
+#' @param file A connection to source
+#' @return A structured error (see \link{parseError} ) if the file cannot be sourced
+#' @author Romain Francois \email{francoisromain@@free.fr}
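+#' @examples
+#' # minimal sketch: a script that fails to parse yields a structured error
+#' \dontrun{
+#' tf <- tempfile( fileext = ".R" )
+#' writeLines( "plot(1:10", tf )
+#' trySource( tf )
+#' unlink( tf )
+#' }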
+trySource <- function(file){
+ out <- try( source(file) , silent = TRUE)
+ if( out %of% "try-error") parseError( out )
+}
+
Added: pkg/svTools/R/zzz.R
===================================================================
--- pkg/svTools/R/zzz.R (rev 0)
+++ pkg/svTools/R/zzz.R 2008-10-29 13:45:40 UTC (rev 64)
@@ -0,0 +1,7 @@
+
+svTools.env <- new.env()
+
+.onAttach <- function( libname, pkgname){
+ addError( emptyError( ) )
+}
+
Added: pkg/svTools/data/descriptionFields.rda
===================================================================
(Binary files differ)
Property changes on: pkg/svTools/data/descriptionFields.rda
___________________________________________________________________
Name: svn:mime-type
+ application/octet-stream