### * undoc

undoc <-
function(package, dir, lib.loc = NULL)
{
    ## Find objects in a package which are not documented: code
    ## objects, data sets and (if the methods package is in use) S4
    ## classes and S4 methods without a corresponding help topic.
    ## Exactly one of 'package' (name of an installed package, searched
    ## for along 'lib.loc') or 'dir' (path to a package source
    ## directory) must be given.  Returns an object of class "undoc",
    ## a named list of character vectors of undocumented things.

    ## Argument handling.
    if(!missing(package)) {
        if(length(package) != 1)
            stop(paste("argument", sQuote("package"),
                       "must be of length 1"))
        dir <- .find.package(package, lib.loc)
        ## Using package installed in @code{dir} ...
        helpIndex <- file.path(dir, "help", "AnIndex")
        if(!fileTest("-f", helpIndex))
            stop(paste("directory", sQuote(dir),
                       "contains no help index"))
        isBase <- package == "base"

        ## Find all documented topics from the help index.
        allDocTopics <- sort(scan(file = helpIndex,
                                  what = list("", ""),
                                  quiet = TRUE, sep="\t")[[1]])

        ## Load package into codeEnv.
        if(!isBase)
            .loadPackageQuietly(package, lib.loc)
        codeEnv <- .packageEnv(package)

        codeObjs <- ls(envir = codeEnv, all.names = TRUE)
    }
    else {
        if(missing(dir))
            stop(paste("you must specify", sQuote("package"),
                       "or", sQuote("dir")))
        ## Using sources from directory @code{dir} ...
        if(!fileTest("-d", dir))
            stop(paste("directory", sQuote(dir), "does not exist"))
        else
            dir <- filePathAsAbsolute(dir)
        docsDir <- file.path(dir, "man")
        if(!fileTest("-d", docsDir))
            stop(paste("directory", sQuote(dir),
                       "does not contain Rd sources"))
        isBase <- basename(dir) == "base"

        ## Find all documented topics from the Rd sources.
        aliases <- character(0)
        for(f in listFilesWithType(docsDir, "docs")) {
            aliases <- c(aliases,
                         grep("^\\\\alias", readLines(f), value = TRUE))
        }
        allDocTopics <- gsub("\\\\alias{(.*)}.*", "\\1", aliases)
        allDocTopics <- gsub("\\\\%", "%", allDocTopics)
        allDocTopics <- gsub(" ", "", allDocTopics)
        allDocTopics <- sort(unique(allDocTopics))

        codeEnv <- new.env()
        codeDir <- file.path(dir, "R")
        if(fileTest("-d", codeDir)) {
            ## Collect code in codeFile.
            codeFile <- tempfile("Rcode")
            on.exit(unlink(codeFile))
            if(!file.create(codeFile)) stop("unable to create ", codeFile)
            if(!all(file.append(codeFile, listFilesWithType(codeDir, "code"))))
                stop("unable to write code files")
            ## Read code from codeFile into codeEnv.
            yy <- try(.sourceAssignments(codeFile, env = codeEnv))
            if(inherits(yy, "try-error")) {
                stop("cannot source package code")
            }
        }

        codeObjs <- ls(envir = codeEnv, all.names = TRUE)

        ## Does the package have a NAMESPACE file?  Note that when
        ## working on the sources we (currently?) cannot deal with the
        ## (experimental) alternative way of specifying the namespace.
        if(file.exists(file.path(dir, "NAMESPACE"))) {
            nsInfo <- parseNamespaceFile(basename(dir), dirname(dir))
            ## Look only at exported objects (and not declared S3
            ## methods).
            OK <- codeObjs[codeObjs %in% nsInfo$exports]
            for(p in nsInfo$exportPatterns)
                OK <- c(OK, grep(p, codeObjs, value = TRUE))
            codeObjs <- unique(OK)
        }
    }

    dataObjs <- character(0)
    dataDir <- file.path(dir, "data")
    if(fileTest("-d", dataDir)) {
        dataEnv <- new.env()
        files <- listFilesWithType(dataDir, "data")
        files <- unique(basename(filePathSansExt(files)))
        ## <FIXME>
        ## Argh.  When working on the source directory of a package in a
        ## bundle, or a base package, we (currently?) cannot simply use
        ## data().  In these cases, we only have a 'DESCRIPTION.in'
        ## file.  On the other hand, data() uses .find.package() to find
        ## the package paths from its 'package' and '.lib.loc'
        ## arguments, and .find.packages() is really for finding
        ## *installed* packages, and hence tests for the existence of a
        ## 'DESCRIPTION' file.  As a last resort, use the fact that
        ## data() can be made to for look data sets in the 'data'
        ## subdirectory of the current working directory ...
        packageName <- basename(dir)
        libPath <- dirname(dir)
        if(!file.exists(file.path(dir, "DESCRIPTION"))) {
            ## Hope that there is a 'DESCRIPTION.in', maybe we should
            ## check for this?
            packageName <- character()
            libPath <- NULL
            owd <- getwd()
            setwd(dir)
            ## Use add = TRUE: a handler unlinking the collected code
            ## file may already be registered above, and on.exit()
            ## without add = TRUE would silently replace it, leaking
            ## the temporary file.
            on.exit(setwd(owd), add = TRUE)
        }
        ## </FIXME>
        for(f in files) {
            ## <NOTE>
            ## Non-standard evaluation for argument 'package' to data()
            ## gone in R 1.9.0.
            .tryQuietly(data(list = f, package = packageName,
                             lib.loc = libPath, envir = dataEnv))
            ## (We use .tryQuietly() because a .R data file using scan()
            ## to read in data from some other place may do this without
            ## 'quiet = TRUE', giving output which R CMD check would
            ## think to indicate a problem.)
            ## </NOTE>
            new <- ls(envir = dataEnv, all.names = TRUE)
            dataObjs <- c(dataObjs, new)
            rm(list = new, envir = dataEnv)
        }
    }

    ## Undocumented objects?
    if((length(codeObjs) == 0) && (length(dataObjs) == 0))
        warning("Neither code nor data objects found")

    if(!isBase) {
        ## Code objects in add-on packages with names starting with a
        ## dot are considered 'internal' (not user-level) by
        ## convention.
        ## <FIXME>
        ## Not clear whether everyone believes in this convention.
        ## We used to have
        ##   allObjs[! allObjs %in% c(allDocTopics,
        ##                            ".First.lib", ".Last.lib")]
        ## i.e., only exclude '.First.lib' and '.Last.lib'.
        codeObjs <- grep("^[^.].*", codeObjs, value = TRUE)
        ## Note that this also allows us to get rid of S4 meta objects
        ## (with names starting with '.__C__' or '.__M__'; well, as long
        ## as there are none in base).
        ## </FIXME>

        ## <FIXME>
        ## Need to do something about S4 generic functions 'created' by
        ## setGeneric() or setMethod() on 'ordinary' functions.
        ## The test below exempts objects that are generic functions if
        ## there is a visible nongeneric function and the default method
        ## is "derived", by a call to setGeneric.  This test allows
        ## nondocumentd generics in some cases (e.g., the generic was
        ## created locally from an inconsistent version).
        ## In the long run we need dynamic documentation.
        if(.isMethodsDispatchOn()) {
            codeObjs <-
                codeObjs[sapply(codeObjs, function(f) {
                    fdef <- get(f, envir = codeEnv)
                    if(methods::is(fdef, "genericFunction")) {
                        fOther <-
                            methods::getFunction(f, generic = FALSE,
                                                 mustFind = FALSE,
                                                 where = topenv(environment(fdef)))
                        if(is.null(fOther))
                            TRUE
                        else
                            !methods::is(methods::finalDefaultMethod(methods::getMethodsMetaData(f, codeEnv)),
                                         "derivedDefaultMethod")
                    }
                    else
                        TRUE
                }) == TRUE]
        }
        ## </FIXME>

        ## Allow group generics to be undocumented other than in base.
        ## In particular, those from methods partially duplicate base
        ## and are documented in base's groupGenerics.Rd.
        codeObjs <-
            codeObjs[! codeObjs %in%
                     c("Arith", "Compare", "Complex", "Math", "Math2",
                       "Ops", "Summary")]
    }

    ## <FIXME>
    ## Currently, loading data from an R file via sys.source() puts
    ## .required into the load environment if the R code has a call to
    ## require().
    dataObjs <- dataObjs[! dataObjs %in% c(".required")]
    ## </FIXME>

    undocThings <-
        list("code objects" =
             unique(codeObjs[! codeObjs %in% allDocTopics]),
             "data sets" =
             unique(dataObjs[! dataObjs %in% allDocTopics]))

    if(.isMethodsDispatchOn()) {
        ## Undocumented S4 classes?
        S4classes <- methods::getClasses(codeEnv)
        ## <NOTE>
        ## There is no point in worrying about exportClasses directives
        ## in a NAMESPACE file when working on a package source dir, as
        ## we only source the assignments, and hence do not get any
        ## S4 classes or methods.
        ## </NOTE>
        ## The bad ones:
        S4classes <-
            S4classes[!sapply(S4classes,
                              function(u) utils::topicName("class", u))
                      %in% allDocTopics]
        undocThings <-
            c(undocThings, list("S4 classes" = unique(S4classes)))
    }

    if(.isMethodsDispatchOn()) {
        ## Undocumented S4 methods?
        ## <NOTE>
        ## There is no point in worrying about exportMethods directives
        ## in a NAMESPACE file when working on a package source dir, as
        ## we only source the assignments, and hence do not get any
        ## S4 classes or methods.
        ## </NOTE>
        methodsSignatures <- function(f) {
            mlist <- methods::getMethodsMetaData(f, codeEnv)
            meths <- methods::linearizeMlist(mlist, FALSE)
            classes <- methods::slot(meths, "classes")
            ## Don't look for doc on a generated default method.
            default <-
                as.logical(lapply(classes,
                                  function(x)
                                  identical(all(x == "ANY"), TRUE)))
            if(any(default)
               && methods::is(methods::finalDefaultMethod(mlist),
                              "derivedDefaultMethod")) {
                classes <- classes[!default]
            }
            ## Exclude methods inherited from the 'appropriate' parent
            ## environment.
            makeSigs <- function(cls)
                unlist(lapply(cls, paste, collapse = "#"))
            penv <- .Internal(getRegisteredNamespace(as.name(package)))
            if(is.environment(penv))
                penv <- parent.env(penv)
            else
                penv <- parent.env(codeEnv)
            mlistFromPenv <- methods::getMethodsMetaData(f, penv)
            if(!is.null(mlistFromPenv)) {
                classesFromPenv <-
                    methods::slot(methods::linearizeMlist(mlistFromPenv),
                                  "classes")
                ind <- is.na(match(makeSigs(classes),
                                   makeSigs(classesFromPenv)))
                classes <- classes[ind]
            }
            sigs <- sapply(classes, paste, collapse = ",")
            if(length(sigs))
                paste(f, ",", sigs, sep = "")
            else
                character()
        }
        S4methods <-
            sapply(methods::getGenerics(codeEnv), methodsSignatures)
        S4methods <- as.character(unlist(S4methods, use.names = FALSE))
        ## The bad ones:
        S4methods <-
            S4methods[!sapply(S4methods,
                              function(u) utils::topicName("method", u))
                      %in% allDocTopics]
        undocThings <-
            c(undocThings,
              list("S4 methods" =
                   unique(sub("([^,]*),(.*)",
                              "generic \\1 and siglist \\2",
                              S4methods))))
    }

    class(undocThings) <- "undoc"
    undocThings
}

print.undoc <-
function(x, ...)
{
    ## Print method for "undoc" results: report each non-empty
    ## category of undocumented things, then return 'x' invisibly.
    nonEmpty <- which(sapply(x, length) > 0)
    for(idx in nonEmpty) {
        category <- names(x)[idx]
        writeLines(paste("Undocumented ", category, ":", sep = ""))
        ## S4 method signatures carry no special markup, so they are
        ## wrapped as plain text rather than pretty-printed.
        if(category == "S4 methods") {
            writeLines(strwrap(x[[idx]], indent = 2, exdent = 2))
        } else {
            .prettyPrint(x[[idx]])
        }
    }
    invisible(x)
}

### * codoc

codoc <-
function(package, dir, lib.loc = NULL,
         use.values = NULL, verbose = getOption("verbose"))
{
    ## Check for code/documentation mismatches: compare the \usage
    ## entries in the Rd sources of a package (installed package name
    ## 'package', or source directory 'dir') against the formals of the
    ## function objects actually found in the code.  'use.values'
    ## controls whether default values are compared (TRUE: all, FALSE:
    ## names only, NULL: only defaults given in the docs).  Returns an
    ## object of class "codoc" (the mismatches per Rd object), with
    ## further information attached as attributes.

    ## <FIXME>
    ## Improvements worth considering:
    ## * Parallelize the actual checking (it is not necessary to loop
    ##   over the Rd files);
    ## * In case of a namespace, always use the namespace for codoc
    ##   computations (as it is also used for determining the usages for
    ##   which no corresponding object in the package exists), rather
    ##   than just the exported objects.
    ## </FIXME>

    hasNamespace <- FALSE

    ## Argument handling.
    if(!missing(package)) {
        if(length(package) != 1)
            stop(paste("argument", sQuote("package"),
                       "must be of length 1"))
        dir <- .find.package(package, lib.loc)
        ## Using package installed in @code{dir} ...
        codeDir <- file.path(dir, "R")
        if(!fileTest("-d", codeDir))
            stop(paste("directory", sQuote(dir),
                       "does not contain R code"))
        docsDir <- file.path(dir, "man")
        if(!fileTest("-d", docsDir))
            stop(paste("directory", sQuote(dir),
                       "does not contain Rd sources"))
        isBase <- basename(dir) == "base"

        ## Load package into codeEnv.
        if(!isBase)
            .loadPackageQuietly(package, lib.loc)
        codeEnv <- .packageEnv(package)

        objectsInCode <- objects(envir = codeEnv, all.names = TRUE)

        ## Does the package have a namespace?
        if(packageHasNamespace(package, dirname(dir))) {
            hasNamespace <- TRUE
            objectsInCodeOrNamespace <-
                objects(envir = asNamespace(package), all.names = TRUE)
        }
        else
            objectsInCodeOrNamespace <- objectsInCode
    }
    else {
        if(missing(dir))
            stop(paste("you must specify", sQuote("package"),
                       "or", sQuote("dir")))
        ## Using sources from directory @code{dir} ...
        if(!fileTest("-d", dir))
            stop(paste("directory", sQuote(dir), "does not exist"))
        else
            dir <- filePathAsAbsolute(dir)
        codeDir <- file.path(dir, "R")
        if(!fileTest("-d", codeDir))
            stop(paste("directory", sQuote(dir),
                       "does not contain R code"))
        docsDir <- file.path(dir, "man")
        if(!fileTest("-d", docsDir))
            stop(paste("directory", sQuote(dir),
                       "does not contain Rd sources"))
        isBase <- basename(dir) == "base"

        ## Collect code in codeFile.
        codeFile <- tempfile("Rcode")
        on.exit(unlink(codeFile))
        if(!file.create(codeFile)) stop("unable to create ", codeFile)
        if(!all(file.append(codeFile, listFilesWithType(codeDir, "code"))))
            stop("unable to write code files")

        ## Read code from codeFile into codeEnv.
        codeEnv <- new.env()
        if(verbose)
            cat("Reading code from", sQuote(codeFile), "\n")
        yy <- try(.sourceAssignments(codeFile, env = codeEnv))
        if(inherits(yy, "try-error")) {
            stop("cannot source package code")
        }

        objectsInCode <- objects(envir = codeEnv, all.names = TRUE)
        objectsInCodeOrNamespace <- objectsInCode

        ## Does the package have a NAMESPACE file?  Note that when
        ## working on the sources we (currently?) cannot deal with the
        ## (experimental) alternative way of specifying the namespace.
        if(file.exists(file.path(dir, "NAMESPACE"))) {
            hasNamespace <- TRUE
            nsInfo <- parseNamespaceFile(basename(dir), dirname(dir))
            ## Look only at exported objects.
            OK <- objectsInCode[objectsInCode %in% nsInfo$exports]
            for(p in nsInfo$exportPatterns)
                OK <- c(OK, grep(p, objectsInCode, value = TRUE))
            objectsInCode <- unique(OK)
        }
    }

    ## Find the function objects to work on.
    functionsInCode <-
        objectsInCode[sapply(objectsInCode,
                             function(f) {
                                 f <- get(f, envir = codeEnv)
                                 is.function(f) && (length(formals(f)) > 0)
                             }) == TRUE]
    ## <FIXME>
    ## Sourcing all R code files in the package is a problem for base,
    ## where this misses the .Primitive functions.  Hence, when checking
    ## base for objects shown in \usage but missing from the code, we
    ## get the primitive functions from the version of R we are using.
    ## Maybe one day we will have R code for the primitives as well ...
    if(isBase) {
        objectsInBase <-
            objects(envir = as.environment(NULL), all.names = TRUE)
        objectsInCode <-
            c(objectsInCode,
              objectsInBase[sapply(objectsInBase,
                                   .isPrimitive,
                                   NULL)],
              c(".First.lib", ".Last.lib", ".Random.seed",
                ".onLoad", ".onAttach", ".onUnload"))
        objectsInCodeOrNamespace <- objectsInCode
    }
    ## </FIXME>

    ## Build a list with the formals of the functions in the code
    ## indexed by the names of the functions.
    functionArgsInCode <-
        lapply(functionsInCode,
               function(f) formals(get(f, envir = codeEnv)))
    names(functionArgsInCode) <- functionsInCode
    if(.isMethodsDispatchOn()) {
        ## <NOTE>
        ## There is no point in worrying about exportMethods directives
        ## in a NAMESPACE file when working on a package source dir, as
        ## we only source the assignments, and hence do not get any
        ## S4 classes or methods.
        ## </NOTE>
        lapply(methods::getGenerics(codeEnv),
               function(f) {
                   meths <-
                       methods::linearizeMlist(methods::getMethodsMetaData(f, codeEnv))
                   sigs <- sapply(methods::slot(meths, "classes"),
                                  paste, collapse = ",")
                   if(!length(sigs)) return()
                   args <- lapply(methods::slot(meths, "methods"),
                                  formals)
                   names(args) <-
                       paste("\\S4method{", f, "}{", sigs, "}",
                             sep = "")
                   functionArgsInCode <<- c(functionArgsInCode, args)
               })
    }

    checkCoDoc <- function(fName, ffd) {
        ## Compare the formals of the function in the code named 'fName'
        ## and formals 'ffd' obtained from the documentation.
        ffc <- functionArgsInCode[[fName]]
        if(identical(use.values, FALSE)) {
            ffc <- names(ffc)
            ffd <- names(ffd)
            ok <- identical(ffc, ffd)
        } else {
            if(!identical(names(ffc), names(ffd)))
                ok <- FALSE
            else {
                vffc <- as.character(ffc) # values
                vffd <- as.character(ffd) # values
                if(!identical(use.values, TRUE)) {
                    ind <- nchar(as.character(ffd)) > 0
                    vffc <- vffc[ind]
                    vffd <- vffd[ind]
                }
                ok <- identical(vffc, vffd)
            }
        }
        if(ok)
            NULL
        else
            list(list(name = fName, code = ffc, docs = ffd))
    }

    db <- if(!missing(package))
        Rddb(package, lib.loc = dirname(dir))
    else
        Rddb(dir = dir)

    db <- lapply(db,
                 function(f) paste(Rdpp(f), collapse = "\n"))
    names(db) <- dbNames <- sapply(db, getRdSection, "name")
    if(isBase || basename(dir) == "graphics") {
        ind <- dbNames %in% c("Defunct", "Devices")
        db <- db[!ind]
        dbNames <- dbNames[!ind]
    }
    dbUsageTexts <- lapply(db, getRdSection, "usage")
    dbSynopses <- lapply(db, getRdSection, "synopsis")
    ind <- sapply(dbSynopses, length) > 0
    dbUsageTexts[ind] <- dbSynopses[ind]
    withSynopsis <- as.character(dbNames[ind])
    dbUsages <- lapply(dbUsageTexts, .parse_usage_as_much_as_possible)
    ind <- sapply(dbUsages,
                  function(x) !is.null(attr(x, "badLines")))
    badLines <- sapply(dbUsages[ind], attr, "badLines")

    ## <FIXME>
    ## Currently, there is no useful markup for S3 Ops group methods
    ## and S3 methods for subscripting and subassigning.  Hence, we
    ## cannot reliably distinguish between usage for the generic and
    ## that of a method ...
    functionsToBeIgnored <-
        c(.functionsToBeIgnoredFromUsage(basename(dir)),
          .functionsWithNoUsefulS3methodMarkup)
    ## </FIXME>

    badDocObjects <- list()
    functionsInUsages <- character()
    variablesInUsages <- character()
    dataSetsInUsages <- character()
    functionsInUsagesNotInCode <- list()

    for(docObj in dbNames) {

        exprs <- dbUsages[[docObj]]
        if(!length(exprs)) next

        ## Get variable names and data set usages first, mostly for
        ## curiosity.
        ## <FIXME>
        ## Use '<=' as we could get 'NULL' ... although of course this
        ## is not really a variable.
        ind <- sapply(exprs, length) <= 1
        ## </FIXME>
        if(any(ind)) {
            variablesInUsages <-
                c(variablesInUsages,
                  sapply(exprs[ind], deparse))
            exprs <- exprs[!ind]
        }
        ind <- as.logical(sapply(exprs,
                                 function(e)
                                 (length(e) == 2)
                                 && e[[1]] == as.symbol("data")))
        if(any(ind)) {
            dataSetsInUsages <-
                c(dataSetsInUsages,
                  sapply(exprs[ind], function(e) as.character(e[[2]])))
            exprs <- exprs[!ind]
        }
        functions <- sapply(exprs, function(e) as.character(e[[1]]))
        functions <- .transformS3methodMarkup(as.character(functions))
        ind <- (! functions %in% functionsToBeIgnored
                & functions %in% functionsInCode)
        badFunctions <-
            mapply(functions[ind],
                   exprs[ind],
                   FUN = function(x, y)
                   checkCoDoc(x, as.pairlist(as.alist.call(y[-1]))),
                   SIMPLIFY = FALSE)
        ## Replacement functions.
        ind <- as.logical(sapply(exprs,
                                 .isCallFromReplacementFunctionUsage))
        if(any(ind)) {
            exprs <- exprs[ind]
            replaceFuns <-
                paste(sapply(exprs,
                             function(e) as.character(e[[2]][[1]])),
                      "<-",
                      sep = "")
            replaceFuns <- .transformS3methodMarkup(replaceFuns)
            functions <- c(functions, replaceFuns)
            ind <- (replaceFuns %in% functionsInCode)
            if(any(ind)) {
                badReplaceFuns <-
                    mapply(replaceFuns[ind],
                           exprs[ind],
                           FUN = function(x, y)
                           checkCoDoc(x,
                                      as.pairlist(c(as.alist.call(y[[2]][-1]),
                                                    as.alist.symbol(y[[3]])))),
                           SIMPLIFY = FALSE)
                badFunctions <-
                    c(badFunctions, badReplaceFuns)
            }
        }

        badFunctions <- do.call("c", badFunctions)
        if(length(badFunctions) > 0)
            badDocObjects[[docObj]] <- badFunctions

        ## Determine functions with a \usage entry in the documentation
        ## but 'missing from the code'.  If a package has a namespace, we
        ## really need to look at all objects in the namespace (hence
        ## 'objectsInCodeOrNamespace'), as one can access the internal
        ## symbols via ':::' and hence package developers might want to
        ## provide function usages for some of the internal functions.
        ## <FIXME>
        ## We may still have \S4method{}{} entries in functions, which
        ## cannot have a corresponding object in the code.  Hence, we
        ## remove these function entries, but should really do better,
        ## by comparing the explicit \usage entries for S4 methods to
        ## what is actually in the code.  We most likely also should do
        ## something similar for S3 methods.
        ## Note that grep() returns integer *positions*: drop matches
        ## via negative subscripting.  (Logical negation of a nonempty
        ## integer index vector would yield all-FALSE and hence
        ## incorrectly discard every entry.)
        ind <- grep(.S4_method_markup_regexp, functions)
        if(length(ind))
            functions <- functions[-ind]
        ## </FIXME>
        badFunctions <-
            functions[! functions %in%
                      c(objectsInCodeOrNamespace, functionsToBeIgnored)]
        if(length(badFunctions) > 0)
            functionsInUsagesNotInCode[[docObj]] <- badFunctions

        functionsInUsages <- c(functionsInUsages, functions)
    }

    ## Determine (function) objects in the code without a \usage entry.
    ## Of course, these could still be 'documented' via \alias.
    ## </NOTE>
    ## Older versions only printed this information without returning it
    ## (in case 'verbose' was true).  We now add this as an attribute to
    ## the badDocObjects returned.
    ## </NOTE>
    objectsInCodeNotInUsages <-
        objectsInCode[! objectsInCode %in%
                      c(functionsInUsages, variablesInUsages)]
    functionsInCodeNotInUsages <-
        functionsInCode[functionsInCode %in% objectsInCodeNotInUsages]
    ## (Note that 'functionsInCode' does not necessarily contain all
    ## (exported) functions in the package.)

    attr(badDocObjects, "objectsInCodeNotInUsages") <-
        objectsInCodeNotInUsages
    attr(badDocObjects, "functionsInCodeNotInUsages") <-
        functionsInCodeNotInUsages
    attr(badDocObjects, "functionsInUsagesNotInCode") <-
        functionsInUsagesNotInCode
    attr(badDocObjects, "functionArgsInCode") <- functionArgsInCode
    attr(badDocObjects, "hasNamespace") <- hasNamespace
    attr(badDocObjects, "withSynopsis") <- withSynopsis
    attr(badDocObjects, "badLines") <- badLines
    class(badDocObjects) <- "codoc"
    badDocObjects
}

print.codoc <-
function(x, ...)
{
    ## Print method for "codoc" results: first report usages without a
    ## corresponding code object, then the argument-list mismatches per
    ## documentation object.  Returns 'x' invisibly.

    ## In general, functions in the code which only have an \alias but
    ## no \usage entry are not necessarily a problem---they might be
    ## mentioned in other parts of the Rd object documenting them, or be
    ## 'internal'.  However, if a package has a namespace (and this was
    ## used in the codoc() computations), then clearly all *exported*
    ## functions should have \usage entries.
    ## <FIXME>
    ## Things are not quite that simple.
    ## E.g., for generic functions with just a default and a formula
    ## method we typically do not have \usage for the generic itself.
    ## (This will change now with the new \method{}{} transformation.)
    ## Also, earlier versions of codoc() based on the defunct Perl code
    ## in extract-usage.pl (now removed) only dealt with the *functions*
    ## so all variables would come out as 'without usage information' ...
    ## As we can always access the information via
    ##    attr(codoc("foo"), "codeNotInUsages")
    ## disable reporting this for the time being ...
    ## <COMMENT>
    ##     objectsInCodeNotInUsages <-
    ##         attr(x, "objectsInCodeNotInUsages")
    ##     if(length(objectsInCodeNotInUsages)
    ##        && identical(TRUE, attr(x, "hasNamespace"))) {
    ##         if(length(objectsInCodeNotInUsages)) {
    ##             writeLines("Exported objects without usage information:")
    ##             .prettyPrint(objectsInCodeNotInUsages)
    ##             writeLines("")
    ##         }
    ##     }
    ## </COMMENT>
    ## Hmm.  But why not mention the exported *functions* without \usage
    ## information?  Note that currently there is no useful markup for
    ## S3 Ops group methods and S3 methods for subscripting and
    ## subassigning, so the corresponding generics and methods cannot
    ## reliably be distinguished, and hence would need to be excluded
    ## here as well.
    ## <COMMENT>
    ##     functionsInCodeNotInUsages <-
    ##         attr(x, "functionsInCodeNotInUsages")
    ##     if(length(functionsInCodeNotInUsages)
    ##        && identical(TRUE, attr(x, "hasNamespace"))) {
    ##         if(length(functionsInCodeNotInUsages)) {
    ##             writeLines("Exported functions without usage information:")
    ##             .prettyPrint(functionsInCodeNotInUsages)
    ##             writeLines("")
    ##         }
    ##     }
    ## </COMMENT>
    ## </FIXME>

    functionsInUsagesNotInCode <-
        attr(x, "functionsInUsagesNotInCode")
    if(length(functionsInUsagesNotInCode) > 0) {
        for(fname in names(functionsInUsagesNotInCode)) {
            writeLines(paste("Functions/methods with usage in",
                             "documentation object", sQuote(fname),
                             "but not in code:"))
            .prettyPrint(unique(functionsInUsagesNotInCode[[fname]]))
            writeLines("")
        }
    }

    if(length(x) == 0)
        return(invisible(x))
    ## If codoc() was run with use.values = FALSE, entries hold just the
    ## argument names (character vectors); otherwise they hold formals.
    hasOnlyNames <- is.character(x[[1]][[1]][["code"]])
    formatArgs <- function(s) {
        if(hasOnlyNames) {
            paste("function(", paste(s, collapse = ", "), ")", sep = "")
        }
        else {
            s <- paste(deparse(s), collapse = "")
            ## Strip ' = ' before ',' or ')' (arguments with no default
            ## value).  Note: "(" and ")" are not string escapes --
            ## writing "\(" here is a syntax error in R.
            s <- gsub(" = ([,)])", "\\1", s)
            gsub("^list", "function", s)
        }
    }
    for(fname in names(x)) {
        writeLines(paste("Codoc mismatches from documentation object ",
                         sQuote(fname), ":", sep = ""))
        xfname <- x[[fname]]
        for(i in seq(along = xfname))
            writeLines(c(xfname[[i]][["name"]],
                         strwrap(paste("Code:",
                                       formatArgs(xfname[[i]][["code"]])),
                                 indent = 2, exdent = 17),
                         strwrap(paste("Docs:",
                                       formatArgs(xfname[[i]][["docs"]])),
                                 indent = 2, exdent = 17)))
        writeLines("")
    }

    invisible(x)
}

### * codocClasses

codocClasses <-
function(package, lib.loc = NULL)
{
    ## Compare the 'structure' of S4 classes in an installed package
    ## between code and documentation.
    ## Currently, only compares the slot names.

    ## <NOTE>
    ## This is patterned after the current codoc().
    ## It would be useful to return the whole information on class slot
    ## names found in the code and matching documentation (rather than
    ## just the ones with mismatches).
    ## Currently, we only return the names of all classes checked.
    ## </NOTE>

    ## Result accumulator: one entry per Rd object with a slot name
    ## mismatch, each a list with elements 'name', 'code' and 'docs'.
    badRdObjects <- list()
    class(badRdObjects) <- "codocClasses"

    ## Argument handling.
    if(length(package) != 1)
        stop(paste("argument", sQuote("package"),
                   "must be of length 1"))
    dir <- .find.package(package, lib.loc)
    if(!fileTest("-d", file.path(dir, "R")))
        stop(paste("directory", sQuote(dir),
                   "does not contain R code"))
    if(!fileTest("-d", file.path(dir, "man")))
        stop(paste("directory", sQuote(dir),
                   "does not contain Rd sources"))
    isBase <- basename(dir) == "base"

    ## Load package into codeEnv.
    if(!isBase)
        .loadPackageQuietly(package, lib.loc)
    codeEnv <- .packageEnv(package)

    ## Without S4 dispatch (or without any S4 classes in the package)
    ## there is nothing to check.
    if(!.isMethodsDispatchOn())
        return(badRdObjects)

    S4classes <- methods::getClasses(codeEnv)
    if(!length(S4classes)) return(badRdObjects)

    ## Build Rd data base.
    db <- Rddb(package, lib.loc = dirname(dir))
    db <- lapply(db, Rdpp)

    ## Need some heuristics now.  When does an Rd object document just
    ## one S4 class so that we can compare (at least) the slot names?
    ## Try the following:
    ## * \docType{} identical to "class";
    ## * just one \alias{} (could also check whether it ends in
    ##   "-class");
    ## * a non-empty user-defined section 'Slots'.

    ## As going through the db to extract sections can take some time,
    ## we do the vectorized metadata computations first, and try to
    ## subscript whenever possible.

    aliases <- lapply(db, .getRdMetaDataFromRdLines, "alias")
    idx <- (sapply(aliases, length) == 1)
    if(!any(idx)) return(badRdObjects)
    db <- db[idx]; aliases <- aliases[idx]
    idx <- sapply(lapply(db, .getRdMetaDataFromRdLines, "docType"),
                  identical, "class")
    if(!any(idx)) return(badRdObjects)
    db <- db[idx]; aliases <- aliases[idx]
    ## Now collapse.
    db <- lapply(db, paste, collapse = "\n")
    RdSlots <- lapply(db, getRdSection, "Slots", FALSE)
    idx <- !sapply(RdSlots, identical, character())
    if(!any(idx)) return(badRdObjects)
    db <- db[idx]
    aliases <- unlist(aliases[idx])
    RdSlots <- RdSlots[idx]

    dbNames <- sapply(db, .getRdName)
    if(length(dbNames) < length(db)) {
        ## <FIXME>
        ## What should we really do in this case?
        ## (We cannot refer to the bad Rd objects because we do not know
        ## their names, and have no idea which file they came from ...)
        stop("cannot deal with Rd objects with missing/empty names")
        ## </FIXME>
    }
    names(db) <- dbNames

    ## Extract the documented slot names from the text of a
    ## user-defined section 'Slots'.
    .getSlotNamesFromSlotSectionText <- function(txt) {
        ## Get \describe (inside user-defined section 'Slots'
        txt <- unlist(sapply(txt, getRdSection, "describe"))
        ## Suppose this worked ...
        ## Get the \items inside \describe
        txt <- unlist(sapply(txt, getRdItems))
        if(!length(txt)) return(character())
        ## And now strip enclosing '\code{...}:'.
        ## Note that the braces must be escaped both for the string
        ## literal and for the regexp engine: the previous '\{' was
        ## not a valid escape sequence in an R character string.
        txt <- gsub("\\\\code\\{(.*)\\}:?", "\\1", as.character(txt))
        txt <- unlist(strsplit(txt, ", *"))
        txt <- sub("^[[:space:]]*", "", txt)
        txt <- sub("[[:space:]]*$", "", txt)
        txt
    }

    S4classesChecked <- character()
    for(cl in S4classes) {
        idx <- which(utils::topicName("class", cl) == aliases)
        if(length(idx) == 1) {
            ## Add sanity checking later ...
            S4classesChecked <- c(S4classesChecked, cl)
            slotsInCode <-
                sort(names(methods::slot(methods::getClass(cl, where =
                                                           codeEnv),
                                         "slots")))
            slotsInDocs <-
                sort(.getSlotNamesFromSlotSectionText(RdSlots[[idx]]))
            if(!identical(slotsInCode, slotsInDocs)) {
                badRdObjects[[names(db)[idx]]] <-
                    list(name = cl,
                         code = slotsInCode,
                         docs = slotsInDocs)
            }
        }
    }

    attr(badRdObjects, "S4classesChecked") <-
        as.character(S4classesChecked)
    badRdObjects
}

print.codocClasses <-
function(x, ...)
{
    ## Print method for codocClasses() results: for each Rd object
    ## with a slot name mismatch, show the slot names found in the
    ## code and in the documentation.  Returns its argument invisibly.
    if (length(x) == 0)
        return(invisible(x))
    joinNames <- function(s) paste(s, collapse = " ")
    for (docObj in names(x)) {
        writeLines(paste("S4 class codoc mismatches from ",
                         "documentation object ", sQuote(docObj), ":",
                         sep = ""))
        entry <- x[[docObj]]
        header <- paste("Slots for class", sQuote(entry[["name"]]))
        codeInfo <- strwrap(paste("Code:", joinNames(entry[["code"]])),
                            indent = 2, exdent = 8)
        docsInfo <- strwrap(paste("Docs:", joinNames(entry[["docs"]])),
                            indent = 2, exdent = 8)
        writeLines(c(header, codeInfo, docsInfo))
        writeLines("")
    }
    invisible(x)
}

### * codocData

codocData <-
function(package, lib.loc = NULL)
{
    ## Compare the 'structure' of 'data' objects (variables or data
    ## sets) in an installed package between code and documentation.
    ## Currently, only compares the variable names of data frames found.

    ## <NOTE>
    ## This is patterned after the current codoc().
    ## It would be useful to return the whole information on data frame
    ## variable names found in the code and matching documentation
    ## (rather than just the ones with mismatches).
    ## Currently, we only return the names of all data frames checked.
    ## </NOTE>

    ## Result accumulator: one entry per Rd object with a variable
    ## name mismatch, each a list with elements 'name', 'code' and
    ## 'docs'.
    badRdObjects <- list()
    class(badRdObjects) <- "codocData"

    ## Argument handling.
    if(length(package) != 1)
        stop(paste("argument", sQuote("package"),
                   "must be of length 1"))

    dir <- .find.package(package, lib.loc)
    if(!fileTest("-d", file.path(dir, "man")))
       stop(paste("directory", sQuote(dir),
                  "does not contain Rd sources"))
    isBase <- basename(dir) == "base"

    ## Load package into codeEnv.
    if(!isBase)
        .loadPackageQuietly(package, lib.loc)
    codeEnv <- .packageEnv(package)

    ## Could check here whether the package has any variables or data
    ## sets (and return if not).

    ## Build Rd data base.
    db <- Rddb(package, lib.loc = dirname(dir))
    db <- lapply(db, Rdpp)

    ## Need some heuristics now.  When does an Rd object document a
    ## data.frame (could add support for other classes later) variable
    ## or data set so that we can compare (at least) the names of the
    ## variables in the data frame?  Try the following:
    ## * just one \alias{};
    ## * if documentation was generated via prompt, there is a \format
    ##   section starting with 'A data frame with' (but many existing
    ##   Rd files instead have 'This data frame contains'), containing
    ##   one or more \describe sections inside.

    ## As going through the db to extract sections can take some time,
    ## we do the vectorized metadata computations first, and try to
    ## subscript whenever possible.
    aliases <- lapply(db, .getRdMetaDataFromRdLines, "alias")
    idx <- sapply(aliases, length) == 1
    if(!any(idx)) return(badRdObjects)
    db <- db[idx]; aliases <- aliases[idx]
    ## Now collapse.
    db <- lapply(db, paste, collapse = "\n")

    ## Extract the documented variable names from the \format section
    ## of Rd text documenting a data frame, or character() if the text
    ## does not look like data frame documentation.
    .getDataFrameVarNamesFromRdText <- function(txt) {
        txt <- getRdSection(txt, "format")
        ## Was there just one \format section?
        if(length(txt) != 1) return(character())
        ## What did it start with?
        if(!length(grep("^[ \n\t]*(A|This) data frame", txt)))
            return(character())
        ## Get \describe inside \format
        txt <- getRdSection(txt, "describe")
        ## Suppose this worked ...
        ## Get the \items inside \describe
        txt <- unlist(sapply(txt, getRdItems))
        if(!length(txt)) return(character())
        txt <- gsub("(.*):$", "\\1", as.character(txt))
        ## Strip enclosing '\code{...}:'.  The braces must be escaped
        ## both for the string literal and the regexp engine: the
        ## previous '\{' was not a valid escape in an R string.
        txt <- gsub("\\\\code\\{(.*)\\}:?", "\\1", txt)
        txt <- unlist(strsplit(txt, ", *"))
        txt <- sub("^[[:space:]]*", "", txt)
        txt <- sub("[[:space:]]*$", "", txt)
        txt
    }

    RdVarNames <- lapply(db, .getDataFrameVarNamesFromRdText)
    idx <- (sapply(RdVarNames, length) > 0)
    ## Bail out when *no* Rd object documents a data frame.  (Testing
    ## !length(idx) was useless: idx has one element per Rd object,
    ## whether TRUE or FALSE.)
    if(!any(idx)) return(badRdObjects)
    aliases <- unlist(aliases[idx])
    RdVarNames <- RdVarNames[idx]

    dbNames <- sapply(db[idx], .getRdName)
    if(length(dbNames) < length(aliases)) {
        ## <FIXME>
        ## What should we really do in this case?
        ## (We cannot refer to the bad Rd objects because we do not know
        ## their names, and have no idea which file they came from ...)
        stop("cannot deal with Rd objects with missing/empty names")
        ## </FIXME>
    }

    dataEnv <- new.env()
    dataDir <- file.path(dir, "data")
    hasData <- fileTest("-d", dataDir)
    dataExts <- .makeFileExts("data")

    ## Now go through the aliases.
    dataFramesChecked <- character()
    for(i in seq(along = aliases)) {
        ## Store the documented variable names.
        varNamesInDocs <- sort(RdVarNames[[i]])
        ## Try finding the variable or data set given by the alias.
        al <- aliases[i]
        if(exists(al, envir = codeEnv, mode = "list",
                  inherits = FALSE)) {
            al <- get(al, envir = codeEnv, mode = "list")
        }
        else if(hasData) {
            ## Should be a data set.  Skip aliases for which no file
            ## with a known data extension exists in the data
            ## directory.  (Note: any() is essential; the previous
            ## !length(dir(dataDir) %in% ...) was always FALSE for a
            ## non-empty data directory, as %in% returns one logical
            ## per file.)
            if(!any(dir(dataDir)
                    %in% paste(al, dataExts, sep = "."))) {
                next                    # What the hell did we pick up?
            }
            ## Try loading the data set into dataEnv.
            data(list = al, envir = dataEnv)
            if(exists(al, envir = dataEnv, mode = "list",
                      inherits = FALSE)) {
                al <- get(al, envir = dataEnv, mode = "list")
            }
            ## And clean up dataEnv.
            rm(list = ls(envir = dataEnv, all.names = TRUE),
               envir = dataEnv)
        }
        if(!is.data.frame(al)) next
        ## Now we should be ready:
        dataFramesChecked <- c(dataFramesChecked, aliases[i])
        varNamesInCode <- sort(names(al))
        if(!identical(varNamesInCode, varNamesInDocs))
            badRdObjects[[dbNames[i]]] <-
                list(name = aliases[i],
                     code = varNamesInCode,
                     docs = varNamesInDocs)
    }

    attr(badRdObjects, "dataFramesChecked") <-
        as.character(dataFramesChecked)
    badRdObjects
}

print.codocData <-
function(x, ...)
{
    ## Print method for codocData() results: for each Rd object with a
    ## variable name mismatch, show the data frame variable names
    ## found in the code and in the documentation.  Returns its
    ## argument invisibly.
    joinNames <- function(s) paste(s, collapse = " ")
    for (docObj in names(x)) {
        writeLines(paste("Data codoc mismatches from ",
                         "documentation object ", sQuote(docObj), ":",
                         sep = ""))
        entry <- x[[docObj]]
        header <- paste("Variables in data frame",
                        sQuote(entry[["name"]]))
        codeInfo <- strwrap(paste("Code:", joinNames(entry[["code"]])),
                            indent = 2, exdent = 8)
        docsInfo <- strwrap(paste("Docs:", joinNames(entry[["docs"]])),
                            indent = 2, exdent = 8)
        writeLines(c(header, codeInfo, docsInfo))
        writeLines("")
    }
    invisible(x)
}

### * checkDocFiles

checkDocFiles <-
function(package, dir, lib.loc = NULL)
{
    ## Check the Rd objects of a package (installed 'package' or
    ## source 'dir') for internal consistency per documentation
    ## object: arguments in \usage but not described in \arguments
    ## ('missing'), arguments described more than once ('duplicated'),
    ## arguments described but not in \usage ('overdoc'), and objects
    ## in \usage without a matching \alias ('unaliased').  Returns a
    ## list of class "checkDocFiles" indexed by Rd object name; the
    ## unparsable \usage chunks are kept in attribute "badLines".

    ## Argument handling.
    if(!missing(package)) {
        if(length(package) != 1)
            stop(paste("argument", sQuote("package"),
                       "must be of length 1"))
        dir <- .find.package(package, lib.loc)
        ## Using package installed in @code{dir} ...
    }
    else {
        if(missing(dir))
            stop(paste("you must specify", sQuote("package"),
                       "or", sQuote("dir")))
        ## Using sources from directory @code{dir} ...
        if(!fileTest("-d", dir))
            stop(paste("directory", sQuote(dir), "does not exist"))
        else
            dir <- filePathAsAbsolute(dir)
    }

    docsDir <- file.path(dir, "man")
    if(!fileTest("-d", docsDir))
        stop(paste("directory", sQuote(dir),
                   "does not contain Rd sources"))
    isBase <- basename(dir) == "base"

    ## Build the Rd data base, from the installed package or from the
    ## source directory.
    db <- if(!missing(package))
        Rddb(package, lib.loc = dirname(dir))
    else
        Rddb(dir = dir)

    db <- lapply(db, Rdpp)
    ## Do vectorized computations for metadata first.
    dbAliases <- lapply(db, .getRdMetaDataFromRdLines, "alias")
    dbKeywords <- lapply(db, .getRdMetaDataFromRdLines, "keyword")
    ## Now collapse.
    db <- lapply(db, paste, collapse = "\n")
    dbNames <- sapply(db, .getRdName)
    ## Safeguard against missing/empty names.
    if(length(dbNames) < length(db)) {
        ## <FIXME>
        ## What should we really do in this case?
        ## (We cannot refer to the bad Rd objects because we do not know
        ## their names, and have no idea which file they came from ...)
        stop("cannot deal with Rd objects with missing/empty names")
        ## </FIXME>
    }
    ## Exclude internal objects, and a few base/graphics Rd objects
    ## whose \usage is deliberately non-standard.
    ind <- sapply(dbKeywords,
                  function(x) any(grep("^ *internal *$", x)))
    if(isBase || basename(dir) == "graphics")
        ind <- ind | dbNames %in% c("Defunct", "Deprecated", "Devices")
    if(any(ind)) {# exclude them
        db <- db[!ind]
        dbNames <- dbNames[!ind]
        dbAliases <- dbAliases[!ind]
    }
    names(db) <- names(dbAliases) <- dbNames
    ## Extract and parse the \usage sections; chunks that fail to
    ## parse are recorded in attribute "badLines".
    dbUsageTexts <- lapply(db, getRdSection, "usage")
    dbUsages <- lapply(dbUsageTexts, .parse_usage_as_much_as_possible)
    ind <- as.logical(sapply(dbUsages,
                             function(x) !is.null(attr(x, "badLines"))))
    badLines <- sapply(dbUsages[ind], attr, "badLines")
    dbArgumentNames <- lapply(db, .getRdArgumentNames)

    functionsToBeIgnored <-
        .functionsToBeIgnoredFromUsage(basename(dir))

    badDocObjs <- list()

    for(docObj in dbNames) {

        exprs <- dbUsages[[docObj]]
        if(!length(exprs)) next

        aliases <- dbAliases[[docObj]]
        argNamesInArgList <- dbArgumentNames[[docObj]]

        ## Determine function names ('functions') and corresponding
        ## arguments ('argNamesInUsage') in the \usage.  Note how we
        ## try to deal with data set documentation.
        ind <- as.logical(sapply(exprs,
                                 function(e)
                                 ((length(e) > 1) &&
                                  !((length(e) == 2)
                                    && e[[1]] == as.symbol("data")))))
        exprs <- exprs[ind]
        ## Ordinary functions.
        functions <- as.character(sapply(exprs,
                                         function(e)
                                         as.character(e[[1]])))
        ## (Note that as.character(sapply(exprs, "[[", 1)) does not do
        ## what we want due to backquotifying.)
        ind <- ! functions %in% functionsToBeIgnored
        functions <- functions[ind]
        argNamesInUsage <-
            unlist(sapply(exprs[ind],
                          function(e) .argNamesFromCall(e[-1])))
        ## Replacement functions.
        ind <- as.logical(sapply(exprs,
                                 .isCallFromReplacementFunctionUsage))
        if(any(ind)) {
            ## For 'f(x) <- value' usages, record the function as
            ## 'f<-' and collect argument names from both sides of
            ## the assignment.
            replaceFuns <-
                paste(sapply(exprs[ind],
                             function(e) as.character(e[[2]][[1]])),
                      "<-",
                      sep = "")
            functions <- c(functions, replaceFuns)
            argNamesInUsage <-
                c(argNamesInUsage,
                  unlist(sapply(exprs[ind],
                                function(e)
                                c(.argNamesFromCall(e[[2]][-1]),
                                  .argNamesFromCall(e[[3]])))))
        }
        ## And finally transform the S3 \method{}{} markup into the
        ## usual function names ...
        ## <NOTE>
        ## If we were really picky, we would worry about possible
        ## namespace renaming.
        functions <- .transformS3methodMarkup(functions)
        ## </NOTE>

        ## Now analyze what we found.
        argNamesInUsageMissingInArgList <-
            argNamesInUsage[!argNamesInUsage %in% argNamesInArgList]
        argNamesInArgListMissingInUsage <-
            argNamesInArgList[!argNamesInArgList %in% argNamesInUsage]
        if(length(argNamesInArgListMissingInUsage) > 0) {
            usageText <- dbUsageTexts[[docObj]]
            badArgs <- character()
            ## In the case of 'over-documented' arguments, try to be
            ## defensive and reduce to arguments which either are not
            ## syntactically valid names or do not match the \usage text
            ## (modulo word boundaries).
            bad <- regexpr("^[[:alnum:]._]+$",
                           argNamesInArgListMissingInUsage) == -1
            if(any(bad)) {
                badArgs <- argNamesInArgListMissingInUsage[bad]
                argNamesInArgListMissingInUsage <-
                    argNamesInArgListMissingInUsage[!bad]
            }
            bad <- sapply(argNamesInArgListMissingInUsage,
                          function(x)
                          regexpr(paste("\\b", x, "\\b", sep = ""),
                                  usageText) == -1)
            argNamesInArgListMissingInUsage <-
                c(badArgs,
                  argNamesInArgListMissingInUsage[as.logical(bad)])
            ## Note that the fact that we can parse the raw \usage does
            ## not imply that over-documented arguments are a problem:
            ## this works for Rd files documenting e.g. shell utilities
            ## but fails for files with special syntax (Extract.Rd).
        }

        ## Also test whether the objects we found from the \usage all
        ## have aliases, provided that there is no alias which ends in
        ## '-deprecated' (see Deprecated.Rd).
        if(!any(grep("-deprecated$", aliases))) {
            ## Currently, there is no useful markup for S3 Ops group
            ## methods and S3 methods for subscripting and subassigning,
            ## so the corresponding generics and methods need to be
            ## excluded from this test (e.g., the usage for '+' in
            ## 'DateTimeClasses.Rd' ...).
            functions <- functions[!functions %in%
                                   .functionsWithNoUsefulS3methodMarkup]
            ## Argh.  There are good reasons for keeping \S4method{}{}
            ## as is, but of course this is not what the aliases use ...
            ## <FIXME>
            ## Should maybe use topicName(), but in any case, we should
            ## have functions for converting between the two forms, see
            ## also the code for undoc().
            aliases <- sub("([^,]+),(.+)-method$",
                           "\\\\S4method{\\1}{\\2}",
                           aliases)
            ## </FIXME>
            aliases <- gsub("\\\\%", "%", aliases)
            functionsNotInAliases <- functions[! functions %in% aliases]
        }
        else
            functionsNotInAliases <- character()

        ## Record the object only if at least one problem was found.
        if((length(argNamesInUsageMissingInArgList) > 0)
           || any(duplicated(argNamesInArgList))
           || (length(argNamesInArgListMissingInUsage) > 0)
           || (length(functionsNotInAliases) > 0))
            badDocObjs[[docObj]] <-
                list(missing = argNamesInUsageMissingInArgList,
                     duplicated =
                     argNamesInArgList[duplicated(argNamesInArgList)],
                     overdoc = argNamesInArgListMissingInUsage,
                     unaliased = functionsNotInAliases)

    }

    class(badDocObjs) <- "checkDocFiles"
    attr(badDocObjs, "badLines") <- badLines
    badDocObjs
}

print.checkDocFiles <-
function(x, ...)
{
    ## Print method for checkDocFiles() results: one report section
    ## per problem category and documentation object.  Returns its
    ## argument invisibly.
    reportSection <- function(items, what, docObj, dedup = TRUE) {
        ## Print one problem category, if non-empty.
        if(length(items) > 0) {
            writeLines(paste(what, " in documentation object ",
                             sQuote(docObj), ":", sep = ""))
            .prettyPrint(if(dedup) unique(items) else items)
        }
    }
    for(docObj in names(x)) {
        entry <- x[[docObj]]
        reportSection(entry[["missing"]],
                      "Undocumented arguments", docObj)
        ## Duplicates are reported as-is (no unique()), matching the
        ## duplicated() extraction done by checkDocFiles().
        reportSection(entry[["duplicated"]],
                      "Duplicated \\argument entries", docObj,
                      dedup = FALSE)
        reportSection(entry[["overdoc"]],
                      "Documented arguments not in \\usage", docObj)
        reportSection(entry[["unaliased"]],
                      "Objects in \\usage without \\alias", docObj)

        writeLines("")
    }
    invisible(x)
}

### * checkDocStyle

checkDocStyle <-
function(package, dir, lib.loc = NULL)
{
    ## Check the Rd objects of a package (installed 'package' or
    ## source 'dir') for S3 methods shown in \usage under their full
    ## name rather than via \method{}{} markup, and for methods
    ## documented on the same page as their generic.  Returns a list
    ## of class "checkDocStyle" indexed by Rd object name, each entry
    ## a list with elements 'withGeneric' and 'withFullName';
    ## unparsable \usage chunks are kept in attribute "badLines".

    hasNamespace <- FALSE

    ## Argument handling.
    if(!missing(package)) {
        if(length(package) != 1)
            stop(paste("argument", sQuote("package"),
                       "must be of length 1"))
        dir <- .find.package(package, lib.loc)
        ## Using package installed in 'dir' ...
        codeDir <- file.path(dir, "R")
        if(!fileTest("-d", codeDir))
            stop(paste("directory", sQuote(dir),
                       "does not contain R code"))
        docsDir <- file.path(dir, "man")
        if(!fileTest("-d", docsDir))
            stop(paste("directory", sQuote(dir),
                       "does not contain Rd sources"))
        isBase <- basename(dir) == "base"

        ## Load package into codeEnv.
        if(!isBase)
            .loadPackageQuietly(package, lib.loc)
        codeEnv <- .packageEnv(package)

        objectsInCode <- objects(envir = codeEnv, all.names = TRUE)

        ## Does the package have a namespace?
        if(packageHasNamespace(package, dirname(dir))) {
            hasNamespace <- TRUE
            ## Determine names of declared S3 methods and associated S3
            ## generics.
            nsS3methodsList <- getNamespaceInfo(package, "S3methods")
            nsS3generics <- nsS3methodsList[, 1]
            nsS3methods <- nsS3methodsList[, 3]
        }
    }
    else {
        if(missing(dir))
            stop(paste("you must specify", sQuote("package"),
                       "or", sQuote("dir")))
        ## Using sources from directory @code{dir} ...
        if(!fileTest("-d", dir))
            stop(paste("directory", sQuote(dir), "does not exist"))
        else
            dir <- filePathAsAbsolute(dir)
        codeDir <- file.path(dir, "R")
        if(!fileTest("-d", codeDir))
            stop(paste("directory", sQuote(dir),
                       "does not contain R code"))
        docsDir <- file.path(dir, "man")
        if(!fileTest("-d", docsDir))
            stop(paste("directory", sQuote(dir),
                       "does not contain Rd sources"))
        isBase <- basename(dir) == "base"

        ## Collect code into codeFile.
        codeFile <- tempfile("Rcode")
        on.exit(unlink(codeFile))
        if(!file.create(codeFile)) stop("unable to create ", codeFile)
        if(!all(file.append(codeFile, listFilesWithType(codeDir, "code"))))
           stop("unable to write code files")

        ## Read code from codeFile into codeEnv.
        codeEnv <- new.env()
        yy <- try(.sourceAssignments(codeFile, env = codeEnv))
        if(inherits(yy, "try-error")) {
            stop("cannot source package code")
        }

        objectsInCode <- objects(envir = codeEnv, all.names = TRUE)

        ## Does the package have a NAMESPACE file?  Note that when
        ## working on the sources we (currently?) cannot deal with the
        ## (experimental) alternative way of specifying the namespace.
        if(file.exists(file.path(dir, "NAMESPACE"))) {
            hasNamespace <- TRUE
            nsInfo <- parseNamespaceFile(basename(dir), dirname(dir))
            ## Determine exported objects.
            OK <- objectsInCode[objectsInCode %in% nsInfo$exports]
            for(p in nsInfo$exportPatterns)
                OK <- c(OK, grep(p, objectsInCode, value = TRUE))
            objectsInCode <- unique(OK)
            ## Determine names of declared S3 methods and associated S3
            ## generics.
            nsS3methodsList <- .getNamespaceS3methodsList(nsInfo)
            nsS3generics <- nsS3methodsList[, 1]
            nsS3methods <- nsS3methodsList[, 3]
        }

    }

    ## Find the function objects in the given package.
    functionsInCode <-
        objectsInCode[sapply(objectsInCode,
                             function(f)
                             is.function(get(f, envir = codeEnv)))
                      == TRUE]

    ## Find all generic functions in the given package and (the current)
    ## base package.
    allGenerics <- character()
    envList <- list(codeEnv)
    if(!isBase) envList <- c(envList, list(as.environment(NULL)))
    for(env in envList) {
        ## Find all available S3 generics.
        objectsInEnv <- if(identical(env, codeEnv)) {
            ## We only want the exported ones anyway ...
            functionsInCode
        }
        else
            objects(envir = env, all.names = TRUE)
        if(length(objectsInEnv))
            allGenerics <-
                c(allGenerics,
                  objectsInEnv[sapply(objectsInEnv, .isS3Generic, env)==TRUE])
    }
    ## Add internal S3 generics and S3 group generics.
    allGenerics <-
        c(allGenerics,
          .getInternalS3generics(),
          .getS3groupGenerics())

    ## Find all methods in the given package for the generic functions
    ## determined above.  Store as a list indexed by the names of the
    ## generic functions.
    methodsStopList <- .makeS3MethodsStopList(basename(dir))
    methodsInPackage <- sapply(allGenerics, function(g) {
        ## <FIXME>
        ## We should really determine the name g dispatches for, see
        ## a current version of methods() [2003-07-07].  (Care is needed
        ## for internal generics and group generics.)
        ## Matching via grep() is tricky with e.g. a '$' in the name of
        ## the generic function ... hence substr().
        name <- paste(g, ".", sep = "")
        methods <-
            functionsInCode[substr(functionsInCode, 1, nchar(name))
                            == name]
        ## </FIXME>
        methods <- methods[! methods %in% methodsStopList]
        if(hasNamespace) {
            ## Find registered methods for generic g.
            methods <- c(methods, nsS3methods[nsS3generics == g])
        }
        methods
    })
    allMethodsInPackage <- unlist(methodsInPackage)

    ## Build the Rd data base.
    db <- if(!missing(package))
        Rddb(package, lib.loc = dirname(dir))
    else
        Rddb(dir = dir)

    db <- lapply(db,
                 function(f) paste(Rdpp(f), collapse = "\n"))
    names(db) <- dbNames <- sapply(db, getRdSection, "name")

    ## Extract and parse the \usage sections; record unparsable chunks.
    dbUsageTexts <- lapply(db, getRdSection, "usage")
    dbUsages <- lapply(dbUsageTexts, .parse_usage_as_much_as_possible)
    ind <- sapply(dbUsages,
                  function(x) !is.null(attr(x, "badLines")))
    badLines <- sapply(dbUsages[ind], attr, "badLines")

    badDocObjs <- list()

    for(docObj in dbNames) {

        ## Determine function names in the \usage.
        exprs <- dbUsages[[docObj]]
        exprs <- exprs[sapply(exprs, length) > 1]
        ## Ordinary functions.
        functions <-
            as.character(sapply(exprs,
                                function(e) as.character(e[[1]])))
        ## (Note that as.character(sapply(exprs, "[[", 1)) does not do
        ## what we want due to backquotifying.)
        ## Replacement functions.
        ind <- as.logical(sapply(exprs,
                                 .isCallFromReplacementFunctionUsage))
        if(any(ind)) {
            replaceFuns <-
                paste(sapply(exprs[ind],
                             function(e) as.character(e[[2]][[1]])),
                      "<-",
                      sep = "")
            functions <- c(functions, replaceFuns)
        }

        ## Methods documented under their full name (prior to the S3
        ## markup transformation below).
        methodsWithFullName <-
            functions[functions %in% allMethodsInPackage]

        functions <- .transformS3methodMarkup(functions)

        ## Methods documented on the same page as their generic.
        methodsWithGeneric <-
            sapply(functions[functions %in% allGenerics],
                   function(g)
                   functions[functions %in% methodsInPackage[[g]]],
                   simplify = FALSE)

        ## (The second test previously read
        ## 'length(methodsWithFullName > 0)' with the comparison
        ## accidentally inside the length() call.)
        if((length(methodsWithGeneric) > 0) ||
           (length(methodsWithFullName) > 0))
            badDocObjs[[docObj]] <-
                list(withGeneric  = methodsWithGeneric,
                     withFullName = methodsWithFullName)

    }

    attr(badDocObjs, "badLines") <- badLines
    class(badDocObjs) <- "checkDocStyle"
    badDocObjs
}

print.checkDocStyle <-
function(x, ...) {
    ## Print method for checkDocStyle() results: reports S3 methods
    ## shown with their full name in \usage.  Returns its argument
    ## invisibly.
    for(docObj in names(x)) {
        ## <NOTE>
        ## With \method{GENERIC}{CLASS} now being transformed to show
        ## both GENERIC and CLASS info, documenting S3 methods on the
        ## same page as their generic is not necessarily a problem any
        ## more (as one can refer to the generic or the methods in the
        ## documentation, in particular for the primary argument).
        ## Hence, even if we still provide information about this, we
        ## no longer print it by default.  One can still access it via
        ##   lapply(checkDocStyle("foo"), "[[", "withGeneric")
        ## (but of course it does not print that nicely anymore).
        ## </NOTE>
        methodsWithFullName <- x[[docObj]][["withFullName"]]
        ## (Previously 'length(methodsWithFullName > 0)', with the
        ## comparison accidentally inside the length() call.)
        if(length(methodsWithFullName) > 0) {
            writeLines(paste("S3 methods shown with full name in ",
                             "documentation object ",
                             sQuote(docObj), ":", sep = ""))
            writeLines(strwrap(paste(methodsWithFullName,
                                     collapse = " "),
                               indent = 2, exdent = 2))
            writeLines("")
        }
    }
    invisible(x)
}

### * checkFF

## checkFF: check the foreign function calls (.C, .Fortran, .Call,
## .External and their graphics variants) in an installed package
## ('package'), a package source directory ('dir'), or a single file of
## R code ('file').  Calls lacking a 'PACKAGE' argument are collected
## and returned as an object of class "checkFF".  If 'verbose', each
## foreign function call seen is reported with an 'OK'/'MISSING' tag
## and the result is returned invisibly (so output is not duplicated).
checkFF <-
function(package, dir, file, lib.loc = NULL,
         verbose = getOption("verbose"))
{
    ## Argument handling: 'package' takes precedence over 'dir', which
    ## takes precedence over 'file'; at least one must be given.
    if(!missing(package)) {
        if(length(package) != 1)
            stop(paste("argument", sQuote("package"),
                       "must be of length 1"))
        dir <- .find.package(package, lib.loc)
        ## Using package installed in @code{dir} ...
        codeDir <- file.path(dir, "R")
        if(!fileTest("-d", codeDir))
            stop(paste("directory", sQuote(dir),
                       "does not contain R code"))
        ## Load the package quietly so that its code objects can be
        ## inspected; base is always available.
        if(basename(dir) != "base")
            .loadPackageQuietly(package, dirname(dir))
        ## With a namespace, look at all objects in the namespace, not
        ## just the exported ones.
        codeEnv <- if(packageHasNamespace(package, dirname(dir)))
            asNamespace(package)
        else
            .packageEnv(package)
    }
    else if(!missing(dir)) {
        ## Using sources from directory @code{dir} ...
        if(!fileTest("-d", dir))
            stop(paste("directory", sQuote(dir), "does not exist"))
        else
            dir <- filePathAsAbsolute(dir)
        codeDir <- file.path(dir, "R")
        if(!fileTest("-d", codeDir))
            stop(paste("directory", sQuote(dir),
                       "does not contain R code"))
        ## Concatenate all code files into a temporary file which is
        ## parsed below.
        file <- tempfile()
        on.exit(unlink(file))
        if(!file.create(file)) stop("unable to create ", file)
        if(!all(file.append(file, listFilesWithType(codeDir, "code"))))
            stop("unable to write code files")
    }
    else if(missing(file)) {
        stop(paste("you must specify ", sQuote("package"), ", ",
                   sQuote("dir"), " or ", sQuote("file"), sep = ""))
    }

    if(missing(package) && !fileTest("-f", file))
        stop(paste("file", sQuote(file), "does not exist"))

    ## <FIXME>
    ## Should there really be a 'verbose' argument?
    ## It may be useful to extract all foreign function calls but then
    ## we would want the calls back ...
    ## What we currently do is the following: if 'verbose' is true, we
    ## show all foreign function calls in abbreviated form with the line
    ## ending in either 'OK' or 'MISSING', and we return the list of
    ## 'bad' FF calls (i.e., where the 'PACKAGE' argument is missing)
    ## *invisibly* (so that output is not duplicated).
    ## Otherwise, if not verbose, we return the list of bad FF calls.
    ## </FIXME>

    badExprs <- list()
    FFfuns <- c(".C", ".Fortran", ".Call", ".External",
                ".Call.graphics", ".External.graphics")
    ## Recursively walk an expression/call tree, accumulating (via
    ## '<<-' into 'badExprs' above) every foreign function call whose
    ## 'PACKAGE' argument is missing.
    findBadExprs <- function(e) {
        if(is.call(e) || is.expression(e)) {
            ## <NOTE>
            ## This picks up all calls, e.g. a$b, and they may convert
            ## to a vector.  The function is the first element in all
            ## the calls we are interested in.
            ## BDR 2002-11-28
            ## </NOTE>
            if(as.character(e[[1]])[1] %in% FFfuns) {
                parg <- if(is.null(e[["PACKAGE"]])) {
                    badExprs <<- c(badExprs, e)
                    "MISSING"
                }
                else
                    "OK"
                if(verbose) {
                    cat(e[[1]], "(", deparse(e[[2]]), ", ...): ", parg,
                        "\n", sep = "")
                }
            }
            ## Recurse into all subexpressions.
            for(i in seq(along = e)) Recall(e[[i]])
        }
    }

    if(!missing(package)) {
        ## Scan the bodies of all functions in the package.
        exprs <- lapply(ls(envir = codeEnv, all.names = TRUE),
                        function(f) {
                            f <- get(f, envir = codeEnv)
                            if(typeof(f) == "closure")
                                body(f)
                            else
                                NULL
                        })
        if(.isMethodsDispatchOn()) {
            ## Also check the code in S4 methods.
            ## This may find things twice if a setMethod() with a bad FF
            ## call is from inside a function (e.g., InitMethods()).
            for(f in methods::getGenerics(codeEnv)) {
                meths <-
                    methods::linearizeMlist(methods::getMethodsMetaData(f, codeEnv))
                exprs <-
                    c(exprs,
                      lapply(methods::slot(meths, "methods"), body))
            }
        }
    }
    else {
        ## Parse the collected (or user-given) code file.
        exprs <- try(parse(file = file, n = -1))
        if(inherits(exprs, "try-error"))
            stop(paste("parse error in file", sQuote(file)))
    }
    for(i in seq(along = exprs)) findBadExprs(exprs[[i]])
    class(badExprs) <- "checkFF"
    if(verbose)
        invisible(badExprs)
    else
        badExprs
}

print.checkFF <-
function(x, ...)
{
    ## Report each foreign function call lacking a 'PACKAGE' argument
    ## as a ".NAME(symbol, ...)" line; print nothing when all is well.
    if(length(x) > 0) {
        writeLines(paste("Foreign function calls without",
                         sQuote("PACKAGE"), "argument:"))
        for(e in x) {
            writeLines(paste(deparse(e[[1]]), "(", deparse(e[[2]]),
                             ", ...)", sep = ""))
        }
    }
    invisible(x)
}

### * checkS3methods

## checkS3methods: check that the formal arguments of the S3 methods
## in a package (installed 'package' or source directory 'dir') are
## compatible with those of the corresponding generics: a method must
## have all arguments of its generic, with positional arguments in the
## same positions (a '...' in the method swallows anything).  Returns
## the offending generic/method argument-list pairs as an object of
## class "checkS3methods".
checkS3methods <-
function(package, dir, lib.loc = NULL)
{
    hasNamespace <- FALSE
    ## If an installed package has a namespace, we need to record the S3
    ## methods which are registered but not exported (so that we can
    ## get() them from the right place).
    S3reg <- character(0)

    ## Argument handling.
    if(!missing(package)) {
        if(length(package) != 1)
            stop(paste("argument", sQuote("package"),
                       "must be of length 1"))
        dir <- .find.package(package, lib.loc)
        ## Using package installed in @code{dir} ...
        codeDir <- file.path(dir, "R")
        if(!fileTest("-d", codeDir))
            stop(paste("directory", sQuote(dir),
                       "does not contain R code"))
        isBase <- basename(dir) == "base"

        ## Load package into codeEnv.
        if(!isBase)
            .loadPackageQuietly(package, lib.loc)
        codeEnv <- .packageEnv(package)

        objectsInCode <- objects(envir = codeEnv, all.names = TRUE)

        ## Does the package have a namespace?
        if(packageHasNamespace(package, dirname(dir))) {
            hasNamespace <- TRUE
            ## Determine names of declared S3 methods and associated S3
            ## generics.
            nsS3methodsList <- getNamespaceInfo(package, "S3methods")
            nsS3generics <- nsS3methodsList[, 1]
            nsS3methods <- nsS3methodsList[, 3]
            ## Determine unexported but declared S3 methods.
            S3reg <- nsS3methods[! nsS3methods %in% objectsInCode]
        }
    }
    else {
        if(missing(dir))
            stop(paste("you must specify", sQuote("package"),
                       "or", sQuote("dir")))
        ## Using sources from directory @code{dir} ...
        if(!fileTest("-d", dir))
            stop(paste("directory", sQuote(dir), "does not exist"))
        else
            dir <- filePathAsAbsolute(dir)
        codeDir <- file.path(dir, "R")
        if(!fileTest("-d", codeDir))
            stop(paste("directory", sQuote(dir),
                       "does not contain R code"))
        isBase <- basename(dir) == "base"

        ## Collect code into codeFile.
        codeFile <- tempfile("Rcode")
        on.exit(unlink(codeFile))
        if(!file.create(codeFile)) stop("unable to create ", codeFile)
        if(!all(file.append(codeFile, listFilesWithType(codeDir, "code"))))
            stop("unable to write code files")

        ## Read code from codeFile into codeEnv.
        codeEnv <- new.env()
        yy <- try(.sourceAssignments(codeFile, env = codeEnv))
        if(inherits(yy, "try-error")) {
            stop("cannot source package code")
        }

        objectsInCode <- objects(envir = codeEnv, all.names = TRUE)

        ## Does the package have a NAMESPACE file?  Note that when
        ## working on the sources we (currently?) cannot deal with the
        ## (experimental) alternative way of specifying the namespace.
        if(file.exists(file.path(dir, "NAMESPACE"))) {
            hasNamespace <- TRUE
            nsInfo <- parseNamespaceFile(basename(dir), dirname(dir))
            ## Determine exported objects.
            OK <- objectsInCode[objectsInCode %in% nsInfo$exports]
            for(p in nsInfo$exportPatterns)
                OK <- c(OK, grep(p, objectsInCode, value = TRUE))
            objectsInCode <- unique(OK)
            ## Determine names of declared S3 methods and associated S3
            ## generics.
            nsS3methodsList <- .getNamespaceS3methodsList(nsInfo)
            nsS3generics <- nsS3methodsList[, 1]
            nsS3methods <- nsS3methodsList[, 3]
        }

    }

    ## Find the function objects in the given package.
    ## (The '== TRUE' keeps only definite TRUEs.)
    functionsInCode <-
        objectsInCode[sapply(objectsInCode,
                             function(f)
                             is.function(get(f, envir = codeEnv)))
                      == TRUE]

    methodsStopList <- .makeS3MethodsStopList(basename(dir))
    S3groupGenerics <- .getS3groupGenerics()

    checkArgs <- function(g, m, env) {
        ## Do the arguments of method m (in codeEnv) 'extend' those of
        ## the generic g from environment env?  The method must have all
        ## arguments the generic has, with positional arguments of g in
        ## the same positions for m.
        ## Exception: '...' in the method swallows anything.
        genfun <- get(g, envir = env)
        gArgs <- names(formals(genfun))
        ## Special-case plot(): drop its second formal argument.
        if(g == "plot") gArgs <- gArgs[-2]
        ogArgs <- gArgs
        gm <- if(m %in% S3reg) {
            ## Registered but unexported method: fetch it from the S3
            ## methods table of the environment defining the generic.
            ## See registerS3method() in namespace.R.
            defenv <-
                if (g %in% S3groupGenerics) .BaseNamespaceEnv
                else if (typeof(genfun) == "closure") environment(genfun)
                else .BaseNamespaceEnv
            S3Table <- get(".__S3MethodsTable__.", envir = defenv)
            if(!exists(m, envir = S3Table)) {
                warning(paste("declared S3 method", sQuote(m),
                              "not found"),
                        call. = FALSE)
                return(NULL)
            } else get(m, envir = S3Table)
        } else get(m, envir = codeEnv)
        mArgs <- omArgs <- names(formals(gm))
        ## If m is a formula method, its first argument *may* be called
        ## formula.  (Note that any argument name mismatch throws an
        ## error in current S-PLUS versions.)
        if(length(grep("\\.formula$", m)) > 0) {
            gArgs <- gArgs[-1]
            mArgs <- mArgs[-1]
        }
        ## Positional arguments of g: those before its '...' (if any).
        dotsPos <- which(gArgs == "...")
        ipos <- if(length(dotsPos) > 0)
            seq(from = 1, length = dotsPos - 1)
        else
            seq(along = gArgs)

        ## A '...' in the method swallows all later arguments of g.
        dotsPos <- which(mArgs == "...")
        if(length(dotsPos) > 0)
            ipos <- ipos[seq(from = 1, length = dotsPos - 1)]
        posMatchOK <- identical(gArgs[ipos], mArgs[ipos])
        argMatchOK <- all(gArgs %in% mArgs) || length(dotsPos) > 0
        if(posMatchOK && argMatchOK)
            NULL
        else {
            ## Report the full original argument lists, named by
            ## generic and method.
            l <- list(ogArgs, omArgs)
            names(l) <- c(g, m)
            list(l)
        }
    }

    ## Deal with S3 group methods.  We create a separate environment
    ## with pseudo-definitions for these.
    S3groupGenericsEnv <- new.env()
    assign("Math",
           function(x, ...) UseMethod("Math"),
           envir = S3groupGenericsEnv)
    assign("Ops",
           function(e1, e2) UseMethod("Ops"),
           envir = S3groupGenericsEnv)
    assign("Summary",
           function(x, ...) UseMethod("Summary"),
           envir = S3groupGenericsEnv)
    assign("Complex",
           function(x, ...) UseMethod("Complex"),
           envir = S3groupGenericsEnv)

    ## Now determine the 'bad' methods in the function objects of the
    ## package.
    badMethods <- list()
    envList <- list(codeEnv, S3groupGenericsEnv)
    if(!isBase) {
        ## <FIXME>
        ## Look for generics in the whole of the former base.
        ## Maybe eventually change this ...
        ## (Note that this requires that these packages are already
        ## attached.)
        envList <- c(envList,
                     list(as.environment(NULL)),
                     list(as.environment("package:graphics")),
                     list(as.environment("package:stats")),
                     list(as.environment("package:utils"))
                     )
        ## </FIXME>
        ## If 'package' was given, also use the loaded namespaces and
        ## attached packages listed in the DESCRIPTION Depends field.
        ## Not sure if this is the best approach: we could also try to
        ## determine which namespaces/packages were made available by
        ## loading the package (which should work at least when run from
        ## R CMD check), or we could simply attach every package listed
        ## as a dependency ... or perhaps do both.
        if(!missing(package)) {
            db <- try(read.dcf(file.path(dir, "DESCRIPTION"))[1, ],
                      silent = TRUE)
            if(inherits(db, "try-error"))
                stop(paste("package directory", sQuote(dir),
                           "has no valid DESCRIPTION file"))
            if(!is.na(depends <- db["Depends"])) {
                depends <- unlist(strsplit(depends, ","))
                ## Strip leading whitespace and version specs, keeping
                ## just the package names; drop the entry for R itself.
                depends <-
                    sub("^[[:space:]]*([[:alnum:].]+).*", "\\1", depends)
                depends <- depends[depends != "R"]
                ind <- depends %in% loadedNamespaces()
                if(any(ind)) {
                    envList <-
                        c(envList, lapply(depends[ind], getNamespace))
                    depends <- depends[!ind]
                }
                ind <- depends %in% .packages()
                if(any(ind)) {
                    envList <-
                        c(envList, lapply(depends[ind], .packageEnv))
                }
            }
        }
    }
    for(env in envList) {
        ## Find all available S3 generics.
        objectsInEnv <- if(identical(env, codeEnv)) {
            ## We only want the exported ones anyway ...
            functionsInCode
        }
        else
            objects(envir = env, all.names = TRUE)
        S3generics <- if(length(objectsInEnv))
            objectsInEnv[sapply(objectsInEnv, .isS3Generic, env) == TRUE]
        else character(0)

        ## For base, also add the internal S3 generics which are not
        ## .Primitive (as checkArgs() does not deal with these).
        if(identical(env, as.environment(NULL))) {
            internalS3generics <- .getInternalS3generics()
            internalS3generics <-
                internalS3generics[sapply(internalS3generics,
                                          .isPrimitive, NULL)
                                   == FALSE]
            S3generics <- c(S3generics, internalS3generics)
        }

        for(g in S3generics) {
            ## Find all methods in functionsInCode for S3 generic g.
            ## <FIXME>
            ## We should really determine the name g dispatches for, see
            ## a current version of methods() [2003-07-07].  (Care is
            ## needed for internal generics and group generics.)
            ## Matching via grep() is tricky with e.g. a '$' in the name
            ## of the generic function ... hence substr().
            name <- paste(g, ".", sep = "")
            methods <-
                functionsInCode[substr(functionsInCode, 1, nchar(name))
                                == name]
            ## </FIXME>
            methods <- methods[! methods %in% methodsStopList]
            if(hasNamespace) {
                ## Find registered methods for generic g.
                methods <- c(methods, nsS3methods[nsS3generics == g])
            }

            for(m in methods)
                ## Both all() and all.equal() are generic: names
                ## starting with "all.equal" are methods of all.equal()
                ## and must not be checked against all().  (Note that
                ## the former 'm[-grep(...)]' idiom was wrong: with no
                ## match, m[-integer(0)] is empty, so genuine all.*
                ## methods were silently skipped.)
                badMethods <- if(g == "all"
                                 && length(grep("^all\\.equal", m)) > 0)
                    badMethods
                else
                    c(badMethods, checkArgs(g, m, env))
        }
    }

    class(badMethods) <- "checkS3methods"
    badMethods
}

print.checkS3methods <-
function(x, ...)
{
    ## For each offending generic/method pair, show both argument
    ## lists as wrapped "name:" / "  function(...)" sections.
    describe <- function(name, args) {
        sig <- paste("function(", paste(args, collapse = ", "), ")",
                     sep = "")
        c(paste(name, ":", sep = ""),
          strwrap(sig, indent = 2, exdent = 11))
    }
    for(entry in x) {
        writeLines(c(describe(names(entry)[1], entry[[1]]),
                     describe(names(entry)[2], entry[[2]]),
                     ""))
    }
    invisible(x)
}

### * checkReplaceFuns

## checkReplaceFuns: check that the replacement functions (and S3/S4
## replacement methods) of a package have 'value' as the name of their
## last formal argument.  Works on an installed package ('package') or
## a source directory ('dir'); returns the names of the offending
## functions as an object of class "checkReplaceFuns".
checkReplaceFuns <-
function(package, dir, lib.loc = NULL)
{
    hasNamespace <- FALSE

    ## Argument handling.
    if(!missing(package)) {
        if(length(package) != 1)
            stop(paste("argument", sQuote("package"),
                       "must be of length 1"))
        dir <- .find.package(package, lib.loc)
        ## Using package installed in @code{dir} ...
        codeDir <- file.path(dir, "R")
        if(!fileTest("-d", codeDir))
            stop(paste("directory", sQuote(dir),
                       "does not contain R code"))
        isBase <- basename(dir) == "base"

        ## Load package into codeEnv.
        if(!isBase)
            .loadPackageQuietly(package, lib.loc)
        ## In case the package has a namespace, we really want to check
        ## all replacement functions in the package.  (If not, we need
        ## to change the code for the non-installed case to only look at
        ## exported (replacement) functions.)
        if(packageHasNamespace(package, dirname(dir))) {
            hasNamespace <- TRUE
            codeEnv <- asNamespace(package)
            nsS3methodsList <- getNamespaceInfo(package, "S3methods")
        }
        else
            codeEnv <- .packageEnv(package)
    }

    else {
        if(missing(dir))
            stop(paste("you must specify", sQuote("package"),
                       "or", sQuote("dir")))
        ## Using sources from directory @code{dir} ...
        if(!fileTest("-d", dir))
            stop(paste("directory", sQuote(dir), "does not exist"))
        else
            dir <- filePathAsAbsolute(dir)
        codeDir <- file.path(dir, "R")
        if(!fileTest("-d", codeDir))
            stop(paste("directory", sQuote(dir),
                       "does not contain R code"))
        isBase <- basename(dir) == "base"

        ## Collect code into codeFile.
        codeFile <- tempfile("Rcode")
        on.exit(unlink(codeFile))
        if(!file.create(codeFile)) stop("unable to create ", codeFile)
        if(!all(file.append(codeFile, listFilesWithType(codeDir, "code"))))
            stop("unable to write code files")

        ## Read code from codeFile into codeEnv.
        codeEnv <- new.env()
        yy <- try(.sourceAssignments(codeFile, env = codeEnv))
        if(inherits(yy, "try-error")) {
            stop("cannot source package code")
        }

        ## Does the package have a NAMESPACE file?  Note that when
        ## working on the sources we (currently?) cannot deal with the
        ## (experimental) alternative way of specifying the namespace.
        if(file.exists(file.path(dir, "NAMESPACE"))) {
            hasNamespace <- TRUE
            nsInfo <- parseNamespaceFile(basename(dir), dirname(dir))
            nsS3methodsList <- .getNamespaceS3methodsList(nsInfo)
        }
    }

    objectsInCode <- objects(envir = codeEnv, all.names = TRUE)
    replaceFuns <- character()

    if(hasNamespace) {
        nsS3generics <- nsS3methodsList[, 1]
        nsS3methods <- nsS3methodsList[, 3]
        ## S3 replacement methods from namespace registration?
        ## (any() over a vector of indices: TRUE iff non-empty.)
        idx <- grep("<-$", nsS3generics)
        if(any(idx)) replaceFuns <- nsS3methods[idx]
        ## Now remove the functions registered as S3 methods.
        objectsInCode <-
            objectsInCode[! objectsInCode %in% nsS3methods]
    }

    ## Any object whose name contains '<-' is taken to be a
    ## replacement function.
    replaceFuns <-
        c(replaceFuns, grep("<-", objectsInCode, value = TRUE))

    ## TRUE iff f has no formals at all or its last formal argument is
    ## named 'value'.
    .checkLastFormalArg <- function(f) {
        argNames <- names(formals(f))
        if(!length(argNames))
            TRUE                        # most likely a .Primitive()
        else
            identical(argNames[length(argNames)], "value")
    }

    ## Find the replacement functions (which have formal arguments) with
    ## last arg not named 'value'.
    badReplaceFuns <- if(length(replaceFuns)) {
        replaceFuns[sapply(replaceFuns, function(f) {
            ## Always get the functions from codeEnv ...
            ## Should maybe get S3 methods from the registry ...
            f <- get(f, envir = codeEnv)
            if(!is.function(f)) return(TRUE)
            .checkLastFormalArg(f)
        }) == FALSE]} else character(0)

    if(.isMethodsDispatchOn()) {
        ## Also check S4 replacement methods; offenders are reported in
        ## Rd markup style as \S4method{GENERIC}{SIGNATURE}.
        S4generics <- methods::getGenerics(codeEnv)
        ## Assume that the ones with names ending in '<-' are always
        ## replacement functions.
        S4generics <- grep("<-$", S4generics, value = TRUE)
        badS4ReplaceMethods <-
            sapply(S4generics,
                   function(f) {
                       meths <- methods::linearizeMlist(methods::getMethodsMetaData(f, codeEnv))
                       ind <- which(sapply(methods::slot(meths,
                                                         "methods"),
                                           .checkLastFormalArg)
                                    == FALSE)
                       if(!length(ind))
                           character()
                       else {
                           ## Comma-separated class signatures of the
                           ## offending methods.
                           sigs <-
                               sapply(methods::slot(meths,
                                                    "classes")[ind],
                                      paste, collapse = ",")
                           paste("\\S4method{", f, "}{", sigs, "}",
                                 sep = "")
                       }
                   })
        badReplaceFuns <-
            c(badReplaceFuns,
              unlist(badS4ReplaceMethods, use.names = FALSE))
    }


    class(badReplaceFuns) <- "checkReplaceFuns"
    badReplaceFuns
}

print.checkReplaceFuns <-
function(x, ...)
{
    ## Pretty-print the offending replacement function names, if any.
    bad <- unclass(x)
    if(length(bad) > 0)
        .prettyPrint(bad)
    invisible(x)
}

### * checkTnF

## checkTnF: find occurrences of T and F (which, unlike TRUE and FALSE,
## can be masked by user variables) in the code of an installed package
## ('package'), a package source directory ('dir'), or a single file
## ('file').  For installed packages the extracted examples in 'R-ex'
## are checked as well; for source directories, the example code in the
## Rd files is.  Returns an object of class "checkTnF": per file, the
## list of expressions containing a T or F.
checkTnF <-
function(package, dir, file, lib.loc = NULL)
{
    codeFiles <- docsFiles <- character(0)

    ## Argument handling.
    if(!missing(package)) {
        if(length(package) != 1)
            stop(paste("argument", sQuote("package"),
                       "must be of length 1"))
        ## Using package installed in @code{dir} ...
        dir <- .find.package(package, lib.loc)
        if(file.exists(file.path(dir, "R", "all.rda"))) {
            ## Code installed as a binary image cannot be re-parsed.
            warning("cannot check R code installed as image")
        }
        codeFile <- file.path(dir, "R", package)
        if(file.exists(codeFile))       # could be data-only
            codeFiles <- codeFile
        ## Also check the extracted example code, if present.
        exampleDir <- file.path(dir, "R-ex")
        if(fileTest("-d", exampleDir)) {
            codeFiles <- c(codeFiles,
                           listFilesWithExts(exampleDir, "R"))

        }
    }
    else if(!missing(dir)) {
        ## Using sources from directory @code{dir} ...
        if(!fileTest("-d", dir))
            stop(paste("directory", sQuote(dir), "does not exist"))
        else
            dir <- filePathAsAbsolute(dir)
        codeDir <- file.path(dir, "R")
        if(fileTest("-d", codeDir))    # could be data-only
            codeFiles <- listFilesWithType(codeDir, "code")
        docsDir <- file.path(dir, "man")
        if(fileTest("-d", docsDir))
            docsFiles <- listFilesWithType(docsDir, "docs")
    }
    else if(!missing(file)) {
        if(!fileTest("-f", file))
            stop(paste("file", sQuote(file), "does not exist"))
        else
            codeFiles <- file
    }
    else
        stop(paste("you must specify ", sQuote("package"), ", ",
                   sQuote("dir"), " or ", sQuote("file"), sep = ""))

    findTnFInCode <- function(file, txt) {
        ## If 'txt' is given, it contains the extracted examples from
        ## the R documentation file 'file'.  Otherwise, 'file' gives a
        ## file with (just) R code.
        ## Returns the list of parent expressions containing a T or F
        ## symbol (a bare top-level T/F has no parent and is skipped).
        matches <- list()
        TnF <- c("T", "F")
        findBadExprs <- function(e, p) {
            if(is.name(e)
               && (as.character(e) %in% TnF)
               && !is.null(p)) {
                ## Need the 'list()' to deal with T/F in function
                ## arglists which are pairlists ...
                matches <<- c(matches, list(p))
            }
            else if(is.recursive(e)) {
                ## Recurse, passing the current expression as parent.
                for(i in seq(along = e)) Recall(e[[i]], e)
            }
        }
        if(missing(txt)) {
            exprs <- try(parse(file = file, n = -1))
            if(inherits(exprs, "try-error"))
                stop(paste("parse error in file", sQuote(file)))
        }
        else {
            exprs <- try(parse(text = txt))
            if(inherits(exprs, "try-error"))
                stop(paste("parse error in examples from file",
                           sQuote(file)))
        }
        for(i in seq(along = exprs))
            findBadExprs(exprs[[i]], NULL)
        matches
    }

    ## Scan the code files, collecting matches by file name.
    badExprs <- list()
    for(file in codeFiles) {
        exprs <- findTnFInCode(file)
        if(length(exprs) > 0) {
            exprs <- list(exprs)
            names(exprs) <- file
            badExprs <- c(badExprs, exprs)
        }
    }
    ## Scan the example code in the Rd sources, if any.
    for(file in docsFiles) {
        txt <- paste(Rdpp(readLines(file)), collapse = "\n")
        txt <- .getRdExampleCode(txt)
        exprs <- findTnFInCode(file, txt)
        if(length(exprs) > 0) {
            exprs <- list(exprs)
            names(exprs) <- file
            badExprs <- c(badExprs, exprs)
        }
    }
    class(badExprs) <- "checkTnF"
    badExprs
}

print.checkTnF <-
function(x, ...)
{
    ## One section per file, with one wrapped "found T/F in ..." line
    ## per offending expression.
    for(fname in names(x)) {
        writeLines(paste("File ", sQuote(fname), ":", sep = ""))
        for(expr in x[[fname]]) {
            msg <- paste("found T/F in",
                         paste(deparse(expr), collapse = ""))
            writeLines(strwrap(msg, exdent = 4))
        }
        writeLines("")
    }
    invisible(x)
}

### * .checkPackageDepends

## .checkPackageDepends: for an installed package, check that (i) all
## packages listed in the DESCRIPTION Depends and Suggests fields are
## installed, (ii) all vignette dependencies are listed there (or are
## the package itself), and (iii) all namespace dependencies on
## non-base packages are listed in Depends.  Returns an object of
## class "checkPackageDepends" with (possibly missing) components
## 'requiredButNotInstalled', 'missingVignetteDepends' and
## 'missingNamespaceDepends'.
.checkPackageDepends <-
function(package)
{
    if(length(package) != 1)
        stop(paste("argument", sQuote("package"),
                   "must be of length 1"))
    dir <- .find.package(package)

    ## We definitely need a valid DESCRIPTION file.
    db <- try(read.dcf(file.path(dir, "DESCRIPTION"))[1, ],
              silent = TRUE)
    if(inherits(db, "try-error"))
        stop(paste("package directory", sQuote(dir),
                   "has no valid DESCRIPTION file"))

    packageName <- basename(dir)
    ## (Should really use db["Package"], but then we need to check
    ## whether this is really there ...)
    ## Extract package names from the Depends field: strip leading
    ## whitespace and version specs, and drop the entry for R itself.
    if("Depends" %in% names(db)) {
        depends <- unlist(strsplit(db["Depends"], ","))
        depends <-
            sub("^[[:space:]]*([[:alnum:].]+).*$", "\\1", depends)
        depends <- depends[depends != "R"]
    }
    else
        depends <- character()
    ## Likewise for the Suggests field.
    if("Suggests" %in% names(db)) {
        suggests <- unlist(strsplit(db["Suggests"], ","))
        suggests <-
            sub("^[[:space:]]*([[:alnum:].]+).*$", "\\1", suggests)
    }
    else
        suggests <- character()

    badDepends <- list()

    ## Are all packages listed in Depends/Suggests installed?
    reqs <- unique(c(depends, suggests))
    reqs <- reqs[!reqs %in%
                 utils::installed.packages()[ , "Package"]]
    if(length(reqs))
        badDepends$requiredButNotInstalled <- reqs

    ## Are all vignette dependencies at least suggested or equal to
    ## the package name?
    vignetteDir <- file.path(dir, "doc")
    if(fileTest("-d", vignetteDir)
       && length(listFilesWithType(vignetteDir, "vignette"))) {
        reqs <- .buildVignetteIndex(dir)$Depends
        reqs <- reqs[!reqs %in% c(depends, suggests, packageName)]
        if(length(reqs))
            badDepends$missingVignetteDepends <- reqs
    }

    ## Are all namespace dependencies listed as package dependencies?
    if(fileTest("-f", file.path(dir, "NAMESPACE"))) {
        reqs <- .getNamespacePackageDepends(dir)
        ## <FIXME>
        ## Not clear whether we want to require *all* namespace package
        ## dependencies listed in DESCRIPTION, or e.g. just the ones on
        ## non-base packages.  Do the latter for time being ...
        basePackageNames <-
            utils::installed.packages(priority = "base")[, "Package"]
        reqs <- reqs[!reqs %in% c(depends, basePackageNames)]
        ## </FIXME>
        if(length(reqs))
            badDepends$missingNamespaceDepends <- reqs
    }

    class(badDepends) <- "checkPackageDepends"
    badDepends
}

print.checkPackageDepends <- function(x, ...) {
    ## Report each non-empty component under its heading, followed by
    ## a blank separator line.
    headings <-
        c(requiredButNotInstalled =
          "Packages required but not available:",
          missingVignetteDepends =
          "Vignette dependencies not required:",
          missingNamespaceDepends =
          "Namespace dependencies not required:")
    for(what in names(headings)) {
        bad <- x[[what]]
        if(length(bad)) {
            writeLines(headings[[what]])
            .prettyPrint(bad)
            writeLines("")
        }
    }
    invisible(x)
}

### * as.alist.call

## as.alist.call: convert a call into an argument list suitable for
## use as formals: unnamed elements become argument names with an
## empty (missing) default value, while named elements keep their
## values as defaults.
as.alist.call <-
function(x)
{
    y <- as.list(x)
    ## Positions lacking a name: all of them when there are no names
    ## at all, otherwise those with an empty name.
    ind <- if(is.null(names(y)))
        seq_along(y)
    else
        which(names(y) == "")
    ## (Test the length explicitly: 'ind' holds indices, not logicals,
    ## so any(ind) only worked by accident.)
    if(length(ind) > 0) {
        names(y)[ind] <- as.character(y[ind])
        ## The 'missing argument' default value, as produced by alist().
        y[ind] <- rep.int(list(alist(irrelevant = )[[1]]), length(ind))
    }
    y
}

### * as.alist.symbol

as.alist.symbol <-
function(x)
{
    ## An argument list for the function named by symbol 'x': build
    ## the corresponding zero-argument call and convert that.
    theCall <- call(as.character(x))
    as.alist.call(theCall)
}

### * .argNamesFromCall

.argNamesFromCall <-
function(x)
{
    ## Deparsed elements of the call, with named elements replaced by
    ## their names.
    out <- as.character(x)
    nms <- names(x)
    if(!is.null(nms)) {
        named <- nzchar(nms)
        out[named] <- nms[named]
    }
    out
}

### * .functionsToBeIgnoredFromUsage

.functionsToBeIgnoredFromUsage <-
function(packageName)
{
    ## Assignment operators are always ignored from \usage; a few
    ## packages document additional language constructs.
    extras <- switch(packageName,
                     base = c("(", "{", "function", "if", "for",
                              "while", "repeat"),
                     utils = "?",
                     methods = "@",
                     character())
    c("<-", "=", extras)
}

### * .functionsWithNoUsefulS3methodMarkup

.functionsWithNoUsefulS3methodMarkup <-
    ## Currently there is no useful Rd markup for S3 Ops group methods
    ## nor for S3 methods for subscripting and subassigning.
    c(c("+", "-", "*", "/", "^", "<", ">", "<=", ">=", "!=",
        "==", "%%", "%/%", "&", "|", "!"),      # Ops group
      c("[", "[[", "$"),                        # subscripting
      c("[<-", "[[<-", "$<-"))                  # subassigning


### * .isCallFromReplacementFunctionUsage

.isCallFromReplacementFunctionUsage <-
function(x)
{
    ## A replacement function \usage entry parses as
    ##   `<-`(CALL, SYMBOL)
    ## i.e. a length-3 assignment whose LHS is itself a call and whose
    ## RHS is a single symbol (the 'value' placeholder).
    if(length(x) != 3)
        return(FALSE)
    if(!identical(x[[1]], as.symbol("<-")))
        return(FALSE)
    (length(x[[2]]) > 1) && is.symbol(x[[3]])
}

### * .packageEnv

.packageEnv <-
function(packageName)
{
    ## Environment of an attached package, i.e. the "package:<name>"
    ## entry on the search path.
    as.environment(sprintf("package:%s", packageName))
}

### * .parseTextAsMuchAsPossible

## .parseTextAsMuchAsPossible: parse as much of the text 'txt' as
## possible.  If the whole text parses, return the expressions;
## otherwise, greedily parse line by line (extending each candidate
## chunk downwards until it parses), recording unparseable lines in
## the "badLines" attribute of the result.
.parseTextAsMuchAsPossible <-
function(txt)
{
    exprs <- try(parse(text = txt), silent = TRUE)
    if(!inherits(exprs, "try-error")) return(exprs)
    exprs <- expression()
    lines <- unlist(strsplit(txt, "\n"))
    badLines <- character()
    while((n <- length(lines)) > 0) {
        ## Grow a chunk starting at the first remaining line until it
        ## parses or we run out of lines.
        i <- 1; txt <- lines[1]
        while(inherits(yy <- try(parse(text = txt), silent = TRUE),
                       "try-error")
              && (i < n)) {
            ## Join with a newline: the former 'collapse = "\n"' was a
            ## no-op on scalars and joined with a space, so a trailing
            ## '#' comment swallowed all subsequent lines.
            i <- i + 1; txt <- paste(txt, lines[i], sep = "\n")
        }
        if(inherits(yy, "try-error")) {
            ## Nothing starting at the first line parses: record it as
            ## bad and move on.
            badLines <- c(badLines, lines[1])
            lines <- lines[-1]
        }
        else {
            exprs <- c(exprs, yy)
            lines <- lines[-seq(length = i)]
        }
    }
    attr(exprs, "badLines") <- badLines
    exprs
}

### * .parse_usage_as_much_as_possible

.parse_usage_as_much_as_possible <-
function(txt)
{
    ## Turn Rd \usage markup into parseable R code: substitute literal
    ## dots for \dots and \ldots, unescape percent signs, and quote the
    ## S3/S4 method markup so that it parses as a string literal.
    txt <- gsub("\\\\%", "%", gsub("\\\\l?dots", "...", txt))
    txt <- gsub(.S3_method_markup_regexp, "\"\\\\\\1\"", txt)
    txt <- gsub(.S4_method_markup_regexp, "\"\\\\\\1\"", txt)
    ## Hand the transformed text to the greedy line-wise parser.
    .parseTextAsMuchAsPossible(txt)
}

### * .prettyPrint

.prettyPrint <-
function(x)
{
    ## Show the elements of 'x' as one space-separated paragraph,
    ## wrapped and indented by two spaces.
    joined <- paste(x, collapse = " ")
    writeLines(strwrap(joined, indent = 2, exdent = 2))
}

### * .transformS3methodMarkup

.transformS3methodMarkup <-
function(x)
{
    ## Turn Rd method markup into the dispatched function name:
    ##   \method{GENERIC}{CLASS}    ->  GENERIC.CLASS
    ##   \method{GENERIC}{CLASS}<-  ->  GENERIC<-.CLASS
    ## (the latter is how S3 replacement methods come out).
    pattern <- "\\\\(S3)?method{([.[:alnum:]]*)}{([.[:alnum:]]*)}(<-)?"
    sub(pattern, "\\2\\4.\\3", x)
}

### * .S3_method_markup_regexp

.S3_method_markup_regexp <-
    ## Matches Rd usage markup \method{GENERIC}{CLASS} or
    ## \S3method{GENERIC}{CLASS}; group 1 captures the whole construct.
    "(\\\\(S3)?method{([.[:alnum:]]*)}{([.[:alnum:]]*)})"
    
### * .S4_method_markup_regexp

.S4_method_markup_regexp <-
    ## Matches Rd usage markup \S4method{GENERIC}{SIGNATURE}, where the
    ## signature may be a comma-separated list of classes; group 1
    ## captures the whole construct.
    "(\\\\S4method{([.[:alnum:]]*)}{([.[:alnum:],]*)})"


### Local variables: ***
### mode: outline-minor ***
### outline-regexp: "### [*]+" ***
### End: ***
### * Rdpp

## Rdpp: preprocess lines of Rd markup, stripping Rd comments and
## resolving the platform conditionals #ifdef/#ifndef/#endif according
## to .Platform$OS.type.  Returns the lines that survive.
Rdpp <-
function(lines)
{
    ## Preprocess lines with Rd markup according to .Platform$OS.type.

    if(!is.character(lines))
        ## (Fix: quote the argument *name*, not its value, matching
        ## the other messages in this file.)
        stop(paste("argument", sQuote("lines"),
                   "must be a character vector"))

    ## Strip Rd comments first.
    lines <- .stripRdComments(lines)

    ## Indices of the preprocessor directive lines.
    ppLineIndices <- grep("^#(endif|ifn?def[[:space:]]+[[:alnum:]]+)",
                          lines)
    ## <NOTE>
    ## This is based on the Perl code in R::Rdtools::Rdpp().
    ## What should we do with #ifn?def lines not matching the above?
    ## </NOTE>
    nOfPpLines <- length(ppLineIndices)
    if(nOfPpLines == 0) return(lines)

    OS <- .Platform$OS.type
    ppLines <- lines[ppLineIndices]

    ## Record the preprocessor line type: starts of conditionals with
    ## TRUE/FALSE according to whether they increase the skip level or
    ## not, and NA for ends of conditionals.
    ppTypes <- rep(NA, nOfPpLines)
    if(any(i <- grep("^#ifdef", ppLines))) {
        ## #ifdef skips unless its OS matches the current one.
        ppTypes[i] <- gsub("^#ifdef[[:space:]]+([[:alnum:]]+).*",
                           "\\1", ppLines[i]) != OS
    }
    if(any(i <- grep("^#ifndef", ppLines))) {
        ## #ifndef skips when its OS matches the current one.
        ppTypes[i] <- gsub("^#ifndef[[:space:]]+([[:alnum:]]+).*",
                           "\\1", ppLines[i]) == OS
    }

    ## Looks stupid, but ... we need a loop to determine the skip list
    ## to deal with nested conditionals.
    skipList <- integer(0)              # stack of per-level skip flags
    skipLevel <- 0
    skipIndices <- ppLineIndices        # directive lines always go
    for(i in seq(along = ppTypes)) {
        if(!is.na(skip <- ppTypes[i])) {
            ## Start of a conditional.
            if(skipLevel == 0 && skip > 0) {
                skipStart <- ppLineIndices[i]
                skipLevel <- 1
            }
            else
                skipLevel <- skipLevel + skip
            skipList <- c(skip, skipList) # push
        }
        else {
            ## End of a conditional (#endif): when leaving the
            ## outermost skipped region, mark all of its lines.
            if(skipLevel == 1 && skipList[1] > 0) {
                skipIndices <- c(skipIndices,
                                 seq(from = skipStart,
                                     to = ppLineIndices[i]))
                skipLevel <- 0
            }
            else
                skipLevel <- skipLevel - skipList[1]
            skipList <- skipList[-1]    # pop
        }
    }

    lines[-skipIndices]
}

### * .stripRdComments

.stripRdComments <-
function(lines)
{
    ## Remove Rd comments: everything from an unescaped '%' (i.e. a
    ## '%' at the start of the line or preceded by a non-backslash
    ## character plus an even number of backslashes) to end of line.
    commentRE <- "(^|[^\\])((\\\\\\\\)*)%.*"
    gsub(commentRE, "\\1\\2", lines)
}

### * Rdinfo

Rdinfo <-
function(file)
{
    ## Compute basic meta-information (name, docType, title, aliases,
    ## concepts, keywords) for a single Rd file or connection.

    ## <NOTE>
    ## This is based on the Perl code in R::Rd::info().
    ## It seems that matches for aliases and keywords are only single
    ## line.  Hence, as we get the lines from @code{Rdpp()}, we get
    ## aliases and keywords directly from them before collapsing them to
    ## one string (which also allows us to avoid looping as in the Perl
    ## code).
    ## </NOTE>

    if(is.character(file)) {
        file <- file(file)
        on.exit(close(file))
    }
    if(!inherits(file, "connection"))
        ## (Was sQuote(file), i.e. quoting the value rather than the
        ## argument name.)
        stop(paste("argument", sQuote("file"),
                   "must be a character string or connection"))

    lines <- Rdpp(readLines(file))

    aliases <- .getRdMetaDataFromRdLines(lines, "alias")
    concepts <- .getRdMetaDataFromRdLines(lines, "concept")
    keywords <- .getRdMetaDataFromRdLines(lines, "keyword")

    ## Could be none or more than one ... argh.
    RdType <- c(.getRdMetaDataFromRdLines(lines, "docType"), "")[1]

    txt <- paste(lines, collapse = "\n")

    RdName <- .getRdName(txt)
    if(!length(RdName))
        stop(paste("missing/empty \\name field in ",
                   sQuote(summary(file)$description), "\n",
                   "Rd files must have a non-empty \\name.\n",
                   "See chapter ", sQuote("Writing R documentation"),
                   " in manual ", sQuote("Writing R Extensions"),
                   ".", sep = ""))

    RdTitle <- .getRdTitle(txt)
    if(!length(RdTitle))
        stop(paste("missing/empty \\title field in ",
                   sQuote(summary(file)$description), "\n",
                   "Rd files must have a non-empty \\title.\n",
                   "See chapter ", sQuote("Writing R documentation"),
                   " in manual ", sQuote("Writing R Extensions"),
                   ".", sep = ""))

    list(name = RdName, type = RdType, title = RdTitle,
         aliases = aliases, concepts = concepts, keywords = keywords)
}

### * Rdcontents

Rdcontents <-
function(RdFiles)
{
    ## Compute a contents db (data frame) from Rd files, with one row
    ## per file giving name, docType, title, aliases, concepts and
    ## keywords.

    RdFiles <- path.expand(RdFiles[fileTest("-f", RdFiles)])

    if(length(RdFiles) == 0)
        return(data.frame(File = I(character(0)),
                          Name = I(character(0)),
                          Type = I(character(0)),
                          Title = I(character(0)),
                          Aliases = I(list()),
                          Concepts = I(list()),
                          Keywords = I(list())))

    contents <- vector("list", length(RdFiles) * 6)
    dim(contents) <- c(length(RdFiles), 6)
    for(i in seq_along(RdFiles)) {
        contents[i, ] <- Rdinfo(RdFiles[i])
    }
    colnames(contents) <-
        c("Name", "Type", "Title", "Aliases", "Concepts", "Keywords")

    ## Although R-exts says about the Rd title slot that
    ## <QUOTE>
    ##   This should be capitalized, not end in a period, and not use
    ##   any markup (which would cause problems for hypertext search).
    ## </QUOTE>
    ## some Rd files have LaTeX-style markup, including
    ## * LaTeX-style single and double quotation
    ## * Medium and punctuation dashes
    ## * Escaped ampersand.
    ## Hence we try getting rid of these ...
    ## (The patterns below previously used '\(', '\|' and '\&', which
    ## are invalid escape sequences inside R string literals.)
    title <- unlist(contents[ , "Title"])
    title <- gsub("(``|'')", "\"", title)
    title <- gsub("`", "'", title)
    title <- gsub("([[:alnum:]])--([[:alnum:]])", "\\1-\\2", title)
    title <- gsub("\\\\&", "&", title)
    title <- gsub("---", "--", title)
    ## Also remove leading and trailing whitespace.
    title <- sub("^[[:space:]]*", "", title)
    title <- sub("[[:space:]]*$", "", title)

    data.frame(File = I(basename(RdFiles)),
               Name = I(unlist(contents[ , "Name"])),
               Type = I(unlist(contents[ , "Type"])),
               Title = I(title),
               Aliases = I(contents[ , "Aliases"]),
               Concepts = I(contents[ , "Concepts"]),
               Keywords = I(contents[ , "Keywords"]),
               row.names = NULL)  # avoid trying to compute row names
}

### * .writeContentsRDS

.writeContentsRDS <-
function(contents, outFile)
{
    ## Save Rd contents db to @file{outFile}.
    ## 'contents' is the data frame produced by Rdcontents(); it is
    ## serialized as-is via the internal .saveRDS().

    ## <NOTE>
    ## To deal with possible changes in the format of the contents db
    ## in the future, use a version attribute and/or a formal class.
    .saveRDS(contents, file = outFile)
    ## </NOTE>
}

### * .writeContentsDCF

.writeContentsDCF <-
function(contents, packageName, outFile)
{
    ## Write a @file{CONTENTS} DCF file from an Rd contents db.
    ## Note that these files currently have @samp{URL:} entries which
    ## contain the package name, whereas @code{Rdcontents()} works on
    ## collections of Rd files which do not necessarily all come from
    ## the same package ...

    ## If the contents is 'empty', return immediately.  (Otherwise,
    ## e.g. URLs would not be right ...)
    if(!NROW(contents)) return()

    ## <FIXME>
    ## This has 'html' hard-wired.
    ## Note that slashes etc. should be fine for URLs.
    URLs <- paste("../../../library/", packageName, "/html/",
                  filePathSansExt(contents[ , "File"]),
                  ".html",
                  sep = "")
    ## </FIXME>

    ## Reduce the db to the four fields written out, collapsing the
    ## alias and keyword lists into single space-separated strings.
    if(is.data.frame(contents))
        contents <-
            cbind(contents$Name,
                  sapply(contents$Aliases, paste, collapse = " "),
                  sapply(contents$Keywords, paste, collapse = " "),
                  contents$Title)
    else
        contents <-
            contents[, c("Name", "Aliases", "Keywords", "Title"),
                     drop = FALSE]

    ## Interleave the field tags with the transposed record matrix;
    ## the recycled 'sep' emits a newline after each field and a blank
    ## line after each record's final URL field.
    cat(paste(c("Entry:", "Aliases:", "Keywords:", "Description:",
                "URL:"),
              t(cbind(contents, URLs))),
        sep = c("\n", "\n", "\n", "\n", "\n\n"),
        file = outFile)
}

### * .buildRdIndex

.buildRdIndex <-
function(contents, type = NULL)
{
    ## Build an Rd 'index' containing Rd "names" (see below) and titles,
    ## maybe subscripted according to the Rd type (\docType).

    keywords <- contents[ , "Keywords"]

    if(!is.null(type)) {
        idx <- contents[ , "Type"] %in% type
        ## Argh.  Ideally we only want to subscript according to
        ## \docType.  Maybe for 2.0 ...
        if(type == "data")
            idx <- idx | keywords == "datasets"
        ## (Note: we really only want the Rd objects which have
        ## 'datasets' as their *only* keyword.)
        contents <- contents[idx, , drop = FALSE]
        keywords <- keywords[idx]
    }

    ## Drop all Rd objects marked as 'internal' from the index.
    idx <- is.na(sapply(keywords, function(x) match("internal", x)))

    index <- contents[idx, c("Name", "Title"), drop = FALSE]
    if(nrow(index)) {
        ## If a \name is not a valid \alias, replace it by the first alias.
        aliases <- contents[idx, "Aliases"]
        bad <- which(!mapply("%in%", index[, 1], aliases))
        ## (Test the number of bad entries: any() on the indices that
        ## which() returns would rely on coercing them to logical.)
        if(length(bad)) {
            tmp <- sapply(aliases[bad], "[[", 1)
            tmp[is.na(tmp)] <- ""
            index[bad, 1] <- tmp
        }
        ## and sort it by name
        index <- index[sort.list(index[,1]), ]
    }
    index
}

### * Rdindex

Rdindex <-
function(RdFiles, outFile = "", type = NULL,
         width = 0.9 * getOption("width"), indent = NULL)
{
    ## Write an @file{INDEX} or @file{data/00Index} style file for the
    ## given Rd files.
    ##
    ## R version of defunct @code{R CMD Rdindex} (now removed).

    if((length(RdFiles) == 1) && fileTest("-d", RdFiles)) {
        ## A single directory was given (compatibility with the former
        ## @code{R CMD Rdindex} interface): use the Rd sources below it.
        docsDir <- if(fileTest("-d", file.path(RdFiles, "man")))
            file.path(RdFiles, "man")
        else
            RdFiles
        RdFiles <- listFilesWithType(docsDir, "docs")
    }

    if(outFile == "")
        outFile <- stdout()
    else if(is.character(outFile)) {
        outFile <- file(outFile, "w")
        on.exit(close(outFile))
    }
    if(!inherits(outFile, "connection"))
        stop(paste("argument", sQuote("outFile"),
                   "must be a character string or connection"))

    index <- .buildRdIndex(Rdcontents(RdFiles), type = type)
    writeLines(formatDL(index, width = width, indent = indent),
               outFile)
}

### * Rd2contents

Rd2contents <-
function(dir, outFile = "")
{
    ## Write a CONTENTS style DCF file for the package sources in
    ## 'dir'.
    ##
    ## <NOTE>
    ## Based on the defunct Perl code in R_HOME/share/Rd2contents.pl
    ## (now removed).
    ## </NOTE>

    if(!fileTest("-d", dir))
        stop(paste("directory", sQuote(dir), "does not exist"))
    dir <- filePathAsAbsolute(dir)
    packageName <- basename(dir)
    docsDir <- file.path(dir, "man")
    if(!fileTest("-d", docsDir))
        stop(paste("directory", sQuote(dir),
                   "does not contain Rd sources"))

    if(outFile == "")
        outFile <- stdout()
    else if(is.character(outFile)) {
        outFile <- file(outFile, "w")
        on.exit(close(outFile))
    }
    if(!inherits(outFile, "connection"))
        stop(paste("argument", sQuote("outFile"),
                   "must be a character string or connection"))

    .writeContentsDCF(Rdcontents(listFilesWithType(docsDir, "docs")),
                      packageName, outFile)
}

### * Rddb

Rddb <-
function(package, dir, lib.loc = NULL)
{
    ## Build an Rd 'data base' from an installed package or the unpacked
    ## package sources as a list containing the 'raw' R documentation
    ## objects obtained via readLines().

    ## Argument handling.
    if(!missing(package)) {
        if(length(package) != 1)
            stop(paste("argument", sQuote("package"),
                       "must be of length 1"))
        dir <- .find.package(package, lib.loc)
        ## Using package installed in @code{dir}: the individual Rd
        ## sources were concatenated with '\eof' separator lines.
        docsDir <- file.path(dir, "man")
        if(!fileTest("-d", docsDir))
            stop(paste("directory", sQuote(dir),
                       "does not contain Rd objects"))
        docsFiles <- listFilesWithType(docsDir, "docs")
        db <- list()
        for(f in docsFiles) {
            lines <- readLines(f)
            ## Match the literal '\eof' marker: this needs a doubled
            ## backslash in the regexp ("\\eof$" would also match any
            ## ordinary line ending in 'eof').
            eofPos <- grep("\\\\eof$", lines)
            db <- c(db, split(lines,
                              rep(seq_along(eofPos),
                                  times = diff(c(0, eofPos)))))
        }
    }
    else {
        if(missing(dir))
            stop(paste("you must specify", sQuote("package"),
                       "or", sQuote("dir")))
        ## Using sources from directory @code{dir} ...
        if(!fileTest("-d", dir))
            stop(paste("directory", sQuote(dir), "does not exist"))
        else
            dir <- filePathAsAbsolute(dir)
        docsDir <- file.path(dir, "man")
        if(!fileTest("-d", docsDir))
            stop(paste("directory", sQuote(dir),
                       "does not contain Rd sources"))
        docsFiles <- listFilesWithType(docsDir, "docs")
        db <- lapply(docsFiles, readLines)
    }

    db
}

### * getRdSection

getRdSection <-
function(txt, type, predefined = TRUE)
{
    ## Extract Rd section(s) 'type' from (preprocessed) Rd markup in the
    ## character string 'txt'.  Use 'predefined = FALSE' for dealing
    ## with user-defined sections.

    ## <NOTE>
    ## This is *not* vectorized.  As we try extracting *all* top-level
    ## sections of the given type, computations on a single character
    ## string can result in a character vector of arbitrary length.
    ## Hence, a vectorized version would return its results similar to
    ## e.g. strsplit(), i.e., a list of character vectors.  Worth the
    ## effort?
    ## </NOTE>

    out <- character()
    if(length(txt) != 1)
        stop("'txt' must be a character string")
    ## A section starts with "\type{" (or "\section{type}{" for
    ## user-defined sections) at the beginning of a line, possibly
    ## preceded by whitespace.
    pattern <- paste("(^|\n)[[:space:]]*\\\\",
                     ifelse(predefined, type,
                            paste("section{", type, "}",
                                  sep = "")),
                     "{",
                     sep = "")
    while((pos <- regexpr(pattern, txt)) != -1) {
        ## Reposition 'txt' so that it starts at the section's opening
        ## brace, then look for the matching closing brace.
        txt <- substring(txt, pos + attr(pos, "match.length") - 1)
        pos <- delimMatch(txt)
        if(pos == -1) {
            if((type == "alias") && predefined) {
                ## \alias entries seem to be special (Paren.Rd).
                ## The regexp below feels wrong, but is based on what is
                ## used in Perl's R::Rdlists::build_index(), sort of.
                pos <- regexpr("{([^\n]*)}(\n|$)", txt)
            }
            if(pos == -1)
                stop(paste("unterminated section", sQuote(type)))
            else {
                out <- c(out, sub("{([^\n]*)}(\n|$).*", "\\1", txt))
                txt <- substring(txt, pos + attr(pos, "match.length"))
                next
            }

        }
        ## Collect the brace-delimited content (without the braces
        ## themselves) and continue scanning after the closing brace.
        out <- c(out,
                 substring(txt,
                           pos + 1,
                           pos + attr(pos, "match.length") - 2))
        txt <- substring(txt, pos + attr(pos, "match.length"))
    }
    out
}

### * getRdItems

getRdItems <-
function(txt)
{
    ## Extract names of Rd \item{}{} markup in the character string
    ## 'txt'.  Stops with an informative error if an \item's name or
    ## description is not properly brace-delimited.
    out <- character()
    if(length(txt) != 1)
        stop("'txt' must be a character string")
    pattern <- "(^|\n)[[:space:]]*\\\\item{"
    while((pos <- regexpr(pattern, txt)) != -1) {
        ## Reposition 'txt' at the opening brace of the item name and
        ## find the matching closing brace.
        txt <- substring(txt, pos + attr(pos, "match.length") - 1)
        if((pos <- delimMatch(txt)) == -1)
            stop(paste("unmatched \\item name in",
                       sQuote(paste("\\item{",
                                    sub("\n.*$", "", txt),
                                    sep = ""))),
                 call. = FALSE)
        ## Collect the item name (between the braces).
        out <- c(out,
                 substring(txt,
                           pos + 1,
                           pos + attr(pos, "match.length") - 2))
        txt <- substring(txt, pos + attr(pos, "match.length"))
        ## The next character should really be a '{'.  Let's be nice
        ## and tolerate whitespace in between ...
        if((pos <- regexpr("^[[:space:]]*{", txt)) == -1)
            stop(paste("no \\item description for item",
                       sQuote(out[length(out)])),
                 call. = FALSE)
        txt <- substring(txt, pos + attr(pos, "match.length") - 1)
        if((pos <- delimMatch(txt)) == -1)
            stop(paste("unmatched \\item description for item",
                       sQuote(out[length(out)])),
                 call. = FALSE)
        ## Skip past the description and look for further items.
        txt <- substring(txt, pos + attr(pos, "match.length"))
    }
    out
}

### * .getRdMetaDataFromRdLines

.getRdMetaDataFromRdLines <-
function(lines, kind) {
    ## Extract the values of all single-line \kind{...} entries (e.g.
    ## kind = "alias" or "keyword") from Rd source lines, with escaped
    ## percent signs unescaped.
    re <- paste("^[[:space:]]*\\\\", kind,
                "{[[:space:]]*(.*)[[:space:]]*}.*", sep = "")
    values <- sub(re, "\\1", grep(re, lines, value = TRUE))
    gsub("\\\\%", "%", values)
}

### * .getRdArgumentNames

.getRdArgumentNames <-
function(txt)
{
    ## Collect the argument names documented in the \arguments section
    ## of Rd text 'txt': the \item names, split on commas, with
    ## \dots/\ldots normalized to '...' and surrounding whitespace
    ## stripped.
    items <- unlist(sapply(getRdSection(txt, "arguments"), getRdItems))
    if(!length(items)) return(character())
    items <- unlist(strsplit(items, ", *"))
    items <- gsub("\\\\l?dots", "...", items)
    items <- sub("^[[:space:]]*", "", items)
    sub("[[:space:]]*$", "", items)
}

### * .getRdName

.getRdName <-
function(txt)
{
    ## Extract the \name field from Rd text 'txt', or character() if
    ## there is none.  Internal whitespace runs are collapsed to a
    ## single space.
    ## (The character class was previously written as "[^\}]", an
    ## invalid string escape, and the normalization used
    ## "[[:space:]]*", which also matches -- and hence replaces -- the
    ## empty string between every pair of characters.)
    start <- regexpr("\\\\name{[[:space:]]*([^}]+)[[:space:]]*}", txt)
    if(start == -1) return(character())
    ## The name starts right after "\name{" (6 characters) and ends
    ## just before the closing brace.
    RdName <- gsub("[[:space:]]+", " ",
                   substr(txt,
                          start + 6,
                          start + attr(start, "match.length") - 2))
    RdName
}

### * .getRdTitle

.getRdTitle <-
function(txt)
{
    ## Extract the \title field from Rd text 'txt', or character() if
    ## there is none.  Internal whitespace runs are collapsed to a
    ## single space.
    ## (The character class was previously written as "[^\}]", an
    ## invalid string escape, and the normalization used
    ## "[[:space:]]*", which also matches -- and hence replaces -- the
    ## empty string between every pair of characters.)
    start <- regexpr("\\\\title{[[:space:]]*([^}]+)[[:space:]]*}", txt)
    if(start == -1) return(character())
    ## The title starts right after "\title{" (7 characters) and ends
    ## just before the closing brace.
    RdTitle <- gsub("[[:space:]]+", " ",
                    substr(txt,
                           start + 7,
                           start + attr(start, "match.length") - 2))
    RdTitle
}

### * .getRdExampleCode

.getRdExampleCode <-
function(txt)
{
    ## Extract the R code from the \examples section of (preprocessed)
    ## Rd markup 'txt': \dontrun chunks are removed entirely, while
    ## \dontshow and \testonly chunks are kept without their wrapping
    ## markup.
    txt <- getRdSection(txt, "examples")
    if(length(txt) != 1) return(character())
    txt <- gsub("\\\\l?dots", "...", txt)
    txt <- gsub("\\\\%", "%", txt)

    ## Now try removing \dontrun{}.
    ## Simple version of R::Rdconv::undefine_command().
    out <- character()
    pattern <- "\\\\dontrun\\{"
    while((pos <- regexpr(pattern, txt)) != -1) {
        ## Keep the text before the \dontrun; skip its brace-delimited
        ## contents entirely.
        out <- c(out, substring(txt, 1, pos - 1))
        txt <- substring(txt, pos + attr(pos, "match.length") - 1)
        if((pos <- delimMatch(txt)) == -1)
            stop("unclosed \\dontrun")
        txt <- substring(txt, pos + attr(pos, "match.length"))
    }
    txt <- paste(c(out, txt), collapse = "")
    ## Now try removing \dontshow{} and \testonly{}.
    ## Simple version of R::Rdconv::replace_command().
    out <- character()
    pattern <- "\\\\(testonly|dontshow)\\{"
    while((pos <- regexpr(pattern, txt)) != -1) {
        out <- c(out, substring(txt, 1, pos - 1))
        txt <- substring(txt, pos + attr(pos, "match.length") - 1)
        if((pos <- delimMatch(txt)) == -1)
            stop("unclosed \\dontshow or \\testonly")
        ## Keep the chunk contents (between the braces), dropping only
        ## the markup itself.
        out <- c(out,
                 substring(txt, 2, pos + attr(pos, "match.length") - 2))
        txt <- substring(txt, pos + attr(pos, "match.length"))
    }
    paste(c(out, txt), collapse = "")
}

### Local variables: ***
### mode: outline-minor ***
### outline-regexp: "### [*]+" ***
### End: ***
### * checkVignettes
###
### Run a tangle+source and a weave on all vignettes of a package.

checkVignettes <-
function(package, dir, lib.loc = NULL,
         tangle=TRUE, weave=TRUE,
         workdir=c("tmp", "src", "cur"),
         keepfiles = FALSE)
{
    ## Run a tangle+source and a weave on all vignettes of a package,
    ## collecting the errors encountered into a "checkVignettes"
    ## object.
    vigns <- pkgVignettes(package=package, dir=dir, lib.loc=lib.loc)
    if(is.null(vigns)) return(NULL)

    workdir <- match.arg(workdir)
    wd <- getwd()
    if(workdir=="tmp"){
        tmpd <- tempfile("Sweave")
        if (!dir.create(tmpd)) stop("Unable to create temp directory ",tmpd)
        setwd(tmpd)
    }
    else{
        ## Files are created next to the sources (or in the current
        ## directory), so never remove them afterwards.
        keepfiles <- TRUE
        if(workdir=="src") setwd(vigns$dir)
    }

    ## Divert all output and messages while processing the vignettes.
    outConn <- textConnection("out", "w")
    sink(outConn, type = "output")
    sink(outConn, type = "message")

    on.exit({sink(type = "output")
             sink(type = "message")
             ## Close the diversion connection as well (it was
             ## previously leaked).
             close(outConn)
             setwd(wd)
             if(!keepfiles) unlink(tmpd, recursive=TRUE)
         })

    result <- list(tangle=list(), weave=list(), source=list())

    for(f in vigns$docs){
        if(tangle){
            yy <- try(Stangle(f, quiet=TRUE))
            if(inherits(yy, "try-error"))
                result$tangle[[f]] <- yy
        }

        if(weave){
            yy <- try(Sweave(f, quiet=TRUE))
            if(inherits(yy, "try-error"))
                result$weave[[f]] <- yy
        }
    }

    if(tangle){
        ## source() every R file produced by tangling.
        rfiles <- listFilesWithExts(getwd(), c("r", "s", "R", "S"))
        for(f in rfiles){
            yy <- try(source(f))
            if(inherits(yy, "try-error"))
                result$source[[f]] <- yy
        }
    }

    class(result) <- "checkVignettes"
    result
}

print.checkVignettes <-
function(x, ...)
{
    ## Print method for "checkVignettes" objects: report the errors
    ## collected for each processing step, if any.
    showErrors <- function(errs, title) {
        if(length(errs) > 0) {
            cat("\n", title, "\n\n", sep="")
            for(i in seq_along(errs)) {
                cat("File", names(errs)[i], ":\n")
                cat(as.character(errs[[i]]), "\n")
            }
        }
    }

    showErrors(x$weave,  "*** Weave Errors ***")
    showErrors(x$tangle, "*** Tangle Errors ***")
    showErrors(x$source, "*** Source Errors ***")

    invisible(x)
}

### * pkgVignettes
###
### Get an object of class pkgVignettes which contains a list of Sweave
### files and the name of the directory which contains them.

pkgVignettes <- function(package, dir, lib.loc = NULL)
{
    ## Locate the Sweave vignette sources of a package (installed, via
    ## 'package', or unpacked sources, via 'dir') and return them
    ## together with their directory as a "pkgVignettes" object, or
    ## NULL if the package has no vignette directory.
    if(!missing(package)) {
        if(length(package) != 1)
            stop("argument 'package' must be of length 1")
        ## Installed package: vignettes live in the 'doc' directory.
        docdir <- file.path(.find.package(package, lib.loc), "doc")
    }
    else {
        if(missing(dir))
            stop("you must specify 'package' or 'dir'")
        if(!fileTest("-d", dir))
            stop(paste("directory", sQuote(dir), "does not exist"))
        ## Package sources: vignettes live in inst/doc.  Going through
        ## dirname()/basename() performs tilde expansion on 'dir'.
        docdir <- file.path(dirname(dir), basename(dir), "inst", "doc")
    }

    if(!fileTest("-d", docdir)) return(NULL)

    structure(list(docs = listFilesWithType(docdir, "vignette"),
                   dir = docdir),
              class = "pkgVignettes")
}

### * buildVignettes
###
### Run a weave and pdflatex on all vignettes of a package and try to
### remove all temporary files that were created.

buildVignettes <-
function(package, dir, lib.loc = NULL, quiet=TRUE)
{
    ## Weave every vignette of a package and run texi2dvi on the
    ## result (unless the vignette directory supplies a Makefile),
    ## then remove all intermediate files that were created.
    vigns <- pkgVignettes(package=package, dir=dir, lib.loc=lib.loc)
    if(is.null(vigns)) return(NULL)

    oldDir <- getwd()
    setwd(vigns$dir)
    on.exit(setwd(oldDir))

    startFiles <- list.files()
    useMakefile <- "makefile" %in% tolower(startFiles)

    pdfFiles <- character(0)
    for(doc in vigns$docs){
        doc <- basename(doc)
        base <- sub("\\..[^\\.]*$", "", doc)
        pdfFiles <- c(pdfFiles, paste(base, ".pdf", sep=""))

        res <- try(Sweave(doc, quiet=quiet))
        if(inherits(res, "try-error")) stop(res)
        if(!useMakefile){
            texi2dvi(file=paste(base, ".tex", sep=""),
                     pdf=TRUE, clean=FALSE, quiet=quiet)
        }
    }

    if(useMakefile) {
        status <- system(Sys.getenv("MAKE"))
        if(status > 0) stop("running make failed")
    }
    else {
        ## Remove everything that appeared during the build except the
        ## PDFs themselves.
        leftover <- list.files()
        file.remove(leftover[!(leftover %in% c(pdfFiles, startFiles))])
    }
    invisible(NULL)
}

### * .buildVignetteIndex


vignetteMetaRE <- function(tag)
{
    ## Regexp matching a %\Vignette<tag>{value} meta-information
    ## comment line; the value is captured as group 1.
    ## (The braces must be escaped as "\\{" / "\\}": a plain "\{" is
    ## an invalid escape sequence inside an R string literal.)
    paste("[[:space:]]*%+[[:space:]]*\\\\Vignette", tag,
          "\\{([^}]*)\\}", sep = "")
}

vignetteInfo <- function(file) {
    ## Extract \VignetteIndexEntry, \VignetteDepends and
    ## \VignetteKeyword(s) meta-information from a vignette source
    ## file.
    lines <- readLines(file)
    ## Helper: values of all \Vignette<tag>{...} entries in 'lines'.
    grab <- function(tag) {
        re <- vignetteMetaRE(tag)
        gsub(re, "\\1", grep(re, lines, value = TRUE))
    }
    ## \VignetteIndexEntry ("" if there is none)
    title <- c(grab("IndexEntry"), "")[1]
    ## \VignetteDepends
    depends <- grab("Depends")
    if(length(depends) > 0)
        depends <- unlist(strsplit(depends[1], ", *"))
    ## \VignetteKeyword and old-style \VignetteKeywords
    keywords <- grab("Keywords")
    keywords <- if(length(keywords) == 0)
        ## No old-style \VignetteKeywords entries found.
        grab("Keyword")
    else
        unlist(strsplit(keywords[1], ", *"))
    list(file = file, title = title, depends = depends,
         keywords = keywords)
}

.buildVignetteIndex <-
function(vignetteDir)
{
    ## Build a vignette index data frame (file, title, depends,
    ## keywords, PDF) from the vignette sources in 'vignetteDir'.
    if(!fileTest("-d", vignetteDir))
        stop(paste("directory", sQuote(vignetteDir), "does not exist"))
    vignetteFiles <-
        path.expand(listFilesWithType(vignetteDir, "vignette"))

    if(length(vignetteFiles) == 0)
        return(data.frame(File = I(character(0)),
                          Title = I(character(0)),
                          Depends = I(list()),
                          Keywords = I(list()),
                          PDF = I(character())))

    contents <- vector("list", length = length(vignetteFiles) * 4)
    dim(contents) <- c(length(vignetteFiles), 4)
    for(i in seq_along(vignetteFiles))
        contents[i, ] <- vignetteInfo(vignetteFiles[i])
    colnames(contents) <- c("File", "Title", "Depends", "Keywords")

    ## (Note that paste(character(0), ".pdf") does not do what we want.)
    vignettePDFs <- sub("$", ".pdf", filePathSansExt(vignetteFiles))

    vignetteTitles <- unlist(contents[, "Title"])

    ## Compatibility code for transition from old-style to new-style
    ## indexing.  If we have @file{00Index.dcf}, use it when computing
    ## the vignette index, but let the index entries in the vignettes
    ## override the ones from the index file.
    if(fileTest("-f",
                 INDEX <- file.path(vignetteDir, "00Index.dcf"))) {
        vignetteEntries <- try(read.dcf(INDEX))
        if(inherits(vignetteEntries, "try-error"))
            ## An unreadable index file is not fatal: keep the titles
            ## found in the vignettes themselves.  (Previously the
            ## try-error object was subscripted anyway.)
            warning(paste("cannot read index information in file",
                          sQuote(INDEX)))
        else {
            vignetteEntries <-
                cbind(colnames(vignetteEntries), c(vignetteEntries))
            pos <- match(basename(vignettePDFs),
                         vignetteEntries[ , 1], 0)
            ## Fill in only titles which are still empty and for which
            ## the index file has an entry.  (The old subscripting
            ## mixed which() indices with a logical mask and could
            ## misalign titles.)
            fill <- (pos != 0) & (vignetteTitles == "")
            vignetteTitles[fill] <- vignetteEntries[pos[fill], 2]
        }
    }

    vignettePDFs[!fileTest("-f", vignettePDFs)] <- ""
    vignettePDFs <- basename(vignettePDFs)

    data.frame(File = I(unlist(contents[, "File"])),
               Title = I(vignetteTitles),
               Depends = I(contents[, "Depends"]),
               Keywords = I(contents[, "Keywords"]),
               PDF = I(vignettePDFs),
               row.names = NULL) # avoid trying to compute row names
}

### * .checkVignetteIndex

.checkVignetteIndex <-
function(vignetteDir)
{
    ## Report the vignettes in 'vignetteDir' whose index entry (title)
    ## is missing or blank.
    if(!fileTest("-d", vignetteDir))
        stop(paste("directory", sQuote(vignetteDir), "does not exist"))
    index <- .buildVignetteIndex(vignetteDir)
    blank <- grep("^[[:space:]]*$", index[, "Title"])
    structure(index[blank, "File"], class = "checkVignetteIndex")
}

print.checkVignetteIndex <-
function(x, ...)
{
    ## Print method for "checkVignetteIndex" objects: list the
    ## vignette files which lack a usable \VignetteIndexEntry.
    if(length(x) > 0) {
        writeLines("Vignettes with missing or empty \\VignetteIndexEntry:")
        print(basename(filePathSansExt(unclass(x))), ...)
    }
    invisible(x)
}


### * .writeVignetteHTMLIndex

.writeVignetteHtmlIndex <- function(pkg, con, vignetteIndex=NULL)
{
    ## Write a simple HTML index page for the vignettes of package
    ## 'pkg' to connection 'con', linking each PDF listed in
    ## 'vignetteIndex' (as built by .buildVignetteIndex()).
    header <- c(paste("<html><head><title>R:", pkg, "vignettes</title>"),
                "<link rel=\"stylesheet\" type=\"text/css\" href=\"../../R.css\">",
                "</head><body>",
                paste("<h2>Vignettes of package", pkg,"</h2>"))

    body <- if(is.null(vignetteIndex) || nrow(vignetteIndex)==0)
        c("Sorry, the package contains no vignette meta-information or index.",
          "Please browse the <a href=\".\">directory</a>.")
    else {
        entries <- character(0)
        for(k in seq(1, nrow(vignetteIndex))){
            pdf <- vignetteIndex[k, "PDF"]
            entries <- c(entries,
                         paste("<dt><a href=\"", pdf, "\">",
                               pdf, "</a>:", sep=""),
                         paste("<dd>", vignetteIndex[k, "Title"]))
        }
        c("<dl>", entries, "</dl>")
    }

    writeLines(c(header, body, "</body></html>"), con=con)
}

vignetteDepends <- function(vignette, recursive=TRUE, reduce=TRUE,
                            local=TRUE, lib.loc=NULL) {
    ## Compute the dependency information implied by a vignette's
    ## \VignetteDepends meta-information, checked against the
    ## installed packages.
    if (length(vignette) != 1)
        stop("Argument 'vignette' must be of length 1")
    if (!file.exists(vignette))
        stop("File: ", vignette, " not found.")

    deps <- vignetteInfo(vignette)$depends
    getDepList(getVigDepMtrx(deps),
               utils::installed.packages(lib.loc=lib.loc),
               recursive, local, reduce, lib.loc)
}

getVigDepMtrx <- function(vigDeps) {
    ## Turn the character vector of \VignetteDepends entries into a
    ## dependency matrix with columns package, operator, version (as
    ## used by 'package.dependencies'), or NA if there are none.
    ## Taken almost directly out of 'package.dependencies'.
    if (length(vigDeps) > 0) {
        z <- unlist(strsplit(vigDeps, ",", fixed=TRUE))
        ## Strip leading and trailing whitespace.  (The previous
        ## trailing trim, sub("(.*)[[:space:]]*$", "\\1", z), was a
        ## no-op: the greedy ".*" always consumed the whole string.)
        z <- sub("^[[:space:]]+", "", z)
        z <- sub("[[:space:]]+$", "", z)
        pat <- "^([^\\([:space:]]+)[[:space:]]*\\(([^\\)]+)\\).*"
        depMtrx <- cbind(sub(pat, "\\1", z), sub(pat, "\\2",
                                                 z), NA)
        ## Entries without a "(op version)" part leave both captures
        ## equal to the full string.
        noversion <- depMtrx[, 1] == depMtrx[, 2]
        depMtrx[noversion, 2] <- NA
        ## Split "op version" into the operator and version columns.
        pat <- "[[:space:]]*([[<>=]+)[[:space:]]+(.*)"
        depMtrx[!noversion, 2:3] <-
            c(sub(pat, "\\1", depMtrx[!noversion, 2]),
              sub(pat, "\\2", depMtrx[!noversion, 2]))
        depMtrx
    }
    else
        NA
}




### Local variables: ***
### mode: outline-minor ***
### outline-regexp: "### [*]+" ***
### End: ***
### * .installPackageDescription

.installPackageDescription <-
function(dir, outDir)
{
    ## Function for taking the DESCRIPTION package meta-information,
    ## at least partially checking it, and installing it with the
    ## 'Built:' fields added.  Note that from 1.7.0 on, packages without
    ## compiled code are not marked as being from any platform.
    dfile <- file.path(dir, "DESCRIPTION")
    if(!fileTest("-f", dfile))
        stop(paste("file", sQuote(dfile), "does not exist"))
    db <- try(read.dcf(dfile)[1, ])
    if(inherits(db, "try-error"))
        stop(paste("file", sQuote(dfile), "is not in valid DCF format"))
    ## Check for fields needed for what follows.
    ## <FIXME>
    ## In fact, more fields are 'required' as per R CMD check.
    ## Eventually we should have the same tests here.
    ## Maybe have .checkDescription() for this?
    ## Should also include the above, of course.
    requiredFields <- c("Package", "Title", "Description")
    if(any(i <- which(is.na(match(requiredFields, names(db)))))) {
        stop(paste("required fields missing from DESCRIPTION:",
                   paste(requiredFields[i], collapse = " ")))
    }
    ## </FIXME>
    ## Determine the platform tag for the 'Built:' field; only packages
    ## with compiled code (i.e. with a 'src' directory) get one.
    OS <- Sys.getenv("R_OSTYPE")
    OStype <- if(nchar(OS) && OS == "windows")
        "i386-pc-mingw32"
    else
        R.version$platform
    ## Re-emit the DESCRIPTION fields followed by the assembled
    ## 'Built:' line (R version; platform; build time; OS type).
    writeLines(c(formatDL(names(db), db, style = "list"),
                 paste("Built: R ",
                       paste(R.version[c("major", "minor")],
                             collapse = "."),
                       "; ",
                       if(fileTest("-d", file.path(dir, "src"))) OStype
                       else "",
                       "; ",
                       ## Prefer date in ISO 8601 format.
                       ## Could also use
                       ##   format(Sys.time(), "%a %b %d %X %Y")
                       Sys.time(),
                       "; ",
                       .OStype(),
                       sep = "")),
               file.path(outDir, "DESCRIPTION"))
    invisible()
}

### * .installPackageCodeFiles

.installPackageCodeFiles <-
function(dir, outDir)
{
    ## Collate the R code files of the package sources in @code{dir}
    ## into a single file @file{R/<package>} under @code{outDir},
    ## honoring a 'Collate' (or OS-specific 'Collate.<OStype>') field
    ## in the DESCRIPTION file if present.
    if(!fileTest("-d", dir))
        stop(paste("directory", sQuote(dir), "does not exist"))
    dir <- filePathAsAbsolute(dir)

    ## Attempt to set the LC_COLLATE locale to 'C' to turn off locale
    ## specific sorting.
    curLocale <- Sys.getlocale("LC_COLLATE")
    on.exit(Sys.setlocale("LC_COLLATE", curLocale), add = TRUE)
    ## (Guaranteed to work as per the Sys.setlocale() docs.)
    lccollate <- "C"
    if(Sys.setlocale("LC_COLLATE", lccollate) != lccollate) {
        ## <NOTE>
        ## I don't think we can give an error here.
        ## It may be the case that Sys.setlocale() fails because the "OS
        ## reports request cannot be honored" (src/main/platform.c), in
        ## which case we should still proceed ...
        warning("cannot turn off locale-specific sorting via LC_COLLATE")
        ## </NOTE>
    }

    ## We definitely need a valid DESCRIPTION file.
    db <- try(read.dcf(file.path(dir, "DESCRIPTION"))[1, ],
              silent = TRUE)
    if(inherits(db, "try-error"))
        stop(paste("package directory", sQuote(dir),
                   "has no valid DESCRIPTION file"))
    ## Nothing to do for packages without an R code directory.
    codeDir <- file.path(dir, "R")
    if(!fileTest("-d", codeDir)) return(invisible())

    codeFiles <- listFilesWithType(codeDir, "code", full.names = FALSE)

    ## An OS-specific collation field takes precedence over the plain
    ## one.
    collationField <-
        c(paste("Collate", .OStype(), sep = "."), "Collate")
    if(any(i <- collationField %in% names(db))) {
        ## We have a Collate specification in the DESCRIPTION file:
        ## currently, file paths relative to codeDir, separated by
        ## white space, possibly quoted.  Note that we could have
        ## newlines in DCF entries but do not allow them in file names,
        ## hence we gsub() them out.
        collationField <- collationField[i][1]
        codeFilesInCspec <-
            scan(textConnection(gsub("\n", " ", db[collationField])),
                 what = character(), strip.white = TRUE, quiet = TRUE)
        ## Duplicated entries in the collation spec?
        badFiles <-
            unique(codeFilesInCspec[duplicated(codeFilesInCspec)])
        if(length(badFiles)) {
            out <- paste("\nduplicated files in",
                         sQuote(collationField),
                         "field:")
            out <- paste(out,
                         paste(" ", badFiles, collapse = "\n"),
                         sep = "\n")
            stop(out)
        }
        ## See which files are listed in the collation spec but don't
        ## exist.
        badFiles <- codeFilesInCspec[! codeFilesInCspec %in% codeFiles]
        if(length(badFiles)) {
            out <- paste("\nfiles in ", sQuote(collationField),
                         " field missing from ", sQuote(codeDir),
                         ":",
                         sep = "")
            out <- paste(out,
                         paste(" ", badFiles, collapse = "\n"),
                         sep = "\n")
            stop(out)
        }
        ## See which files exist but are missing from the collation
        ## spec.  Note that we do not want the collation spec to use
        ## only a subset of the available code files.
        badFiles <- codeFiles[! codeFiles %in% codeFilesInCspec]
        if(length(badFiles)) {
            out <- paste("\nfiles in", sQuote(codeDir),
                         "missing from", sQuote(collationField),
                         "field:")
            out <- paste(out,
                         paste(" ", badFiles, collapse = "\n"),
                         sep = "\n")
            stop(out)
        }
        ## Everything's groovy ...
        codeFiles <- codeFilesInCspec
    }

    codeFiles <- file.path(codeDir, codeFiles)

    ## Note the space after "directory": stop() concatenates its
    ## arguments without a separator.
    if(!fileTest("-d", outDir) && !dir.create(outDir))
        stop("cannot open directory ", sQuote(outDir))
    outCodeDir <- file.path(outDir, "R")
    if(!fileTest("-d", outCodeDir) && !dir.create(outCodeDir))
        stop("cannot open directory ", sQuote(outCodeDir))
    outFile <- file.path(outCodeDir, db["Package"])
    ## <NOTE>
    ## It may be safer to do
    ##   writeLines(sapply(codeFiles, readLines), outFile)
    ## instead, but this would be much slower ...
    if(!file.create(outFile)) stop("unable to create ", outFile)
    ## Ensure the collated code starts by recording the package's name.
    writeLines(paste(".packageName <- \"", db["Package"], "\"", sep=""),
               outFile)
    if(!all(file.append(outFile, codeFiles)))
        stop("unable to write code files")
    ## </NOTE>

    invisible()
}


### * .installPackageIndices

.installPackageIndices <-
function(dir, outDir)
{
    ## Install or build all indices (Rd contents, help search,
    ## vignettes, demos) for the package sources in @code{dir} into the
    ## installation directory @code{outDir}.
    if(!fileTest("-d", dir))
        stop(paste("directory", sQuote(dir), "does not exist"))
    if(!fileTest("-d", outDir))
        stop(paste("directory", sQuote(outDir), "does not exist"))

    ## If there is an @file{INDEX} file in the package sources, we
    ## install this, and do not build it.
    if(fileTest("-f", file.path(dir, "INDEX")))
        if(!file.copy(file.path(dir, "INDEX"),
                      file.path(outDir, "INDEX"),
                      overwrite = TRUE))
            stop("unable to copy INDEX to ", file.path(outDir, "INDEX"))

    outMetaDir <- file.path(outDir, "Meta")
    ## Note the space after "directory": stop() concatenates its
    ## arguments without a separator.
    if(!fileTest("-d", outMetaDir) && !dir.create(outMetaDir))
         stop("cannot open directory ", sQuote(outMetaDir))
    .installPackageRdIndices(dir, outDir)
    .installPackageVignetteIndex(dir, outDir)
    .installPackageDemoIndex(dir, outDir)
    invisible()
}

### * .installPackageRdIndices

.installPackageRdIndices <-
function(dir, outDir)
{
    ## Build and install the Rd-derived indices (Rd contents db,
    ## help.search db, CONTENTS, INDEX and -- if applicable -- the data
    ## index) for the package sources in @code{dir} into @code{outDir},
    ## unless they are all newer than the Rd sources.
    dir <- filePathAsAbsolute(dir)
    docsDir <- file.path(dir, "man")
    ## Nothing to do for packages without Rd sources.
    if(!fileTest("-d", docsDir)) return(invisible())

    dataDir <- file.path(dir, "data")
    outDir <- filePathAsAbsolute(outDir)
    ## <FIXME>
    ## Not clear whether we should use the basename of the directory we
    ## install to, or the package name as obtained from the DESCRIPTION
    ## file in the directory we install from (different for versioned
    ## installs).  We definitely do not want the basename of the dir we
    ## install from.
    packageName <- basename(outDir)
    ## </FIXME>

    ## Rebuild only if at least one index is older than the Rd sources.
    indices <- c(file.path("Meta", "Rd.rds"),
                 file.path("Meta", "hsearch.rds"),
                 "CONTENTS", "INDEX")
    upToDate <- fileTest("-nt", file.path(outDir, indices), docsDir)
    if(fileTest("-d", dataDir)) {
        ## Note that the data index is computed from both the package's
        ## Rd files and the data sets actually available.
        upToDate <-
            c(upToDate,
              fileTest("-nt",
                        file.path(outDir, "Meta", "data.rds"),
                        c(dataDir, docsDir)))
    }
    if(all(upToDate)) return(invisible())

    ## Parse the Rd sources once; all indices below are derived from
    ## this contents db.
    contents <- Rdcontents(listFilesWithType(docsDir, "docs"))

    .writeContentsRDS(contents, file.path(outDir, "Meta", "Rd.rds"))

    .saveRDS(.buildHsearchIndex(contents, packageName, outDir),
             file.path(outDir, "Meta", "hsearch.rds"))

    .writeContentsDCF(contents, packageName,
                      file.path(outDir, "CONTENTS"))

    ## If there is no @file{INDEX} file in the package sources, we
    ## build one.
    ## <FIXME>
    ## Maybe also save this in RDS format then?
    if(!fileTest("-f", file.path(dir, "INDEX")))
        writeLines(formatDL(.buildRdIndex(contents)),
                   file.path(outDir, "INDEX"))
    ## </FIXME>

    if(fileTest("-d", dataDir)) {
        .saveRDS(.buildDataIndex(dataDir, contents),
                 file.path(outDir, "Meta", "data.rds"))
    }
    invisible()
}

### * .installPackageVignetteIndex

.installPackageVignetteIndex <-
function(dir, outDir)
{
    ## Build the vignette index for the package sources in @code{dir}
    ## and save it in RDS format (plus an HTML index, unless one was
    ## shipped with the sources) under @code{outDir}.
    dir <- filePathAsAbsolute(dir)
    vignetteDir <- file.path(dir, "inst", "doc")
    ## Create a vignette index only if the vignette dir exists.
    if(!fileTest("-d", vignetteDir))
        return(invisible())

    outDir <- filePathAsAbsolute(outDir)
    ## <FIXME>
    ## Not clear whether we should use the basename of the directory we
    ## install to, or the package name as obtained from the DESCRIPTION
    ## file in the directory we install from (different for versioned
    ## installs).  We definitely do not want the basename of the dir we
    ## install from.
    packageName <- basename(outDir)
    ## </FIXME>
    outVignetteDir <- file.path(outDir, "doc")
    ## Note the space after "directory": stop() concatenates its
    ## arguments without a separator.
    if(!fileTest("-d", outVignetteDir) && !dir.create(outVignetteDir))
        stop("cannot open directory ", sQuote(outVignetteDir))

    ## If there is an HTML index in the @file{inst/doc} subdirectory of
    ## the package source directory (@code{dir}), we do not overwrite it
    ## (similar to top-level @file{INDEX} files).  Installation already
    ## copied this over.
    hasHtmlIndex <- fileTest("-f", file.path(vignetteDir, "index.html"))
    htmlIndex <- file.path(outDir, "doc", "index.html")

    ## Write dummy HTML index if no vignettes are found and exit.
    if(!length(listFilesWithType(vignetteDir, "vignette"))) {
        if(!hasHtmlIndex)
            .writeVignetteHtmlIndex(packageName, htmlIndex)
        return(invisible())
    }

    vignetteIndex <- .buildVignetteIndex(vignetteDir)
    ## For base package vignettes there is no PDF in @file{vignetteDir}
    ## but there might/should be one in @file{outVignetteDir}.
    if(NROW(vignetteIndex) > 0) {
        vignettePDFs <-
            sub("$", ".pdf",
                basename(filePathSansExt(vignetteIndex$File)))
        ind <- fileTest("-f", file.path(outVignetteDir, vignettePDFs))
        vignetteIndex$PDF[ind] <- vignettePDFs[ind]
    }
    if(!hasHtmlIndex)
        .writeVignetteHtmlIndex(packageName, htmlIndex, vignetteIndex)

    .saveRDS(vignetteIndex,
             file = file.path(outDir, "Meta", "vignette.rds"))

    invisible()
}

### * .installPackageVignettes

.installPackageVignettes <-
function(dir, outDir)
{
    ## Build (via Sweave and LaTeX) and install the PDFs for the
    ## vignettes of the package sources in @code{dir} into the
    ## @file{doc} subdirectory of @code{outDir}, skipping vignettes
    ## whose PDF is already up-to-date.
    dir <- filePathAsAbsolute(dir)
    vignetteDir <- file.path(dir, "inst", "doc")
    if(!fileTest("-d", vignetteDir))
        return(invisible())
    vignetteFiles <- listFilesWithType(vignetteDir, "vignette")
    if(!length(vignetteFiles))
        return(invisible())

    outDir <- filePathAsAbsolute(outDir)
    outVignetteDir <- file.path(outDir, "doc")
    ## Note the space after "directory": stop() concatenates its
    ## arguments without a separator.
    if(!fileTest("-d", outVignetteDir) && !dir.create(outVignetteDir))
        stop("cannot open directory ", sQuote(outVignetteDir))
    ## For the time being, assume that no PDFs are available in
    ## vignetteDir.
    vignettePDFs <-
        file.path(outVignetteDir,
                  sub("$", ".pdf",
                      basename(filePathSansExt(vignetteFiles))))
    upToDate <- fileTest("-nt", vignettePDFs, vignetteFiles)
    if(all(upToDate))
        return(invisible())

    ## For the time being, the primary use of this function is to
    ## install (and build) vignettes in base packages.  Hence, we build
    ## in a subdir of the current directory rather than a temp dir: this
    ## allows inspection of problems and automatic cleanup via Make.
    cwd <- getwd()
    buildDir <- file.path(cwd, ".vignettes")
    if(!fileTest("-d", buildDir) && !dir.create(buildDir))
        stop(paste("cannot create directory", sQuote(buildDir)))
    on.exit(setwd(cwd))
    setwd(buildDir)

    ## Argh.  We need to ensure that vignetteDir is in TEXINPUTS and
    ## BIBINPUTS.
    envSep <- if(.Platform$OS.type == "windows") ";" else ":"
    ## (Yes, it would be nice to have envPath() similar to file.path().)
    texinputs <- Sys.getenv("TEXINPUTS")
    bibinputs <- Sys.getenv("BIBINPUTS")
    on.exit(Sys.putenv(TEXINPUTS = texinputs, BIBINPUTS = bibinputs),
            add = TRUE)
    Sys.putenv(TEXINPUTS = paste(vignetteDir, Sys.getenv("TEXINPUTS"),
               sep = envSep),
               BIBINPUTS = paste(vignetteDir, Sys.getenv("BIBINPUTS"),
               sep = envSep))

    for(srcfile in vignetteFiles[!upToDate]) {
        base <- basename(filePathSansExt(srcfile))
        texfile <- paste(base, ".tex", sep = "")
        yy <- try(Sweave(srcfile, pdf = TRUE, eps = FALSE, quiet =
                         TRUE))
        if(inherits(yy, "try-error"))
            stop(yy)
        ## In case of an error, do not clean up: should we point to
        ## buildDir for possible inspection of results/problems?
        if(.Platform$OS.type == "windows") {
            ## may not have texi2dvi
            res <- system(paste("pdflatex", texfile))
            if(res) stop(paste("unable to run pdflatex on", sQuote(texfile)))
            ## Look for the literal string '\bibdata' in the aux file
            ## to decide whether bibtex needs to be run.  As a regexp,
            ## "\\bibdata" would be '\b' (word boundary) followed by
            ## "ibdata" and hence never match the literal: use
            ## fixed = TRUE.
            if(length(grep("\\bibdata",
                           readLines(paste(base, ".aux", sep = "")),
                           fixed = TRUE))) {
                res <- system(paste("bibtex", base))
                if(res) stop(paste("unable to run bibtex on", sQuote(base)))
                res <- system(paste("pdflatex", texfile))
                if(res) stop(paste("unable to run pdflatex on", sQuote(texfile)))
            }
            res <- system(paste("pdflatex", texfile))
            if(res) stop(paste("unable to run pdflatex on", sQuote(texfile)))
        } else
            texi2dvi(texfile, pdf = TRUE, quiet = TRUE)
        pdffile <- paste(base, ".pdf", sep = "")
        if(!file.exists(pdffile))
            stop(paste("file", sQuote(pdffile), "was not created"))
        if(!file.copy(pdffile, outVignetteDir, overwrite = TRUE))
            stop(paste("cannot copy", sQuote(pdffile), "to",
                       sQuote(outVignetteDir)))
    }
    ## need to change out of this dir before we delete it, at least on Windows
    setwd(cwd)
    unlink(buildDir, recursive = TRUE)
    invisible()
}


### * .installPackageDemoIndex

.installPackageDemoIndex <-
function(dir, outDir)
{
    ## Build the demo index for the package sources in @code{dir} (if
    ## they contain a @file{demo} directory) and save it in RDS format
    ## under @file{Meta} in @code{outDir}.
    demoDir <- file.path(dir, "demo")
    if(!fileTest("-d", demoDir))
        return(invisible())
    .saveRDS(.buildDemoIndex(demoDir),
             file = file.path(outDir, "Meta", "demo.rds"))
    invisible()
}

### * .installPackageNamespaceInfo

.installPackageNamespaceInfo <-
function(dir, outDir)
{
    ## Parse the NAMESPACE file of the package sources in @code{dir}
    ## (if any) and save the parsed information to
    ## @file{Meta/nsInfo.rds} in @code{outDir}, unless the saved info
    ## is already newer than the NAMESPACE file.
    dir <- filePathAsAbsolute(dir)
    nsFile <- file.path(dir, "NAMESPACE")
    if(!fileTest("-f", nsFile)) return(invisible())
    nsInfoFilePath <- file.path(outDir, "Meta", "nsInfo.rds")
    if(fileTest("-nt", nsInfoFilePath, nsFile)) return(invisible())
    nsInfo <- parseNamespaceFile(basename(dir), dirname(dir))
    outMetaDir <- file.path(outDir, "Meta")
    ## Note the space after "directory": stop() concatenates its
    ## arguments without a separator.
    if(!fileTest("-d", outMetaDir) && !dir.create(outMetaDir))
        stop("cannot open directory ", sQuote(outMetaDir))
    .saveRDS(nsInfo, nsInfoFilePath)
    invisible()
}

### Local variables: ***
### mode: outline-minor ***
### outline-regexp: "### [*]+" ***
### End: ***
### Miscellaneous indexing functions.

## <NOTE>
## Currently indices are represented as 2-column character matrices.
## To 'merge' indices in the sense of using the values from index B for
## all keys in index A also present in index B, we currently use
##   idx <- match(indA[ , 1], indB[ , 1], 0)
##   indA[which(idx != 0), 2] <- indB[idx, 2]
## which could be abstracted into a function .mergeIndexEntries().
## </NOTE>

### * .buildDataIndex

.buildDataIndex <-
function(dataDir, contents)
{
    ## Build a 2-column (topic, title) character matrix describing all
    ## data sets available in @code{dataDir}, taking titles from the Rd
    ## contents db 'contents' where a matching alias exists.
    ## See .buildDemoIndex() for an explanation of the approach.

    ## <NOTE>
    ## We could also have an interface like
    ##   .buildDataIndex(dir, contents = NULL)
    ## where @code{dir} is the path to a package's root source dir and
    ## contents is Rdcontents(listFilesWithType(file.path(dir, "man"),
    ## "docs")).
    ## </NOTE>

    if(!fileTest("-d", dataDir))
        stop(paste("directory", sQuote(dataDir), "does not exist"))
    dataFiles <- listFilesWithType(dataDir, "data")
    topics <- unique(basename(filePathSansExt(dataFiles)))
    ## <FIXME> to avoid name clashes CO2 is stored as zCO2.R
    topics[topics == "zCO2"] <- "CO2"
    if(!length(topics))
        return(matrix("", 0, 2))
    topics <- sort(topics)
    index <- cbind(topics, "")
    ## Note that NROW(contents) might be 0.
    if(NROW(contents)) {
        ## Map each alias back to the row of 'contents' it came from,
        ## then look the data topics up among the aliases.  Zero
        ## entries in 'pos' (no match) are dropped by the subscripts on
        ## both sides, so the lengths agree.
        rdObject <- rep(seq_len(NROW(contents)),
                        sapply(contents$Aliases, length))
        pos <- match(topics, unlist(contents$Aliases), 0)
        index[which(pos != 0), 2] <- contents[rdObject[pos], "Title"]
    }
    dimnames(index) <- NULL
    index
}

### * .buildDemoIndex

.buildDemoIndex <-
function(demoDir)
{
    ## Build an index with information about all available demos,
    ## returned as a 2-column (topic, title) character matrix.

    ## <NOTE>
    ## We use both the contents of @file{00Index} (if possible) and the
    ## information which demos are actually available to build the real
    ## demo index.
    ## This ensures that demo() really lists all *available* demos, even
    ## if some might be 'undocumented', i.e., without index information.
    ## Use .checkDemoIndex() to check whether available demo code and
    ## docs are in sync.
    ## </NOTE>

    if(!fileTest("-d", demoDir))
        stop(paste("directory", sQuote(demoDir), "does not exist"))
    demoFiles <- listFilesWithType(demoDir, "demo")
    demoTopics <- unique(basename(filePathSansExt(demoFiles)))
    if(!length(demoTopics)) return(matrix("", 0, 2))
    demoIndex <- cbind(demoTopics, "")
    if(fileTest("-f", INDEX <- file.path(demoDir, "00Index"))) {
        demoEntries <- try(read.00Index(INDEX))
        if(inherits(demoEntries, "try-error"))
            ## Cannot use the index information: warn, but still return
            ## the index built from the available demo files.
            ## (Previously the merge below was run on the try-error
            ## object, giving a secondary error after the warning.)
            warning(paste("cannot read index information in file",
                          sQuote(INDEX)))
        else {
            ## Merge in the titles from 00Index for those topics which
            ## have an entry there.
            idx <- match(demoTopics, demoEntries[ , 1], 0)
            demoIndex[which(idx != 0), 2] <- demoEntries[idx, 2]
        }
    }
    dimnames(demoIndex) <- NULL
    demoIndex
}

### * .checkDemoIndex

.checkDemoIndex <-
function(demoDir)
{
    ## Compare the demos available in @code{demoDir} with the entries
    ## in its @file{00Index} file.  Returns an object of class
    ## "checkDemoIndex" listing the entries missing on either side.
    if(!fileTest("-d", demoDir))
        stop(paste("directory", sQuote(demoDir), "does not exist"))
    infoFromBuild <- .buildDemoIndex(demoDir)
    infoFromIndex <- try(read.00Index(file.path(demoDir, "00Index")))
    if(inherits(infoFromIndex, "try-error"))
        stop(paste("cannot read index information in file",
                   sQuote(file.path(demoDir, "00Index"))))
    ## Demos whose title in the built index is empty or all blanks,
    ## i.e., demos lacking usable 00Index information.
    noTitle <- grep("^[[:space:]]*$", infoFromBuild[ , 2])
    ## Index entries for which no demo file exists.
    noDemo <- !(infoFromIndex[ , 1] %in% infoFromBuild[ , 1])
    badEntries <- list(missingFromIndex = infoFromBuild[noTitle, 1],
                       missingFromDemos = infoFromIndex[noDemo, 1])
    class(badEntries) <- "checkDemoIndex"
    badEntries
}

print.checkDemoIndex <-
function(x, ...)
{
    ## Print method for the results of .checkDemoIndex(): report demos
    ## without index information, then index entries without a
    ## corresponding demo.  Returns its argument invisibly.
    sections <-
        list(missingFromIndex =
             "Demos with missing or empty index information:",
             missingFromDemos =
             "Demo index entries without corresponding demo:")
    for(nm in names(sections)) {
        entries <- x[[nm]]
        if(length(entries) > 0) {
            writeLines(sections[[nm]])
            print(entries)
        }
    }
    invisible(x)
}

### * .buildHsearchIndex

.buildHsearchIndex <-
function(contents, packageName, libDir)
{
    ## Build an index of the Rd contents in 'contents', of a package
    ## named 'packageName' (to be) installed in 'libDir', in a form
    ## useful for help.search().
    ## Returns a list of four character matrices: the per-object base
    ## information, and the alias, keyword and concept tables.

    ## Start with empty 3-column tables; they are only filled in below
    ## when the contents db actually has the corresponding entries.
    dbAliases <- dbConcepts <- dbKeywords <- matrix(character(), nc = 3)

    if((nr <- NROW(contents)) > 0) {
        ## IDs are used for indexing the Rd objects in the help.search
        ## db.
        IDs <- seq(length = nr)
        if(!is.data.frame(contents)) {
            colnames(contents) <-
                c("Name", "Aliases", "Title", "Keywords")
            base <- contents[, c("Name", "Title"), drop = FALSE]
            ## If the contents db is not a data frame, then it has the
            ## aliases collapsed.  Split again as we need the first
            ## alias as the help topic to indicate for matching Rd
            ## objects.
            aliases <- strsplit(contents[, "Aliases"], " +")
            ## Don't do this for keywords though, as these might be
            ## non-standard (and hence contain white space ...).
        }
        else {
            ## Data frame form: Aliases and Keywords are list columns
            ## (one character vector per Rd object).
            base <- as.matrix(contents[, c("Name", "Title")])
            aliases <- contents[, "Aliases"]
        }
        keywords <- contents[, "Keywords"]
        ## We create 4 character matrices (cannot use data frames for
        ## efficiency reasons): 'dbBase' holds all character string
        ## data; 'dbAliases', 'dbConcepts' and 'dbKeywords' hold
        ## character vector data in a 3-column character matrix format
        ## with entry, ID of the Rd object the entry comes from, and the
        ## package the object comes from.  The latter is useful when
        ## subscripting the help.search db according to package.
        dbBase <- cbind(packageName, libDir, IDs, base,
                        topic = sapply(aliases, "[", 1))
        ## If there are no aliases at all, cbind() below would give
        ## matrix(packageName, nc = 1).  (Of course, Rd objects without
        ## aliases are useless ...)
        if(length(tmp <- unlist(aliases)) > 0)
            dbAliases <-
                cbind(tmp, rep.int(IDs, sapply(aliases, length)),
                      packageName)
        ## And similarly if there are no keywords at all.
        if(length(tmp <- unlist(keywords)) > 0)
            dbKeywords <-
                cbind(tmp, rep.int(IDs, sapply(keywords, length)),
                      packageName)
        ## Finally, concepts are a feature added in R 1.8 ...
        if("Concepts" %in% colnames(contents)) {
            concepts <- contents[, "Concepts"]
            if(length(tmp <- unlist(concepts)) > 0)
                dbConcepts <-
                    cbind(tmp, rep.int(IDs, sapply(concepts, length)),
                          packageName)
        }
    }
    else {
        ## No Rd objects at all: empty base table with the right number
        ## of columns.
        dbBase <- matrix(character(), nc = 6)
    }

    colnames(dbBase) <-
        c("Package", "LibPath", "ID", "name", "title", "topic")
    colnames(dbAliases) <-
        c("Aliases", "ID", "Package")
    colnames(dbKeywords) <-
        c("Keywords", "ID", "Package")
    colnames(dbConcepts) <-
        c("Concepts", "ID", "Package")

    list(dbBase, dbAliases, dbKeywords, dbConcepts)
}


### Local variables: ***
### mode: outline-minor ***
### outline-regexp: "### [*]+" ***
### End: ***
## Compute MD5 checksums for the given file paths via the "Rmd5" C
## routine in the tools package; the result is a character vector of
## checksums named by the file paths.  (Behavior for unreadable files
## is determined by the C routine -- NOTE(review): presumably NA;
## confirm against src.)
md5sum <- function(files)
    structure(.Call("Rmd5", files, PACKAGE="tools"), names=files)

.installMD5sums <- function(pkgDir, outDir = pkgDir)
{
    ## Compute MD5 checksums for all files installed under
    ## @code{pkgDir} and write them to @file{MD5} in @code{outDir}, one
    ## "<checksum> *<filename>" line per file, excluding the MD5 file
    ## itself.
    dot <- getwd()
    ## Restore the working directory even if md5sum() fails.
    on.exit(setwd(dot))
    setwd(pkgDir)
    x <- md5sum(dir(pkgDir, recursive=TRUE))
    ## Change back before writing, in case outDir is a relative path.
    setwd(dot)
    x <- x[names(x) != "MD5"]
    cat(paste(x, names(x), sep=" *"), sep="\n",
        file=file.path(outDir, "MD5"))
}

checkMD5sums <- function(pkg, dir)
{
    ## Check the files of an installed package against the checksums
    ## recorded in its @file{MD5} file.  Returns TRUE if everything
    ## matches, FALSE (after reporting the offending files) otherwise,
    ## and NA if the package or its MD5 file cannot be found.
    if(missing(dir)) dir <- .find.package(pkg, quiet=TRUE)
    if(!length(dir)) return(NA)
    md5file <- file.path(dir, "MD5")
    if(!file.exists(md5file)) return(NA)
    ## Lines have the form "<checksum> *<filename>".
    infile <- scan(md5file, what=list(md5="", name=""), quiet = TRUE)
    xx <- infile[[1]]
    nmxx <- names(xx) <- sub("^\\*", "", infile[[2]])
    dot <- getwd()
    ## Restore the working directory even if md5sum() fails.
    on.exit(setwd(dot))
    setwd(dir)
    x <- md5sum(dir(dir, recursive=TRUE))
    setwd(dot)
    x <- x[names(x) != "MD5"]
    nmx <- names(x)
    res <- TRUE
    ## Files recorded in MD5 but not present on disk.
    not.here <- !(nmxx %in% nmx)
    if(any(not.here)) {
        res <- FALSE
        cat("files", paste(nmxx[not.here], collapse=", "),
            "are missing\n", sep=" ")
    }
    nmxx <- nmxx[!not.here]
    ## Files present but whose checksum differs from the recorded one.
    diff <- xx[nmxx] != x[nmxx]
    if(any(diff)) {
        res <- FALSE
        cat("files", paste(nmxx[diff], collapse=", "),
            "have the wrong MD5 checksums\n", sep=" ")
    }
    return(res)
}
pkgDepends <- function(pkg, recursive=TRUE, local=TRUE,
                       reduce=TRUE, lib.loc=NULL) {
    ## Compute the dependency information for exactly one package,
    ## returning a "DependsList" object (see getDepList()).
    if (length(pkg) != 1) {
        stop("Argument 'pkg' must be of length 1")
    }

    ## Dependencies are resolved against the currently installed
    ## packages.
    instPkgs <- utils::installed.packages(lib.loc=lib.loc)

    ## NULL means the package could be found neither locally nor (when
    ## local is FALSE) in the online repositories.
    depMtrx <- getDepMtrx(pkg, instPkgs, local)
    if (is.null(depMtrx)) {
        stop("Package ", pkg, " was not found.")
    }

    getDepList(depMtrx, instPkgs, recursive, local, reduce, lib.loc)
}

getDepList <- function(depMtrx, instPkgs, recursive=TRUE,
                       local=TRUE, reduce=TRUE, lib.loc=NULL) {
    ## Build a "DependsList" object from the dependency matrix
    ## 'depMtrx': character vectors of all dependencies ('Depends'),
    ## those satisfied by installed packages ('Installed'), any
    ## dependency on R itself ('R'), and -- when 'local' is FALSE --
    ## the dependencies located in ('Found') or missing from
    ## ('NotFound') the online repositories.
    out <- list(Depends=character(), Installed=character(),
                Found=list(), NotFound=character(),
                R=character())
    class(out) <- c("DependsList", class(out))

    ## A non-matrix NA value means there are no dependencies at all.
    if ((!is.matrix(depMtrx))&&(is.na(depMtrx))) ## no dependencies
        return(out)

    ## Collect the dependency matrices, possibly recursively.
    mtrxList <- buildDepList(depMtrx, instPkgs, recursive, lib.loc)

    if (local == FALSE) {
        ## Dependencies not satisfied by installed packages: try to
        ## locate them in the online repositories.
        toFind <- mtrxList$Depends[!apply(mtrxList$Depends, 1,
                                          isSatisfied,
                                          mtrxList$Installed),,drop=FALSE]

        if (reduce)
            toFind <- reduceDepends(toFind)

        if (length(toFind) > 0) {
            found <- foundDepends(toFind)
            out$Found <- found$Found
            mtrxList$NotFound <- found$NotFound
        }
    }

    if (reduce == TRUE) { ## Found and NotFound are already reduced
        mtrxList$R <- reduceDepends(mtrxList$R)
        mtrxList$Depends <- reduceDepends(mtrxList$Depends)
        mtrxList$Installed <- reduceDepends(mtrxList$Installed)
    }


    ## Massage the matrices back into dependency strings.  out$Found
    ## is already assigned.
    out$R <- depMtrxToStrings(mtrxList$R)
    out$Depends <- depMtrxToStrings(mtrxList$Depends)
    out$Installed <- depMtrxToStrings(mtrxList$Installed)
    out$NotFound <- depMtrxToStrings(mtrxList$NotFound)

    out
}

isSatisfied <- function(dep, instMtrx) {
    ## Is the dependency triplet 'dep' (package, operator, version)
    ## present verbatim as a row of 'instMtrx'?  Rows are compared via
    ## their ":"-collapsed string keys.
    key <- paste(dep, collapse = ":")
    keys <- apply(instMtrx, 1, paste, collapse = ":")
    key %in% keys
}

buildDepList <- function(depMtrx, instPkgs, recursive=TRUE,
                         lib.loc=NULL) {
    ## Collect the dependency matrices for 'depMtrx', optionally
    ## following dependencies recursively.  Returns a list of 3-column
    ## matrices: all dependencies ('Depends'), those satisfied by
    ## installed packages ('Installed'), and any dependencies on R
    ## itself ('R').
    mtrxList <- list(Depends=matrix(nrow=0,ncol=3),
                     Installed=matrix(nrow=0,ncol=3), R=matrix(nrow=0,ncol=3))

    ## First check to see if there is a dependency on R
    ## If there is, then check it
    whichR <- which(depMtrx[,1] == "R")
    if (length(whichR) > 0) {
        mtrxList$R <- depMtrx[whichR,,drop=FALSE]
        depMtrx <- depMtrx[-whichR,,drop=FALSE]
    }

    ## Get which of the direct depends are installed
    instDeps <- depMtrx[installedDepends(depMtrx, instPkgs),,drop=FALSE]

    ## The direct dependencies are recorded in both the recursive and
    ## the non-recursive case.
    mtrxList$Depends <- depMtrx
    mtrxList$Installed <- instDeps

    if (recursive == TRUE) {
        for (curPkg in depMtrx[,1]) {
            depMtrx <- getDepMtrx(curPkg, instPkgs)
            ## Make sure this package was found (non-NULL) and has
            ## dependencies (not a scalar NA).  Note that is.na() must
            ## not be applied to a dependency *matrix*: it would return
            ## a matrix, which is not a valid condition for ||.
            if (is.null(depMtrx) ||
                (!is.matrix(depMtrx) && is.na(depMtrx)))
                next

            curMtrxList <- buildDepList(depMtrx, instPkgs,
                                             recursive=recursive,
                                             lib.loc=lib.loc)
            mtrxList$R <- rbind(mtrxList$R, curMtrxList$R)
            mtrxList$Depends <- rbind(mtrxList$Depends,
                                      curMtrxList$Depends)
            mtrxList$Installed <- rbind(mtrxList$Installed,
                                        curMtrxList$Installed)
        }
    }

    mtrxList
}

getDepMtrx <- function(pkg, instPkgs, local=TRUE) {
    ## Retrieve the dependency matrix for 'pkg': from the installed
    ## packages if possible, otherwise (unless 'local') from the online
    ## repositories.
    ## Returns a matrix of dependencies, NA if there are none, or NULL
    ## if the package could not be found.
    row <- match(pkg, instPkgs[,"Package"])
    if (is.na(row)) {
        ## Not installed: optionally look online.
        if (local)
            return(NULL)
        return(getRemotePkgDepends(pkg))
    }
    package.dependencies(instPkgs[row,])[[1]]
}

getRemotePkgDepends <- function(pkg, contriburl=getOption("repositories")()[1]) {
    ## Look up the dependencies of 'pkg' in an online repository
    ## listing (defaulting to the first configured repository, falling
    ## back to CRAN).  Returns the dependency matrix from the listing,
    ## or NULL if the package cannot be found there.

    if(is.null(contriburl))
        contriburl <- contrib.url(getOption("CRAN"))

    avail <- CRAN.packages(contriburl=contriburl)
    hit <- which(pkg == avail[,"Package"])
    if (length(hit) == 0)
        return(NULL)
    package.dependencies(avail[hit,])[[1]]
}

installedDepends <- function(depMtrx, instPkgs) {
    ## Given a dependency matrix, return the row numbers whose
    ## requirements are met by the installed packages in 'instPkgs',
    ## taking any version requirement into account.

    pkgs <- depMtrx[,1]
    if (length(pkgs) == 0)
        return(numeric())

    ## Which dependencies name an installed package at all?
    isInst <- match(pkgs, instPkgs[,"Package"], nomatch=0) > 0
    curPkgs <- depMtrx[isInst,,drop=FALSE]
    if (nrow(curPkgs) == 0)
        return(numeric())

    ## Check the version requirement (if any) of each installed
    ## dependency against the installed version.
    passVersReq <- apply(curPkgs, 1, function(dep) {
        instVers <- instPkgs[instPkgs[,1] == dep[1], "Version"]
        is.na(dep[2]) ||
            compareDependsPkgVersion(instVers, dep[2], dep[3]) >= 0
    })
    passPkgs <- curPkgs[passVersReq, 1]

    which(match(depMtrx[,1], passPkgs, nomatch=0) > 0)
}

foundDepends <- function(depMtrx, contriburl=getOption("repositories")()) {
    ## See which of the dependencies in 'depMtrx' can be satisfied from
    ## the online repositories in 'contriburl'.  Returns a list with a
    ## 'Found' element (character vectors of package names, one list
    ## element per repository URL) and, when not all dependencies could
    ## be found, a 'NotFound' element with the remaining rows of
    ## 'depMtrx'.
    out <- list(Found=list())
    foundRows <- numeric()

    if(is.null(contriburl))
        contriburl <- contrib.url(c(CRAN = getOption("CRAN"),
                                      BIOC = getOption("BIOC")))

    for (j in seq_along(contriburl)) {
        cur <- character()
        cran <- CRAN.packages(contriburl=contriburl[j])

        if (nrow(depMtrx) > 0) {
            for (i in seq_len(nrow(depMtrx))) {
                found <- FALSE
                cranRow <- which(depMtrx[i,1] == cran[,1])
                if (length(cranRow) > 0) {
                    ## Found it in repos
                    if (is.na(depMtrx[i,2])) ## no version, automatically okay
                        found <- TRUE
                    ## compareDependsPkgVersion() returns -1 if the
                    ## requirement is NOT met and 0 or 1 if it is, so
                    ## we must compare against 0: a plain truth test
                    ## would also accept -1.
                    else if(compareDependsPkgVersion(cran[cranRow,
                                                          "Version"],
                                                     depMtrx[i,2],
                                                     depMtrx[i,3]) >= 0)
                        found <- TRUE
                }
                if (found) {
                    foundRows <- c(foundRows,i)
                    cur <- c(cur,depMtrx[i,1])
                }
            }
        }

        if (length(cur) > 0)
            ## Use [[ so that a multi-element character vector becomes
            ## one list element; with [ only the first element would be
            ## kept (with a warning).
            out$Found[[contriburl[j]]] <- cur
    }

    ## A row may have been found in several repositories, so collapse
    ## duplicates before deciding whether anything is missing.  Note
    ## that depMtrx[-numeric(0), ] would select *no* rows, so the empty
    ## case must be handled separately.
    foundRows <- unique(foundRows)
    if (length(foundRows) < nrow(depMtrx)) {
        out$NotFound <- if (length(foundRows) > 0)
            depMtrx[-foundRows,,drop=FALSE]
        else
            depMtrx
    }

    out
}

compareDependsPkgVersion <- function(curVersion, versOper, versionReq) {
    ## Check a version requirement: returns -1 if it is not met, and 0
    ## or 1 if it is (the result of compareVersion() applied to the
    ## operands in the order implied by the operator).
    switch(versOper,
           ">=" = compareVersion(curVersion, versionReq),
           "<=" = compareVersion(versionReq, curVersion),
           stop("bad operand"))
}

reduceDepends <- function(depMtrx, quietly=TRUE) {
    ## Reduce the dependency matrix 'depMtrx' (columns: package,
    ## operator, version) to a minimal set per package: the strongest
    ## ">=" requirement and, where the bounds conflict, also the
    ## weakest "<=" requirement.  Returns a 3-column matrix, or
    ## character() if there is nothing to reduce.
    if (is.null(depMtrx) || nrow(depMtrx) == 0)
        return(character())

    ## split() on a matrix works element-wise in column-major order;
    ## recycling depMtrx[, 1] therefore regroups exactly the rows of
    ## each package, and matrix(x, ncol = 3) rebuilds the per-package
    ## sub-matrix.
    pkgList <- split(depMtrx, depMtrx[,1])
    out <- lapply(pkgList, function(x, quietly) {
        pkgMtrx <- matrix(x, ncol = 3)
        if (all(is.na(pkgMtrx[,2])))
            ## No version requirements at all: any single row (i.e.,
            ## just the package name) will do.
            outRow <- 1
        else {
            outRow <- integer(0)
            ## Strongest (maximum) ">=" requirement, if any.  Use
            ## which() so rows with an NA operator are not picked up by
            ## the logical subscript.
            gtsRows <- which(!is.na(pkgMtrx[,2]) & pkgMtrx[,2] == ">=")
            if (length(gtsRows) > 0) {
                maxGts <- pkgMtrx[gtsRows[1], 3]
                outRow <- gtsRows[1]
                for (i in gtsRows) {
                    ## Compare as versions, not as strings.
                    if (compareVersion(pkgMtrx[i, 3], maxGts) > 0) {
                        maxGts <- pkgMtrx[i, 3]
                        outRow <- i
                    }
                }
            }
            ## Weakest (minimum) "<=" requirement, if any.
            ltsRows <- which(!is.na(pkgMtrx[,2]) & pkgMtrx[,2] == "<=")
            if (length(ltsRows) > 0) {
                minLts <- pkgMtrx[ltsRows[1], 3]
                minRow <- ltsRows[1]
                for (i in ltsRows) {
                    if (compareVersion(pkgMtrx[i, 3], minLts) < 0) {
                        minLts <- pkgMtrx[i, 3]
                        minRow <- i
                    }
                }
                ## If there is a maxGts and it is larger than the
                ## minLts then we need to record both.  (Previously
                ## this used exists(maxGts) -- looking up a variable
                ## *named after the version string* -- a string ">"
                ## comparison, and a dangling else that left outRow
                ## unset when only "<=" bounds were present.)
                if (length(outRow) > 0) {
                    if (compareVersion(maxGts, minLts) > 0)
                        outRow <- c(outRow, minRow)
                }
                else
                    outRow <- minRow
            }
            if (length(outRow) == 0)
                ## Only unrecognized operators: keep the first row.
                outRow <- 1
            if (quietly == FALSE)
                warning("Package ", pkgMtrx[1, 1],
                        " had its dependencies reduced to a minimal set.")
        }
        ## Transpose so that unlist() below yields the entries in
        ## row-major order, as expected by matrix(byrow = TRUE) --
        ## otherwise multi-row results would be scrambled.
        t(pkgMtrx[outRow, , drop = FALSE])
    }, quietly)

    matrix(unlist(out), ncol = 3, byrow = TRUE)
}

depMtrxToStrings <- function(depMtrx) {
    ## Format each row of a dependency matrix (name, operator, version)
    ## as "name" when there is no version requirement (operator is NA),
    ## or "name (op version)" otherwise.  NULL or zero-row input yields
    ## character().
    if (is.null(depMtrx) || nrow(depMtrx) == 0)
        return(character())
    apply(depMtrx, 1, function(row) {
        if (is.na(row[2]))
            row[1]
        else
            paste(row[1], " (", row[2], " ", row[3], ")", sep = "")
    })
}

installFoundDepends <- function(depPkgList, ...) {
    ## Install, per repository, the dependency packages found there.
    ## 'depPkgList' is a list of character vectors of package names,
    ## named by the contrib URL to install from; extra arguments are
    ## forwarded to install.packages().  Returns NULL.
    repoUrls <- names(depPkgList)
    for (k in seq(along = depPkgList)) {
        pkgs <- depPkgList[[k]]
        if (length(pkgs) == 0)
            next
        install.packages(pkgs, contriburl = repoUrls[k], ...)
    }
    NULL
}
### * File utilities.

### ** filePathAsAbsolute

filePathAsAbsolute <-
function(x)
{
    ## Turn a possibly relative file path into an absolute one,
    ## performing tilde expansion if necessary.  The only portable way
    ## to resolve the path is to 'temporarily' change the working
    ## directory there and ask getwd() where we ended up; the working
    ## directory is restored via on.exit().
    expanded <- path.expand(x)
    if(!file.exists(expanded))
        stop(paste("file", sQuote(x), "does not exist"))
    oldwd <- getwd()
    on.exit(setwd(oldwd))
    if(fileTest("-d", expanded)) {
        ## Directories: dirname()/basename() mishandle e.g. '.' and
        ## '..' on Unix, so resolve the directory itself.
        setwd(expanded)
        return(getwd())
    }
    ## Regular files: resolve the containing directory, then re-attach
    ## the file name.
    setwd(dirname(expanded))
    file.path(getwd(), basename(expanded))
}

### ** filePathSansExt

filePathSansExt <-
function(x)
{
    ## Return the file paths without extensions.
    ## (Only purely alphanumeric extensions are recognized.)
    ## Fix: the pattern previously used [[:alpha:]], which contradicted
    ## the comment above by failing to strip extensions containing
    ## digits (e.g. ".mp3"); [[:alnum:]] matches the stated contract.
    sub("\\.[[:alnum:]]+$", "", x)
}

### ** fileTest

fileTest <-
function(op, x, y)
{
    ## Shell-style file tests: '-f' (regular-ish file), '-d'
    ## (directory), '-nt' (newer than) and '-ot' (older than).
    ## file.exists() only tests existence ('test -e' on some systems);
    ## here '-f' means "exists and is not a directory" (the GNU variant
    ## tests for being a regular file).  Vectorized in x and y.
    if (op == "-f") {
        isdir <- file.info(x)$isdir
        return(!is.na(isdir) & !isdir)
    }
    if (op == "-d") {
        isdir <- file.info(x)$isdir
        return(!is.na(isdir) & isdir)
    }
    if (op == "-nt" || op == "-ot") {
        ## Both files must exist (non-NA mtimes) for either comparison
        ## to be TRUE.
        mt.x <- file.info(x)$mtime
        mt.y <- file.info(y)$mtime
        both <- !is.na(mt.x) & !is.na(mt.y)
        return(if (op == "-nt") both & (mt.x > mt.y)
               else both & (mt.x < mt.y))
    }
    stop(paste("test", sQuote(op), "is not available"))
}

### ** listFilesWithExts

listFilesWithExts <-
function(dir, exts, all.files = FALSE, full.names = TRUE)
{
    ## Names (or, with full.names, paths) of the files in 'dir' whose
    ## extension -- the text after the last '.' -- is one of 'exts'.
    allFiles <- list.files(dir, all.files = all.files)
    matched <- allFiles[sub(".*\\.", "", allFiles) %in% exts]
    if(!full.names)
        return(matched)
    ## file.path() on an empty vector would not give character(0)
    ## uniformly, so handle that case explicitly.
    if(length(matched) > 0) file.path(dir, matched) else character(0)
}

### ** listFilesWithType

listFilesWithType <-
function(dir, type, all.files = FALSE, full.names = TRUE)
{
    ## Paths (or names) of the files in 'dir' of the given 'type',
    ## using the extensions from .makeFileExts().  For R code and
    ## documentation files, an OS-specific subdirectory (named after
    ## .OStype()) is also searched if present.
    exts <- .makeFileExts(type)
    files <- listFilesWithExts(dir, exts, all.files = all.files,
                               full.names = full.names)

    if(type == "code" || type == "docs") {
        osSubdir <- file.path(dir, .OStype())
        if(fileTest("-d", osSubdir)) {
            extra <- listFilesWithExts(osSubdir, exts,
                                       all.files = all.files,
                                       full.names = FALSE)
            ## Prefix matches the style of the main listing: full path
            ## when full.names, otherwise just the OS subdirectory name.
            prefix <- if(full.names) osSubdir else .OStype()
            files <- c(files, file.path(prefix, extra))
        }
    }
    files
}

### * Text utilities.

### ** delimMatch

delimMatch <-
function(x, delim = c("{", "}"), syntax = "Rd")
{
    ## Match delimited substrings in the character vector 'x', via the
    ## 'delim_match' C routine in the tools shared library.  'delim'
    ## gives the open/close delimiter characters (one character each).
    ## Fix: the defaults were written as "\{" and "\}"; '\{' is an
    ## unrecognized string escape (a syntax error in current R), and
    ## braces need no escaping inside R strings.
    if(!is.character(x))
        stop("argument x must be a character vector")
    if((length(delim) != 2) || any(nchar(delim) != 1))
        stop("incorrect value for delim")
    if(syntax != "Rd")
        stop("only Rd syntax is currently supported")

    .Call("delim_match", x, delim, PACKAGE = "tools")
}


### * LaTeX utilities

### ** texi2dvi

texi2dvi <-
function(file, pdf = FALSE, clean = FALSE,
         quiet = TRUE, texi2dvi = getOption("texi2dvi"))
{
    ## Run the 'texi2dvi' program on 'file', optionally producing PDF
    ## (--pdf), cleaning intermediate files (--clean) and suppressing
    ## chatter (--quiet).  The program is taken from the "texi2dvi"
    ## option, falling back to R's own copy if present, then the one
    ## on the PATH.  Stops if the command exits with a non-zero status.
    pdfFlag   <- if(pdf)   "--pdf"   else ""
    cleanFlag <- if(clean) "--clean" else ""
    quietFlag <- if(quiet) "--quiet" else ""
    if(is.null(texi2dvi)) {
        candidate <- file.path(R.home(), "bin", "texi2dvi")
        texi2dvi <- if(file.exists(candidate)) candidate else "texi2dvi"
    }

    status <- system(paste(texi2dvi, quietFlag, pdfFlag, cleanFlag, file))
    if(status > 0) stop(paste("running texi2dvi on", file, "failed"))
}


### * Internal utility functions.

### ** .OStype

.OStype <-
function()
{
    ## Operating-system family: the value of the R_OSTYPE environment
    ## variable when set (non-empty), otherwise .Platform$OS.type.
    fromEnv <- Sys.getenv("R_OSTYPE")
    if(nchar(fromEnv) > 0) fromEnv else .Platform$OS.type
}

### ** .getInternalS3generics

.getInternalS3generics <-
function()
{
    ## Get the list of R internal S3 generics (via DispatchOrEval(),
    ## cf. zMethods.Rd).
    ## Returns a character vector; the exact contents and order are
    ## data, so keep this list in sync with the R internals it mirrors
    ## rather than editing it ad hoc.
    c("[", "[[", "$", "[<-", "[[<-", "$<-", "length", "dimnames<-",
      "dimnames", "dim<-", "dim", "c", "unlist", "as.character",
      "as.vector", "is.array", "is.atomic", "is.call", "is.character",
      "is.complex", "is.double", "is.environment", "is.function",
      "is.integer", "is.language", "is.logical", "is.list", "is.matrix",
      "is.na", "is.nan", "is.null", "is.numeric", "is.object",
      "is.pairlist", "is.recursive", "is.single", "is.symbol",
      ## and also the members of the group generics from groupGeneric.Rd
      "abs", "sign", "sqrt", "floor", "ceiling", "trunc", "round", "signif",
      "exp", "log", "cos", "sin", "tan", "acos", "asin", "atan",
      "cosh", "sinh", "tanh", "acosh", "asinh", "atanh",
      "lgamma", "gamma", "gammaCody", "digamma", "trigamma",
      "tetragamma", "pentagamma", "cumsum", "cumprod", "cummax", "cummin",
      "+", "-", "*", "/", "^", "%%", "%/%", "&", "|", "!", "==", "!=",
      "<", "<=", ">=", ">",
      "all", "any", "sum", "prod", "max", "min", "range",
      "Arg", "Conj", "Im", "Mod", "Re"
      )
}

### ** .getNamespacePackageDepends

.getNamespacePackageDepends <- function(dir) {
    ## Names of the packages that the package in 'dir' depends on via
    ## its NAMESPACE file: the first element of each imports,
    ## importClasses and importMethods directive.  Returns a sorted
    ## character vector without duplicates.
    nsInfo <- parseNamespaceFile(basename(dir), dirname(dir))
    entries <- c(nsInfo$imports, nsInfo$importClasses,
                 nsInfo$importMethods)
    depends <- unlist(lapply(entries, function(e) e[[1]]))
    unique(sort(as.character(depends)))
}

### ** .getNamespaceS3methodsList

.getNamespaceS3methodsList <-
function(nsInfo)
{
    ## Get the list of the registered S3 methods for an 'nsInfo' object
    ## returned by parseNamespaceFile().  Each element of the result is
    ## a character vector of length 3: the names of the generic, the
    ## class, and the method (as a function).  A two-element spec gets
    ## the default method name "generic.class" appended.
    lapply(nsInfo$S3methods,
           function(spec) {
               if(length(spec) == 2) {
                   methodName <- paste(spec, collapse = ".")
                   spec <- c(spec, methodName)
               }
               spec
           })
}

### ** .getS3groupGenerics

.getS3groupGenerics <-
function()
{
    ## Names of the S3 group generics.
    c("Ops", "Math", "Summary", "Complex")
}

### ** .isPrimitive

.isPrimitive <-
function(fname, envir)
{
    ## Determine whether the object named 'fname' found in environment
    ## 'envir' is a primitive function, i.e. a function whose deparsed
    ## form contains a line starting with ".Primitive".
    obj <- get(fname, envir = envir, inherits = FALSE)
    if(!is.function(obj))
        return(FALSE)
    length(grep("^\\.Primitive", deparse(obj))) > 0
}

### ** .isS3Generic

.isS3Generic <-
function(fname, envir, mustMatch = TRUE)
{
    ## Determine whether object named 'fname' found in environment
    ## 'envir' is (to be considered) an S3 generic function.  Note,
    ## found *in* not found *from*, so envir does not have a default.
    ##
    ## If it is, does it despatch methods of fname?  We need that to
    ## look for possible methods as functions named fname.* ....
    ##
    ## Provided by LT with the following comments:
    ##
    ## This is tricky.  Figuring out what could possibly dispatch
    ## successfully some of the time is pretty much impossible given R's
    ## semantics.  Something containing a literal call to UseMethod is
    ## too broad in the sense that a UseMethod call in a local function
    ## doesn't produce a dispatch on the outer function ...
    ##
    ## If we use something like: a generic has to be
    ##      function(e) <UME>  # UME = UseMethod Expression
    ## with
    ##	    <UME> = UseMethod(...) |
    ##             if (...) <UME> [else ...] |
    ##             if (...) ... else <UME>
    ##             { ... <UME> ... }
    ## then a recognizer for UME might be as follows.
    ##
    ## Each isUME* helper returns the name of the generic that
    ## UseMethod() dispatches on, or "" if the expression is not a
    ## recognized UME.

    f <- get(fname, envir = envir, inherits = FALSE)
    if(!is.function(f)) return(FALSE)
    ## A '{' block is a UME if any of its top-level expressions is;
    ## the first match wins.
    isUMEbrace <- function(e) {
        for (ee in as.list(e[-1])) if (nchar(res <- isUME(ee))) return(res)
        ""
    }
    ## An 'if' call of length 3 has no else: check the then-branch
    ## (e[[3]]).  Length 4 has an else: check both branches.
    isUMEif <- function(e) {
        if (length(e) == 3) isUME(e[[3]])
        else {
            if (nchar(res <- isUME(e[[3]]))) res
            else if (nchar(res <- isUME(e[[4]]))) res
            else ""
        }

    }
    ## Dispatch on the call's head symbol: UseMethod(name) yields the
    ## generic's name; '{' and 'if' recurse; anything else is not a UME.
    isUME <- function(e) {
        if (is.call(e) && (is.name(e[[1]]) || is.character(e[[1]]))) {
            switch(as.character(e[[1]]),
                   UseMethod = as.character(e[[2]]),
                   "{" = isUMEbrace(e),
                   "if" = isUMEif(e),
                   "")
        } else ""
    }
    res <- isUME(body(f))
    ## With mustMatch, the name UseMethod() dispatches on must equal
    ## 'fname'; otherwise any successful dispatch counts.
    if(mustMatch) res == fname else nchar(res) > 0
}

### ** .loadPackageQuietly

.loadPackageQuietly <-
function(package, lib.loc)
{
    ## Load (reloading if already attached) 'package' from 'lib.loc',
    ## capturing all output and messages via .tryQuietly().  All QC
    ## functions use this for loading packages because R CMD check
    ## interprets any output as indicating a problem.
    .tryQuietly({
        searchName <- paste("package", package, sep = ":")
        pos <- match(searchName, search())
        ## Detach first so a stale copy is never left on the path.
        if(!is.na(pos))
            detach(pos = pos)
        library(package, lib.loc = lib.loc, character.only = TRUE,
                verbose = FALSE)
    })
}

### ** .makeFileExts

.makeFileExts <-
function(type = c("code", "data", "demo", "docs", "vignette"))
{
    ## Possible/recognized file extensions for a given file type.
    switch(type,
           code = c("R", "r", "S", "s", "q"),
           ## Keep in sync with the order given in base's data.Rd.
           data = c("R", "r",
                    "RData", "rdata", "rda",
                    "tab", "txt", "TXT", "csv", "CSV"),
           demo = c("R", "r"),
           docs = c("Rd", "rd"),
           ## Sweave sources: {R,r,S,s} x {nw,tex}, 'nw' variants first
           ## (same order as outer(c("R","r","S","s"), c("nw","tex"))).
           vignette = c("Rnw", "rnw", "Snw", "snw",
                        "Rtex", "rtex", "Stex", "stex"))
}

### ** .makeS3MethodsStopList

.makeS3MethodsStopList <-
function(package)
{
    ## Return a character vector with the names of the functions in
    ## @code{package} which 'look' like S3 methods, but are not.
    ## Using package=NULL returns all known examples
    ## (note: unlist() then prefixes each name with its package).

    ## round.POSIXt is a method for S3 and S4 group generics with
    ## deliberately different arg names.
    ## The data below is a curated per-package exception list; extend
    ## it rather than restructuring it.
    stopList <-
        list(base = c("all.equal", "all.names", "all.vars",
             "format.char", "format.info", "format.pval",
             "kappa.tri",
             "max.col",
             "print.atomic", "print.coefmat",
             "rep.int", "round.POSIXt"),
             Hmisc = "t.test.cluster",
             HyperbolicDist = "log.hist",
             MASS = c("frequency.polygon",
             "gamma.dispersion", "gamma.shape",
             "hist.FD", "hist.scott"),
             XML = "text.SAX",
             car = "scatterplot.matrix",
             graphics = c("boxplot.stats", "close.screen",
             "plot.design", "plot.new", "plot.window", "plot.xy",
             "split.screen"),
             hier.part = "all.regs",
             quadprog = c("solve.QP", "solve.QP.compact"),
             reposTools = "update.packages2",
             sm = "print.graph",
             stats = c("anova.lmlist", "fitted.values", "lag.plot",
             "influence.measures", "t.test"),
             utils = c("close.socket", "flush.console",
             "update.packages") 
             )
    if(is.null(package)) return(unlist(stopList))
    ## Unknown packages (or ones with no exceptions) yield character(0).
    thisPkg <- stopList[[package]]
    if(!length(thisPkg)) character(0) else thisPkg
}

### ** .packageApply

.packageApply <-
function(packages = NULL, FUN, ...)
{
    ## Apply FUN (and extra '...' arguments) to each of the given
    ## packages, returning a list named by package.  The default is
    ## all installed packages with high priority.
    if(is.null(packages))
        packages <- unique(installed.packages(priority = "high")[ , 1])
    results <- lapply(packages, FUN, ...)
    structure(results, names = packages)
}

### ** .sourceAssignments

.sourceAssignments <-
function(file, envir)
{
    ## Read and parse expressions from @code{file}, and then
    ## successively evaluate the top-level assignments in @code{envir}.
    ## Apart from only dealing with assignments, basically does the same
    ## as @code{sys.source(file, envir, keep.source = FALSE)}.
    oop <- options(keep.source = FALSE)
    on.exit(options(oop))
    assignmentSymbolLM <- as.symbol("<-")
    assignmentSymbolEq <- as.symbol("=")
    exprs <- try(parse(n = -1, file = file))
    ## Fix: on a parse failure try() returns a try-error object, which
    ## was previously iterated over as if it were parsed expressions.
    ## try() has already reported the error; there is nothing to run.
    if(inherits(exprs, "try-error") || length(exprs) == 0)
        return(invisible())
    for(e in exprs) {
        ## Only calls can be assignments; the is.call() guard also
        ## protects top-level bare literals/symbols from the e[[1]]
        ## extraction.
        if(is.call(e) &&
           (e[[1]] == assignmentSymbolLM || e[[1]] == assignmentSymbolEq))
            eval(e, envir)
    }
    invisible()
}

### ** .tryQuietly

.tryQuietly <-
function(expr)
{
    ## Try to run an expression, suppressing all 'output'.  In case of
    ## failure, stop with the error message.
    ## NOTE(review): if evaluation escapes try() (e.g. a user
    ## interrupt), the two sink() diversions are not undone -- on.exit()
    ## only restores the options; consider adding sink cleanup there.
    oop <- options(warn = 1)            # report warnings as they occur
    on.exit(options(oop))
    outConn <- file(open = "w")         # anonymous tempfile
    ## Divert both regular output and messages/warnings into the
    ## anonymous connection; released below in reverse order (the
    ## active sink of each type is popped by the bare sink() calls).
    sink(outConn, type = "output")
    sink(outConn, type = "message")
    yy <- try(expr, silent = TRUE)
    sink(type = "message")
    sink(type = "output")
    close(outConn)
    ## Re-raise the captured error so callers see the real message.
    if(inherits(yy, "try-error"))
        stop(yy)
    yy
}

### Local variables: ***
### mode: outline-minor ***
### outline-regexp: "### [*]+" ***
### End: ***
## Declare that this package defines no S4 generics, so method
## dispatch machinery can skip it (see 'Writing R Extensions').
.noGenerics <- TRUE

.onUnload <- function(libpath)
{
    ## Unload the package's compiled code when its namespace is
    ## unloaded.
    library.dynam.unload("tools", libpath)
}
