\name{nem.jackknife}
\alias{nem.jackknife}
\alias{print.nem.jackknife}

\title{Jackknife for nested effect models}
\description{
 Assesses the statistical stability of a network via a jackknife procedure: each S-gene is left out once and the network is reconstructed from the remaining ones. The relative frequency with which each edge appears across the n-1 jackknife samples is returned.
}
\usage{
nem.jackknife(D, thresh=0.5, inference="nem.greedy", models=NULL, type="mLL",
   para=NULL, hyperpara=NULL, Pe=NULL, Pm=NULL, Pmlocal=NULL,
   local.prior.size=length(unique(colnames(D))), local.prior.bias=1,
   triples.thrsh=0.5, lambda=0, delta=1, selEGenes=FALSE, verbose=TRUE)

\method{print}{nem.jackknife}(x, ...)
}
\arguments{
  \item{D}{data matrix with experiments in the columns (binary or continuous)}
  \item{thresh}{only edges appearing with a frequency higher than \code{thresh} are returned}
  \item{inference}{\code{search} to use exhaustive enumeration, \code{triples} for triple-based inference, \code{pairwise} for the pairwise heuristic, \code{ModuleNetwork} for module-based inference, \code{nem.greedy} for greedy hill climbing, \code{nem.greedyMAP} for alternating MAP optimization using log-odds or log p-value densities}
  \item{models}{a list of adjacency matrices for model search. If NULL, \code{enumerate.models} is used for exhaustive enumeration of all possible models.}
  \item{type}{\code{mLL}, \code{FULLmLL}, \code{CONTmLL}, \code{CONTmLLBayes} or \code{CONTmLLMAP}; see \code{nem}}
  \item{para}{vector of length two: false positive rate and false negative rate for binary data. Used by \code{mLL}}
  \item{hyperpara}{vector of length four: used by \code{FULLmLL()} for binary data}
  \item{Pe}{prior of effect reporter positions in the phenotypic hierarchy (same dimension as D)}
  \item{Pm}{prior over models (n x n matrix)}
  \item{Pmlocal}{local model prior for pairwise and triples learning. For pairwise learning it is generated by \code{local.model.prior()} according to the arguments \code{local.prior.size} and \code{local.prior.bias}}
  \item{local.prior.size}{prior expected number of edges in the graph (for pairwise learning)}
  \item{local.prior.bias}{bias towards double-headed edges. Default: 1 (no bias; for pairwise learning)}
  \item{triples.thrsh}{threshold for model averaging to combine triple models for each edge}
  \item{lambda}{regularization parameter to incorporate prior assumptions. Default: 0 (no regularization)}
  \item{delta}{regularization parameter for automated E-gene subset selection (CONTmLLRatio only)}
  \item{selEGenes}{automated E-gene subset selection (includes tuning of delta for CONTmLLRatio)}
  \item{verbose}{print progress messages? Default: TRUE}
  \item{x}{\code{nem.jackknife} object (for the print method)}
  \item{...}{other arguments to pass}
}
\details{
  Calls \code{\link{nem}} or \code{\link{nemModelSelection}} internally, depending on whether \code{lambda} is a vector and \code{Pm} is non-NULL.
}
\value{
  nem object with edge weights being the jackknife probabilities
}

\author{Holger Froehlich}

\seealso{\code{\link{nem.bootstrap}}, \code{\link{nem.consensus}}, \code{\link{nem}}, \code{\link{nemModelSelection}}}

\examples{
\dontrun{
   data("BoutrosRNAi2002")
   D <- BoutrosRNAiDiscrete[,9:16]
   p <- c(.13,.05)
   nem.jackknife(D, para=p)
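
   # A possible follow-up, sketched under the assumption that the result is a
   # nem object whose graphNEL in res$graph carries the jackknife edge
   # frequencies as edge weights (cf. the Value section):
   res <- nem.jackknife(D, para=p, thresh=0.5)
   jack.freq <- as(res$graph, "matrix")  # frequencies of the retained edges
   print(res)
}
}
\keyword{models}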