def __init__(self, dirn, data_format=None, qc_dir="qc", regex_pattern=None,
             version=None):
    """Create a new QCReporter instance

    Arguments:
      dirn: top-level directory for the run
      data_format: (optional) set format of files to acquire
      qc_dir: (optional) name of qc subdirectory holding QC outputs
        relative to the top-level (default is 'qc')
      regex_pattern: (optional) regular expression pattern used to
        select a subset of samples
      version: (optional) version of the QC reporter (default is the
        version of the bcftbx package)

    Raises:
      QCReporterError: if the QC output directory doesn't exist
    """
    # Basic information
    self.__dirn = os.path.abspath(dirn)
    self.__data_format = data_format
    self.__qc_dir = os.path.join(self.__dirn, qc_dir)
    # Version
    if version is None:
        self.__version = "bcftbx-%s" % get_version()
    else:
        self.__version = version
    # Fail early if the QC outputs directory is missing
    # NB: use the call form of 'raise' (the original used the
    # Python 2-only 'raise Exc, msg' statement form, which is a
    # syntax error under Python 3)
    if not os.path.isdir(self.__qc_dir):
        raise QCReporterError("QC dir %s not found" % self.qc_dir)
    # Run and experiment names
    # Assume that the experiment name is the current dir and the
    # run name is the parent directory
    self.__name = self.__dirn.split(os.sep)[-1]
    self.__run = self.__dirn.split(os.sep)[-2]
    # Base name and full name used for the output report files
    self.__report_base_name = "%s_report" % os.path.basename(self.qc_dir)
    self.__report_name = "%s_report.%s.%s" % (os.path.basename(self.qc_dir),
                                              self.__name,
                                              self.__run)
    # List of samples
    self.__samples = []
    # Regexp pattern for selecting sample subset
    self.__regex_pattern = regex_pattern
    # HTML document
    self.__html = self.__init_html()
# NOTE(review): this chunk is whitespace-mangled (an entire script driver
# collapsed onto one line) and appears TRUNCATED mid-statement — the final
# group.add_argument() call's help string is cut off. Preserved byte-for-byte
# rather than reconstructed, pending recovery of the original source; do not
# attempt to run as-is.
####################################################################### if __name__ == "__main__": # Initialise max_concurrent_jobs = 4 poll_interval = 30 max_total_jobs = 0 script = None data_dirs = [] input_type = "solid" email_addr = None ge_queue = None runner_type = "ge" # Set up command line parser p = argparse.ArgumentParser(version="%(prog)s "+get_version(), description= "Execute SCRIPT on data in each directory " "DIR. By default the SCRIPT is executed on " "each CSFASTA/QUAL file pair found in DIR, " "as 'SCRIPT CSFASTA QUAL'. Use the --input " "option to run SCRIPT on different types of " "data (e.g. FASTQ files). SCRIPT can be a " "quoted string to include command line " "options (e.g. 'run_solid2fastq.sh --gzip').") # Basic options group = p.add_argument_group("Basic Options") group.add_argument('--limit',action='store',dest='max_concurrent_jobs', type=int,default=max_concurrent_jobs, help="queue no more than MAX_CONCURRENT_JOBS at one "
#source_encoding = 'utf-8-sig'

# Document that serves as the root of the toctree.
master_doc = 'index'

# Project metadata.
project = u'genomics-bcftbx'
copyright = u'2015, Peter Briggs'

# Version information, substituted for |version| and |release| and used
# in various other places throughout the built documents. Both values
# come from the bcftbx package itself, so the docs always track the
# installed code.
from bcftbx import get_version

# The full version, including any alpha/beta/rc tags.
release = get_version()
# The short X.Y version (kept identical to the full version here).
version = release

# Language for content autogenerated by Sphinx; refer to the Sphinx
# documentation for the list of supported languages.
#language = None

# Override |today|: either set it directly to some non-false value...
#today = ''
# ...or supply a strftime format string via today_fmt.
#today_fmt = '%B %d, %Y'

# Patterns, relative to the source directory, matching files and
# directories to skip when looking for source files.
# NOTE(review): this chunk is whitespace-mangled (an entire script driver
# collapsed onto one line) and appears TRUNCATED mid-statement — it ends
# inside the argument list of a group.add_option() call. Preserved
# byte-for-byte rather than reconstructed, pending recovery of the original
# source; do not attempt to run as-is. (Uses the deprecated optparse module,
# unlike the argparse-based variant elsewhere in this file.)
if __name__ == "__main__": # Initialise max_concurrent_jobs = 4 poll_interval = 30 max_total_jobs = 0 script = None data_dirs = [] input_type = "solid" email_addr = None ge_queue = None runner_type = "ge" # Set up command line parser p = optparse.OptionParser(usage="%prog [options] SCRIPT DIR [ DIR ...]", version="%prog "+get_version(), description= "Execute SCRIPT on data in each directory DIR. By default " "the SCRIPT is executed on each CSFASTA/QUAL file pair " "found in DIR, as 'SCRIPT CSFASTA QUAL'. Use the --input " "option to run SCRIPT on different types of data (e.g. " "FASTQ files). SCRIPT can be a quoted string to include " "command line options (e.g. 'run_solid2fastq.sh --gzip').") # Basic options group = optparse.OptionGroup(p,"Basic Options") group.add_option('--limit',action='store',dest='max_concurrent_jobs',type='int', default=max_concurrent_jobs, help="queue no more than MAX_CONCURRENT_JOBS at one time (default %s)" % max_concurrent_jobs) group.add_option('--input',action='store',dest='input_type',default=input_type,