def _get_cfg(i_json_config_fn, i_length_cutoff_fn):
    """Load the JSON config, resolve 'length_cutoff' (via get_length_cutoff()
    and the cutoff file), and return the config dict."""
    cfg = json.loads(stricter_json(open(i_json_config_fn).read()))
    log.info('cfg=\n%s' % pprint.pformat(cfg))
    length_cutoff = int(cfg.get('length_cutoff', '0'))
    length_cutoff = get_length_cutoff(length_cutoff, i_length_cutoff_fn)
    cfg['length_cutoff'] = length_cutoff
    return cfg
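# NOTE: get_length_cutoff() is referenced above but not defined in this
# section. The sketch below is a hypothetical illustration (an assumption
# based on how it is called), not the actual helper: when the configured
# cutoff is not positive, fall back to the value recorded in the cutoff file.
def get_length_cutoff(length_cutoff, i_length_cutoff_fn):
    if length_cutoff <= 0:
        with open(i_length_cutoff_fn) as ifs:
            length_cutoff = int(ifs.read().strip())
    return length_cutoff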
def run_hgap_prepare(input_files, output_files, options):
    """Generate a config-file from options.
    """
    say('options to run_hgap_prepare:\n{}'.format(pprint.pformat(options)))
    i_subreadset_fn, = input_files
    o_hgap_cfg_fn, o_logging_cfg_fn, o_log_fn = output_files
    run_dir = os.path.dirname(o_hgap_cfg_fn)
    symlink(os.path.join(run_dir, 'stderr'), o_log_fn)

    # This will be the cfg we pass to hgap_run.
    all_cfg = collections.defaultdict(lambda: collections.defaultdict(str))

    # Get grid options, for job-distribution.
    update_for_grid(all_cfg, run_dir)

    # Set some other falcon options, based on hgap options.
    update_falcon(all_cfg)

    # Override from pbsmrtpipe config/preset.xml.
    all_cfg[OPTION_SECTION_FALCON]['genome_size'] = options[TASK_HGAP_GENOME_LENGTH].strip()
    all_cfg[OPTION_SECTION_FALCON]['length_cutoff'] = options[TASK_HGAP_SEED_LENGTH_CUTOFF].strip()
    all_cfg[OPTION_SECTION_FALCON]['seed_coverage'] = options[TASK_HGAP_SEED_COVERAGE].strip()
    cfg_json = options[TASK_HGAP_OPTIONS].strip()
    if not cfg_json:
        cfg_json = '{}'
    override_cfg = json.loads(stricter_json(cfg_json))
    update2(all_cfg, override_cfg)

    # Get options from pbsmrtpipe.
    pbsmrtpipe_opts = get_pbsmrtpipe_opts(run_dir)
    if OPTION_SECTION_PBSMRTPIPE not in all_cfg:
        all_cfg[OPTION_SECTION_PBSMRTPIPE] = dict()
    pbsmrtpipe_opts.update(all_cfg[OPTION_SECTION_PBSMRTPIPE])
    all_cfg[OPTION_SECTION_PBSMRTPIPE] = pbsmrtpipe_opts

    # Dump all_cfg.
    say('Dumping to {}'.format(repr(o_hgap_cfg_fn)))
    dump_as_json(all_cfg, open(o_hgap_cfg_fn, 'w'))

    # Get logging cfg.
    logging_cfg = DEFAULT_LOGGING_CFG

    # Dump logging cfg.
    say('Dumping to {}'.format(repr(o_logging_cfg_fn)))
    dump_as_json(logging_cfg, open(o_logging_cfg_fn, 'w'))
def run_hgap_prepare(input_files, output_files, options):
    """Generate a config-file from options.
    """
    say('options to run_hgap_prepare:\n{}'.format(pprint.pformat(options)))
    i_subreadset_fn, = input_files
    o_hgap_cfg_fn, o_logging_cfg_fn, o_log_fn = output_files
    run_dir = os.path.dirname(o_hgap_cfg_fn)
    symlink(os.path.join(run_dir, 'stderr'), o_log_fn)

    # This will be the cfg we pass to hgap_run.
    all_cfg = collections.defaultdict(lambda: collections.defaultdict(str))

    # Get grid options, for job-distribution.
    update_for_grid(all_cfg, run_dir)

    # Set some other falcon options, based on hgap options.
    update_falcon(all_cfg)

    # Override from pbsmrtpipe config/preset.xml.
    all_cfg[OPTION_SECTION_FALCON]['genome_size'] = options[TASK_HGAP_GENOME_LENGTH].strip()
    all_cfg[OPTION_SECTION_FALCON]['length_cutoff'] = options[TASK_HGAP_SEED_LENGTH_CUTOFF].strip()
    all_cfg[OPTION_SECTION_FALCON]['seed_coverage'] = options[TASK_HGAP_SEED_COVERAGE].strip()
    cfg_json = options[TASK_HGAP_OPTIONS].strip()
    if not cfg_json:
        cfg_json = '{}'
    override_cfg = json.loads(stricter_json(cfg_json))
    update2(all_cfg, override_cfg)
    update_pwatcher(all_cfg)

    # Get options from pbsmrtpipe.
    pbsmrtpipe_opts = get_pbsmrtpipe_opts(run_dir)
    if OPTION_SECTION_PBSMRTPIPE not in all_cfg:
        all_cfg[OPTION_SECTION_PBSMRTPIPE] = dict()
    pbsmrtpipe_opts.update(all_cfg[OPTION_SECTION_PBSMRTPIPE])
    all_cfg[OPTION_SECTION_PBSMRTPIPE] = pbsmrtpipe_opts

    # Dump all_cfg.
    say('Dumping to {}'.format(repr(o_hgap_cfg_fn)))
    dump_as_json(all_cfg, open(o_hgap_cfg_fn, 'w'))

    # Get logging cfg.
    logging_cfg = DEFAULT_LOGGING_CFG

    # Dump logging cfg.
    say('Dumping to {}'.format(repr(o_logging_cfg_fn)))
    dump_as_json(logging_cfg, open(o_logging_cfg_fn, 'w'))
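# NOTE: update2() is not defined in this section. Below is a minimal sketch of
# the kind of section-wise merge it appears to perform (an assumption, not the
# actual implementation): copy every key of every override section onto the
# nested defaultdict, so JSON overrides win over the defaults set above.
def update2(all_cfg, override_cfg):
    for section, section_cfg in override_cfg.items():
        for key, val in section_cfg.items():
            all_cfg[section][key] = val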
def write_report_from_stats(stats_ifs, report_ofs):
    """This is used by HGAP5, task_run_hgap.py.
    """
    stats = json.loads(stricter_json(stats_ifs.read()))
    report = produce_report(**stats)
    report_ofs.write(report)
def write_report_from_stats(stats_ifs, report_ofs):
    """Like the variant above, but serialize the report object to JSON
    before writing it."""
    stats = json.loads(stricter_json(stats_ifs.read()))
    report = produce_report(**stats)
    content = report.to_json()
    report_ofs.write(content)
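# Hypothetical usage of write_report_from_stats(); the file names below are
# illustrative only and not taken from the pipeline.
if __name__ == '__main__':
    with open('preassembly_stats.json') as stats_ifs:
        with open('preassembly_report.json', 'w') as report_ofs:
            write_report_from_stats(stats_ifs, report_ofs)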