def make_plugin_json(primary_key, report_dir, plugin, plugin_out_dir, net_location,
                     url_root, username, runlevel=RunLevel.DEFAULT, blockId='',
                     block_dirs=None, instance_config=None):
    """Assemble the startplugin JSON payload handed to a plugin at launch.

    Arguments:
        primary_key     -- report primary key, forwarded to runinfo/datamanagement.
        report_dir      -- report directory containing ion_params_00.json.
        plugin          -- plugin descriptor dict (must have "name"; may have "userInput").
        plugin_out_dir  -- plugin output directory, forwarded to runinfo.
        net_location    -- server network location, forwarded to runinfo.
        url_root        -- URL root of the report, forwarded to runinfo.
        username        -- user launching the plugin, forwarded to runinfo.
        runlevel        -- plugin run level (default RunLevel.DEFAULT).
        blockId         -- block identifier for per-block runs (default '').
        block_dirs      -- list of block directories (default ["."]).
        instance_config -- per-instance plugin configuration (default {}).

    Returns:
        dict with runinfo/runplugin/expmeta/pluginconfig/globalconfig/plan/
        sampleinfo/datamanagement sections.
    """
    # Fix for mutable default arguments: fresh list/dict per call instead of
    # one object shared across all invocations.
    if block_dirs is None:
        block_dirs = ["."]
    if instance_config is None:
        instance_config = {}

    try:
        ion_params, warn = getparameter(
            os.path.join(report_dir, 'ion_params_00.json'))
    except Exception:
        # Narrowed from a bare except: (which also trapped SystemExit and
        # KeyboardInterrupt). Best-effort fallback to the minimal parser.
        ion_params = getparameter_minimal(
            os.path.join(report_dir, 'ion_params_00.json'))

    json_obj = {
        "runinfo": get_runinfo(ion_params, primary_key, report_dir, plugin,
                               plugin_out_dir, net_location, url_root, username,
                               runlevel, blockId),
        "runplugin": get_runplugin(ion_params, runlevel, blockId, block_dirs),
        "expmeta": get_expmeta(ion_params, report_dir),
        "pluginconfig": get_pluginconfig(plugin, instance_config),
        "globalconfig": get_globalconfig(),
        "plan": get_plan(ion_params),
        "sampleinfo": ion_params.get("sampleInfo", {}),
        "datamanagement": get_datamanagement(primary_key),
    }

    # IonReporterUploader_V1_0 compatibility shim
    if plugin["name"] == "IonReporterUploader_V1_0" and plugin.get(
            "userInput", ""):
        json_obj["plan"]["irworkflow"] = plugin["userInput"][0].get("Workflow")
    return json_obj
def make_plugin_json(
    primary_key,
    report_dir,
    plugin,
    plugin_out_dir,
    net_location,
    url_root,
    username,
    runlevel=RunLevel.DEFAULT,
    blockId="",
    block_dirs=None,
    instance_config=None,
):
    """Assemble the startplugin JSON payload handed to a plugin at launch.

    Arguments:
        primary_key     -- report primary key, forwarded to runinfo.
        report_dir      -- report directory containing ion_params_00.json.
        plugin          -- plugin descriptor dict (must have "name"; may have "userInput").
        plugin_out_dir  -- plugin output directory, forwarded to runinfo.
        net_location    -- server network location, forwarded to runinfo.
        url_root        -- URL root of the report, forwarded to runinfo.
        username        -- user launching the plugin, forwarded to runinfo.
        runlevel        -- plugin run level (default RunLevel.DEFAULT).
        blockId         -- block identifier for per-block runs (default "").
        block_dirs      -- list of block directories (default ["."]).
        instance_config -- per-instance plugin configuration (default {}).

    Returns:
        dict with runinfo/runplugin/expmeta/pluginconfig/globalconfig/plan/
        sampleinfo sections.
    """
    # Fix for mutable default arguments: fresh list/dict per call instead of
    # one object shared across all invocations.
    if block_dirs is None:
        block_dirs = ["."]
    if instance_config is None:
        instance_config = {}

    try:
        ion_params, warn = getparameter(os.path.join(report_dir, "ion_params_00.json"))
    except Exception:
        # Narrowed from a bare except: (which also trapped SystemExit and
        # KeyboardInterrupt). Best-effort fallback to the minimal parser.
        ion_params = getparameter_minimal(os.path.join(report_dir, "ion_params_00.json"))

    json_obj = {
        "runinfo": get_runinfo(
            ion_params,
            primary_key,
            report_dir,
            plugin,
            plugin_out_dir,
            net_location,
            url_root,
            username,
            runlevel,
            blockId,
        ),
        "runplugin": get_runplugin(ion_params, runlevel, blockId, block_dirs),
        "expmeta": get_expmeta(ion_params, report_dir),
        "pluginconfig": get_pluginconfig(plugin, instance_config),
        "globalconfig": get_globalconfig(),
        "plan": get_plan(ion_params),
        "sampleinfo": ion_params.get("sampleInfo", {}),
    }

    # IonReporterUploader_V1_0 compatibility shim
    if plugin["name"] == "IonReporterUploader_V1_0" and plugin.get("userInput", ""):
        json_obj["plan"]["irworkflow"] = plugin["userInput"][0].get("Workflow")
    return json_obj
# NOTE(review): fragment — this chunk begins mid parser.add_argument(...) call;
# the opening of that call lies outside the visible source.
                    help='alignment')
args = parser.parse_args()

# Echo the parsed arguments when -v was given (Python 2 print statement).
if args.verbose:
    print "BlockTLScript:", args

# At least one pipeline stage must be requested; otherwise show usage and exit.
if not args.do_sigproc and not args.do_basecalling and not args.do_alignment:
    parser.print_help()
    sys.exit(1)

#ensure we permit read/write for owner and group output files.
os.umask(0002)

# Print run header, read the experiment parameters (warn holds any parse
# warnings, printed below), and record the pipeline version.
blockprocessing.printheader()
env, warn = explogparser.getparameter()
print warn
blockprocessing.write_version()
sys.stdout.flush()
sys.stderr.flush()

#-------------------------------------------------------------
# Connect to Job Server
#-------------------------------------------------------------
try:
    jobserver = xmlrpclib.ServerProxy(
        "http://%s:%d" % (cluster_settings.JOBSERVER_HOST, cluster_settings.JOBSERVER_PORT),
        verbose=False, allow_none=True)
# NOTE(review): fragment ends inside this try: — the except clause lies beyond
# the visible source.
parser.add_argument('-z', '--do-zipping', dest='do_zipping', action='store_true', help='zipping') args = parser.parse_args() if args.verbose: print "MergeTLScript:",args if not args.do_sigproc and not args.do_basecalling and not args.do_zipping: parser.print_help() sys.exit(1) #ensure we permit read/write for owner and group output files. os.umask(0002) blockprocessing.printheader() env,warn = explogparser.getparameter() blockprocessing.write_version() sys.stdout.flush() sys.stderr.flush() #------------------------------------------------------------- # Connect to Job Server #------------------------------------------------------------- try: jobserver = xmlrpclib.ServerProxy("http://%s:%d" % (cluster_settings.JOBSERVER_HOST, cluster_settings.JOBSERVER_PORT), verbose=False, allow_none=True) primary_key_file = os.path.join(os.getcwd(),'primary.key') except: traceback.print_exc()
# Fragment of a paired-run (forward/reverse) report script: CLI parsing,
# environment setup, job-server connection, and the start of a status helper.
parser = argparse.ArgumentParser()
parser.add_argument('-v', dest='verbose', action='store_true')
parser.add_argument('-f', '--forward', required=False, dest='forwarddir', default='.',
                    help='forward run report')
parser.add_argument('-r', '--reverse', required=False, dest='reversedir', default='.',
                    help='reverse run report')
parser.add_argument('-o', '--output', required=False, dest='outdir', default='.',
                    help='output directory')
parser.add_argument('param', default='paramfile', help='param file')
args = parser.parse_args()
# NOTE(review): verbosity is forced on here, overriding the -v flag —
# confirm this is intentional (looks like leftover debugging).
args.verbose = True
if args.verbose:
    print "args:",args

# Print run header and read the experiment parameters; warn holds any parse
# warnings and is printed immediately.
blockprocessing.printheader()
env,warn = explogparser.getparameter()
print warn

#-------------------------------------------------------------
# Update Report Status to 'Started'
#-------------------------------------------------------------
try:
    jobserver = xmlrpclib.ServerProxy(
        "http://%s:%d" % (JOBSERVER_HOST, JOBSERVER_PORT),
        verbose=False, allow_none=True)
    debugging_cwd = os.getcwd()
except:
    traceback.print_exc()

def set_result_status(status):
    # Report the given status back to the job server, keyed by the
    # primary.key file in the current working directory.
    try:
        primary_key_file = os.path.join(os.getcwd(),'primary.key')
        jobserver.updatestatus(primary_key_file, status, True)
# NOTE(review): fragment ends inside this try: — the except clause and the
# rest of set_result_status lie beyond the visible source.