def __init__(self, input_table_filename, **extra_args):
    # setup output references
    # self.result_dir = result_dir
    self.output_dir = extra_args['output_dir']
    self.output_filename = 'result-%s.Rdata' % extra_args['index_chunk']
    self.output_file = os.path.join(self.output_dir,self.output_filename)
    outputs = [("./output.Rdata",self.output_filename)]

    # setup input references
    inputs = dict()
    inputs[input_table_filename] = "./input.txt"

    arguments = "Rscript --vanilla "

    # check the optional inputs
    if 'driver_script' in extra_args:
        inputs[extra_args['driver_script']] = "./run.R"
        arguments += "run.R "

    arguments += "input.txt output.Rdata"

    Application.__init__(
        self,
        arguments = arguments,
        inputs = inputs,
        outputs = outputs,
        stdout = 'gbugs.log',
        join=True,
        **extra_args)
def __init__(self, parameter_string, **extra_args):
    executables = []
    inputs = dict()
    outputs = dict()

    if 'binary' in extra_args:
        binary_name = os.path.basename(extra_args['binary'])
        arguments = "./%s" % binary_name
        inputs[extra_args['binary']] = binary_name
        executables.append(binary_name)
    else:
        arguments = DEFAULT_EPICELL_BINARY

    for param in parameter_string.strip().split(','):
        arguments += " %s " % param

    # Set output
    outputs[DEFAULT_REMOTE_OUTPUT_FOLDER] = os.path.basename(DEFAULT_REMOTE_OUTPUT_FOLDER)
    arguments += DEFAULT_REMOTE_OUTPUT_FOLDER

    gc3libs.log.debug("Creating application for executing: %s", arguments)

    Application.__init__(
        self,
        arguments = arguments,
        inputs = inputs,
        outputs = gc3libs.ANY_OUTPUT,
        stdout = 'gepecell.log',
        join=True,
        executables = executables,
        **extra_args)
def __init__(self, subject, input_data_folder, **extra_args):
    self.output_dir = extra_args['output_dir']
    self.result_dir = extra_args['result_dir']

    inputs = dict()
    outputs = dict()
    executables = []

    # execution wrapper needs to be added anyway
    gkjpd_wrapper_sh = resource_filename(Requirement.parse("gc3pie"),
                                         "gc3libs/etc/gkjpd.sh")
    inputs[gkjpd_wrapper_sh] = os.path.basename(gkjpd_wrapper_sh)

    inputs[input_data_folder] = './input'

    _command = "./%s ./input %s " % (os.path.basename(gkjpd_wrapper_sh), subject)

    # arguments = "matlab -nodesktop -nosplash -nodisplay -nodesktop "\
    #             "-r \"addpath(\'/home/gc3-user/spm12\'); addpath(\'./input\'); preprocessing_s3it(\'./input\',\'%s\'); quit\""\
    #             % subject

    # Set output
    Application.__init__(
        self,
        arguments = _command,
        inputs = inputs,
        outputs = gc3libs.ANY_OUTPUT,
        stdout = 'gkjpd.log',
        join=True,
        executables = "./%s" % os.path.basename(gkjpd_wrapper_sh),
        **extra_args)
def test_io_spec_to_dict_unicode():
    import gc3libs.url
    Application._io_spec_to_dict(
        gc3libs.url.UrlKeyDict, {
            u'/tmp/\u0246': u'\u0246',
            '/tmp/b/': 'b'},
        True)
def __init__(self, host, username, password, experiment, plate,
             well_name, x, y, input_path, output_path, fname_stem):
    out = 'vol_img_{w}_x{x:02d}_y{y:02d}'.format(x=x, y=y, w=well_name)
    output_dir = os.path.join(experiment, out)
    Application.__init__(
        self,
        arguments=[
            './generate_volume_image_with_offline_beads.py',
            '-H', host,
            '-u', username,
            '--password', password,
            '-e', experiment,
            '--plate_name', plate,
            '--well_name', well_name,
            '-x', x,
            '-y', y,
            '--input_path', input_path,
            '--output_path', output_path,
            '--fname_stem', fname_stem],
        inputs=['generate_volume_image_with_offline_beads.py'],
        outputs=[],
        output_dir=output_dir,
        stdout='stdout.txt',
        stderr='stderr.txt',
        requested_memory=3750 * MB
    )
def __init__(self, bids_sub, dwi_folder, fs_folder_list, dmrirc_sub_file, **extra_args):
    inputs = dict()
    outputs = dict()

    self.output_dir = extra_args['output_dir']

    # List of folders to copy to remote
    inputs[dwi_folder] = DEFAULT_REMOTE_DWI_FOLDER
    for fs in fs_folder_list:
        inputs[fs] = os.path.join(DEFAULT_REMOTE_FS_FOLDER, os.path.basename(fs))
    inputs[dmrirc_sub_file] = DEFAULT_REMOTE_DMRIRC_FILE

    wrapper = resource_filename(Requirement.parse("gc3pie"),
                                "gc3libs/etc/gtraclong_wrapper.py")
    inputs[wrapper] = os.path.basename(wrapper)

    arguments = "./%s %s" % (
        inputs[wrapper],
        os.path.join(DEFAULT_REMOTE_INPUT_FOLDER, os.path.basename(dmrirc_sub_file)))

    # check whether requested memory and walltime are lower than the recommended defaults
    self._check_requests(bids_sub, extra_args)

    Application.__init__(
        self,
        arguments=arguments,
        inputs=inputs,
        outputs=[DEFAULT_REMOTE_OUTPUT_FOLDER],
        stdout='gtraclong.log',
        join=True,
        **extra_args)
def __init__(self, vcf_group, index, **extra_args):
    self.S1_output = extra_args['S1_output']
    extra_args['requested_memory'] = extra_args['S1_memory']

    inputs = dict()
    outputs = dict()
    executables = []

    # execution wrapper needs to be added anyway
    gatks1_wrapper_sh = resource_filename(Requirement.parse("gc3pie"),
                                          "gc3libs/etc/gatks1.sh")
    inputs[gatks1_wrapper_sh] = os.path.basename(gatks1_wrapper_sh)

    arguments = "./%s ./input %s -m %s" % (
        os.path.basename(gatks1_wrapper_sh),
        index,
        str(extra_args['requested_memory'].amount(conv=int)))

    for vcf in vcf_group:
        inputs[vcf] = os.path.join('./input', os.path.basename(vcf))

    # Set output
    self.vcf_output_filename = "./combined%d.g.vcf" % index
    outputs[self.vcf_output_filename] = os.path.join(
        self.S1_output, self.vcf_output_filename)

    Application.__init__(self,
                         arguments=arguments,
                         inputs=inputs,
                         outputs=outputs,
                         stdout='gatks1.log',
                         join=True,
                         executables=os.path.basename(gatks1_wrapper_sh),
                         **extra_args)
def __init__(self, network_data_path, benchmark_name, benchmark_file, **extra_args):
    self.output_dir = extra_args['output_dir']

    inputs = dict()
    inputs[network_data_path] = './network_data/'
    inputs[benchmark_file] = os.path.basename(benchmark_file)

    # adding wrapper main script
    gbenchmark_wrapper = resource_filename(Requirement.parse("gc3pie"),
                                           "gc3libs/etc/gbenchmark_wrapper_allinone.py")
    inputs[gbenchmark_wrapper] = "gbenchmark_wrapper.py"

    arguments = "python gbenchmark_wrapper.py %s %s " % (inputs[benchmark_file],
                                                         inputs[network_data_path])

    # Take full node from 'benchmark' flavor
    extra_args['requested_cores'] = 8

    Application.__init__(
        self,
        arguments = arguments,
        inputs = inputs,
        outputs = ["./results"],
        stdout = 'gbenchmark.log',
        join=True,
        executables = ['gbenchmark_wrapper.py', inputs[benchmark_file]],
        **extra_args)
def __init__(self, input_function, input_dataSet, **extra_args):
    self.output_dir = extra_args['output_dir']
    self.input_function = input_function

    inputs = dict()
    outputs = dict()

    gtree_wrapper_sh = resource_filename(Requirement.parse("gc3pie"),
                                         "gc3libs/etc/gtree_wrapper.sh")
    inputs[gtree_wrapper_sh] = os.path.basename(gtree_wrapper_sh)
    inputs[input_function] = "%s" % os.path.basename(input_function)
    inputs[input_dataSet] = "%s" % os.path.basename(input_dataSet)

    arguments = "./%s %s %s" % (inputs[gtree_wrapper_sh],
                                inputs[input_function],
                                inputs[input_dataSet])

    Application.__init__(
        self,
        arguments = arguments,
        inputs = inputs,
        outputs = ["./results"],
        stdout = 'gtree.log',
        join=True,
        executables = "./%s" % os.path.basename(input_function),
        **extra_args)
def __init__(self, input_folder, param_file, **extra_args):
    self.output_dir = extra_args['results_dir']

    inputs = dict()
    outputs = dict()
    executables = []

    self.jobname = extra_args['jobname']

    # Check if binary to be executed is provided as part of input arguments
    if 'sisp' in extra_args:
        inputs[os.path.abspath(extra_args["sisp"])] = "./sisp"
        executables.append("./sisp")

    arguments = "./sisp"

    inputs[param_file] = PARAMETERS_FILE

    # Set output
    outputs['output/'] = os.path.join(self.output_dir,"./output")

    Application.__init__(
        self,
        arguments = arguments,
        inputs = inputs,
        outputs = outputs,
        stdout = 'gsisp.log',
        join=True,
        executables = executables,
        **extra_args)
def __init__(self, input_table_filename, **extra_args):
    # setup output references
    # self.result_dir = result_dir
    self.output_dir = extra_args['output_dir']
    self.output_filename = 'result-%s.Rdata' % extra_args['index_chunk']
    self.output_file = os.path.join(self.output_dir, self.output_filename)
    outputs = [("./output.Rdata", self.output_filename)]

    # setup input references
    inputs = dict()
    inputs[input_table_filename] = "./input.txt"

    arguments = "Rscript --vanilla "

    # check the optional inputs
    if 'driver_script' in extra_args:
        inputs[extra_args['driver_script']] = "./run.R"
        arguments += "run.R "

    arguments += "input.txt output.Rdata"

    Application.__init__(self,
                         arguments=arguments,
                         inputs=inputs,
                         outputs=outputs,
                         stdout='gbugs.log',
                         join=True,
                         **extra_args)
def __init__(self, matlab_function, parameter_list, tarfile=None, **extra_args):
    inputs = dict()
    outputs = dict()

    # execution wrapper needs to be added anyway
    wrapper = resource_filename(Requirement.parse("gc3pie"),
                                "gc3libs/etc/gcrashdetect_wrapper.sh")
    inputs[wrapper] = "./wrapper.sh"

    arguments = "./wrapper.sh %s " % matlab_function

    for param in parameter_list:
        arguments += " %s " % param

    if tarfile:
        inputs[tarfile] = os.path.basename(tarfile)
        arguments += "-s %s " % inputs[tarfile]

    arguments += DEFAULT_REMOTE_OUTPUT_FOLDER

    Application.__init__(self,
                         arguments=arguments,
                         inputs=inputs,
                         outputs=[DEFAULT_REMOTE_OUTPUT_FOLDER],
                         stdout='gcrashdetect.log',
                         join=True,
                         executables="./wrapper.sh",
                         **extra_args)
def __init__(self, input_folder, param_file, **extra_args):
    self.output_dir = extra_args['results_dir']

    inputs = dict()
    outputs = dict()
    executables = []

    self.jobname = extra_args['jobname']

    # Check if binary to be executed is provided as part of input arguments
    if 'sisp' in extra_args:
        inputs[os.path.abspath(extra_args["sisp"])] = "./sisp"
        executables.append("./sisp")

    arguments = "./sisp"

    inputs[param_file] = PARAMETERS_FILE

    # Set output
    outputs['output/'] = os.path.join(self.output_dir, "./output")

    Application.__init__(self,
                         arguments=arguments,
                         inputs=inputs,
                         outputs=outputs,
                         stdout='gsisp.log',
                         join=True,
                         executables=executables,
                         **extra_args)
def __init__(self, hunting, **extra_args):
    """
    Remote execution: ABM [value]
    """
    inputs = dict()
    outputs = dict()
    executables = list()

    if 'binary' in extra_args:
        remote_bin = os.path.basename(extra_args['binary'])
        arguments = "./%s " % remote_bin
        inputs[extra_args['binary']] = remote_bin
        executables.append(remote_bin)
    else:
        arguments = DEFAULT_REMOTE_BIN

    arguments += " %d " % hunting

    Application.__init__(self,
                         arguments=arguments,
                         inputs=inputs,
                         outputs=gc3libs.ANY_OUTPUT,
                         stdout='gabm.log',
                         join=True,
                         executables=executables,
                         **extra_args)
def __init__(self, arguments, inputs=None, *more_args, **extra_args):
    # convert to string here as we want to compare args to file names
    arguments = [str(x) for x in arguments]

    # create `inputs` as would be done in the `Application` class ctor
    if inputs is not None:
        inputs = Application._io_spec_to_dict(gc3libs.url.UrlKeyDict, inputs, force_abs=True)
    else:
        inputs = gc3libs.url.UrlKeyDict()

    # scan command-line for things that look like actual files
    executable = arguments[0]
    if os.path.exists(executable):
        executable_name = os.path.basename(executable)
        inputs[executable] = executable_name
        arguments[0] = './' + executable_name
    for i, arg in enumerate(arguments[1:], 1):
        if arg not in inputs and os.path.exists(arg):
            inputs[arg] = os.path.basename(arg)
            arguments[i] = os.path.basename(arg)

    # recurse into superclass ctor
    Application.__init__(self, arguments, inputs, *more_args, **extra_args)
def __init__(self, model_index, seed, **extra_args):
    self.output_dir = extra_args['output_dir']
    self.result_dir = extra_args['result_dir']

    inputs = dict()
    outputs = dict()
    executables = []

    if 'run_binary' in extra_args:
        inputs[os.path.abspath(extra_args['run_binary'])] = './par_recovery'
        arguments = "./par_recovery "
        executables.append('./par_recovery')
    else:
        arguments = "par_recovery "

    # self.output_filename = "ParRecovery_Genmodel%s_.mat" % str(model_index)
    # outputs[self.output_filename] = os.path.join(self.result_dir,
    #     "ParRecovery_Genmodel%s_%s.mat" % (str(model_index), extra_args['repetition']))
    self.output_filename = "ParRecovery_Genmodel%s_%s.mat" % (str(model_index), extra_args['repetition'])
    outputs["ParRecovery_Genmodel%s_.mat" % str(model_index)] = self.output_filename

    arguments += "%s %s" % (str(model_index), str(seed))

    Application.__init__(
        self,
        arguments = arguments,
        inputs = inputs,
        outputs = outputs,
        stdout = 'gprecovery.log',
        join=True,
        executables = executables,
        **extra_args)
def __init__(self, input_data, **extra_args):
    # setup input references
    inputs = dict()
    inputs[input_data] = "./input.txt"

    # adding wrapper main script
    gnlp_wrapper_sh = resource_filename(Requirement.parse("gc3pie"),
                                        "gc3libs/etc/gnlp_wrapper.py")
    inputs[gnlp_wrapper_sh] = "./wrapper.py"

    arguments = "./wrapper.py ./input.txt ./output.txt"

    outputs = dict()
    outputs["./output.txt"] = extra_args["output_file"]

    Application.__init__(
        self,
        arguments=arguments,
        inputs=inputs,
        outputs=outputs,
        stdout="gnlp.log",
        stderr="gnlp.err",
        executables="./wrapper.py",
        **extra_args
    )
def __init__(self, input_data, **extra_args):
    # setup input references
    inputs = dict()
    inputs[input_data] = "./input.txt"

    # adding wrapper main script
    gnlp_wrapper_sh = resource_filename(Requirement.parse("gc3pie"),
                                        "gc3libs/etc/gnlp_wrapper.py")
    inputs[gnlp_wrapper_sh] = "./wrapper.py"

    arguments = "./wrapper.py ./input.txt ./output.txt"

    outputs = dict()
    outputs['./output.txt'] = extra_args['output_file']

    Application.__init__(self,
                         arguments=arguments,
                         inputs=inputs,
                         outputs=outputs,
                         stdout='gnlp.log',
                         stderr='gnlp.err',
                         executables="./wrapper.py",
                         **extra_args)
def __init__(self, subject, subject_folder, dmrirc, **extra_args):
    self.output_dir = extra_args['output_dir']

    inputs = dict()
    outputs = dict()

    inputs[subject_folder] = DEFAULT_REMOTE_INPUT_FOLDER

    # arguments = "trac-all -prep -c %s -debug" % dmrirc
    # arguments = DEFAULT_TRAC_COMMAND.format(dmrirc=dmrirc)

    wrapper = resource_filename(Requirement.parse("gc3pie"),
                                "gc3libs/etc/gtrac_wrapper.py")
    inputs[wrapper] = os.path.basename(wrapper)

    arguments = "./%s %s" % (inputs[wrapper], dmrirc)

    if extra_args['requested_memory'] < DEFAULT_MEMORY:
        gc3libs.log.warning("GtracApplication for subject %s running with memory allocation "
                            "'%d GB' lower than the suggested one: '%d GB'"
                            % (subject,
                               extra_args['requested_memory'].amount(unit=GB),
                               DEFAULT_MEMORY.amount(unit=GB)))

    Application.__init__(self,
                         arguments=arguments,
                         inputs=inputs,
                         outputs=[DEFAULT_REMOTE_OUTPUT_FOLDER],
                         stdout='gtrac.log',
                         join=True,
                         **extra_args)
def __init__(self, input_phenotype, input_chromosom, **extra_args):
    inputs = dict()
    outputs = dict()

    output_dir = "./results"
    outputs[output_dir] = output_dir

    # execution wrapper needs to be added anyway
    gcombi_wrapper_sh = resource_filename(Requirement.parse("gc3pie"),
                                          "gc3libs/etc/gcombi.sh")
    inputs[gcombi_wrapper_sh] = os.path.basename(gcombi_wrapper_sh)

    inputs[input_phenotype] = os.path.basename(input_phenotype)
    inputs[input_chromosom] = os.path.basename(input_chromosom)

    command = "./%s ./%s ./%s" % (os.path.basename(gcombi_wrapper_sh),
                                  os.path.basename(input_phenotype),
                                  os.path.basename(input_chromosom))

    Application.__init__(
        self,
        arguments = command,
        inputs = inputs,
        outputs = outputs,
        stdout = 'gcombi.log',
        join=True,
        executables = "./%s" % os.path.basename(gcombi_wrapper_sh),
        **extra_args)
def __init__(self, subject, input_data_folder, **extra_args):
    self.output_dir = extra_args['output_dir']
    self.result_dir = extra_args['result_dir']

    inputs = dict()
    outputs = dict()
    executables = []

    # execution wrapper needs to be added anyway
    gkjpd_wrapper_sh = resource_filename(Requirement.parse("gc3pie"),
                                         "gc3libs/etc/gkjpd.sh")
    inputs[gkjpd_wrapper_sh] = os.path.basename(gkjpd_wrapper_sh)

    inputs[input_data_folder] = './input'

    _command = "./%s ./input %s " % (os.path.basename(gkjpd_wrapper_sh), subject)

    # arguments = "matlab -nodesktop -nosplash -nodisplay -nodesktop "\
    #             "-r \"addpath(\'/home/gc3-user/spm12\'); addpath(\'./input\'); preprocessing_s3it(\'./input\',\'%s\'); quit\""\
    #             % subject

    # Set output
    Application.__init__(self,
                         arguments=_command,
                         inputs=inputs,
                         outputs=gc3libs.ANY_OUTPUT,
                         stdout='gkjpd.log',
                         join=True,
                         executables="./%s" % os.path.basename(gkjpd_wrapper_sh),
                         **extra_args)
def __init__(self, subject_name, input_nifti, freesurfer_steps, **extra_args):
    output_dir = DEFAULT_REMOTE_OUTPUT_FOLDER + subject_name + ".crossTP1"

    inputs = dict()
    outputs = dict()

    gfsurfer_wrapper = resource_filename(Requirement.parse("gc3pie"),
                                         "gc3libs/etc/gfsurfer_wrapper.py")
    inputs[gfsurfer_wrapper] = os.path.basename(gfsurfer_wrapper)
    inputs[input_nifti] = os.path.basename(input_nifti)

    outputs[output_dir] = output_dir

    arguments = "./%s %s %s %s" % (inputs[gfsurfer_wrapper],
                                   subject_name,
                                   os.path.basename(input_nifti),
                                   DEFAULT_REMOTE_OUTPUT_FOLDER)

    Application.__init__(
        self,
        arguments = arguments,
        inputs = inputs,
        outputs = outputs,
        stdout = 'gfsurfer.log',
        join=True,
        executables = [os.path.basename(gfsurfer_wrapper)],
        **extra_args)
def __init__(self, subject, subject_folder, **extra_args):
    self.output_dir = extra_args['output_dir']

    inputs = dict()
    outputs = dict()

    gnift_wrapper_sh = resource_filename(Requirement.parse("gc3pie"),
                                         "gc3libs/etc/gnift_wrapper.py")
    inputs[gnift_wrapper_sh] = os.path.basename(gnift_wrapper_sh)

    inputs[subject_folder] = DEFAULT_REMOTE_INPUT_FOLDER

    arguments = "./%s %s %s %s" % (inputs[gnift_wrapper_sh],
                                   subject,
                                   DEFAULT_REMOTE_INPUT_FOLDER,
                                   DEFAULT_REMOTE_OUTPUT_FOLDER)

    Application.__init__(
        self,
        arguments = arguments,
        inputs = inputs,
        outputs = [DEFAULT_REMOTE_OUTPUT_FOLDER],
        stdout = 'gnift.log',
        join=True,
        **extra_args)
def __init__(self, parameter, **extra_args):
    inputs = dict()
    outputs = dict()

    # execution wrapper needs to be added anyway
    gsceuafish_wrapper_sh = resource_filename(Requirement.parse("gc3pie"),
                                              "gc3libs/etc/gsceuafish.sh")
    inputs[gsceuafish_wrapper_sh] = os.path.basename(gsceuafish_wrapper_sh)

    _command = "./%s %s " % (os.path.basename(gsceuafish_wrapper_sh),
                             ' '.join(str(x) for x in parameter))

    if "main_loop_folder" in extra_args:
        inputs[extra_args['main_loop_folder']] = './data/'

    Application.__init__(self,
                         arguments=_command,
                         inputs=inputs,
                         outputs=gc3libs.ANY_OUTPUT,
                         stdout='gsceuafish.log',
                         join=True,
                         executables="./%s" % os.path.basename(gsceuafish_wrapper_sh),
                         **extra_args)
def __init__(self, param_file, data_file, **extra_args):
    self.output_dir = extra_args['output_dir']
    self.result_dir = extra_args['result_dir']

    inputs = dict()
    outputs = dict()
    executables = []

    # Check if binary to be executed is provided as part of input arguments
    if 'run_binary' in extra_args:
        inputs[os.path.abspath(extra_args["run_binary"])] = "estimate_DCM.m"

    arguments = "matlab -nodesktop -nosplash -nodisplay -r \"estimate_DCM " \
                "%s %s results;quit;\"" % (os.path.basename(param_file),
                                           os.path.basename(data_file))

    inputs[param_file] = os.path.basename(param_file)
    inputs[data_file] = os.path.basename(data_file)

    # Set output
    outputs['results/'] = 'results/'

    Application.__init__(
        self,
        arguments = arguments,
        inputs = inputs,
        outputs = outputs,
        stdout = 'gscr.log',
        join=True,
        executables = executables,
        **extra_args)
def __init__(self, input_folder, **extra_args):
    self.output_dir = extra_args['output_dir']
    self.input_folder = input_folder

    inputs = dict()
    outputs = dict()

    gndn_wrapper_sh = resource_filename(Requirement.parse("gc3pie"),
                                        "gc3libs/etc/gndn_wrapper.sh")
    inputs[gndn_wrapper_sh] = os.path.basename(gndn_wrapper_sh)

    inputs[input_folder] = "%s/" % os.path.basename(input_folder)
    outputs[os.path.join(os.path.basename(input_folder), "results")] = "results/"

    arguments = "./%s %s" % (inputs[gndn_wrapper_sh], inputs[input_folder])

    Application.__init__(self,
                         arguments=arguments,
                         inputs=inputs,
                         outputs=outputs,
                         stdout='gndn.log',
                         join=True,
                         executables="./%s" % os.path.basename(input_folder),
                         **extra_args)
def __init__(self, subject, subject_folder, dmrirc, **extra_args):
    self.output_dir = extra_args['output_dir']

    inputs = dict()
    outputs = dict()

    inputs[subject_folder] = DEFAULT_REMOTE_INPUT_FOLDER

    # arguments = "trac-all -prep -c %s -debug" % dmrirc
    # arguments = DEFAULT_TRAC_COMMAND.format(dmrirc=dmrirc)

    wrapper = resource_filename(Requirement.parse("gc3pie"),
                                "gc3libs/etc/gtrac_wrapper.py")
    inputs[wrapper] = os.path.basename(wrapper)

    arguments = "./%s %s" % (inputs[wrapper], dmrirc)

    if extra_args['requested_memory'] < DEFAULT_MEMORY:
        gc3libs.log.warning("GtracApplication for subject %s running with memory allocation "
                            "'%d GB' lower than the suggested one: '%d GB'"
                            % (subject,
                               extra_args['requested_memory'].amount(unit=GB),
                               DEFAULT_MEMORY.amount(unit=GB)))

    Application.__init__(
        self,
        arguments = arguments,
        inputs = inputs,
        outputs = [DEFAULT_REMOTE_OUTPUT_FOLDER],
        stdout = 'gtrac.log',
        join=True,
        **extra_args)
def __init__(self, parameter, **extra_args):
    inputs = dict()
    outputs = dict()

    # execution wrapper needs to be added anyway
    gsceuafish_wrapper_sh = resource_filename(Requirement.parse("gc3pie"),
                                              "gc3libs/etc/gsceuafish.sh")
    inputs[gsceuafish_wrapper_sh] = os.path.basename(gsceuafish_wrapper_sh)

    _command = "./%s %s " % (os.path.basename(gsceuafish_wrapper_sh),
                             ' '.join(str(x) for x in parameter))

    if "main_loop_folder" in extra_args:
        inputs[extra_args['main_loop_folder']] = './data/'

    Application.__init__(
        self,
        arguments = _command,
        inputs = inputs,
        outputs = gc3libs.ANY_OUTPUT,
        stdout = 'gsceuafish.log',
        join=True,
        executables = "./%s" % os.path.basename(gsceuafish_wrapper_sh),
        **extra_args)
def __init__(self, docking_file, docking_index, **extra_args):
    self.output_dir = extra_args['output_dir']
    self.docking_index = docking_index

    inputs = dict()
    outputs = dict()

    grdock_wrapper_sh = resource_filename(Requirement.parse("gc3pie"),
                                          "gc3libs/etc/grdock_wrapper.sh")
    inputs[grdock_wrapper_sh] = os.path.basename(grdock_wrapper_sh)

    inputs[docking_file] = os.path.basename(docking_file)

    if extra_args['data_folder']:
        for element in os.listdir(extra_args['data_folder']):
            inputs[os.path.abspath(
                os.path.join(extra_args['data_folder'], element))] = os.path.basename(element)

    arguments = "./%s -n %s -o Docked%s %s results" % (
        inputs[grdock_wrapper_sh],
        extra_args['rbdock_iterations'],
        self.docking_index,
        os.path.basename(docking_file))

    Application.__init__(self,
                         arguments=arguments,
                         inputs=inputs,
                         outputs=["results/"],
                         stdout='grdock.log',
                         join=True,
                         **extra_args)
def __init__(self, hunting, **extra_args):
    """
    Remote execution: ABM [value]
    """
    inputs = dict()
    outputs = dict()
    executables = list()

    if 'binary' in extra_args:
        remote_bin = os.path.basename(extra_args['binary'])
        arguments = "./%s " % remote_bin
        inputs[extra_args['binary']] = remote_bin
        executables.append(remote_bin)
    else:
        arguments = DEFAULT_REMOTE_BIN

    arguments += " %d " % hunting

    Application.__init__(
        self,
        arguments = arguments,
        inputs = inputs,
        outputs = gc3libs.ANY_OUTPUT,
        stdout = 'gabm.log',
        join=True,
        executables = executables,
        **extra_args)
def __init__(self, input_files, **extra_args):
    """
    Use the wrapper script to start the simulation.
    """
    inputs = dict()

    inputs[extra_args['stacks_exec']] = DEFAULT_STACKS_RUN_SCRIPT

    for f in input_files:
        inputs[f] = "./input/{0}".format(os.path.basename(f))

    inputs[extra_args["decoy_output_folder"]] = DEFAULT_RESULT_FOLDER

    docker_mount = "-v $PWD/input:/input -v $PWD/output:/output "

    # Add memory requirement
    # extra_args.setdefault('requested_memory', 1.5*GiB)

    Application.__init__(self,
                         arguments=DOCKER_RUN_COMMAND.format(
                             DOCKER_MOUNT=docker_mount,
                             STACKS_RUN_SCRIPT=DEFAULT_STACKS_RUN_SCRIPT,
                             DOCKER_TO_RUN=extra_args["docker"]),
                         inputs=inputs,
                         outputs=DEFAULT_RESULT_FOLDER,
                         stdout='gstacks.log',
                         join=True,
                         **extra_args)
def __init__(self, model_index, seed, **extra_args):
    self.output_dir = extra_args['output_dir']
    self.result_dir = extra_args['result_dir']

    inputs = dict()
    outputs = dict()
    executables = []

    if 'run_binary' in extra_args:
        inputs[os.path.abspath(
            extra_args['run_binary'])] = './par_recovery'
        arguments = "./par_recovery "
        executables.append('./par_recovery')
    else:
        arguments = "par_recovery "

    # self.output_filename = "ParRecovery_Genmodel%s_.mat" % str(model_index)
    # outputs[self.output_filename] = os.path.join(self.result_dir,
    #     "ParRecovery_Genmodel%s_%s.mat" % (str(model_index), extra_args['repetition']))
    self.output_filename = "ParRecovery_Genmodel%s_%s.mat" % (
        str(model_index), extra_args['repetition'])
    outputs["ParRecovery_Genmodel%s_.mat" % str(model_index)] = self.output_filename

    arguments += "%s %s" % (str(model_index), str(seed))

    Application.__init__(self,
                         arguments=arguments,
                         inputs=inputs,
                         outputs=outputs,
                         stdout='gprecovery.log',
                         join=True,
                         executables=executables,
                         **extra_args)
def __init__(self, network_data_file, run_script, **extra_args):
    self.output_dir = extra_args['output_dir']

    inputs = dict()

    network_data_filename = os.path.basename(network_data_file)
    inputs[network_data_file] = network_data_filename
    inputs[run_script] = os.path.basename(run_script)

    # adding wrapper main script
    gbenchmark_wrapper_sh = resource_filename(Requirement.parse("gc3pie"),
                                              "gc3libs/etc/gbenchmark_wrapper.sh")
    inputs[gbenchmark_wrapper_sh] = "gbenchmark_wrapper.sh"

    arguments = "./gbenchmark_wrapper.sh -d -b %s -r %s ./%s " % (extra_args["benchmark_type"],
                                                                  os.path.basename(run_script),
                                                                  network_data_filename)

    extra_args['requested_cores'] = 8

    Application.__init__(
        self,
        arguments = arguments,
        inputs = inputs,
        outputs = ["./results"],
        stdout = 'gbenchmark.log',
        join=True,
        executables = ['wrapper.sh'],
        **extra_args)
def __init__(self, funcfile, radius, params, inputfile, outputfile=None, **extra_args):
    funcname = basename_sans(funcfile)
    self.funcname = funcname
    self.radius = radius

    # map args to file system names
    inputname = basename_sans(inputfile)
    if outputfile is None:
        outputfile = ('output_{inputname}_{radius}.mat'.format(**locals()))

    # default execution params
    extra_args.setdefault('requested_cores', 1)
    extra_args.setdefault('requested_memory', 3 * GB)
    extra_args.setdefault('requested_architecture', Run.Arch.X86_64)
    extra_args.setdefault('requested_walltime', 30 * days)

    # actual app initialization
    Application.__init__(
        self,
        arguments=[
            'matlab', '-nodisplay', '-nojvm',
            #'-singleCompThread',
            '-r', (self.matlab_cmd.format(**locals()))
        ],
        inputs=[funcfile, inputfile],
        outputs=[outputfile],
        stdout='matlab.log',
        join=True,
        **extra_args)
def __init__(self, input_folder, **extra_args):
    self.output_dir = extra_args['output_dir']
    self.input_folder = input_folder

    inputs = dict()
    outputs = dict()

    gndn_wrapper_sh = resource_filename(Requirement.parse("gc3pie"),
                                        "gc3libs/etc/gndn_wrapper.sh")
    inputs[gndn_wrapper_sh] = os.path.basename(gndn_wrapper_sh)

    inputs[input_folder] = "%s/" % os.path.basename(input_folder)
    outputs[os.path.join(os.path.basename(input_folder),"results")] = "results/"

    arguments = "./%s %s" % (inputs[gndn_wrapper_sh],inputs[input_folder])

    Application.__init__(
        self,
        arguments = arguments,
        inputs = inputs,
        outputs = outputs,
        stdout = 'gndn.log',
        join=True,
        executables = "./%s" % os.path.basename(input_folder),
        **extra_args)
def __init__(self, input_folder, chromosomes_folder, **extra_args):
    executables = []
    inputs = dict()
    outputs = dict()

    # Set output
    outputs[DEFAULT_REMOTE_OUTPUT_FILE] = DEFAULT_REMOTE_OUTPUT_FILE

    # Note: input data are made available through a network fileshare
    # inputs[data_folder] = os.path.basename(input_folder)
    # inputs[chromosomes_folder] = os.path.basename(chromosomes_folder)

    arguments = DOCKER_CMD.format(
        DATA_MOUNT=input_folder,
        CHROMOSOMES_MOUNT=chromosomes_folder,
        OUTPUT_MOUNT=outputs[DEFAULT_REMOTE_OUTPUT_FILE],
        was_release=extra_args["was_release"])

    gc3libs.log.debug("Creating application for executing: %s", arguments)

    Application.__init__(self,
                         arguments=arguments,
                         inputs=inputs,
                         outputs=outputs,
                         stdout='log',
                         join=True,
                         executables=executables,
                         **extra_args)
def __init__(self, result_csv_path):
    Application.__init__(self,
                         ['python', 'saplot.py'],
                         inputs=['downloads/saplot.py', result_csv_path],
                         outputs=['saplot.pdf'],
                         output_dir='saplot.d',
                         stdout="saplot.log",
                         stderr="saplot.log")
def __init__(self, input_file, getsentiment_script, cores, **extra_args):
    executables = []
    inputs = dict()
    outputs = dict()

    if not extra_args['sharedFS']:
        inputs[input_file] = os.path.basename(input_file)
        cmd = COMMAND.format(twitter_file=inputs[input_file], cores=cores)
    else:
        cmd = COMMAND.format(twitter_file=input_file, cores=cores)

    if getsentiment_script:
        inputs[getsentiment_script] = "./twitter-senti-extract.py"
    else:
        wrapper = resource_filename(
            Requirement.parse("gc3pie"),
            "gc3libs/etc/twitter-senti-extract.py")
        inputs[wrapper] = "./twitter-senti-extract.py"

    extra_args['requested_cores'] = cores

    Application.__init__(
        self,
        arguments=cmd,
        inputs=inputs,
        outputs=[
            "{input_file_name_prefix}.csv".format(
                input_file_name_prefix=os.path.basename(input_file))
        ],
        stdout='gsentitweet.log',
        join=True,
        executables=executables,
        **extra_args)
def __init__(self, input_file, **extra_args):
    # setup output references
    self.output_dir = extra_args['output_dir']

    # setup input references
    inputs = dict()

    gmodis_wrapper_sh = resource_filename(Requirement.parse("gc3pie"),
                                          "gc3libs/etc/gmodis_wrapper.sh")
    inputs[gmodis_wrapper_sh] = os.path.basename(gmodis_wrapper_sh)

    _command = []
    _command.append("./%s" % os.path.basename(gmodis_wrapper_sh))

    # Add debug info
    _command.append("-d")

    if 'fsc_dir' in extra_args:
        inputs.update(dict(
            (os.path.join(extra_args['fsc_dir'], v),
             os.path.join(os.path.basename(extra_args['fsc_dir']), v))
            for v in os.listdir(extra_args['fsc_dir'])))
        _command.append("-f ./%s " % os.path.basename(extra_args['fsc_dir']))

    if 'gmodis_funct' in extra_args:
        # e.g. ('/home/data/matlab/gmodis','~/bin/gmodis')
        inputs[extra_args['gmodis_funct']] = os.path.basename(extra_args['gmodis_funct'])
        _command.append("-x ./%s " % os.path.basename(extra_args['gmodis_funct']))

    if 'matlab_driver' in extra_args:
        inputs[extra_args['matlab_driver']] = os.path.basename(extra_args['matlab_driver'])
        _command.append("-s ./%s " % os.path.basename(extra_args['matlab_driver']))

    inputs[input_file] = os.path.basename(input_file)
    _command.append(os.path.basename(input_file))

    outputs = gc3libs.ANY_OUTPUT

    # Add memory requirement
    extra_args['requested_memory'] = 16*GB

    Application.__init__(
        self,
        arguments = _command,
        executables = "./%s" % os.path.basename(gmodis_wrapper_sh),
        inputs = inputs,
        outputs = outputs,
        stdout = 'gmodis.log',
        join=True,
        **extra_args)
def __init__(self, input_file, **extra_args):
    inputs = dict()
    inputs[input_file] = "./input.csv"

    self.output_folder = "./results"

    # arguments = "./MCSpecs ./input.csv"
    # arguments = "matlab -nodesktop -nodisplay -nosplash -r \'Main_loop input.csv results; quit()\'"

    gwrappermc_wrapper_sh = resource_filename(Requirement.parse("gc3pie"),
                                              "gc3libs/etc/gwrappermc_wrapper.sh")
    inputs[gwrappermc_wrapper_sh] = os.path.basename(gwrappermc_wrapper_sh)

    arguments = "./%s " % inputs[gwrappermc_wrapper_sh]

    if 'main_loop_folder' in extra_args:
        inputs[extra_args['main_loop_folder']] = './data/'
        arguments += "-m ./data "

    arguments += " -i %s " % extra_args['index_chunk']
    arguments += "input.csv results"

    Application.__init__(
        self,
        arguments = arguments,
        inputs = inputs,
        outputs = ['results/'],
        stdout = 'gwrappermc.log',
        join=True,
        executables = "./%s " % inputs[gwrappermc_wrapper_sh],
        **extra_args)
def __init__(self, docking_file, docking_index, **extra_args):
    self.output_dir = extra_args['output_dir']
    self.docking_index = docking_index

    inputs = dict()
    outputs = dict()

    grdock_wrapper_sh = resource_filename(Requirement.parse("gc3pie"),
                                          "gc3libs/etc/grdock_wrapper.sh")
    inputs[grdock_wrapper_sh] = os.path.basename(grdock_wrapper_sh)

    inputs[docking_file] = os.path.basename(docking_file)

    if extra_args['data_folder']:
        for element in os.listdir(extra_args['data_folder']):
            inputs[os.path.abspath(os.path.join(extra_args['data_folder'], element))] = os.path.basename(element)

    arguments = "./%s -n %s -o Docked%s %s results" % (inputs[grdock_wrapper_sh],
                                                       extra_args['rbdock_iterations'],
                                                       self.docking_index,
                                                       os.path.basename(docking_file))

    Application.__init__(
        self,
        arguments = arguments,
        inputs = inputs,
        outputs = ["results/"],
        stdout = 'grdock.log',
        join=True,
        **extra_args)
def __init__(self, vcf_list, **extra_args):
    self.S2_output = extra_args['S2_output']
    extra_args['requested_memory'] = extra_args['S2_memory']

    inputs = dict()
    outputs = dict()
    executables = []

    # execution wrapper needs to be added anyway
    gatks2_wrapper_sh = resource_filename(Requirement.parse("gc3pie"),
                                          "gc3libs/etc/gatks2.sh")
    inputs[gatks2_wrapper_sh] = os.path.basename(gatks2_wrapper_sh)

    arguments = "./%s ./input -m %s " % (os.path.basename(gatks2_wrapper_sh),
                                         str(extra_args['requested_memory'].amount(conv=int)))

    for vcf in vcf_list:
        inputs[vcf] = os.path.join('./input', os.path.basename(vcf))

    # Set output
    self.vcf_output = os.path.join(self.S2_output, 'genotyped.gvcf.vcf')
    outputs['genotyped.gvcf.vcf'] = self.vcf_output

    Application.__init__(
        self,
        arguments = arguments,
        inputs = inputs,
        outputs = outputs,
        stdout = 'gatks2.log',
        join=True,
        executables = os.path.basename(gatks2_wrapper_sh),
        **extra_args)
def __init__(
        self,
        scriptfile,        # path to the R script to run
        datafiles,         # additional files to upload
        days_of_the_week,  # list of up to 7 int
        sampling_exp,      # list of 3 float
        isolation_exp,     # list of 4 int
        detection_exp,     # list of 7 float
        nb,                # see `R2jags::jags` param `n.burnin`
        ni,                # see `R2jags::jags` param `n.iter`
        nt,                # see `R2jags::jags` param `n.thin`
        **extra_args):
    # use a wrapper script to drive remote run
    wrapper_sh = resource_filename(Requirement.parse("gc3pie"),
                                   "gc3libs/etc/run_R.sh")
    inputs = {wrapper_sh: os.path.basename(wrapper_sh)}

    # upload R script
    exename = os.path.basename(scriptfile)
    run_name = './' + exename
    inputs[scriptfile] = exename

    # upload additional files
    for path in datafiles:
        inputs[path] = os.path.basename(path)

    # save command-line params
    self.days_of_the_week = days_of_the_week
    self.sampling_exp = sampling_exp
    self.isolation_exp = isolation_exp
    self.detection_exp = detection_exp
    self.nb = nb
    self.ni = ni
    self.nt = nt

    # provide defaults for envelope requests
    extra_args.setdefault('requested_cores', 4)
    extra_args.setdefault('requested_memory', 1 * GB)
    extra_args.setdefault('requested_architecture', Run.Arch.X86_64)
    extra_args.setdefault('requested_walltime', 12 * hours)

    # chain into `Application` superclass ctor
    Application.__init__(
        self,
        arguments=(
            #['/bin/echo'] +
            ['./' + os.path.basename(wrapper_sh), run_name] + [
                repr_as_R(val) for val in (
                    days_of_the_week,
                    sampling_exp,
                    isolation_exp,
                    detection_exp,
                    nb,
                    ni,
                    nt,
                    extra_args['requested_cores'],  # nc
                )
            ]),
        inputs=inputs,
        outputs=[
            'dclone_design_test_fits.Rdata',
            self.application_name + '.log',
        ],
        stdout=self.application_name + '.log',
        join=True,
        **extra_args)
def __init__(self, input_file, state_file, id_name, **extra_args):
    executables = []
    inputs = dict()
    outputs = dict()

    inputs[input_file] = os.path.basename(input_file)

    if state_file:
        inputs[state_file] = os.path.basename(state_file)
        resume_option = " -t {state_file}".format(state_file=state_file)
    else:
        resume_option = ""

    arguments = BEAST_COMMAND.format(resume=resume_option,
                                     input_xml=inputs[input_file])

    gc3libs.log.debug("Creating application for executing: %s", arguments)

    self.id_name = id_name

    Application.__init__(
        self,
        arguments = arguments,
        inputs = inputs,
        outputs = gc3libs.ANY_OUTPUT,
        stdout = 'gsubbeast.log',
        join=True,
        executables = executables,
        **extra_args)
def __init__(self, network_data_file, run_script, **extra_args):
    self.output_dir = extra_args['output_dir']

    inputs = dict()

    network_data_filename = os.path.basename(network_data_file)
    inputs[network_data_file] = network_data_filename
    inputs[run_script] = os.path.basename(run_script)

    # adding wrapper main script
    gbenchmark_wrapper_sh = resource_filename(
        Requirement.parse("gc3pie"),
        "gc3libs/etc/gbenchmark_wrapper.sh")
    inputs[gbenchmark_wrapper_sh] = "gbenchmark_wrapper.sh"

    arguments = "./gbenchmark_wrapper.sh -d -b %s -r %s ./%s " % (
        extra_args["benchmark_type"],
        os.path.basename(run_script),
        network_data_filename)

    extra_args['requested_cores'] = 8

    Application.__init__(self,
                         arguments=arguments,
                         inputs=inputs,
                         outputs=["./results"],
                         stdout='gbenchmark.log',
                         join=True,
                         executables=['wrapper.sh'],
                         **extra_args)
def __init__(self, funcfile, radius, params, inputfile, outputfile=None, **extra_args):
    funcname = basename_sans(funcfile)
    self.funcname = funcname
    self.radius = radius

    # map args to file system names
    inputname = basename_sans(inputfile)
    if outputfile is None:
        outputfile = ('output_{inputname}_{radius}.mat'.format(**locals()))

    # default execution params
    extra_args.setdefault('requested_cores', 1)
    extra_args.setdefault('requested_memory', 3*GB)
    extra_args.setdefault('requested_architecture', Run.Arch.X86_64)
    extra_args.setdefault('requested_walltime', 30*days)

    # actual app initialization
    Application.__init__(
        self,
        arguments=[
            'matlab', '-nodisplay', '-nojvm',
            #'-singleCompThread',
            '-r', (self.matlab_cmd.format(**locals()))
        ],
        inputs = [funcfile, inputfile],
        outputs = [outputfile],
        stdout = 'matlab.log',
        join=True,
        **extra_args)
def __init__(self, events, matlab_file, case_file, **extra_args):
    executables = []
    inputs = dict()
    outputs = dict()

    inputs[matlab_file] = os.path.basename(matlab_file)
    matlab_function = inputs[matlab_file].split(".")[0]
    inputs[case_file] = os.path.basename(case_file)

    arguments = MATLAB_CMD.format(
        main_function=matlab_function,
        events=events,
        case_file=os.path.basename(case_file),
        output_file=DEFAULT_REMOTE_OUTPUT_FILE,
    )

    # Set output
    outputs[DEFAULT_REMOTE_OUTPUT_FILE] = DEFAULT_REMOTE_OUTPUT_FILE

    gc3libs.log.debug("Creating application for executing: %s", arguments)

    Application.__init__(
        self,
        arguments=arguments,
        inputs=inputs,
        outputs=outputs,
        stdout="gbraunian.log",
        join=True,
        executables=executables,
        **extra_args
    )
def __init__(self, input_file, mfunct, **extra_args):
    executables = []
    inputs = dict()
    outputs = dict()

    self.results = extra_args['session_output_dir']

    wrapper = resource_filename(Requirement.parse("gc3pie"),
                                "gc3libs/etc/matlab_wrapper.sh")
    inputs[wrapper] = "./wrapper.sh"

    inputs[input_file] = os.path.basename(input_file)

    arguments = "./wrapper.sh %s %s %s" % (
        mfunct,
        os.path.basename(input_file),
        DEFAULT_REMOTE_OUTPUT_FOLDER)

    if 'source' in extra_args:
        inputs[extra_args['source']] = os.path.basename(
            extra_args['source'])
        arguments += " -s %s" % os.path.basename(extra_args['source'])

    # Set output
    outputs[DEFAULT_REMOTE_OUTPUT_FOLDER] = DEFAULT_REMOTE_OUTPUT_FOLDER

    gc3libs.log.info("Creating application for executing: %s", arguments)

    Application.__init__(self,
                         arguments=arguments,
                         inputs=inputs,
                         outputs=outputs,
                         stdout='gpartialequilibrium.log',
                         join=True,
                         executables=executables,
                         **extra_args)
def __init__(self, input_file, **extra_args):
    inputs = dict()
    inputs[input_file] = "./input.csv"

    self.output_folder = "./results"

    # arguments = "./MCSpecs ./input.csv"
    # arguments = "matlab -nodesktop -nodisplay -nosplash -r \'Main_loop input.csv results; quit()\'"

    gwrappermc_wrapper_sh = resource_filename(
        Requirement.parse("gc3pie"),
        "gc3libs/etc/gwrappermc_wrapper.sh")
    inputs[gwrappermc_wrapper_sh] = os.path.basename(gwrappermc_wrapper_sh)

    arguments = "./%s " % inputs[gwrappermc_wrapper_sh]

    if 'main_loop_folder' in extra_args:
        inputs[extra_args['main_loop_folder']] = './data/'
        arguments += "-m ./data "

    arguments += " -i %s " % extra_args['index_chunk']
    arguments += "input.csv results"

    Application.__init__(self,
                         arguments=arguments,
                         inputs=inputs,
                         outputs=['results/'],
                         stdout='gwrappermc.log',
                         join=True,
                         executables="./%s " % inputs[gwrappermc_wrapper_sh],
                         **extra_args)
def __init__(self, parameter_string, **extra_args):
    executables = []
    inputs = dict()
    outputs = dict()

    if 'binary' in extra_args:
        binary_name = os.path.basename(extra_args['binary'])
        arguments = "./%s" % binary_name
        inputs[extra_args['binary']] = binary_name
        executables.append(binary_name)
    else:
        arguments = DEFAULT_EPICELL_BINARY

    for param in parameter_string.strip().split(','):
        arguments += " %s " % param

    # Set output
    outputs[DEFAULT_REMOTE_OUTPUT_FOLDER] = os.path.basename(
        DEFAULT_REMOTE_OUTPUT_FOLDER)
    arguments += DEFAULT_REMOTE_OUTPUT_FOLDER

    gc3libs.log.debug("Creating application for executing: %s", arguments)

    Application.__init__(self,
                         arguments=arguments,
                         inputs=inputs,
                         outputs=gc3libs.ANY_OUTPUT,
                         stdout='gepecell.log',
                         join=True,
                         executables=executables,
                         **extra_args)
def __init__(self, input_file, **extra_args):
    """
    Use the wrapper script to start the simulation.
    """
    inputs = []
    outputs = []

    gpyrad_wrapper_sh = resource_filename(Requirement.parse("gc3pie"),
                                          "gc3libs/etc/gpyrad_wrapper.sh")
    inputs.append((gpyrad_wrapper_sh, os.path.basename(gpyrad_wrapper_sh)))

    cmd = "./gpyrad_wrapper.sh "

    if 's3cfg' in extra_args:
        inputs.append((extra_args['s3cfg'], "etc/s3cfg"))

    if 'wclust' in extra_args:
        cmd += " -w %s " % extra_args['wclust']
        output_folder_name = 'clust%s' % extra_args['wclust']
    else:
        # This is a convention of PyRAD
        output_folder_name = 'clust%.1f' % DEFAULT_WCLUST

    outputs.append(output_folder_name)

    if 'debug' in extra_args:
        cmd += "-d "
        outputs.append('strace.log')

    if 'paramsfile' in extra_args:
        cmd += " -p ./params.tmpl "
        # XXX: the params file contains important paths needed by pyRAD.
        # If we deploy an alternative params.txt file supplied by the
        # end-user, we risk that we can no longer rely on the assumptions
        # made for the original params.txt file.
        inputs.append((extra_args['paramsfile'], './params.tmpl'))

    remote_input_file = os.path.join('./input', os.path.basename(input_file))
    cmd += " %s " % remote_input_file
    inputs.append((input_file, remote_input_file))

    # Add memory requirement
    extra_args.setdefault('requested_memory', 1.5*GiB)

    Application.__init__(
        self,
        # arguments should mimic the command-line interface of the command
        # to be executed on the remote end
        arguments = cmd,
        inputs = inputs,
        outputs = outputs,
        stdout = 'gpyrad.log',
        join=True,
        **extra_args)
def __init__(self, input_dir, working_dir, output_container, **extra_args):
    """
    Prepare remote execution of the geosphere wrapper script.
    The resulting Application corresponds to a remote execution like:

    geosphere_wrapper.sh [options] <input archive> <working dir> <model name>

    Options:
    -g <grok binary file>   path to 'grok' binary. Default in PATH
    -h <hgs binary file>    path to 'hgs' binary. Default in PATH
    -o <S3 url>             store output result on an S3 container
    -d                      enable debug
    """
    inputs = []

    geosphere_wrapper_sh = resource_filename(Requirement.parse("gc3pie"),
                                             "gc3libs/etc/geosphere_wrapper.sh")
    inputs.append((geosphere_wrapper_sh, os.path.basename(geosphere_wrapper_sh)))

    cmd = "./geosphere_wrapper.sh -d "

    if 's3cfg' in extra_args:
        inputs.append((extra_args['s3cfg'], "etc/s3cfg"))

    if 'grok_bin' in extra_args:
        cmd += "-g %s " % extra_args['grok_bin']
        inputs.append((extra_args['grok_bin'],
                       os.path.join("./bin", os.path.basename(extra_args['grok_bin']))))

    if 'hgs_bin' in extra_args:
        cmd += "-h %s " % extra_args['hgs_bin']
        inputs.append((extra_args['hgs_bin'],
                       os.path.join("./bin", os.path.basename(extra_args['hgs_bin']))))

    cmd += "%s %s %s" % (input_dir, working_dir, output_container)

    Application.__init__(
        self,
        # arguments should mimic the command-line interface of the command
        # to be executed on the remote end
        arguments = cmd,
        inputs = inputs,
        outputs = [],
        stdout = 'geosphere.log',
        join=True,
        **extra_args)
def __init__(self,
             scriptfile,        # path to the R script to run
             datafiles,         # additional files to upload
             days_of_the_week,  # list of up to 7 int
             sampling_exp,      # list of 3 float
             isolation_exp,     # list of 4 int
             detection_exp,     # list of 7 float
             nb,                # see `R2jags::jags` param `n.burnin`
             ni,                # see `R2jags::jags` param `n.iter`
             nt,                # see `R2jags::jags` param `n.thin`
             **extra_args):
    # use a wrapper script to drive remote run
    wrapper_sh = resource_filename(Requirement.parse("gc3pie"),
                                   "gc3libs/etc/run_R.sh")
    inputs = { wrapper_sh: os.path.basename(wrapper_sh) }

    # upload R script
    exename = os.path.basename(scriptfile)
    run_name = './' + exename
    inputs[scriptfile] = exename

    # upload additional files
    for path in datafiles:
        inputs[path] = os.path.basename(path)

    # save command-line params
    self.days_of_the_week = days_of_the_week
    self.sampling_exp = sampling_exp
    self.isolation_exp = isolation_exp
    self.detection_exp = detection_exp
    self.nb = nb
    self.ni = ni
    self.nt = nt

    # provide defaults for envelope requests
    extra_args.setdefault('requested_cores', 4)
    extra_args.setdefault('requested_memory', 1*GB)
    extra_args.setdefault('requested_architecture', Run.Arch.X86_64)
    extra_args.setdefault('requested_walltime', 12*hours)

    # chain into `Application` superclass ctor
    Application.__init__(
        self,
        arguments=(
            #['/bin/echo'] +
            ['./' + os.path.basename(wrapper_sh), run_name] + [
                repr_as_R(val) for val in (
                    days_of_the_week,
                    sampling_exp,
                    isolation_exp,
                    detection_exp,
                    nb,
                    ni,
                    nt,
                    extra_args['requested_cores'],  # nc
                )]),
        inputs = inputs,
        outputs = [
            'dclone_design_test_fits.Rdata',
            self.application_name + '.log',
        ],
        stdout = self.application_name + '.log',
        join=True,
        **extra_args)
def __init__(self):
    Application.__init__(self,
                         arguments = [''],
                         inputs = [],
                         outputs = [],
                         output_dir = None)
    self.execution.state = 'TERMINATED'
    self.execution.returncode = 0
    self.changed = False
def test_io_spec_to_dict_unicode():
    # pylint: disable=import-error,protected-access,redefined-outer-name
    import gc3libs.url
    with pytest.raises(gc3libs.exceptions.InvalidValue):
        Application._io_spec_to_dict(
            gc3libs.url.UrlKeyDict, {
                u'/tmp/\u0246': u'\u0246',
                '/tmp/b/': 'b'},
            True)