def main():
    """CLI entry point for this uploader: parse arguments, then run transform().

    The program description, epilog, and per-option help strings are all
    harvested from transform()'s docstring via script_utils.parse_docs().
    Exits with status 1 if transform() raises.
    """
    script_details = script_utils.parse_docs(transform.__doc__)

    parser = argparse.ArgumentParser(prog=__file__,
                                     description=script_details["Description"],
                                     epilog=script_details["Authors"])

    # (flag, extra add_argument kwargs) pairs; the help text for each option
    # is looked up in the parsed docstring under the flag's bare name.
    option_specs = [
        ('--workspace_service_url',
         dict(action='store', type=str, nargs='?', required=True)),
        ('--workspace_name',
         dict(action='store', type=str, nargs='?', required=True)),
        ('--object_name',
         dict(action='store', type=str, nargs='?', required=True)),
        ('--output_file_name',
         dict(action='store', type=str, nargs='?', default=None, required=False)),
        ('--input_directory',
         dict(action='store', type=str, nargs='?', required=True)),
        ('--working_directory',
         dict(action='store', type=str, nargs='?', required=True)),
        ('--has_replicates',
         dict(action='store', type=str, nargs='?', required=True)),
        # NOTE: type=unicode, so this script targets Python 2.
        ('--input_mapping',
         dict(action='store', type=unicode, nargs='?', default=None, required=False)),
        # custom argument specific to this uploader
        ('--format_type',
         dict(action='store', type=str, required=False)),
    ]
    for flag, extras in option_specs:
        parser.add_argument(flag,
                            help=script_details["Args"][flag.lstrip('-')],
                            **extras)

    # parse_known_args: unrecognized arguments are ignored rather than fatal.
    args, unknown = parser.parse_known_args()

    logger = script_utils.stderrlogger(__file__)
    logger.debug(args)
    try:
        transform(workspace_service_url=args.workspace_service_url,
                  workspace_name=args.workspace_name,
                  object_name=args.object_name,
                  output_file_name=args.output_file_name,
                  input_directory=args.input_directory,
                  working_directory=args.working_directory,
                  has_replicates=args.has_replicates,
                  input_mapping=args.input_mapping,
                  format_type=args.format_type,
                  logger=logger)
    except Exception as e:
        logger.exception(e)
        sys.exit(1)
# NOTE(review): truncated fragment — appears to be the tail of a validate()
# routine (a loop that breaks on the first failed file, then raises if
# validation failed or if no file had a valid fasta/fastq extension),
# followed by the start of its __main__ CLI wiring; the trailing
# validate(...) call is cut off mid-argument-list. Kept byte-identical
# pending the rest of the file.
validated = False break else: logger.info("Validation passed on {0}".format(input_file_name)) checked = True if not validated: raise Exception("Validation failed!") elif not checked: raise Exception("No files were found that had a valid fasta or fastq extension.") else: logger.info("Validation passed.") if __name__ == "__main__": script_details = script_utils.parse_docs(validate.__doc__) import argparse parser = argparse.ArgumentParser(prog=__file__, description=script_details["Description"], epilog=script_details["Authors"]) parser.add_argument("--input_directory", help=script_details["Args"]["input_directory"], type=str, nargs="?", required=True) parser.add_argument("--working_directory", help=script_details["Args"]["working_directory"], type=str, nargs="?", required=True) args, unknown = parser.parse_known_args() returncode = 0 try: validate(input_directory = args.input_directory,
# NOTE(review): truncated fragment — tail of a KBaseGenomes.Genome ->
# Genbank.Genome transform: logs a subprocess's stdout, exits 1 if stderr is
# non-empty, exits 0 on success; then the start of __main__ argparse wiring
# (using script_utils.ArgumentParser), cut off mid add_argument call. Kept
# byte-identical pending the rest of the file.
logger.info(stdout) if stderr is not None and len(stderr) > 0: logger.error( "Transformation from KBaseGenomes.Genome to Genbank.Genome failed on {0}" .format(object_name)) logger.error(stderr) sys.exit(1) logger.info( "Transformation from KBaseGenomes.Genome to Genbank.Genome completed.") sys.exit(0) if __name__ == "__main__": script_details = script_utils.parse_docs(transform.__doc__) parser = script_utils.ArgumentParser( prog=__file__, description=script_details["Description"], epilog=script_details["Authors"]) parser.add_argument("--shock_service_url", help=script_details["Args"]["shock_service_url"], action="store", type=str, nargs='?', required=False) parser.add_argument("--workspace_service_url", help=script_details["Args"]["workspace_service_url"], action="store", type=str,
def main():
    """CLI entry point for this uploader: parse arguments, then run transform().

    Help/description/epilog text is scraped from transform()'s docstring via
    script_utils.parse_docs(). Exits with status 1 if transform() raises.
    """
    script_details = script_utils.parse_docs(transform.__doc__)

    import argparse
    parser = argparse.ArgumentParser(prog=__file__,
                                     description=script_details["Description"],
                                     epilog=script_details["Authors"])

    # (flag, extra add_argument kwargs) pairs; the help text for each option
    # is looked up in the parsed docstring under the flag's bare name.
    option_specs = [
        ('--shock_service_url',
         dict(action='store', type=str, nargs='?', required=True)),
        ('--handle_service_url',
         dict(action='store', type=str, nargs='?', default=None, required=False)),
        ('--input_directory',
         dict(action='store', type=str, nargs='?', required=True)),
        ('--working_directory',
         dict(action='store', type=str, nargs='?', required=True)),
        ('--output_file_name',
         dict(action='store', type=str, nargs='?', default=None, required=False)),
        ('--shock_id',
         dict(action='store', type=str, nargs='?', default=None, required=False)),
        ('--handle_id',
         dict(action='store', type=str, nargs='?', default=None, required=False)),
        # NOTE: type=unicode, so this script targets Python 2.
        ('--input_mapping',
         dict(action='store', type=unicode, nargs='?', default=None, required=False)),
        # custom arguments specific to this uploader
        ('--polarity', dict(action='store', type=int, required=False)),
        ('--group', dict(action='store', type=str, required=False)),
        ('--inclusion_order', dict(action='store', type=int, required=False)),
        ('--retention_correction', dict(action='store', type=float, required=False)),
        ('--atlases', dict(action='store', type=str, nargs='?', required=False)),
        ('--mzml_file_name', dict(action='store', type=str, required=False)),
        ('--normalization_factor', dict(action='store', type=float, required=False)),
    ]
    for flag, extras in option_specs:
        parser.add_argument(flag,
                            help=script_details["Args"][flag.lstrip('-')],
                            **extras)

    # parse_known_args: unrecognized arguments are ignored rather than fatal.
    args, unknown = parser.parse_known_args()

    logger = script_utils.stderrlogger(__file__)
    logger.debug(args)
    try:
        transform(shock_service_url=args.shock_service_url,
                  handle_service_url=args.handle_service_url,
                  output_file_name=args.output_file_name,
                  input_directory=args.input_directory,
                  working_directory=args.working_directory,
                  shock_id=args.shock_id,
                  handle_id=args.handle_id,
                  input_mapping=args.input_mapping,
                  mzml_file_name=args.mzml_file_name,
                  polarity=args.polarity,
                  atlases=args.atlases,
                  group=args.group,
                  inclusion_order=args.inclusion_order,
                  normalization_factor=args.normalization_factor,
                  retention_correction=args.retention_correction,
                  logger=logger)
    except Exception as e:
        logger.exception(e)
        sys.exit(1)
# NOTE(review): truncated fragment — tail of a ContigSet transform: dumps the
# contig_set_dict to pretty-printed JSON via simplejson and writes it to
# working_directory/output_file_name; then the start of __main__ argparse
# wiring, cut off mid add_argument call. Kept byte-identical pending the rest
# of the file.
# This generates the json for the object objectString = simplejson.dumps(contig_set_dict, sort_keys=True, indent=4) logger.info("ContigSet data structure creation completed. Writing out JSON.") output_file_path = os.path.join(working_directory,output_file_name) with open(output_file_path, "w") as outFile: outFile.write(objectString) logger.info("Conversion completed.") # called only if script is run from command line if __name__ == "__main__": script_details = script_utils.parse_docs(transform.__doc__) import argparse parser = argparse.ArgumentParser(prog=__file__, description=script_details["Description"], epilog=script_details["Authors"]) parser.add_argument('--shock_service_url', help=script_details["Args"]["shock_service_url"], action='store', type=str, nargs='?', required=True) parser.add_argument('--handle_service_url', help=script_details["Args"]["handle_service_url"], action='store', type=str, nargs='?', default=None, required=False) parser.add_argument('--input_directory', help=script_details["Args"]["input_directory"],
# NOTE(review): truncated fragment — exception handlers from an assembly
# upload routine (Python 2 `print` statements; re-raises after logging both
# workspace ServerError and any other exception), then the start of __main__
# argparse wiring for upload_assembly, cut off mid add_argument call. The
# bare `except:` re-raises, so it does not swallow errors. Kept byte-identical
# pending the rest of the file.
except biokbase.workspace.client.ServerError as err: print "ASSEMBLY SAVE FAILED ON genome " + str( assembly_name) + " ERROR: " + str(err) raise except: print "ASSEMBLY SAVE FAILED ON genome " + str( assembly_name) + " GENERAL_EXCEPTION: " + str( sys.exc_info()[0]) raise logger.info("Conversion completed.") # called only if script is run from command line if __name__ == "__main__": script_details = script_utils.parse_docs(upload_assembly.__doc__) import argparse parser = argparse.ArgumentParser(prog=__file__, description=script_details["Description"], epilog=script_details["Authors"]) parser.add_argument('--shock_service_url', help=script_details["Args"]["shock_service_url"], action='store', type=str, nargs='?', required=True) parser.add_argument('--handle_service_url', help=script_details["Args"]["handle_service_url"],
"name": assembly_name, "provenance":assembly_provenance}]}) assembly_not_saved = False except biokbase.workspace.client.ServerError as err: print "ASSEMBLY SAVE FAILED ON genome " + str(assembly_name) + " ERROR: " + str(err) raise except: print "ASSEMBLY SAVE FAILED ON genome " + str(assembly_name) + " GENERAL_EXCEPTION: " + str(sys.exc_info()[0]) raise logger.info("Conversion completed.") # called only if script is run from command line if __name__ == "__main__": script_details = script_utils.parse_docs(upload_assembly.__doc__) import argparse parser = argparse.ArgumentParser(prog=__file__, description=script_details["Description"], epilog=script_details["Authors"]) parser.add_argument('--shock_service_url', help=script_details["Args"]["shock_service_url"], action='store', type=str, nargs='?', required=True) parser.add_argument('--handle_service_url', help=script_details["Args"]["handle_service_url"], action='store', type=str, nargs='?', default=None, required=True) parser.add_argument('--workspace_name', nargs='?', help='workspace name to populate', required=True) parser.add_argument('--workspace_service_url', action='store', type=str, nargs='?', required=True)
def main():
    """CLI entry point for this uploader: parse arguments, then run transform().

    Help/description/epilog text is scraped from transform()'s docstring via
    script_utils.parse_docs(). Exits with status 1 if transform() raises.
    """
    script_details = script_utils.parse_docs(transform.__doc__)

    parser = argparse.ArgumentParser(prog=__file__,
                                     description=script_details["Description"],
                                     epilog=script_details["Authors"])

    # (flag, extra add_argument kwargs) pairs; the help text for each option
    # is looked up in the parsed docstring under the flag's bare name.
    option_specs = [
        ('--workspace_service_url',
         dict(action='store', type=str, nargs='?', required=True)),
        ('--workspace_name',
         dict(action='store', type=str, nargs='?', required=True)),
        ('--object_name',
         dict(action='store', type=str, nargs='?', required=True)),
        ('--output_file_name',
         dict(action='store', type=str, nargs='?', default=None, required=False)),
        ('--input_directory',
         dict(action='store', type=str, nargs='?', required=True)),
        ('--working_directory',
         dict(action='store', type=str, nargs='?', required=True)),
        # NOTE: type=unicode, so this script targets Python 2.
        ('--input_mapping',
         dict(action='store', type=unicode, nargs='?', default=None, required=False)),
        # custom arguments specific to this uploader
        ('--format_type', dict(action='store', type=str, required=False)),
        ('--genome_object_name', dict(action='store', type=str, required=False)),
        ('--fill_missing_values', dict(action='store', type=int, required=False)),
        ('--data_type', dict(action='store', type=str, required=False)),
        ('--data_scale', dict(action='store', type=str, required=False)),
    ]
    for flag, extras in option_specs:
        parser.add_argument(flag,
                            help=script_details["Args"][flag.lstrip('-')],
                            **extras)

    # parse_known_args: unrecognized arguments are ignored rather than fatal.
    args, unknown = parser.parse_known_args()

    logger = script_utils.stderrlogger(__file__)
    logger.debug(args)
    try:
        transform(workspace_service_url=args.workspace_service_url,
                  workspace_name=args.workspace_name,
                  object_name=args.object_name,
                  output_file_name=args.output_file_name,
                  input_directory=args.input_directory,
                  working_directory=args.working_directory,
                  input_mapping=args.input_mapping,
                  format_type=args.format_type,
                  genome_object_name=args.genome_object_name,
                  fill_missing_values=args.fill_missing_values,
                  data_type=args.data_type,
                  data_scale=args.data_scale,
                  logger=logger)
    except Exception as e:
        logger.exception(e)
        sys.exit(1)
# NOTE(review): truncated function — the KBase Convert taskrunner main().
# Flow visible here: build argparse from main()'s own docstring, read
# KB_AUTH_TOKEN from the environment, report progress to UJS if a job id was
# given, base64+simplejson-decode optional_arguments and job_details, create
# the working directory, then run the transform task via
# handler_utils.run_task. The exception path ends mid-handler at
# ujs.complete_job — the remainder (presumably cleanup/exit) is outside this
# view, so the code is kept byte-identical.
# NOTE(review): args.job_details defaults to None but is unconditionally
# passed through base64.urlsafe_b64decode — would fail if the flag is
# omitted; confirm against callers.
# NOTE(review): in the except path, ujs.complete_job is called without
# checking args.ujs_job_id for None, unlike the progress updates above —
# verify intended.
def main(): """ KBase Convert task manager for converting between KBase objects. Step 1 - Run a converter to pull the source object and save the destination object. Args: workspace_service_url: URL for a KBase Workspace service where KBase objects are stored. ujs_service_url: URL for a User and Job State service to report task progress back to the user. shock_service_url: URL for a KBase SHOCK data store service for storing files and large reference data. handle_service_url: URL for a KBase Handle service that maps permissions from the Workspace to SHOCK for KBase types that specify a Handle reference instead of a SHOCK reference. source_workspace_name: The name of the source workspace. destination_workspace_name: The name of the destination workspace. source_object_name: The source object name. destination_object_name: The destination object name. source_kbase_type: The KBase Workspace type string that indicates the module and type of the object being created. destination_kbase_type: The KBase Workspace type string that indicates the module and type of the object being created. optional_arguments: This is a JSON string containing optional parameters that can be passed in for custom behavior per conversion. ujs_job_id: The job id from the User and Job State service that can be used to report status on task progress back to the user. job_details: This is a JSON string that passes in the script specific command line options for a given conversion type. The service pulls these config settings from a script config created by the developer of the conversion script and passes that into the AWE job that calls this script. working_directory: The working directory on disk where files can be created and will be cleaned when the job ends with success or failure. keep_working_directory: A flag to tell the script not to delete the working directory, which is mainly for debugging purposes. Returns: Literal return value is 0 for success and 1 for failure. 
Actual data output is one or more Workspace objects saved to a user's workspace. Authors: Matt Henderson, Gavin Price """ logger = script_utils.stderrlogger(__file__, level=logging.DEBUG) logger.info("Executing KBase Convert tasks") script_details = script_utils.parse_docs(main.__doc__) logger.debug(script_details["Args"]) parser = script_utils.ArgumentParser(description=script_details["Description"], epilog=script_details["Authors"]) # provided by service config parser.add_argument('--workspace_service_url', help=script_details["Args"]["workspace_service_url"], action='store', required=True) parser.add_argument('--ujs_service_url', help=script_details["Args"]["ujs_service_url"], action='store', required=True) # optional because not all KBase Workspace types contain a SHOCK or Handle reference parser.add_argument('--shock_service_url', help=script_details["Args"]["shock_service_url"], action='store', default=None) parser.add_argument('--handle_service_url', help=script_details["Args"]["handle_service_url"], action='store', default=None) # workspace info for pulling the data parser.add_argument('--source_workspace_name', help=script_details["Args"]["source_workspace_name"], action='store', required=True) parser.add_argument('--source_object_name', help=script_details["Args"]["source_object_name"], action='store', required=True) # workspace info for saving the data parser.add_argument('--destination_workspace_name', help=script_details["Args"]["destination_workspace_name"], action='store', required=True) parser.add_argument('--destination_object_name', help=script_details["Args"]["destination_object_name"], action='store', required=True) # the types that we are transforming between, currently assumed one to one parser.add_argument('--source_kbase_type', help=script_details["Args"]["source_kbase_type"], action='store', required=True) parser.add_argument('--destination_kbase_type', help=script_details["Args"]["destination_kbase_type"], action='store', required=True) # 
any user options provided, encoded as a jason string parser.add_argument('--optional_arguments', help=script_details["Args"]["optional_arguments"], action='store', default='{}') # Used if you are restarting a previously executed job? parser.add_argument('--ujs_job_id', help=script_details["Args"]["ujs_job_id"], action='store', default=None, required=False) # config information for running the validate and transform scripts parser.add_argument('--job_details', help=script_details["Args"]["job_details"], action='store', default=None) # the working directory is where all the files for this job will be written, # and normal operation cleans it after the job ends (success or fail) parser.add_argument('--working_directory', help=script_details["Args"]["working_directory"], action='store', default=None, required=True) parser.add_argument('--keep_working_directory', help=script_details["Args"]["keep_working_directory"], action='store_true') # ignore any extra arguments args, unknown = parser.parse_known_args() kb_token = os.environ.get('KB_AUTH_TOKEN') ujs = UserAndJobState(url=args.ujs_service_url, token=kb_token) est = datetime.datetime.utcnow() + datetime.timedelta(minutes=3) if args.ujs_job_id is not None: ujs.update_job_progress(args.ujs_job_id, kb_token, "KBase Data Convert started", 1, est.strftime('%Y-%m-%dT%H:%M:%S+0000')) # parse all the json strings from the argument list into dicts # TODO had issues with json.loads and unicode strings, workaround was using simplejson and base64 args.optional_arguments = simplejson.loads(base64.urlsafe_b64decode(args.optional_arguments)) args.job_details = simplejson.loads(base64.urlsafe_b64decode(args.job_details)) if not os.path.exists(args.working_directory): os.mkdir(args.working_directory) if args.ujs_job_id is not None: ujs.update_job_progress(args.ujs_job_id, kb_token, "Converting from {0} to {1}".format(args.source_kbase_type,args.destination_kbase_type), 1, est.strftime('%Y-%m-%dT%H:%M:%S+0000') ) # Step 1 : Convert the 
objects try: logger.info(args) convert_args = args.job_details["transform"] convert_args["optional_arguments"] = args.optional_arguments convert_args["working_directory"] = args.working_directory convert_args["workspace_service_url"] = args.workspace_service_url convert_args["source_workspace_name"] = args.source_workspace_name convert_args["source_object_name"] = args.source_object_name convert_args["destination_workspace_name"] = args.destination_workspace_name convert_args["destination_object_name"] = args.destination_object_name logger.info(convert_args) task_output = handler_utils.run_task(logger, convert_args) if task_output["stdout"] is not None: logger.debug("STDOUT : " + str(task_output["stdout"])) if task_output["stderr"] is not None: logger.debug("STDERR : " + str(task_output["stderr"])) except Exception, e: handler_utils.report_exception(logger, {"message": 'ERROR : Conversion from {0} to {1}'.format(args.source_kbase_type,args.destination_kbase_type), "exc": e, "ujs": ujs, "ujs_job_id": args.ujs_job_id, "token": kb_token, }, {"keep_working_directory": args.keep_working_directory, "working_directory": args.working_directory}) ujs.complete_job(args.ujs_job_id, kb_token, "Convert to {0} failed.".format( args.destination_workspace_name), str(e), None)
# NOTE(review): truncated fragment — tail of a single-file validate():
# treats any stderr output from the validation subprocess as failure, raises
# if validation did not pass; then the start of __main__ argparse wiring,
# cut off mid add_argument call. Kept byte-identical pending the rest of
# the file.
stdout, stderr = tool_process.communicate() if len(stderr) > 0: logger.error("Validation failed on {0}".format(fileName)) else: logger.info("Validation passed on {0}".format(fileName)) validated = True if not validated: raise Exception("Validation failed!") else: logger.info("Validation passed.") if __name__ == "__main__": script_details = script_utils.parse_docs(validate.__doc__) import argparse parser = argparse.ArgumentParser(prog=__file__, description=script_details["Description"], epilog=script_details["Authors"]) parser.add_argument("--input_file_name", help=script_details["Args"]["input_file_name"], type=str, nargs="?", required=True) parser.add_argument("--working_directory", help=script_details["Args"]["working_directory"], type=str, nargs="?",
# NOTE(review): truncated fragment — error path of a download taskrunner
# (report the exception, mark the UJS job failed with the traceback, exit 1),
# then the start of __main__ wiring for download_taskrunner, cut off mid
# add_argument call. Kept byte-identical pending the rest of the file.
handler_utils.report_exception(logger, error_object, cleanup_details) ujs.complete_job(ujs_job_id, kb_token, "Download from {0} failed.".format(workspace_name), traceback.format_exc(), None) sys.exit(1) if __name__ == "__main__": logger = script_utils.stderrlogger(__file__, level=logging.DEBUG) script_details = script_utils.parse_docs(download_taskrunner.__doc__) parser = script_utils.ArgumentParser(description=script_details["Description"], epilog=script_details["Authors"]) # provided by service config parser.add_argument('--workspace_service_url', help=script_details["Args"]["workspace_service_url"], action='store', required=True) parser.add_argument('--ujs_service_url', help=script_details["Args"]["ujs_service_url"], action='store', required=True) # optional because not all KBase Workspace types contain a SHOCK or Handle reference parser.add_argument('--shock_service_url',
# NOTE(review): truncated function — a second variant of the Convert
# taskrunner main() that adds a --debug flag and uses parse_args() (wrapped
# in try/except with Python 2 `except Exception, e` syntax) instead of
# parse_known_args(). The function is cut off right after argument parsing;
# the remainder is outside this view, so the code is kept byte-identical.
def main(): """ KBase Convert task manager for converting between KBase objects. Step 1 - Run a converter to pull the source object and save the destination object. Args: workspace_service_url: URL for a KBase Workspace service where KBase objects are stored. ujs_service_url: URL for a User and Job State service to report task progress back to the user. shock_service_url: URL for a KBase SHOCK data store service for storing files and large reference data. handle_service_url: URL for a KBase Handle service that maps permissions from the Workspace to SHOCK for KBase types that specify a Handle reference instead of a SHOCK reference. source_workspace_name: The name of the source workspace. destination_workspace_name: The name of the destination workspace. source_object_name: The source object name. destination_object_name: The destination object name. source_kbase_type: The KBase Workspace type string that indicates the module and type of the object being created. destination_kbase_type: The KBase Workspace type string that indicates the module and type of the object being created. optional_arguments: This is a JSON string containing optional parameters that can be passed in for custom behavior per conversion. ujs_job_id: The job id from the User and Job State service that can be used to report status on task progress back to the user. job_details: This is a JSON string that passes in the script specific command line options for a given conversion type. The service pulls these config settings from a script config created by the developer of the conversion script and passes that into the AWE job that calls this script. working_directory: The working directory on disk where files can be created and will be cleaned when the job ends with success or failure. keep_working_directory: A flag to tell the script not to delete the working directory, which is mainly for debugging purposes. debug: Run the taskrunner in debug mode for local execution in a virtualenv. 
Returns: Literal return value is 0 for success and 1 for failure. Actual data output is one or more Workspace objects saved to a user's workspace. Authors: Matt Henderson, Gavin Price """ logger = script_utils.stderrlogger(__file__, level=logging.DEBUG) logger.info("Executing KBase Convert tasks") script_details = script_utils.parse_docs(main.__doc__) logger.debug(script_details["Args"]) parser = script_utils.ArgumentParser(description=script_details["Description"], epilog=script_details["Authors"]) # provided by service config parser.add_argument('--workspace_service_url', help=script_details["Args"]["workspace_service_url"], action='store', required=True) parser.add_argument('--ujs_service_url', help=script_details["Args"]["ujs_service_url"], action='store', required=True) # optional because not all KBase Workspace types contain a SHOCK or Handle reference parser.add_argument('--shock_service_url', help=script_details["Args"]["shock_service_url"], action='store', default=None) parser.add_argument('--handle_service_url', help=script_details["Args"]["handle_service_url"], action='store', default=None) # workspace info for pulling the data parser.add_argument('--source_workspace_name', help=script_details["Args"]["source_workspace_name"], action='store', required=True) parser.add_argument('--source_object_name', help=script_details["Args"]["source_object_name"], action='store', required=True) # workspace info for saving the data parser.add_argument('--destination_workspace_name', help=script_details["Args"]["destination_workspace_name"], action='store', required=True) parser.add_argument('--destination_object_name', help=script_details["Args"]["destination_object_name"], action='store', required=True) # the types that we are transforming between, currently assumed one to one parser.add_argument('--source_kbase_type', help=script_details["Args"]["source_kbase_type"], action='store', required=True) parser.add_argument('--destination_kbase_type', 
help=script_details["Args"]["destination_kbase_type"], action='store', required=True) # any user options provided, encoded as a jason string parser.add_argument('--optional_arguments', help=script_details["Args"]["optional_arguments"], action='store', default='{}') # Used if you are restarting a previously executed job? parser.add_argument('--ujs_job_id', help=script_details["Args"]["ujs_job_id"], action='store', default=None, required=False) # config information for running the validate and transform scripts parser.add_argument('--job_details', help=script_details["Args"]["job_details"], action='store', default=None) # the working directory is where all the files for this job will be written, # and normal operation cleans it after the job ends (success or fail) parser.add_argument('--working_directory', help=script_details["Args"]["working_directory"], action='store', default=None, required=True) parser.add_argument('--keep_working_directory', help=script_details["Args"]["keep_working_directory"], action='store_true') # turn on debugging options for script developers running locally parser.add_argument('--debug', help=script_details["Args"]["debug"], action='store_true') args = None try: args = parser.parse_args() except Exception, e: logger.debug("Caught exception parsing arguments!") logger.exception(e) sys.exit(1)
# NOTE(review): truncated function — near-duplicate of the taskrunner main()
# above (same docstring, same arguments, --debug flag, parse_args wrapped in
# a Python 2 `except Exception, e` handler), differing only in call
# formatting. Cut off right after argument parsing; the remainder is outside
# this view, so the code is kept byte-identical. Consider deduplicating these
# two variants once the full file is visible.
def main(): """ KBase Convert task manager for converting between KBase objects. Step 1 - Run a converter to pull the source object and save the destination object. Args: workspace_service_url: URL for a KBase Workspace service where KBase objects are stored. ujs_service_url: URL for a User and Job State service to report task progress back to the user. shock_service_url: URL for a KBase SHOCK data store service for storing files and large reference data. handle_service_url: URL for a KBase Handle service that maps permissions from the Workspace to SHOCK for KBase types that specify a Handle reference instead of a SHOCK reference. source_workspace_name: The name of the source workspace. destination_workspace_name: The name of the destination workspace. source_object_name: The source object name. destination_object_name: The destination object name. source_kbase_type: The KBase Workspace type string that indicates the module and type of the object being created. destination_kbase_type: The KBase Workspace type string that indicates the module and type of the object being created. optional_arguments: This is a JSON string containing optional parameters that can be passed in for custom behavior per conversion. ujs_job_id: The job id from the User and Job State service that can be used to report status on task progress back to the user. job_details: This is a JSON string that passes in the script specific command line options for a given conversion type. The service pulls these config settings from a script config created by the developer of the conversion script and passes that into the AWE job that calls this script. working_directory: The working directory on disk where files can be created and will be cleaned when the job ends with success or failure. keep_working_directory: A flag to tell the script not to delete the working directory, which is mainly for debugging purposes. debug: Run the taskrunner in debug mode for local execution in a virtualenv. 
Returns: Literal return value is 0 for success and 1 for failure. Actual data output is one or more Workspace objects saved to a user's workspace. Authors: Matt Henderson, Gavin Price """ logger = script_utils.stderrlogger(__file__, level=logging.DEBUG) logger.info("Executing KBase Convert tasks") script_details = script_utils.parse_docs(main.__doc__) logger.debug(script_details["Args"]) parser = script_utils.ArgumentParser( description=script_details["Description"], epilog=script_details["Authors"]) # provided by service config parser.add_argument('--workspace_service_url', help=script_details["Args"]["workspace_service_url"], action='store', required=True) parser.add_argument('--ujs_service_url', help=script_details["Args"]["ujs_service_url"], action='store', required=True) # optional because not all KBase Workspace types contain a SHOCK or Handle reference parser.add_argument('--shock_service_url', help=script_details["Args"]["shock_service_url"], action='store', default=None) parser.add_argument('--handle_service_url', help=script_details["Args"]["handle_service_url"], action='store', default=None) # workspace info for pulling the data parser.add_argument('--source_workspace_name', help=script_details["Args"]["source_workspace_name"], action='store', required=True) parser.add_argument('--source_object_name', help=script_details["Args"]["source_object_name"], action='store', required=True) # workspace info for saving the data parser.add_argument( '--destination_workspace_name', help=script_details["Args"]["destination_workspace_name"], action='store', required=True) parser.add_argument('--destination_object_name', help=script_details["Args"]["destination_object_name"], action='store', required=True) # the types that we are transforming between, currently assumed one to one parser.add_argument('--source_kbase_type', help=script_details["Args"]["source_kbase_type"], action='store', required=True) parser.add_argument('--destination_kbase_type', 
help=script_details["Args"]["destination_kbase_type"], action='store', required=True) # any user options provided, encoded as a jason string parser.add_argument('--optional_arguments', help=script_details["Args"]["optional_arguments"], action='store', default='{}') # Used if you are restarting a previously executed job? parser.add_argument('--ujs_job_id', help=script_details["Args"]["ujs_job_id"], action='store', default=None, required=False) # config information for running the validate and transform scripts parser.add_argument('--job_details', help=script_details["Args"]["job_details"], action='store', default=None) # the working directory is where all the files for this job will be written, # and normal operation cleans it after the job ends (success or fail) parser.add_argument('--working_directory', help=script_details["Args"]["working_directory"], action='store', default=None, required=True) parser.add_argument('--keep_working_directory', help=script_details["Args"]["keep_working_directory"], action='store_true') # turn on debugging options for script developers running locally parser.add_argument('--debug', help=script_details["Args"]["debug"], action='store_true') args = None try: args = parser.parse_args() except Exception, e: logger.debug("Caught exception parsing arguments!") logger.exception(e) sys.exit(1)