# Standard-library and KBase client imports needed by the snippets below; the
# biokbase import paths are assumed from the KBase Transform scripts these
# examples appear to come from.
import argparse
import json
import logging
import os
import sys
import traceback

import biokbase.workspace.client
from biokbase.Transform import script_utils


def convert(shock_url, shock_id, handle_url, handle_id, input_filename, output_filename, level=logging.INFO, logger=None):
    """
    Converts FASTA file to KBaseAssembly.SingleEndLibrary json string.

    Args:
        shock_url: A url for the KBase SHOCK service.
        handle_url: A url for the KBase Handle Service.
        shock_id: A KBase SHOCK node id.
        handle_id: A KBase Handle id.
        input_filename: A file name for the input FASTA data.
        output_filename: A file name where the output JSON string should be stored.
        level: Logging level, defaults to logging.INFO.

    """

    if logger is None:
        logger = script_utils.getStderrLogger(__file__)
    
    logger.info("Starting conversion of FASTA to KBaseAssembly.SingleEndLibrary.")

    token = os.environ.get('KB_AUTH_TOKEN')
    
    logger.info("Gathering information.")
    handles = script_utils.getHandles(logger, shock_url, handle_url, [shock_id], [handle_id], token)

    assert len(handles) != 0, "Unable to retrieve a handle for the supplied shock or handle id."

    objectString = json.dumps({"handle": handles[0]}, sort_keys=True, indent=4)
    
    return objectString
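
# A minimal usage sketch for the FASTA -> SingleEndLibrary direction, shown as a
# comment so it does not execute at import time. It assumes a valid KB_AUTH_TOKEN
# is set in the environment; the shock node id and file names are placeholders,
# and the service urls are the defaults used by the command-line parser below.
#
#     single_end_json = convert(
#         shock_url="https://kbase.us/services/shock-api/",
#         shock_id="<shock-node-id>",
#         handle_url="https://kbase.us/services/handle_service/",
#         handle_id=None,
#         input_filename="reads.fa",
#         output_filename="single_end_library.json",
#     )
#     with open("single_end_library.json", "w") as out_file:
#         out_file.write(single_end_json)
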
def convert(
    workspace_service_url,
    shock_service_url,
    handle_service_url,
    workspace_name,
    object_name,
    working_directory,
    level=logging.INFO,
    logger=None,
):
    """
    Converts KBaseAssembly.SingleEndLibrary to a FASTA file of assembled DNA.

    Args:
        workspace_service_url: A url for the KBase Workspace service.
        shock_service_url: A url for the KBase SHOCK service.
        handle_service_url: A url for the KBase Handle Service.
        workspace_name: Name of the workspace.
        object_name: Name of the object in the workspace.
        working_directory: The working directory where the output file should be stored.
        level: Logging level, defaults to logging.INFO.
        logger: A logger to use; a stderr logger is created if none is supplied.

    """

    if not os.path.isdir(working_directory):
        raise Exception("The working directory {0} does not exist.".format(working_directory))

    md5 = None
    if logger is None:
        logger = script_utils.getStderrLogger(__file__)

    logger.info("Starting conversion of KBaseAssembly.SingleEndLibrary to FASTA.")

    token = os.environ.get("KB_AUTH_TOKEN")

    logger.info("Gathering information.")

    ws_client = biokbase.workspace.client.Workspace(workspace_service_url)
    single_end_library = ws_client.get_objects([{"workspace": workspace_name, "name": object_name}])[0]["data"]

    shock_id = None
    if "handle" in single_end_library and "id" in single_end_library["handle"]:
        shock_id = single_end_library["handle"]["id"]
    if shock_id is None:
        raise Exception("There was not shock id found.")

    #    if "handle" in ws_object and  "remote_md5" in single_end_library['handle']:
    # 	md5 = single_end_library['handle']['remote_md5']

    script_utils.download_file_from_shock(logger, shock_service_url, shock_id, working_directory, token)
    logger.info("Conversion completed.")
if __name__ == "__main__":	
    parser = argparse.ArgumentParser(prog='trns_transform_KBaseAssembly.FA-to-KBaseAssembly.SingleEndLibrary', 
                                     description='Converts FASTA file to KBaseAssembly.SingleEndLibrary json string.',
                                     epilog='Authors: Matt Henderson')
    parser.add_argument('-s', '--shock_url', help='Shock url', action='store', type=str, default='https://kbase.us/services/shock-api/', nargs='?')
    parser.add_argument('-n', '--handle_url', help='Handle service url', action='store', type=str, default='https://kbase.us/services/handle_service/', nargs='?')
    parser.add_argument('-f', '--input_filename', help='Input file name', action='store', type=str, nargs='?', required=False)
    parser.add_argument('-o', '--output_filename', help='Output file name', action='store', type=str, nargs='?', required=True)
    parser.add_argument('-r', '--reference_name', help='Reference name', action='store', type=str, nargs='?', default=None)

    data_id = parser.add_mutually_exclusive_group(required=True)
    data_id.add_argument('-i', '--shock_id', help='Shock node id', action='store', type=str, nargs='?')
    data_id.add_argument('-d', '--handle_id', help='Handle id', action='store', type=str, nargs='?')

    args = parser.parse_args()

    logger = script_utils.getStderrLogger(__file__)
    try:
        ret_json = json.loads(convert(args.shock_url, args.shock_id, args.handle_url, args.handle_id, args.input_filename, args.output_filename, logger=logger))
        if args.reference_name is not None:
            ret_json["reference_name"] = args.reference_name
        logger.info("Writing out JSON.")
        with open(args.output_filename, "w") as outFile:
            outFile.write(json.dumps(ret_json, sort_keys=True, indent=4))
        logger.info("Conversion completed.")
    except Exception:
        logger.exception("".join(traceback.format_exc()))
        sys.exit(1)

    sys.exit(0)
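
# Example command line for the script above (hypothetical file name and node id),
# run with a valid KB_AUTH_TOKEN exported in the environment:
#
#     python trns_transform_KBaseAssembly.FA-to-KBaseAssembly.SingleEndLibrary.py \
#         --shock_id <shock-node-id> \
#         --output_filename single_end_library.json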