import logging
import os

import biokbase.workspace.client

# script_utils is the KBase Transform helper module that provides
# getStderrLogger() and download_file_from_shock(); the import path below is
# assumed to match the standard KBase transform layout.
from biokbase.Transform import script_utils


def convert(
    workspace_service_url,
    shock_service_url,
    handle_service_url,
    workspace_name,
    object_name,
    working_directory,
    level=logging.INFO,
    logger=None,
):

    """
    Converts KBaseAssembly.SingleEndLibrary to a Fasta file of assembledDNA.

    Args:
	workspace_service_url :  A url for the KBase Workspace service
        shock_service_url: A url for the KBase SHOCK service.
        handle_service_url: A url for the KBase Handle Service.
        workspace_name : Name of the workspace
        object_name : Name of the object in the workspace
        working_directory : The working directory for where the output file should be stored.
        level: Logging level, defaults to logging.INFO.

    """

    if not os.path.isdir(working_directory):
        raise Exception("The working directory {0} does not exist.".format(working_directory))

    md5 = None
    if logger is None:
        logger = script_utils.getStderrLogger(__file__)

    logger.info("Starting conversion of KBaseAssembly.SingleEndLibrary to FASTA.")

    token = os.environ.get("KB_AUTH_TOKEN")

    logger.info("Gathering information.")

    ws_client = biokbase.workspace.client.Workspace(workspace_service_url)
    single_end_library = ws_client.get_objects([{"workspace": workspace_name, "name": object_name}])[0]["data"]

    shock_id = None
    if "handle" in single_end_library and "id" in single_end_library["handle"]:
        shock_id = single_end_library["handle"]["id"]
    if shock_id is None:
        raise Exception("There was not shock id found.")

    #    if "handle" in ws_object and  "remote_md5" in single_end_library['handle']:
    # 	md5 = single_end_library['handle']['remote_md5']

    script_utils.download_file_from_shock(logger, shock_service_url, shock_id, working_directory, token)
    logger.info("Conversion completed.")
Example No. 2
                "objid": object_id
            }])[0]
    except Exception, e:
        logger.exception(
            "Unable to retrieve workspace object from {0}:{1}.".format(
                workspace_service_url, workspace_name))
        logger.exception(e)
        raise

    shock_id = None
    if "fasta_ref" in contig_set["data"]:
        shock_id = contig_set["data"]["fasta_ref"]
        logger.info("Retrieving data from Shock.")
        script_utils.download_file_from_shock(
            logger=logger,
            shock_service_url=shock_service_url,
            shock_id=shock_id,
            directory=working_directory,
            token=token)
    else:
        ws_object_name = contig_set["info"][1]
        valid_chars = "-_.(){0}{1}".format(string.ascii_letters, string.digits)
        temp_file_name = ""
        filename_chars = list()

        for character in ws_object_name:
            if character in valid_chars:
                filename_chars.append(character)
            else:
                filename_chars.append("_")

        if len(filename_chars) == 0: