def download_tool(tool, tool_version, required_files, logger, url, only_clean=False):
    """Download and unpack a third-party tool archive into its versioned directory.

    Parameters: `tool` / `tool_version` name the tool and pick its directory,
    `required_files` are paths (relative to the tool dir) that must exist for
    the tool to be considered installed, `url` is the tarball location.
    Returns the tool directory path on success (or after cleaning),
    or None if the directory could not be prepared or the download failed.
    """
    tool_dirpath = get_dir_for_download(tool + tool_version, tool, required_files, logger, only_clean=only_clean)
    if not tool_dirpath:
        return None
    if only_clean:
        # Cleaning mode: remove any previously downloaded copy and report its path.
        if os.path.isdir(tool_dirpath):
            shutil.rmtree(tool_dirpath, ignore_errors=True)
        return tool_dirpath

    failed_compilation_flag = join(tool_dirpath, 'make.failed')
    # Download only when required files are missing AND no earlier compilation
    # attempt is flagged as failed (avoids re-downloading a known-broken build).
    if not all(os.path.exists(join(tool_dirpath, fpath)) for fpath in required_files) and not \
            check_prev_compilation_failed(tool, failed_compilation_flag):
        downloaded_fpath = join(tool_dirpath, tool + '.tar.gz')
        # FIX: name the tool being downloaded (consistent with the sibling
        # definition) instead of the generic "third-party tools" message.
        logger.main_info(' Downloading ' + tool + '...')
        download_unpack_compressed_tar(tool, url, downloaded_fpath, tool_dirpath, logger)

        if not all(os.path.exists(join(tool_dirpath, fpath)) for fpath in required_files):
            # FIX: added the missing space before 'and' in the warning text.
            logger.warning('Failed to download ' + tool + ' from ' + url + ' and unpack it into ' + tool_dirpath)
            return None
    return tool_dirpath
def download_db(logger, is_prokaryote, is_fungus=False, only_clean=False):
    """Fetch the BUSCO lineage database matching the organism type.

    Returns the path to the unpacked database directory on success,
    True when `only_clean` removed an existing copy, or None on failure.
    """
    # Pick the lineage: prokaryote takes precedence, then fungus, else eukaryote.
    if is_prokaryote:
        db_url, lineage = bacteria_db_url, 'bacteria'
    elif is_fungus:
        db_url, lineage = fungi_db_url, 'fungi'
    else:
        db_url, lineage = eukaryota_db_url, 'eukaryota'

    download_dir = get_dir_for_download('busco', 'Busco databases', [lineage], logger, only_clean=only_clean)
    if not download_dir:
        return None

    lineage_dirpath = join(download_dir, lineage)
    if only_clean:
        # Cleaning mode: drop any previously unpacked database.
        if os.path.isdir(lineage_dirpath):
            shutil.rmtree(lineage_dirpath, ignore_errors=True)
        return True

    if not os.path.exists(lineage_dirpath):
        archive_fpath = join(download_dir, lineage + '.tar.gz')
        logger.main_info(' Downloading BUSCO database...')
        download_unpack_compressed_tar(lineage + ' database', db_url, archive_fpath, lineage_dirpath, logger)
        if not os.path.exists(lineage_dirpath):
            logger.warning('Failed to download ' + lineage + ' database from ' + db_url + ' and unpack it into ' + download_dir)
            return None
    return lineage_dirpath
def download_db(logger, is_prokaryote, is_fungus=False, only_clean=False):
    """Fetch the BUSCO lineage database matching the organism type.

    `is_prokaryote` selects the bacteria database, `is_fungus` the fungi one,
    otherwise the eukaryote database is used. Returns the path to the unpacked
    database directory on success, True when `only_clean` removed an existing
    copy, or None on failure.
    """
    # Select URL and clade name; prokaryote takes precedence over fungus.
    if is_prokaryote:
        url = bacteria_db_url
        clade = 'bacteria'
    elif is_fungus:
        url = fungi_db_url
        clade = 'fungi'
    else:
        url = eukaryota_db_url
        clade = 'eukaryota'
    dirpath = get_dir_for_download('busco', 'Busco databases', [clade], logger, only_clean=only_clean)
    if not dirpath:
        return None
    db_dirpath = join(dirpath, clade)
    if only_clean:
        # Cleaning mode: drop any previously unpacked database.
        if os.path.isdir(db_dirpath):
            shutil.rmtree(db_dirpath, ignore_errors=True)
        return True
    if not os.path.exists(db_dirpath):
        downloaded_fpath = join(dirpath, clade + '.tar.gz')
        logger.main_info(' Downloading ' + clade + ' database...')
        download_unpack_compressed_tar(clade + ' database', url, downloaded_fpath, db_dirpath, logger)
        if not os.path.exists(db_dirpath):
            # FIX: added the missing space before 'and' in the warning text.
            logger.warning('Failed to download ' + clade + ' database from ' + url + ' and unpack it into ' + dirpath)
            return None
    return db_dirpath
def download_tool(tool, tool_version, required_files, logger, url, only_clean=False):
    """Download and unpack a third-party tool archive into its versioned directory.

    Parameters: `tool` / `tool_version` name the tool and pick its directory,
    `required_files` are paths (relative to the tool dir) that must exist for
    the tool to be considered installed, `url` is the tarball location.
    Returns the tool directory path on success (or after cleaning),
    or None if the directory could not be prepared or the download failed.
    """
    tool_dirpath = get_dir_for_download(tool + tool_version, tool, required_files, logger, only_clean=only_clean)
    if not tool_dirpath:
        return None
    if only_clean:
        # Cleaning mode: remove any previously downloaded copy and report its path.
        if os.path.isdir(tool_dirpath):
            shutil.rmtree(tool_dirpath, ignore_errors=True)
        return tool_dirpath

    failed_compilation_flag = join(tool_dirpath, 'make.failed')
    # Download only when required files are missing AND no earlier compilation
    # attempt is flagged as failed (avoids re-downloading a known-broken build).
    if not all(os.path.exists(join(tool_dirpath, fpath)) for fpath in required_files) and not \
            check_prev_compilation_failed(tool, failed_compilation_flag):
        downloaded_fpath = join(tool_dirpath, tool + '.tar.gz')
        logger.main_info(' Downloading ' + tool + '...')
        download_unpack_compressed_tar(tool, url, downloaded_fpath, tool_dirpath, logger)

        if not all(os.path.exists(join(tool_dirpath, fpath)) for fpath in required_files):
            # FIX: added the missing space before 'and' in the warning text.
            logger.warning('Failed to download ' + tool + ' from ' + url + ' and unpack it into ' + tool_dirpath)
            return None
    return tool_dirpath