Example #1
def download_snp_history(url=SNP_HISTORY_URL, outpath="SNPHistory.bcp.gz"):
    with tqdm(unit='B', unit_scale=True, miniters=1,
              desc=url.split('/')[-1]) as t:
        urlcleanup()
        urlretrieve(url, filename=outpath, reporthook=tqdm_hook(t), data=None)

    return outpath
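
This snippet, like the similar helpers in Examples #2-#4, relies on two names that are not shown in the excerpt: urlretrieve/urlcleanup (urllib.request in Python 3, urllib in Python 2) and a tqdm_hook helper that adapts a tqdm progress bar to urlretrieve's reporthook protocol. A minimal sketch of such a helper, following the standard tqdm recipe (the original project's version may differ):

def tqdm_hook(t):
    """Wrap a tqdm instance as a reporthook for urlretrieve.

    urlretrieve calls the hook as hook(block_number, block_size, total_size).
    """
    last_b = [0]

    def update_to(b=1, bsize=1, tsize=None):
        if tsize not in (None, -1):
            t.total = tsize  # total size becomes known once headers arrive
        t.update((b - last_b[0]) * bsize)  # advance by bytes read since the last call
        last_b[0] = b

    return update_to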
Example #2
def download_ebi_catalog(url, outpath):
    with tqdm(unit='B', unit_scale=True, miniters=1,
              desc=url.split('/')[-1]) as t:
        urlcleanup()
        urlretrieve(url, filename=outpath, reporthook=tqdm_hook(t), data=None)

    return outpath
Example #3
def download_merge_arch(url=RS_MERGE_ARCH_URL, outpath="RsMergeArch.bcp.gz"):
    with tqdm(unit='B', unit_scale=True, miniters=1,
              desc=url.split('/')[-1]) as t:
        urlcleanup()
        urlretrieve(url, filename=outpath, reporthook=tqdm_hook(t), data=None)

    return outpath
Example #4
def download_dbsnp_vcf(dbsnp_build=None, genome_build=None, url=None, outpath=None):
  """
  Download the NCBI dbSNP VCF for a given human genome build and dbSNP build.

  Args:
    dbsnp_build: dbSNP build, e.g. "b147"
    genome_build: genome build, e.g. "GRCh37p13"
    url: Direct URL to the file, e.g. ftp://ftp.ncbi.nlm.nih.gov/snp/organisms/human_9606_b147_GRCh37p13/VCF/00-All.vcf.gz
    outpath: Path to write the file to. If not given, a filename is constructed
      from dbsnp_build and genome_build, falling back to "dbsnp.vcf.gz".

  Returns:
    Name of the file into which the data was saved.
  """

  if url is None:
    # Both builds are required to construct the URL; fail early with a clear
    # message rather than an AttributeError on None.
    if genome_build is None or dbsnp_build is None:
      raise ValueError("Must provide either url, or both genome_build and dbsnp_build")

    if not genome_build.startswith("GRC"):
      raise ValueError("Genome build should begin with GRC")

    if not dbsnp_build.startswith("b"):
      raise ValueError("dbSNP build should look like b147, b148, etc.")

    url = NCBI_VCF_TEMPLATE_URL.format(dbsnp_build, genome_build)

  if outpath is None:
    if genome_build is None or dbsnp_build is None:
      outpath = "dbsnp.vcf.gz"
    else:
      outpath = "human_9606_{}_{}_All.vcf.gz".format(dbsnp_build, genome_build)

  with tqdm(unit='B', unit_scale=True, miniters=1, desc=url.split('/')[-1]) as t:
    urlcleanup()
    urlretrieve(url, filename=outpath, reporthook=tqdm_hook(t), data=None)

  return outpath
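
The function above references a module-level NCBI_VCF_TEMPLATE_URL constant that the excerpt does not include. A definition consistent with the example URL in the docstring (an assumption, not the original constant), plus an example call:

NCBI_VCF_TEMPLATE_URL = ("ftp://ftp.ncbi.nlm.nih.gov/snp/organisms/"
                         "human_9606_{}_{}/VCF/00-All.vcf.gz")

# Downloads to human_9606_b147_GRCh37p13_All.vcf.gz in the working directory.
path = download_dbsnp_vcf(dbsnp_build="b147", genome_build="GRCh37p13")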
Example #5
def download_tile(tile, url, pid, version, username, password):

    grass.debug("Download tile: %s" % tile, debug=1)
    local_tile = "NASADEM_HGT_" + str(tile) + ".zip"

    urllib2.urlcleanup()

    remote_tile = str(url) + "/" + version + "/2000.02.11/" + local_tile
    goturl = 1

    try:
        password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
        password_manager.add_password(
            None, "https://urs.earthdata.nasa.gov", username, password
        )

        cookie_jar = CookieJar()

        opener = urllib2.build_opener(
            urllib2.HTTPBasicAuthHandler(password_manager),
            # urllib2.HTTPHandler(debuglevel=1),    # Uncomment these two lines to see
            # urllib2.HTTPSHandler(debuglevel=1),   # details of the requests/responses
            urllib2.HTTPCookieProcessor(cookie_jar),
        )
        urllib2.install_opener(opener)

        request = urllib2.Request(remote_tile)
        response = urllib2.urlopen(request)

        with open(local_tile, "w+b") as fo:
            fo.write(response.read())
        time.sleep(0.5)
    except Exception:
        goturl = 0

    return goturl
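
This example is Python 2 code (urllib2, with CookieJar from cookielib). Under Python 3 the same NASA Earthdata login setup would use urllib.request; a rough equivalent of the opener construction above, assuming the same username and password arguments:

from http.cookiejar import CookieJar
import urllib.request

password_manager = urllib.request.HTTPPasswordMgrWithDefaultRealm()
password_manager.add_password(
    None, "https://urs.earthdata.nasa.gov", username, password
)
opener = urllib.request.build_opener(
    urllib.request.HTTPBasicAuthHandler(password_manager),
    urllib.request.HTTPCookieProcessor(CookieJar()),
)
# Subsequent urllib.request.urlopen() calls authenticate against URS.
urllib.request.install_opener(opener)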
Example #6
def get_file(filename):
    '''Downloads filename from ChEBI FTP site'''
    destination = __DOWNLOAD_PARAMS['path']
    filepath = os.path.join(destination, filename)

    if not __is_current(filepath):

        if not os.path.exists(destination):
            os.makedirs(destination)

        url = 'ftp://ftp.ebi.ac.uk/pub/databases/chebi/' + \
            'Flat_file_tab_delimited/'
        urlretrieve(urlparse.urljoin(url, filename), filepath)
        urlcleanup()

    if filepath.endswith('.zip'):
        zfile = zipfile.ZipFile(filepath, 'r')
        filepath = os.path.join(destination, zfile.namelist()[0])
        zfile.extractall(destination)
    elif filepath.endswith('.gz'):
        unzipped_filepath = filepath[:-len('.gz')]

        if os.path.exists(unzipped_filepath) \
                and __is_current(unzipped_filepath):
            filepath = unzipped_filepath
        else:
            # Decompress to the already-computed target path; context
            # managers guarantee both handles are closed.
            with gzip.open(filepath, 'rb') as input_file, \
                    open(unzipped_filepath, 'wb') as output_file:
                for line in input_file:
                    output_file.write(line)

            filepath = unzipped_filepath

    return filepath
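
get_file depends on two private module members that the excerpt omits: __DOWNLOAD_PARAMS and __is_current. From the way they are used, __DOWNLOAD_PARAMS holds at least the download directory, and __is_current decides whether a cached file is fresh enough to skip re-downloading. A hypothetical sketch (the real module may define these quite differently):

import os
import time

# Hypothetical values; the path and maximum age are illustrative only.
__DOWNLOAD_PARAMS = {'path': 'chebi_data', 'max_age': 30 * 24 * 3600}

def __is_current(filepath):
    '''Hypothetical freshness check: file exists and is newer than max_age seconds.'''
    return os.path.isfile(filepath) and \
        time.time() - os.path.getmtime(filepath) < __DOWNLOAD_PARAMS['max_age']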
Example #7
def download_tile(tile, url, pid, srtmv3, one, username, password):

    grass.debug("Download tile: %s" % tile, debug=1)
    output = tile + '.r.in.srtm.tmp.' + str(pid)
    if srtmv3:
        if one:
            local_tile = str(tile) + '.SRTMGL1.hgt.zip'
        else:
            local_tile = str(tile) + '.SRTMGL3.hgt.zip'
    else:
        local_tile = str(tile) + '.hgt.zip'

    urllib2.urlcleanup()

    if srtmv3:
        remote_tile = str(url) + local_tile
        goturl = 1

        try:
            password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
            password_manager.add_password(None, "https://urs.earthdata.nasa.gov", username, password)

            cookie_jar = CookieJar()

            opener = urllib2.build_opener(
                urllib2.HTTPBasicAuthHandler(password_manager),
                #urllib2.HTTPHandler(debuglevel=1),    # Uncomment these two lines to see
                #urllib2.HTTPSHandler(debuglevel=1),   # details of the requests/responses
                urllib2.HTTPCookieProcessor(cookie_jar))
            urllib2.install_opener(opener)

            request = urllib2.Request(remote_tile)
            response = urllib2.urlopen(request)

            with open(local_tile, 'w+b') as fo:
                fo.write(response.read())
            time.sleep(0.5)
        except Exception:
            goturl = 0

        return goturl

    # SRTM subdirs: Africa, Australia, Eurasia, Islands, North_America, South_America
    for srtmdir in ('Africa', 'Australia', 'Eurasia', 'Islands', 'North_America', 'South_America'):
        remote_tile = str(url) + str(srtmdir) + '/' + local_tile
        goturl = 1

        try:
            # The original referenced an undefined `request` here; open the
            # constructed remote_tile URL directly instead.
            response = urllib2.urlopen(remote_tile)
            with open(local_tile, 'w+b') as fo:
                fo.write(response.read())
            time.sleep(0.5)
            # does not work:
            #urllib.urlretrieve(remote_tile, local_tile, data = None)
        except Exception:
            goturl = 0

        if goturl == 1:
            return 1

    return 0
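
A hypothetical invocation for a 1-arcsecond SRTMGL1 tile, assuming the LP DAAC base URL and Earthdata credentials (both the URL and the credentials below are assumptions; the caller supplies whatever mirror it actually uses):

got = download_tile('N40E105',
                    'https://e4ftl01.cr.usgs.gov/MEASURES/SRTMGL1.003/2000.02.11/',
                    os.getpid(), srtmv3=True, one=True,
                    username='your_urs_user', password='your_urs_password')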