Code example #1
File: fetch.py Project: desilinguist/conda
def download(url, dst_path):
    try:
        u = connectionhandled_urlopen(url)
    except IOError:
        raise RuntimeError("Could not open '%s'" % url)
    except ValueError as e:
        raise RuntimeError(e)

    size = get_http_value(u, 'Content-Length')
    if size:
        size = int(size)
        fn = basename(dst_path)
        getLogger('fetch.start').info((fn[:14], size))

    # Stream the response to dst_path in 16 KB chunks, reporting the cumulative
    # byte count through the 'fetch.update' logger when the size is known.
    n = 0
    fo = open(dst_path, 'wb')
    while True:
        chunk = u.read(16384)
        if not chunk:
            break
        fo.write(chunk)
        n += len(chunk)
        if size:
            getLogger('fetch.update').info(n)
    fo.close()

    u.close()
    if size:
        getLogger('fetch.stop').info(None)
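
A minimal usage sketch for the download() above, assuming it can be imported as conda.fetch.download (the source file is conda's fetch.py); the URL and destination path are placeholders. The function reports progress through the 'fetch.start', 'fetch.update' and 'fetch.stop' loggers, so handlers are attached here to make those records visible.

import logging

from conda.fetch import download  # assumed import path for the function above

# Make the progress records emitted via getLogger('fetch.*') visible.
for name in ('fetch.start', 'fetch.update', 'fetch.stop'):
    logging.getLogger(name).addHandler(logging.StreamHandler())
    logging.getLogger(name).setLevel(logging.INFO)

# Placeholder URL and destination path, for illustration only.
download('http://repo.continuum.io/pkgs/gpl/osx-64/repodata.json',
         '/tmp/repodata.json')
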
Code example #2
File: fetch.py Project: nvdnkpr/conda
def fetch_pkg(info, dst_dir=config.pkgs_dir):
    '''
    fetch a package `fn` from `url` and store it into `dst_dir`
    '''
    fn = '%(name)s-%(version)s-%(build)s.tar.bz2' % info
    url = info['channel'] + fn
    path = join(dst_dir, fn)
    pp = path + '.part'

    with Locked(dst_dir):
        for x in range(retries):
            try:
                fi = connectionhandled_urlopen(url)
            except IOError:
                log.debug("Attempt %d failed at urlopen" % x)
                continue
            if fi is None:
                log.debug("Could not fetch (urlopen returned None)")
                continue
            log.debug("Fetching: %s" % url)
            n = 0
            h = hashlib.new('md5')
            getLogger('fetch.start').info((fn, info['size']))
            need_retry = False
            try:
                fo = open(pp, 'wb')
            except IOError:
                raise RuntimeError("Could not open %r for writing.  "
                             "Permissions problem or missing directory?" % pp)
            # Stream the package into the .part file in 16 KB chunks, updating
            # the MD5 hash and the 'fetch.update' progress logger as it goes.
            while True:
                try:
                    chunk = fi.read(16384)
                except IOError:
                    need_retry = True
                    break
                if not chunk:
                    break
                try:
                    fo.write(chunk)
                except IOError:
                    raise RuntimeError("Failed to write to %r." % pp)
                h.update(chunk)
                n += len(chunk)
                getLogger('fetch.update').info(n)

            fo.close()
            if need_retry:
                continue

            fi.close()
            getLogger('fetch.stop').info(None)
            if h.hexdigest() != info['md5']:
                raise RuntimeError("MD5 sums mismatch for download: %s" % fn)
            try:
                os.rename(pp, path)
            except OSError:
                raise RuntimeError("Could not rename %r to %r." % (pp, path))
            return

    raise RuntimeError("Could not locate '%s'" % url)
Code example #3
File: fetch.py Project: NacimKACEL/misc
def fetch_repodata(url, cache={}):
    request = urllib2.Request(url + 'repodata.json.bz2')
    if url in cache:
        d = cache[url]
        if '_etag' in d:
            request.add_header('If-None-Match', d['_etag'])
        if '_mod' in d:
            request.add_header('If-Modified-Since', d['_mod'])

    try:
        u = connectionhandled_urlopen(request)
        data = u.read()
        u.close()
        d = json.loads(bz2.decompress(data).decode('utf-8'))
        etag = u.info().getheader('Etag')
        if etag:
            d['_etag'] = etag
        timestamp = u.info().getheader('Last-Modified')
        if timestamp:
            d['_mod'] = timestamp
        cache[url] = d

    except urllib2.HTTPError as e:
        sys.stderr.write("HTTPError: %d  %s\n" % (e.code, e.msg))
        if e.code != 304:
            raise

    except urllib2.URLError:
        sys.stderr.write("Error: unknown host: %s\n" % url)

    return cache[url]
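
Worth noting in this variant: the mutable default argument cache={} lives for the whole process, so it acts as an in-memory memo shared by every call. A second call for the same url therefore sends the stored ETag / Last-Modified values as conditional headers, and on a 304 Not Modified response the handler falls through and the cached dict is returned. A short illustration, assuming the fetch_repodata() above is in scope; the channel URL is a placeholder and must end with '/'.

channel = 'http://repo.continuum.io/pkgs/gpl/osx-64/'  # placeholder channel URL

# First call: unconditional GET; the parsed repodata (plus _etag/_mod) is stored
# in the shared default dict, keyed by the url.
d1 = fetch_repodata(channel)

# Second call in the same process: the stored ETag / Last-Modified are sent as
# conditional headers; on HTTP 304 the cached dict is returned unchanged.
d2 = fetch_repodata(channel)
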
Code example #4
def fetch_repodata(url, cache_dir=None, use_cache=False):
    log.debug("fetching repodata: %s ..." % url)

    cache_path = join(cache_dir or create_cache_dir(), cache_fn_url(url))
    try:
        cache = json.load(open(cache_path))
    except (IOError, ValueError):
        cache = {'packages': {}}

    if use_cache:
        return cache

    request = urllib2.Request(url + 'repodata.json.bz2')
    if '_etag' in cache:
        request.add_header('If-None-Match', cache['_etag'])
    if '_mod' in cache:
        request.add_header('If-Modified-Since', cache['_mod'])

    try:
        u = connectionhandled_urlopen(request)
        data = u.read()
        u.close()
        cache = json.loads(bz2.decompress(data).decode('utf-8'))
        add_http_value_to_dict(u, 'Etag', cache, '_etag')
        add_http_value_to_dict(u, 'Last-Modified', cache, '_mod')

    except ValueError:
        raise RuntimeError("Invalid index file: %srepodata.json.bz2" % url)

    except urllib2.HTTPError as e:
        msg = "HTTPError: %d  %s  %s\n" % (e.code, e.msg, url)
        log.debug(msg)
        if e.code != 304:
            raise RuntimeError(msg)

    except urllib2.URLError:
        sys.stderr.write("Error: unknown host: %s\n" % url)
        if fail_unknown_host:
            sys.exit(1)

    cache['_url'] = url
    try:
        with open(cache_path, 'w') as fo:
            json.dump(cache, fo, indent=2, sort_keys=True)
    except IOError:
        pass

    return cache or None
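
This later variant persists the cache to disk instead, one JSON file per channel URL (named by cache_fn_url), and sends the same conditional headers when refreshing; passing use_cache=True skips the network entirely. A minimal usage sketch, assuming the function can be imported as conda.fetch.fetch_repodata; the channel URL is a placeholder and must end with '/'.

from conda.fetch import fetch_repodata  # assumed import path

channel = 'http://repo.continuum.io/pkgs/gpl/osx-64/'  # placeholder channel URL

# Offline lookup: returns whatever the on-disk cache holds, or {'packages': {}}.
cached = fetch_repodata(channel, use_cache=True)

# Online refresh: conditional GET against the channel; the (possibly updated)
# repodata is written back to the cache file and returned.
fresh = fetch_repodata(channel)
print len(fresh['packages'])
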
Code example #5
File: fetch.py Project: certik/conda
def fetch_repodata(url, cache_dir=None, use_cache=False):
    dotlog.debug("fetching repodata: %s ..." % url)

    cache_path = join(cache_dir or create_cache_dir(), cache_fn_url(url))
    try:
        cache = json.load(open(cache_path))
    except (IOError, ValueError):
        cache = {'packages': {}}

    if use_cache:
        return cache

    request = urllib2.Request(url + 'repodata.json.bz2')
    if '_etag' in cache:
        request.add_header('If-None-Match', cache['_etag'])
    if '_mod' in cache:
        request.add_header('If-Modified-Since', cache['_mod'])

    try:
        u = connectionhandled_urlopen(request)
        data = u.read()
        u.close()
        cache = json.loads(bz2.decompress(data).decode('utf-8'))
        add_http_value_to_dict(u, 'Etag', cache, '_etag')
        add_http_value_to_dict(u, 'Last-Modified', cache, '_mod')

    except ValueError:
        raise RuntimeError("Invalid index file: %srepodata.json.bz2" % url)

    except urllib2.HTTPError as e:
        msg = "HTTPError: %d  %s  %s\n" % (e.code, e.msg, url)
        log.debug(msg)
        if e.code != 304:
            raise RuntimeError(msg)

    except urllib2.URLError:
        sys.stderr.write("Error: unknown host: %s\n" % url)
        if fail_unknown_host:
            sys.exit(1)

    cache['_url'] = url
    try:
        with open(cache_path, 'w') as fo:
            json.dump(cache, fo, indent=2, sort_keys=True)
    except IOError:
        pass

    return cache or None
Code example #6
def fetch_pkg(info, dst_dir=None):
    '''
    fetch a package given by `info` and store it into `dst_dir`
    '''
    if dst_dir is None:
        dst_dir = config.pkgs_dirs[0]

    fn = '%(name)s-%(version)s-%(build)s.tar.bz2' % info
    url = info['channel'] + fn
    log.debug("url=%r" % url)
    path = join(dst_dir, fn)
    pp = path + '.part'

    with Locked(dst_dir):
        for x in range(retries):
            try:
                fi = connectionhandled_urlopen(url)
            except IOError:
                log.debug("attempt %d failed at urlopen" % x)
                continue
            if fi is None:
                log.debug("could not fetch (urlopen returned None)")
                continue
            n = 0
            h = hashlib.new('md5')
            getLogger('fetch.start').info((fn, info['size']))
            need_retry = False
            try:
                fo = open(pp, 'wb')
            except IOError:
                raise RuntimeError(
                    "Could not open %r for writing.  "
                    "Permissions problem or missing directory?" % pp)
            while True:
                try:
                    chunk = fi.read(16384)
                except IOError:
                    need_retry = True
                    break
                if not chunk:
                    break
                try:
                    fo.write(chunk)
                except IOError:
                    raise RuntimeError("Failed to write to %r." % pp)
                h.update(chunk)
                n += len(chunk)
                getLogger('fetch.update').info(n)

            fo.close()
            if need_retry:
                continue

            fi.close()
            getLogger('fetch.stop').info(None)
            if h.hexdigest() != info['md5']:
                raise RuntimeError(
                    "MD5 sums mismatch for download: %s (%s != %s)" %
                    (fn, h.hexdigest(), info['md5']))
            try:
                os.rename(pp, path)
            except OSError:
                raise RuntimeError("Could not rename %r to %r." % (pp, path))
            return

    raise RuntimeError("Could not locate '%s'" % url)
Code example #7
File: s2.py Project: pombredanne/misc-1
import urllib2

from conda.connection import connectionhandled_urlopen


url = 'http://repo.continuum.io/pkgs/gpl/osx-64/repodata.json'
#url = 'https://conda.binstar.org/ilan/osx-64/repodata.json'

request = urllib2.Request(url)
# Conditional request: the server replies 304 Not Modified when the resource's
# ETag still matches the value supplied here.
request.add_header('If-None-Match', '2f1bf63044f924c048e0dce972929c4b')
try:
    u = connectionhandled_urlopen(request)
except urllib2.HTTPError as e:
    print e.code, e.msg
except urllib2.URLError:
    print "host unknown"

# If the request above ended in an HTTPError (e.g. 304 Not Modified) or a
# URLError, `u` was never bound, so the NameError guard skips the read.
try:
    content = u.read()
    u.close()
    print content
except NameError:
    pass
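
The 'If-None-Match' value above is presumably an ETag remembered from an earlier response. A rough sketch of how such a value can be captured with the same APIs the script already imports (the server may also send no ETag at all):

# Unconditional request to learn the server's current ETag, so that later
# requests can be made conditional with If-None-Match as above.
probe = connectionhandled_urlopen(urllib2.Request(url))
etag = probe.info().getheader('Etag')  # None if the server sent no ETag header
probe.close()
print etag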