Example #1
def download_file(url, filename, session=None):
    # create session
    if None is session:
        session = requests.session()
    cache_dir = sickbeard.CACHE_DIR or _getTempDir()
    session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(cache_dir, 'sessions')))

    # request session headers
    session.headers.update({'User-Agent': USER_AGENT, 'Accept-Encoding': 'gzip,deflate'})

    # request session ssl verify
    session.verify = False

    # request session streaming
    session.stream = True

    # request session proxies
    if sickbeard.PROXY_SETTING:
        (proxy_address, pac_found) = proxy_setting(sickbeard.PROXY_SETTING, url)
        msg = '%sproxy for url: %s' % (('', 'PAC parsed ')[pac_found], url)
        if None is proxy_address:
            logger.log('Proxy error, aborted the request using %s' % msg, logger.DEBUG)
            return
        elif proxy_address:
            logger.log('Using %s' % msg, logger.DEBUG)
            session.proxies = {
                'http': proxy_address,
                'https': proxy_address
            }

    try:
        resp = session.get(url)
        if not resp.ok:
            logger.log(u"Requested url " + url + " returned status code is " + str(
                resp.status_code) + ': ' + clients.http_error_code[resp.status_code], logger.DEBUG)
            return False

        with open(filename, 'wb') as fp:
            for chunk in resp.iter_content(chunk_size=1024):
                if chunk:
                    fp.write(chunk)
                    fp.flush()

        chmodAsParent(filename)
    except requests.exceptions.HTTPError as e:
        _remove_file_failed(filename)
        logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
        return False

    return True
Example #2
def download_file(url, filename, session=None):
    # create session
    cache_dir = sickbeard.CACHE_DIR or _getTempDir()
    session = CacheControl(sess=session,
                           cache=caches.FileCache(
                               os.path.join(cache_dir, 'sessions')))

    # request session headers
    session.headers.update({
        'User-Agent': USER_AGENT,
        'Accept-Encoding': 'gzip,deflate'
    })

    # request session ssl verify
    session.verify = False

    # request session streaming
    session.stream = True

    # request session proxies
    if sickbeard.PROXY_SETTING:
        logger.log("Using proxy for url: " + url, logger.DEBUG)
        session.proxies = {
            "http": sickbeard.PROXY_SETTING,
            "https": sickbeard.PROXY_SETTING,
        }

    try:
        resp = session.get(url)
        if not resp.ok:
            logger.log(
                u"Requested url " + url + " returned status code " +
                str(resp.status_code) + ': ' +
                clients.http_error_code[resp.status_code], logger.DEBUG)
            return False

        with open(filename, 'wb') as fp:
            for chunk in resp.iter_content(chunk_size=1024):
                if chunk:
                    fp.write(chunk)
                    fp.flush()

        chmodAsParent(filename)
    except requests.exceptions.HTTPError as e:
        _remove_file_failed(filename)
        logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url,
                   logger.WARNING)
        return False

    return True
Example #3
def download_file(url, filename, session=None):
    # create session
    if None is session:
        session = requests.session()
    cache_dir = sickbeard.CACHE_DIR or _getTempDir()
    session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(cache_dir, 'sessions')))

    # request session headers
    session.headers.update({'User-Agent': USER_AGENT, 'Accept-Encoding': 'gzip,deflate'})

    # request session ssl verify
    session.verify = False

    # request session streaming
    session.stream = True

    # request session proxies
    if sickbeard.PROXY_SETTING:
        (proxy_address, pac_found) = proxy_setting(sickbeard.PROXY_SETTING, url)
        msg = '%sproxy for url: %s' % (('', 'PAC parsed ')[pac_found], url)
        if None is proxy_address:
            logger.log('Proxy error, aborted the request using %s' % msg, logger.DEBUG)
            return
        elif proxy_address:
            logger.log('Using %s' % msg, logger.DEBUG)
            session.proxies = {
                'http': proxy_address,
                'https': proxy_address
            }

    try:
        resp = session.get(url)
        if not resp.ok:
            logger.log(u"Requested url " + url + " returned status code is " + str(
                resp.status_code) + ': ' + clients.http_error_code[resp.status_code], logger.DEBUG)
            return False

        with open(filename, 'wb') as fp:
            for chunk in resp.iter_content(chunk_size=1024):
                if chunk:
                    fp.write(chunk)
                    fp.flush()

        chmodAsParent(filename)
    except requests.exceptions.HTTPError as e:
        _remove_file_failed(filename)
        logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
        return False
    except requests.exceptions.ConnectionError as e:
        _remove_file_failed(filename)
        logger.log(u"Connection error " + str(e.message) + " while loading URL " + url, logger.WARNING)
        return False
    except requests.exceptions.Timeout as e:
        _remove_file_failed(filename)
        logger.log(u"Connection timed out " + str(e.message) + " while loading URL " + url, logger.WARNING)
        return False
    except EnvironmentError as e:
        _remove_file_failed(filename)
        logger.log(u"Unable to save the file: " + ex(e), logger.ERROR)
        return False
    except Exception:
        _remove_file_failed(filename)
        logger.log(u"Unknown exception while loading URL " + url + ": " + traceback.format_exc(), logger.WARNING)
        return False

    return True
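
Example #3 returns True on success and False on any failure (after removing the partial file), so a caller can branch on the result. A minimal usage sketch, with a hypothetical URL and destination path:

result = download_file('https://example.com/files/archive.zip', '/tmp/archive.zip')
if result:
    # file was written and permissions adjusted via chmodAsParent
    print('download completed')
else:
    # any partial file has already been removed by _remove_file_failed
    print('download failed')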