Example no. 1
0
def getURL(url,
           post_data=None,
           params=None,
           headers=None,
           timeout=30,
           session=None,
           json=False):
    """
    Retrieve the content of `url` and return it, or None on HTTP error.

    :param url: the URL to fetch
    :param post_data: when truthy, sent as the body of a POST request;
        otherwise a plain GET is issued
    :param params: query-string parameters stored on the session
    :param headers: optional dict of extra HTTP headers merged over the
        defaults (None means "no extra headers"; a mutable `{}` default
        would be shared across calls, so None is used instead)
    :param timeout: per-request timeout in seconds
    :param session: optional requests session to wrap with caching
    :param json: when True, return the decoded JSON body instead of bytes
    :returns: response bytes (or parsed JSON when `json` is True),
        or None on any HTTP error
    """

    # request session, backed by an on-disk cache under CACHE_DIR
    cache_dir = sickbeard.CACHE_DIR or _getTempDir()
    session = CacheControl(sess=session,
                           cache=caches.FileCache(
                               os.path.join(cache_dir, 'sessions')))

    # request session headers
    session.headers.update({
        'User-Agent': USER_AGENT,
        'Accept-Encoding': 'gzip,deflate'
    })
    if headers:
        session.headers.update(headers)

    # request session ssl verify
    session.verify = False

    # request session parameters
    session.params = params

    try:
        # request session proxies
        if sickbeard.PROXY_SETTING:
            logger.log("Using proxy for url: " + url, logger.DEBUG)
            session.proxies = {
                "http": sickbeard.PROXY_SETTING,
                "https": sickbeard.PROXY_SETTING,
            }

        # decide if we get or post data to server
        if post_data:
            resp = session.post(url, data=post_data, timeout=timeout)
        else:
            resp = session.get(url, timeout=timeout)

        if not resp.ok:
            logger.log(
                u"Requested url " + url + " returned status code is " +
                str(resp.status_code) + ': ' +
                clients.http_error_code[resp.status_code], logger.DEBUG)
            return

    except requests.exceptions.HTTPError as e:
        logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url,
                   logger.WARNING)
        return

    # honor the `json` flag, which was previously accepted but ignored,
    # and actually return the body the docstring promises
    if json:
        return resp.json()
    return resp.content
Example no. 2
0
def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=None, json=False):
    """
    Retrieve the content of `url` and return it, or None on HTTP error.

    :param url: the URL to fetch (duplicate slashes in the path are collapsed)
    :param post_data: when truthy, sent as the body of a POST request;
        otherwise a plain GET is issued
    :param params: query-string parameters stored on the session
    :param headers: optional dict of extra HTTP headers merged over the defaults
    :param timeout: per-request timeout in seconds
    :param session: optional requests session to wrap with caching
    :param json: when True, return the decoded JSON body instead of bytes
    :returns: response bytes (or parsed JSON when `json` is True), or None on error
    """

    # request session, backed by an on-disk cache under CACHE_DIR
    cache_dir = sickbeard.CACHE_DIR or _getTempDir()
    session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(cache_dir, "sessions")))

    # request session headers
    req_headers = {"User-Agent": USER_AGENT, "Accept-Encoding": "gzip,deflate"}
    if headers:
        req_headers.update(headers)
    session.headers.update(req_headers)

    # request session ssl verify
    session.verify = False

    # request session parameters
    session.params = params

    try:
        # Remove double-slashes from url
        parsed = list(urlparse.urlparse(url))
        parsed[2] = re.sub("/{2,}", "/", parsed[2])  # replace two or more / with one
        url = urlparse.urlunparse(parsed)

        # request session proxies
        if sickbeard.PROXY_SETTING:
            logger.log("Using proxy for url: " + url, logger.DEBUG)
            session.proxies = {"http": sickbeard.PROXY_SETTING, "https": sickbeard.PROXY_SETTING}

        # decide if we get or post data to server
        if post_data:
            resp = session.post(url, data=post_data, timeout=timeout)
        else:
            resp = session.get(url, timeout=timeout)

        if not resp.ok:
            logger.log(
                u"Requested url "
                + url
                + " returned status code is "
                + str(resp.status_code)
                + ": "
                + clients.http_error_code[resp.status_code],
                logger.DEBUG,
            )
            return

    except requests.exceptions.HTTPError as e:
        logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
        return

    # honor the `json` flag, which was previously accepted but ignored,
    # and actually return the body the docstring promises
    if json:
        return resp.json()
    return resp.content
Example no. 3
0
def download_file(url, filename, session=None):
    """
    Stream `url` to disk at `filename`.

    :param url: the URL to download
    :param filename: destination path; removed again if the download fails
    :param session: optional requests session to wrap with caching
    :returns: True on success, False on HTTP error or non-OK status
    """
    # create session, backed by an on-disk cache under CACHE_DIR
    cache_dir = sickbeard.CACHE_DIR or _getTempDir()
    session = CacheControl(sess=session,
                           cache=caches.FileCache(
                               os.path.join(cache_dir, 'sessions')))

    # request session headers
    session.headers.update({
        'User-Agent': USER_AGENT,
        'Accept-Encoding': 'gzip,deflate'
    })

    # request session ssl verify
    session.verify = False

    # request session streaming
    session.stream = True

    # request session proxies
    if sickbeard.PROXY_SETTING:
        logger.log("Using proxy for url: " + url, logger.DEBUG)
        session.proxies = {
            "http": sickbeard.PROXY_SETTING,
            "https": sickbeard.PROXY_SETTING,
        }

    try:
        resp = session.get(url)
        if not resp.ok:
            logger.log(
                u"Requested url " + url + " returned status code is " +
                str(resp.status_code) + ': ' +
                clients.http_error_code[resp.status_code], logger.DEBUG)
            return False

        with open(filename, 'wb') as fp:
            # NOTE: decode_unicode dropped — the file is opened in binary
            # mode, so decoding chunks to unicode would corrupt binary
            # downloads; write the raw bytes instead.
            for chunk in resp.iter_content(chunk_size=1024):
                if chunk:
                    fp.write(chunk)
                    fp.flush()

        chmodAsParent(filename)
    except requests.exceptions.HTTPError as e:
        _remove_file_failed(filename)
        logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url,
                   logger.WARNING)
        return False

    # signal success explicitly; the failure paths all return False
    return True
Example no. 4
0
def download_file(url, filename, session=None):
    """
    Stream `url` to disk at `filename`.

    :param url: the URL to download
    :param filename: destination path; removed again if the download fails
    :param session: optional requests session to wrap with caching
    :returns: True on success, False on HTTP error or non-OK status
    """
    # create session, backed by an on-disk cache under CACHE_DIR
    cache_dir = sickbeard.CACHE_DIR or _getTempDir()
    session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(cache_dir, "sessions")))

    # request session headers
    session.headers.update({"User-Agent": USER_AGENT, "Accept-Encoding": "gzip,deflate"})

    # request session ssl verify
    session.verify = False

    # request session streaming
    session.stream = True

    # request session proxies
    if sickbeard.PROXY_SETTING:
        logger.log("Using proxy for url: " + url, logger.DEBUG)
        session.proxies = {"http": sickbeard.PROXY_SETTING, "https": sickbeard.PROXY_SETTING}

    try:
        resp = session.get(url)
        if not resp.ok:
            logger.log(
                u"Requested url "
                + url
                + " returned status code is "
                + str(resp.status_code)
                + ": "
                + clients.http_error_code[resp.status_code],
                logger.DEBUG,
            )
            return False

        with open(filename, "wb") as fp:
            for chunk in resp.iter_content(chunk_size=1024):
                if chunk:
                    fp.write(chunk)
                    fp.flush()

        chmodAsParent(filename)
    except requests.exceptions.HTTPError as e:
        _remove_file_failed(filename)
        logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
        return False

    # signal success explicitly; the failure paths all return False
    return True
Example no. 5
0
def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=None, json=False):
    """
    Retrieve the content of `url` and return it, or None on HTTP error.

    :param url: the URL to fetch
    :param post_data: when truthy, sent as the body of a POST request;
        otherwise a plain GET is issued
    :param params: query-string parameters stored on the session
    :param headers: optional dict of extra HTTP headers merged over the
        defaults (None means "no extra headers"; a mutable `{}` default
        would be shared across calls, so None is used instead)
    :param timeout: per-request timeout in seconds
    :param session: optional requests session to wrap with caching
    :param json: when True, return the decoded JSON body instead of bytes
    :returns: response bytes (or parsed JSON when `json` is True), or None on error
    """

    # request session, backed by an on-disk cache under CACHE_DIR
    cache_dir = sickbeard.CACHE_DIR or _getTempDir()
    session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(cache_dir, 'sessions')))

    # request session headers
    session.headers.update({'User-Agent': USER_AGENT, 'Accept-Encoding': 'gzip,deflate'})
    if headers:
        session.headers.update(headers)

    # request session ssl verify
    session.verify = False

    # request session parameters
    session.params = params

    try:
        # request session proxies
        if sickbeard.PROXY_SETTING:
            logger.log("Using proxy for url: " + url, logger.DEBUG)
            session.proxies = {
                "http": sickbeard.PROXY_SETTING,
                "https": sickbeard.PROXY_SETTING,
            }

        # decide if we get or post data to server
        if post_data:
            resp = session.post(url, data=post_data, timeout=timeout)
        else:
            resp = session.get(url, timeout=timeout)

        if not resp.ok:
            logger.log(u"Requested url " + url + " returned status code is " + str(
                resp.status_code) + ': ' + clients.http_error_code[resp.status_code], logger.DEBUG)
            return

    except requests.exceptions.HTTPError as e:
        logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
        return

    # honor the `json` flag, which was previously accepted but ignored,
    # and actually return the body the docstring promises
    if json:
        return resp.json()
    return resp.content
Example no. 6
0
def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=None, json=False):
    """
    Retrieve the content of `url` and return it, or None on HTTP error.

    :param url: the URL to fetch (duplicate slashes in the path are collapsed)
    :param post_data: when truthy, sent as the body of a POST request;
        previously this was passed as the body of a GET, which contradicts
        the parameter's name and the sibling implementations
    :param params: query-string parameters stored on the session
    :param headers: optional dict of extra HTTP headers merged over the defaults
    :param timeout: per-request timeout in seconds
    :param session: optional requests session to wrap with caching
    :param json: when True, return the decoded JSON body instead of bytes
    :returns: response bytes (or parsed JSON when `json` is True), or None on error
    """

    # request session, backed by an on-disk cache
    session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(sickbeard.CACHE_DIR, 'sessions')))

    # request session headers
    req_headers = {'User-Agent': USER_AGENT, 'Accept-Encoding': 'gzip,deflate'}
    if headers:
        req_headers.update(headers)
    session.headers.update(req_headers)

    # request session ssl verify
    session.verify = False

    # request session parameters
    session.params = params

    try:
        # Remove double-slashes from url
        parsed = list(urlparse.urlparse(url))
        parsed[2] = re.sub("/{2,}", "/", parsed[2])  # replace two or more / with one
        url = urlparse.urlunparse(parsed)

        # request session proxies
        if sickbeard.PROXY_SETTING:
            logger.log("Using proxy for url: " + url, logger.DEBUG)
            session.proxies = {
                "http": sickbeard.PROXY_SETTING,
                "https": sickbeard.PROXY_SETTING,
            }

        # decide if we get or post data to server
        if post_data:
            resp = session.post(url, data=post_data, timeout=timeout)
        else:
            resp = session.get(url, timeout=timeout)

        if not resp.ok:
            return
    except requests.exceptions.HTTPError as e:
        logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
        return

    # honor the `json` flag, which was previously accepted but ignored,
    # and actually return the body the docstring promises
    if json:
        return resp.json()
    return resp.content
Example no. 7
0
def download_file(url, filename, session=None):
    """
    Stream `url` to disk at `filename`.

    :param url: the URL to download
    :param filename: destination path; removed again if the download fails
    :param session: optional requests session to wrap with caching
    :returns: True on success, False on HTTP error or non-OK status
    """
    # create session, backed by an on-disk cache
    session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(sickbeard.CACHE_DIR, 'sessions')))

    # request session headers
    session.headers.update({'User-Agent': USER_AGENT, 'Accept-Encoding': 'gzip,deflate'})

    # request session ssl verify
    session.verify = False

    # request session streaming
    session.stream = True

    # request session proxies
    if sickbeard.PROXY_SETTING:
        logger.log("Using proxy for url: " + url, logger.DEBUG)
        session.proxies = {
            "http": sickbeard.PROXY_SETTING,
            "https": sickbeard.PROXY_SETTING,
        }

    try:
        resp = session.get(url)
        if not resp.ok:
            return False

        with open(filename, 'wb') as fp:
            for chunk in resp.iter_content(chunk_size=1024):
                if chunk:
                    fp.write(chunk)
                    fp.flush()

        chmodAsParent(filename)
    except requests.exceptions.HTTPError as e:
        _remove_file_failed(filename)
        logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
        return False

    # signal success explicitly; the failure paths all return False
    return True