Example no. 1
def request(url,
            close=True,
            redirect=True,
            followredirect=False,
            error=False,
            proxy=None,
            post=None,
            headers=None,
            mobile=False,
            limit=None,
            referer=None,
            cookie=None,
            output='',
            timeout='30',
            httpsskip=False,
            use_web_proxy=False,
            XHR=False,
            IPv4=False):

    # return shapes by output: 'extended' -> 4-tuple, 'response' -> 2-tuple, 'responsecodeext' -> 2-tuple

    try:
        handlers = []
        redirectURL = url

        if IPv4 == True:
            setIP4()

        if error == False and not proxy == None:
            handlers += [
                urllib2.ProxyHandler({'http': '%s' % (proxy)}),
                urllib2.HTTPHandler
            ]
            opener = urllib2.build_opener(*handlers)
            opener = urllib2.install_opener(opener)

        if error == False and (output == 'cookie2' or output == 'cookie'
                               or output == 'extended' or not close == True):
            cookies = cookielib.LWPCookieJar()
            if httpsskip or use_web_proxy:
                handlers += [
                    urllib2.HTTPHandler(),
                    urllib2.HTTPCookieProcessor(cookies)
                ]
            else:
                handlers += [
                    urllib2.HTTPHandler(),
                    urllib2.HTTPSHandler(),
                    urllib2.HTTPCookieProcessor(cookies)
                ]
            opener = urllib2.build_opener(*handlers)
            opener = urllib2.install_opener(opener)

        try:
            if error == False:
                if sys.version_info < (2, 7, 9): raise Exception()  # ssl.create_default_context needs 2.7.9+
                import ssl
                ssl_context = ssl.create_default_context()
                ssl_context.check_hostname = False
                ssl_context.verify_mode = ssl.CERT_NONE
                handlers += [urllib2.HTTPSHandler(context=ssl_context)]
                opener = urllib2.build_opener(*handlers)
                opener = urllib2.install_opener(opener)
        except:
            pass

        try:
            headers.update(headers)  # no-op for a dict; raises if headers is None
        except:
            headers = {}
        if 'User-Agent' in headers:
            pass
        elif not mobile == True:
            #headers['User-Agent'] = agent()
            #headers['User-Agent'] = Constants.USER_AGENT
            headers['User-Agent'] = randomagent()
        else:
            headers['User-Agent'] = 'Apple-iPhone/701.341'
        if 'Referer' in headers:
            pass
        elif referer == None:
            try:
                headers['Referer'] = '%s://%s/' % (urlparse.urlparse(
                    url).scheme, urlparse.urlparse(url).netloc)
            except:
                try:
                    headers['Referer'] = url
                except:
                    pass
        else:
            headers['Referer'] = referer
        if not 'Accept-Language' in headers:
            headers['Accept-Language'] = 'en-US'
        if 'X-Requested-With' in headers:
            pass
        elif XHR == True:
            headers['X-Requested-With'] = 'XMLHttpRequest'
        if 'Cookie' in headers:
            pass
        elif not cookie == None:
            headers['Cookie'] = cookie

        if error == False and redirect == False:

            class NoRedirection(urllib2.HTTPErrorProcessor):
                def http_response(self, request, response):
                    if IPv4 == True:
                        setIP6()
                    return response

            opener = urllib2.build_opener(NoRedirection)
            opener = urllib2.install_opener(opener)

            try:
                del headers['Referer']
            except:
                pass

        redirectHandler = None
        urlList = []
        if error == False and followredirect:

            class HTTPRedirectHandler(urllib2.HTTPRedirectHandler):
                def redirect_request(self, req, fp, code, msg, headers,
                                     newurl):
                    newreq = urllib2.HTTPRedirectHandler.redirect_request(
                        self, req, fp, code, msg, headers, newurl)
                    if newreq is not None:
                        self.redirections.append(newreq.get_full_url())
                    if IPv4 == True:
                        setIP6()
                    return newreq

            redirectHandler = HTTPRedirectHandler()
            redirectHandler.max_redirections = 10
            redirectHandler.redirections = [url]

            opener = urllib2.build_opener(redirectHandler)
            opener = urllib2.install_opener(opener)

        request = urllib2.Request(url, data=post, headers=headers)
        #print request

        try:
            response = urllib2.urlopen(request, timeout=int(timeout))
            if followredirect:
                for redURL in redirectHandler.redirections:
                    urlList.append(redURL)  # make a list, might be useful
                    redirectURL = redURL

        except urllib2.HTTPError as response:

            try:
                resp_code = response.code
            except:
                resp_code = None

            try:
                content = response.read()
            except:
                content = ''

            if response.code == 503:
                #Log("AAAA- CODE %s|%s " % (url, response.code))
                if 'cf-browser-verification' in content:
                    control.log('cf-browser-verification: CF-OK')

                    netloc = '%s://%s' % (urlparse.urlparse(url).scheme,
                                          urlparse.urlparse(url).netloc)
                    #cf = cache.get(cfcookie, 168, netloc, headers['User-Agent'], timeout)
                    cfc = cfcookie()
                    cf = cfc.get(netloc, headers['User-Agent'], timeout)

                    headers['Cookie'] = cf
                    request = urllib2.Request(url, data=post, headers=headers)
                    response = urllib2.urlopen(request, timeout=int(timeout))
                elif error == False:
                    if IPv4 == True:
                        setIP6()
                    return
                elif error == True:
                    return '%s: %s' % (response.code, response.reason), content
            elif response.code == 403:
                if output == 'response':
                    return response.code, content
                else:
                    return
            elif response.code == 307:
                #Log("AAAA- Response read: %s" % response.read(5242880))
                #Log("AAAA- Location: %s" % (response.headers['Location'].rstrip()))
                cookie = ''
                try:
                    cookie = '; '.join(
                        ['%s=%s' % (i.name, i.value) for i in cookies])
                except:
                    pass
                headers['Cookie'] = cookie
                request = urllib2.Request(response.headers['Location'],
                                          data=post,
                                          headers=headers)
                response = urllib2.urlopen(request, timeout=int(timeout))
                #Log("AAAA- BBBBBBB %s" %  response.code)
            elif resp_code != None:
                if IPv4 == True:
                    setIP6()
                if output == 'response':
                    return (resp_code, None)
                return resp_code
            elif error == False:
                #print ("Response code",response.code, response.msg,url)
                if IPv4 == True:
                    setIP6()
                return
            else:
                if IPv4 == True:
                    setIP6()
                return
        except Exception as e:
            control.log('ERROR client.py>request : %s' % url)
            control.log('ERROR client.py>request : %s' % e.args)
            if IPv4 == True:
                setIP6()
            if output == 'response':
                return (None, None)
            return None

        if output == 'cookie':
            try:
                result = '; '.join(
                    ['%s=%s' % (i.name, i.value) for i in cookies])
            except:
                pass
            try:
                result = cf
            except:
                pass

        elif output == 'response':
            if limit == '0':
                result = (str(response.code), response.read(224 * 1024))
            elif not limit == None:
                result = (str(response.code), response.read(int(limit) * 1024))
            else:
                result = (str(response.code), response.read(5242880))

        elif output == 'responsecodeext':
            result = (str(response.code), redirectURL)

        elif output == 'responsecode':
            result = str(response.code)

        elif output == 'chunk':
            try:
                content = int(response.headers['Content-Length'])
            except:
                content = (2049 * 1024)
            #Log('CHUNK %s|%s' % (url,content))
            if content < (2048 * 1024):
                if IPv4 == True:
                    setIP6()
                return
            result = response.read(16 * 1024)
            if close == True: response.close()
            if IPv4 == True:
                setIP6()
            return result

        elif output == 'extended':
            try:
                cookie = '; '.join(
                    ['%s=%s' % (i.name, i.value) for i in cookies])
            except:
                pass
            try:
                cookie = cf
            except:
                pass
            content = response.headers
            result = response.read(5242880)
            if IPv4 == True:
                setIP6()
            return (result, headers, content, cookie)

        elif output == 'geturl':
            result = response.geturl()

        elif output == 'headers':
            content = response.headers
            if IPv4 == True:
                setIP6()
            return content

        else:
            if limit == '0':
                result = response.read(224 * 1024)
            elif not limit == None:
                result = response.read(int(limit) * 1024)
            else:
                result = response.read(5242880)

        if close == True:
            response.close()

        if IPv4 == True:
            setIP6()
        return result

    except Exception as e:
        control.log('ERROR client.py>request %s, %s' % (e.args, url))
        traceback.print_exc()
        if IPv4 == True:
            setIP6()
        return
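A minimal usage sketch for the helper above, assuming the surrounding module is importable as `client` (the module name and URLs are illustrative, and the sibling helpers it calls, such as randomagent, control, and setIP4/setIP6, must exist):

# Hedged usage examples for request().
html = client.request('http://example.com/page')                      # plain body, up to 5 MB
code, body = client.request('http://example.com/api',
                            output='response', limit='64')            # (status, first 64 KB)
code, final_url = client.request('http://example.com/r',
                                 followredirect=True,
                                 output='responsecodeext')            # (status, final redirect URL)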
Example no. 2
if not os.path.exists(profileDir):
    os.makedirs(profileDir)

urlopen = urllib2.urlopen
cj = cookielib.LWPCookieJar(xbmc.translatePath(cookiePath))
Request = urllib2.Request

handlers = [urllib2.HTTPBasicAuthHandler(), urllib2.HTTPHandler()]

if (2, 7, 8) < sys.version_info < (2, 7, 12):
    try:
        import ssl
        ssl_context = ssl.create_default_context()
        ssl_context.check_hostname = False
        ssl_context.verify_mode = ssl.CERT_NONE
        handlers += [urllib2.HTTPSHandler(context=ssl_context)]
    except:
        pass

if cj != None:
    if os.path.isfile(xbmc.translatePath(cookiePath)):
        try:
            cj.load()
        except:
            # a corrupt cookie file is simply removed; a locked one needs a restart
            try:
                os.remove(xbmc.translatePath(cookiePath))
            except:
                dialog.ok('Oh oh',
                          'The Cookie file is locked, please restart Kodi')
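The excerpt wires up a handler list and a cookie jar but stops before an opener is built; a plausible continuation, hedged, consistent with how these pieces are combined elsewhere on this page:

# Hedged sketch: attach the cookie jar and install the opener globally.
handlers += [urllib2.HTTPCookieProcessor(cj)]
opener = urllib2.build_opener(*handlers)
urllib2.install_opener(opener)
# cj.save()  # persist cookies back to cookiePath once requests are done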
Example no. 3
# Imports assumed by this snippet: the python-oauth2 package as `oauth`, and
# urllib2 aliased as `urllib` (which is why urllib.HTTPHandler works below).
import oauth2 as oauth
import urllib2 as urllib

api_key = "JiSi7VtYVPLLWxQbrvnTJJOOq"
api_secret = "x7irzMC0FOMgLQn1LqrhnuQrrmVNa0snIGVv0GHebgcKxZ72B9"
access_token_key = "710935210775302145-eY3DWTKNGR0PQHchK62QbgSTIVBMBRI"
access_token_secret = "A2zMfbLpkEGnzltqzdjhFZsoHevegspGaBc7sTIixk78l"

_debug = 0

oauth_token = oauth.Token(key=access_token_key, secret=access_token_secret)
oauth_consumer = oauth.Consumer(key=api_key, secret=api_secret)

signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1()

http_method = "GET"

http_handler = urllib.HTTPHandler(debuglevel=_debug)
https_handler = urllib.HTTPSHandler(debuglevel=_debug)
'''
Construct, sign, and open a twitter request
using the hard-coded credentials above.
'''


def twitterreq(url, method, parameters):
    req = oauth.Request.from_consumer_and_token(oauth_consumer,
                                                token=oauth_token,
                                                http_method=http_method,
                                                http_url=url,
                                                parameters=parameters)

    req.sign_request(signature_method_hmac_sha1, oauth_consumer, oauth_token)

    # The snippet is truncated here; the classic version of this exercise
    # script continues roughly as follows (hedged reconstruction):
    if http_method == "POST":
        encoded_post_data = req.to_postdata()
    else:
        encoded_post_data = None
        url = req.to_url()
    opener = urllib.OpenerDirector()
    opener.add_handler(http_handler)
    opener.add_handler(https_handler)
    return opener.open(url, encoded_post_data)
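A hedged usage sketch for the completed function; the sample-stream endpoint mirrors how scripts of this kind are typically driven:

# Hypothetical call: read a few items from the public sample stream.
response = twitterreq("https://stream.twitter.com/1.1/statuses/sample.json", "GET", [])
count = 0
for line in response:
    print line.strip()
    count += 1
    if count >= 10: break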
Example no. 4
def request(url, close=True, redirect=True, error=False, proxy=None, post=None, headers=None, mobile=False, XHR=False,
            limit=None, referer=None, cookie=None, compression=True, output='', timeout='30'):
    try:
        if not url:
            return

        handlers = []

        if not proxy == None:
            handlers += [urllib2.ProxyHandler({'http': '%s' % (proxy)}), urllib2.HTTPHandler]
            opener = urllib2.build_opener(*handlers)
            opener = urllib2.install_opener(opener)

        if output == 'cookie' or output == 'extended' or not close == True:
            cookies = cookielib.LWPCookieJar()
            handlers += [urllib2.HTTPHandler(), urllib2.HTTPSHandler(), urllib2.HTTPCookieProcessor(cookies)]
            opener = urllib2.build_opener(*handlers)
            opener = urllib2.install_opener(opener)

        if (2, 7, 8) < sys.version_info < (2, 7, 12):
            try:
                import ssl
                ssl_context = ssl.create_default_context()
                ssl_context.check_hostname = False
                ssl_context.verify_mode = ssl.CERT_NONE
                handlers += [urllib2.HTTPSHandler(context=ssl_context)]
                opener = urllib2.build_opener(*handlers)
                opener = urllib2.install_opener(opener)
            except:
                pass

        if url.startswith('//'): url = 'http:' + url

        _headers = {}
        if headers:
            try:
                _headers.update(headers)
            except:
                log_exception()
        if 'User-Agent' in _headers:
            pass
        elif not mobile == True:
            # headers['User-Agent'] = agent()
            _headers['User-Agent'] = ('Mozilla/5.0 (Windows NT 10.0; Win64; x64) '
                                      'AppleWebKit/537.36 (KHTML, like Gecko) '
                                      'Chrome/74.0.3729.75 Safari/537.36')
        else:
            _headers['User-Agent'] = 'Apple-iPhone/701.341'
        if 'Referer' in _headers:
            pass
        elif referer is not None:
            _headers['Referer'] = referer
        if not 'Accept-Language' in _headers:
            _headers['Accept-Language'] = 'en-US'
        if 'X-Requested-With' in _headers:
            pass
        elif XHR == True:
            _headers['X-Requested-With'] = 'XMLHttpRequest'
        if 'Cookie' in _headers:
            pass
        elif not cookie == None:
            _headers['Cookie'] = cookie
        if 'Accept-Encoding' in _headers:
            pass
        elif compression and limit is None:
            _headers['Accept-Encoding'] = 'gzip'

        if redirect == False:

            # old implementation
            # class NoRedirection(urllib2.HTTPErrorProcessor):
            #    def http_response(self, request, response): return response

            # opener = urllib2.build_opener(NoRedirection)
            # opener = urllib2.install_opener(opener)

            class NoRedirectHandler(urllib2.HTTPRedirectHandler):
                def http_error_302(self, req, fp, code, msg, headers):
                    infourl = urllib.addinfourl(fp, headers, req.get_full_url())
                    infourl.status = code
                    infourl.code = code
                    return infourl

                http_error_300 = http_error_302
                http_error_301 = http_error_302
                http_error_303 = http_error_302
                http_error_307 = http_error_302

            opener = urllib2.build_opener(NoRedirectHandler())
            urllib2.install_opener(opener)

            try:
                del _headers['Referer']
            except:
                pass

        if isinstance(post, dict):
            post = utils.byteify(post)
            post = urllib.urlencode(post)

        url = utils.byteify(url)

        request = urllib2.Request(url, data=post)
        _add_request_header(request, _headers)

        try:
            import ssl
            response = urllib2.urlopen(request, timeout=int(timeout), context=ssl._create_unverified_context())
        except urllib2.HTTPError as response:

            if response.code == 503 or ('shinden' in url and response.code == 403):
                s = requests.Session()
                headersok = {'User-Agent': _headers['User-Agent']}
                s.headers = headersok
                scraper = cloudflare6.create_scraper(s)
                if output == 'session':
                    return s
                if output == 'cookie':
                    return scraper.get_cookie_string(url, user_agent=_headers['User-Agent'])[0]
                else:
                    return scraper.get(url).content
            else:
                log_utils.log('Request-Error (%s): %s' % (str(response.code), url), log_utils.LOGDEBUG)
                if error == False: return

        if output == 'cookie':
            try:
                result = '; '.join(['%s=%s' % (i.name, i.value) for i in cookies])
            except:
                pass
            if close == True: response.close()
            return result

        elif output == 'geturl':
            result = response.geturl()
            if close == True: response.close()
            return result

        elif output == 'headers':
            result = response.headers
            if close == True: response.close()
            return result

        elif output == 'chunk':
            try:
                content = int(response.headers['Content-Length'])
            except:
                content = (2049 * 1024)
            if content < (2048 * 1024): return
            result = response.read(16 * 1024)
            if close == True: response.close()
            return result

        elif output == 'file_size':
            try:
                content = int(response.headers['Content-Length'])
            except:
                content = '0'
            response.close()
            return content

        if limit == '0':
            result = response.read(224 * 1024)
        elif not limit == None:
            result = response.read(int(limit) * 1024)
        else:
            result = response.read(5242880)

        try:
            encoding = response.info().getheader('Content-Encoding')
        except:
            encoding = None
        if encoding == 'gzip':
            result = gzip.GzipFile(fileobj=StringIO.StringIO(result)).read()

        if 'sucuri_cloudproxy_js' in result:
            su = sucuri().get(result)

            _headers['Cookie'] = su

            request = urllib2.Request(url, data=post)
            _add_request_header(request, _headers)

            response = urllib2.urlopen(request, timeout=int(timeout))

            if limit == '0':
                result = response.read(224 * 1024)
            elif not limit == None:
                result = response.read(int(limit) * 1024)
            else:
                result = response.read(5242880)

            try:
                encoding = response.info().getheader('Content-Encoding')
            except:
                encoding = None
            if encoding == 'gzip':
                result = gzip.GzipFile(fileobj=StringIO.StringIO(result)).read()

        if 'Blazingfast.io' in result and 'xhr.open' in result:
            netloc = '%s://%s' % (urlparse.urlparse(url).scheme, urlparse.urlparse(url).netloc)
            ua = _headers['User-Agent']
            _headers['Cookie'] = cache.get(bfcookie().get, 168, netloc, ua, timeout)

            result = _basic_request(url, headers=_headers, post=post, timeout=timeout, limit=limit)

        if output == 'extended':
            try:
                response_headers = dict([(item[0].title(), item[1]) for item in response.info().items()])
            except:
                response_headers = response.headers
            response_code = str(response.code)
            try:
                cookie = '; '.join(['%s=%s' % (i.name, i.value) for i in cookies])
            except:
                pass
            if close == True: response.close()
            return (result, response_code, response_headers, _headers, cookie)
        else:
            if close == True: response.close()
            return result
    except Exception as e:
        log_utils.log('Request-Error: (%s) => %s' % (str(e), url), log_utils.LOGDEBUG)
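A hedged usage sketch for the redirect=False path above (the module name `client` and the URL are assumptions): with redirects suppressed, a 3xx response is returned as-is, so its Location header can be read directly.

# Hypothetical: fetch one redirect hop's Location header without following it.
resp_headers = client.request('http://example.com/short', redirect=False, output='headers')
location = resp_headers.getheader('Location')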
Example no. 5
access_token = creds['access_token']
url = "https://api.fitbit.com/1/user/-/profile.json"
headers = {
    "Authorization": "Bearer " + access_token,
    "Accept-Language": "en_US"
}
#print 'Requesting: ' + url
req = urllib2.Request(url, headers=headers)
response = urllib2.urlopen(req)
the_page = response.read()
body = json.loads(the_page)
weight = body['user']['weight'] / 2.20462
print weight

http_logger = urllib2.HTTPSHandler()  # pass debuglevel=1 to trace requests
#http_logger.set_http_debuglevel(1)
opener = urllib2.build_opener(http_logger)  # put your other handlers here too!
opener.addheaders = [('User-agent', 'Mozilla/5.0')]
urllib2.install_opener(opener)


def auth():
    global creds
    # curl -X POST https://www.strava.com/api/v3/oauth/token -d client_id=20051 -d client_secret=[client_secret] -d code=[code] -d grant_type=authorization_code
    client_id = creds['strava_client_id']
    client_secret = creds['strava_client_secret']
    refresh_token = creds['strava_refresh_token']
    req = urllib2.Request(
        'https://www.strava.com/api/v3/oauth/token',
        urllib.urlencode({
Example no. 6
def request(url,
            close=True,
            redirect=True,
            error=False,
            proxy=None,
            post=None,
            headers=None,
            mobile=False,
            XHR=False,
            limit=None,
            referer=None,
            cookie=None,
            compression=True,
            output='',
            timeout='30',
            ignoreSsl=False,
            flare=True,
            ignoreErrors=None):
    try:
        if not url: return None

        handlers = []
        if proxy is not None:
            handlers += [
                urllib2.ProxyHandler({'http': '%s' % (proxy)}),
                urllib2.HTTPHandler
            ]
            opener = urllib2.build_opener(*handlers)
            opener = urllib2.install_opener(opener)

        if output == 'cookie' or output == 'extended' or not close is True:
            cookies = cookielib.LWPCookieJar()
            handlers += [
                urllib2.HTTPHandler(),
                urllib2.HTTPSHandler(),
                urllib2.HTTPCookieProcessor(cookies)
            ]
            opener = urllib2.build_opener(*handlers)
            opener = urllib2.install_opener(opener)

        if ignoreSsl or ((2, 7, 8) < sys.version_info < (2, 7, 12)):
            try:
                import ssl
                ssl_context = ssl.create_default_context()
                ssl_context.check_hostname = False
                ssl_context.verify_mode = ssl.CERT_NONE
                handlers += [urllib2.HTTPSHandler(context=ssl_context)]
                opener = urllib2.build_opener(*handlers)
                opener = urllib2.install_opener(opener)
            except:
                pass

        if url.startswith('//'):
            url = 'http:' + url

        try:
            headers.update(headers)
        except:
            headers = {}

        if 'User-Agent' in headers: pass
        elif mobile is not True:
            headers['User-Agent'] = cache.get(randomagent, 12)
        else:
            headers['User-Agent'] = 'Apple-iPhone/701.341'

        if 'Referer' in headers: pass
        elif referer: headers['Referer'] = referer

        if 'Accept-Language' not in headers:
            headers['Accept-Language'] = 'en-US'

        if 'X-Requested-With' in headers: pass
        elif XHR: headers['X-Requested-With'] = 'XMLHttpRequest'

        if 'Cookie' in headers: pass
        elif cookie: headers['Cookie'] = cookie

        if 'Accept-Encoding' in headers: pass
        elif compression and limit is None: headers['Accept-Encoding'] = 'gzip'

        if redirect is False:

            class NoRedirection(urllib2.HTTPErrorProcessor):
                def http_response(self, request, response):
                    return response

            opener = urllib2.build_opener(NoRedirection)
            opener = urllib2.install_opener(opener)
            try:
                del headers['Referer']
            except:
                pass

        if isinstance(post, dict):
            # Gets rid of the error: 'ascii' codec can't decode byte 0xd0 in position 0: ordinal not in range(128)
            try:
                iter_items = post.iteritems()
            except:
                iter_items = post.items()
            for key, value in iter_items:
                try:
                    post[key] = value.encode('utf-8')
                except:
                    pass
            post = urlencode(post)

        request = urllib2.Request(url, data=post)
        _add_request_header(request, headers)

        try:
            response = urllib2.urlopen(request, timeout=int(timeout))
        except HTTPError as response:
            try:
                code = int(response.code)
                ignore = ignoreErrors and (code == ignoreErrors or code in ignoreErrors)
            except:
                ignore = False

            if not ignore:
                if response.code in [301, 307, 308, 503]:
                    cf_result = response.read(5242880)
                    try:
                        encoding = response.info().getheader(
                            'Content-Encoding')
                    except:
                        encoding = None

                    if encoding == 'gzip':
                        cf_result = gzip.GzipFile(
                            fileobj=StringIO(cf_result)).read()

                    if flare and 'cloudflare' in str(response.info()).lower():
                        log_utils.log(
                            'client module calling cfscrape: url=%s' % url,
                            level=log_utils.LOGNOTICE)
                        try:
                            from fenomscrapers.modules import cfscrape
                            if isinstance(post, dict): data = post
                            else:
                                try:
                                    data = parse_qs(post)
                                except:
                                    data = None

                            scraper = cfscrape.CloudScraper()
                            response = scraper.request(
                                method='GET' if post is None else 'POST',
                                url=url,
                                headers=headers,
                                data=data,
                                timeout=int(timeout))
                            result = response.content
                            flare = 'cloudflare'  # Used below
                            try:
                                cookies = response.request._cookies
                            except:
                                log_utils.error()
                        except:
                            log_utils.error()

                    elif 'cf-browser-verification' in cf_result:
                        netloc = '%s://%s' % (urlparse(url).scheme,
                                              urlparse(url).netloc)
                        ua = headers['User-Agent']
                        cf = cache.get(cfcookie().get, 168, netloc, ua,
                                       timeout)
                        headers['Cookie'] = cf
                        request = urllib2.Request(url, data=post)
                        _add_request_header(request, headers)
                        response = urllib2.urlopen(request,
                                                   timeout=int(timeout))
                    else:
                        log_utils.log('Request-Error (%s): %s' %
                                      (str(response.code), url),
                                      level=log_utils.LOGERROR)
                        if error is False: return None
                else:
                    log_utils.log('Request-Error (%s): %s' %
                                  (str(response.code), url),
                                  level=log_utils.LOGERROR)
                    if error is False: return None

        if output == 'cookie':
            try:
                result = '; '.join(
                    ['%s=%s' % (i.name, i.value) for i in cookies])
            except:
                pass
            try:
                result = cf
            except:
                pass
            if close is True: response.close()
            return result

        elif output == 'geturl':
            result = response.geturl()
            if close is True: response.close()
            return result

        elif output == 'headers':
            result = response.headers
            if close is True: response.close()
            return result

        elif output == 'chunk':
            try:
                content = int(response.headers['Content-Length'])
            except:
                content = (2049 * 1024)
            if content < (2048 * 1024): return
            result = response.read(16 * 1024)
            if close is True: response.close()
            return result

        if flare != 'cloudflare':
            if limit == '0': result = response.read(224 * 1024)
            elif limit is not None: result = response.read(int(limit) * 1024)
            else: result = response.read(5242880)

        try:
            encoding = response.info().getheader('Content-Encoding')
        except:
            encoding = None

        if encoding == 'gzip':
            result = gzip.GzipFile(fileobj=StringIO(result)).read()

        if 'sucuri_cloudproxy_js' in result:
            su = sucuri().get(result)
            headers['Cookie'] = su
            request = urllib2.Request(url, data=post)
            _add_request_header(request, headers)
            response = urllib2.urlopen(request, timeout=int(timeout))
            if limit == '0': result = response.read(224 * 1024)
            elif limit is not None: result = response.read(int(limit) * 1024)
            else: result = response.read(5242880)
            try:
                encoding = response.info().getheader('Content-Encoding')
            except:
                encoding = None
            if encoding == 'gzip':
                result = gzip.GzipFile(fileobj=StringIO(result)).read()

        if 'Blazingfast.io' in result and 'xhr.open' in result:
            netloc = '%s://%s' % (urlparse(url).scheme, urlparse(url).netloc)
            ua = headers['User-Agent']
            headers['Cookie'] = cache.get(bfcookie().get, 168, netloc, ua,
                                          timeout)
            result = _basic_request(url,
                                    headers=headers,
                                    post=post,
                                    timeout=timeout,
                                    limit=limit)

        if output == 'extended':
            try:
                response_headers = dict([(item[0].title(), item[1])
                                         for item in response.info().items()])
            except:
                response_headers = response.headers
            try:
                response_code = str(response.code)
            except:
                # response may be a Requests object returned by CFScrape.
                response_code = str(response.status_code)
            try:
                cookie = '; '.join(
                    ['%s=%s' % (i.name, i.value) for i in cookies])
            except:
                pass
            try:
                cookie = cf
            except:
                pass
            if close is True: response.close()
            return (result, response_code, response_headers, headers, cookie)
        else:
            if close is True: response.close()
            return result

    except Exception as e:
        log_utils.log('Request-Error: (%s) => %s' % (str(e), url),
                      level=log_utils.LOGERROR)
        return None
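A hedged usage sketch for the ignoreErrors parameter above; judging by the int(response.code) == ignoreErrors or int(response.code) in ignoreErrors test, it accepts either a single status code or an iterable of codes (the module name `client` is an assumption):

# Illustrative calls suppressing the error log for expected statuses.
body = client.request('http://example.com/api', ignoreErrors=404)
body = client.request('http://example.com/api', ignoreErrors=[404, 410])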
Example no. 7
def request(url, close=True, redirect=True, error=False, proxy=None, post=None, headers=None, mobile=False, XHR=False, limit=None, referer=None, cookie=None, compression=True, output='', timeout='30'):
    try:
        handlers = []

        if not proxy == None:
            handlers += [urllib2.ProxyHandler({'http':'%s' % (proxy)}), urllib2.HTTPHandler]
            opener = urllib2.build_opener(*handlers)
            opener = urllib2.install_opener(opener)


        if output == 'cookie' or output == 'extended' or not close == True:
            cookies = cookielib.LWPCookieJar()
            handlers += [urllib2.HTTPHandler(), urllib2.HTTPSHandler(), urllib2.HTTPCookieProcessor(cookies)]
            opener = urllib2.build_opener(*handlers)
            opener = urllib2.install_opener(opener)

        if (2, 7, 9) <= sys.version_info < (2, 7, 11):
            try:
                import ssl
                ssl_context = ssl.create_default_context()
                ssl_context.check_hostname = False
                ssl_context.verify_mode = ssl.CERT_NONE
                handlers += [urllib2.HTTPSHandler(context=ssl_context)]
                opener = urllib2.build_opener(*handlers)
                opener = urllib2.install_opener(opener)
            except:
                pass

        if url.startswith('//'): url = 'http:' + url

        try: headers.update(headers)
        except: headers = {}
        if 'User-Agent' in headers:
            pass
        elif not mobile == True:
            #headers['User-Agent'] = agent()
            headers['User-Agent'] = cache.get(randomagent, 1)
        else:
            headers['User-Agent'] = 'Apple-iPhone/701.341'
        if 'Referer' in headers:
            pass
        elif referer == None:
            headers['Referer'] = '%s://%s/' % (urlparse.urlparse(url).scheme, urlparse.urlparse(url).netloc)
        else:
            headers['Referer'] = referer
        if not 'Accept-Language' in headers:
            headers['Accept-Language'] = 'en-US'
        if 'X-Requested-With' in headers:
            pass
        elif XHR == True:
            headers['X-Requested-With'] = 'XMLHttpRequest'
        if 'Cookie' in headers:
            pass
        elif not cookie == None:
            headers['Cookie'] = cookie
        if 'Accept-Encoding' in headers:
            pass
        elif compression and limit is None:
            headers['Accept-Encoding'] = 'gzip'


        if redirect == False:

            class NoRedirection(urllib2.HTTPErrorProcessor):
                def http_response(self, request, response): return response

            opener = urllib2.build_opener(NoRedirection)
            opener = urllib2.install_opener(opener)

            try: del headers['Referer']
            except: pass


        request = urllib2.Request(url, data=post, headers=headers)


        try:
            response = urllib2.urlopen(request, timeout=int(timeout))
        except urllib2.HTTPError as response:

            if response.code == 503:
                cf_result = response.read(5242880)
                try: encoding = response.info().getheader('Content-Encoding')
                except: encoding = None
                if encoding == 'gzip':
                    cf_result = gzip.GzipFile(fileobj=StringIO.StringIO(cf_result)).read()

                if 'cf-browser-verification' in cf_result:

                    netloc = '%s://%s' % (urlparse.urlparse(url).scheme, urlparse.urlparse(url).netloc)

                    ua = headers['User-Agent']

                    cf = cache.get(cfcookie().get, 168, netloc, ua, timeout)

                    headers['Cookie'] = cf

                    request = urllib2.Request(url, data=post, headers=headers)

                    response = urllib2.urlopen(request, timeout=int(timeout))

                elif error == False:
                    return

            elif error == False:
                return


        if output == 'cookie':
            try: result = '; '.join(['%s=%s' % (i.name, i.value) for i in cookies])
            except: pass
            try: result = cf
            except: pass
            if close == True: response.close()
            return result

        elif output == 'geturl':
            result = response.geturl()
            if close == True: response.close()
            return result

        elif output == 'headers':
            result = response.headers
            if close == True: response.close()
            return result

        elif output == 'chunk':
            try: content = int(response.headers['Content-Length'])
            except: content = (2049 * 1024)
            if content < (2048 * 1024): return
            result = response.read(16 * 1024)
            if close == True: response.close()
            return result


        if limit == '0':
            result = response.read(224 * 1024)
        elif not limit == None:
            result = response.read(int(limit) * 1024)
        else:
            result = response.read(5242880)

        try: encoding = response.info().getheader('Content-Encoding')
        except: encoding = None
        if encoding == 'gzip':
            result = gzip.GzipFile(fileobj=StringIO.StringIO(result)).read()


        if 'sucuri_cloudproxy_js' in result:
            su = sucuri().get(result)

            headers['Cookie'] = su

            request = urllib2.Request(url, data=post, headers=headers)

            response = urllib2.urlopen(request, timeout=int(timeout))

            if limit == '0':
                result = response.read(224 * 1024)
            elif not limit == None:
                result = response.read(int(limit) * 1024)
            else:
                result = response.read(5242880)

            try: encoding = response.info().getheader('Content-Encoding')
            except: encoding = None
            if encoding == 'gzip':
                result = gzip.GzipFile(fileobj=StringIO.StringIO(result)).read()


        if output == 'extended':
            response_headers = response.headers
            response_code = str(response.code)
            try: cookie = '; '.join(['%s=%s' % (i.name, i.value) for i in cookies])
            except: pass
            try: cookie = cf
            except: pass
            if close == True: response.close()
            return (result, response_code, response_headers, headers, cookie)
        else:
            if close == True: response.close()
            return result
    except:
        return
Example no. 8
def cfcookie(netloc, ua, timeout):
    try:
        headers = {'User-Agent': ua}

        request = urllib2.Request(netloc, headers=headers)

        try:
            response = urllib2.urlopen(request, timeout=int(timeout))
        except urllib2.HTTPError as response:
            result = response.read(5242880)

        jschl = re.findall('name="jschl_vc" value="(.+?)"/>', result)[0]

        init = re.findall('setTimeout\(function\(\){\s*.*?.*:(.*?)};',
                          result)[-1]

        builder = re.findall(r"challenge-form\'\);\s*(.*)a.v", result)[0]

        decryptVal = parseJSString(init)

        lines = builder.split(';')

        for line in lines:

            if len(line) > 0 and '=' in line:

                sections = line.split('=')
                line_val = parseJSString(sections[1])
                decryptVal = int(
                    eval(str(decryptVal) + sections[0][-1] + str(line_val)))

        answer = decryptVal + len(urlparse.urlparse(netloc).netloc)

        query = '%s/cdn-cgi/l/chk_jschl?jschl_vc=%s&jschl_answer=%s' % (
            netloc, jschl, answer)

        if 'type="hidden" name="pass"' in result:
            passval = re.findall('name="pass" value="(.*?)"', result)[0]
            query = '%s/cdn-cgi/l/chk_jschl?pass=%s&jschl_vc=%s&jschl_answer=%s' % (
                netloc, urllib.quote_plus(passval), jschl, answer)
            time.sleep(5)

        cookies = cookielib.LWPCookieJar()
        handlers = [
            urllib2.HTTPHandler(),
            urllib2.HTTPSHandler(),
            urllib2.HTTPCookieProcessor(cookies)
        ]
        opener = urllib2.build_opener(*handlers)
        opener = urllib2.install_opener(opener)

        try:
            request = urllib2.Request(query, headers=headers)
            response = urllib2.urlopen(request, timeout=int(timeout))
        except:
            pass

        cookie = '; '.join(['%s=%s' % (i.name, i.value) for i in cookies])

        return cookie
    except:
        pass
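cfcookie() above relies on parseJSString(), which the excerpt does not include. A sketch of the helper as it commonly appears alongside this function in these scraper modules (an assumption, not taken from this page):

def parseJSString(s):
    # Rewrite Cloudflare's obfuscated JS arithmetic ('!+[]', '!![]', '[]')
    # as Python and evaluate it to an integer. Note: eval on remote content
    # is inherently risky and only tolerable here because the input is math.
    try:
        offset = 1 if s[0] == '+' else 0
        return int(eval(s.replace('!+[]', '1').replace('!![]', '1')
                         .replace('[]', '0').replace('(', 'str(')[offset:]))
    except:
        pass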
Example no. 9
def fireFuzz(type, fuzz, url, params, header, delay, outputFile, proxy, prefix, postfix):
    """
        :Description: This function iterates through a list of fuzzing strings retrieved from the database, sends them to the target site and displays a progress bar of this process.

        :param type:  Type of the fuzzing strings to send [sql | xss].
        :type type: String
        
        :param fuzz:  Fuzzing strings
        :type fuzz: List
        
        :param url: Target URL
        :type url: String
        
        :param params: POST Parameter
        :type params: String
        
        :param header: Cookie header
        :type header: String
        
        :param delay: Delay between requests
        :type delay: Float
        
        :param outputFile:  Name of Output file
        :type outputFile: String
        
        :note: This function calls the showOutput() file with the saved outputs as argument.
        :todo: Add threads in order to send requests simultaneously.
		
    """
    print '''
    ___       ______________________   ______       ________        
    __ |     / /__    |__  ____/__  | / /__(_)____________(_)_____ _
    __ | /| / /__  /| |_  /_   __   |/ /__  /__  __ \____  /_  __ `/
    __ |/ |/ / _  ___ |  __/   _  /|  / _  / _  / / /___  / / /_/ / 
    ____/|__/  /_/  |_/_/      /_/ |_/  /_/  /_/ /_/___  /  \__,_/  
                                                    /___/           
                                                    
    WAFNinja - Penetration testers favorite for WAF Bypassing
    '''
    pbar = ProgressBar(widgets=[SimpleProgress(), ' Fuzz sent!    ', Percentage(), Bar()])
    if proxy != '':
        httpProxy = urllib2.ProxyHandler({'http': proxy, 'https': proxy})
        ctx = ssl.create_default_context()
        ctx.check_hostname = False
        ctx.verify_mode = ssl.CERT_NONE
        opener = urllib2.build_opener(urllib2.HTTPSHandler(context=ctx),httpProxy)
        urllib2.install_opener(opener)
    else:
        opener = urllib2.build_opener()
    opener.addheaders = [('User-Agent', 'Mozilla/5.0')]
    for h in header:
        opener.addheaders.append(h)
    result = []
    
    for fuzz in pbar(fuzz):
        expected = fuzz[1]
        fuzz = prefix + fuzz[0] + postfix
        fuzz_enc = fuzz.encode('utf-8')
        try:
            sleep(float(delay))
            if params is None: # GET parameter
                randomString, url_with_fuzz = insertFuzz(url, fuzz_enc) 
                response = opener.open(url_with_fuzz)
            else: # POST parameter
                randomString, params_with_fuzz = setParams(params, fuzz_enc) 
                response = opener.open(url, urllib.urlencode(params_with_fuzz))
            content = response.read()
            occurrence = content.find(randomString) + len(randomString) # position just past randomString, i.e. where the fuzz starts
            result.append({
                'fuzz' : fuzz_enc,
                'expected' : expected,
                'httpCode' : response.getcode(),
                'contentLength': len(content),
                'output' : content[occurrence:occurrence+len(expected)]}) # slice from occurrence to occurrence+len(expected)
        except urllib2.HTTPError as error:  # HTTP status != 200
            if error.code == 404:
                print 'ERROR: Target URL not reachable!'
                sys.exit()
            else: # HTTP Status != 404
                result.append({
                    'fuzz' : fuzz_enc, 
                    'expected' : expected, 
                    'httpCode' : error.code, 
                    'contentLength': '-', 
                    'output' : '-'})
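fireFuzz() calls two helpers the excerpt omits, insertFuzz() and setParams(). A hypothetical sketch of what they need to do, inferred from the call sites: a random marker is planted next to the payload so its reflection can be located in the response (the FUZZ placeholder convention is an assumption):

import random, string, urlparse

def insertFuzz(url, fuzz):
    # Hypothetical helper: swap the FUZZ placeholder in the URL for a random
    # marker plus the payload; return the marker so the caller can find it.
    marker = ''.join(random.choice(string.ascii_lowercase) for _ in range(8))
    return marker, url.replace('FUZZ', marker + fuzz)

def setParams(params, fuzz):
    # Hypothetical helper: same idea for POST parameters given as 'a=1&b=FUZZ'.
    marker = ''.join(random.choice(string.ascii_lowercase) for _ in range(8))
    pairs = dict(urlparse.parse_qsl(params))
    for key in pairs:
        pairs[key] = pairs[key].replace('FUZZ', marker + fuzz)
    return marker, pairs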
Example no. 10
def request(url,
            close=True,
            error=False,
            proxy=None,
            post=None,
            headers=None,
            mobile=False,
            safe=False,
            referer=None,
            cookie=None,
            output='',
            timeout='30'):
    try:
        handlers = []
        if not proxy == None:
            handlers += [
                urllib2.ProxyHandler({'http': '%s' % (proxy)}),
                urllib2.HTTPHandler
            ]
            opener = urllib2.build_opener(*handlers)
            opener = urllib2.install_opener(opener)
        if output == 'cookie' or not close == True:
            import cookielib
            cookies = cookielib.LWPCookieJar()
            handlers += [
                urllib2.HTTPHandler(),
                urllib2.HTTPSHandler(),
                urllib2.HTTPCookieProcessor(cookies)
            ]
            opener = urllib2.build_opener(*handlers)
            opener = urllib2.install_opener(opener)
        try:
            if sys.version_info < (2, 7, 9): raise Exception()
            import ssl
            ssl_context = ssl.create_default_context()
            ssl_context.check_hostname = False
            ssl_context.verify_mode = ssl.CERT_NONE
            handlers += [urllib2.HTTPSHandler(context=ssl_context)]
            opener = urllib2.build_opener(*handlers)
            opener = urllib2.install_opener(opener)
        except:
            pass

        try:
            headers.update(headers)
        except:
            headers = {}
        if 'User-Agent' in headers:
            pass
        elif not mobile == True:
            #headers['User-Agent'] = agent()
            headers['User-Agent'] = cache.get(randomagent, 1)
        else:
            headers['User-Agent'] = 'Apple-iPhone/701.341'
        if 'referer' in headers:
            pass
        elif referer == None:
            headers['referer'] = '%s://%s/' % (urlparse.urlparse(url).scheme,
                                               urlparse.urlparse(url).netloc)
        else:
            headers['referer'] = referer
        if not 'Accept-Language' in headers:
            headers['Accept-Language'] = 'en-US'
        if 'cookie' in headers:
            pass
        elif not cookie == None:
            headers['cookie'] = cookie

        request = urllib2.Request(url, data=post, headers=headers)

        try:
            response = urllib2.urlopen(request, timeout=int(timeout))
        except urllib2.HTTPError as response:
            if error == False: return

        if output == 'cookie':
            result = []
            for c in cookies:
                result.append('%s=%s' % (c.name, c.value))
            result = "; ".join(result)
        elif output == 'response':
            if safe == True:
                result = (str(response.code), response.read(224 * 1024))
            else:
                result = (str(response.code), response.read())
        elif output == 'chunk':
            content = int(response.headers['Content-Length'])
            if content < (2048 * 1024): return
            result = response.read(16 * 1024)
        elif output == 'geturl':
            result = response.geturl()
        else:
            if safe == True:
                result = response.read(224 * 1024)
            else:
                result = response.read()
        if close == True:
            response.close()

        return result
    except:
        return
Example no. 11
def request(url,
            close=True,
            redirect=True,
            error=False,
            proxy=None,
            post=None,
            headers=None,
            mobile=False,
            limit=None,
            referer=None,
            cookie=None,
            output='',
            timeout='30'):
    try:
        handlers = []

        if not proxy == None:
            handlers += [
                urllib2.ProxyHandler({'http': '%s' % (proxy)}),
                urllib2.HTTPHandler
            ]
            opener = urllib2.build_opener(*handlers)
            opener = urllib2.install_opener(opener)

        if output == 'cookie' or output == 'extended' or not close == True:
            cookies = cookielib.LWPCookieJar()
            handlers += [
                urllib2.HTTPHandler(),
                urllib2.HTTPSHandler(),
                urllib2.HTTPCookieProcessor(cookies)
            ]
            opener = urllib2.build_opener(*handlers)
            opener = urllib2.install_opener(opener)

        try:
            if sys.version_info < (2, 7, 9):
                raise Exception()
            import ssl
            ssl_context = ssl.create_default_context()
            ssl_context.check_hostname = False
            ssl_context.verify_mode = ssl.CERT_NONE
            handlers += [urllib2.HTTPSHandler(context=ssl_context)]
            opener = urllib2.build_opener(*handlers)
            opener = urllib2.install_opener(opener)

        except:
            pass

        try:
            headers.update(headers)
        except:
            headers = {}
        if 'User-Agent' in headers:
            pass
        elif not mobile == True:
            #headers['User-Agent'] = agent()
            headers['User-Agent'] = cache.get(randomagent, 1)
        else:
            headers['User-Agent'] = 'Apple-iPhone/701.341'
        if 'Referer' in headers:
            pass
        elif referer == None:
            headers['Referer'] = '%s://%s/' % (urlparse.urlparse(url).scheme,
                                               urlparse.urlparse(url).netloc)
        else:
            headers['Referer'] = referer
        if not 'Accept-Language' in headers:
            headers['Accept-Language'] = 'en-US'
        if 'Cookie' in headers:
            pass
        elif not cookie == None:
            headers['Cookie'] = cookie

        if redirect == False:

            class NoRedirection(urllib2.HTTPErrorProcessor):
                def http_response(self, request, response):
                    return response

            opener = urllib2.build_opener(NoRedirection)
            opener = urllib2.install_opener(opener)

            try:
                del headers['Referer']
            except:
                pass

        request = urllib2.Request(url, data=post, headers=headers)

        try:
            response = urllib2.urlopen(request, timeout=int(timeout))

        except urllib2.HTTPError as response:

            if response.code == 503:

                if 'cf-browser-verification' in response.read(5242880):

                    netloc = '%s://%s' % (urlparse.urlparse(url).scheme,
                                          urlparse.urlparse(url).netloc)

                    cf = cache.get(cfcookie, 168, netloc,
                                   headers['User-Agent'], timeout)

                    headers['Cookie'] = cf

                    request = urllib2.Request(url, data=post, headers=headers)

                    response = urllib2.urlopen(request, timeout=int(timeout))

                elif error == False:
                    return

            elif error == False:
                return

        if output == 'cookie':
            try:
                result = '; '.join(
                    ['%s=%s' % (i.name, i.value) for i in cookies])
            except:
                pass
            try:
                result = cf
            except:
                pass

        elif output == 'response':
            if limit == '0':
                result = (str(response.code), response.read(224 * 1024))
            elif not limit == None:
                result = (str(response.code), response.read(int(limit) * 1024))
            else:
                result = (str(response.code), response.read(5242880))

        elif output == 'chunk':
            try:
                content = int(response.headers['Content-Length'])
            except:
                content = (2049 * 1024)
            if content < (2048 * 1024): return
            result = response.read(16 * 1024)

        elif output == 'extended':
            try:
                cookie = '; '.join(
                    ['%s=%s' % (i.name, i.value) for i in cookies])
            except:
                pass
            try:
                cookie = cf
            except:
                pass
            content = response.headers
            result = response.read(5242880)
            return (result, headers, content, cookie)

        elif output == 'geturl':
            result = response.geturl()

        elif output == 'headers':
            content = response.headers
            return content

        else:
            if limit == '0':
                result = response.read(224 * 1024)
            elif not limit == None:
                result = response.read(int(limit) * 1024)
            else:
                result = response.read(5242880)

        if close == True:
            response.close()

        return result

    except:
        return
Example no. 12
def request(url,
            close=True,
            redirect=True,
            error=False,
            verify=True,
            proxy=None,
            post=None,
            headers=None,
            mobile=False,
            XHR=False,
            limit=None,
            referer=None,
            cookie=None,
            compression=True,
            output='',
            timeout='30'):
    try:
        if not url:
            return
        handlers = []
        if proxy is not None:
            handlers += [
                urllib2.ProxyHandler({'http': '%s' % (proxy)}),
                urllib2.HTTPHandler
            ]
            opener = urllib2.build_opener(*handlers)
            urllib2.install_opener(opener)
        if output == 'cookie' or output == 'extended' or not close is True:
            cookies = cookielib.LWPCookieJar()
            handlers += [
                urllib2.HTTPHandler(),
                urllib2.HTTPSHandler(),
                urllib2.HTTPCookieProcessor(cookies)
            ]
            opener = urllib2.build_opener(*handlers)
            urllib2.install_opener(opener)
        try:
            import platform
            node = platform.node().lower()
            is_XBOX = platform.uname()[1] == 'XboxOne'
        except Exception:
            node = ''
            is_XBOX = False
        if verify is False and sys.version_info >= (2, 7, 12):
            try:
                import ssl
                ssl_context = ssl._create_unverified_context()
                handlers += [urllib2.HTTPSHandler(context=ssl_context)]
                opener = urllib2.build_opener(*handlers)
                urllib2.install_opener(opener)
            except:
                pass
        if verify is True and ((2, 7, 8) < sys.version_info <
                               (2, 7, 12) or is_XBOX):
            try:
                import ssl
                ssl_context = ssl.create_default_context()
                ssl_context.check_hostname = False
                ssl_context.verify_mode = ssl.CERT_NONE
                handlers += [urllib2.HTTPSHandler(context=ssl_context)]
                opener = urllib2.build_opener(*handlers)
                urllib2.install_opener(opener)
            except:
                pass
        if url.startswith('//'): url = 'http:' + url
        _headers = {}
        try:
            _headers.update(headers)
        except:
            pass
        if 'User-Agent' in _headers:
            pass
        elif mobile is True:
            _headers['User-Agent'] = cache.get(randommobileagent, 1)
        else:
            _headers['User-Agent'] = cache.get(randomagent, 1)
        if 'Referer' in _headers:
            pass
        elif referer is not None:
            _headers['Referer'] = referer
        if 'Accept-Language' not in _headers:
            _headers['Accept-Language'] = 'en-US'
        if 'X-Requested-With' in _headers:
            pass
        elif XHR is True:
            _headers['X-Requested-With'] = 'XMLHttpRequest'
        if 'Cookie' in _headers:
            pass
        elif cookie is not None:
            _headers['Cookie'] = cookie
        if 'Accept-Encoding' in _headers:
            pass
        elif compression and limit is None:
            _headers['Accept-Encoding'] = 'gzip'
        if redirect is False:

            class NoRedirectHandler(urllib2.HTTPRedirectHandler):
                def http_error_302(self, req, fp, code, msg, headers):
                    infourl = urllib.addinfourl(fp, headers,
                                                req.get_full_url())
                    infourl.status = code
                    infourl.code = code
                    return infourl

                http_error_300 = http_error_302
                http_error_301 = http_error_302
                http_error_303 = http_error_302
                http_error_307 = http_error_302

            opener = urllib2.build_opener(NoRedirectHandler())
            urllib2.install_opener(opener)
            try:
                del _headers['Referer']
            except:
                pass
        if isinstance(post, dict):
            post = utils.byteify(post)
            post = urllib.urlencode(post)
        url = utils.byteify(url)
        request = urllib2.Request(url, data=post)
        _add_request_header(request, _headers)
        try:
            response = urllib2.urlopen(request, timeout=int(timeout))
        except urllib2.HTTPError as response:
            if response.code == 503:
                cf_result = response.read()
                try:
                    encoding = response.info().getheader('Content-Encoding')
                except:
                    encoding = None
                if encoding == 'gzip':
                    cf_result = gzip.GzipFile(
                        fileobj=StringIO.StringIO(cf_result)).read()
                if 'cf-browser-verification' in cf_result:
                    while 'cf-browser-verification' in cf_result:
                        netloc = '%s://%s/' % (urlparse.urlparse(url).scheme,
                                               urlparse.urlparse(url).netloc)
                        ua = _headers['User-Agent']
                        cf = cache.get(cfcookie().get, 1, netloc, ua, timeout)
                        _headers['Cookie'] = cf
                        request = urllib2.Request(url, data=post)
                        _add_request_header(request, _headers)
                        try:
                            response = urllib2.urlopen(request,
                                                       timeout=int(timeout))
                            cf_result = 'Success'
                        except urllib2.HTTPError as response:
                            cache.remove(cfcookie().get, netloc, ua, timeout)
                            cf_result = response.read()
                else:
                    log_utils.log(
                        'Request-Error (%s): %s' % (str(response.code), url),
                        log_utils.LOGDEBUG)
                    if error is False:
                        return
            else:
                log_utils.log(
                    'Request-Error (%s): %s' % (str(response.code), url),
                    log_utils.LOGDEBUG)
                if error is False:
                    return
        if output == 'cookie':
            try:
                result = '; '.join(
                    ['%s=%s' % (i.name, i.value) for i in cookies])
            except:
                pass
            try:
                result = cf
            except:
                pass
            if close is True:
                response.close()
            return result
        elif output == 'geturl':
            result = response.geturl()
            if close is True: response.close()
            return result
        elif output == 'headers':
            result = response.headers
            if close is True: response.close()
            return result
        elif output == 'chunk':
            try:
                content = int(response.headers['Content-Length'])
            except:
                content = (2049 * 1024)
            if content < (2048 * 1024): return
            result = response.read(16 * 1024)
            if close is True: response.close()
            return result
        elif output == 'file_size':
            try:
                content = int(response.headers['Content-Length'])
            except:
                content = '0'
            response.close()
            return content
        if limit == '0':
            result = response.read(224 * 1024)
        elif limit is not None:
            result = response.read(int(limit) * 1024)
        else:
            result = response.read(5242880)
        try:
            encoding = response.info().getheader('Content-Encoding')
        except:
            encoding = None
        if encoding == 'gzip':
            result = gzip.GzipFile(fileobj=StringIO.StringIO(result)).read()
        if 'sucuri_cloudproxy_js' in result:
            su = sucuri().get(result)
            _headers['Cookie'] = su
            request = urllib2.Request(url, data=post)
            _add_request_header(request, _headers)
            response = urllib2.urlopen(request, timeout=int(timeout))
            if limit == '0':
                result = response.read(224 * 1024)
            elif limit is not None:
                result = response.read(int(limit) * 1024)
            else:
                result = response.read(5242880)
            try:
                encoding = response.info().getheader('Content-Encoding')
            except:
                encoding = None
            if encoding == 'gzip':
                result = gzip.GzipFile(
                    fileobj=StringIO.StringIO(result)).read()
        if 'Blazingfast.io' in result and 'xhr.open' in result:
            netloc = '%s://%s' % (urlparse.urlparse(url).scheme,
                                  urlparse.urlparse(url).netloc)
            ua = _headers['User-Agent']
            _headers['Cookie'] = cache.get(bfcookie().get, 168, netloc, ua,
                                           timeout)
            result = _basic_request(url,
                                    headers=_headers,
                                    post=post,
                                    timeout=timeout,
                                    limit=limit)
        if output == 'extended':
            try:
                response_headers = dict([(item[0].title(), item[1])
                                         for item in response.info().items()])
            except:
                response_headers = response.headers
            response_code = str(response.code)
            try:
                cookie = '; '.join(
                    ['%s=%s' % (i.name, i.value) for i in cookies])
            except:
                pass
            try:
                cookie = cf
            except:
                pass
            if close is True: response.close()
            return (result, response_code, response_headers, _headers, cookie)
        else:
            if close is True: response.close()
            return result
    except Exception as e:
        log_utils.log('Request-Error: (%s) => %s' % (str(e), url),
                      log_utils.LOGDEBUG)
        return
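
A hedged usage sketch for the request() helper above; the URLs are placeholders, and the calls just illustrate the main output modes the code implements:

# Hypothetical calls against the request() helper defined above.
html = request('http://example.com/', timeout='15')  # plain body, capped at 5 MB

# output='extended' returns (result, response_code, response_headers, _headers, cookie)
body, code, resp_headers, sent_headers, cookie = request(
    'http://example.com/', output='extended')

size = request('http://example.com/file.bin', output='file_size')  # Content-Length, or '0' on failure
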
Example No. 13
0
    def do_request(self, json_obj):
        headers = {
            'Content-Type': 'application/json-rpc',
            'User-Agent': 'python/zabbix_api'
        }

        if self.httpuser:
            self.debug(logging.INFO, "HTTP Auth enabled")
            auth = 'Basic ' + string.strip(
                base64.encodestring(self.httpuser + ':' + self.httppasswd))
            headers['Authorization'] = auth
        self.r_query.append(str(json_obj))
        self.debug(logging.INFO, "Sending: " + str(json_obj))
        self.debug(logging.DEBUG, "Sending headers: " + str(headers))

        request = urllib2.Request(url=self.url,
                                  data=json_obj.encode('utf-8'),
                                  headers=headers)
        if self.proto == "https":
            https_handler = urllib2.HTTPSHandler(debuglevel=0)
            opener = urllib2.build_opener(https_handler)
        elif self.proto == "http":
            http_handler = urllib2.HTTPHandler(debuglevel=0)
            opener = urllib2.build_opener(http_handler)
        else:
            raise ZabbixAPIException("Unknow protocol %s" % self.proto)

        urllib2.install_opener(opener)
        try:
            response = opener.open(request, timeout=self.timeout)
        except Exception as e:
            raise ZabbixAPIException(
                "Site needs HTTP authentication. Error: " + str(e))
        self.debug(logging.INFO, "Response Code: " + str(response.code))

        # NOTE: Getting a 412 response code means the headers are not in the
        # list of allowed headers.
        if response.code != 200:
            # urllib2 responses expose .code and .msg, not .status/.reason
            raise ZabbixAPIException("HTTP ERROR %s: %s" %
                                     (response.code, response.msg))
        reads = response.read()
        if len(reads) == 0:
            raise ZabbixAPIException("Received zero answer")
        try:
            jobj = json.loads(reads.decode('utf-8'))
        except ValueError as msg:
            print("unable to decode. returned string: %s" % reads)
            sys.exit(-1)
        self.debug(logging.DEBUG, "Response Body: " + str(jobj))

        self.id += 1

        if 'error' in jobj:  # some exception
            msg = "Error %s: %s, %s while sending %s" % (
                jobj['error']['code'], jobj['error']['message'],
                jobj['error']['data'], str(json_obj))
            if re.search(".*already\sexists.*", jobj["error"]["data"],
                         re.I):  # already exists
                raise Already_Exists(msg, jobj['error']['code'])
            else:
                raise ZabbixAPIException(msg, jobj['error']['code'])
        return jobj
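
For context, a hedged sketch of the JSON-RPC envelope do_request() expects; the method name and auth token are placeholders, and `api` stands for an instance of this class:

import json

# Hypothetical Zabbix JSON-RPC 2.0 payload; do_request() receives it as a
# string and posts it to self.url with Content-Type application/json-rpc.
payload = json.dumps({
    'jsonrpc': '2.0',
    'method': 'host.get',                      # placeholder API method
    'params': {'output': ['hostid', 'host']},
    'auth': 'SESSION-TOKEN',                   # placeholder session token
    'id': 1,
})
# jobj = api.do_request(payload)
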
Example No. 14
0
    def getURLRequestData(self, params=None, post_data=None):
        # avoid a shared mutable default: this method writes into params below
        if params is None:
            params = {}

        def urlOpen(req, customOpeners, timeout):
            if len(customOpeners) > 0:
                opener = urllib2.build_opener(*customOpeners)
                if timeout is not None:
                    response = opener.open(req, timeout=timeout)
                else:
                    response = opener.open(req)
            else:
                if timeout is not None:
                    response = urllib2.urlopen(req, timeout=timeout)
                else:
                    response = urllib2.urlopen(req)
            return response
        
        if IsMainThread():
            msg1 = _('It is not allowed to call getURLRequestData from main thread.')
            msg2 = _('You should never perform block I/O operations in the __init__.')
            msg3 = _('In next release exception will be thrown instead of this message!')
            GetIPTVNotify().push('%s\n\n%s\n\n%s' % (msg1, msg2, msg3), 'error', 40)
        
        if not self.useMozillaCookieJar:
            cj = cookielib.LWPCookieJar()
        else:
            cj = cookielib.MozillaCookieJar()
        response = None
        req      = None
        out_data = None
        opener   = None
        metadata = None
        
        timeout = params.get('timeout', None)
        
        if 'host' in params:
            host = params['host']
        else:
            host = self.HOST

        if 'header' in params:
            headers = params['header']
        elif self.HEADER is not None:
            headers = self.HEADER
        else:
            headers = { 'User-Agent' : host }
            
        if 'User-Agent' not in headers:
            headers['User-Agent'] = host
        
        metadata = {}

        printDBG('pCommon - getURLRequestData() -> params: ' + str(params))
        printDBG('pCommon - getURLRequestData() -> headers: ' + str(headers)) 

        customOpeners = []
        #cookie support
        if 'use_cookie' not in params and 'cookiefile' in params and ('load_cookie' in params or 'save_cookie' in params):
            params['use_cookie'] = True 
        
        if params.get('use_cookie', False):
            if params.get('load_cookie', False):
                try:
                    cj.load(params['cookiefile'], ignore_discard = True)
                except IOError:
                    printDBG('Cookie file [%s] does not exist' % params['cookiefile'])
                except Exception:
                    printExc()
            try:
                for cookieKey in params.get('cookie_items', {}).keys():
                    printDBG("cookie_item[%s=%s]" % (cookieKey, params['cookie_items'][cookieKey]))
                    cookieItem = cookielib.Cookie(version=0, name=cookieKey, value=params['cookie_items'][cookieKey], port=None, port_specified=False, domain='', domain_specified=False, domain_initial_dot=False, path='/', path_specified=True, secure=False, expires=None, discard=True, comment=None, comment_url=None, rest={'HttpOnly': None}, rfc2109=False)
                    cj.set_cookie(cookieItem)
            except Exception:
                printExc()
            customOpeners.append( urllib2.HTTPCookieProcessor(cj) )
            
        if params.get('no_redirection', False):
            customOpeners.append( NoRedirection() )
        
        # debug 
        #customOpeners.append(urllib2.HTTPSHandler(debuglevel=1))
        #customOpeners.append(urllib2.HTTPHandler(debuglevel=1))
        if not IsHttpsCertValidationEnabled():
            try:
                if params.get('ssl_protocol', None) != None:
                    ctx = ssl._create_unverified_context(params['ssl_protocol'])
                else:
                    ctx = ssl._create_unverified_context()
                customOpeners.append(urllib2.HTTPSHandler(context=ctx))
            except Exception: pass
        elif params.get('ssl_protocol', None) != None:
            ctx = ssl.SSLContext(params['ssl_protocol'])
            customOpeners.append(urllib2.HTTPSHandler(context=ctx))
        
        #proxy support
        if self.useProxy:
            http_proxy = self.proxyURL
        else:
            http_proxy = ''
        #proxy from parameters (if available) overwrite default one
        if 'http_proxy' in params:
            http_proxy = params['http_proxy']
        if '' != http_proxy:
            printDBG('getURLRequestData USE PROXY')
            customOpeners.append( urllib2.ProxyHandler({"http":http_proxy}) )
            customOpeners.append( urllib2.ProxyHandler({"https":http_proxy}) )
        
        pageUrl = params['url']
        proxy_gateway = params.get('proxy_gateway', '')
        if proxy_gateway != '':
            pageUrl = proxy_gateway.format(urllib.quote_plus(pageUrl, ''))
        printDBG("pageUrl: [%s]" % pageUrl)

        if post_data is not None:
            printDBG('pCommon - getURLRequestData() -> post data: ' + str(post_data))
            if params.get('raw_post_data', False):
                dataPost = post_data
            elif params.get('multipart_post_data', False):
                customOpeners.append( MultipartPostHandler() )
                dataPost = post_data
            else:
                dataPost = urllib.urlencode(post_data)
            req = urllib2.Request(pageUrl, dataPost, headers)
        else:
            req = urllib2.Request(pageUrl, None, headers)

        if not params.get('return_data', False):
            out_data = urlOpen(req, customOpeners, timeout)
        else:
            gzip_encoding = False
            try:
                response = urlOpen(req, customOpeners, timeout)
                if response.info().get('Content-Encoding') == 'gzip':
                    gzip_encoding = True
                try: 
                    metadata['url'] = response.geturl()
                    metadata['status_code'] = response.getcode()
                    if 'Content-Type' in response.info(): metadata['content-type'] = response.info()['Content-Type']
                except Exception: pass
                
                data = response.read()
                response.close()
            except urllib2.HTTPError as e:
                ignoreCodeRanges = params.get('ignore_http_code_ranges', [(404, 404), (500, 500)])
                ignoreCode = False
                metadata['status_code'] = e.code
                for ignoreCodeRange in ignoreCodeRanges:
                    if e.code >= ignoreCodeRange[0] and e.code <= ignoreCodeRange[1]:
                        ignoreCode = True
                        break
                
                if ignoreCode:
                    printDBG('!!!!!!!! %s: getURLRequestData - handled' % e.code)
                    if e.fp.info().get('Content-Encoding', '') == 'gzip':
                        gzip_encoding = True
                    try: 
                        metadata['url'] = e.fp.geturl()
                        if 'Content-Type' in e.fp.info(): metadata['content-type'] = e.fp.info()['Content-Type']
                    except Exception: pass
                    data = e.fp.read()
                    #e.msg
                    #e.headers
                elif e.code == 503:
                    if params.get('use_cookie', False):
                        new_cookie = e.fp.info().get('Set-Cookie', '')
                        printDBG("> new_cookie[%s]" % new_cookie)
                        cj.save(params['cookiefile'], ignore_discard = True)
                    raise e
                else:
                    if e.code in [300, 302, 303, 307] and params.get('use_cookie', False) and params.get('save_cookie', False):
                        new_cookie = e.fp.info().get('Set-Cookie', '')
                        printDBG("> new_cookie[%s]" % new_cookie)
                        #for cookieKey in params.get('cookie_items', {}).keys():
                        #    cj.clear('', '/', cookieKey)
                        cj.save(params['cookiefile'], ignore_discard = True)
                    raise e
            try:
                if gzip_encoding:
                    printDBG('Content-Encoding == gzip')
                    buf = StringIO(data)
                    f = gzip.GzipFile(fileobj=buf)
                    out_data = f.read()
                else:
                    out_data = data
            except Exception as e:
                printExc()
                msg1 = _("Critical Error – Content-Encoding gzip cannot be handled!")
                msg2 = _("Last error:\n%s" % str(e))
                GetIPTVNotify().push('%s\n\n%s' % (msg1, msg2), 'error', 20)
                out_data = data
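
The snippet is truncated before its return, but out_data holds the decoded body once 'return_data' is set. A hedged usage sketch; every key mirrors a parameter the method actually reads, while the URL and cookie path are placeholders:

# Hypothetical call on an instance providing getURLRequestData().
data = self.getURLRequestData({
    'url': 'http://example.com/page',      # placeholder URL
    'return_data': True,                   # read and return the body
    'use_cookie': True,
    'load_cookie': True,
    'save_cookie': True,
    'cookiefile': '/tmp/example.cookie',   # placeholder cookie jar path
    'timeout': 20,
})
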
Example No. 15
0
    def build_config(self):
        """Build the config as a Plugins object and return.

        """
        config = monasca_setup.agent_config.Plugins()
        # First watch the process
        config.merge(monasca_setup.detection.watch_process(['apache2']))
        log.info("\tWatching the apache webserver process.")

        error_msg = '\n\t*** The Apache plugin is not configured ***\n\tPlease correct and re-run monasca-setup.'
        # Attempt login, requires either an empty root password from localhost
        # or relying on a configured /root/.apache.cnf
        if self.dependencies_installed():
            log.info("\tAttempting to use client credentials from {:s}".format(
                apache_conf))
            # Read the apache config file to extract the needed variables.
            client_section = False
            apache_url = None
            apache_user = None
            apache_pass = None
            try:
                with open(apache_conf, "r") as confFile:
                    for row in confFile:
                        if "[client]" in row:
                            client_section = True
                            pass
                        if client_section:
                            if "url=" in row:
                                apache_url = row.split("=")[1].strip()
                            if "user="******"=")[1].strip()
                            if "password="******"=")[1].strip()
            except IOError:
                log.info("\tUnable to read {:s}".format(apache_conf))
                log.info("\tWill try to setup Apache plugin using defaults.")

            if not apache_url:
                apache_url = DEFAULT_APACHE_URL

            opener = None
            if apache_user and apache_pass:
                password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
                password_mgr.add_password(None, apache_url, apache_user,
                                          apache_pass)
                handler = urllib2.HTTPBasicAuthHandler(password_mgr)
            else:
                if 'https' in apache_url:
                    handler = urllib2.HTTPSHandler()
                else:
                    handler = urllib2.HTTPHandler()

            opener = urllib2.build_opener(handler)

            response = None
            try:
                request = opener.open(apache_url)
                response = request.read()
                request.close()
                if 'Total Accesses:' in response:
                    instance_vars = {'apache_status_url': apache_url}
                    if apache_user and apache_pass:
                        instance_vars.update({
                            'apache_user': apache_user,
                            'apache_password': apache_pass
                        })
                    config['apache'] = {
                        'init_config': None,
                        'instances': [instance_vars]
                    }
                    log.info("\tSuccessfully setup Apache plugin.")
                else:
                    log.warn('Unable to access the Apache server-status URL;' +
                             error_msg)
            except urllib2.HTTPError as e:
                # HTTPError subclasses URLError, so catch it first or this
                # branch would be unreachable
                log.error('\tError code {0} received when accessing {1}'.
                          format(e.code, apache_url) + error_msg)
            except urllib2.URLError as e:
                log.error(
                    '\tError {0} received when accessing url {1}.'.format(
                        e.reason, apache_url) +
                    '\n\tPlease ensure the Apache web server is running and your configuration '
                    + 'information in /root/.apache.cnf is correct.' +
                    error_msg)
Example No. 16
0
def _request(method, url,
             params=None,
             data=None,
             headers=None,
             cookies=None,
             files=None,
             auth=None,
             timeout=None,
             allow_redirects=True,
             proxies=None,
             hooks=None,
             stream=None,
             verify=None,
             cert=None,
             json=None):
    if not headers:
        headers = {}
        pass

    url = urllib.quote(url, safe="%/:=&?~#+!$,;'@()*[]")

    handlers = []

    import sys
    # starting with python 2.7.9 urllib verifies every https request
    if verify is False and sys.version_info >= (2, 7, 9):
        import ssl

        ssl_context = ssl.create_default_context()
        ssl_context.check_hostname = False
        ssl_context.verify_mode = ssl.CERT_NONE
        handlers.append(urllib2.HTTPSHandler(context=ssl_context))
        pass

    # handlers.append(urllib2.HTTPCookieProcessor())
    # handlers.append(ErrorHandler)
    if not allow_redirects:
        handlers.append(NoRedirectHandler)
        pass
    opener = urllib2.build_opener(*handlers)

    query = ''
    if params:
        for key in params:
            value = params[key]
            if isinstance(value, str):
                value = value.decode('utf-8')
                pass
            params[key] = value.encode('utf-8')
            pass
        query = urllib.urlencode(params)
        pass
    if query:
        url += '?%s' % query
        pass
    request = urllib2.Request(url)
    if headers:
        for key in headers:
            request.add_header(key, str(unicode(headers[key]).encode('utf-8')))
            pass
        pass
    if data or json:
        if headers.get('Content-Type', '').startswith('application/x-www-form-urlencoded') or data:
            # transform a string into a map of values
            if isinstance(data, basestring):
                _data = data.split('&')
                data = {}
                for item in _data:
                    name, value = item.split('=')
                    data[name] = value
                    pass
                pass

            # encode each value
            for key in data:
                data[key] = to_utf8(data[key])
                pass

            # urlencode
            request.data = urllib.urlencode(data)
            pass
        elif headers.get('Content-Type', '').startswith('application/json') and data:
            request.data = real_json.dumps(data).encode('utf-8')
            pass
        elif json:
            request.data = real_json.dumps(json).encode('utf-8')
            pass
        else:
            if not isinstance(data, basestring):
                data = str(data)
                pass

            if isinstance(data, str):
                data = data.encode('utf-8')
                pass
            request.data = data
            pass
        pass
    elif method.upper() in ['POST', 'PUT']:
        request.data = "null"
        pass
    request.get_method = lambda: method
    result = Response()
    response = None
    try:
        response = opener.open(request)
    except urllib2.HTTPError as e:
        # HTTPError implements addinfourl, so we can use the exception to construct a response
        if isinstance(e, urllib2.addinfourl):
            response = e
            pass
        pass
Example No. 17
0
    def send(self,
             url,
             method='GET',
             payload=None,
             headers=None,
             cookiejar=None,
             auth=None,
             content=''):
        '''Makes a web request and returns a response object.'''
        if method.upper() != 'POST' and content:
            raise RequestException(
                'Invalid content type for the %s method: %s' %
                (method, content))
        # prime local mutable variables to prevent persistence
        if payload is None: payload = {}
        if headers is None: headers = {}
        if auth is None: auth = ()

        # set request arguments
        # process user-agent header
        headers['User-Agent'] = self.user_agent
        # process payload
        if content.upper() == 'JSON':
            headers['Content-Type'] = 'application/json'
            payload = json.dumps(payload)
        else:
            payload = urllib.urlencode(payload)
        # process basic authentication
        if len(auth) == 2:
            authorization = ('%s:%s' %
                             (auth[0], auth[1])).encode('base64').replace(
                                 '\n', '')
            headers['Authorization'] = 'Basic %s' % (authorization)
        # process socket timeout
        if self.timeout:
            socket.setdefaulttimeout(self.timeout)

        # set handlers
        # declare handlers list according to debug setting
        handlers = [
            urllib2.HTTPHandler(debuglevel=1),
            urllib2.HTTPSHandler(debuglevel=1)
        ] if self.debug else []
        # process cookiejar handler
        if cookiejar is not None:
            handlers.append(urllib2.HTTPCookieProcessor(cookiejar))
        # process redirect and add handler
        if self.redirect is False:
            handlers.append(NoRedirectHandler)
        # process proxies and add handler
        if self.proxy:
            proxies = {'http': self.proxy, 'https': self.proxy}
            handlers.append(urllib2.ProxyHandler(proxies))

        # install opener
        opener = urllib2.build_opener(*handlers)
        urllib2.install_opener(opener)

        # process method and make request
        if method == 'GET':
            if payload: url = '%s?%s' % (url, payload)
            req = urllib2.Request(url, headers=headers)
        elif method == 'POST':
            req = urllib2.Request(url, data=payload, headers=headers)
        elif method == 'HEAD':
            if payload: url = '%s?%s' % (url, payload)
            req = urllib2.Request(url, headers=headers)
            req.get_method = lambda: 'HEAD'
        else:
            raise RequestException(
                'Request method \'%s\' is not a supported method.' % (method))
        try:
            resp = urllib2.urlopen(req)
        except urllib2.HTTPError as e:
            resp = e

        # build and return response object
        return ResponseObject(resp, cookiejar)
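
A hedged usage sketch for send(); the URL and payload are placeholders, and ResponseObject is whatever wrapper the surrounding module defines:

# Hypothetical POST with a JSON body through the send() method above.
resp = self.send('http://example.com/api',
                 method='POST',
                 payload={'q': 'test'},    # serialized with json.dumps
                 content='JSON')           # sets Content-Type: application/json
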
Example No. 18
0
    def __init__(self):
        # Initialize urllib for camera verification
        handler = urllib2.HTTPSHandler(debuglevel=1)
        opener = urllib2.build_opener(handler)
        urllib2.install_opener(opener)
Example No. 19
0
    def get_cookie(self, netloc, ua, timeout):
        try:
            headers = {'User-Agent': ua}
            request = urllib2.Request(netloc)
            _add_request_header(request, headers)

            try:
                response = urllib2.urlopen(request, timeout=int(timeout))
            except HTTPError as response:
                result = response.read(5242880)
                try:
                    encoding = response.info().getheader('Content-Encoding')
                except:
                    encoding = None
                if encoding == 'gzip':
                    result = gzip.GzipFile(fileobj=StringIO(result)).read()

            jschl = re.findall(
                r'name\s*=\s*["\']jschl_vc["\']\s*value\s*=\s*["\'](.+?)["\']/>',
                result, re.I)[0]
            init = re.findall(r'setTimeout\(function\(\){\s*.*?.*:(.*?)};',
                              result, re.I)[-1]
            builder = re.findall(r"challenge-form\'\);\s*(.*)a.v", result,
                                 re.I)[0]
            decryptVal = self.parseJSString(init)
            lines = builder.split(';')

            for line in lines:
                if len(line) > 0 and '=' in line:
                    sections = line.split('=')
                    line_val = self.parseJSString(sections[1])
                    decryptVal = int(
                        eval(
                            str(decryptVal) + sections[0][-1] + str(line_val)))

            answer = decryptVal + len(urlparse(netloc).netloc)
            query = '%s/cdn-cgi/l/chk_jschl?jschl_vc=%s&jschl_answer=%s' % (
                netloc, jschl, answer)

            if 'type="hidden" name="pass"' in result:
                passval = re.findall(
                    r'name\s*=\s*["\']pass["\']\s*value\s*=\s*["\'](.*?)["\']',
                    result, re.I)[0]
                query = '%s/cdn-cgi/l/chk_jschl?pass=%s&jschl_vc=%s&jschl_answer=%s' % (
                    netloc, quote_plus(passval), jschl, answer)
                time.sleep(6)

            cookies = cookielib.LWPCookieJar()
            handlers = [
                urllib2.HTTPHandler(),
                urllib2.HTTPSHandler(),
                urllib2.HTTPCookieProcessor(cookies)
            ]
            opener = urllib2.build_opener(*handlers)
            urllib2.install_opener(opener)  # install_opener() returns None

            try:
                request = urllib2.Request(query)
                _add_request_header(request, headers)
                response = urllib2.urlopen(request, timeout=int(timeout))
            except:
                pass

            cookie = '; '.join(['%s=%s' % (i.name, i.value) for i in cookies])
            if 'cf_clearance' in cookie: self.cookie = cookie
        except:
            log_utils.error()
Example No. 20
0
def request(url,
            close=True,
            redirect=True,
            error=False,
            verify=True,
            proxy=None,
            post=None,
            headers=None,
            mobile=False,
            XHR=False,
            limit=None,
            referer=None,
            cookie=None,
            compression=False,
            output='',
            timeout='30',
            username=None,
            password=None,
            as_bytes=False):
    """
    Re-adapted from Twilight0's tulip module => https://github.com/Twilight0/script.module.tulip
    post needs fixing
    """

    try:
        url = six.ensure_text(url, errors='ignore')
    except Exception:
        pass

    if isinstance(post, dict):
        post = bytes(urlencode(post), encoding='utf-8')
    elif isinstance(post, str):
        post = bytes(post, encoding='utf-8')

    try:
        handlers = []

        if username is not None and password is not None and not proxy:

            passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
            passmgr.add_password(None, uri=url, user=username, passwd=password)
            handlers += [urllib2.HTTPBasicAuthHandler(passmgr)]
            opener = urllib2.build_opener(*handlers)
            urllib2.install_opener(opener)

        if proxy is not None:

            if username is not None and password is not None:

                if six.PY2:
                    passmgr = urllib2.ProxyBasicAuthHandler()
                else:
                    passmgr = urllib2.HTTPPasswordMgr()

                passmgr.add_password(None,
                                     uri=url,
                                     user=username,
                                     passwd=password)

                handlers += [
                    urllib2.ProxyHandler({'http': '{0}'.format(proxy)}),
                    urllib2.HTTPHandler,
                    urllib2.ProxyBasicAuthHandler(passmgr)
                ]
            else:
                handlers += [
                    urllib2.ProxyHandler({'http': '{0}'.format(proxy)}),
                    urllib2.HTTPHandler
                ]
            opener = urllib2.build_opener(*handlers)
            urllib2.install_opener(opener)

        if output == 'cookie' or output == 'extended' or close is not True:

            cookies = cookielib.LWPCookieJar()
            handlers += [
                urllib2.HTTPHandler(),
                urllib2.HTTPSHandler(),
                urllib2.HTTPCookieProcessor(cookies)
            ]

            opener = urllib2.build_opener(*handlers)
            urllib2.install_opener(opener)

        try:
            import platform
            is_XBOX = platform.uname()[1] == 'XboxOne'
        except Exception:
            is_XBOX = False

        if not verify and sys.version_info >= (2, 7, 12):

            try:

                import ssl
                ssl_context = ssl._create_unverified_context()
                handlers += [urllib2.HTTPSHandler(context=ssl_context)]
                opener = urllib2.build_opener(*handlers)
                urllib2.install_opener(opener)

            except Exception:

                pass

        elif verify and ((2, 7, 8) < sys.version_info < (2, 7, 12) or is_XBOX):

            try:

                import ssl
                try:
                    import _ssl
                    CERT_NONE = _ssl.CERT_NONE
                except Exception:
                    CERT_NONE = ssl.CERT_NONE
                ssl_context = ssl.create_default_context()
                ssl_context.check_hostname = False
                ssl_context.verify_mode = CERT_NONE
                handlers += [urllib2.HTTPSHandler(context=ssl_context)]
                opener = urllib2.build_opener(*handlers)
                urllib2.install_opener(opener)

            except Exception:

                pass

        try:
            headers.update(headers)  # no-op; raises if headers is None
        except Exception:
            headers = {}

        if 'User-Agent' in headers:
            pass
        elif mobile is not True:
            #headers['User-Agent'] = agent()
            headers['User-Agent'] = cache.get(randomagent, 12)
        else:
            headers['User-Agent'] = cache.get(randommobileagent, 12)

        if 'Referer' in headers:
            pass
        elif referer is None:
            headers['Referer'] = '%s://%s/' % (urlparse(url).scheme,
                                               urlparse(url).netloc)
        else:
            headers['Referer'] = referer

        if 'Accept-Language' not in headers:
            headers['Accept-Language'] = 'en-US'

        if 'X-Requested-With' in headers:
            pass
        elif XHR is True:
            headers['X-Requested-With'] = 'XMLHttpRequest'

        if 'Cookie' in headers:
            pass
        elif cookie is not None:
            headers['Cookie'] = cookie

        if 'Accept-Encoding' in headers:
            pass
        elif compression and limit is None:
            headers['Accept-Encoding'] = 'gzip'

        if redirect is False:

            class NoRedirectHandler(urllib2.HTTPRedirectHandler):
                def http_error_302(self, reqst, fp, code, msg, head):

                    infourl = addinfourl(fp, head, reqst.get_full_url())
                    infourl.status = code
                    infourl.code = code

                    return infourl

                http_error_300 = http_error_302
                http_error_301 = http_error_302
                http_error_303 = http_error_302
                http_error_307 = http_error_302

            opener = urllib2.build_opener(NoRedirectHandler())
            urllib2.install_opener(opener)

            try:
                del headers['Referer']
            except Exception:
                pass

        req = urllib2.Request(url, data=post, headers=headers)

        try:

            response = urllib2.urlopen(req, timeout=int(timeout))

        except HTTPError as resp:

            response = resp
            if response.code == 503:

                if 'cf-browser-verification' in response.read(5242880):
                    from resources.lib.modules import cfscrape

                    netloc = '{0}://{1}'.format(
                        urlparse(url).scheme,
                        urlparse(url).netloc)

                    ua = headers['User-Agent']

                    #cf = cache.get(Cfcookie.get, 168, netloc, ua, timeout)
                    try:
                        cf = cache.get(cfscrape.get_cookie_string, 1, netloc,
                                       ua)[0]
                    except BaseException:
                        try:
                            cf = cfscrape.get_cookie_string(url, ua)[0]
                        except BaseException:
                            cf = None
                    finally:
                        headers['Cookie'] = cf

                    req = urllib2.Request(url, data=post, headers=headers)

                    response = urllib2.urlopen(req, timeout=int(timeout))

                elif error is False:
                    return

            elif error is False:
                return

        if output == 'cookie':

            try:
                result = '; '.join(
                    ['%s=%s' % (i.name, i.value) for i in cookies])
            except Exception:
                pass

            try:
                result = cf
            except Exception:
                pass

        elif output == 'response':

            if limit == '0':
                result = (str(response.code), response.read(224 * 1024))
            elif limit is not None:
                result = (str(response.code), response.read(int(limit) * 1024))
            else:
                result = (str(response.code), response.read(5242880))

        elif output == 'chunk':

            try:
                content = int(response.headers['Content-Length'])
            except Exception:
                content = (2049 * 1024)

            if content < (2048 * 1024):
                return
            result = response.read(16 * 1024)

        elif output == 'extended':

            try:
                cookie = '; '.join(
                    ['%s=%s' % (i.name, i.value) for i in cookies])
            except Exception:
                pass

            try:
                cookie = cf
            except Exception:
                pass

            content = response.headers
            response_code = str(response.code)
            result = response.read(5242880)

            if not as_bytes:

                result = six.ensure_text(result, errors='ignore')

            return result, response_code, headers, content, cookie

        elif output == 'geturl':

            result = response.geturl()

        elif output == 'headers':

            content = response.headers

            if close:
                response.close()

            return content

        elif output == 'file_size':

            try:
                content = int(response.headers['Content-Length'])
            except Exception:
                content = '0'

            response.close()

            return content

        elif output == 'json':

            content = json.loads(response.read(5242880))

            response.close()

            return content

        else:

            if limit == '0':
                result = response.read(224 * 1024)
            elif limit is not None:
                if isinstance(limit, int):
                    result = response.read(limit * 1024)
                else:
                    result = response.read(int(limit) * 1024)
            else:
                result = response.read(5242880)

        if close is True:
            response.close()

        if not as_bytes:

            result = six.ensure_text(result, errors='ignore')

        return result

    except:

        log_utils.log('Client request failed on url: ' + url + ' | Reason', 1)

        return
Example No. 21
0
            def redirect_request(self, req, fp, code, msg, headers, newurl):
                newreq = urllib2.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl)
                newreq.get_method = lambda: req.get_method()
                return newreq
        exported_proxies = export_proxies(d)

        handlers = [FixedHTTPRedirectHandler, HTTPMethodFallback]
        if exported_proxies:
            handlers.append(urllib2.ProxyHandler())
        handlers.append(CacheHTTPHandler())
        # XXX: Since Python 2.7.9 ssl cert validation is enabled by default
        # see PEP-0476, this causes verification errors on some https servers
        # so disable by default.
        import ssl
        if hasattr(ssl, '_create_unverified_context'):
            handlers.append(urllib2.HTTPSHandler(context=ssl._create_unverified_context()))
        opener = urllib2.build_opener(*handlers)

        try:
            uri = ud.url.split(";")[0]
            r = urllib2.Request(uri)
            r.get_method = lambda: "HEAD"
            opener.open(r)
        except urllib2.URLError as e:
            # debug for now to avoid spamming the logs in e.g. remote sstate searches
            logger.debug(2, "checkstatus() urlopen failed: %s" % e)
            return False
        return True
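
The fragment above probes a URL with a HEAD request so no response body is transferred. A minimal standalone sketch of the same idea under plain urllib2; the function name and URL are hypothetical:

import urllib2

def url_is_reachable(url, timeout=10):
    req = urllib2.Request(url)
    req.get_method = lambda: 'HEAD'    # ask for headers only, no body
    try:
        urllib2.urlopen(req, timeout=timeout)
        return True
    except urllib2.URLError:
        return False
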

    def _parse_path(self, regex, s):
        """
Example No. 22
0
HeaderList = ["Server", "Via", "X-Powered-By"]
Pattern = re.compile('apache|JSP Engine|jetty|php|ssl', re.IGNORECASE)
Methods = ['post', 'get', 'put', 'trace']
Headers = {
    'User-Agent':
    'Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1667.0 Safari/537.36'
}

############# URLLIB2 #############################
# proxy = urllib2.ProxyHandler({'https': '127.0.0.1:8080'})
ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE
# opener = urllib2.build_opener(urllib2.HTTPSHandler(context=ctx))
opener = urllib2.build_opener(urllib2.HTTPSHandler(context=ctx))
opener.addheaders = [(
    'User-Agent',
    'Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1667.0 Safari/537.36'
), ('Accept-Encoding', 'gzip, deflate')]
httplib.HTTPSConnection._http_vsn = 10  # force HTTP/1.0 on https connections
############# URLLIB2 #############################


def ChangeMethod(Link):
    Status = StatusCode = ""
    try:
        if "http" not in Link:
            Link = "https://" + Link + "/"
        for Method in Methods:
            Request = getattr(requests, Method)
Example No. 23
0
import urllib
import json
import cookielib
import urllib2
from copy import deepcopy
from time import time

cookies = cookielib.LWPCookieJar()
handlers = [
    urllib2.HTTPHandler(),
    urllib2.HTTPSHandler(),
    urllib2.HTTPCookieProcessor(cookies)
]
opener = urllib2.build_opener(*handlers)


def fetch(uri):
    req = urllib2.Request(uri)
    return opener.open(req)


def dump():
    for cookie in cookies:
        print cookie.name, cookie.value
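
A hedged usage sketch of the two helpers above; the URL is a placeholder:

# Hypothetical: fetch a page through the shared opener, then list the
# cookies the server set into the module-level jar.
response = fetch('http://example.com/login')
dump()
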


isMax = {
    "aros": False,
    "beach": False,
    "downtown": False,
    "price": False,
Example No. 24
0
File: http.py Project: mpmoers/salt
def query(url,
          method='GET',
          params=None,
          data=None,
          data_file=None,
          header_dict=None,
          header_list=None,
          header_file=None,
          username=None,
          password=None,
          auth=None,
          decode=False,
          decode_type='auto',
          status=False,
          headers=False,
          text=False,
          cookies=None,
          cookie_jar=JARFILE,
          cookie_format='lwp',
          persist_session=False,
          session_cookie_jar=SESSIONJARFILE,
          data_render=False,
          data_renderer=None,
          header_render=False,
          header_renderer=None,
          template_dict=None,
          test=False,
          test_url=None,
          node='minion',
          port=80,
          opts=None,
          requests_lib=None,
          ca_bundle=None,
          verify_ssl=None,
          cert=None,
          text_out=None,
          headers_out=None,
          decode_out=None,
          stream=False,
          handle=False,
          agent=USERAGENT,
          **kwargs):
    '''
    Query a resource, and decode the return data
    '''
    ret = {}

    if opts is None:
        if node == 'master':
            opts = salt.config.master_config(
                os.path.join(syspaths.CONFIG_DIR, 'master'))
        elif node == 'minion':
            opts = salt.config.minion_config(
                os.path.join(syspaths.CONFIG_DIR, 'minion'))
        else:
            opts = {}

    if requests_lib is None:
        requests_lib = opts.get('requests_lib', False)

    if requests_lib is True:
        if HAS_REQUESTS is False:
            ret['error'] = ('http.query has been set to use requests, but the '
                            'requests library does not seem to be installed')
            log.error(ret['error'])
            return ret
    else:
        requests_log = logging.getLogger('requests')
        requests_log.setLevel(logging.WARNING)

    if ca_bundle is None:
        ca_bundle = get_ca_bundle(opts)

    if verify_ssl is None:
        verify_ssl = opts.get('verify_ssl', True)

    if cert is None:
        cert = opts.get('cert', None)

    if data_file is not None:
        data = _render(data_file, data_render, data_renderer, template_dict,
                       opts)

    log.debug('Using {0} Method'.format(method))
    if method == 'POST':
        log.trace('POST Data: {0}'.format(pprint.pformat(data)))

    if header_file is not None:
        header_tpl = _render(header_file, header_render, header_renderer,
                             template_dict, opts)
        if isinstance(header_tpl, dict):
            header_dict = header_tpl
        else:
            header_list = header_tpl.splitlines()

    if header_dict is None:
        header_dict = {}

    if header_list is None:
        header_list = []

    if persist_session is True and HAS_MSGPACK:
        # TODO: This is hackish; it will overwrite the session cookie jar with
        # all cookies from this one connection, rather than behaving like a
        # proper cookie jar. Unfortunately, since session cookies do not
        # contain expirations, they can't be stored in a proper cookie jar.
        if os.path.isfile(session_cookie_jar):
            with salt.utils.fopen(session_cookie_jar, 'r') as fh_:
                session_cookies = msgpack.load(fh_)
            if isinstance(session_cookies, dict):
                header_dict.update(session_cookies)
        else:
            with salt.utils.fopen(session_cookie_jar, 'w') as fh_:
                msgpack.dump('', fh_)

    for header in header_list:
        comps = header.split(':')
        if len(comps) < 2:
            continue
        header_dict[comps[0].strip()] = comps[1].strip()

    if username and password:
        auth = (username, password)
    else:
        auth = None

    if requests_lib is True:
        sess = requests.Session()
        sess.auth = auth
        sess.headers.update(header_dict)
        log.trace('Request Headers: {0}'.format(sess.headers))
        sess_cookies = sess.cookies
        sess.verify = verify_ssl
    else:
        sess_cookies = None

    if cookies is not None:
        if cookie_format == 'mozilla':
            sess_cookies = salt.ext.six.moves.http_cookiejar.MozillaCookieJar(
                cookie_jar)
        else:
            sess_cookies = salt.ext.six.moves.http_cookiejar.LWPCookieJar(
                cookie_jar)
        if not os.path.isfile(cookie_jar):
            sess_cookies.save()
        else:
            sess_cookies.load()

    if agent == USERAGENT:
        agent = '{0} http.query()'.format(agent)
    header_dict['User-agent'] = agent

    if test is True:
        if test_url is None:
            return {}
        else:
            url = test_url
            ret['test'] = True

    if requests_lib is True:
        req_kwargs = {}
        if stream is True:
            if requests.__version__[0] == '0':
                # 'stream' was called 'prefetch' before 1.0, with flipped meaning
                req_kwargs['prefetch'] = False
            else:
                req_kwargs['stream'] = True

        # Client-side cert handling
        if cert is not None:
            if isinstance(cert, six.string_types):
                if os.path.exists(cert):
                    req_kwargs['cert'] = cert
            elif isinstance(cert, tuple):
                if os.path.exists(cert[0]) and os.path.exists(cert[1]):
                    req_kwargs['cert'] = cert
            else:
                log.error(
                    'The client-side certificate path that was passed is '
                    'not valid: {0}'.format(cert))

        result = sess.request(method,
                              url,
                              params=params,
                              data=data,
                              **req_kwargs)
        result.raise_for_status()
        if stream is True or handle is True:
            return {'handle': result}

        result_status_code = result.status_code
        result_headers = result.headers
        result_text = result.text
        result_cookies = result.cookies
    else:
        request = urllib2.Request(url, data)
        handlers = [
            urllib2.HTTPHandler,
            urllib2.HTTPCookieProcessor(sess_cookies)
        ]

        if url.startswith('https') or port == 443:
            if not HAS_MATCHHOSTNAME:
                log.warn(
                    ('match_hostname() not available, SSL hostname checking '
                     'not available. THIS CONNECTION MAY NOT BE SECURE!'))
            elif verify_ssl is False:
                log.warn(('SSL certificate verification has been explicitly '
                          'disabled. THIS CONNECTION MAY NOT BE SECURE!'))
            else:
                hostname = request.get_host()
                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                sock.connect((hostname, 443))
                sockwrap = ssl.wrap_socket(sock,
                                           ca_certs=ca_bundle,
                                           cert_reqs=ssl.CERT_REQUIRED)
                try:
                    match_hostname(sockwrap.getpeercert(), hostname)
                except CertificateError as exc:
                    ret['error'] = ('The certificate was invalid. '
                                    'Error returned was: {0}'.format(
                                        pprint.pformat(exc)))
                    return ret

                # Client-side cert handling
                if cert is not None:
                    cert_chain = None
                    if isinstance(cert, six.string_types):
                        if os.path.exists(cert):
                            cert_chain = (cert,)  # one-element tuple, not bare parens
                    elif isinstance(cert, tuple):
                        if os.path.exists(cert[0]) and os.path.exists(cert[1]):
                            cert_chain = cert
                    else:
                        log.error('The client-side certificate path that was '
                                  'passed is not valid: {0}'.format(cert))
                        return
                    if hasattr(ssl, 'SSLContext'):
                        # Python >= 2.7.9: load_cert_chain must be called on
                        # a context instance, not on the class
                        context = ssl.create_default_context()
                        context.load_cert_chain(*cert_chain)
                        handlers.append(urllib2.HTTPSHandler(context=context))  # pylint: disable=E1123
                    else:
                        # Python < 2.7.9
                        cert_kwargs = {
                            'host': request.get_host(),
                            'port': port,
                            'cert_file': cert_chain[0]
                        }
                        if len(cert_chain) > 1:
                            cert_kwargs['key_file'] = cert_chain[1]
                        handlers[
                            0] = salt.ext.six.moves.http_client.HTTPSConnection(
                                **cert_kwargs)

        opener = urllib2.build_opener(*handlers)
        for header in header_dict:
            request.add_header(header, header_dict[header])
        request.get_method = lambda: method
        result = opener.open(request)
        if stream is True or handle is True:
            return {'handle': result}

        result_status_code = result.code
        result_headers = result.headers.headers
        result_text = result.read()

    if isinstance(result_headers, list):
        result_headers_dict = {}
        for header in result_headers:
            comps = header.split(':')
            result_headers_dict[comps[0].strip()] = ':'.join(comps[1:]).strip()
        result_headers = result_headers_dict

    log.debug('Response Status Code: {0}'.format(result_status_code))
    log.trace('Response Headers: {0}'.format(result_headers))
    log.trace('Response Cookies: {0}'.format(sess_cookies))
    try:
        log.trace('Response Text: {0}'.format(result_text))
    except UnicodeEncodeError as exc:
        log.trace(
            ('Cannot Trace Log Response Text: {0}. This may be due to '
             'incompatibilities between requests and logging.').format(exc))

    if text_out is not None and os.path.exists(text_out):
        with salt.utils.fopen(text_out, 'w') as tof:
            tof.write(result_text)

    if headers_out is not None and os.path.exists(headers_out):
        with salt.utils.fopen(headers_out, 'w') as hof:
            hof.write(result_headers)

    if cookies is not None:
        sess_cookies.save()

    if persist_session is True and HAS_MSGPACK:
        # TODO: See persist_session above
        if 'set-cookie' in result_headers:
            with salt.utils.fopen(session_cookie_jar, 'w') as fh_:
                session_cookies = result_headers.get('set-cookie', None)
                if session_cookies is not None:
                    msgpack.dump({'Cookie': session_cookies}, fh_)
                else:
                    msgpack.dump('', fh_)

    if status is True:
        ret['status'] = result_status_code

    if headers is True:
        ret['headers'] = result_headers

    if decode is True:
        if decode_type == 'auto':
            content_type = result_headers.get('content-type',
                                              'application/json')
            if 'xml' in content_type:
                decode_type = 'xml'
            elif 'json' in content_type:
                decode_type = 'json'
            else:
                decode_type = 'plain'

        valid_decodes = ('json', 'xml', 'plain')
        if decode_type not in valid_decodes:
            ret['error'] = ('Invalid decode_type specified. '
                            'Valid decode types are: {0}'.format(
                                pprint.pformat(valid_decodes)))
            log.error(ret['error'])
            return ret

        if decode_type == 'json':
            ret['dict'] = json.loads(result_text)
        elif decode_type == 'xml':
            ret['dict'] = []
            items = ET.fromstring(result_text)
            for item in items:
                ret['dict'].append(xml.to_dict(item))
        else:
            text = True

        if decode_out and os.path.exists(decode_out):
            with salt.utils.fopen(decode_out, 'w') as dof:
                dof.write(result_text)

    if text is True:
        ret['text'] = result_text

    return ret
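
# Usage sketch (assumed): the function above follows the shape of Salt's
# http.query; the name `query` and these keyword arguments are assumptions,
# not shown in the fragment.
# ret = query('https://example.com/api', method='GET', status=True, decode=True)
# ret['status']  -> HTTP status code
# ret['dict']    -> decoded JSON/XML body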
Esempio n. 25
0
File: c1.py Project: haropy/python
urllib2.install_opener(opener)

# 4. Using the HTTP PUT and DELETE methods

import urllib2

uri = ''
request = urllib2.Request(uri, data=data)  # 'data' is the request body, defined elsewhere
request.get_method = lambda: 'PUT'  # or 'DELETE'
response = urllib2.urlopen(request)
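# urllib2.Request has no method/verb argument, so overriding get_method is
# the standard trick for sending verbs other than GET and POST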

# 5. Using DebugLog
import urllib2

httpHandler = urllib2.HTTPHandler(debuglevel=1)
httpsHandler = urllib2.HTTPSHandler(debuglevel=1)
opener = urllib2.build_opener(httpHandler, httpsHandler)
urllib2.install_opener(opener)
response = urllib2.urlopen('http://www.baidu.com')
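# with debuglevel=1 the handlers print the raw HTTP exchange to stdout,
# e.g. the "send:" request line and "reply:" status line for each call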

# 1.URLError
import urllib2

request = urllib2.Request('http://www.xxxxx.com')
try:
    urllib2.urlopen(request)
except urllib2.URLError as e:
    print(e.reason)
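# note: urllib2.HTTPError is a subclass of URLError, so an HTTP status
# error also lands here; hasattr(e, 'code') distinguishes the two cases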

Esempio n. 26
0
# coding:utf-8
import sys
import ssl
import cookielib
import urllib2

# 'config' is assumed to be loaded elsewhere in the original script
debug_mode = config.get('debug', '') == 'true' or sys.argv[-1] == 'debug'

START_AUTH_TOKEN = 'authenticityToken" value="'
END_AUTH_TOKEN = '"'

RESTAURANT_ID_FOOTER_KEY = 'Restaurant Id'
SET_LEAF_ID_FOOTER_KEY = 'Set Leaf Id'

context = ssl.create_default_context()
context.check_hostname = False
context.verify_mode = ssl.CERT_NONE

cookieJar = cookielib.CookieJar()

handlers = [
    urllib2.HTTPSHandler(context=context),
    urllib2.HTTPCookieProcessor(cookieJar)
]
opener = urllib2.build_opener(*handlers)
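
# The functions below call an http() helper that the original snippet does
# not show. A minimal sketch consistent with the opener above; BASE_URL is
# an assumption:
BASE_URL = 'https://example.com'

def http(path, data=None):
    # route requests through the shared opener so the session cookie persists
    return opener.open(BASE_URL + path, data).read()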


def login_and_get_auth_token():
    auth_token = login_page()
    auth(auth_token)
    return auth_token


def login_page():
    try:
        body = http('/login')
    except:
        return None
    # minimal completion (the original is truncated here): extract the
    # authenticity token using the markers defined above
    start = body.find(START_AUTH_TOKEN) + len(START_AUTH_TOKEN)
    return body[start:body.find(END_AUTH_TOKEN, start)]
Esempio n. 27
0
import re
from bs4 import BeautifulSoup
import cookielib
import urllib
import urllib2
from captcha4 import captcha_to_string
import pickle
import pdb
# init
cookies = cookielib.CookieJar()

opener = urllib2.build_opener(urllib2.HTTPRedirectHandler(),
                              urllib2.HTTPHandler(debuglevel=0),
                              urllib2.HTTPSHandler(debuglevel=0),
                              urllib2.HTTPCookieProcessor(cookies))
# reslove captcha
res_dict = pickle.load(open('res_dict.p', 'rb'))
key = 'some string not from captcha'
print('resolving captcha')
while key not in res_dict:
    login_url = 'http://54.69.105.130/36e294b1d0dbe06a0cf626221ed54fc0/login.php'
    response = opener.open(login_url)
    captcha_url = "http://54.69.105.130/simple-php-captcha.php?_CAPTCHA&t=0.76120300+1419259319"
    f = opener.open(captcha_url)
    img = f.read()
    with open('out.png', 'wb') as img_file:
        img_file.write(img)  # ensure the image is flushed before the solver reads it
    key = captcha_to_string('out.png')

captcha_str = res_dict[key]

values = {
    # login form fields (the original example is truncated here)
}
Esempio n. 28
0
def install_debug_handler():
    opener = urllib2.build_opener(urllib2.HTTPHandler(debuglevel=1),
                                  urllib2.HTTPSHandler(debuglevel=1))
    urllib2.install_opener(opener)
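
# usage sketch: once installed, every urllib2.urlopen call echoes the raw
# request/response headers, which is handy for one-off debugging sessions
# install_debug_handler()
# urllib2.urlopen('http://example.com')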
Esempio n. 29
0
    def __test_stream(self, stream_url):
        '''
        Returns True if the stream_url gets a non-failure http status (i.e. <400) back from the server,
        otherwise returns False

        Intended to catch stream urls returned by resolvers that would fail to playback
        '''
        # parse_qsl doesn't work because it splits elements by ';' which can be in a non-quoted UA
        try:
            headers = dict([
                item.split('=')
                for item in (stream_url.split('|')[1]).split('&')
            ])
        except:
            headers = {}
        for header in headers:
            headers[header] = urllib.unquote_plus(headers[header])
        common.logger.log_debug('Setting Headers on UrlOpen: %s' % (headers))

        try:
            import ssl
            ssl_context = ssl.create_default_context()
            ssl_context.check_hostname = False
            ssl_context.verify_mode = ssl.CERT_NONE
            opener = urllib2.build_opener(
                urllib2.HTTPSHandler(context=ssl_context))
            urllib2.install_opener(opener)
        except:
            pass

        try:
            msg = ''
            request = urllib2.Request(stream_url.split('|')[0],
                                      headers=headers)
            #  set urlopen timeout to 15 seconds
            http_code = urllib2.urlopen(request, timeout=15).getcode()
        except urllib2.URLError as e:
            if hasattr(e, 'reason'):
                # treat an unhandled url type as success
                if 'unknown url type' in str(e.reason).lower():
                    return True
                else:
                    msg = e.reason

            if isinstance(e, urllib2.HTTPError):
                http_code = e.code
            else:
                http_code = 600  # synthetic code: URLError without an HTTP status
            if not msg: msg = str(e)
        except Exception as e:
            http_code = 601  # synthetic code: unexpected exception
            msg = str(e)

        # log streams rejected due to test_stream failures; remove this once
        # we are sure the check works reliably
        if int(http_code) >= 400:
            common.logger.log_warning(
                'Stream UrlOpen Failed: Url: %s HTTP Code: %s Msg: %s' %
                (stream_url, http_code, msg))

        return int(http_code) < 400
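
# usage sketch (url is an assumption): resolvers append request headers to
# the stream url after a '|' separator, which __test_stream parses above, e.g.
# self.__test_stream('http://host/stream.m3u8|User-Agent=Mozilla%2F5.0&Referer=http%3A%2F%2Fhost')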
Esempio n. 30
0
def nsidc_icesat2_zarr(ddir,
                       PRODUCTS,
                       RELEASE,
                       VERSIONS,
                       GRANULES,
                       TRACKS,
                       USER='',
                       PASSWORD='',
                       YEARS=None,
                       SUBDIRECTORY=None,
                       AUXILIARY=False,
                       FLATTEN=False,
                       LOG=False,
                       LIST=False,
                       PROCESSES=0,
                       MODE=None,
                       CLOBBER=False):

    #-- check if directory exists and recursively create if not
    if not os.path.exists(ddir):
        os.makedirs(ddir, MODE)

    #-- output of synchronized files
    if LOG:
        #-- format: NSIDC_IceSat-2_sync_2002-04-01.log
        today = time.strftime('%Y-%m-%d', time.localtime())
        LOGFILE = 'NSIDC_IceSat-2_sync_{0}.log'.format(today)
        fid = open(os.path.join(ddir, LOGFILE), 'w')
        print('ICESat-2 Data Sync Log ({0})'.format(today), file=fid)
    else:
        #-- standard output (terminal output)
        fid = sys.stdout

    #-- https://docs.python.org/3/howto/urllib2.html#id5
    #-- create a password manager
    password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
    #-- Add the username and password for NASA Earthdata Login system
    password_mgr.add_password(None, 'https://urs.earthdata.nasa.gov', USER,
                              PASSWORD)
    #-- Encode username/password for request authorization headers
    base64_string = base64.b64encode('{0}:{1}'.format(USER, PASSWORD).encode())
    #-- compile HTML parser for lxml
    parser = lxml.etree.HTMLParser()
    #-- Create cookie jar for storing cookies. This is used to store and return
    #-- the session cookie given to us by the data server (otherwise it will
    #-- just keep sending us back to Earthdata Login to authenticate).
    cookie_jar = CookieJar()
    #-- create "opener" (OpenerDirector instance)
    opener = urllib2.build_opener(
        urllib2.HTTPBasicAuthHandler(password_mgr),
        urllib2.HTTPSHandler(context=ssl.SSLContext()),
        urllib2.HTTPCookieProcessor(cookie_jar))
    #-- add Authorization header to opener
    authorization_header = "Basic {0}".format(base64_string.decode())
    opener.addheaders = [("Authorization", authorization_header)]
    #-- Now all calls to urllib2.urlopen use our opener.
    urllib2.install_opener(opener)
    #-- Make sure not to include the protocol in the URL, or
    #-- HTTPPasswordMgrWithDefaultRealm will be confused.

    #-- remote https server for ICESat-2 Data
    HOST = 'https://n5eil01u.ecs.nsidc.org'
    #-- regular expression operator for finding files of a particular granule
    #-- find ICESat-2 HDF5 files in the subdirectory for product and release
    regex_track = '|'.join(['{0:04d}'.format(T) for T in TRACKS])
    regex_granule = '|'.join(['{0:02d}'.format(G) for G in GRANULES])
    regex_version = '|'.join(['{0:02d}'.format(V) for V in VERSIONS])
    regex_suffix = '(.*?)' if AUXILIARY else '(h5)'
    remote_regex_pattern = (
        r'{0}(-\d{{2}})?_(\d{{4}})(\d{{2}})(\d{{2}})(\d{{2}})'
        r'(\d{{2}})(\d{{2}})_({1})(\d{{2}})({2})_({3})_({4})(.*?).{5}$')

    #-- regular expression operator for finding subdirectories
    if SUBDIRECTORY:
        #-- Sync particular subdirectories for product
        R2 = re.compile('(' + '|'.join(SUBDIRECTORY) + ')', re.VERBOSE)
    elif YEARS:
        #-- Sync particular years for product
        regex_pattern = '|'.join('{0:d}'.format(y) for y in YEARS)
        R2 = re.compile(r'({0}).(\d+).(\d+)'.format(regex_pattern), re.VERBOSE)
    else:
        #-- Sync all available subdirectories for product
        R2 = re.compile(r'(\d+).(\d+).(\d+)', re.VERBOSE)

    #-- build list of remote files, remote modification times and local files
    remote_files = []
    remote_mtimes = []
    local_files = []
    #-- for each ICESat-2 product listed
    for p in PRODUCTS:
        print('PRODUCT={0}'.format(p), file=fid)
        #-- get directories from remote directory (* splat operator)
        remote_directories = ['ATLAS', '{0}.{1}'.format(p, RELEASE)]
        d = posixpath.join(HOST, *remote_directories)
        req = urllib2.Request(url=d)
        #-- compile regular expression operator for product, release and version
        args = (p, regex_track, regex_granule, RELEASE, regex_version,
                regex_suffix)
        R1 = re.compile(remote_regex_pattern.format(*args), re.VERBOSE)
        #-- read and parse request for subdirectories (find column names)
        tree = lxml.etree.parse(urllib2.urlopen(req), parser)
        colnames = tree.xpath('//td[@class="indexcolname"]//a/@href')
        remote_sub = [sd for sd in colnames if R2.match(sd)]
        #-- for each remote subdirectory
        for sd in remote_sub:
            #-- local directory for product and subdirectory
            if FLATTEN:
                local_dir = os.path.expanduser(ddir)
            else:
                local_dir = os.path.join(ddir, '{0}.{1}'.format(p, RELEASE),
                                         sd)
            #-- check if data directory exists and recursively create if not
            if not os.path.exists(local_dir):
                os.makedirs(local_dir, MODE)
            #-- find ICESat-2 data files
            req = urllib2.Request(url=posixpath.join(d, sd))
            #-- read and parse request for remote files (columns and dates)
            tree = lxml.etree.parse(urllib2.urlopen(req), parser)
            colnames = tree.xpath('//td[@class="indexcolname"]//a/@href')
            collastmod = tree.xpath('//td[@class="indexcollastmod"]/text()')
            #-- find matching files (for granule, release, version, track)
            remote_file_lines = [
                i for i, f in enumerate(colnames) if R1.match(f)
            ]
            #-- build lists of each ICESat-2 data file
            for i in remote_file_lines:
                #-- remote and local versions of the file
                remote_files.append(posixpath.join(d, sd, colnames[i]))
                local_files.append(os.path.join(local_dir, colnames[i]))
                #-- get last modified date and convert into unix time
                LMD = time.strptime(collastmod[i].rstrip(), '%Y-%m-%d %H:%M')
                remote_mtimes.append(calendar.timegm(LMD))
        #-- release the request object
        req = None

    #-- sync in series if PROCESSES = 0
    if (PROCESSES == 0):
        #-- sync each ICESat-2 data file
        for i, remote_file in enumerate(remote_files):
            #-- sync ICESat-2 files with NSIDC server
            output = http_pull_file(remote_file, remote_mtimes[i],
                                    local_files[i], LIST, CLOBBER, MODE)
            #-- print the output string
            print(output, file=fid)
    else:
        #-- sync in parallel with multiprocessing Pool
        pool = mp.Pool(processes=PROCESSES)
        #-- sync each ICESat-2 data file
        output = []
        for i, remote_file in enumerate(remote_files):
            #-- sync ICESat-2 files with NSIDC server
            args = (remote_file, remote_mtimes[i], local_files[i])
            kwds = dict(LIST=LIST, CLOBBER=CLOBBER, MODE=MODE)
            output.append(
                pool.apply_async(multiprocess_sync, args=args, kwds=kwds))
        #-- close the pool: prevents more tasks from being submitted
        pool.close()
        #-- wait for the submitted jobs to finish
        pool.join()
        #-- print the output string
        for out in output:
            print(out.get(), file=fid)

    #-- close log file and set permissions level to MODE
    if LOG:
        fid.close()
        os.chmod(os.path.join(ddir, LOGFILE), MODE)
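
# usage sketch (argument values are assumptions): mirror ATL06 release 003,
# granules 3 and 4, all tracks, into /data/icesat2 with a sync log
# nsidc_icesat2_zarr('/data/icesat2', ['ATL06'], '003', [1], [3, 4],
#                    range(1, 1388), USER='earthdata_login', PASSWORD='...',
#                    MODE=0o775, LOG=True)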