Example 1
def _doPOST(action=None, extra_headers=None, args=None, url=API_URL, host=HOST):
    body = ACTION_REQUEST_MAPPING.get(action, None)
    if not body:
        print "Unable to find the request data for the action %s" % action
        sys.exit(1)
    body = body % args
    
    headers = {
        'Host': host,
        'Accept-Encoding': 'deflate',
        'Content-Length': str(len(body)),
        'User-Agent': 'Mozilla/5.0 (Windows; U; Windows NT 6.1; pl; rv:1.9.1) Gecko/20090624 Firefox/3.5 (.NET CLR 3.5.30729)',
    }
    if extra_headers:
        headers.update(extra_headers)
    
    request = urllib2.Request(url, body, headers)
    try:
        response = urllib2.urlopen(request)
        cookies = CookieJar()
        cookies.extract_cookies(response, request)
        cookie_handler= urllib2.HTTPCookieProcessor( cookies )
        redirect_handler= urllib2.HTTPRedirectHandler()
        opener = urllib2.build_opener(redirect_handler, cookie_handler)
        resp = opener.open(request)
        return resp.read()
    except urllib2.HTTPError, e:
        print >> sys.stderr, "National Rail servers are having some trouble -", e
        raise
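Most of the examples on this page reduce to the same round trip: make one request, harvest its Set-Cookie headers into a CookieJar, then install the jar in an opener so follow-up requests replay the cookies automatically. A minimal, self-contained sketch of that pattern (the example.com URL is a placeholder):

import urllib2
from cookielib import CookieJar

def open_with_session(url):
    # First request: capture whatever cookies the server sets
    request = urllib2.Request(url)
    response = urllib2.urlopen(request)
    jar = CookieJar()
    jar.extract_cookies(response, request)  # fills the jar from Set-Cookie headers

    # Requests made through this opener send the captured cookies back
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(jar))
    return opener.open(request).read()

page = open_with_session('http://example.com/')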
Example 2
def _doPOST(POST_DATA=LOGIN_POST_DATA, extra_headers=META_HEADERS, args=None, url=LOGIN_URL, cookies=None):
    """
    Log in to Sky and return the cookie jar plus the response body.
    """
    body = ''
    if POST_DATA:
        body = '&'.join([k + '=' + v for k, v in POST_DATA.items()]) % args  # NOTE: values are not URL-encoded here
    
    headers = {
        'Accept-Encoding': 'deflate',
        'Content-Length': str(len(body)),
    }
    if extra_headers:
        headers.update(extra_headers)
    
    request = urllib2.Request(url, body, headers)
    try:
        response = urllib2.urlopen(request)
        if not cookies:
            cookies = CookieJar()
            cookies.extract_cookies(response, request)
        cookie_handler= urllib2.HTTPCookieProcessor(cookies)
        redirect_handler= urllib2.HTTPRedirectHandler()
        opener = urllib2.build_opener(redirect_handler, cookie_handler)
        resp = opener.open(request)
        return cookies, resp.read()
    except urllib2.HTTPError, e:
        print >> sys.stderr, "Sky servers are having some trouble -", e
        raise
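Building the body with '&'.join(k+'='+v ...) leaves keys and values unencoded, so a password containing '&' or '=' corrupts the request. A safer sketch of the body construction using urllib.urlencode (the credentials are placeholders):

import urllib

POST_DATA = {'username': 'user@example.com', 'password': 'p&ss=word'}
body = urllib.urlencode(POST_DATA)
# e.g. 'username=user%40example.com&password=p%26ss%3Dword' (key order may vary)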
Example 3
def request(url, data=None, headers=None, cookies={}, auth=None):
    # Copy the headers so a caller's dict (or a shared default) is never mutated
    headers = dict(headers or {})
    if cookies:
        headers['Cookie'] = '; '.join(urllib.quote(k) + '=' + urllib.quote(v) for (k, v) in cookies.iteritems())
    request = urllib2.Request(url.encode('utf8'), data, headers)
    manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
    if auth:
        manager.add_password(None, request.get_full_url(), auth[0], auth[1])
    opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(manager), urllib2.HTTPDigestAuthHandler(manager))
    response = opener.open(request)
    cj = CookieJar()
    cj.extract_cookies(response, request)
    return HttpResponse(urlparse(response.geturl()), response.read(), response.info().headers, dict((c.name, c.value) for c in cj))
Example 4
def request(url, data=None, headers=None, cookies={}, auth=None):
    # Copy the headers so a caller's dict (or a shared default) is never mutated
    headers = dict(headers or {})
    if cookies:
        headers['Cookie'] = '; '.join(urllib.quote(k) + '=' + urllib.quote(v) for (k, v) in cookies.iteritems())
    request = urllib2.Request(url.encode('utf8'), data, headers)
    manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
    if auth:
        manager.add_password(None, request.get_full_url(), auth[0], auth[1])
    opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(manager), urllib2.HTTPDigestAuthHandler(manager))
    response = opener.open(request)
    cj = CookieJar()
    cj.extract_cookies(response, request)
    return HttpResponse(response.read(), response.info().headers, dict((c.name, c.value) for c in cj))
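A hypothetical call to the request() helper from the two examples above; the URL, cookie, and credentials are placeholders, and HttpResponse is whatever wrapper type the surrounding module defines:

resp = request('http://example.com/private',
               cookies={'session': 'abc123'},  # sent as a Cookie header
               auth=('alice', 'secret'))       # offered for Basic and Digest auth
print resp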
Example 5
def getCurrentUsage(username, password):
    url = "https://cyberstore.tpg.com.au/your_account/index.php?function=checkaccountusage"

    values = {'check_username': username, 'password': password}
    data = urllib.urlencode(values)
    request = urllib2.Request(url, data)

    try:
        response = urllib2.urlopen(request)
    except Exception:
        print("ERROR: Could not retrieve TPG website...")
        raise

    cookies = CookieJar()
    cookies.extract_cookies(response, request)
    cookie_handler = urllib2.HTTPCookieProcessor(cookies)
    redirect_handler = urllib2.HTTPRedirectHandler()
    opener = urllib2.build_opener(redirect_handler,cookie_handler)

    try:
        response = opener.open(request)
    except Exception:
        print("ERROR: Could not retrieve account usage website...")
        raise

    the_page = response.read()


    # For accounts that count upload and download
    found = re.search('(<BR>Peak\ Downloads\ used:\ )(.+)( MBPeak\ Uploads\ used:\ )(.+)( MBPeak Total used: )(.+)( MB<br>Off-Peak Downloads used: )(.+)( MB<br>Off-Peak Uploads used: )(.+)( MBOff-Peak Total used: )(.+)( MB</td>)', the_page)
    if found:
        onpeak_downloads_used = found.group(2)
        onpeak_uploads_used = found.group(4)
        onpeak_used = found.group(6)
        offpeak_downloads_used = found.group(8)
        offpeak_uploads_used = found.group(10)
        offpeak_used = found.group(12)
        return float(onpeak_used), float(offpeak_used)

    # For accounts that only count download
    found = re.search('(<BR>Peak\ Downloads\ used:\ )(.+)( MB<br>Off-Peak Downloads used: )(.+)( MB</td>)', the_page)
    if found:
        onpeak_used = found.group(2)
        offpeak_used = found.group(4)
        return float(onpeak_used), float(offpeak_used)

    print("ERROR: Could not find quota information in returned site. Check login details.")
    #print(the_page)
    raise RuntimeError("Could not find quota information in returned page")
Example 6
def get_country_index():
    for country in range(1, 999):
        jar = CookieJar()
        req = Request(URL_SEARCH, urlencode({'country': country}))
        res = urlopen(req)
        jar.extract_cookies(res, req)
        for page in count(1):
            req = Request(URL % (country, page))
            jar.add_cookie_header(req)
            doc = html.parse(urlopen(req))
            anchors = list(doc.findall('//table[@id="searchResultsTable"]/tbody//a'))
            for a in anchors:
                get_entry(urljoin(URL, a.get('href')))
            if doc.find('//span[@class="pagelinks"]/a/img[@alt="Next"]') is None:
                break
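Example 6 uses the manual counterpart of HTTPCookieProcessor: extract_cookies() reads Set-Cookie headers out of a response into the jar, and add_cookie_header() writes the matching Cookie header onto a later request. A minimal sketch of that pairing (URLs are placeholders):

from urllib2 import Request, urlopen
from cookielib import CookieJar

jar = CookieJar()
req = Request('http://example.com/search')
res = urlopen(req)
jar.extract_cookies(res, req)        # response -> jar

follow_up = Request('http://example.com/search?page=2')
jar.add_cookie_header(follow_up)     # jar -> request (adds the Cookie header)
print follow_up.get_header('Cookie')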
Example 7
def qq_friends(request):
    for k in request.POST:
        print '%s : %s' % (k, request.POST[k])
    verifysession = request.COOKIES['verifysession']
    print verifysession
    headers = {'Cookie':'''verifysession=%s''' % verifysession,
               'Content-Type':'application/x-www-form-urlencoded',
               'Referer':'http://mail.qq.com/',
               'User-Agent':'Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN; rv:1.9.0.1) Gecko/2008070208 Firefox/3.0.1',
               }
    data = urlencode(request.POST)
    login_request = Request('http://%s.mail.qq.com/cgi-bin/login?sid=0,2,zh_CN' % server_no, data, headers)
    result = urlopen(login_request)
    content = result.read()
    login_error = login_error_re.search(content)
    if login_error:
        error_no = login_error.group(1) #1:password wrong 2: captcha wrong
        if error_no == '1':
            error_msg = 'password or qq wrong'
        elif error_no == '2':
            error_msg = 'captcha wrong'
        return render_to_response('friends.html', locals())
    sid = login_succ_re.search(content).group(1)
        
    friends_list_headers = {'Referer':'http://mail.qq.com/',
                           'User-Agent':'Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN; rv:1.9.0.1) Gecko/2008070208 Firefox/3.0.1',
                           }
    friends_list_request = Request('http://%s.mail.qq.com/cgi-bin/addr_listall?sid=%s&sorttype=null&category=common' % (server_no, sid), headers = friends_list_headers)
    cj = CookieJar()
    cj.extract_cookies(result, friends_list_request)
    opener = build_opener(HTTPCookieProcessor(cj))
    result = opener.open(friends_list_request)
    grouplist = hacked_friendlist_page_re.search(result.read().decode('gb2312', 'ignore')).group(0)
    soup = BeautifulSoup(grouplist, fromEncoding = 'utf-8')
    grouplist = soup.findAll('li')
    friend_list = {}
    for group in grouplist:
        friend_list[group.a.string] = []
        list_request = Request('http://%s.mail.qq.com%s' % (server_no, group.a['href']), headers = friends_list_headers)
        result = opener.open(list_request)
        body = BeautifulSoup(body_re.search(result.read().decode('gb2312', 'ignore')).group(0), fromEncoding = 'utf-8')
        friends = body.findAll('div', attrs={'class':'M'})
        for friend in friends:
            friend_name = unescape(friend.p.span.contents[1].replace('&nbsp;', '', 1))
            friend_email = friend.p.img['addr']
            friend_list[group.a.string].append((friend_name, friend_email))
    
    return render_to_response('friends.html', locals())
Example 8
def request(url, data=None, headers=None, cookies={}, auth=None, returnHeaders=False):
    # Copy the headers so a caller's dict (or a shared default) is never mutated
    headers = dict(headers or {})
    cookieHeader = cookieencode(cookies)
    if cookieHeader:
        headers['Cookie'] = cookieHeader
    request = urllib2.Request(url, data, headers)
    manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
    if auth:
        manager.add_password(None, request.get_full_url(), auth[0], auth[1])
    opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(manager), urllib2.HTTPDigestAuthHandler(manager))
    response = opener.open(request)
    if returnHeaders:
        cj = CookieJar()
        cj.extract_cookies(response, request)
        return response.read(), response.info().headers, dict((c.name, c.value) for c in cj)
    else:
        return response.read()
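cookieencode() is not shown in this snippet; presumably it serializes a cookie dict into a Cookie header value. A plausible sketch of such a helper, mirroring the inline join used in Examples 3 and 4:

import urllib

def cookieencode(cookies):
    # Assumed helper: {name: value} -> 'name=value; name2=value2'
    return '; '.join(urllib.quote(k) + '=' + urllib.quote(v)
                     for k, v in cookies.iteritems())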
Example 9
def get_country_index():
    for country in range(1, 999):
        jar = CookieJar()
        req = Request(URL_SEARCH, urlencode({'country': country}))
        res = urlopen(req)
        jar.extract_cookies(res, req)
        for page in count(1):
            req = Request(URL % (country, page))
            jar.add_cookie_header(req)
            doc = html.parse(urlopen(req))
            anchors = list(
                doc.findall('//table[@id="searchResultsTable"]/tbody//a'))
            for a in anchors:
                get_entry(urljoin(URL, a.get('href')))
            if doc.find(
                    '//span[@class="pagelinks"]/a/img[@alt="Next"]') is None:
                break
Example 10
    def fetchNetData(self, weekOrYear = True):
        """ Fetch the sensor readings from the internet.

        The username and password come from the instance attributes.

        Keyword arguments:
        weekOrYear --- True if gathering yearly data, False for weekly data."""

        url = 'http://meteoroleg.upc.es/dexserver/j_spring_security_check'
        if weekOrYear: #Yearly
            urlTemp = 'http://meteoroleg.upc.es/dexserver/report-results.htm?6578706f7274=1&d-49653-e=1&queryId=83'
            urlHum = 'http://meteoroleg.upc.es/dexserver/report-results.htm?6578706f7274=1&d-49653-e=1&queryId=84'
            urlLum = 'http://meteoroleg.upc.es/dexserver/report-results.htm?6578706f7274=1&d-49653-e=1&queryId=85'
        else: #weekly
            urlTemp = 'http://meteoroleg.upc.es/dexserver/report-results.htm?6578706f7274=1&d-49653-e=1&queryId=87'
            urlHum = 'http://meteoroleg.upc.es/dexserver/report-results.htm?6578706f7274=1&d-49653-e=1&queryId=89'
            urlLum = 'http://meteoroleg.upc.es/dexserver/report-results.htm?6578706f7274=1&d-49653-e=1&queryId=91'
        login = { 'j_username': self.username , 'j_password': self.password }
        headers = {'User-Agent': 'Mozilla/5.0 (X11; U; Linux i686; en-US)'}
        loginFormData = urllib.urlencode(login)

        req = urllib2.Request(url, loginFormData, headers)
        resp = urllib2.urlopen(req)

        cookies = CookieJar()
        cookies.extract_cookies(resp, req)

        cookie_handler = urllib2.HTTPCookieProcessor(cookies)
        redirect_handler = urllib2.HTTPRedirectHandler()
        opener = urllib2.build_opener(redirect_handler, cookie_handler)
        #Making the initial connection for the login
        opener.open(req)
        reqTemp =  urllib2.Request(urlTemp, headers = headers)
        reqHum =  urllib2.Request(urlHum, headers = headers)
        reqLum =  urllib2.Request(urlLum, headers = headers)
        log(self.logger, logging.INFO, 'Fetching temperature data from %s', urlTemp)
        respTemp = opener.open(reqTemp)
        log(self.logger, logging.INFO, 'Fetching humidity data from %s', urlHum)
        respHum = opener.open(reqHum)
        log(self.logger, logging.INFO, 'Fetching luminosity data from %s', urlLum)
        respLum = opener.open(reqLum)
        self.l.extend(respTemp)
        self.l.extend(respHum)
        self.l.extend(respLum)
Example 11
class v12HTTPAgent(object):
    
    def __init__(self):
        self.cookies = CookieJar()

    def inspect_cookies(self, response):
        self.cookies.extract_cookies(response, self._request)
        return response

    def request(self, method, url, headers, body=None):
        self._request = Request(method=method, url=url, headers=headers,
                                        cookies=self.cookies, body=body)
        factory = v12HTTPClientFactory(self._request)
        if factory.scheme == 'https':
            reactor.connectSSL(self._request.host, 443, factory, ClientContextFactory())
        else:
            reactor.connectTCP(self._request.host, 80, factory)

        factory.deferred.addCallback(self.inspect_cookies)
        return factory.deferred
Example 12
def request(url,
            data=None,
            headers=None,
            cookies={},
            auth=None,
            returnHeaders=False):
    # Copy the headers so a caller's dict (or a shared default) is never mutated
    headers = dict(headers or {})
    cookieHeader = cookieencode(cookies)
    if cookieHeader:
        headers['Cookie'] = cookieHeader
    request = urllib2.Request(url, data, headers)
    manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
    if auth:
        manager.add_password(None, request.get_full_url(), auth[0], auth[1])
    opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(manager),
                                  urllib2.HTTPDigestAuthHandler(manager))
    response = opener.open(request)
    if returnHeaders:
        cj = CookieJar()
        cj.extract_cookies(response, request)
        return response.read(), response.info().headers, dict(
            (c.name, c.value) for c in cj)
    else:
        return response.read()
Example 13
def reddit_auth(username, passwd):
    """
    Using a User's Reddit creds, see if Reddit.com auth
    succeeds, and return True or False.

    See: http://code.reddit.com/wiki/API#CorrectLogin
    """
    data = urllib.urlencode({'user':username, 'passwd': passwd})
    headers = {'User-Agent': 'Mozilla/5.0 Seddit.com -- Contact [email protected]'}
    request = urllib2.Request(REDDIT_AUTH_URL, data, headers)
    response = urllib2.urlopen(request)
    cookies = CookieJar()
    cookies.extract_cookies(response, request)  # fills the jar in place; returns None
    cookie_names = [cookie.name for cookie in cookies]
    return LOGIN_SUCCESS_COOKIE_NAME in cookie_names
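A hypothetical usage sketch; the credentials are placeholders, and REDDIT_AUTH_URL and LOGIN_SUCCESS_COOKIE_NAME are module constants not shown in the snippet:

if reddit_auth('example_user', 'example_password'):
    print "Reddit credentials are valid"
else:
    print "Login failed"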
Example 14
class SinaImg():
    def __init__(self):
        self.cookies = None
        self.referer = None
        self.url_login_prepare = 'http://login.weibo.cn/login/'
        self.url_img_add = 'http://weibo.cn/album/photo/add'
        self.url_img_doadd = 'http://weibo.cn/album/photo/doadd'
        self.headers = {
            'Accept': '*/*',
            'Accept-Encoding': 'gzip, deflate',
            'Accept-Language': 'en-US,en;q=0.5',
            'User-Agent':
            'Mozilla/5.0 (Windows NT 5.1; rv:19.0) Gecko/20100101 Firefox/19.0'
        }

    def decode_resp(self, resp):
        data = resp.read()
        data_encoding = resp.headers.get('Content-Encoding', None)
        if data_encoding == 'gzip':
            import StringIO
            import gzip
            tmp = gzip.GzipFile(fileobj=StringIO.StringIO(data), mode='rb')
            return tmp.read()
        elif data_encoding == 'deflate':
            import zlib
            return zlib.decompressobj(-zlib.MAX_WBITS).decompress(data)
        return data

    def socket_resp(self, url=None, usecookies=False, **kwargs):
        fileds = None
        if 'fileds' in kwargs:
            body = kwargs.pop('fileds')
            fileds = '&'.join([k+'='+v for k, v in body.items()])

        request = urllib2.Request(url=url, data=fileds, headers=self.headers)

        if 'headers' in kwargs:
            extra_headers = kwargs.pop('headers')
            if hasattr(extra_headers, 'items'):
                extra_headers = extra_headers.items()
            else:
                try:
                    if len(extra_headers) and not isinstance(
                        extra_headers[0], tuple
                    ):
                        raise TypeError
                except TypeError:
                    L.error('#'*15 + ' extra_headers type error')
            for k, v in extra_headers:
                request.add_header(k, v)

        if 'cookies' in kwargs:
            self.cookies = kwargs.pop('cookies')

        resp = None
        if usecookies:
            try:
                if not self.cookies:
                    cookies_request = urllib2.Request(
                        url=url, headers=self.headers
                    )
                    response = urllib2.urlopen(cookies_request)
                    self.cookies = CookieJar()
                    self.cookies.extract_cookies(response, cookies_request)
                cookie_handler = urllib2.HTTPCookieProcessor(self.cookies)
                redirect_handler = urllib2.HTTPRedirectHandler()
                opener = urllib2.build_opener(redirect_handler, cookie_handler)
                resp = opener.open(request)
            except urllib2.HTTPError as e:
                L.error('%s %s' % (e.code, url))
                return None
        else:
            opener = urllib2.build_opener()
            try:
                resp = opener.open(request)
            except urllib2.HTTPError as e:
                L.error('%s %s' % (e.code, url))
                return None
        # Returning from the old finally blocks swallowed exceptions and could
        # call decode_resp(None) after an HTTPError; decode only on success.
        return self.decode_resp(resp)

    def login_prepare(self, raw):
        form_action = None
        filed_password = None
        filed_vk = None
        filed_backURL = None
        filed_backTitle = None
        filed_submit = None

        pattern = re.compile('form action="([^"]*)"')
        if pattern.search(raw):
            form_action = pattern.search(raw).group(1)

        pattern = re.compile('password" name="([^"]*)"')
        if pattern.search(raw):
            filed_password = pattern.search(raw).group(1)

        pattern = re.compile('name="vk" value="([^"]*)"')
        if pattern.search(raw):
            filed_vk = pattern.search(raw).group(1)

        pattern = re.compile('name="backURL" value="([^"]*)"')
        if pattern.search(raw):
            filed_backURL = pattern.search(raw).group(1)

        pattern = re.compile('name="backTitle" value="([^"]*)"')
        if pattern.search(raw):
            filed_backTitle = pattern.search(raw).group(1)

        pattern = re.compile('name="submit" value="([^"]*)"')
        if pattern.search(raw):
            filed_submit = pattern.search(raw).group(1)

        fileds = {
            'form_action': form_action,
            'password': filed_password,
            'vk': filed_vk,
            'backURL': filed_backURL,
            'backTitle': filed_backTitle,
            'submit': filed_submit
        }
        return fileds

    def login(self):
        raw_login = self.socket_resp(self.url_login_prepare, True)
        fileds_pre = self.login_prepare(raw_login)

        url = self.url_login_prepare + fileds_pre.get('form_action')
        #url = HTMLParser.HTMLParser().unescape(
        #    url_login_prepare + fileds_pre.get('form_action')
        #)
        headers = {'Referer': self.url_login_prepare}
        fileds = {
            'mobile': '*****@*****.**',
            '%s' % fileds_pre.get('password'): 'lovelele',
            'remember': 'on',
            'backURL': fileds_pre.get('backURL'),
            'backTitle': fileds_pre.get('backTitle'),
            'tryCount': '',
            'vk': fileds_pre.get('vk'),
            'submit': fileds_pre.get('submit')
        }
        resp = self.socket_resp(url, True, fileds=fileds, headers=headers,
                                cookies=self.cookies)
    def img_add_prepare(self, raw):
        filed_album_id = None
        filed_upload = None
        filed_rl = None

        pattern = re.compile('option value="([^"]*)"')
        if pattern.search(raw):
            filed_album_id = pattern.search(raw).group(1)

        pattern = re.compile('name="upload" value="([^"]*)"')
        if pattern.search(raw):
            filed_upload = pattern.search(raw).group(1)

        pattern = re.compile('name="rl" value="([^"]*)"')
        if pattern.search(raw):
            filed_rl = pattern.search(raw).group(1)

        fileds = {
            'album_id': filed_album_id,
            'upload': filed_upload,
            'rl': filed_rl
        }
        return fileds

    def get_filename(self, file_path):
        return os.path.basename(file_path)

    def get_filetype(self, file_path):
        return os.path.splitext(file_path)[1]

    def get_img_url(self, raw=None):
        '''
        with open('sina_album.html', 'rb') as fh:
            raw = fh.read()
        '''

        img_url = None
        pattern = re.compile('<img src="([^"]*)" alt=\'\'')
        if pattern.search(raw):
            img_url = pattern.search(raw).group(1)
        return img_url

    def get_remote_img(self, url, filename='temp.jpg'):
        #return self.socket_resp(url)
        import utils
        utils.save_binary_file(filename, self.socket_resp(url))
        return filename

    def upload(self, upfile=None):
        if not upfile:
            L.debug('have no img to upload')
            return None

        if upfile.startswith('http://'):
            upfile = self.get_remote_img(upfile)

        raw_img_add = self.socket_resp(self.url_img_add, True)
        fileds_pre = self.img_add_prepare(raw_img_add)

        fileds = {
            'album_id': fileds_pre.get('album_id'),
            #'photo': raw_img,
            'photo': open(upfile, 'rb'),
            'description': '',
            'upload': fileds_pre.get('upload'),
            'rl': fileds_pre.get('rl')
        }

        headers = self.headers.copy()
        headers.update({'Referer': self.url_img_add})

        url = self.url_img_doadd

        request = urllib2.Request(url, None, headers)
        cookie_handler = urllib2.HTTPCookieProcessor(self.cookies)
        redirect_handler = urllib2.HTTPRedirectHandler()
        opener = urllib2.build_opener(
            cookie_handler, redirect_handler,
            MultipartPostHandler.MultipartPostHandler
        )
        resp = opener.open(request, fileds)
        result = self.get_img_url(self.decode_resp(resp))
        L.debug('uploaded image url: %s' % (result,))
        return result

    def upload_batch(self, upfiles=None):
        if not upfiles:
            return None
        result = []
        for upfile in upfiles:
            result.append(self.upload(upfile))
        return result
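upload() relies on MultipartPostHandler, the well-known third-party opener handler that encodes dict data containing open file objects as multipart/form-data. A minimal sketch of that idea in isolation (URL, file name, and field names are placeholders):

import urllib2
import MultipartPostHandler  # third-party module, assumed installed

opener = urllib2.build_opener(MultipartPostHandler.MultipartPostHandler)
params = {'photo': open('temp.jpg', 'rb'), 'description': 'test'}
response = opener.open('http://example.com/upload', params)
print response.read()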
Example 15
class WebClient(Configurable):
    "Convenient HTTP request processing. Proxy is not supported in current version."
    # When a cleanup task is created, the task releases dead connections by this interval
    _default_cleanupinterval = 60
    # Persist this number of connections at most for each host. If all connections are in
    # use, new requests will wait until there are free connections.
    _default_samehostlimit = 20
    # Do not allow multiple requests to the same URL at the same time. If sameurllimit=True,
    # requests to the same URL will always be done sequential.
    _default_sameurllimit = False
    # CA file used to verify HTTPS certificates. To be compatible with older Python versions,
    # the new SSLContext is not enabled currently, so with the default configuration, the
    # certificates are NOT verified. You may configure this to a .pem file in your system,
    # usually /etc/pki/tls/cert.pem in Linux.
    _default_cafile = None
    # When following redirects, if the server redirects too many times, raise an
    # exception and end the process
    _default_redirectlimit = 10
    # Verify the host with the host in certificate
    _default_verifyhost = True

    def __init__(self, allowcookies=False, cookiejar=None):
        '''
        :param allowcookies: Accept and store cookies, automatically use them on further requests
        :param cookiejar: Provide a customized cookiejar instead of the default CookieJar()
        '''
        self._connmap = {}
        self._requesting = set()
        self._hostwaiting = set()
        self._pathwaiting = set()
        self._protocol = Http(False)
        self.allowcookies = allowcookies
        if cookiejar is None:
            self.cookiejar = CookieJar()
        else:
            self.cookiejar = cookiejar
        self._tasks = []

    async def open(self,
                   container,
                   request,
                   ignorewebexception=False,
                   timeout=None,
                   datagen=None,
                   cafile=None,
                   key=None,
                   certificate=None,
                   followredirect=True,
                   autodecompress=False,
                   allowcookies=None):
        '''
        Open http request with a Request object
        
        :param container: a routine container hosting this routine
        :param request: vlcp.utils.webclient.Request object
        :param ignorewebexception: Do not raise exception on Web errors (4xx, 5xx), return a response normally
        :param timeout: timeout on connection and single http request. When following redirect, new request
               does not share the old timeout, which means if timeout=2:
               connect to host: (2s)
               wait for response: (2s)
               response is 302, redirect
               connect to redirected host: (2s)
               wait for response: (2s)
               ...
               
        :param datagen: if the request use a stream as the data parameter, you may provide a routine to generate
                        data for the stream. If the request failed early, this routine is automatically terminated.
                        
        :param cafile: provide a CA file for SSL certification check. If not provided, the SSL connection is NOT verified.
        :param key: provide a key file, for client certification (usually not necessary)
        :param certificate: provide a certificate file, for client certification (usually not necessary)
        :param followredirect: if True (default), automatically follow 3xx redirections
        :param autodecompress: if True, automatically detect Content-Encoding header and decode the body
        :param allowcookies: override default settings to disable the cookies
        '''
        if cafile is None:
            cafile = self.cafile
        if allowcookies is None:
            allowcookies = self.allowcookies
        forcecreate = False
        datagen_routine = None
        if autodecompress:
            if not request.has_header('Accept-Encoding'):
                request.add_header('Accept-Encoding', 'gzip, deflate')
        while True:
            # Find or create a connection
            conn, created = await self._getconnection(
                container, request.host, request.path,
                request.get_type() == 'https', forcecreate, cafile, key,
                certificate, timeout)
            # Send request on conn and wait for reply
            try:
                if allowcookies:
                    self.cookiejar.add_cookie_header(request)
                if isinstance(request.data, bytes):
                    stream = MemoryStream(request.data)
                else:
                    stream = request.data
                if datagen and datagen_routine is None:
                    datagen_routine = container.subroutine(datagen)
                else:
                    datagen_routine = None
                timeout_, result = await container.execute_with_timeout(
                    timeout,
                    self._protocol.request_with_response(
                        container, conn, _bytes(request.host),
                        _bytes(request.path), _bytes(request.method),
                        [(_bytes(k), _bytes(v))
                         for k, v in request.header_items()], stream))
                if timeout_:
                    if datagen_routine:
                        container.terminate(datagen_routine)
                    container.subroutine(
                        self._releaseconnection(conn, request.host,
                                                request.path,
                                                request.get_type() == 'https',
                                                True), False)
                    raise WebException('HTTP request timeout')
                finalresp, _ = result
                resp = Response(request.get_full_url(), finalresp,
                                container.scheduler)
                if allowcookies:
                    self.cookiejar.extract_cookies(resp, request)
                if resp.iserror and not ignorewebexception:
                    try:
                        exc = WebException(resp.fullstatus)
                        if autodecompress and resp.stream:
                            ce = resp.get_header('Content-Encoding', '')
                            if ce.lower() == 'gzip' or ce.lower() == 'x-gzip':
                                resp.stream.getEncoderList().append(
                                    encoders.gzip_decoder())
                            elif ce.lower() == 'deflate':
                                resp.stream.getEncoderList().append(
                                    encoders.deflate_decoder())
                        data = await resp.stream.read(container, 4096)
                        exc.response = resp
                        exc.body = data
                        if datagen_routine:
                            container.terminate(datagen_routine)
                        await resp.shutdown()
                        container.subroutine(
                            self._releaseconnection(
                                conn, request.host, request.path,
                                request.get_type() == 'https', True), False)
                        raise exc
                    finally:
                        resp.close()
                else:
                    try:
                        container.subroutine(
                            self._releaseconnection(
                                conn, request.host, request.path,
                                request.get_type() == 'https', False,
                                finalresp), False)
                        if followredirect and resp.status in (300, 301, 302,
                                                              303, 307, 308):
                            request.redirect(
                                resp,
                                ignorewebexception=ignorewebexception,
                                timeout=timeout,
                                cafile=cafile,
                                key=key,
                                certificate=certificate,
                                followredirect=followredirect,
                                autodecompress=autodecompress,
                                allowcookies=allowcookies)
                            resp.close()
                            continue
                        if autodecompress and resp.stream:
                            ce = resp.get_header('Content-Encoding', '')
                            if ce.lower() == 'gzip' or ce.lower() == 'x-gzip':
                                resp.stream.getEncoderList().append(
                                    encoders.gzip_decoder())
                            elif ce.lower() == 'deflate':
                                resp.stream.getEncoderList().append(
                                    encoders.deflate_decoder())
                        return resp
                    except:
                        resp.close()
                        raise
            except HttpConnectionClosedException:
                await self._releaseconnection(conn, request.host, request.path,
                                              request.get_type() == 'https',
                                              False)
                if not created:
                    # Retry on a newly created connection
                    forcecreate = True
                    continue
                else:
                    if datagen_routine:
                        container.terminate(datagen_routine)
                    raise
            except Exception as exc:
                await self._releaseconnection(conn, request.host, request.path,
                                              request.get_type() == 'https',
                                              True)
                raise exc
            break

    async def _releaseconnection(self,
                                 connection,
                                 host,
                                 path,
                                 https=False,
                                 forceclose=False,
                                 respevent=None):
        if not host:
            raise ValueError
        if forceclose:
            await connection.shutdown(True)
        if not forceclose and connection.connected and respevent:

            async def releaseconn():
                keepalive = await self._protocol.wait_for_response_end(
                    connection, connection, respevent.connmark, respevent.xid)
                conns = self._connmap[host]
                conns[2] -= 1
                if keepalive:
                    connection.setdaemon(True)
                    conns[1 if https else 0].append(connection)
                else:
                    await connection.shutdown()

            connection.subroutine(releaseconn(), False)
        else:
            conns = self._connmap[host]
            conns[2] -= 1
        if self.sameurllimit:
            self._requesting.remove((host, path, https))
        if (host, path,
                https) in self._pathwaiting or host in self._hostwaiting:
            await connection.wait_for_send(
                WebClientRequestDoneEvent(host, path, https))
            if (host, path, https) in self._pathwaiting:
                self._pathwaiting.remove((host, path, https))
            if host in self._hostwaiting:
                self._hostwaiting.remove(host)

    async def _getconnection(self,
                             container,
                             host,
                             path,
                             https=False,
                             forcecreate=False,
                             cafile=None,
                             key=None,
                             certificate=None,
                             timeout=None):
        if not host:
            raise ValueError
        matcher = WebClientRequestDoneEvent.createMatcher(host, path, https)
        while self.sameurllimit and (host, path, https) in self._requesting:
            self._pathwaiting.add((host, path, https))
            await matcher
        # Lock the path
        if self.sameurllimit:
            self._requesting.add((host, path, https))
        # connmap format: (free, free_ssl, workingcount)
        conns = self._connmap.setdefault(host, [[], [], 0])
        conns[0] = [c for c in conns[0] if c.connected]
        conns[1] = [c for c in conns[1] if c.connected]
        myset = conns[1 if https else 0]
        if not forcecreate and myset:
            # There are free connections, reuse them
            conn = myset.pop()
            conn.setdaemon(False)
            conns[2] += 1
            return (conn, False)
        matcher = WebClientRequestDoneEvent.createMatcher(host)
        while self.samehostlimit and len(conns[0]) + len(
                conns[1]) + conns[2] >= self.samehostlimit:
            if myset:
                # Close a old connection
                conn = myset.pop()
                await conn.shutdown()
            else:
                # Wait for free connections
                self._hostwaiting.add(host)
                await matcher
                conns = self._connmap.setdefault(host, [[], [], 0])
                myset = conns[1 if https else 0]
                if not forcecreate and myset:
                    conn = myset.pop()
                    conn.setdaemon(False)
                    conns[2] += 1
                    return (conn, False)
        # Create new connection
        conns[2] += 1
        conn = Client(
            urlunsplit(('ssl' if https else 'tcp', host, '/', '', '')),
            self._protocol, container.scheduler, key, certificate, cafile)
        if timeout is not None:
            conn.connect_timeout = timeout
        conn.start()
        connected = self._protocol.statematcher(
            conn, HttpConnectionStateEvent.CLIENT_CONNECTED, False)
        notconnected = self._protocol.statematcher(
            conn, HttpConnectionStateEvent.CLIENT_NOTCONNECTED, False)
        _, m = await M_(connected, notconnected)
        if m is notconnected:
            conns[2] -= 1
            await conn.shutdown(True)
            raise IOError('Failed to connect to %r' % (conn.rawurl, ))
        if https and cafile and self.verifyhost:
            try:
                # TODO: check with SSLContext
                hostcheck = re.sub(r':\d+$', '', host)
                if host == conn.socket.remoteaddr[0]:
                    # IP addresses are currently not allowed
                    await conn.shutdown(True)
                    raise CertificateException(
                        'Cannot verify host with IP address')
                match_hostname(conn.socket.getpeercert(False), hostcheck)
            except:
                conns[2] -= 1
                raise
        return (conn, True)

    def cleanup(self, host=None):
        "Cleaning disconnected connections"
        if host is not None:
            conns = self._connmap.get(host)
            if conns is None:
                return
            # cleanup disconnected connections
            conns[0] = [c for c in conns[0] if c.connected]
            conns[1] = [c for c in conns[1] if c.connected]
            if not conns[0] and not conns[1] and not conns[2]:
                del self._connmap[host]
        else:
            hosts = list(self._connmap.keys())
            for h in hosts:
                self.cleanup(h)

    def cleanup_task(self, container, interval=None):
        '''
        If this client object persists for a long time and you are worried about memory leaks,
        create a routine with this method: myclient.cleanup_task(mycontainer, 60).
        But remember that if you have created at least one task, you must call myclient.endtask()
        to completely release the webclient object.
        '''
        if interval is None:
            interval = self.cleanupinterval

        async def task():
            th = container.scheduler.setTimer(interval, interval)
            tm = TimerEvent.createMatcher(th)
            try:
                while True:
                    await tm
                    self.cleanup()
            finally:
                container.scheduler.cancelTimer(th)

        t = container.subroutine(task(), False, daemon=True)
        self._tasks.append(t)
        return t

    async def shutdown(self):
        "Shutdown free connections to release resources"
        for c0, c1, _ in list(self._connmap.values()):
            c0bak = list(c0)
            del c0[:]
            for c in c0bak:
                if c.connected:
                    await c.shutdown()
            c1bak = list(c1)
            del c1[:]
            for c in c1bak:
                if c.connected:
                    await c.shutdown()

    def endtask(self):
        for t in self._tasks:
            t.close()
        del self._tasks[:]

    async def urlopen(self,
                      container,
                      url,
                      data=None,
                      method=None,
                      headers={},
                      rawurl=False,
                      *args,
                      **kwargs):
        '''
        Similar to urllib2.urlopen, but:
        1. is a routine
        2. data can be an instance of vlcp.event.stream.BaseStream, or str/bytes
        3. can specify method
        4. if datagen is not None, it is a routine which writes to <data>. It is automatically terminated if the connection is down.
        5. can also specify key and certificate, for client certification
        6. certificates are verified with CA if provided.
        If there are keep-alived connections, they are automatically reused.
        See open for available arguments
        
        Extra argument:
        
        :param rawurl: if True, assume the url is already url-encoded, do not encode it again.
        '''
        return await self.open(
            container, Request(url, data, method, headers, rawurl=rawurl),
            *args, **kwargs)

    async def manualredirect(self, container, exc, data, datagen=None):
        "If data is a stream, it cannot be used again on redirect. Catch the ManualRedirectException and call a manual redirect with a new stream."
        request = exc.request
        request.data = data
        return await self.open(container,
                               request,
                               datagen=datagen,
                               **exc.kwargs)

    async def urlgetcontent(self,
                            container,
                            url,
                            data=None,
                            method=None,
                            headers={},
                            tostr=False,
                            encoding=None,
                            rawurl=False,
                            *args,
                            **kwargs):
        '''
        In Python 2, bytes is str, so tostr and encoding have no effect.
        In Python 3, bytes are decoded into a unicode str with the given encoding.
        If encoding is not specified, the charset in Content-Type is used if present, or defaults to utf-8.
        See open for available arguments

        :param rawurl: if True, assume the url is already url-encoded, do not encode it again.
        '''
        req = Request(url, data, method, headers, rawurl=rawurl)
        with (await self.open(container, req, *args, **kwargs)) as resp:
            if encoding is None:
                m = Message()
                m.add_header('Content-Type',
                             resp.get_header('Content-Type', 'text/html'))
                encoding = m.get_content_charset('utf-8')
            if not resp.stream:
                content = b''
            else:
                content = await resp.stream.read(container)
            if tostr:
                content = _str(content, encoding)
            return content
Example 16
class CookieTransport(xmlrpclib.Transport):
    '''A subclass of xmlrpclib.Transport that supports cookies.'''
    cookiejar = None
    scheme = 'http'
    
    # Cribbed from xmlrpclib.Transport.send_user_agent 
    def send_cookies(self, connection, cookie_request):
        if self.cookiejar is None:
            self.cookiejar = CookieJar()
        elif self.cookiejar:
            # Let the cookiejar figure out what cookies are appropriate
            self.cookiejar.add_cookie_header(cookie_request)
            # Pull the cookie headers out of the request object...
            cookielist=list()
            for h,v in cookie_request.header_items():
                if h.startswith('Cookie'):
                    cookielist.append([h,v])
            # ...and put them over the connection
            for h,v in cookielist:
                connection.putheader(h,v)
    
    # This is the same request() method from xmlrpclib.Transport,
    # with a couple additions noted below
    def request_with_cookies(self, host, handler, request_body, verbose=0):
        h = self.make_connection(host)
        if verbose:
            h.set_debuglevel(1)

        # ADDED: construct the URL and Request object for proper cookie handling
        request_url = "%s://%s%s" % (self.scheme,host,handler)
        #log.debug("request_url is %s" % request_url)
        cookie_request  = urllib2.Request(request_url) 

        self.send_request(h,handler,request_body)
        self.send_host(h,host) 
        self.send_cookies(h,cookie_request) # ADDED. creates cookiejar if None.
        self.send_user_agent(h)
        self.send_content(h,request_body)

        errcode, errmsg, headers = h.getreply()

        # ADDED: parse headers and get cookies here
        cookie_response = CookieResponse(headers)
        # Okay, extract the cookies from the headers
        self.cookiejar.extract_cookies(cookie_response,cookie_request)
        #log.debug("cookiejar now contains: %s" % self.cookiejar._cookies)
        # And write back any changes
        if hasattr(self.cookiejar,'save'):
            try:
                self.cookiejar.save(self.cookiejar.filename)
            except Exception, e:
                raise
                #log.error("Couldn't write cookiefile %s: %s" % \
                #        (self.cookiejar.filename,str(e)))

        if errcode != 200:
            # When runs here, the HTTPS connection isn't useful any more
            #   before raising an exception to caller
            h.close()

            raise xmlrpclib.ProtocolError(
                host + handler,
                errcode, errmsg,
                headers
                )

        self.verbose = verbose

        try:
            sock = h._conn.sock
        except AttributeError:
            sock = None

        try:
            return self._parse_response(h.getfile(), sock)
        finally:
            h.close()
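CookieResponse is not defined in this snippet. In the recipe this transport is commonly cribbed from, it is a tiny adapter that gives CookieJar.extract_cookies() the urllib2-response interface it expects, namely an info() method returning the headers. A sketch under that assumption:

class CookieResponse:
    '''Fake urllib2 response: wraps httplib headers so that
    CookieJar.extract_cookies() can read their Set-Cookie entries.'''
    def __init__(self, headers):
        self.headers = headers

    def info(self):
        return self.headers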
Example 17
class IpaClientlessTestDriver(object):
    def __init__(self,
                 hostname, domain, password,
                 bind_dn, bind_pw,
                 username='******', insecure=False):
        self.hostname = hostname
        self.domain = domain
        self.password = password
        self.username = username
        self.bind_dn = bind_dn
        self.bind_pw = bind_pw
        self.referer = "https://" + self.hostname + "/ipa"

        self.cj = CookieJar()
        self.ssl_ctx = self._ssl_ctx(insecure)

    def _auth(self, lazy=True):
        if lazy is True and len(self.cj) > 0:
            return 200

        login_url = self.referer + "/session/login_password"

        request = urllib2.Request(login_url)
        request.add_header('referer', self.referer)
        request.add_header('Content-Type', 'application/x-www-form-urlencoded')
        request.add_header('Accept', 'text/plain')

        query_args = {'user': self.username,
                      'password': self.password}
        encoded_args = urllib.urlencode(query_args)

        result = urllib2.urlopen(request,
                                 encoded_args,
                                 context=self.ssl_ctx)
        if result.getcode() == 200:
            self.cj.extract_cookies(result, request)
        return result.getcode()

    def _ssl_ctx(self, insecure):
        ctx = ssl.create_default_context()
        if insecure:
            ctx.check_hostname = False
            ctx.verify_mode = ssl.CERT_NONE
        return ctx

    def _json_request(self, jdata):
        ret = self._auth()
        if ret != 200:
            return ret

        json_url = self.referer + "/session/json"
        request = urllib2.Request(json_url)

        request.add_header('referer', self.referer)
        request.add_header('Content-Type', 'application/json')
        request.add_header('Accept', 'application/json')

        self.cj.add_cookie_header(request)
        result = urllib2.urlopen(request,
                                 jdata,
                                 context=self.ssl_ctx)
        return result.getcode()

    def fetch_cert(self, dest):
        url = "http://" + self.hostname + "/ipa/config/ca.crt"
        self.ca_cert = os.path.join(dest, "ca.crt")
        urllib.urlretrieve(url, self.ca_cert)

    def rm_cert(self):
        os.unlink(self.ca_cert)

    def run_cmd(self, method, params, args={}):
        cmd = json.dumps({"method": method,
                          "params": [params, args],
                          "id": "0"})
        return self._json_request(cmd)
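A hypothetical driver invocation; all host names, DNs, and passwords below are placeholders:

driver = IpaClientlessTestDriver(hostname='ipa.example.test',
                                 domain='example.test',
                                 password='Secret123',
                                 bind_dn='cn=Directory Manager',
                                 bind_pw='Secret123',
                                 insecure=True)  # skip TLS verification for a test server
# _auth() runs lazily on the first command and caches the session
# cookie in driver.cj; later calls replay it via add_cookie_header().
status = driver.run_cmd('user_show', ['admin'])
print status  # HTTP status code, 200 on success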
Example 18
class HttpTransport(Transport):
    """
    Basic HTTP transport implemented using urllib2, providing
    cookies & proxies but no authentication.

    """
    def __init__(self, **kwargs):
        """
        @param kwargs: Keyword arguments.
            - B{proxy} - An HTTP proxy to be specified on requests.
                 The proxy is defined as {protocol:proxy,}
                    - type: I{dict}
                    - default: {}
            - B{timeout} - Set the URL open timeout (seconds).
                    - type: I{float}
                    - default: 90

        """
        Transport.__init__(self)
        Unskin(self.options).update(kwargs)
        self.cookiejar = CookieJar()
        self.proxy = {}
        self.urlopener = None

    def open(self, request):
        try:
            url = self.__get_request_url_for_urllib(request)
            headers = request.headers
            log.debug('opening (%s)', url)
            u2request = urllib2.Request(url, headers=headers)
            self.proxy = self.options.proxy
            return self.u2open(u2request)
        except urllib2.HTTPError as e:
            raise TransportError(str(e), e.code, e.fp)

    def send(self, request):
        url = self.__get_request_url_for_urllib(request)
        msg = request.message
        headers = request.headers
        try:
            u2request = urllib2.Request(url, msg, headers)
            self.addcookies(u2request)
            self.proxy = self.options.proxy
            request.headers.update(u2request.headers)
            log.debug('sending:\n%s', request)
            fp = self.u2open(u2request)
            self.getcookies(fp, u2request)
            headers = fp.headers
            if sys.version_info < (3, 0):
                headers = headers.dict
            reply = Reply(httplib.OK, headers, fp.read())
            log.debug('received:\n%s', reply)
            return reply
        except urllib2.HTTPError as e:
            if e.code not in (httplib.ACCEPTED, httplib.NO_CONTENT):
                raise TransportError(e.msg, e.code, e.fp)

    def addcookies(self, u2request):
        """
        Add cookies in the cookiejar to the request.

        @param u2request: A urllib2 request.
        @rtype: u2request: urllib2.Request.

        """
        self.cookiejar.add_cookie_header(u2request)

    def getcookies(self, fp, u2request):
        """
        Add cookies in the request to the cookiejar.

        @param u2request: A urllib2 request.
        @rtype: u2request: urllib2.Request.

        """
        self.cookiejar.extract_cookies(fp, u2request)

    def u2open(self, u2request):
        """
        Open a connection.

        @param u2request: A urllib2 request.
        @type u2request: urllib2.Request.
        @return: The opened file-like urllib2 object.
        @rtype: fp

        """
        tm = self.options.timeout
        url = self.u2opener()
        if (sys.version_info < (3, 0)) and (self.u2ver() < 2.6):
            socket.setdefaulttimeout(tm)
            return url.open(u2request)
        return url.open(u2request, timeout=tm)

    def u2opener(self):
        """
        Create a urllib opener.

        @return: An opener.
        @rtype: I{OpenerDirector}

        """
        if self.urlopener is None:
            return urllib2.build_opener(*self.u2handlers())
        return self.urlopener

    def u2handlers(self):
        """
        Get a collection of urllib handlers.

        @return: A list of handlers to be installed in the opener.
        @rtype: [Handler,...]

        """
        return [urllib2.ProxyHandler(self.proxy)]

    def u2ver(self):
        """
        Get the major/minor version of the urllib2 lib.

        @return: The urllib2 version.
        @rtype: float

        """
        try:
            part = urllib2.__version__.split('.', 1)
            return float('.'.join(part))
        except Exception as e:
            log.exception(e)
            return 0

    def __deepcopy__(self, memo={}):
        clone = self.__class__()
        p = Unskin(self.options)
        cp = Unskin(clone.options)
        cp.update(p)
        return clone

    @staticmethod
    def __get_request_url_for_urllib(request):
        """
        Returns the given request's URL, properly encoded for use with urllib.

        We expect that the given request object already verified that the URL
        contains ASCII characters only and stored it as a native str value.

        urllib accepts URL information as a native str value and may break
        unexpectedly if given URL information in another format.

        Python 3.x http.client implementation must be given a unicode string
        and not a bytes object and the given string is internally converted to
        a bytes object using an explicitly specified ASCII encoding.

        Python 2.7 httplib implementation expects the URL passed to it to not
        be a unicode string. If it is, then passing it to the underlying
        httplib Request object will cause that object to forcefully convert all
        of its data to unicode, assuming that data contains ASCII data only and
        raising a UnicodeDecodeError exception if it does not (caused by simple
        unicode + string concatenation).

        Python 2.4 httplib implementation does not really care about this as it
        does not use the internal optimization present in the Python 2.7
        implementation causing all the requested data to be converted to
        unicode.

        """
        assert isinstance(request.url, str)
        return request.url
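This transport has the shape of a suds transport, where an instance is passed to the client constructor and its single CookieJar then carries session cookies across SOAP calls. A usage sketch under that assumption (the WSDL URL is a placeholder):

from suds.client import Client

client = Client('http://example.com/service?wsdl',
                transport=HttpTransport(timeout=30))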
Example 19
 def _handleSecuredRedirect(self, response, sslCtx):
     '''Intercept security challenges - these are inferred by checking for a
     302 response with a location header requesting a HTTPS endpoint
     '''
     
     if response.status != httplib.FOUND:
         log.debug('_handleSecuredRedirect: No HTTP redirect found in '
                   'response - passing back to caller')
         return
     
     # Check for redirect location
     authn_redirect_uri = response.getheader('Location')
     if authn_redirect_uri is None:
         log.error('_handleSecuredRedirect: no redirect location set for %r '
                   'response - returning', httplib.FOUND)
         # Let client decide what to do with this response
         return 
     
     # Check the scheme and only follow the redirect here if it is HTTPS
     parsed_authn_redirect_uri = urlparse(authn_redirect_uri)
     if parsed_authn_redirect_uri.scheme != 'https':
         log.info('_handleSecuredRedirect: Non-HTTPS redirect location set '
                  'for %r response - returning', httplib.FOUND)
         return
     
     # Prepare request authentication redirect location
     host, portStr = parsed_authn_redirect_uri.netloc.split(':', 1)
     port = int(portStr)
     authn_redirect_path = self.__class__._make_uri_path(
                                                 parsed_authn_redirect_uri)
     
     # Process cookies from the response passed into this function and set
     # them in the authentication request.  cookielib needs a fake urllib2
     # HTTP request class to interface with.
     response.info = lambda : response.msg
     authn_redirect_req = FakeUrllib2HTTPRequest(parsed_authn_redirect_uri)
     cookie_jar = CookieJar()
     cookie_jar.extract_cookies(response, authn_redirect_req)
     
     authn_redirect_ip_hdrs = authn_redirect_req.get_headers()
     
     # Redirect to HTTPS authentication endpoint uses GET method
     authn_conn = HTTPSConnection(host, port=port, ssl_context=sslCtx)
     
     authn_conn.request('GET', authn_redirect_path, None, 
                        authn_redirect_ip_hdrs)
     
     authn_response = authn_conn.getresponse()
     authn_conn.close()
     
     # Hack to make httplib response urllib2.Response-like
     authn_response.info = lambda : authn_response.msg
     cookie_jar.extract_cookies(authn_response, authn_redirect_req)
     
     if authn_response.status == httplib.FOUND:
         # Get redirect location
         return_uri = authn_response.getheader('Location')
         if return_uri is None:
             log.error('_handleSecuredRedirect: no redirect location set '
                       'for %r response from %r', httplib.FOUND, 
                       authn_redirect_uri)
             # Return the response and let the client decide what to do with
             # it
             return authn_response
         
         # Check URI for HTTP scheme
         parsed_return_uri = urlparse(return_uri)
         if parsed_return_uri.scheme != 'http':
             # Expecting http - don't process but instead return to client
             log.error('_handleSecuredRedirect: return URI %r is not HTTP, '
                       'passing back original response', return_uri)
             return
         
         # Make path
         return_uri_path = self.__class__._make_uri_path(parsed_return_uri)
         
         # Get host and port number
         return_uri_host = parsed_return_uri.hostname
         return_uri_port = parsed_return_uri.port or httplib.HTTP_PORT
         
         # Add any cookies to header
         return_req = FakeUrllib2HTTPRequest(parsed_return_uri)
         cookie_jar.add_cookie_header(return_req)
         return_headers = return_req.get_headers()
         
         # Invoke return URI passing headers            
         return_conn = httplib.HTTPConnection(return_uri_host, 
                                              port=return_uri_port)
         
         return_conn.request('GET', return_uri_path, None, return_headers)
         return_uri_res = return_conn.getresponse()
         return_conn.close()
         
         return return_uri_res
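
FakeUrllib2HTTPRequest is used above but not defined in this example. A minimal sketch of such an adapter, assuming it only needs the subset of the urllib2.Request interface that cookielib actually calls, plus the get_headers() accessor used above:

class FakeUrllib2HTTPRequest(object):
    # Hypothetical adapter: just enough of the urllib2.Request interface
    # for cookielib's add_cookie_header / extract_cookies to operate on
    def __init__(self, parsed_uri):
        self._parsed_uri = parsed_uri
        self._headers = {}

    def get_full_url(self):
        return self._parsed_uri.geturl()

    def get_host(self):
        return self._parsed_uri.netloc

    def get_origin_req_host(self):
        return self._parsed_uri.hostname

    def get_type(self):
        return self._parsed_uri.scheme

    def is_unverifiable(self):
        return False

    def has_header(self, name):
        return name in self._headers

    def get_header(self, name, default=None):
        return self._headers.get(name, default)

    def header_items(self):
        return self._headers.items()

    def add_unredirected_header(self, name, value):
        self._headers[name] = value

    def get_headers(self):
        # Accessor used by the code above to pass the accumulated Cookie
        # headers on to httplib
        return dict(self._headers)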
Example n. 20
0
def register_account(name):
    step1_params = {
        "nickname": name,
        "first_name" : "zzz",
        "last_name" : "zzz", 
        "email" : name + "@example.com",
        "email2" : name + "@example.com",
        "password1" : PASSWORD,
        "password2" : PASSWORD
        }
    step1_headers = {
        "Content-Type": "application/x-www-form-urlencoded",
        "Referer" : "http://heroesofnewerth.com/create_account.php",
        "User-Agent" : "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.7 (KHTML, like Gecko) Chrome/16.0.912.77 Safari/535.7",
        }
    step1_data = urllib.urlencode(step1_params)

    step2_params = {
        "f": "purchaseNew",
        "promocode": "",
        "purchase_id": "",
        "nickname": name,
        "first_name": "zzzz",
        "last_name": "zzzz",
        "email" : name + "@example.com",
        "bill_first_name": "", 
        "bill_last_name": "",
        "card_number": "",
        "month": "01",
        "year": "2009",
        "cvv": "",
        "io_HoN_BBq": "",
        "zip": ""
        }
    step2_headers = {
        "Content-Type": "application/x-www-form-urlencoded",
        "Referer" : "http://heroesofnewerth.com/create_account2.php",
        "User-Agent" : "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.7 (KHTML, like Gecko) Chrome/16.0.912.77 Safari/535.7",
        }
    step2_data = urllib.urlencode(step2_params)

    # Build a cookie-aware opener up front so that each request below is
    # issued exactly once and all of them share the same cookie jar
    cookies = CookieJar()
    cookie_handler = urllib2.HTTPCookieProcessor(cookies)
    redirect_handler = HTTPRedirectHandler()
    opener = urllib2.build_opener(redirect_handler, cookie_handler)

    # Visit the referral link first so the referral cookie is recorded
    referral_url = "http://heroesofnewerth.com/ref.php?r=" + REFERRAL_CODE
    referral_request = urllib2.Request(referral_url)
    referral_response = opener.open(referral_request)

    # Step 1: submit the account creation form
    create_account_url = "http://heroesofnewerth.com/create_account.php"
    create_account_request = urllib2.Request(create_account_url, step1_data, step1_headers)
    create_response = opener.open(create_account_request)

    # Step 2: confirm the account
    confirm_account_url = "http://heroesofnewerth.com/create_account2.php"
    confirm_account_request = urllib2.Request(confirm_account_url, step2_data, step2_headers)
    confirm_response = opener.open(confirm_account_request)

    return True
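
Design note: urllib2.HTTPCookieProcessor extracts cookies from every response that passes through its opener, so for requests issued through the opener there is no need to call CookieJar.extract_cookies() by hand.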
Example n. 21
0
class GeoCaching:
    def __init__(self, username='******', password='******'):

        self.jar = CookieJar()
        self.req = Request(
            "http://www.geocaching.com/login/default.aspx?RESET=Y",
            urlencode({
                '__EVENTTARGET': '',
                '__EVENTARGUMENT': '',
                '__VIEWSTATE': '',
                'ctl00$ContentBody$myUsername': username,
                'ctl00$ContentBody$myPassword': password,
                'ctl00$ContentBody$cookie': '',
                'ctl00$ContentBody$Button1': 'Login'
            }))

        self.jar.add_cookie_header(self.req)

        u = urlopen(self.req)

        self.jar.extract_cookies(u, self.req)

        u.close()

    def urlopen(self, *args):

        req = Request(*args)

        self.jar.add_cookie_header(req)

        return urlopen(req)

    def urlfetch(self, output, *args):

        u = self.urlopen(*args)

        if hasattr(output, "write"):
            f = output
        else:
            f = open(output, 'w')

        s = u.read(1024)

        while len(s) > 0:
            f.write(s)
            s = u.read(1024)

        u.close()
        f.close()

    def get_guid(self, gc):

        from HTML import GUID

        u = self.urlopen(
            "http://www.geocaching.com/seek/cache_details.aspx?wp=%s" % (gc, ))

        s1 = u.read(1024)
        s2 = u.read(1024)

        while len(s1) + len(s2) != 0:

            match = GUID.search(s1 + s2)

            if match:
                u.close()
                return match.group(1)

            s1 = s2
            s2 = u.read(1024)

        u.close()

        return False

    def fetch_loc(self, fileobject, gc):

        self.urlfetch(
            fileobject,
            "http://www.geocaching.com/seek/cache_details.aspx?wp=%s" % (gc, ),
            urlencode({
                '__EVENTTARGET': '',
                '__EVENTARGUMENT': '',
                '__VIEWSTATE': '',
                'ctl00$ContentBody$btnLocDL': 'LOC Waypoint File'
            }))

        return True

    def fetch_html(self, directory, gc, guid=None):

        from HTML import SCRIPTS, IMAGES, LINKS
        from HTML import script, image, link

        if guid is None:
            guid = self.get_guid(gc)

        if guid is False:
            return False

        u = self.urlopen(
            "http://www.geocaching.com/seek/cdpf.aspx?guid=%s&lc=10" %
            (guid, ))
        f = open("%s%s.html" % (directory, gc), 'w')

        line = u.readline()

        if line[:1] in ("\r", "\n"):
            line = '<?xml version="1.0" encoding="utf-8" ?>' + line
        elif line[0:9] == "<!DOCTYPE":
            line = '<?xml version="1.0" encoding="utf-8" ?>' + "\n" + line

        f.write(line)

        for line in u:

            line = SCRIPTS.sub(lambda m: script(m, self, gc, directory), line)
            line = IMAGES.sub(lambda m: image(m, self, gc, directory), line)
            line = LINKS.sub(lambda m: link(m, self, gc, directory), line)

            f.write(line)

        u.close()
        f.close()

        return True

    def search_lat_long(self, lat, long):

        from SearchParser import SearchParser

        s = SearchParser(self)

        s.parse_stream(
            self.urlopen(
                "http://www.geocaching.com/seek/nearest.aspx?lat=%f&lng=%f" %
                (lat, long)))

        return s

    def fetch_window(self, lat1, lat2, lon1, lon2):

        if lat2 > lat1: lat1, lat2 = lat2, lat1
        if lon2 > lon1: lon1, lon2 = lon2, lon1

        f = self.urlopen(
            "http://www.geocaching.com/map/default.aspx/MapAction",
            '{"dto":{"data":{"c":1,"m":"","d":"%.9f|%.9f|%.9f|%.9f"},"ut":""}}'
            % (lat1, lat2, lon1, lon2), {
                "Origin": "http://www.geocaching.com",
                "Content-Type": "application/json"
            })

        j = load(f)
        j = loads(j["d"])

        ret = list()

        for obj in j["cs"]["cc"]:
            ret.append({
                "gc": obj["gc"],
                "type": obj["ctid"],
                "title": obj["nn"],
                "lat": int(round(obj["lat"] * 1E6)),
                "lon": int(round(obj["lon"] * 1E6)),
                "found": obj["f"],
                "disabled": not obj["ia"]
            })

        return ret
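
A hypothetical session with the class above (the credentials and the GC waypoint code are placeholders, not values from the original):

session = GeoCaching(username='someuser', password='secret')

guid = session.get_guid('GC12345')
if guid:
    # Save the printable cache page, rewriting scripts, images and links
    # via the helpers imported from the HTML module
    session.fetch_html('/tmp/', 'GC12345', guid=guid)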
Example n. 22
0
class CookieTransport(TimeoutTransport):
    '''A subclass of xmlrpclib.Transport that supports cookies.'''
    cookiejar = None
    scheme = 'http'

    # Cribbed from xmlrpclib.Transport.send_user_agent
    def send_cookies(self, connection, cookie_request):
        if self.cookiejar is None:
            self.cookiejar = CookieJar()
        elif self.cookiejar:
            # Let the cookiejar figure out what cookies are appropriate
            self.cookiejar.add_cookie_header(cookie_request)
            # Pull the cookie headers out of the request object...
            cookielist = list()
            for h, v in cookie_request.header_items():
                if h.startswith('Cookie'):
                    cookielist.append([h, v])
            # ...and put them over the connection
            for h, v in cookielist:
                connection.putheader(h, v)

    def single_request(self, host, handler, request_body, verbose=1):
        # issue XML-RPC request

        h = self.make_connection(host)
        if verbose:
            h.set_debuglevel(1)

        request_url = "%s://%s/" % (self.scheme, host)
        cookie_request = urllib2.Request(request_url)

        try:
            self.send_request(h, handler, request_body)
            self.send_host(h, host)
            self.send_cookies(
                h, cookie_request)  # ADDED. creates cookiejar if None.
            self.send_user_agent(h)
            self.send_content(h, request_body)

            response = h.getresponse(buffering=True)

            # ADDED: parse headers and get cookies here
            # fake a response object that we can fill with the headers above
            class CookieResponse:
                def __init__(self, headers):
                    self.headers = headers

                def info(self):
                    return self.headers

            cookie_response = CookieResponse(response.msg)
            # Okay, extract the cookies from the headers
            self.cookiejar.extract_cookies(cookie_response, cookie_request)
            # And write back any changes
            if hasattr(self.cookiejar, 'save'):
                self.cookiejar.save(self.cookiejar.filename)

            if response.status == 200:
                self.verbose = verbose
                return self.parse_response(response)
        except xmlrpclib.Fault:
            raise
        except Exception:
            # All unexpected errors leave connection in
            # a strange state, so we clear it.
            self.close()
            raise

        # discard any response data and raise exception
        if (response.getheader("content-length", 0)):
            response.read()
        raise xmlrpclib.ProtocolError(
            host + handler,
            response.status,
            response.reason,
            response.msg,
        )
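
A hypothetical way to wire this transport into an XML-RPC client (TimeoutTransport is assumed to be defined in the surrounding module; the endpoint URL is a placeholder):

transport = CookieTransport()
server = xmlrpclib.ServerProxy('http://rpc.example.com/xmlrpc',
                               transport=transport)
# Session cookies returned by the server are stored in transport.cookiejar
# and replayed on every subsequent call
methods = server.system.listMethods()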
Example n. 23
0
class WebClient(Configurable):
    "Convenient HTTP request processing. Proxy is not supported in current version."
    _default_cleanupinterval = 60
    _default_samehostlimit = 20
    _default_sameurllimit = False
    _default_cafile = None
    _default_redirectlimit = 10
    _default_verifyhost = True
    def __init__(self, allowcookies = False, cookiejar = None):
        '''
        :param allowcookies: Accept and store cookies, automatically use them on further requests
        :param cookiejar: Provide a customized cookiejar instead of the default CookieJar()
        '''
        self._connmap = {}
        self._requesting = set()
        self._hostwaiting = set()
        self._pathwaiting = set()
        self._protocol = Http(False)
        self.allowcookies = allowcookies
        if cookiejar is None:
            self.cookiejar = CookieJar()
        else:
            self.cookiejar = cookiejar
        self._tasks = []
    def open(self, container, request, ignorewebexception = False, timeout = None, datagen = None, cafile = None, key = None, certificate = None,
             followredirect = True, autodecompress = False, allowcookies = None):
        '''
        Open http request with a Request object
        
        :param container: a routine container hosting this routine
        :param request: vlcp.utils.webclient.Request object
        :param ignorewebexception: Do not raise an exception on Web errors (4xx, 5xx); return a response normally
        :param timeout: timeout on the connection and on each single http request. When following a redirect,
               the new request does not share the old timeout, which means if timeout=2:
               connect to host: (2s)
               wait for response: (2s)
               response is 302, redirect
               connect to redirected host: (2s)
               wait for response: (2s)
               ...
               
        :param datagen: if the request uses a stream as the data parameter, you may provide a routine to generate
                        data for the stream. If the request fails early, this routine is automatically terminated.
                        
        :param cafile: provide a CA file for the SSL certificate check. If not provided, the SSL connection is NOT verified.
        :param key: provide a key file, for client certificate authentication (usually not necessary)
        :param certificate: provide a certificate file, for client certificate authentication (usually not necessary)
        :param followredirect: if True (default), automatically follow 3xx redirections
        :param autodecompress: if True, automatically detect Content-Encoding header and decode the body
        :param allowcookies: override the default setting to enable or disable cookies for this request
        '''
        with closing(container.delegateOther(self._open(container, request, ignorewebexception, timeout, datagen, cafile, key, certificate,
                                                    followredirect, autodecompress, allowcookies),
                                             container)) as g:
            for m in g:
                yield m
    def _open(self, container, request, ignorewebexception = False, timeout = None, datagen = None, cafile = None, key = None, certificate = None,
             followredirect = True, autodecompress = False, allowcookies = None):
        if cafile is None:
            cafile = self.cafile
        if allowcookies is None:
            allowcookies = self.allowcookies
        forcecreate = False
        datagen_routine = None
        if autodecompress:
            if not request.has_header('Accept-Encoding'):
                request.add_header('Accept-Encoding', 'gzip, deflate')
        while True:
            # Find or create a connection
            for m in self._getconnection(container, request.host, request.path, request.get_type() == 'https',
                                                forcecreate, cafile, key, certificate, timeout):
                yield m
            (conn, created) = container.retvalue
            # Send request on conn and wait for reply
            try:
                if allowcookies:
                    self.cookiejar.add_cookie_header(request)
                if isinstance(request.data, bytes):
                    stream = MemoryStream(request.data)
                else:
                    stream = request.data
                if datagen and datagen_routine is None:
                    datagen_routine = container.subroutine(datagen)
                else:
                    datagen_routine = None
                for m in container.executeWithTimeout(timeout, self._protocol.requestwithresponse(container, conn, _bytes(request.host), _bytes(request.path), _bytes(request.method),
                                                   [(_bytes(k), _bytes(v)) for k,v in request.header_items()], stream)):
                    yield m
                if container.timeout:
                    if datagen_routine:
                        container.terminate(datagen_routine)
                    container.subroutine(self._releaseconnection(conn, request.host, request.path, request.get_type() == 'https', True), False)
                    raise WebException('HTTP request timeout')
                finalresp = container.http_finalresponse
                resp = Response(request.get_full_url(), finalresp, container.scheduler)
                if allowcookies:
                    self.cookiejar.extract_cookies(resp, request)
                if resp.iserror and not ignorewebexception:
                    try:
                        exc = WebException(resp.fullstatus)
                        for m in resp.stream.read(container, 4096):
                            yield m
                        exc.response = resp
                        exc.body = container.data
                        if datagen_routine:
                            container.terminate(datagen_routine)
                        for m in resp.shutdown():
                            yield m
                        container.subroutine(self._releaseconnection(conn, request.host, request.path, request.get_type() == 'https', True), False)
                        raise exc
                    finally:
                        resp.close()
                else:
                    try:
                        container.subroutine(self._releaseconnection(conn, request.host, request.path, request.get_type() == 'https', False, finalresp), False)
                        if followredirect and resp.status in (300, 301, 302, 303, 307, 308):
                            request.redirect(resp, ignorewebexception = ignorewebexception, timeout = timeout, cafile = cafile, key = key,
                                             certificate = certificate, followredirect = followredirect,
                                             autodecompress = autodecompress, allowcookies = allowcookies)
                            resp.close()
                            continue
                        if autodecompress and resp.stream:
                            ce = resp.get_header('Content-Encoding', '')
                            if ce.lower() == 'gzip' or ce.lower() == 'x-gzip':
                                resp.stream.getEncoderList().append(encoders.gzip_decoder())
                            elif ce.lower() == 'deflate':
                                resp.stream.getEncoderList().append(encoders.deflate_decoder())
                        container.retvalue = resp
                    except:
                        resp.close()
                        raise
            except HttpConnectionClosedException:
                for m in self._releaseconnection(conn, request.host, request.path, request.get_type() == 'https', False):
                    yield m
                if not created:
                    # Retry on a newly created connection
                    forcecreate = True
                    continue
                else:
                    if datagen_routine:
                        container.terminate(datagen_routine)
                    raise
            except Exception as exc:
                for m in self._releaseconnection(conn, request.host, request.path, request.get_type() == 'https', True):
                    yield m
                raise exc
            break
    def _releaseconnection(self, connection, host, path, https = False, forceclose = False, respevent = None):
        if not host:
            raise ValueError
        if forceclose:
            for m in connection.shutdown(True):
                yield m
        if not forceclose and connection.connected and respevent:
            def releaseconn():
                for m in self._protocol.waitForResponseEnd(connection, connection, respevent.connmark, respevent.xid):
                    yield m
                keepalive = connection.retvalue
                conns = self._connmap[host]
                conns[2] -= 1
                if keepalive:
                    connection.setdaemon(True)
                    conns[1 if https else 0].append(connection)
                else:
                    for m in connection.shutdown():
                        yield m
            connection.subroutine(releaseconn(), False)
        else:
            conns = self._connmap[host]
            conns[2] -= 1
        if self.sameurllimit:
            self._requesting.remove((host, path, https))
        if (host, path, https) in self._pathwaiting or host in self._hostwaiting:
            for m in connection.waitForSend(WebClientRequestDoneEvent(host, path, https)):
                yield m
            if (host, path, https) in self._pathwaiting:
                self._pathwaiting.remove((host, path, https))
            if host in self._hostwaiting:
                self._hostwaiting.remove(host)
    def _getconnection(self, container, host, path, https = False, forcecreate = False, cafile = None, key = None, certificate = None,
                       timeout = None):
        if not host:
            raise ValueError
        matcher = WebClientRequestDoneEvent.createMatcher(host, path, https)
        while self.sameurllimit and (host, path, https) in self._requesting:
            self._pathwaiting.add((host, path, https))
            yield (matcher,)
        # Lock the path
        if self.sameurllimit:
            self._requesting.add((host, path, https))
        # connmap format: (free, free_ssl, workingcount)
        conns = self._connmap.setdefault(host, [[],[], 0])
        conns[0] = [c for c in conns[0] if c.connected]
        conns[1] = [c for c in conns[1] if c.connected]
        myset = conns[1 if https else 0]
        if not forcecreate and myset:
            # There are free connections, reuse them
            conn = myset.pop()
            conn.setdaemon(False)
            container.retvalue = (conn, False)
            conns[2] += 1
            return
        matcher = WebClientRequestDoneEvent.createMatcher(host)
        while self.samehostlimit and len(conns[0]) + len(conns[1]) + conns[2] >= self.samehostlimit:
            if myset:
                # Close an old connection
                conn = myset.pop()
                for m in conn.shutdown():
                    yield m
            else:
                # Wait for free connections
                self._hostwaiting.add(host)
                yield (matcher,)
                conns = self._connmap.setdefault(host, [[],[], 0])
                myset = conns[1 if https else 0]
                if not forcecreate and myset:
                    conn = myset.pop()
                    conn.setdaemon(False)
                    container.retvalue = (conn, False)
                    conns[2] += 1
                    return
        # Create new connection
        conns[2] += 1
        conn = Client(urlunsplit(('ssl' if https else 'tcp', host, '/', '', '')), self._protocol, container.scheduler,
                      key, certificate, cafile)
        if timeout is not None:
            conn.connect_timeout = timeout
        conn.start()
        connected = self._protocol.statematcher(conn, HttpConnectionStateEvent.CLIENT_CONNECTED, False)
        notconnected = self._protocol.statematcher(conn, HttpConnectionStateEvent.CLIENT_NOTCONNECTED, False)
        yield (connected, notconnected)
        if container.matcher is notconnected:
            conns[2] -= 1
            for m in conn.shutdown(True):
                yield m
            raise IOError('Failed to connect to %r' % (conn.rawurl,))
        if https and cafile and self.verifyhost:
            try:
                # TODO: check with SSLContext
                hostcheck = re.sub(r':\d+$', '', host)
                if host == conn.socket.remoteaddr[0]:
                    # IP addresses are currently not allowed
                    for m in conn.shutdown(True):
                        yield m
                    raise CertificateException('Cannot verify host with IP address')
                match_hostname(conn.socket.getpeercert(False), hostcheck)
            except:
                conns[2] -= 1
                raise
        container.retvalue = (conn, True)
    def cleanup(self, host = None):
        "Cleaning disconnected connections"
        if host is not None:
            conns = self._connmap.get(host)
            if conns is None:
                return
            # cleanup disconnected connections
            conns[0] = [c for c in conns[0] if c.connected]
            conns[1] = [c for c in conns[1] if c.connected]
            if not conns[0] and not conns[1] and not conns[2]:
                del self._connmap[host]
        else:
            hosts = list(self._connmap.keys())
            for h in hosts:
                self.cleanup(h)
    def cleanup_task(self, container, interval = None):
        '''
        If this client object persists for a long time and you are worried about memory leaks,
        create a routine with this method: myclient.cleanup_task(mycontainer, 60).
        But remember that if you have created at least one task, you must call myclient.endtask()
        to completely release the webclient object.
        '''
        if interval is None:
            interval = self.cleanupinterval
        def task():
            th = container.scheduler.setTimer(interval, interval)
            tm = TimerEvent.createMatcher(th)
            try:
                while True:
                    yield (tm,)
                    self.cleanup()
            finally:
                container.scheduler.cancelTimer(th)
        t = container.subroutine(task(), False, daemon = True)
        self._tasks.append(t)
        return t
    def shutdown(self):
        "Shutdown free connections to release resources"
        for c0, c1, _ in list(self._connmap.values()):
            c0bak = list(c0)
            del c0[:]
            for c in c0bak:
                if c.connected:
                    for m in c.shutdown():
                        yield m
            c1bak = list(c1)
            del c1[:]
            for c in c1bak:
                if c.connected:
                    for m in c.shutdown():
                        yield m
    def endtask(self):
        for t in self._tasks:
            t.close()
        del self._tasks[:]
                
    def urlopen(self, container, url, data = None, method = None, headers = {}, rawurl = False, *args, **kwargs):
        '''
        Similar to urllib2.urlopen, but:
        1. is a routine
        2. data can be an instance of vlcp.event.stream.BaseStream, or str/bytes
        3. can specify the HTTP method
        4. if datagen is not None, it is a routine which writes to <data>. It is automatically terminated if the connection goes down.
        5. can also specify key and certificate, for client certificate authentication
        6. certificates are verified against the CA file if provided.
        If there are keep-alive connections, they are automatically reused.
        See open for available arguments
        
        Extra argument:
        
        :param rawurl: if True, assume the url is already url-encoded, do not encode it again.
        '''
        return self.open(container, Request(url, data, method, headers, rawurl=rawurl), *args, **kwargs)
    def manualredirect(self, container, exc, data, datagen = None):
        "If data is a stream, it cannot be used again on redirect. Catch the ManualRedirectException and call a manual redirect with a new stream."
        request = exc.request
        request.data = data
        return self.open(container, request, datagen = datagen, **exc.kwargs)
    def urlgetcontent(self, container, url, data = None, method = None, headers = {}, tostr = False,  encoding = None, rawurl = False, *args, **kwargs):
        '''
        In Python 2, bytes is str, so tostr and encoding have no effect.
        In Python 3, bytes are decoded into a unicode str with encoding.
        If encoding is not specified, the charset in Content-Type is used if present, defaulting to utf-8 otherwise.
        See open for available arguments

        :param rawurl: if True, assume the url is already url-encoded, do not encode it again.
        '''
        req = Request(url, data, method, headers, rawurl = rawurl)
        for m in self.open(container, req, *args, **kwargs):
            yield m
        resp = container.retvalue
        if encoding is None:
            m = Message()
            m.add_header('Content-Type', resp.get_header('Content-Type', 'text/html'))
            encoding = m.get_content_charset('utf-8')
        if not resp.stream:
            content = b''
        else:
            for m in resp.stream.read(container):
                yield m
            content = container.data
        if tostr:
            content = _str(content, encoding)
        container.retvalue = content
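
A hypothetical use of the client from inside a VLCP routine (the container plumbing is assumed to be provided by the surrounding framework):

def fetch_front_page(container):
    # Runs as a routine: yield events until the whole body is received
    client = WebClient(allowcookies=True)
    for m in client.urlgetcontent(container, 'http://www.example.com/',
                                  tostr=True):
        yield m
    print(container.retvalue)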
Example n. 24
0
class HttxCookieJar(HttxObject):
    '''
    A CookieJar holder to enable deepcopy semantics with locking.

    CookieJars already lock access to the internals, but cannot be
    deepcopied, which prevents separation of cookiejars into different
    domains as achieved with different HttxOptions

    A light wrapper over CookieJar with a lock for deepcopy and access
    to the internal variable _cookies is needed to achieve deepcopy
    and therefore enable separation of domains

    @ivar cookiejar: CookieJar object holding the cookies
    @type cookiejar: cookielib CookieJar
    '''
    def __init__(self):
        '''
        Constructor. It delegates construction to the base class
        L{HttxObject} and initializes the member variables
        '''
        HttxObject.__init__(self)
        self.cookiejar = CookieJar()

    def __deepcopy__(self, memo):
        '''
        Deepcopy support.

        The lock prevents access from any other part of the library to this
        CookieJar, enabling a deepcopy of the private variable into the
        private variable of the clone to enable separation of domains
        for CookieJar objects

        The existing RLock in the CookieJar objects forbids direct deepcopy

        @param memo: standard __deepcopy__ parameter to avoid circular references
        @type memo: dict
        @return: a cloned object
        @rtype: L{HttxCookieJar}
        '''
        clone = self.__class__()
        with self.lock:
            # CookieJar has a threading.RLock, so we may not deepcopy it
            # and it has no __deepcopy__ implemented
            clone.cookiejar._cookies = deepcopy(self.cookiejar._cookies, memo)

        return clone

    def add_cookie_header(self, request):
        '''
        Add a cookie header to the request if needed

        This is a simple stub for CookieJar add_cookie_header

        @param request: the request to be manipulated
        @type request: urllib2 compatible Request - L{HttxRequest} 
        '''
        self.cookiejar.add_cookie_header(request)

    def extract_cookies(self, response, request):
        '''
        Extract cookies from the response, using request as a basis to do so

        This is a simple stub for CookieJar extract_cookies

        @param response: the response containing the headers where cookies
                         may be present
        @type response: urllib2 compatible Response - L{HttxResponse} 
        @param request: the request to be manipulated
        @type request: urllib2 compatible Request - L{HttxRequest} 
        '''
        self.cookiejar.extract_cookies(response, request)
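
A small illustration of the deepcopy semantics described above (hypothetical; it assumes the HttxObject base class providing self.lock is importable):

from copy import deepcopy

shared_jar = HttxCookieJar()
private_jar = deepcopy(shared_jar)
# The clone has its own cookie store: cookies extracted into one jar no
# longer leak into the other
assert private_jar.cookiejar is not shared_jar.cookiejar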
Example n. 25
0
class HttpTransport(Transport):
    """
    HTTP transport using urllib2.  Provides a basic HTTP transport
    that supports cookies and proxies, but no authentication.
    """

    def __init__(self, **kwargs):
        """
        @param kwargs: Keyword arguments.
            - B{proxy} - An http proxy to be specified on requests.
                 The proxy is defined as {protocol:proxy,}
                    - type: I{dict}
                    - default: {}
            - B{timeout} - Set the url open timeout (seconds).
                    - type: I{float}
                    - default: 90
        """
        Transport.__init__(self)
        Unskin(self.options).update(kwargs)
        self.cookiejar = CookieJar()
        self.proxy = {}
        self.urlopener = None

    def open(self, request):
        try:
            url = request.url
            log.debug('opening (%s)', url)
            u2request = u2.Request(url)
            self.proxy = self.options.proxy
            return self.u2open(u2request)
        except u2.HTTPError as e:
            raise TransportError(str(e), e.code, e.fp)

    def send(self, request):
        result = None
        url = request.url
        msg = request.message
        headers = request.headers
        try:
            u2request = u2.Request(url, msg, headers)
            self.addcookies(u2request)
            self.proxy = self.options.proxy
            request.headers.update(u2request.headers)
            log.debug('sending:\n%s', request)
            fp = self.u2open(u2request)
            self.getcookies(fp, u2request)
            result = Reply(200, fp.headers.dict, fp.read())
            log.debug('received:\n%s', result)
        except u2.HTTPError as e:
            if e.code in (202, 204):
                result = None
            else:
                raise TransportError(e.msg, e.code, e.fp)
        return result

    def addcookies(self, u2request):
        """
        Add cookies from the cookiejar to the request.
        @param u2request: A urllib2 request.
        @type u2request: urllib2.Request.
        """
        self.cookiejar.add_cookie_header(u2request)

    def getcookies(self, fp, u2request):
        """
        Add cookies from the response to the cookiejar.
        @param u2request: A urllib2 request.
        @type u2request: urllib2.Request.
        """
        self.cookiejar.extract_cookies(fp, u2request)

    def u2open(self, u2request):
        """
        Open a connection.
        @param u2request: A urllib2 request.
        @type u2request: urllib2.Request.
        @return: The opened file-like urllib2 object.
        @rtype: fp
        """
        tm = self.options.timeout
        url = self.u2opener()
        if self.u2ver() < 2.6:
            socket.setdefaulttimeout(tm)
            return url.open(u2request)
        else:
            return url.open(u2request, timeout=tm)

    def u2opener(self):
        """
        Create a urllib opener.
        @return: An opener.
        @rtype: I{OpenerDirector}
        """
        if self.urlopener is None:
            return u2.build_opener(*self.u2handlers())
        else:
            return self.urlopener

    def u2handlers(self):
        """
        Get a collection of urllib handlers.
        @return: A list of handlers to be installed in the opener.
        @rtype: [Handler,...]
        """
        handlers = []
        handlers.append(u2.ProxyHandler(self.proxy))
        return handlers

    def u2ver(self):
        """
        Get the major/minor version of the urllib2 lib.
        @return: The urllib2 version.
        @rtype: float
        """
        try:
            part = u2.__version__.split('.', 1)
            n = float('.'.join(part))
            return n
        except Exception as e:
            log.exception(e)
            return 0

    def __deepcopy__(self, memo={}):
        clone = self.__class__()
        p = Unskin(self.options)
        cp = Unskin(clone.options)
        cp.update(p)
        return clone
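
A hypothetical construction of the transport (the proxy host is a placeholder; the option names follow the keyword arguments documented in __init__):

transport = HttpTransport(timeout=30,
                          proxy={'http': 'proxy.example.com:3128'})
# addcookies()/getcookies() replay and capture cookies around each send(),
# so the cookiejar persists across requests made with this transport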
Example n. 26
0
class CookieTransport(TimeoutTransport):
    '''A subclass of xmlrpclib.Transport that supports cookies.'''
    cookiejar = None
    scheme = 'http'

    # Cribbed from xmlrpclib.Transport.send_user_agent
    def send_cookies(self, connection, cookie_request):
        if self.cookiejar is None:
            self.cookiejar = CookieJar()
        elif self.cookiejar:
            # Let the cookiejar figure out what cookies are appropriate
            self.cookiejar.add_cookie_header(cookie_request)
            # Pull the cookie headers out of the request object...
            cookielist = list()
            for h, v in cookie_request.header_items():
                if h.startswith('Cookie'):
                    cookielist.append([h, v])
            # ...and put them over the connection
            for h, v in cookielist:
                connection.putheader(h, v)

    # This is the same request() method from xmlrpclib.Transport,
    # with a couple additions noted below
    def request(self, host, handler, request_body, verbose=0):
        h = self.make_connection(host)
        if verbose:
            h.set_debuglevel(1)

        request_url = "%s://%s/" % (self.scheme, host)
        cookie_request = urllib2.Request(request_url)

        self.send_request(h, handler, request_body)
        self.send_host(h, host)
        self.send_cookies(h, cookie_request)  # ADDED. creates cookiejar if None.
        self.send_user_agent(h)
        self.send_content(h, request_body)

        errcode, errmsg, headers = h.getreply()

        # ADDED: parse headers and get cookies here
        # fake a response object that we can fill with the headers above
        class CookieResponse:
            def __init__(self, headers):
                self.headers = headers

            def info(self):
                return self.headers
        cookie_response = CookieResponse(headers)
        # Okay, extract the cookies from the headers
        self.cookiejar.extract_cookies(cookie_response, cookie_request)
        # And write back any changes
        if hasattr(self.cookiejar, 'save'):
            self.cookiejar.save(self.cookiejar.filename)

        if errcode != 200:
            raise xmlrpclib.ProtocolError(
                host + handler,
                errcode, errmsg,
                headers
            )

        self.verbose = verbose

        try:
            sock = h._conn.sock
        except AttributeError:
            sock = None

        return self._parse_response(h.getfile(), sock)
Example n. 27
0
class CookieTransport(xmlrpclib.Transport):
    """A subclass of xmlrpclib.Transport that supports cookies."""

    cookiejar = None
    scheme = "http"

    # Cribbed from xmlrpclib.Transport.send_user_agent
    def send_cookies(self, connection, cookie_request):
        if self.cookiejar is None:
            self.cookiejar = CookieJar()
        elif self.cookiejar:
            # Let the cookiejar figure out what cookies are appropriate
            self.cookiejar.add_cookie_header(cookie_request)
            # Pull the cookie headers out of the request object...
            cookielist = list()
            for h, v in cookie_request.header_items():
                if h.startswith("Cookie"):
                    cookielist.append([h, v])
            # ...and put them over the connection
            for h, v in cookielist:
                connection.putheader(h, v)

    # This is the same request() method from xmlrpclib.Transport,
    # with a couple additions noted below
    def request_with_cookies(self, host, handler, request_body, verbose=0):
        h = self.make_connection(host)
        if verbose:
            h.set_debuglevel(1)

        # ADDED: construct the URL and Request object for proper cookie handling
        request_url = "%s://%s%s" % (self.scheme, host, handler)
        # log.debug("request_url is %s" % request_url)
        cookie_request = urllib2.Request(request_url)

        self.send_request(h, handler, request_body)
        self.send_host(h, host)
        self.send_cookies(h, cookie_request)  # ADDED. creates cookiejar if None.
        self.send_user_agent(h)
        self.send_content(h, request_body)

        errcode, errmsg, headers = h.getreply()

        # ADDED: parse headers and get cookies here
        cookie_response = CookieResponse(headers)
        # Okay, extract the cookies from the headers
        self.cookiejar.extract_cookies(cookie_response, cookie_request)
        # log.debug("cookiejar now contains: %s" % self.cookiejar._cookies)
        # And write back any changes
        if hasattr(self.cookiejar, "save"):
            try:
                self.cookiejar.save(self.cookiejar.filename)
            except Exception, e:
                raise
                # log.error("Couldn't write cookiefile %s: %s" % \
                #        (self.cookiejar.filename,str(e)))

        if errcode != 200:
            raise xmlrpclib.ProtocolError(host + handler, errcode, errmsg, headers)

        self.verbose = verbose

        try:
            sock = h._conn.sock
        except AttributeError:
            sock = None

        return self._parse_response(h.getfile(), sock)
Example n. 28
0
class CookieTransport(TimeoutTransport):
    '''A subclass of xmlrpclib.Transport that supports cookies.'''
    cookiejar = None
    scheme = 'http'

    # Cribbed from xmlrpclib.Transport.send_user_agent
    def send_cookies(self, connection, cookie_request):
        if self.cookiejar is None:
            self.cookiejar = CookieJar()
        elif self.cookiejar:
            # Let the cookiejar figure out what cookies are appropriate
            self.cookiejar.add_cookie_header(cookie_request)
            # Pull the cookie headers out of the request object...
            cookielist = list()
            for h, v in cookie_request.header_items():
                if h.startswith('Cookie'):
                    cookielist.append([h, v])
            # ...and put them over the connection
            for h, v in cookielist:
                connection.putheader(h, v)

    # This is the same request() method from xmlrpclib.Transport,
    # with a couple additions noted below
    def request(self, host, handler, request_body, verbose=0):
        h = self.make_connection(host)
        if verbose:
            h.set_debuglevel(1)

        request_url = "%s://%s/" % (self.scheme, host)
        cookie_request = urllib2.Request(request_url)

        self.send_request(h, handler, request_body)
        self.send_host(h, host)
        self.send_cookies(h,
                          cookie_request)  # ADDED. creates cookiejar if None.
        self.send_user_agent(h)
        self.send_content(h, request_body)

        errcode, errmsg, headers = h.getreply()

        # ADDED: parse headers and get cookies here
        # fake a response object that we can fill with the headers above
        class CookieResponse:
            def __init__(self, headers):
                self.headers = headers

            def info(self):
                return self.headers

        cookie_response = CookieResponse(headers)
        # Okay, extract the cookies from the headers
        self.cookiejar.extract_cookies(cookie_response, cookie_request)
        # And write back any changes
        if hasattr(self.cookiejar, 'save'):
            self.cookiejar.save(self.cookiejar.filename)

        if errcode != 200:
            raise xmlrpclib.ProtocolError(host + handler, errcode, errmsg,
                                          headers)

        self.verbose = verbose

        try:
            sock = h._conn.sock
        except AttributeError:
            sock = None

        return self._parse_response(h.getfile(), sock)
Example n. 29
0
class CookieTransport(xmlrpclib.Transport):
    '''A subclass of xmlrpclib.Transport that supports cookies.'''
    cookiejar = None
    scheme = 'http'
    
    # Cribbed from xmlrpclib.Transport.send_user_agent 
    def send_cookies(self, connection, cookie_request):
        if self.cookiejar is None:
            self.cookiejar = CookieJar()
        elif self.cookiejar:
            # Let the cookiejar figure out what cookies are appropriate
            self.cookiejar.add_cookie_header(cookie_request)
            # Pull the cookie headers out of the request object...
            cookielist=list()
            for h,v in cookie_request.header_items():
                if h.startswith('Cookie'):
                    cookielist.append([h,v])
            # ...and put them over the connection
            for h,v in cookielist:
                connection.putheader(h,v)
    
    # This is the same request() method from xmlrpclib.Transport,
    # with a couple additions noted below
    def request_with_cookies(self, host, handler, request_body, verbose=0):
        h = self.make_connection(host)
        if verbose:
            h.set_debuglevel(1)

        # ADDED: construct the URL and Request object for proper cookie handling
        request_url = "%s://%s%s" % (self.scheme,host,handler)
        #log.debug("request_url is %s" % request_url)
        cookie_request  = urllib2.Request(request_url) 

        self.send_request(h,handler,request_body)
        self.send_host(h,host) 
        self.send_cookies(h,cookie_request) # ADDED. creates cookiejar if None.
        self.send_user_agent(h)
        self.send_content(h,request_body)

        errcode, errmsg, headers = h.getreply()

        # ADDED: parse headers and get cookies here
        cookie_response = CookieResponse(headers)
        # Okay, extract the cookies from the headers
        self.cookiejar.extract_cookies(cookie_response,cookie_request)
        #log.debug("cookiejar now contains: %s" % self.cookiejar._cookies)
        # And write back any changes
        if hasattr(self.cookiejar,'save'):
            try:
                self.cookiejar.save(self.cookiejar.filename)
            except Exception as e:
                raise
                #log.error("Couldn't write cookiefile %s: %s" % \
                #        (self.cookiejar.filename,str(e)))

        if errcode != 200:
            # When execution reaches this point, the HTTP(S) connection is no
            # longer usable - close it before raising an exception to the caller
            h.close()

            raise xmlrpclib.ProtocolError(
                host + handler,
                errcode, errmsg,
                headers
                )

        self.verbose = verbose

        try:
            sock = h._conn.sock
        except AttributeError:
            sock = None

        try:
            return self._parse_response(h.getfile(), sock)
        finally:
            h.close()

    # This is just python 2.7's xmlrpclib.Transport.single_request, with
    # additions noted below to send cookies along with the request
    def single_request_with_cookies(self, host, handler, request_body, verbose=0):
        h = self.make_connection(host)
        if verbose:
            h.set_debuglevel(1)

        # ADDED: construct the URL and Request object for proper cookie handling
        request_url = "%s://%s%s" % (self.scheme,host,handler)
        #log.debug("request_url is %s" % request_url)
        cookie_request  = urllib2.Request(request_url)

        try:
            self.send_request(h,handler,request_body)
            self.send_host(h,host)
            self.send_cookies(h,cookie_request) # ADDED. creates cookiejar if None.
            self.send_user_agent(h)
            self.send_content(h,request_body)

            response = h.getresponse(buffering=True)

            # ADDED: parse headers and get cookies here
            cookie_response = CookieResponse(response.msg)
            # Okay, extract the cookies from the headers
            self.cookiejar.extract_cookies(cookie_response,cookie_request)
            #log.debug("cookiejar now contains: %s" % self.cookiejar._cookies)
            # And write back any changes
            if hasattr(self.cookiejar,'save'):
                try:
                    self.cookiejar.save(self.cookiejar.filename)
                except Exception as e:
                    raise
                    #log.error("Couldn't write cookiefile %s: %s" % \
                    #        (self.cookiejar.filename,str(e)))

            if response.status == 200:
                self.verbose = verbose
                return self.parse_response(response)

            if (response.getheader("content-length", 0)):
                response.read()
            raise xmlrpclib.ProtocolError(
                host + handler,
                response.status, response.reason,
                response.msg,
                )
        except xmlrpclib.Fault:
            raise
        finally:
            h.close()

    # Override the appropriate request method
    if hasattr(xmlrpclib.Transport, 'single_request'):
        single_request = single_request_with_cookies # python 2.7+
    else:
        request = request_with_cookies # python 2.6 and earlier
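
Design note: the hasattr dispatch above keeps a single class working across interpreter versions. Python 2.7's xmlrpclib.Transport.request() delegates to single_request(), so overriding single_request is enough there, while Python 2.6 and earlier only expose request() itself.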
Example n. 30
0
class IpaClientlessTestDriver(object):
    def __init__(self,
                 hostname, domain, password,
                 bind_dn, bind_pw,
                 username='******',
                 start_ssl=True,
                 insecure=False):
        self.hostname = hostname
        self.domain = domain
        self.password = password
        self.username = username
        self.bind_dn = bind_dn
        self.bind_pw = bind_pw
        self.start_ssl = start_ssl
        self.referer = "https://" + self.hostname + "/ipa"

        self.cj = CookieJar()

    def _auth(self, lazy=True):
        if lazy is True and len(self.cj) > 0:
            return 200

        login_url = self.referer + "/session/login_password"

        request = urllib2.Request(login_url)
        request.add_header('referer', self.referer)
        request.add_header('Content-Type', 'application/x-www-form-urlencoded')
        request.add_header('Accept', 'text/plain')

        query_args = {'user': self.username,
                      'password': self.password}
        encoded_args = urllib.urlencode(query_args)

        result = urllib2.urlopen(request,
                                 encoded_args)
        if result.getcode() == 200:
            self.cj.extract_cookies(result, request)
        return result.getcode()

    def _ssl_ctx(self, insecure):
        ctx = ssl.create_default_context()
        if insecure:
            ctx.check_hostname = False
            ctx.verify_mode = ssl.CERT_NONE
        return ctx

    def _json_request(self, jdata):
        ret = self._auth()
        if ret != 200:
            return ret

        json_url = self.referer + "/session/json"
        request = urllib2.Request(json_url)

        request.add_header('referer', self.referer)
        request.add_header('Content-Type', 'application/json')
        request.add_header('Accept', 'application/json')

        self.cj.add_cookie_header(request)
        result = urllib2.urlopen(request,
                                 jdata)
        return result.getcode()

    def fetch_cert(self, dest):
        url = "http://" + self.hostname + "/ipa/config/ca.crt"
        self.ca_cert = os.path.join(dest, "ca.crt")
        urllib.urlretrieve(url, self.ca_cert)

    def rm_cert(self):
        os.unlink(self.ca_cert)

    def run_cmd(self, method, params, args={}):
        cmd = json.dumps({"method": method,
                          "params": [params, args],
                          "id": "0"})
        return self._json_request(cmd)
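
A hypothetical run of the driver (host name, domain, credentials and the method name are placeholders):

driver = IpaClientlessTestDriver(hostname='ipa.example.com',
                                 domain='example.com',
                                 password='Secret123',
                                 bind_dn='cn=Directory Manager',
                                 bind_pw='Secret123',
                                 username='admin')
# _auth() runs lazily on the first JSON request and caches the session
# cookie in driver.cj
print(driver.run_cmd('ping', []))   # expect 200 on success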
Example n. 31
0
class CookieTransport(TimeoutTransport):
    """A subclass of xmlrpclib.Transport that supports cookies."""

    cookiejar = None
    scheme = "http"

    # Cribbed from xmlrpclib.Transport.send_user_agent
    def send_cookies(self, connection, cookie_request):
        if self.cookiejar is None:
            self.cookiejar = CookieJar()
        elif self.cookiejar:
            # Let the cookiejar figure out what cookies are appropriate
            self.cookiejar.add_cookie_header(cookie_request)
            # Pull the cookie headers out of the request object...
            cookielist = list()
            for h, v in cookie_request.header_items():
                if h.startswith("Cookie"):
                    cookielist.append([h, v])
            # ...and put them over the connection
            for h, v in cookielist:
                connection.putheader(h, v)

    def single_request(self, host, handler, request_body, verbose=1):
        # issue XML-RPC request

        h = self.make_connection(host)
        if verbose:
            h.set_debuglevel(1)

        request_url = "%s://%s/" % (self.scheme, host)
        cookie_request = urllib2.Request(request_url)

        try:
            self.send_request(h, handler, request_body)
            self.send_host(h, host)
            self.send_cookies(h, cookie_request)  # ADDED. creates cookiejar if None.
            self.send_user_agent(h)
            self.send_content(h, request_body)

            response = h.getresponse(buffering=True)

            # ADDED: parse headers and get cookies here
            # fake a response object that we can fill with the headers above
            class CookieResponse:
                def __init__(self, headers):
                    self.headers = headers

                def info(self):
                    return self.headers

            cookie_response = CookieResponse(response.msg)
            # Okay, extract the cookies from the headers
            self.cookiejar.extract_cookies(cookie_response, cookie_request)
            # And write back any changes
            if hasattr(self.cookiejar, "save"):
                self.cookiejar.save(self.cookiejar.filename)

            if response.status == 200:
                self.verbose = verbose
                return self.parse_response(response)
        except xmlrpclib.Fault:
            raise
        except Exception:
            # All unexpected errors leave connection in
            # a strange state, so we clear it.
            self.close()
            raise

        # discard any response data and raise exception
        if response.getheader("content-length", 0):
            response.read()
        raise xmlrpclib.ProtocolError(host + handler, response.status, response.reason, response.msg)
Example n. 32
0
class HttxCookieJar(HttxObject):
    '''
    A CookieJar holder to enable deepcopy semantics with locking.

    CookieJars already lock access to the internals, but cannot be
    deepcopied, which prevents separation of cookiejars into different
    domains as achieved with different HttxOptions

    A light wrapper over CookieJar with a lock for deepcopy and access
    to the internal variable _cookies is needed to achieve deepcopy
    and therefore enable separation of domains

    @ivar cookiejar: CookieJar object holding the cookies
    @type cookiejar: cookielib CookieJar
    '''

    def __init__(self):
        '''
        Constructor. It delegates construction to the base class
        L{HttxObject} and initializes the member variables
        '''
        HttxObject.__init__(self)
        self.cookiejar = CookieJar()


    def __deepcopy__(self, memo):
        '''
        Deepcopy support.

        The lock prevents access from any other part of the library to this
        CookieJar, enabling a deepcopy of the private variable into the
        private variable of the clone to enable separation of domains
        for CookieJar objects

        The existing RLock in the CookieJar objects forbids direct deepcopy

        @param memo: standard __deepcopy__ parameter to avoid circular references
        @type memo: dict
        @return: a cloned object
        @rtype: L{HttxCookieJar}
        '''
        clone = self.__class__()
        with self.lock:
            # CookieJar has a threading.RLock, so we may not deepcopy it
            # and it has no __deepcopy__ implemented
            clone.cookiejar._cookies = deepcopy(self.cookiejar._cookies, memo)

        return clone


    def add_cookie_header(self, request):
        '''
        Add a cookie header to the request if needed

        This is a simple stub for CookieJar add_cookie_header

        @param request: the request to be manipulated
        @type request: urllib2 compatible Request - L{HttxRequest} 
        '''
        self.cookiejar.add_cookie_header(request)


    def extract_cookies(self, response, request):
        '''
        Extract cookies from the response, using request as a basis to do so

        This is a simple stub for CookieJar extract_cookies

        @param response: the response containing the headers where cookies
                         may be present
        @type response: urllib2 compatible Response - L{HttxResponse} 
        @param request: the request to be manipulated
        @type request: urllib2 compatible Request - L{HttxRequest} 
        '''
        self.cookiejar.extract_cookies(response, request)
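A quick sketch of why HttxCookieJar copies the private _cookies dict rather than the jar itself: the jar embeds a thread lock, which deepcopy rejects. This assumes CPython 2's cookielib; the exact error message may vary:

from copy import deepcopy
from cookielib import CookieJar

jar = CookieJar()
try:
    deepcopy(jar)
except TypeError, e:
    # the embedded RLock cannot be copied, e.g. "can't pickle lock objects"
    print 'deepcopy(CookieJar()) failed:', e

# copying only the _cookies dict, as HttxCookieJar.__deepcopy__ does while
# holding its own lock, sidesteps the embedded RLock entirely
clone = CookieJar()
clone._cookies = deepcopy(jar._cookies)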
Example n. 33
    def _handleSecuredRedirect(self, response, sslCtx):
        '''Intercept security challenges - these are inferred by checking for
        a 302 response with a location header requesting an HTTPS endpoint
        '''
        if response.status != httplib.FOUND:
            log.debug('_handleSecuredRedirect: No HTTP redirect found in '
                      'response - passing back to caller')
            return

        # Check for redirect location
        authn_redirect_uri = response.getheader('Location')
        if authn_redirect_uri is None:
            log.error('_handleSecuredRedirect: no redirect location set for '
                      '%r response - returning', httplib.FOUND)
            # Let client decide what to do with this response
            return

        # Check the scheme and only follow the redirect here if it is HTTPS
        parsed_authn_redirect_uri = urlparse(authn_redirect_uri)
        if parsed_authn_redirect_uri.scheme != 'https':
            log.info('_handleSecuredRedirect: Non-HTTPS redirect location set '
                     'for %r response - returning', httplib.FOUND)
            return

        # Prepare request authentication redirect location; default to port
        # 443 when the netloc carries no explicit port
        host, _, portStr = parsed_authn_redirect_uri.netloc.partition(':')
        port = int(portStr) if portStr else 443
        authn_redirect_path = self.__class__._make_uri_path(
                                                    parsed_authn_redirect_uri)

        # Process cookies from the response passed into this function and set
        # them in the authentication request.  Use cookielib with a fake
        # urllib2 HTTP request class to interface with it
        response.info = lambda: response.msg
        authn_redirect_req = FakeUrllib2HTTPRequest(parsed_authn_redirect_uri)
        cookie_jar = CookieJar()
        cookie_jar.extract_cookies(response, authn_redirect_req)

        authn_redirect_ip_hdrs = authn_redirect_req.get_headers()

        # Redirect to HTTPS authentication endpoint uses GET method
        authn_conn = pyopenssl.HTTPSConnection(host, port=port,
                                               ssl_context=sslCtx)

        authn_conn.request('GET', authn_redirect_path, None,
                           authn_redirect_ip_hdrs)

        authn_response = authn_conn.getresponse()
        authn_conn.close()

        # Hack to make the httplib response urllib2.Response-like
        authn_response.info = lambda: authn_response.msg
        cookie_jar.extract_cookies(authn_response, authn_redirect_req)

        if authn_response.status == httplib.FOUND:
            # Get redirect location
            return_uri = authn_response.getheader('Location')
            if return_uri is None:
                log.error('_handleSecuredRedirect: no redirect location set '
                          'for %r response from %r', httplib.FOUND,
                          authn_redirect_uri)
                # Return the response and let the client decide what to do
                # with it
                return authn_response

            # Check URI for HTTP scheme
            parsed_return_uri = urlparse(return_uri)
            if parsed_return_uri.scheme != 'http':
                # Expecting http - don't process but instead return to client
                log.error('_handleSecuredRedirect: return URI %r is not HTTP, '
                          'passing back original response', return_uri)
                return

            # Make path
            return_uri_path = self.__class__._make_uri_path(parsed_return_uri)

            # Get host and port number; default to port 80 when no explicit
            # port is given
            (return_uri_host, _,
             return_uri_port_str) = parsed_return_uri.netloc.partition(':')
            return_uri_port = (int(return_uri_port_str)
                               if return_uri_port_str else 80)

            # Add any cookies to header
            return_req = FakeUrllib2HTTPRequest(parsed_return_uri)
            cookie_jar.add_cookie_header(return_req)
            return_headers = return_req.get_headers()

            # Invoke return URI passing headers
            return_conn = httplib.HTTPConnection(return_uri_host,
                                                 port=return_uri_port)

            return_conn.request('GET', return_uri_path, None, return_headers)
            return_uri_res = return_conn.getresponse()
            return_conn.close()

            return return_uri_res
        else:
            # was 'return res' - 'res' is undefined here, so return the
            # authentication response to the caller instead
            return authn_response
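FakeUrllib2HTTPRequest is referenced above but not defined in this snippet. cookielib only calls a handful of urllib2.Request methods, so a minimal adapter along these lines should satisfy both extract_cookies() and add_cookie_header(); this is a sketch under that assumption, not necessarily the source library's actual class:

class FakeUrllib2HTTPRequest(object):
    '''Minimal urllib2.Request work-alike accepted by cookielib.
    A sketch only - the real class may carry more state.
    '''
    def __init__(self, parsed_uri):
        self.parsed_uri = parsed_uri
        self.headers = {}

    # cookielib derives cookie domain/path rules from the full URL and host
    def get_full_url(self):
        return self.parsed_uri.geturl()

    def get_host(self):
        return self.parsed_uri.netloc

    def get_origin_req_host(self):
        return self.parsed_uri.netloc

    # 'https' vs 'http' drives the secure-cookie check
    def get_type(self):
        return self.parsed_uri.scheme

    def is_unverifiable(self):
        return False

    # header accessors cookielib uses when writing the Cookie header
    def has_header(self, name):
        return name in self.headers

    def get_header(self, name, default=None):
        return self.headers.get(name, default)

    def header_items(self):
        return self.headers.items()

    def add_unredirected_header(self, name, value):
        self.headers[name] = value

    def get_headers(self):
        return self.headers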
Example n. 34
class GeoCaching:
    
    def __init__(self, username='******', password='******'):
        
        self.jar = CookieJar()
        self.req = Request("http://www.geocaching.com/login/default.aspx?RESET=Y", urlencode({
            '__EVENTTARGET': '',
            '__EVENTARGUMENT': '',
            '__VIEWSTATE': '',
            'ctl00$ContentBody$myUsername': username,
            'ctl00$ContentBody$myPassword': password,
            'ctl00$ContentBody$cookie': '',
            'ctl00$ContentBody$Button1': 'Login'
        }))
        
        self.jar.add_cookie_header(self.req)  # no-op on a fresh, empty jar
        
        u = urlopen(self.req)
        
        self.jar.extract_cookies(u, self.req)
        
        u.close()
        
    
    def urlopen(self, *args):
        
        req = Request(*args)
        
        self.jar.add_cookie_header(req)
        
        return urlopen(req)
        
    
    def urlfetch(self, output, *args):
        
        u = self.urlopen(*args)
        
        if hasattr(output, "write"):
            f = output
        else:
            f = open(output, 'w')
        
        s = u.read(1024)
        
        while len(s) > 0:
            f.write(s)
            s = u.read(1024)
        
        u.close()
        f.close()
        
    
    def get_guid(self, gc):
        
        from HTML import GUID
        
        u = self.urlopen("http://www.geocaching.com/seek/cache_details.aspx?wp=%s" % (gc, ))
        
        # read in a two-chunk sliding window so a GUID that straddles a
        # 1024-byte boundary is still matched
        s1 = u.read(1024)
        s2 = u.read(1024)
        
        while len(s1) + len(s2) != 0:
            
            match = GUID.search(s1 + s2)
            
            if match:
                u.close()
                return match.group(1)
            
            s1 = s2
            s2 = u.read(1024)
            
        
        u.close()
        
        return False
        

    def fetch_loc(self, fileobject, gc):
        
        self.urlfetch(
            fileobject,
            "http://www.geocaching.com/seek/cache_details.aspx?wp=%s" % (gc, ),
            urlencode({
                '__EVENTTARGET': '',
                '__EVENTARGUMENT': '',
                '__VIEWSTATE': '',
                'ctl00$ContentBody$btnLocDL': 'LOC Waypoint File'
            })
        )
        
        return True
        
    
    def fetch_html(self, directory, gc, guid = None):
        
        from HTML import SCRIPTS, IMAGES, LINKS
        from HTML import script, image, link
        
        if guid is None:
            guid = self.get_guid(gc)
        
        if guid is False:
            return False
        
        u = self.urlopen("http://www.geocaching.com/seek/cdpf.aspx?guid=%s&lc=10" % (guid, ))
        f = open("%s%s.html" % (directory, gc), 'w')
        
        line = u.readline()
        
        if line[0] in ("\r", "\n"):
            line = '<?xml version="1.0" encoding="utf-8" ?>' + line
        elif line[0:9] == "<!DOCTYPE":
            line = '<?xml version="1.0" encoding="utf-8" ?>' + "\n" + line
        
        f.write(line)
        
        for line in u:
            
            line = SCRIPTS.sub(lambda m: script(m, self, gc, directory), line)
            line = IMAGES.sub(lambda m: image(m, self, gc, directory), line)
            line = LINKS.sub(lambda m: link(m, self, gc, directory), line)
            
            f.write(line)
            
        
        u.close()
        f.close()
        
        return True
        
    
    def search_lat_long(self, lat, long):
        
        from SearchParser import SearchParser
        
        s = SearchParser(self)
        
        s.parse_stream(self.urlopen(
            "http://www.geocaching.com/seek/nearest.aspx?lat=%f&lng=%f" % (lat, long)
        ))
        
        return s
        
    
    def fetch_window(self, lat1, lat2, lon1, lon2):
        
        if lat2 > lat1: lat1, lat2 = lat2, lat1
        if lon2 > lon1: lon1, lon2 = lon2, lon1
        
        f = self.urlopen(
            "http://www.geocaching.com/map/default.aspx/MapAction",
            '{"dto":{"data":{"c":1,"m":"","d":"%.9f|%.9f|%.9f|%.9f"},"ut":""}}' % (lat1, lat2, lon1, lon2),
            {
                "Origin": "http://www.geocaching.com",
                "Content-Type": "application/json"
            }
        )
        
        # parse the JSON envelope, then the JSON string nested in its "d"
        # field (assumes json's load/loads imported at module level)
        j = load(f)
        j = loads(j["d"])
        
        ret = list()
        
        for obj in j["cs"]["cc"]:
            ret.append({
                "gc": obj["gc"],
                "type": obj["ctid"],
                "title": obj["nn"],
                "lat": int(round(obj["lat"] * 1E6)),
                "lon": int(round(obj["lon"] * 1E6)),
                "found": obj["f"],
                "disabled": not obj["ia"]
            })
        
        return ret
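A short usage sketch for the class above; the credentials and the GC1234 waypoint code are placeholders, not real values:

# log in and reuse the session cookies for all subsequent requests
site = GeoCaching(username='someuser', password='secret')

# stream the LOC waypoint file for a cache straight to disk
site.fetch_loc('GC1234.loc', 'GC1234')

# fetch_window reports lat/lon in integer micro-degrees; scale them back
for cache in site.fetch_window(52.0, 52.1, 4.0, 4.1):
    print cache['gc'], cache['lat'] / 1e6, cache['lon'] / 1e6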