Example #1
    def extract_cookiejar(self):
        """
        Extract cookies that the pycurl instance knows about.

        Returns `CookieJar` object.
        """

        # Example of line:
        # www.google.com\tFALSE\t/accounts/\tFALSE\t0\tGoogleAccountsLocale_session\ten
        # Fields:
        # * domain
        # * whether or not all machines under that domain can read the cookie's information.
        # * path
        # * Secure Flag: whether or not a secure connection (HTTPS) is required to read the cookie.
        # * exp. timestamp
        # * name
        # * value
        cookiejar = CookieJar()
        for line in self.curl.getinfo(pycurl.INFO_COOKIELIST):
            values = line.split('\t')
            cookie = create_cookie(
                name=values[5],
                value=values[6],
                domain=values[0],
                path=values[2],
                secure=values[3] == "TRUE",
                expires=int(values[4]) if values[4] else None,
            )
            cookiejar.set_cookie(cookie)
        return cookiejar
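
This snippet (and Examples #12 and #29 below) assumes a create_cookie() helper that never appears in the listing; Grab ships such a helper, but its exact signature is not shown here. A minimal sketch under that assumption:

try:
    from cookielib import Cookie          # Python 2
except ImportError:
    from http.cookiejar import Cookie     # Python 3

def create_cookie(name, value, domain='', path='/', secure=False,
                  expires=None, **extra):
    # Hypothetical reconstruction of the create_cookie() helper these
    # snippets assume; the defaults and the tolerant **extra catch-all
    # are guesses, not the library's actual API.
    return Cookie(version=0, name=name, value=value,
                  port=None, port_specified=False,
                  domain=domain, domain_specified=bool(domain),
                  domain_initial_dot=domain.startswith('.'),
                  path=path, path_specified=True,
                  secure=secure, expires=expires,
                  discard=expires is None,
                  comment=None, comment_url=None,
                  rest=extra.pop('rest', {'HttpOnly': None}),
                  rfc2109=False)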
Example #2
def get_opener():
    global OPENER
    if OPENER:
        return OPENER
    cj = CookieJar()
    ck = Cookie(
        version=0,
        name="Locale",
        value="Russian",
        port=None,
        port_specified=False,
        domain="acm.timus.ru",
        domain_specified=False,
        domain_initial_dot=False,
        path="/",
        path_specified=True,
        secure=False,
        expires=None,
        discard=True,
        comment=None,
        comment_url=None,
        rest={"HttpOnly": None},
        rfc2109=False,
    )
    cj.set_cookie(ck)
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
    data = urllib.urlencode({"Action": "edit", "JudgeID": JUDGE_ID, "Password": PASSWORD})
    response = opener.open(AUTH_URL, data)
    OPENER = opener
    return opener
Example #3
    def set_session_cookie(self, username, password):
        """
        Sets a session with Django test client and logs Ghost in by creating
        a session cookie for it.

        Args:
            username (str): The username to login with.
            password (str): The password to login with.
        """
        client = Client(enforce_csrf_checks=False)
        self.assertEqual(client.login(username=username, password=password),
                         True)
        sessionid = client.cookies['sessionid']
        django_cookie = Cookie(version=0,
                               name='sessionid',
                               value=sessionid.value,
                               port=None,
                               port_specified=False,
                               domain='localhost',
                               domain_specified=True,
                               domain_initial_dot=False,
                               path='/',
                               path_specified=True,
                               secure=False,
                               expires=None,
                               discard=True,
                               comment=None,
                               comment_url=None,
                               rest=None,
                               rfc2109=False)

        cj = CookieJar()
        cj.set_cookie(django_cookie)
        self.ghost.load_cookies(cj)
Example #4
    def test_cookie_store(self):
        cj = CookieJar()
        test_email = "*****@*****.**"
        test_cookies = [Cookie(version=0, name='Name', value='1',
                               port=None, port_specified=False,
                               domain='www.example.com',
                               domain_specified=False,
                               domain_initial_dot=False,
                               path='/', path_specified=True, secure=False,
                               expires=None,
                               discard=True, comment=None, comment_url=None,
                               rest={'HttpOnly': None},
                               rfc2109=False)]
        for c in test_cookies:
            cj.set_cookie(c)
        x = Credentials(id=test_email)
        cookie_list = [c for c in cj]
        x.cookies = cookie_list
        x.put()

        y = Credentials.get_by_id(test_email)
        self.assertIsNotNone(y)
        self.assertEquals(y.key.id(), test_email)
        stored_credentials_dict = [sc.__dict__ for sc in y.cookies]
        self.assertEquals(stored_credentials_dict,
                          [sc.__dict__ for sc in test_cookies])
Example #6
 def module_run(self, domains):
     base_url = 'https://www.bing.com/search'
     cnt = 0
     tot_new = 0  # running count of newly added hosts ('new' below stays a loop flag)
     for domain in domains:
         self.heading(domain, level=0)
         base_query = 'domain:' + domain
         pattern = '"b_algo"><h2><a href="(?:\w*://)*(\S+?)\.%s[^"]*"' % (domain)
         subs = []
         # control variables
         new = True
         page = 0
         nr = 50
         cookiejar = CookieJar()
         cookiejar.set_cookie(self.make_cookie('SRCHHPGUSR', 'NEWWND=0&NRSLT=%d&SRCHLANG=&AS=1' % (nr), '.bing.com'))
         # execute search engine queries and scrape results storing subdomains in a list
         # loop until no new subdomains are found
         while new:
             content = None
             query = ''
             # build query based on results of previous results
             for sub in subs:
                 query += ' -domain:%s.%s' % (sub, domain)
             full_query = base_query + query
             url = '%s?first=%d&q=%s' % (base_url, (page*nr), urllib.quote_plus(full_query))
             # bing errors out at > 2059 characters not including the protocol
             if len(url) > 2066: url = url[:2066]
             self.verbose('URL: %s' % (url))
             # send query to search engine
             resp = self.request(url, cookiejar=cookiejar)
             if resp.status_code != 200:
                 self.alert('Bing has encountered an error. Please submit an issue for debugging.')
                 break
             content = resp.text
             sites = re.findall(pattern, content)
             # create a unique list
             sites = list(set(sites))
             new = False
             # add subdomain to list if not already exists
             for site in sites:
                 if site not in subs:
                     subs.append(site)
                     new = True
                     host = '%s.%s' % (site, domain)
                     self.output('%s' % (host))
                     tot_new += self.add_hosts(host)
             if not new:
                 # exit if all subdomains have been found
                 if not '>Next</a>' in content:
                     break
                 else:
                     page += 1
                     self.verbose('No New Subdomains Found on the Current Page. Jumping to Result %d.' % ((page*nr)+1))
                     new = True
             # sleep script to avoid lock-out
             self.verbose('Sleeping to avoid lockout...')
             time.sleep(random.randint(5,15))
         cnt += len(subs)
     self.summarize(tot_new, cnt)
Example #7
    def cookiejar(self):
        cookiejar = CookieJar()
        for domain, items in self._cookie.items():
            for path, names in items.items():
                for name, cookie in names.items():
                    cookiejar.set_cookie(cookie)

        return cookiejar
Example #8
    def cookiejar(self):
        cookiejar = CookieJar()
        for domain, items in self.cookie.items():
            for path, names in items.items():
                for name, cookie in names.items():
                    cookiejar.set_cookie(cookie)

        return cookiejar
Example #9
class Browser(object):
    def __init__(self, base_url):
        self.cookie_jar = CookieJar()
        self.opener = build_opener(HTTPCookieProcessor(self.cookie_jar))
        self.base_url = base_url

    def open(self, url, **kwargs):
        _raw = kwargs.pop('_raw', False)
        _is_json = kwargs.pop('_is_json', True)  # popped so it is not urlencoded below
        url = urljoin(self.base_url, url)
        data = urlencode(kwargs).encode()
        headers = {
            u"Content-Type" : u"application/x-www-form-urlencoded",
        }

        req = Request(url, data, headers)
        res = self.opener.open(req)

        with closing(res) as fp:
            content = fp.read()

        content = content.decode()

        if not _raw:
            content = json.loads(content)

        return res, content

    def set_cookie(self, name, value):
        url = urlparse(self.base_url)
        cookie = Cookie(
            version=0,
            name=name,
            value=value,
            port=None,
            port_specified=False,
            domain=url.netloc,
            domain_specified=False,
            domain_initial_dot=False,
            path=url.path,
            path_specified=True,
            secure=False,
            expires=sys.maxsize,
            discard=False,
            comment=None,
            comment_url=None,
            rest={},
            rfc2109=False,
        )

        self.cookie_jar.set_cookie(cookie)

    def __getitem__(self, url):
        return functools.partial(self.open, url)
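
For context, __getitem__ lets callers treat the browser as a map of endpoints. A usage sketch; the base URL, path, and form fields below are all invented:

browser = Browser('http://localhost:8080/')
# browser['api/login'](...) is equivalent to browser.open('api/login', ...)
res, data = browser['api/login'](username='alice', password='secret')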
Example #10
 def module_run(self, domains):
     base_url = 'https://www.bing.com/search'
     for domain in domains:
         self.heading(domain, level=0)
         base_query = 'domain:' + domain
         pattern = '"b_algo"><h2><a href="(?:\w*://)*(\S+?)\.%s[^"]*"' % (domain)
         subs = []
         # control variables
         new = True
         page = 0
         nr = 50
         cookiejar = CookieJar()
         cookiejar.set_cookie(self.make_cookie('SRCHHPGUSR', 'NEWWND=0&NRSLT=%d&SRCHLANG=&AS=1' % (nr), '.bing.com'))
         # execute search engine queries and scrape results storing subdomains in a list
         # loop until no new subdomains are found
         while new:
             content = None
             query = ''
             # build query based on results of previous results
             for sub in subs:
                 query += ' -domain:%s.%s' % (sub, domain)
             full_query = base_query + query
             url = '%s?first=%d&q=%s' % (base_url, (page*nr), urllib.quote_plus(full_query))
             # bing errors out at > 2059 characters not including the protocol
             if len(url) > 2066: url = url[:2066]
             self.verbose('URL: %s' % (url))
             # send query to search engine
             resp = self.request(url, cookiejar=cookiejar)
             if resp.status_code != 200:
                 self.alert('Bing has encountered an error. Please submit an issue for debugging.')
                 break
             content = resp.text
             sites = re.findall(pattern, content)
             # create a unique list
             sites = list(set(sites))
             new = False
             # add subdomain to list if not already exists
             for site in sites:
                 if site not in subs:
                     subs.append(site)
                     new = True
                     host = '%s.%s' % (site, domain)
                     self.output('%s' % (host))
                     self.add_hosts(host)
             if not new:
                 # exit if all subdomains have been found
                 if not '>Next</a>' in content:
                     break
                 else:
                     page += 1
                     self.verbose('No New Subdomains Found on the Current Page. Jumping to Result %d.' % ((page*nr)+1))
                     new = True
             # sleep script to avoid lock-out
             self.verbose('Sleeping to avoid lockout...')
             time.sleep(random.randint(5,15))
Example #11
 def cookies(self):
     jar = CookieJar()
     if self.settings:
         jar.set_cookie(Cookie(
             version=0, name='settings',
             value=urllib.quote(phpserialize.serialize(self.settings)),
             port=None, port_specified=False, domain='mediapoisk.info',
             domain_specified=True, domain_initial_dot=True, path='/', path_specified=True, secure=False,
             expires=None, discard=True, comment=None, comment_url=None, rest=None, rfc2109=True
             ))
     return jar
Example #12
    def test_cookiejar(self):
        c1 = create_cookie('foo', 'bar')
        c2 = create_cookie('foo', 'bar')
        self.assertFalse(c1 == c2)

        c = create_cookie('foo', 'bar', domain='.dumpz.org')
        self.assertEquals(c.domain, '.dumpz.org')

        cj = CookieJar()
        cj.set_cookie(create_cookie('foo', 'bar', domain='foo.com'))
        cj.set_cookie(create_cookie('foo', 'bar', domain='bar.com'))
        self.assertEqual(len(cj), 2)
Example #14
class Yad2Client(object):
    def __init__(self):
        proxy = ProxyHandler(PROXY)
        self.cj = CookieJar()
        opener = build_opener(HTTPCookieProcessor(self.cj), proxy)
        install_opener(opener)

    def add_cookie(self, name, value):
        cookie = Cookie(version=0,
                        name=name,
                        value=value,
                        port=None,
                        port_specified=False,
                        domain='yad2.co.il',
                        domain_specified=False,
                        domain_initial_dot=False,
                        path='/',
                        path_specified=True,
                        secure=False,
                        expires=None,
                        discard=True,
                        comment=None,
                        comment_url=None,
                        rest={'HttpOnly': None},
                        rfc2109=False)

        self.cj.set_cookie(cookie)

    def clear_cookies(self):
        self.cj.clear()

    def get_url(self, url, headers=None, args=None):
        headers = headers or {}  # avoid the shared mutable-default-argument pitfall
        headers["Host"] = "m.yad2.co.il"
        headers[
            "Accept"] = "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8"
        headers[
            "User-Agent"] = "Mozilla/5.0 (Linux; Android 4.2.2; Android SDK built for x86 Build/KK) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/30.0.0.0 Mobile Safari/537.36"
        headers["Accept-Language"] = "en-US"

        args = args or {}
        args["DeviceType"] = "Redmi Note 3"
        args["AppVersion"] = "2.9"
        args["AppType"] = "Android"
        args["OSVersion"] = "5.0.2"
        args["udid"] = "582ffa3d-a4cf-425a-8b36-9874d7464015"

        url = url + "?" + urlencode(args)
        req = Request(url, headers=headers)
        response = urlopen(req)

        return response.read()
Example #15
 def module_run(self, domains):
     url = 'http://searchdns.netcraft.com/'
     pattern = '<td align\=\"left\">\s*<a href=\"http://(.*?)/"'
     # answer challenge cookie
     cookiejar = CookieJar()
     payload = {'restriction': 'site+ends+with', 'host': 'test.com'}
     resp = self.request(url, payload=payload, cookiejar=cookiejar)
     cookiejar = resp.cookiejar
     for cookie in cookiejar:
         if cookie.name == 'netcraft_js_verification_challenge':
             challenge = cookie.value
             response = hashlib.sha1(urllib.unquote(challenge)).hexdigest()
             cookiejar.set_cookie(
                 self.make_cookie('netcraft_js_verification_response',
                                  '%s' % response, '.netcraft.com'))
             break
     cnt = 0
     new = 0
     for domain in domains:
         self.heading(domain, level=0)
         payload['host'] = domain
         subs = []
         # execute search engine queries and scrape results storing subdomains in a list
         # loop until no Next Page is available
         while True:
             self.verbose('URL: %s?%s' % (url, urllib.urlencode(payload)))
             resp = self.request(url, payload=payload, cookiejar=cookiejar)
             content = resp.text
             sites = re.findall(pattern, content)
             # create a unique list
             sites = list(set(sites))
             # add subdomain to list if not already exists
             for site in sites:
                 if site not in subs:
                     subs.append(site)
                     self.output('%s' % (site))
                     new += self.add_hosts(site)
             # verifies if there's more pages to look while grabbing the correct
             # values for our payload...
             link = re.findall(r'(\blast\=\b|\bfrom\=\b)(.*?)&', content)
             if not link:
                 break
             else:
                 payload['last'] = link[0][1]
                 payload['from'] = link[1][1]
                 self.verbose('Next page available! Requesting again...')
                 # sleep script to avoid lock-out
                 self.verbose('Sleeping to Avoid Lock-out...')
                 time.sleep(random.randint(5, 15))
         cnt += len(subs)
     self.summarize(new, cnt)
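
The challenge/response step above boils down to a SHA-1 digest of the URL-unquoted challenge cookie value. As a standalone Python 2 sketch (matching the snippet's Python 2 flavour; the challenge string is made up):

import hashlib
import urllib

# Stand-in for a netcraft_js_verification_challenge cookie value:
challenge = 'abc%20def'
# The hex SHA-1 of the unquoted challenge becomes the
# netcraft_js_verification_response cookie:
response = hashlib.sha1(urllib.unquote(challenge)).hexdigest()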
Example #16
def get_cookie_jar():
    try:
        from cookielib import Cookie, CookieJar         # Python 2
    except ImportError:
        from http.cookiejar import Cookie, CookieJar    # Python 3
    cj = CookieJar()
    # Cookie(version, name, value, port, port_specified, domain,
    # domain_specified, domain_initial_dot, path, path_specified,
    # secure, expires, discard, comment, comment_url, rest, rfc2109)
    c = Cookie(version=0, name=COOKIE_NAME, value=COOKIE_VALUE, port=None,
               port_specified=False, domain='scplanner.net',
               domain_specified=True, domain_initial_dot=False, path='/',
               path_specified=True, secure=False, expires=None, discard=True,
               comment=None, comment_url=None, rest=None, rfc2109=True)
    cj.set_cookie(c)
    return cj
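
One plausible way to use the jar, shown with urllib2 to match the Python 2 branch of the import; the URL is assumed from the cookie's domain:

import urllib2

opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(get_cookie_jar()))
html = opener.open('https://scplanner.net/').read()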
Example #17
def get_cookies_from_response(url):
    cookiejar = CookieJar()

    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookiejar))
    opener.open(url)

    # add a new cookie or replace an old one
    newcookie = make_cookie('newcookie', '11111', '.baidu.com', '/')

    # remove a cookie
    cookiejar.set_cookie(newcookie)
    cookiejar.clear('.baidu.com', '/', 'newcookie')

    return cookiejar
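
This snippet, like several others in this listing (Examples #20, #22, #32, #37), calls a make_cookie() helper that is never shown. A minimal sketch of what such a helper could look like; every default here is an assumption:

try:
    from cookielib import Cookie          # Python 2
except ImportError:
    from http.cookiejar import Cookie     # Python 3

def make_cookie(name, value, domain='', path='/'):
    # Hypothetical helper matching the make_cookie(...) call sites in
    # these examples; the defaults are guesses, not a library's actual API.
    return Cookie(version=0, name=name, value=value,
                  port=None, port_specified=False,
                  domain=domain, domain_specified=bool(domain),
                  domain_initial_dot=domain.startswith('.'),
                  path=path, path_specified=True,
                  secure=False, expires=None, discard=True,
                  comment=None, comment_url=None,
                  rest={'HttpOnly': None}, rfc2109=False)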
Example #18
 def module_run(self, domains):
     url = "http://searchdns.netcraft.com/"
     pattern = '<td align\="left">\s*<a href="http://(.*?)/"'
     # answer challenge cookie
     cookiejar = CookieJar()
     payload = {"restriction": "site+ends+with", "host": "test.com"}
     resp = self.request(url, payload=payload, cookiejar=cookiejar)
     cookiejar = resp.cookiejar
     for cookie in cookiejar:
         if cookie.name == "netcraft_js_verification_challenge":
             challenge = cookie.value
             response = hashlib.sha1(urllib.unquote(challenge)).hexdigest()
             cookiejar.set_cookie(
                 self.make_cookie("netcraft_js_verification_response", "%s" % response, ".netcraft.com")
             )
             break
     for domain in domains:
         self.heading(domain, level=0)
         payload["host"] = domain
         subs = []
         # execute search engine queries and scrape results storing subdomains in a list
         # loop until no Next Page is available
         while True:
             self.verbose("URL: %s?%s" % (url, encode_payload(payload)))
             resp = self.request(url, payload=payload, cookiejar=cookiejar)
             content = resp.text
             sites = re.findall(pattern, content)
             # create a unique list
             sites = list(set(sites))
             # add subdomain to list if not already exists
             for site in sites:
                 if site not in subs:
                     subs.append(site)
                     self.output("%s" % (site))
                     self.add_hosts(site)
             # verifies if there's more pages to look while grabbing the correct
             # values for our payload...
             link = re.findall(r"(\blast\=\b|\bfrom\=\b)(.*?)&", content)
             if not link:
                 break
             else:
                 payload["last"] = link[0][1]
                 payload["from"] = link[1][1]
                 self.verbose("Next page available! Requesting again...")
                 # sleep script to avoid lock-out
                 self.verbose("Sleeping to Avoid Lock-out...")
                 time.sleep(random.randint(5, 15))
         if not subs:
             self.output("No results found.")
Example #19
 def module_run(self, domains):
     url = 'http://searchdns.netcraft.com/'
     pattern = '<td align\=\"left\">\s*<a href=\"http://(.*?)/"'
     # answer challenge cookie
     cookiejar = CookieJar()
     payload = {'restriction': 'site+ends+with', 'host': 'test.com'}
     resp = self.request(url, payload=payload, cookiejar=cookiejar)
     cookiejar = resp.cookiejar
     for cookie in cookiejar:
         if cookie.name == 'netcraft_js_verification_challenge':
             challenge = cookie.value
             response = hashlib.sha1(urllib.unquote(challenge)).hexdigest()
             cookiejar.set_cookie(self.make_cookie('netcraft_js_verification_response', '%s' % response, '.netcraft.com'))
             break
     cnt = 0
     new = 0
     for domain in domains:
         self.heading(domain, level=0)
         payload['host'] = domain
         subs = []
         # execute search engine queries and scrape results storing subdomains in a list
         # loop until no Next Page is available
         while True:
             self.verbose('URL: %s?%s' % (url, urllib.urlencode(payload)))
             resp = self.request(url, payload=payload, cookiejar=cookiejar)
             content = resp.text
             sites = re.findall(pattern, content)
             # create a unique list
             sites = list(set(sites))
             # add subdomain to list if not already exists
             for site in sites:
                 if site not in subs:
                     subs.append(site)
                     self.output('%s' % (site))
                     new += self.add_hosts(site)
             # verifies if there's more pages to look while grabbing the correct 
             # values for our payload...
             link = re.findall(r'(\blast\=\b|\bfrom\=\b)(.*?)&', content)
             if not link:
                 break
             else:
                 payload['last'] = link[0][1]
                 payload['from'] = link[1][1]
                 self.verbose('Next page available! Requesting again...' )
                 # sleep script to avoid lock-out
                 self.verbose('Sleeping to Avoid Lock-out...')
                 time.sleep(random.randint(5,15))
         cnt += len(subs)
     self.summarize(new, cnt)
Example #20
def get_cookies_from_response(url):
    cookiejar = CookieJar()

    opener = urllib2.build_opener(
        urllib2.HTTPCookieProcessor(cookiejar))
    opener.open(url)

    # add a new cookie or replace an old one
    newcookie = make_cookie('newcookie', '11111', '.baidu.com', '/')

    # remove a cookie
    cookiejar.set_cookie(newcookie)
    cookiejar.clear('.baidu.com', '/', 'newcookie')

    return cookiejar
Example #21
def test_cookielib_compatibility():
    cj = CookieJar()
    # Pin the time so the hard-coded cookie strings are still within their validity window
    cj._now = cj._policy._now = time.mktime((2012, 1, 1, 0, 0, 0, 0, 0, 0))

    request = Request('http://test.com')
    parser = HTTPResponse()
    parser.feed(MULTI_COOKIE_RESPONSE)
    cookies = cj.make_cookies(parser, request)
    # Don't use extract_cookies directly, as time can not be set there manually for testing
    for cookie in cookies:
        if cj._policy.set_ok(cookie, request):
            cj.set_cookie(cookie)
    # Three valid, not expired cookies placed
    assert len(list(cj)) == 3
Example #22
def load_cookies(csv_filename, basedomain):
    """ Load cookies: cookies.sqlite -> cookies.csv -> make_cookie().
    Firefox only.
    """
    cj = CookieJar()
    save_to_csv(csv_filename, basedomain)  # dump cookies.sqlite into a CSV file
    cookies = csv.reader(open(csv_filename, 'r'))  # read the cookies back
    fields = sqlite_read()  # get the table's column names
    for cookie in cookies:
        aa = dict(zip(fields, cookie))  # zip the two lists into a dict
        # pprint.pprint(aa)
        c = make_cookie(aa['name'], aa['value'])
        cj.set_cookie(c)  # set each cookie in turn; they do not overwrite one another
    for index, cookie in enumerate(cj):  # show the cookies
        print('[', index, ']', cookie)
    return cj
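
load_cookies() depends on save_to_csv() and sqlite_read(), which the snippet does not show. A rough sketch of both against Firefox's cookies.sqlite; the table and column names are assumptions about the Firefox schema:

import csv
import sqlite3

SQLITE_PATH = 'cookies.sqlite'  # assumed path to Firefox's cookie store

def sqlite_read():
    # Hypothetical: return the column names of the moz_cookies table.
    conn = sqlite3.connect(SQLITE_PATH)
    cur = conn.execute('SELECT * FROM moz_cookies LIMIT 0')
    fields = [d[0] for d in cur.description]
    conn.close()
    return fields

def save_to_csv(csv_filename, basedomain):
    # Hypothetical: dump every moz_cookies row whose host matches
    # basedomain into the CSV that load_cookies() then reads back.
    conn = sqlite3.connect(SQLITE_PATH)
    rows = conn.execute('SELECT * FROM moz_cookies WHERE host LIKE ?',
                        ('%' + basedomain,))
    with open(csv_filename, 'w') as fh:
        csv.writer(fh).writerows(rows)
    conn.close()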
Example #23
def test_cookielib_compatibility():
    cj = CookieJar()
    # Pin the time so the hard-coded cookie strings are still within their validity window
    cj._now = cj._policy._now = time.mktime((2012, 1, 1, 0, 0, 0, 0, 0, 0))

    request = Request('')
    parser = HTTPResponse()
    parser.feed(MULTI_COOKIE_RESPONSE)
    cookies = cj.make_cookies(parser, request)
    # Don't use extract_cookies directly, as time can not be set there manually for testing
    for cookie in cookies:
        if cj._policy.set_ok(cookie, request):
            cj.set_cookie(cookie)
    # Three valid, not expired cookies placed
    assert len(list(cj)) == 3
Example #24
class Wypok(object):
    def __init__(self, cookies):
        self.cj = CookieJar()
        for i in json.loads(cookies):
            c = Cookie(
                None,              # version
                i["name"],
                i["value"],
                "80",              # port
                True,              # port_specified
                i["domain"],
                None,              # domain_specified
                None,              # domain_initial_dot
                i["path"],
                None,              # path_specified
                i["secure"],
                time() + (60 * 60 * 24 * 365),  # expires: one year out (stray extra *60 removed)
                True,              # discard (was the misplaced string "TestCookie")
                None,              # comment
                None,              # comment_url
                None,              # rest
            )
            self.cj.set_cookie(c)

    def get(self, url):
        r = requests.get(url, cookies=self.cj)
        return r.content

    def entries(self, url):

        ht = self.get(url)
        soup = BeautifulSoup(ht, "html.parser")
        ret = []
        for i in soup.find_all("li", class_=re.compile("link")):
            t = i.find("h2").text.strip()
            link = i.find("h2").find("a").get("href")
            desc = i.find("p", class_="text").text.strip()
            tags = []
            for j in i.find_all("a", class_="tag"):
                if "unhide" not in j.attrs["class"]:
                    tags.append(j.text.strip().lstrip("#"))

            wykopUrl = i.find("div", class_="diggbox").find("a").get("href")

            if not "partner" in wykopUrl and ("voteUp" in wykopUrl or "voteRemove" in wykopUrl):
                ret.append(Znalezisko(t, desc, link, wykopUrl, tags, handle=self))

        return ret
Example #25
 def __read_cj(self):
     MAX_EPS = 86400 #24 hours
     if os.path.exists("cookiejar.dat"):
         modtime = os.stat("cookiejar.dat").st_mtime
         if time.time() - modtime > MAX_EPS:
             return None
         else:
             with open("cookiejar.dat", "rb") as fh:  # pickle requires binary mode
                 dd = pickle.load(fh)
             cj = CookieJar()
             for c in dd["cookies"]:
                 cj.set_cookie(c)
             self.__uid = dd["uid"]
             self.__nickname = dd["nick"]
             return cj
     else:
         return None
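
The reader above implies a matching writer. A minimal sketch of that counterpart on the same class, assuming only what __read_cj() itself reveals (the "cookies"/"uid"/"nick" dict layout; everything else is guesswork):

 def __write_cj(self, cj):
     # Hypothetical counterpart to __read_cj(): persist the jar in the
     # exact dict shape that __read_cj() unpickles.
     dd = {"cookies": list(cj),
           "uid": self.__uid,
           "nick": self.__nickname}
     with open("cookiejar.dat", "wb") as fh:
         pickle.dump(dd, fh)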
Example #27
def main():
    c = Cookie(None, 'sid', '157272379', '443', '443', "10.0.199.8", None, None, '/', None, False, False, 'TestCookie', None, None, None)

    cj = CookieJar()
    cj.set_cookie(c)

    print ">>> cj:", cj

    contextFactory = WebClientContextFactory()
    agent = CookieAgent(RedirectAgent(Agent(reactor, contextFactory)), cj)

    d = agent.request('GET', 'https://10.0.199.8/datetime_get_request_periodic')

    d.addCallbacks(getBody, log.err)
    d.addCallback(lambda x: reactor.stop())
    reactor.run()
Example #28
class UESTC():
    def __init__(self, username, password, **kwargs):
        if username == '' or password == '':
            raise SchoolException('用户名或密码为空')

        self.username = username
        self.password = password
        self.logged_in = False
        self.courses = list()
        self.cookies = CookieJar()

    def check_requires_vcode(self):
        url = 'http://idas.uestc.edu.cn/authserver/needCaptcha.html?username=%s&_=%d' % (
            self.username, int(time() * 1000)
        )
        headers = header(referer='http://idas.uestc.edu.cn/authserver/login?'
                                 'service=http://portal.uestc.edu.cn/index.portal')
        r = get(url, headers=headers, cookies=self.cookies)
        for ck in r.cookies:
            self.cookies.set_cookie(ck)

        if r.content.startswith('false'):
            return False
        else:
            return True

    def login(self):
        if self.check_requires_vcode():
            raise SchoolException('需要验证码')

        page = get('http://idas.uestc.edu.cn/authserver/login?service=http://portal.uestc.edu.cn/index.portal', headers=header())
        for c in page.cookies:
            self.cookies.set_cookie(c)

        token = match_value('lt', page.content)
        data = {
            '_eventId': 'submit',
            'dllt': 'userNamePasswordLogin',
            'execution': 'e1s1',
            'lt': token,
            'password': self.password,
            'username': self.username,
            'rmShown': '1'
        }

        signon = post('http://idas.uestc.edu.cn/authserver/login?service=http://portal.uestc.edu.cn/index.portal',
                      headers=header(), data=data, cookies=self.cookies)
        for c in signon.cookies:
            self.cookies.set_cookie(c)
        for history in signon.history:
            for c in history.cookies:
                self.cookies.set_cookie(c)

        if signon.url.startswith('http://portal.uestc.edu.cn'):
            return True
        else:
            raise SchoolException('登录失败')
Example #29
    def load_from_file(self, path):
        """
        Load cookies from the file.

        Content of file should be a JSON-serialized list of dicts.
        """

        with open(path) as inf:
            data = inf.read()
            if data:
                items = json.loads(data)
            else:
                items = []  # the file holds a JSON list, so default to an empty list
        jar = CookieJar()
        for item in items:
            jar.set_cookie(create_cookie(**item))
        self.update(jar)
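
Per the docstring, the file is a JSON-serialized list of dicts whose keys should line up with create_cookie() keyword arguments. A sketch that produces such a file; the field names are guesses:

import json

items = [
    {'name': 'sid', 'value': 'abc123', 'domain': '.example.com', 'path': '/'},
    {'name': 'lang', 'value': 'en', 'domain': '.example.com', 'path': '/'},
]
with open('cookies.json', 'w') as outf:
    json.dump(items, outf)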
Example #31
def to_cookiejar(cookies):
    """Build CookieJar object from dict, list or tuple

    Args:
    - `cookies`: (dict, list or tuple)

    Returns:
    - `cookiejar`: `CookieJar` instance
    """
    if isinstance(cookies, CookieJar):
        return cookies

    tmp_cookies = []
    if isinstance(cookies, (TupleType, ListType)):
        tmp_cookies = cookies
    elif isinstance(cookies, DictType):
        tmp_cookies = [(k, v) for k, v in cookies.iteritems()]
    else:
        raise ValueError("Unsupported argument")

    cookie_jar = CookieJar()
    for k, v in tmp_cookies:
        cookie = Cookie(
            version=0,
            name=k,
            value=v,
            port=None,
            port_specified=False,
            domain='',
            domain_specified=False,
            domain_initial_dot=False,
            path='/',
            path_specified=True,
            secure=False,
            expires=None,
            discard=True,
            comment=None,
            comment_url=None,
            rest={'HttpOnly': None},
            rfc2109=False)
        cookie_jar.set_cookie(cookie)

    return cookie_jar
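
Usage sketch for the three accepted input shapes (values invented):

jar = to_cookiejar({'sid': 'abc123', 'lang': 'en'})    # dict of name -> value
jar = to_cookiejar([('sid', 'abc123')])                # list/tuple of pairs
jar = to_cookiejar(jar)                                # a CookieJar passes through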
Example #32
def load_cookies2():
    """ 加载 cookie:变量 -> make_cookie()
    """
    cj = CookieJar()
    cookie1 = [
        'session_data_places',
        '"8bd6acbc1de59b1dfceb2099ae7c2b3a:JeRQM1TZ9lwn_7e9KS2LcAGQjUai1Y5TbtUsr1D-qUj_rleZ8zKEu9iUVL0V-r9_icFERBwtpsJE7eusjbh7G_M3Jlem9ojkSkOpjNBuv4qvPFUmtBtgECSfJr2aHDQPibmo1jZKgrVvjuMCYgbJeGoCVqsU8Ru3UIpHnGwrCubtNEvfFQB0dEickfTDUSlprWpH4Oq8YOmAimJ00nI3AJUqub5DfY0kZLVL6KFl7L-xn79HwEhzzE_dVbYi7lD_6ZUi-Q5ob6197UL8dp1zJnD6OQ37iriLG6uZ4wAliRgUt8xIb9ebg60o6viRmat2tkC9fJwbb2ghEoGResxA8-1p6h-Z26nFH_QPG00J7N27UA7B3tB1aIlgufn48Ng6b-2EAp37j01WR5z5S0QposiYl4aWLOGVZIhlLOgfRW2v3yv00aooW2jXhSjMGEeQ8TNyzWRQUPiNYEXqV-Hu52ZFB7y9jGqFRGgL0fQqzP7zb3c0out8omE54eNOUS9QoYK46GUt7Mgiql-jisc4sJPEdttuRTx2wsbJn8DJfV-_Y7A6Lwi8j9cRyqAYwtALD9sCeT7QM3D6yacgBhf9CMCmNTRIK-ZH34a85ZlcFgBUiUY778f268C3tqCxlozD-XMNgpRk7VC9UU5a0COjNZPwo_zgI759dx1IognjndUSrl9YU-WX02sY4PNYoR0AXIIV-cqfpBkQq6uM_UBhSEdNxCdlAk8rqZBoPRFuh98DpExdW3xuScE_BkeVB1WtT7VW_82rEoqxrYP5BLoISK4wck_D8fvQf6AjjzJLPtRKVZCpLrbl0Rq8IhWjlfrinAx6rlCGyIN5bEgKrnoxxqFI4DNg2eg84iVh4wTwqr72Gwe-OA9q6xLTgEIr2gfCrb3WqsMFQR8FWSjqCy_YyyBQzsBNGd_S5lZ8iruzzlR_5OkeHkTFX8qkfgJTNsXWwL-s9U3dCJITWqi35qRky7XJ4-a_qY5WnPtjh9nP1G7p9SLSrXzeJ7Pch4bi2T_xddzmo5nZGx8MxO-dmKY1rHG5SdGQNveHQQOYuod6YwWJTa_YcxObJ72itblRIFNB"'
    ]
    cookie2 = ['session_id_places', 'True']
    cookies = [cookie1, cookie2]  # gather the cookies into a list
    fields = ['name', 'value']
    for cookie in cookies:
        aa = dict(zip(fields, cookie))  # zip the two lists into a dict
        # pprint.pprint(aa)
        c = make_cookie(aa['name'], aa['value'])
        cj.set_cookie(c)  # set each cookie in turn; they do not overwrite one another
    for index, cookie in enumerate(cj):  # show the cookies
        print('[', index, ']', cookie)
    return cj
Example #33
class Yad2Client(object):

    def __init__(self):        
        proxy = ProxyHandler(PROXY)
        self.cj = CookieJar()
        opener = build_opener(HTTPCookieProcessor(self.cj), proxy)
        install_opener(opener)


    def add_cookie(self, name, value):
        cookie =  Cookie(version=0, name=name, value=value, port=None,
                         port_specified=False, domain='yad2.co.il',
                         domain_specified=False, domain_initial_dot=False,
                         path='/', path_specified=True, secure=False, expires=None,
                         discard=True, comment=None, comment_url=None,
                         rest={'HttpOnly': None}, rfc2109=False)
        
        self.cj.set_cookie(cookie)


    def clear_cookies(self):
        self.cj.clear()


    def get_url(self, url, headers=None, args=None):
        headers = headers or {}  # avoid the shared mutable-default-argument pitfall
        headers["Host"] = "m.yad2.co.il"
        headers["Accept"] = "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8"
        headers["User-Agent"] = "Mozilla/5.0 (Linux; Android 4.2.2; Android SDK built for x86 Build/KK) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/30.0.0.0 Mobile Safari/537.36"
        headers["Accept-Language"] = "en-US"

        args = args or {}
        args["DeviceType"] = "Redmi Note 3"
        args["AppVersion"] = "2.9"
        args["AppType"] = "Android"
        args["OSVersion"] = "5.0.2"
        args["udid"] = "582ffa3d-a4cf-425a-8b36-9874d7464015"
                
        url = url + "?" + urlencode(args)
        req = Request(url, headers = headers)
        response = urlopen(req)

        return response.read()
Example #34
    def set_session_cookie(self, username, password):
        """
        Sets a session with Django test client and logs Ghost in by creating
        a session cookie for it.

        Args:
            username (str): The username to login with.
            password (str): The password to login with.
        """
        client = Client(enforce_csrf_checks=False)
        self.assertEqual(client.login(username=username, password=password), True)
        sessionid = client.cookies['sessionid']
        django_cookie = Cookie(
            version=0, name='sessionid', value=sessionid.value, port=None, port_specified=False, domain='localhost',
            domain_specified=True, domain_initial_dot=False, path='/', path_specified=True, secure=False,
            expires=None, discard=True, comment=None, comment_url=None, rest=None, rfc2109=False
        )

        cj = CookieJar()
        cj.set_cookie(django_cookie)
        self.ghost.load_cookies(cj)
Example #35
    def testCookieAdapters(self):
        jar = CookieJar(policy=None)  # None falls back to DefaultCookiePolicy()

        # set a cookie
        res = Response()
        tstval = str(uuid.uuid4())
        res.set_cookie("a-cookie", tstval, domain="example.com")
        cookies = jar.make_cookies(filters.ResponseCookieAdapter(res),
                                   Request.blank("http://example.com"))
        for c in cookies:
            jar.set_cookie(c)

        self.assert_(len(jar), ("where's my cookies?"))
        self.assert_("a-cookie" in [c.name for c in jar],
                     "seriously, where's my cookie")

        # now put the header on the request please
        request = Request.blank("http://example.com")
        self.assert_(".example.com" in jar._cookies.keys(),
                     jar._cookies.keys())
        jar.add_cookie_header(filters.RequestCookieAdapter(request))
        self.assert_("Cookie" in request.headers,
                     (str(request), "Y NO COOKIES?"))
Example #37
class Connection:

    ENCODING = 'gb18030'
    USER_AGENT = 'Mozilla/5.0 (X11; Linux x86_64; rv:14.0) Gecko/20100101 Firefox/14.0.1'
    BBS_URL = 'http://bbs.nju.edu.cn'

    def __init__(self):
        self.cj = CookieJar()
        self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cj))
        self.opener.addheaders = [('User-Agent', self.USER_AGENT)]
        #self.opener.addheaders = [('Referer', self.BBS_URL)]
        self.base_url = self.BBS_URL

    def load_session(self, session):
        self.base_url = '{0}/vd{1}'.format(self.BBS_URL, session.vd)
        self.cj.set_cookie(make_cookie('_U_KEY', session.key))
        self.cj.set_cookie(make_cookie('_U_UID', session.uid))
        self.cj.set_cookie(make_cookie('_U_NUM', session.num))

    def send(self, action, params=None, body=None):
        params = encode_params(params, self.ENCODING)
        url = '{0}/{1}{2}{3}'.format(
                self.base_url,
                action,
                '?' if params else '',      # no prefixing '/'
                params)
        logger.debug(url)
        body = encode_params(body, self.ENCODING)

        try:
            resp = self.opener.open(url, body)
        except URLError:
            raise NetworkError()
        # decode() in py2.6 does not support `errors` kwarg.
        html = resp.read().decode(self.ENCODING, 'ignore')
        return html
Example #38
 def uplay(self, userpass):
     global work
     work = True
     headers = {
         'User-Agent':
         'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:30.0) Gecko/20100101 Firefox/30.0',
         'Ubi-AppId': '314d4fef-e568-454a-ae06-43e3bece12a6',
         'Ubi-RequestedPlatformType': 'uplay',
         'Authorization': 'Basic {0}'.format(base64.b64encode(userpass)),
         'Content-Type': 'application/json; charset=utf-8'
     }
     while work:
         cj = CookieJar()
         try:
             response = requests.post(
                 'https://uplayconnect.ubi.com/ubiservices/v2/profiles/sessions',
                 '{}',
                 timeout=7.5,
                 headers=headers,
                 verify=False,
                 cookies=cj)
             js = json.loads(response.text)
             for cookie in js:
                 cj.set_cookie(self.makeCookie(cookie, js[cookie]))
             work = False
             r = requests.get('http://uplay.ubi.com/', cookies=cj)
             print r.text
         except requests.exceptions.HTTPError:
             print "HTTP Error!"
             work = True
         except requests.exceptions.ConnectionError:
             print "Connection Error!"
             work = True
         except requests.exceptions.Timeout:
             print "Timeout Error!"
             work = True
Example #39
def call_zzkko_api(uri, params=None, method='GET', timeout=10, 
                api_url_prefix=None, session_cookie_name='zzkko_session', session_cookie_value=None):
    """调用zzkko api"""
    from aladin.ext.net import JSONService

    if api_url_prefix is None:
        api_url_prefix = current_app.config['API_ZZKKO_COM']

    if session_cookie_value is None:
        session_cookie_value = request.cookies.get(session_cookie_name, None)

    cj = CookieJar()
    if session_cookie_value is not None:
        c = Cookie(None, session_cookie_name, session_cookie_value, 
               port=None, port_specified=None, domain='', 
               domain_specified=None, domain_initial_dot=None, path='/', 
               path_specified=None, secure=None, expires=None, 
               discard=None, comment=None, comment_url=None, 
               rest=None)
        cj.set_cookie(c)
        
    url = api_url_prefix + uri
    js = JSONService(url, params, method, timeout, cj)
    return js
Example #40
def videoLink(url):
    xbmc.log(msg='htvonline url: ,%s'%(url), level=xbmc.LOGDEBUG)
    # Create a cookie jar to store our custom cookies.
    jar = CookieJar()
     
    # Generate a request to make use of these cookies.
    request = Request(url=url)
     
    # Use makeCookie to generate a cookie and add it to the cookie jar.
    jar.set_cookie(makeCookie("popupNotify_htv", "99"))
    jar.set_cookie(makeCookie("_a3rd1403516032", "0-2"))

    jar.set_cookie(makeCookie("_a3rd1404112369", "0-2"))
    jar.set_cookie(makeCookie("ADB3rdCookie1403515058", "1"))
    jar.set_cookie(makeCookie("PHPSESSID", "hnhenek4um5ptd0mboocp8kgg1"))
    jar.set_cookie(makeCookie("SERVERID", "htvonline_web02"))
    jar.set_cookie(makeCookie("cookemail", "smas4home%40gmail.com"))
    jar.set_cookie(makeCookie("cookpass", "41bc1bdb7b971237065b5a9df6320d90"))
    jar.set_cookie(makeCookie("htvonline", "87662"))
    jar.set_cookie(makeCookie("__utma", "143899884.859084174.1416938614.1418336824.1418341901.14"))
    jar.set_cookie(makeCookie("__utmb", "143899884.85.9.1418349789800"))
    jar.set_cookie(makeCookie("__utmc", "143899884"))
     
    # Add the cookies from the jar to the request.
    jar.add_cookie_header(request)
     
    # Now, let us try open and read.
    opener = urllib2.build_opener()
    f = opener.open(request)
    link = f.read()
 
    #print "Server responds with: "
    #print f.read()
    #cookies = dict(popupNotify_htv='99',_a3rd1403516032='0-2',_a3rd1404112369='0-2',ADB3rdCookie1403515058='1',PHPSESSID='hnhenek4um5ptd0mboocp8kgg1',
    #          SERVERID='htvonline_web02',cookemail='smas4home%40gmail.com',cookpass='******',htvonline='87662',
    #          __utma='143899884.859084174.1416938614.1418336824.1418341901.14',__utmb='143899884.85.9.1418349789800',__utmc='143899884')
    #r = requests.get(url, cookies=cookies)
    xbmc.log(msg='htvonline Request: ,%s'%(link), level=xbmc.LOGDEBUG)
    newlink = ''.join(link.splitlines()).replace('\t','')
    vlink = re.compile('file: "(.+?)",').findall(newlink)
    return vlink[0]
Example #41
class pyGoogleTrendsCsvDownloader(object):
    '''
    Google Trends Downloader
    
    Recommended usage:
    
    from pyGoogleTrendsCsvDownloader import pyGoogleTrendsCsvDownloader
    r = pyGoogleTrendsCsvDownloader(username, password)
    r.get_csv(cat='0-958', geo='US-ME-500')
    
    '''
    def __init__(self, username, password):
        '''
        Provide login and password to be used to connect to Google Trends
        All immutable system variables are also defined here
        '''
        
        # The amount of time (in secs) that the script should wait before making a request.
        # This can be used to throttle the downloading speed to avoid hitting servers too hard.
        # It is further randomized.
        self.download_delay = 0.25
        
        self.service = "trendspro"
        self.url_service = "http://www.google.com/trends/"
        self.url_download = self.url_service + "trendsReport?"
        
        self.login_params = {}
        # These headers are necessary, otherwise Google will flag the request at your account level
        self.headers = [('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:12.0) Gecko/20100101 Firefox/12.0'),
                        ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"),
                        ("Accept-Language", "en-gb,en;q=0.5"),
                        ("Accept-Encoding", "gzip, deflate"),
                        ("Connection", "keep-alive")]
        self.url_login = '******'+self.service+'&passive=1209600&continue='+self.url_service+'&followup='+self.url_service
        self.url_authenticate = 'https://accounts.google.com/accounts/ServiceLoginAuth'
        self.header_dictionary = {}
        
        self._authenticate(username, password)
        
    def _authenticate(self, username, password):
        '''
        Authenticate to Google:
        1 - make a GET request to the Login webpage so we can get the login form
        2 - make a POST request with email, password and login form input values
        '''
        
        # Make sure we get CSV results in English
        ck = Cookie(version=0, name='I4SUserLocale', value='en_US', port=None, port_specified=False, domain='www.google.com', domain_specified=False,domain_initial_dot=False, path='/trends', path_specified=True, secure=False, expires=None, discard=False, comment=None, comment_url=None, rest=None)
        ck_pref = Cookie(version=0, name='PREF', value='', port=None, port_specified=False, domain='www.google.com', domain_specified=False,domain_initial_dot=False, path='/trends', path_specified=True, secure=False, expires=None, discard=False, comment=None, comment_url=None, rest=None) 

        self.cj = CookieJar()                            
        self.cj.set_cookie(ck)
        self.cj.set_cookie(ck_pref)
        self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cj))
        self.opener.addheaders = self.headers
        
        # Get all of the login form input values
        find_inputs = etree.XPath("//form[@id='gaia_loginform']//input")
        try:
            #
            resp = self.opener.open(self.url_login)
            
            if resp.info().get('Content-Encoding') == 'gzip':
                buf = StringIO( resp.read())
                f = gzip.GzipFile(fileobj=buf)
                data = f.read()
            else:
                data = resp.read()
            
            xmlTree = etree.fromstring(data, parser=html.HTMLParser(recover=True, remove_comments=True))
            
            for input in find_inputs(xmlTree):
                name = input.get('name')
                if name:
                    name = name.encode('utf8')
                    value = input.get('value', '').encode('utf8')
                    self.login_params[name] = value
        except:
            print("Exception while parsing: %s\n" % traceback.format_exc())    
        
        self.login_params["Email"] = username
        self.login_params["Passwd"] = password
        
        params = urllib.urlencode(self.login_params)
        self.opener.open(self.url_authenticate, params)
        
    def get_csv(self, throttle=False, **kwargs):
        '''
        Download CSV reports
        '''
        
        # Randomized download delay
        if throttle:
            r = random.uniform(0.5 * self.download_delay, 1.5 * self.download_delay)
            time.sleep(r)
        
        params = {
            'export': '1'
        }
        params.update(kwargs)
        params = urllib.urlencode(params)
        # If params = {'a': '1', 'b': 'with spaces /and +'}
        # the result would be 'a=1&b=with+spaces+%2Fand+%2B' and
        # we still need to replace '+' by its percent encoded version
        # For more info see http://bugs.python.org/issue13866
        params = params.replace('+', '%20')
        
        r = self.opener.open(self.url_download + params)
        
        # Make sure everything is working ;)
        if not r.info().has_key('Content-Disposition'):
            print "You've exceeded your quota. Continue tomorrow..."
            sys.exit(0)
            
        if r.info().get('Content-Encoding') == 'gzip':
            buf = StringIO( r.read())
            f = gzip.GzipFile(fileobj=buf)
            data = f.read()
        else:
            data = r.read()
        
        myFile = open('trends_%s.csv' % '_'.join(['%s-%s' % (key, slugify(unicode(value))) for (key, value) in kwargs.items()]), 'w')
        myFile.write(data)
        myFile.close()
Example #42
def makeCookie(name, value):
    # Reconstructed function head -- the snippet was truncated above this
    # point. The leading arguments are assumptions; the domain is taken
    # from the request URL used below.
    return Cookie(version=0,
                  name=name,
                  value=value,
                  port=None,
                  port_specified=False,
                  domain="kahdev.bur.st",
                  domain_specified=True,
                  domain_initial_dot=False,
                  path="/",
                  path_specified=True,
                  secure=False,
                  expires=None,
                  discard=False,
                  comment=None,
                  comment_url=None,
                  rest=None)


# Create a cookie jar to store our custom cookies.
jar = CookieJar()

# Generate a request to make use of these cookies.
request = Request(url="http://kahdev.bur.st/python/cookies/receiver.php")

# Use makeCookie to generate a cookie and add it to the cookie jar.
jar.set_cookie(makeCookie("name", "kahdev"))
jar.set_cookie(makeCookie("where", "here"))

# Add the cookies from the jar to the request.
jar.add_cookie_header(request)

# Now, let us try open and read.
opener = urllib2.build_opener()
f = opener.open(request)

print "Server responds with: "
print f.read()
Example #44
def list_to_cookiejar(cookies):
  jar = CookieJar()
  for cookie in list_to_cookielist(cookies):
    jar.set_cookie(cookie)
  return jar
Example #45
 def from_cookie_list(cls, clist):
     cj = CookieJar()
     for cookie in clist:
         cj.set_cookie(cookie)
     return cls(cj)
Example #46
def _request(url, headers, post, cookies):
    log(url)
    url = quote_plus(url, safe='%/:?=&')
    if post:
        if sys.version_info[0] >= 3:  # for Python 3
            post = post.encode('utf-8')
        req = Request(url, post)
        log('########POST!')
    else:
        req = Request(url)
    if headers:
        for key in headers:
            req.add_header(key, headers[key])
        #req.add_header('Content-Type','application/json')
        req.has_header = lambda header_name: (
            True if header_name == 'Content-Length' else Request.has_header(
                req, header_name))
    else:
        req.add_header(
            'User-Agent',
            'Mozilla/5.0 (Windows NT 6.1; rv:25.0) Gecko/20100101 Firefox/25.0'
        )
        req.add_header('Accept-Encoding', 'gzip, deflate')

    if cookies:
        cj = CookieJar()
        log(pathUserdata(''))
        log(pathUserdata('cookies.txt'))
        if not f_exists(pathUserdata('')):
            f_mkdir(pathUserdata(''))
        if f_exists(pathUserdata('cookies.txt')):
            cookies_txt = f_open(pathUserdata('cookies.txt'))
            if cookies_txt:
                if sys.version_info[0] >= 3:  # for Python 3
                    if isinstance(cookies_txt, str):
                        cookies_txt = cookies_txt.encode('utf-8')
                c = pickle.loads(cookies_txt)
                for cookie in c:
                    cj.set_cookie(cookie)
        opener = build_opener(HTTPCookieProcessor(cj))
        response = opener.open(req)

        c = []
        for cookie in cj:
            log(str(cookie))
            c.append(cookie)
        log(str(cj))

        f_write(pathUserdata('cookies.txt'), pickle.dumps(c))
        #cj.save(cookiefile)
    else:
        response = urlopen(req)

    compressed = response.info().get('Content-Encoding') == 'gzip'
    link = response.read()
    response.close()
    if compressed:
        if sys.version_info[0] < 3:
            buf = StringIO(link)
        else:
            buf = BytesIO(link)
        f = gzip.GzipFile(fileobj=buf)
        link = f.read()
    if sys.version_info[0] >= 3:  # for Python 3
        link = link.decode('utf-8')
    return link
Example #47
class pyGoogleTrendsCsvDownloader(object):
    '''
    Google Trends Downloader

    Recommended usage:

    from pyGoogleTrendsCsvDownloader import pyGoogleTrendsCsvDownloader
    r = pyGoogleTrendsCsvDownloader(username, password)
    r.get_csv(cat='0-958', geo='US-ME-500')

    '''
    def __init__(self, username, password):
        '''
        Provide login and password to be used to connect to Google Trends
        All immutable system variables are also defined here
        '''

        # The amount of time (in secs) that the script should wait before
        # making a request. This can be used to throttle the downloading speed
        # to avoid hitting servers too hard. It is further randomized.
        self.download_delay = 0.25

        self.service = "trendspro"
        self.url_service = "http://www.google.com/trends/"
        self.url_download = self.url_service + "trendsReport?"

        self.login_params = {}
        # These headers are necessary, otherwise Google will flag the request
        # at your account level
        self.headers = [
            ('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:12.0) '
             'Gecko/20100101 Firefox/12.0'),
            ("Accept", "text/html,application/xhtml+xml,application/xml;"
             "q=0.9,*/*;q=0.8"), ("Accept-Language", "en-gb,en;q=0.5"),
            ("Accept-Encoding", "gzip, deflate"), ("Connection", "keep-alive")
        ]
        self.url_login = ('https://accounts.google.com/ServiceLogin?'
                          'service={service}&'
                          'passive=1209600&'
                          'continue={url_service}&'
                          'followup={url_service}') \
                          .format(service=self.service,
                                  url_service=self.url_service)
        self.url_authenticate = ('https://accounts.google.com/accounts/'
                                 'ServiceLoginAuth')
        self.header_dictionary = {}

        self._authenticate(username, password)

    def _authenticate(self, username, password):
        '''
        Authenticate to Google:
        1 - make a GET request to the Login webpage so we can get the login
            form
        2 - make a POST request with email, password and login form input
            values
        '''

        # Make sure we get CSV results in English
        ck = Cookie(version=0,
                    name='I4SUserLocale',
                    value='en_US',
                    port=None,
                    port_specified=False,
                    domain='www.google.com',
                    domain_specified=False,
                    domain_initial_dot=False,
                    path='/trends',
                    path_specified=True,
                    secure=False,
                    expires=None,
                    discard=False,
                    comment=None,
                    comment_url=None,
                    rest=None)

        self.cj = CookieJar()
        self.cj.set_cookie(ck)
        self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(
            self.cj))
        self.opener.addheaders = self.headers

        # Get all of the login form input values
        find_inputs = etree.XPath("//form[@id='gaia_loginform']//input")
        try:
            resp = self.opener.open(self.url_login)

            if resp.info().get('Content-Encoding') == 'gzip':
                buf = StringIO(resp.read())
                f = gzip.GzipFile(fileobj=buf)
                data = f.read()
            else:
                data = resp.read()

            xmlTree = etree.fromstring(data,
                                       parser=html.HTMLParser(
                                           recover=True, remove_comments=True))

            for input in find_inputs(xmlTree):
                name = input.get('name')
                if name:
                    name = name.encode('utf8')
                    value = input.get('value', '').encode('utf8')
                    self.login_params[name] = value
        except:
            logger.warn("Parsing of form failed. Continuing anyway",
                        exc_info=True)

        self.login_params["Email"] = username
        self.login_params["Passwd"] = password

        params = urllib.urlencode(self.login_params)
        self.opener.open(self.url_authenticate, params)

    def get_csv_data(self, throttle=False, **kwargs):
        '''
        Download CSV reports
        '''

        # Randomized download delay
        if throttle:
            r = random.uniform(0.5 * self.download_delay,
                               1.5 * self.download_delay)
            time.sleep(r)

        params = {'export': 1}
        params.update(kwargs)
        params = urllib.urlencode(params)

        r = self.opener.open(self.url_download + params)

        # Make sure everything is working ;)
        if 'Content-Disposition' not in r.info():
            raise QuotaExceeded('Download quota exceeded. Try again tomorrow.')

        if r.info().get('Content-Encoding') == 'gzip':
            buf = StringIO(r.read())
            f = gzip.GzipFile(fileobj=buf)
            data = f.read()
        else:
            data = r.read()

        return data

    def get_csv(self, *args, **kwargs):
        data = self.get_csv_data(*args, **kwargs)
        myFile = open(
            'trends_%s.csv' % '_'.join(
                ['%s-%s' % (key, value) for (key, value) in kwargs.items()]),
            'w')
        myFile.write(data)
        myFile.close()
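
# get_csv_data raises QuotaExceeded, which this snippet never defines; a
# one-line stand-in is enough for the class above to run:
class QuotaExceeded(Exception):
    pass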
Esempio n. 49
0
class sideload:
    def __init__(self, account_username, account_password, udid, app_id):
        self.account_username = account_username
        self.account_password = account_password

        if isValidUDID(udid):
            self.udid = udid
        else:
            logger.error("Invalid UDID entry.")
            sys.exit(0)

        self.app_id = app_id
        self.clientId = "XABBG36SBB"
        self.appIdKey = "ba2ec180e6ca6e6c6a542255453b24d6e6e5b2be0cc48bc1b0d8ad64cfe0228f"
        self.accountBlob = ""
        self.teamId = ""

        self.browser = mechanize.Browser()
        self.cookiejar = CookieJar()
        self.browser.set_cookiejar(self.cookiejar)
        self.browser.set_handle_equiv(True)
        self.browser.set_handle_gzip(True)
        self.browser.set_handle_redirect(True)
        self.browser.set_handle_referer(True)
        self.browser.set_handle_robots(False)
        self.browser.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(),
                                        max_time=1)
        self.browser.addheaders = [('User-Agent', "Xcode"),
                                   ('Accept', 'text/x-xml-plist'),
                                   ('X-Xcode-Version', '7.0 (7A120f)')]

    def downloadProvisioningProfile(self, appIdId):
        requestId = str(uuid.uuid4()).upper()
        postData = {
            "clientId": self.clientId,
            "myacinfo": self.accountBlob,
            "protocolVersion": "QH65B2",
            "requestId": requestId,
            "teamId": self.teamId,
            "appIdId": appIdId,
            "userLocale": ["en_US"]
        }
        plist = plistlib.writePlistToString(postData)
        response = self.browser.open(
            urllib2.Request(
                "https://developerservices2.apple.com/services/QH65B2/ios/downloadTeamProvisioningProfile.action?clientId="
                + self.clientId, plist, {'Content-Type': 'text/x-xml-plist'}))
        responseData = response.read()
        root = plistlib.readPlistFromString(responseData)
        return root["provisioningProfile"]

    def removeAppId(self, appIdId):
        requestId = str(uuid.uuid4()).upper()
        postData = {
            "clientId": self.clientId,
            "myacinfo": self.accountBlob,
            "protocolVersion": "QH65B2",
            "requestId": requestId,
            "teamId": self.teamId,
            "appIdId": appIdId,
            "userLocale": ["en_US"]
        }
        plist = plistlib.writePlistToString(postData)
        response = self.browser.open(
            urllib2.Request(
                "https://developerservices2.apple.com/services/QH65B2/ios/deleteAppId.action?clientId="
                + self.clientId, plist, {'Content-Type': 'text/x-xml-plist'}))
        responseData = response.read()
        return responseData

    def addAppId(self, identifier):
        requestId = str(uuid.uuid4()).upper()
        postData = {
            "clientId": self.clientId,
            "myacinfo": self.accountBlob,
            "protocolVersion": "QH65B2",
            "requestId": requestId,
            "teamId": self.teamId,
            "entitlements": [],
            "identifier": identifier,
            "name": "Xcode iOS App ID " + identifier.replace(".", " "),
            "appIdName": "Xcode iOS App ID " + identifier.replace(".", " "),
            "userLocale": ["en_US"]
        }
        plist = plistlib.writePlistToString(postData)
        response = self.browser.open(
            urllib2.Request(
                "https://developerservices2.apple.com/services/QH65B2/ios/addAppId.action?clientId="
                + self.clientId, plist, {'Content-Type': 'text/x-xml-plist'}))
        responseData = response.read()
        root = plistlib.readPlistFromString(responseData)
        return root

    def listAppIds(self):
        requestId = str(uuid.uuid4()).upper()
        postData = {
            "clientId": self.clientId,
            "myacinfo": self.accountBlob,
            "protocolVersion": "QH65B2",
            "requestId": requestId,
            "teamId": self.teamId,
            "userLocale": ["en_US"]
        }
        plist = plistlib.writePlistToString(postData)
        response = self.browser.open(
            urllib2.Request(
                "https://developerservices2.apple.com/services/QH65B2/ios/listAppIds.action?clientId="
                + self.clientId, plist, {'Content-Type': 'text/x-xml-plist'}))
        responseData = response.read()
        root = plistlib.readPlistFromString(responseData)
        return root["appIds"]

    def downloadDevelopmentCert(self):
        requestId = str(uuid.uuid4()).upper()
        postData = {
            "clientId": self.clientId,
            "myacinfo": self.accountBlob,
            "protocolVersion": "QH65B2",
            "requestId": requestId,
            "teamId": self.teamId,
            "userLocale": ["en_US"]
        }
        plist = plistlib.writePlistToString(postData)
        response = self.browser.open(
            urllib2.Request(
                "https://developerservices2.apple.com/services/QH65B2/ios/downloadDevelopmentCert.action?clientId="
                + self.clientId, plist, {'Content-Type': 'text/x-xml-plist'}))
        responseData = response.read()
        root = plistlib.readPlistFromString(responseData)
        return root["certificate"]["certContent"].data

    def revokeDevelopmentCert(self, serialNumber):
        requestId = str(uuid.uuid4()).upper()
        postData = {
            "clientId": self.clientId,
            "myacinfo": self.accountBlob,
            "protocolVersion": "QH65B2",
            "requestId": requestId,
            "teamId": self.teamId,
            "serialNumber": serialNumber,
            "userLocale": ["en_US"]
        }
        plist = plistlib.writePlistToString(postData)
        response = self.browser.open(
            urllib2.Request(
                "https://developerservices2.apple.com/services/QH65B2/ios/revokeDevelopmentCert.action?clientId="
                + self.clientId, plist, {'Content-Type': 'text/x-xml-plist'}))

    def submitDevelopmentCSR(self, csr):
        requestId = str(uuid.uuid4()).upper()
        postData = {
            "clientId": self.clientId,
            "myacinfo": self.accountBlob,
            "protocolVersion": "QH65B2",
            "requestId": requestId,
            "teamId": self.teamId,
            "csrContent": csr,
            "userLocale": ["en_US"]
        }
        plist = plistlib.writePlistToString(postData)
        response = self.browser.open(
            urllib2.Request(
                "https://developerservices2.apple.com/services/QH65B2/ios/submitDevelopmentCSR.action?clientId="
                + self.clientId, plist, {'Content-Type': 'text/x-xml-plist'}))
        responseData = response.read()
        root = plistlib.readPlistFromString(responseData)
        return root["certRequest"]

    def retrieveDevelopmentCerts(self):
        requestId = str(uuid.uuid4()).upper()
        postData = {
            "clientId": self.clientId,
            "myacinfo": self.accountBlob,
            "protocolVersion": "QH65B2",
            "requestId": requestId,
            "teamId": self.teamId,
            "userLocale": ["en_US"]
        }
        plist = plistlib.writePlistToString(postData)
        response = self.browser.open(
            urllib2.Request(
                "https://developerservices2.apple.com/services/QH65B2/ios/listAllDevelopmentCerts.action?clientId="
                + self.clientId, plist, {'Content-Type': 'text/x-xml-plist'}))
        responseData = response.read()
        root = plistlib.readPlistFromString(responseData)
        return root["certificates"]

    def addDevice(self, deviceNumber):
        requestId = str(uuid.uuid4()).upper()
        postData = {
            "clientId": self.clientId,
            "myacinfo": self.accountBlob,
            "protocolVersion": "QH65B2",
            "requestId": requestId,
            "teamId": self.teamId,
            "deviceNumber": deviceNumber,
            "name": deviceNumber,
            "userLocale": ["en_US"]
        }
        plist = plistlib.writePlistToString(postData)
        response = self.browser.open(
            urllib2.Request(
                "https://developerservices2.apple.com/services/QH65B2/ios/addDevice.action?clientId="
                + self.clientId, plist, {'Content-Type': 'text/x-xml-plist'}))
        responseData = response.read()
        root = plistlib.readPlistFromString(responseData)
        return root

    def retrieveActiveTeam(self):
        requestId = str(uuid.uuid4()).upper()
        postData = {
            "clientId": self.clientId,
            "myacinfo": self.accountBlob,
            "protocolVersion": "QH65B2",
            "requestId": requestId,
            "userLocale": ["en_US"]
        }
        plist = plistlib.writePlistToString(postData)
        response = self.browser.open(
            urllib2.Request(
                "https://developerservices2.apple.com/services/QH65B2/listTeams.action?clientId="
                + self.clientId, plist, {'Content-Type': 'text/x-xml-plist'}))
        responseData = response.read()
        root = plistlib.readPlistFromString(responseData)
        teams = root["teams"]
        for team in teams:
            if team["status"] == "active":
                return team

        logger.error("No active teams listed on the account")

    def login(self):
        postData = {
            "appIdKey": self.appIdKey,
            "userLocale": "en_US",
            "protocolVersion": "A1234",
            "appleId": self.account_username,
            "password": self.account_password,
            "format": "json"
        }

        response = self.browser.open(
            "https://idmsa.apple.com/IDMSWebAuth/clientDAW.cgi",
            urllib.urlencode(postData))
        parsedData = json.loads(response.read())
        if parsedData["resultCode"] == "0":
            logger.info("Logged into Apple Developer Center")
            self.accountBlob = parsedData["myacinfo"]
            ck = cookielib.Cookie(version=0,
                                  name='myacinfo',
                                  value=parsedData['myacinfo'],
                                  port=None,
                                  port_specified=False,
                                  domain='apple.com',
                                  domain_specified=False,
                                  domain_initial_dot=False,
                                  path='/',
                                  path_specified=True,
                                  secure=False,
                                  expires=None,
                                  discard=True,
                                  comment=None,
                                  comment_url=None,
                                  rest={'HttpOnly': None},
                                  rfc2109=False)
            self.cookiejar.set_cookie(ck)
        else:
            logger.info("Login Failed: %s" % parsedData["resultString"])

    def process(self):
        # login to apple id
        self.login()

        # set up teamId
        team = self.retrieveActiveTeam()
        self.teamId = team["teamId"]

        # view development certificates
        certs = self.retrieveDevelopmentCerts()
        if len(certs) == 0 or not os.path.exists(
                os.path.join(os.path.join('.', 'csr'), 'dev.cer')):
            if len(certs) == 1:
                # revoke existing certificate
                self.revokeDevelopmentCert(certs[0]["serialNumber"])
                logger.info("Revoked existing certificate")

            logger.info("Generating certificate request")
            name = team["name"]
            cn = "iOS Developer: %s (%s)" % (name, name)

            handle = certificate.GenCsr()
            csr = handle.type_RSA(cn, self.account_username, 'iOS Development',
                                  'New York', 'New York', 'US', 2048, 'sha1')
            logger.info("Submitting development CSR...")
            submitResponse = self.submitDevelopmentCSR(csr)
            logger.info("Submission Status: %s",
                        submitResponse["statusString"])
            if submitResponse["statusCode"] != 1:
                logger.error("Unable to get CSR approved")
                handle._clean_old_files()
                sys.exit(0)

            # download development certificate
            devCert = self.downloadDevelopmentCert()
            if os.path.exists(os.path.join(os.path.join('.', 'csr'),
                                           'dev.cer')):
                os.remove(os.path.join(os.path.join('.', 'csr'), 'dev.cer'))
            fh = open(os.path.join(os.path.join('.', 'csr'), 'dev.cer'), "w")
            fh.write(devCert)
            fh.close()
            logger.info("Saved development certificate to csr/dev.cer")

        # add device
        resp = self.addDevice(self.udid)
        if resp["resultCode"] == 0:
            logger.info("Device: %s was added to the Dev Center" % self.udid)
        elif resp["resultCode"] == 35:
            logger.info(
                "Device: %s was already added to the Dev Center. Reason: %s" %
                (self.udid, resp["userString"]))
        else:
            logger.error(
                "Unable to add device: %s to the Dev Center. Reason: %s" %
                (self.udid, resp["userString"]))
            sys.exit(0)

        # see if appId exists already on account
        appIdId = ""
        appIds = self.listAppIds()
        for appId in appIds:
            # self.removeAppId(appId["appIdId"])
            if appId["identifier"] == self.app_id:
                appIdId = appId["appIdId"]
                logger.info("Found existing app identifier: %s", self.app_id)
                break

        if appIdId == "":
            # add appId
            resp = self.addAppId(self.app_id)
            if resp["resultCode"] == 0:
                appIdId = resp["appId"]["appIdId"]
                logger.info("Added app identifier: %s", self.app_id)
            else:
                logger.info("Unable to add app identifier: %s. Reason: %s" %
                            (self.app_id, resp["userString"]))
                sys.exit(0)

        # download provisioning profile
        resp = self.downloadProvisioningProfile(appIdId)
        profileData = resp["encodedProfile"].data
        if os.path.exists(os.path.join('.', 'profile.mobileprovision')):
            os.remove(os.path.join('.', 'profile.mobileprovision'))
        fh = open(os.path.join('.', 'profile.mobileprovision'), "w")
        fh.write(profileData)
        fh.close()
        logger.info("Saved provisioning profile to ./profile.mobileprovision")

        # delete app id
        self.removeAppId(appIdId)
        logger.info("Removed App ID: %s", appIdId)
Esempio n. 50
0
class Network(DOMMixin):

    capabilities = [
        'cookies',
        'headers',
        ]

    wait_expression = WaitExpression

    user_agent = {
        'browser': 'network',
        'platform': 'python',
        'version': '1.0',
        }

    def __init__(self, base_url=None):
        # accept additional request headers?  (e.g. user agent)
        self._base_url = base_url
        self.reset()

    def open(self, url, wait_for=None, timeout=0):
        """Open web page at *url*."""
        self._open(url)

    def reset(self):
        self._referrer = None
        self._request_environ = None
        self._cookie_jar = CookieJar()
        self._opener = urllib2.build_opener(
            urllib2.HTTPCookieProcessor(self._cookie_jar)
        )
        self.status_code = 0
        self.status = ''
        self.response = None
        self.location = None
        self.headers = ()

    def wait_for(self, condition, timeout=None):
        pass

    def sync_document(self):
        """The document is always synced."""

    _sync_document = DOMMixin.sync_document

    @property
    def cookies(self):
        if not (self._cookie_jar and self.location):
            return {}
        request = urllib2.Request(self.location)
        policy = self._cookie_jar._policy

        # return_ok will only return a cookie if the following attrs are
        # set correctly: "version", "verifiability", "secure", "expires",
        # "port", "domain"
        return dict((c.name, c.value.strip('"'))
            for c in self._cookie_jar if policy.return_ok(c, request))

    def set_cookie(self, name, value, domain=None, path=None,
                   session=True, expires=None, port=None):
#        Cookie(version, name, value, port, port_specified,
#                 domain, domain_specified, domain_initial_dot,
#                 path, path_specified, secure, expires,
#                 discard, comment, comment_url, rest,
#                 rfc2109=False):

        cookie = Cookie(0, name, value, port, bool(port),
                        domain or '', bool(domain),
                        (domain and domain.startswith('.')),
                        path or '', bool(path), False, expires,
                        session, None, None, {}, False)
        self._cookie_jar.set_cookie(cookie)

    def delete_cookie(self, name, domain=None, path=None):
        try:
            self._cookie_jar.clear(domain, path, name)
        except KeyError:
            pass

    # Internal methods
    @lazy_property
    def _lxml_parser(self):
        return html_parser_for(self, wsgi_elements)

    def _open(self, url, method='GET', data=None, refer=True,
              content_type=None):
        before_browser_activity.send(self)
        open_started = time()

        if data:
            data = urlencode(data)

        url = urljoin(self._base_url, url)
        if method == 'GET':
            if '?' in url:
                url, query_string = url.split('?', 1)
            else:
                query_string = None

            if data:
                query_string = data
            if query_string:
                url = url + '?' + query_string

            request = urllib2.Request(url)
        elif method == 'POST':
            request = urllib2.Request(url, data)
        else:
            raise Exception('Unsupported method: %s' % method)
        if self._referrer and refer:
            request.add_header('Referer', self._referrer)

        logger.info('%s(%s)', url, method)
        request_started = time()

        response = self._opener.open(request)

        request_ended = time()

        self.status_code = response.getcode()
        self.headers = Headers(
            (head.strip().split(': ',1) for head in response.info().headers)
        )
        self._referrer = request.get_full_url()
        self.location = response.geturl()
        self._response = response
        self.response = ''.join(list(response))
        self._sync_document()

        open_ended = time()
        request_time = request_ended - request_started

        logger.info("Fetched %s in %0.3fsec + %0.3fsec browser overhead",
                    url, request_time,
                    open_ended - open_started - request_time)
        after_browser_activity.send(self)
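
# A hedged usage sketch for Network; the URL is illustrative, and
# DOMMixin, Headers, WaitExpression and lazy_property come from the
# surrounding framework:
#
# browser = Network(base_url='http://example.com')
# browser.set_cookie('session', 'abc123', domain='example.com', path='/')
# browser.open('/index.html')
# print browser.status_code, browser.cookies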
Esempio n. 51
0
class pyGoogleTrendsCsvDownloader(object):
    '''
    Google Trends Downloader.

    Recommended usage:

    from pyGoogleTrendsCsvDownloader import pyGoogleTrendsCsvDownloader
    r = pyGoogleTrendsCsvDownloader(username, password)
    r.get_csv_data(cat='0-958', geo='US-ME-500')

    '''
    def __init__(self, username, password):
        '''
        Provide login and password to be used to connect to Google Trends
        All immutable system variables are also defined here
        '''

        # The amount of time (in secs) that the script should wait before making a request.
        # This can be used to throttle the downloading speed to avoid hitting servers too hard.
        # It is further randomized.
        self.download_delay = 2

        self.service = "trendspro"
        self.url_service = "http://www.google.com/trends/"
        self.url_download = 'https://www.google.com/trends/trendsReport?'

        self.login_params = {}
        # These headers are necessary, otherwise Google will flag the request at your account level
        self.headers = [('User-Agent', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.117 Safari/537.36'),
                        ("accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"),
                        ("Accept-Language", "en-gb,en;q=0.8"),
                        ("Accept-Encoding", "gzip,deflate,sdch"),
                        ("referer", "https://www.google.com/trends/explore"),
                        ("pragma", "no-cache"),
                        ("cache-control", "no-cache"),
                        ]
        self.url_login = 'https://accounts.google.com/ServiceLogin?service='+self.service+'&passive=1209600&continue='+self.url_service+'&followup='+self.url_service
        self.url_authenticate = 'https://accounts.google.com/accounts/ServiceLoginAuth'

        self._authenticate(username, password)

    def _authenticate(self, username, password):
        '''
        Authenticate to Google:
        1 - make a GET request to the Login webpage so we can get the login form
        2 - make a POST request with email, password and login form input values
        '''
        # Make sure we get CSV results in English
        ck1 = Cookie(version=0, name='I4SUserLocale', value='en_US', port=None, port_specified=False, domain='.google.com', domain_specified=False,domain_initial_dot=False, path='', path_specified=False, secure=False, expires=None, discard=False, comment=None, comment_url=None, rest=None)
        # This cookie is now mandatory
        ck2 = Cookie(version=0, name='PREF', value='', port=None, port_specified=False, domain='.google.com', domain_specified=False,domain_initial_dot=False, path='', path_specified=False, secure=False, expires=None, discard=False, comment=None, comment_url=None, rest=None)

        self.cj = CookieJar()
        self.cj.set_cookie(ck1)
        self.cj.set_cookie(ck2)

        self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cj))
        self.opener.addheaders = self.headers

        # Get all of the login form input values
        find_inputs = etree.XPath("//form[@id='gaia_loginform']//input")
        resp = self.opener.open(self.url_login)
        data = self.read_gzipped_response(resp)

        try:
            xmlTree = etree.fromstring(data, parser=html.HTMLParser(recover=True, remove_comments=True))
            for input in find_inputs(xmlTree):
                name = input.get('name')
                if name:
                    name = name.encode('utf8')
                    value = input.get('value', '').encode('utf8')
                    self.login_params[name] = value
        except:
            print("Exception while parsing: %s\n" % traceback.format_exc())

        self.login_params["Email"] = username
        self.login_params["Passwd"] = password

        params = urllib.urlencode(self.login_params)
        auth_resp = self.opener.open(self.url_authenticate, params)

        # Testing whether Authentication was a success
        # I noticed that a correct auth sets a few cookies
        if not self.is_authentication_successfull(auth_resp):
            print 'Warning: Authentication failed for user %s' % username
        else:
            print 'Authentication successful for user %s' % username

    def is_authentication_successfull(self, response):
        '''
            Heuristic for deciding whether the authentication succeeded:
            we look for an SSID value in the Set-Cookie header.
            I noticed that the 4 mandatory cookies were:
              - SID
              - SSID
              - HSID
              - PREF (but does not need to be set)
        '''
        if response:
            return 'SSID' in (response.info().getheader('Set-Cookie') or '')

        return False

    def is_quota_exceeded(self, response):
        # TODO: double check that the check for the content-disposition
        # is correct
        if response.info().has_key('Content-Disposition'):
            return False
        return True

    def read_gzipped_response(self, response):
        '''
            Since we are adding gzip to our http request Google can answer with gzipped data
            that needs uncompressing before handling.
            This method returns the text content of a Http response.
        '''
        if response.info().get('Content-Encoding') == 'gzip':
            buf = StringIO(response.read())
            f = gzip.GzipFile(fileobj=buf)
            content = f.read()
        else:
            content = response.read()
        return content

    def get_csv_data(self, **kwargs):
        '''
        Download CSV reports
        '''
        time.sleep(self.download_delay)

        params = {
            'hl': 'en-us',
            'export': 1
        }
        params.update(kwargs)

        # Silly python with the urlencode method
        params = urllib.urlencode(params).replace("+", "%20")
        response = self.opener.open(self.url_download + params)

        # Make sure quotas are not exceeded ;)
        if self.is_quota_exceeded(response):
            raise QuotaExceededException()

        return self.read_gzipped_response(response)
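
# As with the earlier QuotaExceeded, the exception type raised by
# get_csv_data is referenced but never defined here; a minimal stand-in:
class QuotaExceededException(Exception):
    pass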
Esempio n. 52
0
class Account(object):
    login_cookie_name = '.ASPXFORMSAUTH'
    card_selection_event_target = 'ctl00$ctl00$ContentPlaceHolder1$TabContainer2$MyCardsTabPanel$ddlMyCardsList'

    def __init__(self, username=None, password=None, cookies=None):
        self.username = username
        self.password = password
        self.journeys = {}

        self.cj = CookieJar()
        for cookie in (cookies or []):
            self.cj.set_cookie(cookie)

        self.br = mechanize.Browser(factory=mechanize.RobustFactory())
        self.br.set_cookiejar(self.cj)
        # Browser options
        self.br.set_handle_equiv(True)
        self.br.set_handle_gzip(True)
        self.br.set_handle_redirect(True)
        self.br.set_handle_referer(True)
        self.br.set_handle_robots(False)
        # Follow refresh 0 but do not hang on refresh > 0
        self.br.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(),
                                   max_time=1)
        # Required so ASP hidden forms are set properly
        self.br.addheaders = [('User-agent', 'Mozilla/5.0 (compatible)')]

    def login(self):
        """
        Login and set name if successful.
        """
        url = BASE_URL + '/en/Login.aspx'
        self.br.open(url)
        self.br.select_form(nr=0)
        self.br.form["ctl00$ContentPlaceHolder1$UserName"] = self.username
        self.br.form["ctl00$ContentPlaceHolder1$Password"] = self.password
        self.br.submit(name="ctl00$ContentPlaceHolder1$btnlogin")

        # Upon successful login, this cookie should be set
        login_cookie = None
        for cookie in self.cj:
            if cookie.name == self.login_cookie_name:
                login_cookie = cookie
                break

        if login_cookie:
            # Get account holder name
            soup = BeautifulSoup(self.br.response().read())
            self.name = soup.find(id="LoginName1").string

            login_cookie.discard = True
            future = datetime.datetime.now() + datetime.timedelta(minutes=20)
            unix_time = int(time.mktime(future.timetuple()))
            login_cookie.expires = unix_time
            return True
        else:
            return False

    @property
    def logged_in(self):
        for cookie in self.cj:
            if cookie.name == self.login_cookie_name and not cookie.is_expired():
                return True
        return False

    @property
    def cards(self):
        if not hasattr(self, '_cards'):
            self._cards = self._fetch_cards()
        return self._cards

    @login_required
    def _fetch_cards(self):
        """
        Return the cards a user has registered as a dict in
        the form {card_id: {card details}}.
        """
        url = BASE_URL + "/en/SelfServices/CardServices/CardOverView.aspx"

        self.br.open(url)
        soup = BeautifulSoup(self.br.response().read())

        cards = {}
        for tag in soup.find('select').find_all('option'):
            card_id = tag['value']
            card_name = tag.text
            if tag.has_attr('selected'):
                cards[card_id] = self._card_overview(soup)
            else:
                self.br.select_form(nr=0)
                self.br.set_all_readonly(False)
                self.br["__EVENTTARGET"] = self.card_selection_event_target
                self.br[self.card_selection_event_target] = [card_id]
                self.br.submit()
                soup = BeautifulSoup(self.br.response().read())
                cards[card_id] = self._card_overview(soup)

        return cards

    @login_required
    def card_journeys(self, card_id):
        """
        Returns a list of all the journeys for a specific card.
        """
        card_id = str(card_id)

        journeys = self.journeys.get(card_id, [])
        if journeys:
            return journeys

        url = BASE_URL + "/en/SelfServices/CardServices/ViewJourneyHistory.aspx"

        self.br.open(url)
        self.br.select_form(nr=0)

        self.br.set_all_readonly(False)
        self.br["__EVENTTARGET"] = self.card_selection_event_target
        self.br[self.card_selection_event_target] = [card_id]
        self.br.submit()

        # Get print view which includes all journeys instead of clicking through list
        self.br.select_form(nr=0)
        self.br.submit(name="ctl00$ctl00$ContentPlaceHolder1$TabContainer2$MyCardsTabPanel$ContentPlaceHolder1$btn_Print")

        # Extract print data (contained in js print script)
        pattern = r'printWin.document.write\("(.*)"\);printWin.document.close'
        match = re.search(pattern, self.br.response().read())
        if not match:
            raise ParseError("Could not extract journey print data.")
        soup = BeautifulSoup(match.group(1))

        journeys = self._journey_list(soup)
        self.journeys[card_id] = journeys

        return journeys

    def _card_overview(self, soup):
        """
        Return a dict of card details.
        """
        #[list(tag.stripped_strings) for tag in soup.find(id=re.compile('CardDetails')).find_all('li')[:-1]]
        overview_vals = [tag.contents[2].strip() for tag in
                soup.find(id=re.compile('CardDetails')).find_all('li')[:-1]]
        overview_keys = ('number', 'label', 'type', 'status', 'credit_status',
                'auto_topup', 'init_date', 'expiry_date', 'balance')
        card_overview = dict(zip(overview_keys, overview_vals))
        card_overview['balance'] = float(card_overview['balance'])

        return card_overview

    def _journey_list(self, soup):
        """
        Return a list of journey dicts.
        """
        table = soup.find(id='gvCardJourney')
        journeys = []
        for row in table.find_all('tr')[1:]:
            cols = row.find_all('td')
            time = cols[0].string.strip() + " " + cols[1].string.strip()
            journeys.append({
                'datetime': datetime.datetime.strptime(time,
                    '%d/%m/%Y %I:%M %p'),
                'source': cols[2].string.strip(),
                'type': cols[3].string.strip(),
                'amount': cols[4].string.strip(),
                'balance': cols[5].string.strip(),
            })
        return journeys
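
# Account's methods are wrapped in a login_required decorator that the
# snippet does not include (BASE_URL and ParseError are likewise assumed
# from the surrounding module). A hedged sketch of such a decorator,
# assuming it simply logs in on demand before calling the wrapped method:
from functools import wraps

def login_required(method):
    @wraps(method)
    def wrapper(self, *args, **kwargs):
        # Re-authenticate whenever the login cookie is missing or expired.
        if not self.logged_in:
            self.login()
        return method(self, *args, **kwargs)
    return wrapper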
Esempio n. 53
0
class CookieManager(object):
    """
    Each Grab instance has a `cookies` attribute that is an instance of
    the `CookieManager` class.

    The class provides helper methods to create, load, and save cookies
    from/to different places.
    """

    __slots__ = ('cookiejar', )

    def __init__(self, cookiejar=None):
        if cookiejar is not None:
            self.cookiejar = cookiejar
        else:
            self.cookiejar = CookieJar()
        #self.disable_cookiejar_lock(self.cookiejar)

    #def disable_cookiejar_lock(self, cj):
    #cj._cookies_lock = dummy_threading.RLock()

    def set(self, name, value, **kwargs):
        """Add new cookie or replace existing cookie with same parameters.

        :param name: name of cookie
        :param value: value of cookie
        :param kwargs: extra attributes of cookie
        """

        self.cookiejar.set_cookie(create_cookie(name, value, **kwargs))

    def update(self, cookies):
        if isinstance(cookies, CookieJar):
            for cookie in cookies:
                self.cookiejar.set_cookie(cookie)
        elif isinstance(cookies, CookieManager):
            for cookie in cookies.cookiejar:
                self.cookiejar.set_cookie(cookie)
        else:
            raise GrabMisuseError('Unknown type of cookies argument: %s' %
                                  type(cookies))

    @classmethod
    def from_cookie_list(cls, clist):
        cj = CookieJar()
        for cookie in clist:
            cj.set_cookie(cookie)
        return cls(cj)

    def clear(self):
        self.cookiejar = CookieJar()

    def __getstate__(self):
        state = {}
        for cls in type(self).mro():
            cls_slots = getattr(cls, '__slots__', ())
            for slot in cls_slots:
                if slot != '__weakref__':
                    if hasattr(self, slot):
                        state[slot] = getattr(self, slot)

        state['_cookiejar_cookies'] = list(self.cookiejar)
        del state['cookiejar']

        return state

    def __setstate__(self, state):
        state['cookiejar'] = CookieJar()
        for cookie in state['_cookiejar_cookies']:
            state['cookiejar'].set_cookie(cookie)
        del state['_cookiejar_cookies']

        for slot, value in state.items():
            setattr(self, slot, value)

    def __getitem__(self, key):
        for cookie in self.cookiejar:
            if cookie.name == key:
                return cookie.value
        raise KeyError

    def items(self):
        res = []
        for cookie in self.cookiejar:
            res.append((cookie.name, cookie.value))
        return res

    def load_from_file(self, path):
        """
        Load cookies from the file.

        The file content should be a JSON-serialized list of dicts.
        """

        with open(path) as inf:
            data = inf.read()
            if data:
                items = json.loads(data)
            else:
                items = {}
        jar = CookieJar()
        for item in items:
            jar.set_cookie(create_cookie(**item))
        self.update(jar)

    def get_dict(self):
        res = []
        for cookie in self.cookiejar:
            res.append(dict((x, getattr(cookie, x)) for x in COOKIE_ATTRS))
        return res

    def save_to_file(self, path):
        """
        Dump all cookies to file.

        Cookies are dumped as a JSON-serialized list of dicts, one per cookie.
        """

        with open(path, 'w') as out:
            out.write(json.dumps(self.get_dict()))
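
# A hedged usage sketch of CookieManager round-tripping cookies through
# a JSON file; names, values and the path are illustrative:
#
# mgr = CookieManager()
# mgr.set('sid', 'abc123', domain='.example.com', path='/')
# mgr.save_to_file('/tmp/cookies.json')
#
# mgr2 = CookieManager()
# mgr2.load_from_file('/tmp/cookies.json')
# assert mgr2['sid'] == 'abc123'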
class pyGoogleTrendsCsvDownloader(object):
    '''
    Google Trends Downloader.

    Recommended usage:

    from pyGoogleTrendsCsvDownloader import pyGoogleTrendsCsvDownloader
    r = pyGoogleTrendsCsvDownloader(username, password)
    r.get_csv_data(cat='0-958', geo='US-ME-500')

    '''
    def __init__(self, username, password):
        '''
        Provide login and password to be used to connect to Google Trends
        All immutable system variables are also defined here
        '''

        # The amount of time (in secs) that the script should wait before making a request.
        # This can be used to throttle the downloading speed to avoid hitting servers too hard.
        # It is further randomized.
        self.download_delay = 2

        self.service = "trendspro"
        self.url_service = "http://www.google.com/trends/"
        self.url_download = 'https://www.google.com/trends/trendsReport?'

        self.login_params = {}
        # These headers are necessary, otherwise Google will flag the request at your account level
        self.headers = [('User-Agent', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.117 Safari/537.36'),
                        ("accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"),
                        ("Accept-Language", "en-gb,en;q=0.8"),
                        ("Accept-Encoding", "gzip,deflate,sdch"),
                        ("referer", "https://www.google.com/trends/explore"),
                        ("pragma", "no-cache"),
                        ("cache-control", "no-cache"),
                        ]
        self.url_login = 'https://accounts.google.com/ServiceLogin?service='+self.service+'&passive=1209600&continue='+self.url_service+'&followup='+self.url_service
        self.url_authenticate = 'https://accounts.google.com/accounts/ServiceLoginAuth'

        self._authenticate(username, password)

    def _authenticate(self, username, password):
        '''
        Authenticate to Google:
        1 - make a GET request to the Login webpage so we can get the login form
        2 - make a POST request with email, password and login form input values
        '''
        # Make sure we get CSV results in English
        ck1 = Cookie(version=0, name='I4SUserLocale', value='en_US', port=None, port_specified=False, domain='.google.com', domain_specified=False,domain_initial_dot=False, path='', path_specified=False, secure=False, expires=None, discard=False, comment=None, comment_url=None, rest=None)
        # This cookie is now mandatory
        # Not sure what the value represents but too many queries from the same value
        # lead to a Quota Exceeded error.
        # random_six_char = ''.join(random.choice('0123456789abcdef') for n in xrange(6))
        ck2 = Cookie(version=0, name='PREF', value='0000', port=None, port_specified=False, domain='.google.com', domain_specified=False,domain_initial_dot=False, path='', path_specified=False, secure=False, expires=None, discard=False, comment=None, comment_url=None, rest=None)

        self.cj = CookieJar()
        self.cj.set_cookie(ck1)
        self.cj.set_cookie(ck2)

        self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cj))
        self.opener.addheaders = self.headers

        # Get all of the login form input values
        find_inputs = etree.XPath("//form[@id='gaia_loginform']//input")
        resp = self.opener.open(self.url_login)
        data = self.read_gzipped_response(resp)

        try:
            xmlTree = etree.fromstring(data, parser=html.HTMLParser(recover=True, remove_comments=True))
            for input in find_inputs(xmlTree):
                name = input.get('name')
                if name:
                    name = name.encode('utf8')
                    value = input.get('value', '').encode('utf8')
                    self.login_params[name] = value
        except:
            print("Exception while parsing: %s\n" % traceback.format_exc())

        self.login_params["Email"] = username
        self.login_params["Passwd"] = password

        params = urllib.urlencode(self.login_params)
        auth_resp = self.opener.open(self.url_authenticate, params)

        # Testing whether Authentication was a success
        # I noticed that a correct auth sets a few cookies
        if not self.is_authentication_successfull(auth_resp):
            print 'Warning: Authentication failed for user %s' % username
        else:
            print 'Authentication successful for user %s' % username

    def is_authentication_successfull(self, response):
        '''
            Heuristic for deciding whether the authentication succeeded:
            we look for an SSID value in the Set-Cookie header.
            I noticed that the 4 mandatory cookies were:
              - SID
              - SSID
              - HSID
              - PREF (but does not need to be set)
        '''
        if response:
            return 'SSID' in (response.info().getheader('Set-Cookie') or '')

        return False

    def is_quota_exceeded(self, response):
        # TODO: double check that the check for the content-disposition
        # is correct
        if response.info().has_key('Content-Disposition'):
            return False
        return True

    def read_gzipped_response(self, response):
        '''
            Since we are adding gzip to our http request Google can answer with gzipped data
            that needs uncompressing before handling.
            This method returns the text content of a Http response.
        '''
        if response.info().get('Content-Encoding') == 'gzip':
            buf = StringIO(response.read())
            f = gzip.GzipFile(fileobj=buf)
            content = f.read()
        else:
            content = response.read()
        return content

    def get_csv_data(self, **kwargs):
        '''
        Download CSV reports
        '''
        time.sleep(self.download_delay)

        params = {
            'hl': 'en-us',
            'export': 1
        }
        params.update(kwargs)

        # Silly python with the urlencode method
        params = urllib.urlencode(params).replace("+", "%20")
        response = self.opener.open(self.url_download + params)

        # Make sure quotas are not exceeded ;)
        if self.is_quota_exceeded(response):
            raise QuotaExceededException()

        return self.read_gzipped_response(response)
Esempio n. 55
0
 def from_cookie_list(cls, clist):
     cj = CookieJar()
     for cookie in clist:
         cj.set_cookie(cookie)
     return cls(cj)
Esempio n. 56
0
class SafeBoxClient():
    def __init__(self, server_addr="localhost:8000"):
        self.server_addr = server_addr
        self.client_id = self.ccid = self.pin = None
        self.cookie_jar = CookieJar()
        self.curr_ticket = ""

    # startClient: Initializes the client's remaining attributes;
    # this involves starting a session and, if needed, client registration.
    def startClient(self, ccid, passwd, pin):

        # checking if client is already registered
        def checkClientReg_cb(success):
            if success == False:
                print "User not registered."
                if pin is None:
                    print "Please provide your Citizen Card for registration"
                    reactor.stop()
                    return
                else:
                    print "Registering user..."
                    return self.handleRegister()
            #pprint(self.cookie_jar.__dict__)
            print "User: "******" logged in."
            for cookie in self.cookie_jar:
                #print cookie
                #print type(cookie)
                self.curr_ticket = self.client_id.decryptData(cookie.value)

        # Instantiating ClientIdentity
        def startClientId_cb(key):
            self.client_id = ClientIdentity(self.ccid, passwd, key)
            self.handleStartSession(checkClientReg_cb)

        self.ccid = ccid
        if pin is not None:
            self.pin = pin
        return self.handleGetKey(startClientId_cb)

# Session, Registry and Authentication related operations
#
# handleGetKey: handles getkey operations; this happens as the
# first step of the startClient operation.

    def handleGetKey(self, method):
        def handleGetKey_cb(response):
            defer = Deferred()
            defer.addCallback(method)
            response.deliverBody(DataPrinter(defer, "getkey"))
            return NOT_DONE_YET

        agent = Agent(reactor)
        headers = http_headers.Headers()
        d = agent.request('GET',
                          'http://localhost:8000/session/?method=getkey',
                          headers, None)

        d.addCallback(handleGetKey_cb)

        return NOT_DONE_YET

    # handleStartSession: handles startsession operations
    def handleStartSession(self, method):
        def procResponse_cb(response):
            defer = Deferred()
            defer.addCallback(method)
            response.deliverBody(DataPrinter(defer, "bool"))
            return NOT_DONE_YET

        def startSession_cb((signedNonce, nonceid)):
            agent = CookieAgent(Agent(reactor), self.cookie_jar)
            dataq = []
            dataq.append(signedNonce)
            body = _FileProducer(
                StringIO(self.client_id.encryptData(self.client_id.password)),
                dataq)
            headers = http_headers.Headers()
            d = agent.request(
                'PUT',
                'http://localhost:8000/session/?method=startsession&ccid=' +
                self.ccid + '&nonceid=' + str(nonceid), headers, body)
            d.addCallback(procResponse_cb)
            return NOT_DONE_YET

        def getNonce_cb(response):
            defer = Deferred()
            defer.addCallback(startSession_cb)
            response.deliverBody(getNonce(defer, self.client_id, self.pin))
            return NOT_DONE_YET

        if self.pin != None:
            agent = Agent(reactor)
            body = FileBodyProducer(
                StringIO(self.client_id.pub_key.exportKey('PEM')))
            headers = http_headers.Headers()
            d = agent.request(
                'GET', 'http://localhost:8000/session/?method=getnonce',
                headers, body)

            d.addCallback(getNonce_cb)

            return NOT_DONE_YET

        agent = CookieAgent(Agent(reactor), self.cookie_jar)
        body = FileBodyProducer(
            StringIO(self.client_id.encryptData(self.client_id.password)))
        headers = http_headers.Headers()
        d = agent.request(
            'PUT', 'http://localhost:8000/session/?method=startsession&ccid=' +
            self.ccid + '&nonceid=' + str(-1), headers, body)
        d.addCallback(procResponse_cb)
        return NOT_DONE_YET

    # handleRegister: Handles the registration process. Also part of the startClient operation.
    def handleRegister(self):
        def checkClientReg_cb(success):
            if not success:
                print "ERROR: Couldn't register user."
                reactor.stop()
                return

            for cookie in self.cookie_jar:
                self.curr_ticket = self.client_id.decryptData(cookie.value)
            print "Registration Successful."
            print "User: " + self.ccid + " logged in."

        def procResponse_cb(response, method):
            defer = Deferred()
            defer.addCallback(method)
            response.deliverBody(DataPrinter(defer, "bool"))
            return NOT_DONE_YET

        def register_cb((signedNonce, nonceid)):
            agent = CookieAgent(Agent(reactor), self.cookie_jar)
            dataq = []
            dataq.append(signedNonce)
            dataq.append(self.client_id.encryptData(self.client_id.password))
            # Sending the Certificate and the Sub CA to the server
            if self.pin is None:
                print "ERROR! Check the pin!"
                reactor.stop()
                return
            cert = cc.get_certificate(cc.CERT_LABEL, self.pin)
            if cert is None:
                print "ERROR! Check the pin!"
                reactor.stop()
                return
            subca = cc.get_certificate(cc.SUBCA_LABEL, self.pin)
            if subca is None:
                print "ERROR! Check the pin!"
                reactor.stop()
                return

            enc_cert = b64encode(cert.as_pem())
            dataq.append(enc_cert)
            enc_subca = b64encode(subca.as_pem())
            dataq.append(enc_subca)
            ext_key = self.client_id.pub_key.exportKey('PEM')
            dataq.append(ext_key)
            signed_ext_key = cc.sign(ext_key, cc.KEY_LABEL, self.pin)
            enc_sek = b64encode(signed_ext_key)
            dataq.append(enc_sek)
            body = FileProducer2(dataq)
            headers = http_headers.Headers()
            #print "Password:"******"LEN:", len(self.client_id.encryptData(self.client_id.password))
            d = agent.request(
                'PUT', 'http://localhost:8000/pboxes/?method=register' +
                '&nonceid=' + str(nonceid), headers, body)
            d.addCallback(procResponse_cb, checkClientReg_cb)

        def getNonce_cb(response):
            defer = Deferred()
            defer.addCallback(register_cb)
            response.deliverBody(getNonce(defer, self.client_id, self.pin))
            return NOT_DONE_YET

        agent = Agent(reactor)
        body = FileBodyProducer(
            StringIO(self.client_id.pub_key.exportKey('PEM')))
        headers = http_headers.Headers()
        d = agent.request('GET',
                          'http://localhost:8000/session/?method=getnonce',
                          headers, body)

        d.addCallback(getNonce_cb)
        return NOT_DONE_YET

    def processCookie(self, uri):
        # Increment the session ticket, sign and encrypt the new value,
        # then write it back into every cookie in the jar.
        dci = number.long_to_bytes(number.bytes_to_long(self.curr_ticket) + 1)
        self.curr_ticket = dci
        sci = self.client_id.signData(str(dci))
        enc = self.client_id.encryptData(sci)
        # Iterate over a copy: clear()/set_cookie() mutate the jar.
        for cookie in list(self.cookie_jar):
            cookie.value = enc
            cookie.path = uri
            self.cookie_jar.clear()
            self.cookie_jar.set_cookie(cookie)
        return dci
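
    # Worked example of the ticket arithmetic above (made-up values):
    #
    #   >>> from Crypto.Util import number
    #   >>> number.bytes_to_long('\x00\x10')
    #   16L
    #   >>> number.long_to_bytes(16 + 1)
    #   '\x11'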

# List Operations
#
# handleList_cb: shared callback for every list command

    def handleList_cb(self, response):
        defer = Deferred()
        response.deliverBody(DataPrinter(defer, "list"))
        return NOT_DONE_YET

    def handleListPboxes(self):
        args = ("list", str(self.ccid))
        salt = self.processCookie("/pboxes")
        body = FileBodyProducer(
            StringIO(self.client_id.genHashArgs(args, salt)))
        #print "hashed:", self.client_id.genHashArgs(args, salt)
        agent = CookieAgent(Agent(reactor), self.cookie_jar)
        headers = http_headers.Headers()
        d = agent.request(
            'GET',
            'http://localhost:8000/pboxes/?method=list&ccid=' + self.ccid,
            headers, body)
        d.addCallback(self.handleList_cb)
        return NOT_DONE_YET

    def handleListFiles(self):
        args = ("list", str(self.ccid))
        salt = self.processCookie("/files")
        body = FileBodyProducer(
            StringIO(self.client_id.genHashArgs(args, salt)))

        agent = CookieAgent(Agent(reactor), self.cookie_jar)
        headers = http_headers.Headers()
        d = agent.request(
            'GET',
            'http://localhost:8000/files/?method=list&ccid=' + self.ccid,
            headers, body)
        d.addCallback(self.handleList_cb)
        return NOT_DONE_YET

    def handleListShares(self):
        args = ("list", str(self.ccid))
        salt = self.processCookie("/shares")
        body = FileBodyProducer(
            StringIO(self.client_id.genHashArgs(args, salt)))

        agent = CookieAgent(Agent(reactor), self.cookie_jar)
        headers = http_headers.Headers()
        d = agent.request(
            'GET',
            'http://localhost:8000/shares/?method=list&ccid=' + self.ccid,
            headers, body)
        d.addCallback(self.handleList_cb)
        return NOT_DONE_YET
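
    # The three list handlers above differ only in the resource path; a
    # hedged refactor sketch (an assumption, not part of the original
    # listing):
    #
    #   def handleListGeneric(self, resource):
    #       args = ("list", str(self.ccid))
    #       salt = self.processCookie("/" + resource)
    #       body = FileBodyProducer(
    #           StringIO(self.client_id.genHashArgs(args, salt)))
    #       agent = CookieAgent(Agent(reactor), self.cookie_jar)
    #       d = agent.request(
    #           'GET', 'http://localhost:8000/' + resource +
    #           '/?method=list&ccid=' + self.ccid,
    #           http_headers.Headers(), body)
    #       d.addCallback(self.handleList_cb)
    #       return NOT_DONE_YET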

# Get Operations
#
# handleGetMData: Handles get pbox metadata operations.

    def handleGetMData(self, data):
        # data = (callback, tgtccid)

        def handleGetMData_cb(response):
            defer = Deferred()
            defer.addCallback(data[0])
            response.deliverBody(DataPrinter(defer, "getmdata"))
            return NOT_DONE_YET

        args = ("get_mdata", str(self.ccid), data[1])
        salt = self.processCookie("/pboxes")
        body = FileBodyProducer(
            StringIO(self.client_id.genHashArgs(args, salt)))

        agent = CookieAgent(Agent(reactor), self.cookie_jar)
        headers = http_headers.Headers()
        d = agent.request(
            'GET', 'http://localhost:8000/pboxes/?method=get_mdata&ccid=' +
            self.ccid + "&tgtccid=" + data[1], headers, body)

        d.addCallback(handleGetMData_cb)

        return NOT_DONE_YET

    # handleGetFileMData: Handles get file metadata operations.
    def handleGetFileMData(self, data):
        #data = (method, fileid)
        def handleGetFileMData_cb(response):
            defer = Deferred()
            defer.addCallback(data[0])
            response.deliverBody(DataPrinter(defer, "getmdata"))
            return NOT_DONE_YET

        args = ("get_mdata", str(self.ccid), data[1])
        salt = self.processCookie("/files")
        body = FileBodyProducer(
            StringIO(self.client_id.genHashArgs(args, salt)))

        agent = CookieAgent(Agent(reactor), self.cookie_jar)
        headers = http_headers.Headers()
        d = agent.request(
            'GET', 'http://localhost:8000/files/?method=get_mdata&ccid=' +
            self.ccid + "&fileid=" + data[1], headers, body)

        d.addCallback(handleGetFileMData_cb)

        return NOT_DONE_YET

    # handleGetShareMData: Handles get share metadata operations.
    def handleGetShareMData(self, data):
        #data = (method, fileid)
        def handleGetShareMData_cb(response):
            defer = Deferred()
            defer.addCallback(data[0])
            response.deliverBody(DataPrinter(defer, "getmdata"))
            return NOT_DONE_YET

        args = ("get_mdata", str(self.ccid), data[1])
        salt = self.processCookie("/shares")
        body = FileBodyProducer(
            StringIO(self.client_id.genHashArgs(args, salt)))

        agent = CookieAgent(Agent(reactor), self.cookie_jar)
        headers = http_headers.Headers()
        d = agent.request(
            'GET', 'http://localhost:8000/shares/?method=get_mdata&ccid=' +
            self.ccid + "&fileid=" + data[1], headers, body)

        d.addCallback(handleGetShareMData_cb)

        return NOT_DONE_YET
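
    # The metadata getters hand the parsed reply to data[0]. Judging from
    # the consumers later in this listing (handleUpdate, handleShare), the
    # reply carries at least (inferred shape, not a documented contract):
    #
    #   {"data": {"SymKey": "<RSA-encrypted symmetric key>",
    #             "PubKey": "<PEM-encoded public key>"}}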

    # printResult_cb: pretty-prints a parsed reply
    def printResult_cb(self, data):
        pprint(data)  # TODO: format this output
        return NOT_DONE_YET

    # for info requests
    def handleGetInfo(self, s):
        if s[1].lower() == "pboxinfo":
            return self.handleGetMData((self.printResult_cb, s[2].lower()))
        elif s[1].lower() == "fileinfo":
            return self.handleGetFileMData((self.printResult_cb, s[2].lower()))
        elif s[1].lower() == "shareinfo":
            return self.handleGetShareMData(
                (self.printResult_cb, s[2].lower()))

    # Decrypt and write the file
    # TODO: implement HTTP error code checking
    def writeFile_cb(self, ignore, s):
        fileId = s[2]
        enc_file = open(fileId, "r")
        if len(s) == 4:
            dec_file = open(s[3], "w")
        else:
            dec_file = open(fileId + "_decrypted", "w")

        enc_key = enc_file.read(IV_KEY_SIZE_B64)
        print "Decrypting file..."
        key = self.client_id.decryptData(enc_key)
        enc_iv = enc_file.read(IV_KEY_SIZE_B64)
        iv = self.client_id.decryptData(enc_iv)
        self.client_id.decryptFileSym(enc_file, dec_file, key, iv)
        print "File written."

    # for get file
    def handleGetFile(self, s):
        def handleGetFile_cb(response, f):
            finished = Deferred()
            finished.addCallback(self.writeFile_cb, s)
            cons = FileConsumer(f)
            response.deliverBody(FileDownload(finished, cons))
            print "Downloading file..."
            return finished

        fileId = s[2]
        args = ("getfile", str(self.ccid), str(fileId))
        salt = self.processCookie("/files")
        body = FileBodyProducer(
            StringIO(self.client_id.genHashArgs(args, salt)))

        agent = CookieAgent(Agent(reactor), self.cookie_jar)
        headers = http_headers.Headers()
        d = agent.request(
            'GET', 'http://localhost:8000/files/?method=getfile&ccid=' +
            self.ccid + '&fileid=' + str(fileId), headers, body)
        f = open(fileId, "w")
        d.addCallback(handleGetFile_cb, f)
        return NOT_DONE_YET

    # for get shared
    def handleGetShared(self, s):
        def handleGetShared_cb(response, f):
            finished = Deferred()
            finished.addCallback(self.writeFile_cb, s)
            cons = FileConsumer(f)
            response.deliverBody(FileDownload(finished, cons))
            print "Downloading file..."
            return finished

        fileId = s[2]
        args = ("getshared", str(self.ccid), str(fileId))
        salt = self.processCookie("/shares")
        body = FileBodyProducer(
            StringIO(self.client_id.genHashArgs(args, salt)))

        agent = CookieAgent(Agent(reactor), self.cookie_jar)
        headers = http_headers.Headers()
        d = agent.request(
            'GET', 'http://localhost:8000/shares/?method=getshared&ccid=' +
            self.ccid + '&fileid=' + fileId, headers, body)
        f = open(fileId, "w")
        d.addCallback(handleGetShared_cb, f)
        return NOT_DONE_YET

# Put Operations
#
# printPutReply_cb: prints put and update responses

    def printPutReply_cb(self, response):
        print "Done."

        defer = Deferred()
        response.deliverBody(DataPrinter(defer, "getmdata"))
        return NOT_DONE_YET

    # handlePutFile: handles file upload
    def handlePutFile(self, line):
        print "Encrypting file..."
        s = line.split()
        in_file = open(s[2], 'r')
        enc_file = open("enc_fileout", 'w')
        crd = self.client_id.encryptFileSym(in_file, enc_file)

        args = ("putfile", str(self.ccid), os.path.basename(s[2]))
        salt = self.processCookie("/files")

        dataq = []
        dataq.append(self.client_id.genHashArgs(args, salt))
        dataq.append(self.client_id.encryptData(crd[0],
                                                self.client_id.pub_key))
        dataq.append(self.client_id.encryptData(crd[1]))
        agent = CookieAgent(Agent(reactor), self.cookie_jar)
        print "Uploading file..."
        enc_file = open("enc_fileout", 'r')
        body = _FileProducer(enc_file, dataq)
        headers = http_headers.Headers()
        d = agent.request(
            'PUT', 'http://localhost:8000/files/?method=putfile&ccid=' +
            self.ccid + "&name=" + os.path.basename(s[2]), headers, body)
        d.addCallback(self.printPutReply_cb)

        return NOT_DONE_YET
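
    # Upload body assembled by handlePutFile, in the order the dataq
    # appends above produce it (the server parser presumably relies on
    # this order):
    #
    #   1. genHashArgs(args, salt)  -- request authenticator
    #   2. enc(sym key)             -- RSA-encrypted file key
    #   3. enc(iv)                  -- RSA-encrypted IV
    #   4. encrypted file stream    -- streamed by _FileProducer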

# Update Operations
#
# handleUpdate: handles update commands

    def handleUpdate(self, s):
        def encryptFile_cb(data):  #TODO: Some error checking here.
            def updateFile_cb(iv):
                print "Updating file..."

                args = ("updatefile", str(self.ccid), os.path.basename(s[3]),
                        s[2])
                salt = self.processCookie("/files")

                dataq = []
                dataq.append(self.client_id.genHashArgs(args, salt))
                dataq.append(iv)
                agent = CookieAgent(Agent(reactor), self.cookie_jar)
                print "Uploading file..."
                enc_file = open("enc_fileout", 'r')
                body = _FileProducer(enc_file, dataq)
                headers = http_headers.Headers()
                d = agent.request(
                    'POST',
                    'http://localhost:8000/files/?method=updatefile&ccid=' +
                    self.ccid + "&name=" + os.path.basename(s[3]) +
                    "&fileid=" + s[2], headers, body)
                d.addCallback(self.printPutReply_cb)

                return NOT_DONE_YET

            def updateShared_cb(iv):
                print "Updating file..."

                args = ("updateshared", str(self.ccid), os.path.basename(s[3]),
                        s[2])
                salt = self.processCookie("/shares")

                dataq = []
                dataq.append(self.client_id.genHashArgs(args, salt))
                dataq.append(iv)
                # print "debugging:ticket, iv updatefile"
                # print dataq[0]
                # print dataq[1]
                # print len(dataq[1])
                print "Uploading file..."
                agent = CookieAgent(Agent(reactor), self.cookie_jar)
                enc_file = open("enc_fileout", 'r')
                body = _FileProducer(enc_file, dataq)
                headers = http_headers.Headers()
                d = agent.request(
                    'POST',
                    'http://localhost:8000/shares/?method=updateshared&ccid=' +
                    self.ccid + "&name=" + os.path.basename(s[3]) +
                    "&fileid=" + s[2], headers, body)
                d.addCallback(self.printPutReply_cb)

                return NOT_DONE_YET

            if isinstance(data, basestring):
                print data
                return

            print "Encrypting file..."
            #print data["data"]["SymKey"]
            enc_key = data["data"]["SymKey"]
            key = self.client_id.decryptData(enc_key, self.client_id.priv_key)
            #print len(key)
            file = open(s[3], 'r')
            enc_file = open("enc_fileout", 'w')
            crd = self.client_id.encryptFileSym(file, enc_file, key=key)
            new_iv = self.client_id.encryptData(crd[1])
            if s[1] == "shared":
                return updateShared_cb(new_iv)
            return updateFile_cb(new_iv)

        hsmd_data = (encryptFile_cb, s[2])
        if s[1] == "file":
            return self.handleGetFileMData(hsmd_data)
        return self.handleGetShareMData(hsmd_data)
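
    # Update flow, as implemented above (a sketch of the call chain):
    #
    #   handleUpdate(s)
    #     -> handleGetFileMData / handleGetShareMData  # fetch metadata
    #     -> encryptFile_cb(data)      # decrypt SymKey, re-encrypt file
    #     -> updateFile_cb(new_iv) or updateShared_cb(new_iv)  # POST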

    def handleUpdateSharePerm(self, s):
        args = ("updateshareperm", str(self.ccid), s[3], s[2], s[4])
        salt = self.processCookie("/shares")
        body = FileBodyProducer(
            StringIO(self.client_id.genHashArgs(args, salt)))

        agent = CookieAgent(Agent(reactor), self.cookie_jar)
        headers = http_headers.Headers()
        d = agent.request(
            'POST',
            'http://localhost:8000/shares/?method=updateshareperm&ccid=' +
            self.ccid + "&rccid=" + s[3] + "&fileid=" + s[2] + "&writeable=" +
            s[4], headers, body)
        d.addCallback(self.printPutReply_cb)

        return NOT_DONE_YET

# Delete Operations
#
# handleDelete: handles delete commands

    def handleDelete(self, line):
        def printDeleteReply_cb(data):
            print "Done."

        def deleteFile_cb():
            args = ("delete", str(self.ccid), s[2])
            salt = self.processCookie("/files")
            body = FileBodyProducer(
                StringIO(self.client_id.genHashArgs(args, salt)))

            agent = CookieAgent(Agent(reactor), self.cookie_jar)
            headers = http_headers.Headers()
            d = agent.request(
                'DELETE', 'http://localhost:8000/files/?method=delete&ccid=' +
                self.ccid + "&fileid=" + s[2], headers, body)

            d.addCallback(printDeleteReply_cb)

        def deleteShare_cb():
            args = ("delete", str(self.ccid), s[2], s[3])
            salt = self.processCookie("/shares")
            body = FileBodyProducer(
                StringIO(self.client_id.genHashArgs(args, salt)))

            agent = CookieAgent(Agent(reactor), self.cookie_jar)
            headers = http_headers.Headers()
            d = agent.request(
                'DELETE', 'http://localhost:8000/shares/?method=delete&ccid=' +
                self.ccid + "&fileid=" + s[2] + "&rccid=" + s[3], headers,
                body)

            d.addCallback(printDeleteReply_cb)

        s = line.split()
        if len(s) == 4:
            return deleteShare_cb()
        if len(s) == 3:
            return deleteFile_cb()

        print "Error: invalid arguments!\n"
        print "Usage: delete <file|share> <fileid> <None|rccid>"
        return
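
    # Example command lines accepted by handleDelete (ids are made up):
    #
    #   delete file 42           -> deleteFile_cb()
    #   delete share 42 1234567  -> deleteShare_cb()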

# Share Operation
#

    def handleShare(self, line):
        def getFKey_cb(data):
            enc_key = data["data"]["SymKey"]

            def getDstKey_cb(data):
                dstkey = data["data"]["PubKey"]
                print "pubkey" + dstkey

                def shareFile_cb():
                    args = ("delete", str(self.ccid), s[3], s[2])
                    salt = self.processCookie("/shares")

                    dataq = []
                    dataq.append(self.client_id.genHashArgs(args, salt))
                    dataq.append(enc_sym_key)
                    print "Uploading symkey..."

                    agent = CookieAgent(Agent(reactor), self.cookie_jar)
                    body = _FileProducer(StringIO(""), dataq)
                    headers = http_headers.Headers()
                    d = agent.request(
                        'PUT',
                        'http://localhost:8000/shares/?method=sharefile&ccid='
                        + self.ccid + "&rccid=" + s[3] + "&fileid=" + s[2],
                        headers, body)
                    d.addCallback(self.printPutReply_cb)

                    return d

                sym_key = self.client_id.decryptData(enc_key,
                                                     self.client_id.priv_key)
                dstkey = RSA.importKey(dstkey)
                enc_sym_key = self.client_id.encryptData(sym_key, dstkey)
                return shareFile_cb()

            hfmd_data = (getDstKey_cb, s[3].lower())
            return self.handleGetMData(hfmd_data)

        s = line.split()
        if len(s) == 4:
            hmd_data = (getFKey_cb, s[2].lower())
            return self.handleGetFileMData(hmd_data)

        else:
            if s[1].lower() != "file":
                print "Error: invalid arguments!\n"
                print "Usage: share file <fileid> <recipient's ccid>"
                return