Example 1
0
    def _cookies_for_domain(self, domain, request, unverifiable):
        """Return the list of cookies to be returned for *domain*.

        Consults the policy first; if the domain is not acceptable an
        empty list is returned without touching any delay-loaded data.
        """
        debug("Checking %s for cookies to return" % domain)
        policy_ok = self.policy.domain_return_ok(domain, request,
                                                 unverifiable)
        if policy_ok:
            # Make sure any delay-loaded cookies for this domain are in
            # memory before delegating to the base class.
            if self.delayload:
                self._delayload_domain(domain)
            return CookieJar._cookies_for_domain(self, domain, request,
                                                 unverifiable)
        return []
Example 2
0
    def _cookies_for_domain(self, domain, request, unverifiable):
        """Return the list of cookies to be returned for *domain*.

        Consults the policy first; if the domain is not acceptable an
        empty list is returned without touching any delay-loaded data.
        """
        debug("Checking %s for cookies to return" % domain)
        policy_ok = self.policy.domain_return_ok(domain, request,
                                                 unverifiable)
        if policy_ok:
            # Make sure any delay-loaded cookies for this domain are in
            # memory before delegating to the base class.
            if self.delayload:
                self._delayload_domain(domain)
            return CookieJar._cookies_for_domain(self, domain, request,
                                                 unverifiable)
        return []
Example 3
0
 def _delayload_domain(self, domain):
     """Load the cookie file registered for *domain*, if one is pending.

     On success the registration is removed so the file is read only
     once; on IOError the entry is kept and the failure is only logged.
     """
     pending = self._delayload_domains.get(domain)
     if pending is None:
         return
     cookie_file, ignore_discard, ignore_expires = pending
     try:
         self.load_cookie_data(cookie_file, ignore_discard, ignore_expires)
     except IOError:
         debug("error reading cookie file, skipping: %s" % cookie_file)
     else:
         del self._delayload_domains[domain]
Example 4
0
 def _delayload_domain(self, domain):
     """Load the cookie file registered for *domain*, if one is pending.

     On success the registration is removed so the file is read only
     once; on IOError the entry is kept and the failure is only logged.
     """
     pending = self._delayload_domains.get(domain)
     if pending is None:
         return
     cookie_file, ignore_discard, ignore_expires = pending
     try:
         self.load_cookie_data(cookie_file, ignore_discard, ignore_expires)
     except IOError:
         debug("error reading cookie file, skipping: %s" % cookie_file)
     else:
         del self._delayload_domains[domain]
Example 5
0
    def save(self, filename=None, ignore_discard=False, ignore_expires=False):
        """Write the jar's cookies to *filename* in Netscape cookies.txt
        format.

        filename defaults to self.filename; ValueError is raised when
        neither is set.  Cookies marked for discard are skipped unless
        ignore_discard is true; expired cookies are skipped unless
        ignore_expires is true.
        """
        if filename is None:
            if self.filename is None:
                raise ValueError(MISSING_FILENAME_TEXT)
            filename = self.filename

        f = open(filename, "w")
        try:
            f.write(self.header)
            now = time.time()
            debug("Saving Netscape cookies.txt file")
            for cookie in self:
                if not ignore_discard and cookie.discard:
                    debug("   Not saving %s: marked for discard" % cookie.name)
                    continue
                if not ignore_expires and cookie.is_expired(now):
                    debug("   Not saving %s: expired" % cookie.name)
                    continue
                if cookie.secure:
                    secure = "TRUE"
                else:
                    secure = "FALSE"
                if startswith(cookie.domain, "."):
                    initial_dot = "TRUE"
                else:
                    initial_dot = "FALSE"
                if cookie.expires is None:
                    expires = ""
                else:
                    expires = str(cookie.expires)
                if cookie.name is None:
                    name = ""
                else:
                    name = cookie.name
                # One tab-separated line per cookie, Netscape-style.
                fields = [cookie.domain, initial_dot, cookie.path,
                          secure, expires, name, cookie.value]
                f.write(string.join(fields, "\t") + "\n")
        finally:
            f.close()
Example 6
0
    def _really_load(self, index, filename, ignore_discard, ignore_expires,
                     username):
        """Parse an MSIE index.dat file and load (or register for delayed
        loading) the per-domain cookie files it references.

        index: open file object positioned at the start of index.dat.
        filename: path of index.dat, used to locate the directory holding
            the individual cookie files and in error messages.
        ignore_discard, ignore_expires: passed through to load_cookie_data.
        username: Windows user name embedded in cookie file names; when
            None, a lowercased USERNAME environment variable is used.

        Raises IOError if the file is too short or its header does not
        look like an index.dat cookie index.
        """
        if username is None:
            username = string.lower(os.environ['USERNAME'])

        cookie_dir = os.path.dirname(filename)

        data = index.read(256)
        if len(data) != 256:
            raise IOError("%s file is too short" % filename)

        # Cookies' index.dat file starts with 32 bytes of signature
        # followed by an offset to the first record, stored as a little-
        # endian DWORD.
        sig, size, data = data[:32], data[32:36], data[36:]
        size = struct.unpack("<L", size)[0]

        # Sanity-check the header: known magic and the expected 0x4000
        # first-record offset.
        if not self.magic_re.match(sig) or size != 0x4000:
            raise IOError("%s ['%s' %s] does not seem to contain cookies" %
                          (str(filename), sig, size))

        # skip to start of first record
        index.seek(size, 0)

        sector = 128  # size of sector in bytes

        while 1:
            data = ""

            # Cookies are usually in two contiguous sectors, so read in two
            # sectors and adjust if not a Cookie.
            to_read = 2 * sector
            d = index.read(to_read)
            if len(d) != to_read:
                break
            data = data + d

            # Each record starts with a 4-byte signature and a count
            # (little-endian DWORD) of sectors for the record.
            sig, size, data = data[:4], data[4:8], data[8:]
            size = struct.unpack("<L", size)[0]

            to_read = (size - 2) * sector

            if sig != "URL ":
                # BUG FIX: the original wrapped condition and message in a
                # single parenthesized tuple -- `assert (cond, msg)` -- which
                # asserts a non-empty tuple and therefore can never fail.
                assert sig in ("HASH", "LEAK", self.padding,
                               "\x00\x00\x00\x00"), \
                    ("unrecognized MSIE index.dat record: %s" %
                     binary_to_str(sig))
                if sig == "\x00\x00\x00\x00":
                    # assume we've got all the cookies, and stop
                    break
                if sig == self.padding:
                    continue
                # skip the rest of this record
                assert to_read >= 0
                if size != 2:
                    assert to_read != 0
                    index.seek(to_read, 1)
                continue

            # read in rest of record if necessary
            if size > 2:
                more_data = index.read(to_read)
                if len(more_data) != to_read:
                    break
                data = data + more_data

            # Raw strings: "\:" and "\@" are not valid string escapes and
            # only reached the regex engine by accident in cooked strings.
            cookie_re = (r"Cookie\:%s\@([\x21-\xFF]+).*?" % username +
                         r"(%s\@[\x21-\xFF]+\.txt)" % username)
            m = re.search(cookie_re, data, re.I)
            if m:
                cookie_file = os.path.join(cookie_dir, m.group(2))
                if not self.delayload:
                    try:
                        self.load_cookie_data(cookie_file, ignore_discard,
                                              ignore_expires)
                    except IOError:
                        debug("error reading cookie file, skipping: %s" %
                              cookie_file)
                else:
                    # Register the cookie file so it is loaded on first
                    # access to this domain instead of right now.
                    domain = m.group(1)
                    i = domain.find("/")
                    if i != -1:
                        domain = domain[:i]

                    self._delayload_domains[domain] = (cookie_file,
                                                       ignore_discard,
                                                       ignore_expires)
Example 7
0
    def _really_load(self, index, filename, ignore_discard, ignore_expires,
                     username):
        """Parse an MSIE index.dat file and load (or register for delayed
        loading) the per-domain cookie files it references.

        index: open file object positioned at the start of index.dat.
        filename: path of index.dat, used to locate the directory holding
            the individual cookie files and in error messages.
        ignore_discard, ignore_expires: passed through to load_cookie_data.
        username: Windows user name embedded in cookie file names; when
            None, a lowercased USERNAME environment variable is used.

        Raises IOError if the file is too short or its header does not
        look like an index.dat cookie index.
        """
        if username is None:
            username = string.lower(os.environ['USERNAME'])

        cookie_dir = os.path.dirname(filename)

        data = index.read(256)
        if len(data) != 256:
            raise IOError("%s file is too short" % filename)

        # Cookies' index.dat file starts with 32 bytes of signature
        # followed by an offset to the first record, stored as a little-
        # endian DWORD.
        sig, size, data = data[:32], data[32:36], data[36:]
        size = struct.unpack("<L", size)[0]

        # Sanity-check the header: known magic and the expected 0x4000
        # first-record offset.
        if not self.magic_re.match(sig) or size != 0x4000:
            raise IOError("%s ['%s' %s] does not seem to contain cookies" %
                          (str(filename), sig, size))

        # skip to start of first record
        index.seek(size, 0)

        sector = 128  # size of sector in bytes

        while 1:
            data = ""

            # Cookies are usually in two contiguous sectors, so read in two
            # sectors and adjust if not a Cookie.
            to_read = 2 * sector
            d = index.read(to_read)
            if len(d) != to_read:
                break
            data = data + d

            # Each record starts with a 4-byte signature and a count
            # (little-endian DWORD) of sectors for the record.
            sig, size, data = data[:4], data[4:8], data[8:]
            size = struct.unpack("<L", size)[0]

            to_read = (size - 2) * sector

            if sig != "URL ":
                # BUG FIX: the original wrapped condition and message in a
                # single parenthesized tuple -- `assert (cond, msg)` -- which
                # asserts a non-empty tuple and therefore can never fail.
                assert sig in ("HASH", "LEAK", self.padding,
                               "\x00\x00\x00\x00"), \
                    ("unrecognized MSIE index.dat record: %s" %
                     binary_to_str(sig))
                if sig == "\x00\x00\x00\x00":
                    # assume we've got all the cookies, and stop
                    break
                if sig == self.padding:
                    continue
                # skip the rest of this record
                assert to_read >= 0
                if size != 2:
                    assert to_read != 0
                    index.seek(to_read, 1)
                continue

            # read in rest of record if necessary
            if size > 2:
                more_data = index.read(to_read)
                if len(more_data) != to_read:
                    break
                data = data + more_data

            # Raw strings: "\:" and "\@" are not valid string escapes and
            # only reached the regex engine by accident in cooked strings.
            cookie_re = (r"Cookie\:%s\@([\x21-\xFF]+).*?" % username +
                         r"(%s\@[\x21-\xFF]+\.txt)" % username)
            m = re.search(cookie_re, data, re.I)
            if m:
                cookie_file = os.path.join(cookie_dir, m.group(2))
                if not self.delayload:
                    try:
                        self.load_cookie_data(cookie_file, ignore_discard,
                                              ignore_expires)
                    except IOError:
                        debug("error reading cookie file, skipping: %s" %
                              cookie_file)
                else:
                    # Register the cookie file so it is loaded on first
                    # access to this domain instead of right now.
                    domain = m.group(1)
                    i = domain.find("/")
                    if i != -1:
                        domain = domain[:i]

                    self._delayload_domains[domain] = (
                        cookie_file, ignore_discard, ignore_expires)