Example #1
    def __init__(self,
                 root,
                 cache_file_name,
                 mode=None,
                 content_filter_stack_provider=None):
        """Create a hash cache in base dir, and set the file mode to mode.

        :param content_filter_stack_provider: a function that takes a
            path (relative to the top of the tree) and a file-id as
            parameters and returns a stack of ContentFilters.
            If None, no content filtering is performed.
        """
        self.root = osutils.safe_unicode(root)
        self.root_utf8 = self.root.encode(
            'utf8')  # where is the filesystem encoding ?
        self.hit_count = 0
        self.miss_count = 0
        self.stat_count = 0
        self.danger_count = 0
        self.removed_count = 0
        self.update_count = 0
        self._cache = {}
        self._mode = mode
        self._cache_file_name = osutils.safe_unicode(cache_file_name)
        self._filter_provider = content_filter_stack_provider
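The docstring above says the provider maps a tree-relative path and a file-id to a stack of ContentFilters. As a rough, hedged sketch (the function name and the no-op behaviour are illustrative assumptions, not part of the listing), such a callable could simply return an empty stack to disable filtering:

def no_op_filter_provider(path, file_id):
    # Matches the documented signature: a tree-relative path and a file-id.
    # Returning an empty list means no ContentFilters apply to this path,
    # which behaves like passing content_filter_stack_provider=None.
    return []

Passing a callable like this (or None) to the constructor above leaves file content unfiltered before hashing.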
Example #2
 def __init__(self, root, cache_file_name, mode=None):
     """Create a hash cache in base dir, and set the file mode to mode."""
     self.root = safe_unicode(root)
     self.root_utf8 = self.root.encode('utf8') # where is the filesystem encoding ?
     self.hit_count = 0
     self.miss_count = 0
     self.stat_count = 0
     self.danger_count = 0
     self.removed_count = 0
     self.update_count = 0
     self._cache = {}
     self._mode = mode
     self._cache_file_name = safe_unicode(cache_file_name)
Example #3
 def __init__(self, root, cache_file_name, mode=None):
     """Create a hash cache in base dir, and set the file mode to mode."""
     self.root = safe_unicode(root)
     self.root_utf8 = self.root.encode(
         'utf8')  # where is the filesystem encoding ?
     self.hit_count = 0
     self.miss_count = 0
     self.stat_count = 0
     self.danger_count = 0
     self.removed_count = 0
     self.update_count = 0
     self._cache = {}
     self._mode = mode
     self._cache_file_name = safe_unicode(cache_file_name)
Example #4
def save_commit_messages(local, master, old_revno, old_revid,
                         new_revno, new_revid):
    b = local
    if b is None:
        b = master
    mgr = SavedCommitMessagesManager(None, b)
    graph = b.repository.get_graph()
    revid_iterator = graph.iter_lefthand_ancestry(old_revid)
    cur_revno = old_revno
    new_revision_id = old_revid
    graph = b.repository.get_graph()
    for rev_id in revid_iterator:
        if cur_revno == new_revno:
            break
        cur_revno -= 1
        rev = b.repository.get_revision(rev_id)
        file_info = rev.properties.get('file-info', None)
        if file_info is None:
            file_info = {}
        else:
            file_info = bencode.bdecode(file_info.encode('UTF-8'))
        global_message = osutils.safe_unicode(rev.message)
        # Concatenate comment of the uncommitted revision
        mgr.insert(global_message, file_info)

        parents = graph.get_parent_map([rev_id]).get(rev_id, None)
        if not parents:
            continue
    mgr.save(None, b)
Example #5
    def __init__(self, from_address, to_address, subject, body=None):
        """Create an email message.

        :param from_address: The origin address, to be put on the From header.
        :param to_address: The destination address of the message, to be put in
            the To header. Can also be a list of addresses.
        :param subject: The subject of the message.
        :param body: If given, the body of the message.

        All four parameters can be unicode strings or byte strings, but for the
        addresses and subject byte strings must be encoded in UTF-8. For the
        body any byte string will be accepted; if it's not ASCII or UTF-8,
        it'll be sent with charset=8-bit.
        """
        self._headers = {}
        self._body = body
        self._parts = []

        if isinstance(to_address, basestring):
            to_address = [ to_address ]

        to_addresses = []

        for addr in to_address:
            to_addresses.append(self.address_to_encoded_header(addr))

        self._headers['To'] = ', '.join(to_addresses)
        self._headers['From'] = self.address_to_encoded_header(from_address)
        self._headers['Subject'] = Header.Header(safe_unicode(subject))
        self._headers['User-Agent'] = 'Bazaar (%s)' % _bzrlib_version
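A hedged usage sketch of the constructor above; the class name EmailMessage and its import path are assumptions inferred from the parameter names, so the call is shown as comments rather than asserted API:

# Hypothetical usage, assuming this is bzrlib's EmailMessage class:
# from bzrlib.email_message import EmailMessage
# msg = EmailMessage(u'Alice Example <alice@example.com>',
#                    [u'bob@example.com', u'carol@example.com'],
#                    u'Status report',
#                    body=u'All tests pass.')
# Per the docstring, the addresses and subject may also be UTF-8 byte
# strings; a non-ASCII, non-UTF-8 body would be sent with charset=8-bit.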
Example #6
    def __init__(self, root, cache_file_name, mode=None,
            content_filter_stack_provider=None):
        """Create a hash cache in base dir, and set the file mode to mode.

        :param content_filter_stack_provider: a function that takes a
            path (relative to the top of the tree) and a file-id as
            parameters and returns a stack of ContentFilters.
            If None, no content filtering is performed.
        """
        self.root = osutils.safe_unicode(root)
        self.root_utf8 = self.root.encode('utf8') # where is the filesystem encoding ?
        self.hit_count = 0
        self.miss_count = 0
        self.stat_count = 0
        self.danger_count = 0
        self.removed_count = 0
        self.update_count = 0
        self._cache = {}
        self._mode = mode
        self._cache_file_name = osutils.safe_unicode(cache_file_name)
        self._filter_provider = content_filter_stack_provider
Example #7
    def address_to_encoded_header(address):
        """RFC2047-encode an address if necessary.

        :param address: An unicode string, or UTF-8 byte string.
        :return: A possibly RFC2047-encoded string.
        """
        # Can't call Header over all the address, because that encodes both the
        # name and the email address, which is not permitted by RFCs.
        user, email = Utils.parseaddr(address)
        if not user:
            return email
        else:
            return Utils.formataddr((str(Header.Header(safe_unicode(user))),
                email))
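The same split between display name and plain address can be reproduced with the standard library alone; a minimal standalone sketch (Python 3 stdlib names, not the bzrlib helpers used above):

from email.header import Header
from email.utils import formataddr, parseaddr

def encode_address(address):
    # Encode only the display name as an RFC2047 encoded-word; the email
    # address itself must remain plain ASCII, as the method above notes.
    name, addr = parseaddr(address)
    if not name:
        return addr
    return formataddr((str(Header(name, 'utf-8')), addr))

# encode_address(u'Jürgen Müller <jm@example.com>') keeps the address
# untouched and turns the name into a '=?utf-8?...?=' encoded-word.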
Example #8
    def get_sha1(self, path, stat_value=None):
        """Return the sha1 of a file.
        """
        if path.__class__ is str:
            abspath = osutils.pathjoin(self.root_utf8, path)
        else:
            abspath = osutils.pathjoin(self.root, path)
        self.stat_count += 1
        file_fp = self._fingerprint(abspath, stat_value)

        if not file_fp:
            # not a regular file or not existing
            if path in self._cache:
                self.removed_count += 1
                self.needs_write = True
                del self._cache[path]
            return None

        if path in self._cache:
            cache_sha1, cache_fp = self._cache[path]
        else:
            cache_sha1, cache_fp = None, None

        if cache_fp == file_fp:
            ## mutter("hashcache hit for %s %r -> %s", path, file_fp, cache_sha1)
            ## mutter("now = %s", time.time())
            self.hit_count += 1
            return cache_sha1

        self.miss_count += 1

        mode = file_fp[FP_MODE_COLUMN]
        if stat.S_ISREG(mode):
            if self._filter_provider is None:
                filters = []
            else:
                filters = self._filter_provider(path=path, file_id=None)
            digest = self._really_sha1_file(abspath, filters)
        elif stat.S_ISLNK(mode):
            target = osutils.readlink(osutils.safe_unicode(abspath))
            digest = osutils.sha_string(target.encode('UTF-8'))
        else:
            raise errors.BzrError("file %r: unknown file stat mode: %o"
                                  % (abspath, mode))

        # window of 3 seconds to allow for 2s resolution on windows,
        # unsynchronized file servers, etc.
        cutoff = self._cutoff_time()
        if file_fp[FP_MTIME_COLUMN] >= cutoff \
                or file_fp[FP_CTIME_COLUMN] >= cutoff:
            # changed too recently; can't be cached.  we can
            # return the result and it could possibly be cached
            # next time.
            #
            # the point is that we only want to cache when we are sure that any
            # subsequent modifications of the file can be detected.  If a
            # modification neither changes the inode, the device, the size, nor
            # the mode, then we can only distinguish it by time; therefore we
            # need to let sufficient time elapse before we may cache this entry
            # again.  If we didn't do this, then, for example, a very quick 1
            # byte replacement in the file might go undetected.
            ## mutter('%r modified too recently; not caching', path)
            self.danger_count += 1
            if cache_fp:
                self.removed_count += 1
                self.needs_write = True
                del self._cache[path]
        else:
            ## mutter('%r added to cache: now=%f, mtime=%d, ctime=%d',
            ##        path, time.time(), file_fp[FP_MTIME_COLUMN],
            ##        file_fp[FP_CTIME_COLUMN])
            self.update_count += 1
            self.needs_write = True
            self._cache[path] = (digest, file_fp)
        return digest
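The long comment above is the heart of the cache-safety rule: an entry is only written once the file is older than the cutoff. A small standalone sketch of that rule (the function name and the 3-second default window are illustrative, following the comment rather than quoting the real _cutoff_time implementation):

import time

def is_safe_to_cache(mtime, ctime, now=None, window=3.0):
    # A file touched within the last `window` seconds could still be
    # modified without changing size, inode, device or mode, so its hash
    # cannot be trusted in the cache yet.
    if now is None:
        now = time.time()
    cutoff = now - window
    return mtime < cutoff and ctime < cutoff

# A file modified a minute ago is cacheable; one modified just now is not:
# is_safe_to_cache(time.time() - 60, time.time() - 60)  -> True
# is_safe_to_cache(time.time(), time.time())            -> False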
Example #9
    def get_sha1(self, path, stat_value=None):
        """Return the sha1 of a file.
        """
        if path.__class__ is str:
            abspath = osutils.pathjoin(self.root_utf8, path)
        else:
            abspath = osutils.pathjoin(self.root, path)
        self.stat_count += 1
        file_fp = self._fingerprint(abspath, stat_value)

        if not file_fp:
            # not a regular file or not existing
            if path in self._cache:
                self.removed_count += 1
                self.needs_write = True
                del self._cache[path]
            return None

        if path in self._cache:
            cache_sha1, cache_fp = self._cache[path]
        else:
            cache_sha1, cache_fp = None, None

        if cache_fp == file_fp:
            ## mutter("hashcache hit for %s %r -> %s", path, file_fp, cache_sha1)
            ## mutter("now = %s", time.time())
            self.hit_count += 1
            return cache_sha1

        self.miss_count += 1

        mode = file_fp[FP_MODE_COLUMN]
        if stat.S_ISREG(mode):
            if self._filter_provider is None:
                filters = []
            else:
                filters = self._filter_provider(path=path, file_id=None)
            digest = self._really_sha1_file(abspath, filters)
        elif stat.S_ISLNK(mode):
            target = osutils.readlink(osutils.safe_unicode(abspath))
            digest = osutils.sha_string(target.encode('UTF-8'))
        else:
            raise errors.BzrError("file %r: unknown file stat mode: %o" %
                                  (abspath, mode))

        # window of 3 seconds to allow for 2s resolution on windows,
        # unsynchronized file servers, etc.
        cutoff = self._cutoff_time()
        if file_fp[FP_MTIME_COLUMN] >= cutoff \
                or file_fp[FP_CTIME_COLUMN] >= cutoff:
            # changed too recently; can't be cached.  we can
            # return the result and it could possibly be cached
            # next time.
            #
            # the point is that we only want to cache when we are sure that any
            # subsequent modifications of the file can be detected.  If a
            # modification neither changes the inode, the device, the size, nor
            # the mode, then we can only distinguish it by time; therefore we
            # need to let sufficient time elapse before we may cache this entry
            # again.  If we didn't do this, then, for example, a very quick 1
            # byte replacement in the file might go undetected.
            ## mutter('%r modified too recently; not caching', path)
            self.danger_count += 1
            if cache_fp:
                self.removed_count += 1
                self.needs_write = True
                del self._cache[path]
        else:
            ## mutter('%r added to cache: now=%f, mtime=%d, ctime=%d',
            ##        path, time.time(), file_fp[FP_MTIME_COLUMN],
            ##        file_fp[FP_CTIME_COLUMN])
            self.update_count += 1
            self.needs_write = True
            self._cache[path] = (digest, file_fp)
        return digest
Example #10
 def _escape(self, file_or_path):
     if not isinstance(file_or_path, basestring):
         file_or_path = '/'.join(file_or_path)
     if file_or_path == '':
         return u''
     return urlutils.escape(osutils.safe_unicode(file_or_path))
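For comparison, a stdlib-only sketch of the same idea, joining path segments and percent-escaping the result (urllib.parse.quote is an assumption standing in for bzrlib's urlutils.escape, and the safe-character set is illustrative):

from urllib.parse import quote

def escape_path(file_or_path):
    # Accept either a ready-made path string or a sequence of segments,
    # mirroring the _escape helper above.
    if not isinstance(file_or_path, str):
        file_or_path = '/'.join(file_or_path)
    if file_or_path == '':
        return ''
    return quote(file_or_path, safe='/~')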
Example #11
 def ftp2fs(self, ftppath):
     p = osutils.safe_unicode(ftppath)
     return ftpserver.AbstractedFS.ftp2fs(self, p)
Example #12
 def fs2ftp(self, fspath):
     p = ftpserver.AbstractedFS.fs2ftp(self, osutils.safe_unicode(fspath))
     return osutils.safe_utf8(p)
Example #13
 def ftp2fs(self, ftppath):
     p = osutils.safe_unicode(ftppath)
     return ftpserver.AbstractedFS.ftp2fs(self, p)
Example #14
 def fs2ftp(self, fspath):
     p = ftpserver.AbstractedFS.fs2ftp(self, osutils.safe_unicode(fspath))
     return osutils.safe_utf8(p)
Example #15
 def _escape(self, file_or_path):
     if not isinstance(file_or_path, basestring):
         file_or_path = '/'.join(file_or_path)
     if file_or_path == '':
         return u''
     return urlutils.escape(osutils.safe_unicode(file_or_path))