def _decode(self):
        # FIXME: confusing definition and use cases; currently only used in doctests
        r"""
        Returns a printable, readable dict representing the current instance.

          - byte arrays are decoded; decoding errors are handled
              - with backslash replacement,
              - and a warning is issued with the full path of the problematic entry
          - the datetime is converted to its string representation

        Check the warning messages triggered below: they should include the full path.

        >>> import datetime
        >>> d = DirBlock(name=b"/some/messy\xe9ename/in/path",
        ...              dt=datetime.datetime(2013, 8, 16, 17, 37, 18, 885441),
        ...              contents=[(False, b'messy\xe9efilename.jpg'),
        ...                        (False, b'110831202504820_47_000_apx_470_.jpg')])
        >>> d._decode() == {
        ...        'dt': '2013-08-16 17:37:18.885441',
        ...        'name': '/some/messy\\xe9ename/in/path',
        ...        'contents': [(False, 'messy\\xe9efilename.jpg'), (False, '110831202504820_47_000_apx_470_.jpg')]}
        True

        :return: dict
        """
        dirname = safe_decode(self.name)
        return dict(name=dirname,
                    dt=str(self.dt),
                    contents=[(flag, safe_decode(f, dirname + "/"))
                              for flag, f in self.contents])
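The snippets in this listing lean on a safe_decode helper; as a minimal sketch, assuming the behaviour described in the docstring above (backslash replacement on failure, plus a warning carrying the full path), such a helper might look like the following. The real binutils.safe_decode may differ.

import logging

LOGGER = logging.getLogger(__name__)

def safe_decode(raw, path_prefix=""):
    # Hypothetical stand-in for binutils.safe_decode: decode UTF-8 bytes,
    # falling back to backslash escapes and warning with the full path.
    try:
        return raw.decode("utf-8")
    except UnicodeDecodeError:
        decoded = raw.decode("utf-8", errors="backslashreplace")
        LOGGER.warning("Could not decode %s%s cleanly", path_prefix, decoded)
        return decoded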
Example #2
def print_mdb_settings(mdb):
    """
    Formats and prints details about an open mlocate database

    :param mdb: MLocateDB object, properly initialized
    """
    LOGGER.info("mlocate database header: %r", sorted(mdb.header.items()))
    # [('conf_block_size', 544), ('file_format', 0), ('req_visibility', 0),
    # ('root', b'/run/media/mich/MyBook')]
    LOGGER.info("mlocate database configuration: %r", sorted(mdb.conf.items()))

    conf = [(binutils.safe_decode(k), [binutils.safe_decode(e) for e in v])
            for k, v in sorted(mdb.conf.items())]

    print("""mlocate database details
    ====================================
    Root: {0}
    Requires visibility: {1}
    File format: {2}

    Configuration:
    """.format(
        binutils.safe_decode(mdb.header['root']),
        mdb.header['req_visibility'],
        mdb.header['file_format'],
    ))
    for k, value in conf:
        print("    - {0} = {1}".format(k, value))
    print("     ====================================\n\n")
Example #3
        def _level(prefix, depth, st):
            # Recursively print the subtree using box-drawing connectors.
            if not st:
                return
            if max_depth and depth >= max_depth:
                logging.info("Maximum depth reached, omitting details")
                return

            for subtree in st[:-1]:
                print(prefix + '├── ' + safe_decode(subtree[0]))
                _level(prefix + '│   ', depth + 1, subtree[1])

            print(prefix + '└── ' + safe_decode(st[-1][0]))
            _level(prefix + '    ', depth + 1, st[-1][1])
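This helper is nested in a tree-printing routine and relies on max_depth, logging and safe_decode from its enclosing scope. A self-contained sketch of how it might be wrapped and driven (the wrapper name and the (name, children) tuple format are assumptions):

import logging

def print_box_tree(root_name, tree, max_depth=0):
    # Hypothetical wrapper around the recursive helper shown above.
    # 'tree' is assumed to be a list of (byte_name, children) pairs.
    def safe_decode(raw):
        return raw.decode("utf-8", errors="backslashreplace")

    def _level(prefix, depth, st):
        if not st:
            return
        if max_depth and depth >= max_depth:
            logging.info("Maximum depth reached, omitting details")
            return
        for name, children in st[:-1]:
            print(prefix + '├── ' + safe_decode(name))
            _level(prefix + '│   ', depth + 1, children)
        name, children = st[-1]
        print(prefix + '└── ' + safe_decode(name))
        _level(prefix + '    ', depth + 1, children)

    print(root_name)
    _level('', 0, tree)

print_box_tree('/root', [(b'dir_a', [(b'file1', [])]), (b'dir_b', [])])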
Example #4
def print_dir_list(d, r):
    """
    Prints a section showing matches for a single directory.

    >>> from datetime import datetime
    >>> dt = datetime(2017, 7, 20, 13, 22, 43, 817771)
    >>> print_dir_list(mlocate.DirBlock(b'/some/directory/test', dt, []),
    ...                [(True, b'some_dir'), (False, b'some_file')])
    * 2017-07-20 13:22:43.817771 /some/directory/test
        - some_dir/
        - some_file

    :param d: DirBlock representing a directory
    :param r: list of matched entries
    """
    print("* {0} {1}".format(d.dt, binutils.safe_decode(d.name)))
    for is_dir, name in r:
        print("    - {0}{1}".format(binutils.safe_decode(name),
                                    "/" if is_dir else ""))
Example #5
def print_dir_count(d, r):
    """
    Prints a single line describing the given directory, with match count

    >>> from datetime import datetime
    >>> dt = datetime(2017, 7, 20, 13, 22, 43, 817771)
    >>> print_dir_count(mlocate.DirBlock(b'/some/directory/test', dt, []),
    ...                 [(True, b'some_dir'), (False, b'some_file')])
    [2017-07-20 13:22:43.817771] 2 matches in /some/directory/test

    :param d: DirBlock representing a directory
    :param r: list of matched entries (only its length is reported)
    """
    print("[{0}] {2} matches in {1}".format(d.dt, binutils.safe_decode(d.name),
                                            len(r)))
Example #6
def print_dir_test(d, r=True):
    """
    Prints a single line describing the given directory

    >>> from datetime import datetime
    >>> dt = datetime(2017, 7, 20, 13, 22, 43, 817771)
    >>> print_dir_test(mlocate.DirBlock(b'/some/directory/test', dt, []), True)
    2017-07-20 13:22:43.817771 /some/directory/test

    :param d: DirBlock representing a directory
    :param r: unused; present to match the action signature
    """
    assert r
    print("{0} {1}".format(d.dt, binutils.safe_decode(d.name)))
Example #7
def print_dir_json(d, r):
    """
    Prints the matches for a single directory as a JSON object.

    >>> from datetime import datetime
    >>> dt = datetime(2017, 7, 20, 13, 22, 43, 817771)
    >>> print_dir_json(mlocate.DirBlock(b'/some/directory/test', dt, []),
    ...                [(True, b'some_dir'), (False, b'some_file')])
    ... # doctest: +NORMALIZE_WHITESPACE
    {
      "dt": "2017-07-20 13:22:43.817771",
      "matches": [
        [ true, "some_dir" ],
        [ false, "some_file" ]
      ],
      "name": "/some/directory/test"
    }

    :param d: DirBlock representing a directory
    :param r: list of matched entries
    """
    data = dict(name=binutils.safe_decode(d.name),
                dt=str(d.dt),
                matches=[(flag, binutils.safe_decode(f)) for flag, f in r])
    print(json.dumps(data, indent=2, sort_keys=True))
Example #8
    def report(self):
        """
        Instead of reporting each duplicated directory individually,
        try to identify the highest level of each duplicated set.

        """
        # Build reversed tree (parents)
        for ck, contents in self.tree.items():
            for d in contents:
                self.rtree.add_to(d, ck)

        # Select duplicated checksums
        dups = [
            ck for ck, l in self.by_ck.items()
            if (ck != DirHashStack.EMPTY_DIR_CK) and (len(l) > 1)
        ]
        if not dups:
            print("No duplicate found")
            return None

        print("Reporting Duplicates ")
        for ck in dups:
            # Check whether all parents are themselves duplicates (sub-duplicates)
            parents = self.rtree[ck]
            top = [p for p in parents if p not in dups]
            if not top:
                LOGGER.info("Skipping subdup: %s", ck)
                continue
            if len(top) < len(parents):
                typ = 'mix'
                LOGGER.info("Mixed dup %s", ck)
            else:
                typ = 'top'

            dirs = self.by_ck[ck]
            print("* {0} : {1} potential duplicates ({2})".format(
                ck, len(dirs), typ))
            for d in sorted(dirs):
                print("   -", safe_decode(d))
Example #9
        def _level(d, st):
            # Print each subtree as an indented bullet list (indent, bullet
            # and safe_decode come from the enclosing scope).
            prefix = indent * d + bullet
            for subtree in st:
                print(prefix + safe_decode(subtree[0]))
                _level(d + 1, subtree[1])
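As in Example #3, this helper depends on names from an enclosing scope; a minimal standalone sketch under those assumptions:

def print_flat_tree(tree, indent="  ", bullet="- "):
    # Hypothetical wrapper; 'tree' is assumed to be a list of
    # (byte_name, children) pairs, printed as an indented bullet list.
    def _level(d, st):
        prefix = indent * d + bullet
        for name, children in st:
            print(prefix + name.decode("utf-8", errors="backslashreplace"))
            _level(d + 1, children)

    _level(0, tree)

print_flat_tree([(b'dir_a', [(b'file1', [])]), (b'dir_b', [])])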