Example #1
def fmt_sz(intval):
    """ Format a byte-sized value.
    """
    try:
        return fmt.human_size(intval)
    except (ValueError, TypeError):
        return "N/A".rjust(len(fmt.human_size(0)))
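The except branch is an alignment trick: "N/A" is right-justified to the exact width that human_size gives any number, so table columns stay flush. A minimal self-contained sketch; human_size below is a hypothetical stand-in with fixed-width output, not pyrocore's actual fmt.human_size:

def human_size(size):
    # Hypothetical stand-in: fixed-width, right-justified rendering
    size = float(size)
    for unit in ("bytes", "KiB", "MiB", "GiB"):
        if size < 1024 or unit == "GiB":
            return "%8.1f %-5s" % (size, unit)
        size /= 1024.0

def fmt_sz(intval):
    try:
        return human_size(intval)
    except (ValueError, TypeError):
        return "N/A".rjust(len(human_size(0)))  # same width as a real value

for value in (0, 2048, 5 * 1024 ** 3, None):
    print(fmt_sz(value))  # four lines, all 14 characters wide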
Example #2
File: xmlrpc.py Project: armpit/pyrocore
 def __str__(self):
     """ Return statistics for this call.
     """
     return "out %s, in %s, took %.3fms/%.3fms" % (
         fmt.human_size(self._outbound).strip(),
         fmt.human_size(self._inbound).strip(),
         self._net_latency * 1000.0,
         self._latency * 1000.0,
     )
Example #3
File: xmlrpc.py Project: armpit/pyrocore
 def __str__(self):
     """ Return statistics.
     """
     return "%d req, out %s [%s max], in %s [%s max], %.3fms/%.3fms avg latency" % (
         self._requests,
         fmt.human_size(self._outbound).strip(),
         fmt.human_size(self._outbound_max).strip(),
         fmt.human_size(self._inbound).strip(),
         fmt.human_size(self._inbound_max).strip(),
         self._net_latency * 1000.0 / self._requests,
         self._latency * 1000.0 / self._requests,
     )
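Both averages divide by self._requests, so rendering these statistics before any request has completed would raise ZeroDivisionError. A tiny hypothetical helper (not pyrocore code) showing a guarded variant:

def avg_latency_ms(total_latency, requests):
    # Average latency in milliseconds; safe when requests == 0
    return total_latency * 1000.0 / requests if requests else 0.0

print(avg_latency_ms(0.250, 10))  # -> 25.0
print(avg_latency_ms(0.0, 0))     # -> 0.0, no ZeroDivisionError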
Example #4
File: view.py Project: 01100001/pyroscope
def make_tooltip(item):
    """ Make a tooltip from the most important torrent details.
    """
    lines = [
        ##"HASH: %s" % item.hash,
        "RATIO: %.3f" % (item.ratio / 1000.0,),
        "RATE UP/DN: %s / %s" % (fmt.human_size(item.up_rate), fmt.human_size(item.down_rate)),
        "XFER UP/DN: %s / %s" % (fmt.human_size(item.up_total), fmt.human_size(item.down_total)),
        "STATE: %s" % " ".join(_make_state(item)),
        # last state change?
    ]

    return u"\n\u00A0|\u00A0".join(lines)
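Two details carry the formatting here: rTorrent reports ratios as integers in thousandths, hence the division by 1000.0, and the join string pads the pipe with U+00A0 no-break spaces, presumably so the separator stays glued together when the tooltip renderer wraps text. A quick check of both (item value made up):

ratio = 1500                                      # rTorrent-style per-mille ratio
print("RATIO: %.3f" % (ratio / 1000.0,))          # -> RATIO: 1.500
print(repr(u"\n\u00A0|\u00A0".join(["a", "b"])))  # pipe padded with NBSP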
Example #5
def bibyte(val):
    """ Format numerical byte size as human size.
    """
    from pyrocore.util.fmt import human_size

    try:
        val = int(val)
    except (TypeError, ValueError):
        return val
    else:
        return human_size(val)
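Unlike fmt_sz in Example #1, this filter passes non-numeric input through unchanged rather than substituting a placeholder, which suits template pipelines where the value may already be a formatted string. A self-contained sketch with a hypothetical stand-in formatter:

def bibyte(val, human_size=lambda n: "%.1f KiB" % (n / 1024.0)):
    # human_size default is a hypothetical stand-in for pyrocore's formatter
    try:
        val = int(val)
    except (TypeError, ValueError):
        return val              # pass non-numeric values through as-is
    return human_size(val)

print(bibyte(2048))   # -> 2.0 KiB
print(bibyte("n/a"))  # -> n/a (unchanged)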
Example #6
File: engine.py Project: oleshiy/pyrocore
def _fmt_files(filelist):
    """ Produce a file listing.
    """
    depth = max(i.path.count("/") for i in filelist)
    pad = [u"\uFFFE"] * depth

    base_indent = " " * 38
    indent = 0
    result = []
    prev_path = pad
    sorted_files = sorted((i.path.split("/")[:-1] + pad, i.path.rsplit("/", 1)[-1], i) for i in filelist)

    for path, name, fileinfo in sorted_files:
        path = path[:depth]
        if path != prev_path:
            common = min(
                [depth] + [idx for idx, (dirname, prev_name) in enumerate(zip(path, prev_path)) if dirname != prev_name]
            )
            # result.append("!!%r %r" % (indent, common))
            # result.append("!!%r" % (prev_path,))
            # result.append("!!%r" % (path,))

            while indent > common:
                indent -= 1
                result.append("%s%s/" % (base_indent, " " * indent))

            for dirname in path[common:]:
                if dirname == u"\uFFFE":
                    break
                result.append("%s%s\\ %s" % (base_indent, " " * indent, dirname))
                indent += 1

        ##result.append("!!%r %r" % (path, name))
        result.append(
            "  %s %s %s %s| %s"
            % (
                {0: "off ", 1: "    ", 2: "high"}.get(fileinfo.prio, "????"),
                fmt.iso_datetime(fileinfo.mtime),
                fmt.human_size(fileinfo.size),
                " " * indent,
                name,
            )
        )

        prev_path = path

    while indent > 0:
        indent -= 1
        result.append("%s%s/" % (base_indent, " " * indent))
    result.append("%s= %d file(s)" % (base_indent, len(filelist)))

    return "\n".join(result)
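The u"\uFFFE" entries are sort sentinels: every directory tuple is padded to the maximum depth, and since U+FFFE compares greater than virtually any character a file name can contain, plain files sort after the subdirectories of the same directory. A minimal demonstration of the resulting order (paths invented):

pad_char = u"\uFFFE"
paths = ["top.txt", "a/inner.txt", "a/b/deep.txt"]
depth = max(p.count("/") for p in paths)
pad = [pad_char] * depth

keyed = sorted((p.split("/")[:-1] + pad, p.rsplit("/", 1)[-1], p) for p in paths)
for _dirs, _name, original in keyed:
    print(original)  # -> a/b/deep.txt, a/inner.txt, top.txt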
Example #7
    def listing(self, masked=True):
        """ List torrent info & contents. Returns a list of formatted lines.
        """
        # Assemble data
        metainfo, bad_encodings, bad_fields = sanitize(
            bencode.bread(self.filename), diagnostics=True)
        announce = metainfo['announce']
        info = metainfo['info']
        infohash = hashlib.sha1(bencode.bencode(info))

        total_size = data_size(metainfo)
        piece_length = info['piece length']
        piece_number, last_piece_length = divmod(total_size, piece_length)

        # Build result
        result = [
            "NAME %s" % (os.path.basename(fmt.to_unicode(self.filename))),
            "SIZE %s (%i * %s + %s)" % (
                fmt.human_size(total_size).strip(),
                piece_number,
                fmt.human_size(piece_length).strip(),
                fmt.human_size(last_piece_length).strip(),
            ),
            "META %s (pieces %s %.1f%%)" % (
                fmt.human_size(os.path.getsize(self.filename)).strip(),
                fmt.human_size(len(info["pieces"])).strip(),
                100.0 * len(info["pieces"]) / os.path.getsize(self.filename),
            ),
            "HASH %s" % (infohash.hexdigest().upper()),
            "URL  %s" % (mask_keys if masked else str)(announce),
            "PRV  %s" % ("YES (DHT/PEX disabled)"
                         if info.get("private") else "NO (DHT/PEX enabled)"),
            "TIME %s" %
            ("N/A" if "creation date" not in metainfo else time.strftime(
                "%Y-%m-%d %H:%M:%S", time.localtime(
                    metainfo["creation date"]))),
        ]

        for label, key in (("BY  ", "created by"), ("REM ", "comment")):
            if key in metainfo:
                result.append("%s %s" % (label, metainfo.get(key, "N/A")))

        result.extend([
            "",
            "FILE LISTING%s" % ("" if 'length' in info else " [%d file(s)]" %
                                len(info['files']), ),
        ])
        if 'length' in info:
            # Single file
            result.append("%-69s%9s" % (
                fmt.to_unicode(info['name']),
                fmt.human_size(total_size),
            ))
        else:
            # Directory structure
            result.append("%s/" % fmt.to_unicode(info['name']))
            oldpaths = [None] * 99
            for entry in info['files']:
                # Remove crap that certain PHP software puts in paths
                entry_path = [fmt.to_unicode(i) for i in entry["path"] if i]

                for idx, item in enumerate(entry_path[:-1]):
                    if item != oldpaths[idx]:
                        result.append("%s%s/" % (' ' * (4 * (idx + 1)), item))
                        oldpaths[idx] = item
                result.append("%-69s%9s" % (
                    ' ' * (4 * len(entry_path)) + entry_path[-1],
                    fmt.human_size(entry['length']),
                ))

        if bad_encodings:
            result.extend([
                "",
                "WARNING: Bad encoding(s) {} in these fields: {}".format(
                    ', '.join(sorted(bad_encodings)),
                    ', '.join(sorted(bad_fields))),
                "Use the --raw option to inspect these encoding issues.",
            ])

        return result
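The SIZE line relies on the divmod decomposition: total_size splits into piece_number whole pieces plus a shorter final piece, so piece_number * piece_length + last_piece_length reconstructs total_size exactly. A worked check with made-up numbers:

total_size = 1234567890            # hypothetical payload size in bytes
piece_length = 2 ** 20             # 1 MiB pieces
piece_number, last_piece_length = divmod(total_size, piece_length)
assert piece_number * piece_length + last_piece_length == total_size
print("%d pieces + %d bytes" % (piece_number, last_piece_length))  # -> 1177 pieces + 393938 bytes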
Example #8
    def _make_info(self, piece_size, progress, walker, piece_callback=None):
        """ Create info dict.
        """
        # These collect the file descriptions and piece hashes
        file_list = []
        pieces = []

        # Initialize progress state
        hashing_secs = time.time()
        totalsize = -1 if self._fifo else self._calc_size()
        totalhashed = 0

        # Start a new piece
        sha1sum = hashlib.sha1()
        done = 0
        filename = None

        # Hash all files
        for filename in walker:
            # Assemble file info
            filesize = os.path.getsize(filename)
            filepath = filename[len(
                os.path.dirname(self.datapath) if self._fifo else self.datapath
            ):].lstrip(os.sep)
            file_list.append({
                "length":
                filesize,
                "path": [
                    fmt.to_utf8(x) for x in fmt.to_unicode(filepath).replace(
                        os.sep, '/').split('/')
                ],
            })
            self.LOG.debug("Hashing %r, size %d..." % (filename, filesize))

            # Open file and hash it
            fileoffset = 0
            handle = open(filename, "rb")
            try:
                while fileoffset < filesize:
                    # Read rest of piece or file, whatever is smaller
                    chunk = handle.read(
                        min(filesize - fileoffset, piece_size - done))
                    sha1sum.update(chunk)  # bogus pylint: disable=E1101
                    done += len(chunk)
                    fileoffset += len(chunk)
                    totalhashed += len(chunk)

                    # Piece is done
                    if done == piece_size:
                        pieces.append(sha1sum.digest())  # bogus pylint: disable=E1101
                        if piece_callback:
                            piece_callback(filename, pieces[-1])

                        # Start a new piece
                        sha1sum = hashlib.sha1()
                        done = 0

                    # Report progress
                    if progress:
                        progress(totalhashed, totalsize)
            finally:
                handle.close()

        # Add hash of partial last piece
        if done > 0:
            pieces.append(sha1sum.digest())  # bogus pylint: disable=E1103
            if piece_callback:
                piece_callback(filename, pieces[-1])

        # Build the meta dict
        metainfo = {
            "pieces": b"".join(pieces),
            "piece length": piece_size,
            "name": os.path.basename(self.datapath),
        }

        # Handle directory/FIFO vs. single file
        if self._fifo or os.path.isdir(self.datapath):
            metainfo["files"] = file_list
        else:
            metainfo["length"] = totalhashed

        hashing_secs = time.time() - hashing_secs
        self.LOG.info("Hashing of %s took %.1f secs (%s/s)" % (
            fmt.human_size(totalhashed).strip(),
            hashing_secs,
            fmt.human_size(totalhashed / hashing_secs).strip(),
        ))

        # Return validated info dict
        return check_info(metainfo), totalhashed
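The read size min(filesize - fileoffset, piece_size - done) is the key invariant: each read takes whichever is smaller, the bytes left in the current file or the bytes left in the current piece, so SHA-1 pieces run straight across file boundaries as the BitTorrent format requires. A stripped-down sketch of the same chunking over in-memory byte strings (hypothetical data, not the pyrocore API):

import hashlib

def hash_pieces(files, piece_size):
    # Yield SHA-1 digests of fixed-size pieces spanning file boundaries
    sha1sum, done = hashlib.sha1(), 0
    for data in files:
        offset = 0
        while offset < len(data):
            chunk = data[offset:offset + min(len(data) - offset, piece_size - done)]
            sha1sum.update(chunk)
            done += len(chunk)
            offset += len(chunk)
            if done == piece_size:              # piece boundary reached
                yield sha1sum.digest()
                sha1sum, done = hashlib.sha1(), 0
    if done:                                    # partial last piece
        yield sha1sum.digest()

pieces = list(hash_pieces([b"a" * 5, b"b" * 7], piece_size=4))
print(len(pieces))  # -> 3; the second piece spans the file boundary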
Example #9
    def listing(self, masked=True):
        """ List torrent info & contents. Returns a list of formatted lines.
        """
        # Assemble data
        metainfo = sanitize(bencode.bread(self.filename))
        announce = metainfo['announce']
        info = metainfo['info']
        info_hash = hashlib.sha1(bencode.bencode(info))

        total_size = data_size(metainfo)
        piece_length = info['piece length']
        piece_number, last_piece_length = divmod(total_size, piece_length)

        # Build result
        result = [
            "NAME %s" % (os.path.basename(self.filename)),
            "SIZE %s (%i * %s + %s)" % (
                fmt.human_size(total_size).strip(),
                piece_number, fmt.human_size(piece_length).strip(),
                fmt.human_size(last_piece_length).strip(),
            ),
            "META %s (pieces %s %.1f%%)" % (
                fmt.human_size(os.path.getsize(self.filename)).strip(),
                fmt.human_size(len(info["pieces"])).strip(),
                100.0 * len(info["pieces"]) / os.path.getsize(self.filename),
            ),
            "HASH %s" % (info_hash.hexdigest().upper()),
            "URL  %s" % (mask_keys if masked else str)(announce),
            "PRV  %s" % ("YES (DHT/PEX disabled)" if info.get("private") else "NO (DHT/PEX enabled)"),
            "TIME %s" % ("N/A" if "creation date" not in metainfo else
                time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(metainfo["creation date"]))
            ),
        ]

        for label, key in (("BY  ", "created by"), ("REM ", "comment")):
            if key in metainfo:
                result.append("%s %s" % (label, metainfo.get(key, "N/A")))

        result.extend([
            "",
            "FILE LISTING%s" % ("" if 'length' in info else " [%d file(s)]" % len(info['files']),),
        ])
        if 'length' in info:
            # Single file
            result.append("%-69s%9s" % (
                    info['name'],
                    fmt.human_size(total_size),
            ))
        else:
            # Directory structure
            result.append("%s/" % info['name'])
            oldpaths = [None] * 99
            for entry in info['files']:
                # Remove crap that certain PHP software puts in paths
                entry_path = [i for i in entry["path"] if i]

                for idx, item in enumerate(entry_path[:-1]):
                    if item != oldpaths[idx]:
                        result.append("%s%s/" % (' ' * (4*(idx+1)), item))
                        oldpaths[idx] = item
                result.append("%-69s%9s" % (
                    ' ' * (4*len(entry_path)) + entry_path[-1],
                    fmt.human_size(entry['length']),
                ))

        return result
Example #10
    def _make_info(self, piece_size, progress, walker, piece_callback=None):
        """ Create info dict.
        """
        # These collect the file descriptions and piece hashes
        file_list = []
        pieces = []

        # Initialize progress state
        hashing_secs = time.time()
        totalsize = -1 if self._fifo else self._calc_size()
        totalhashed = 0

        # Start a new piece
        sha1sum = hashlib.sha1()
        done = 0

        # Hash all files
        for filename in walker:
            # Assemble file info
            filesize = os.path.getsize(filename)
            filepath = filename[len(os.path.dirname(self.datapath) if self._fifo else self.datapath) :].lstrip(os.sep)
            file_list.append({"length": filesize, "path": filepath.replace(os.sep, "/").split("/")})
            self.LOG.debug("Hashing %r, size %d..." % (filename, filesize))

            # Open file and hash it
            fileoffset = 0
            handle = open(filename, "rb")
            try:
                while fileoffset < filesize:
                    # Read rest of piece or file, whatever is smaller
                    chunk = handle.read(min(filesize - fileoffset, piece_size - done))
                    sha1sum.update(chunk)  # bogus pylint: disable=E1101
                    done += len(chunk)
                    fileoffset += len(chunk)
                    totalhashed += len(chunk)

                    # Piece is done
                    if done == piece_size:
                        pieces.append(sha1sum.digest())  # bogus pylint: disable=E1101
                        if piece_callback:
                            piece_callback(filename, pieces[-1])

                        # Start a new piece
                        sha1sum = hashlib.sha1()
                        done = 0

                    # Report progress
                    if progress:
                        progress(totalhashed, totalsize)
            finally:
                handle.close()

        # Add hash of partial last piece
        if done > 0:
            pieces.append(sha1sum.digest())  # bogus pylint: disable=E1103
            if piece_callback:
                piece_callback(filename, pieces[-1])

        # Build the meta dict
        metainfo = {"pieces": b"".join(pieces), "piece length": piece_size, "name": os.path.basename(self.datapath)}

        # Handle directory/FIFO vs. single file
        if self._fifo or os.path.isdir(self.datapath):
            metainfo["files"] = file_list
        else:
            metainfo["length"] = totalhashed

        hashing_secs = time.time() - hashing_secs
        self.LOG.info(
            "Hashing of %s took %.1f secs (%s/s)"
            % (fmt.human_size(totalhashed).strip(), hashing_secs, fmt.human_size(totalhashed / hashing_secs).strip())
        )

        # Return validated info dict
        return check_info(metainfo), totalhashed