def byte_number_filter(i, verbose=False):
    """Jinja filter: render a byte count as a human-readable size string.

    Args:
        i: Number of bytes (``int`` or ``float``); any other type yields the
           fallback string.
        verbose: If True, append the exact byte count, e.g. "1.00 KiB (1,024 bytes)".

    Returns:
        The formatted size string, or ``'not available'`` for non-numeric input.
    """
    if not isinstance(i, (float, int)):
        return 'not available'
    if verbose:
        # int() cast is required: the ',d' format code raises ValueError for
        # floats, which the isinstance check above deliberately lets through.
        return '{} ({})'.format(human_readable_file_size(i), format(int(i), ',d') + ' bytes')
    return human_readable_file_size(i)
def render_number_as_size(number, verbose=True):
    """Render a byte count as a human-readable size string.

    Args:
        number: Number of bytes (``int`` or ``float``); any other type yields
                the fallback string.
        verbose: If True, append the exact byte count, e.g. "1.00 KiB (1,024 Byte)".

    Returns:
        The formatted size string, or ``'not available'`` for non-numeric input.
    """
    if not isinstance(number, (int, float)):
        return 'not available'
    if verbose:
        # Cast to int for the ',d' format code as well: it raises ValueError
        # for floats, which the isinstance check above deliberately allows.
        return '{} ({})'.format(human_readable_file_size(int(number)), format(int(number), ',d') + ' Byte')
    return human_readable_file_size(int(number))
def byte_number_filter(i, verbose=True):
    """Jinja filter: render a byte count as a human-readable size string.

    Args:
        i: Number of bytes (``int`` or ``float``); any other type yields the
           fallback string.
        verbose: If True, append the exact byte count, e.g. "1.00 KiB (1,024 bytes)".

    Returns:
        The formatted size string, or ``'not available'`` for non-numeric input.
    """
    # Guard clause instead of nested if/else; single isinstance with a tuple.
    if not isinstance(i, (int, float)):
        return 'not available'
    if verbose:
        # int() cast is required: the ',d' format code raises ValueError for
        # floats, which the isinstance check above deliberately lets through.
        return '{} ({})'.format(human_readable_file_size(i), format(int(i), ',d') + ' bytes')
    return human_readable_file_size(i)
def _detect_unpack_loss(self, fo, extracted_fos):
    """Compare the overhead-corrected packed size of ``fo`` with the combined
    size of its extracted files and record the outcome.

    Side effects: writes the ``'size packed -> unpacked'`` and ``'summary'``
    keys into ``fo.processed_analysis['unpacker']``.
    """
    unpacker_result = fo.processed_analysis['unpacker']
    # Fraction of the container that is actual payload (1 minus encoding overhead).
    payload_fraction = 1 - unpacker_result.get('encoding_overhead', 0)
    packed_size = get_binary_size_without_padding(fo.binary) * payload_fraction - self.HEADER_OVERHEAD
    unpacked_size = self._get_extracted_fos_size_sum(extracted_fos)
    unpacker_result['size packed -> unpacked'] = '{} -> {}'.format(
        human_readable_file_size(packed_size),
        human_readable_file_size(unpacked_size),
    )
    unpacker_result['summary'] = ['data lost'] if packed_size > unpacked_size else ['no data lost']
def _get_analyzed_jstree_node_contents(self, node):
    """Build the jstree node dict for an analyzed file object.

    The node label shows the file name in bold plus its human-readable size;
    both link targets point at the file's analysis page. The file's uid is
    attached under the ``'data'`` key.
    """
    label = '<b>{}</b> (<span style="color:gray;">{}</span>)'.format(
        node.name, human_readable_file_size(node.size)
    )
    analysis_link = '/analysis/{}/ro/{}'.format(node.uid, node.root_uid)
    result = self._get_jstree_node_contents(
        label,
        analysis_link,
        analysis_link,
        get_correct_icon_for_mime(node.type),
    )
    result['data'] = {'uid': node.uid}
    return result
def test_human_readable_file_size(input_data, expected):
    """Parametrized check: formatting ``input_data`` yields ``expected``."""
    result = human_readable_file_size(input_data)
    assert result == expected