Code Example #1
File: views.py Project: ritashugisha/arachvid
    def get_context_data(self, request):
        db_size = os.stat(
            settings.DATABASES['legacy']['NAME']
        ).st_size
        safe_size = (2.0 * (10.0 ** 9.0))

        # FIX: these names need to be cleaner and more descriptive
        self.context['total_resources'] = Resource.objects.count()
        self.context['safe_percentage'] = (float(db_size) / safe_size) * 100.0
        self.context['db_size'] = sizeof_fmt(db_size)
        self.context['safe_size'] = sizeof_fmt(safe_size)
        self.context['total_links'] = Link.objects.count()
        try:
            self.context['link_resource_ratio'] = '{:.2f}'.format((
                float(self.context['total_resources']) /
                float(self.context['total_links'])
            ) * 100.0)
        except ZeroDivisionError:
            # guard against an empty Link table, mirroring the blacklist handling below
            self.context['link_resource_ratio'] = '0.00'
        self.context['total_hosts'] = Host.objects.count()
        self.context['total_blacklisted'] = LinkBlacklist.objects.count()
        try:
            self.context['blacklist_ratio'] = '{:.2f}'.format((
                float(self.context['total_links']) /
                float(self.context['total_blacklisted'])
            ))
        except ZeroDivisionError:
            self.context['blacklist_ratio'] = '0.0'
        return self.context
Code Example #2
def check_user(u):
    '''
    Check whether a user should be suspended. Returns the reason for suspension, or None.
    @type u: user.User
    '''
    if u.suspended: return None  # already suspended; nothing to check
    
    msg = None
    
    for r in u.get_meta("limit", []):
        if not 'type'   in r : continue
        if not 'amount' in r : continue
        if not 'since'  in r : continue 
        type, amount, since = r['type'], r['amount'], r['since']
        
        now   = datetime.datetime.now()
        
        since = datetime.datetime(now.year, now.month, 1)                       if since == 'this-month'  else \
                datetime.datetime(now.year, now.month, now.day - now.weekday()) if since == 'this-week'   else \
                datetime.datetime.strptime(since, DATE_FORMAT)
        
        if   type == 'time':     # Time-to-expire rule. amount: unused.
            if now >= since:
                msg = "Expired: %s" % r['since']
                break
        elif type == 'traffic':  # Traffic-limited rule. amount: traffic in bytes.
            tq = traffic.query(uid=u.id, min_time=since.strftime(DATE_FORMAT), sum=traffic.QS_ALL)
            if tq[0][2] > amount:
                msg = "Traffic: used %s, limit %s" % (utils.sizeof_fmt(tq[0][2]), utils.sizeof_fmt(amount))
                break
        
    return msg
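For reference, this is the shape of the "limit" metadata the loop above consumes; the field names come from the code, while the concrete values below are illustrative assumptions only:

# Hypothetical entries as returned by u.get_meta("limit", []):
rules = [
    {'type': 'time',    'amount': None,        'since': '2015-12-31'},  # expires at a DATE_FORMAT date
    {'type': 'traffic', 'amount': 5 * 1024**3, 'since': 'this-month'},  # 5 GiB cap since month start
]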
Code Example #3
def main(args):
    """
    process each argument
    """
    table = Texttable()
    table.set_cols_align(["r", "r", "r", "r", "r"])
    rows = [["Number", "File Name", "File Size", "Video Duration (H:MM:SS)", "Conversion Time"]]
    total_time = 0.0
    total_file_size = 0

    for index, arg in enumerate(args, start=1):
        timer = utils.Timer()
        with timer:
            result = resize(arg, (index, len(args)))
        #
        result.elapsed_time = timer.elapsed_time()
        rows.append([index,
                     result.file_name,
                     utils.sizeof_fmt(result.file_size),
                     utils.sec_to_hh_mm_ss(utils.get_video_length(result.file_name)) if result.file_name else "--",
                     "{0:.1f} sec.".format(result.elapsed_time) if result.status else FAILED])
        #
        if rows[-1][-1] != FAILED:
            total_time += result.elapsed_time
        total_file_size += result.file_size

    table.add_rows(rows)
    print table.draw()
    print 'Total file size:', utils.sizeof_fmt(total_file_size)
    print 'Total time: {0} (H:MM:SS)'.format(utils.sec_to_hh_mm_ss(total_time))
    print utils.get_unix_date()
Code Example #4
    def update_progress(self, kwparams):

        if self.data_engine.SUPPORT_PARTIAL_PROGRESS_REPORTING:
            kwparams['overall_file_size'] = utils.sizeof_fmt(self.file_size)
            kwparams['file_count'] = str(self.file_count)

        if 'overall_size' in kwparams:
            self.overall_size = kwparams['overall_size']
            self.overall_progress_bar.setMaximum(self.overall_size)

        if 'size_unknown' not in kwparams:
            kwparams['overall_size'] = utils.sizeof_fmt(self.overall_size)
        else:
            kwparams['overall_size'] = '? MB'

        if self.data_engine.SUPPORT_PARTIAL_PROGRESS_REPORTING:
            self.current_progress_label.setText('Current file (' +
                                                kwparams['cur_file_number'] +
                                                ' of ' +
                                                kwparams['file_count'] +
                                                '): ' +
                                                kwparams['cur_downloaded'] +
                                                ' of ' +
                                                kwparams['overall_file_size'] +
                                                ' downloaded')
            self.current_progress_bar.setValue(kwparams['cur_file_bytes'])

        self.overall_progress_label.setText('Overall progress: ' +
                                            kwparams['overall_downloaded'] +
                                            ' of ' + kwparams['overall_size'] +
                                            ' downloaded')
        self.overall_progress_bar.setValue(kwparams['overall_bytes'])
Code Example #5
 def test_sizeof_fmt(self):
     s1 = utils.sizeof_fmt(123456789)
     s2 = utils.sizeof_fmt(123456789012)
     s3 = utils.sizeof_fmt(1234)
     self.assertEqual(s1, '117.7 MB')
     self.assertEqual(s2, '115.0 GB')
     self.assertEqual(s3, '1.2 KB')
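The expected strings above pin down the convention the examples in this collection share: repeated division by 1024, one decimal place, and a space before the unit. A minimal sketch of a sizeof_fmt consistent with those assertions follows; each project ships its own utils.sizeof_fmt, so treat this as an assumed reference, not any project's exact code:

def sizeof_fmt(num):
    """Format a byte count for humans, e.g. 1234 -> '1.2 KB' (1024-based units)."""
    for unit in ('bytes', 'KB', 'MB', 'GB', 'TB', 'PB'):
        if abs(num) < 1024.0:
            return '%.1f %s' % (num, unit)
        num /= 1024.0
    return '%.1f EB' % num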
Code Example #6
    def print_stats():
        """A helper for printing statistics about the simulation"""
        data = (
            args.rlc,
            args.rlu,
            args.max_threshold,
            args.offline_rate,
            utils.num_fmt(files_in_storage),
            utils.num_fmt(files_uploaded),
            1 - files_in_storage / files_uploaded,
            utils.sizeof_fmt(data_in_storage),
            utils.sizeof_fmt(data_uploaded),
            1 - data_in_storage / data_uploaded,
            utils.get_mem_info(),
            tmr.elapsed_str,
            tmr_start.elapsed_str,
        )

        tmpl = ("Statistics: \n"
                "  Params: RLc=%s, RLu=%s, max_threshold=%s, offline_rate=%s\n"
                "  Files: files_in_storage=%s, files_uploaded=%s, DDP=%s\n"
                "  Data: data_in_storage=%s, data_uploaded=%s, DDP=%s\n"
                "  Execution: memory[%s], chunk_time=%s, total_time=%s")

        tmr.reset()

        print(tmpl % data, file=sys.stderr)
Code Example #7
File: traces.py Project: zncb/wtrace
 def __str__(self):
     from utils import sizeof_fmt
     bw_up = sizeof_fmt(self.bytes_up)
     bw_down = sizeof_fmt(self.bytes_down)
     return u"[{name}] {bw_up}↑{bw_down}↓ | {req}↑{resp}↓  in {time:.2}s | {url}".format(name=self.target.name,url=self.target.url,\
                                                                                        time=self.end_time-self.init_time,\
                                                                                        bw_up=bw_up,bw_down=bw_down,\
                                                                                        req=self.requests,resp=self.responses)
Code Example #8
    def _internal_upload_directory(self, dir_name):
        if not os.path.isdir(dir_name):
            sys.stderr.write(dir_name + " is not a directory.\n")
            return

        try:
            dir_list = os.listdir(dir_name)
        except OSError as e:
            sys.stderr.write(str(e) + "\n")
            return

        for filename in dir_list:
            src_file = os.path.join(dir_name, filename)
            if os.path.isdir(src_file):
                self._internal_upload_directory(src_file)
                continue

            if not os.path.isfile(src_file):
                continue

            # skip files that are not an allowed type
            if not self.is_valid_file_type(src_file):
                print("File " + filename + " is not an allowed file type.\n")
                continue

            self._count += 1
            file_size = utils.get_file_size(src_file)
            self._sizecount += file_size

            md5sum = utils.get_md5sum_from_file(src_file)

            uploaded = self._dataHelper.file_already_uploaded(
                self._cloud_service_name, md5sum)
            if uploaded:
                print "File", src_file, "already uploaded. 1"
                continue

            stt = time.time()
            photo_id = self.upload_file(src_file, md5sum)
            secondstoupload = time.time() - stt
            bytes_per_second = file_size / secondstoupload  # upload rate in bytes per second

            if photo_id != 0:
                self._dataHelper.set_file_uploaded(src_file,
                                                   self._cloud_service_name,
                                                   photo_id, md5sum)
            else:
                self._failcount += 1

            if self._total_pics_count > 0:
                p = float(self._count) / float(self._total_pics_count) * 100.0
                print str(int(p)) + "% done. (" + str(self._count), "of", self._total_pics_count, \
                    "pictures,", self._failcount, "fails - " + utils.sizeof_fmt(self._sizecount) + \
                    " of " + utils.sizeof_fmt(self._total_pics_size) + ") ETA: " + \
                    utils.format_eta(bytes_per_second, self._sizecount, self._total_pics_size)
Code Example #9
File: lsbb.py Project: wiehan-a/spectral-toolkit
 def callback(self, buffer):
     
     self.cur_size += len(buffer)
     
     self.progress_update.emit({'cur_file_number': str(self.count),
                                'cur_downloaded': utils.sizeof_fmt(self.cur_size),
                                'overall_downloaded': utils.sizeof_fmt(self.size + self.cur_size),
                                'cur_file_bytes': self.cur_size,
                                'overall_bytes': self.size + self.cur_size})
Code Example #10
 def get_info(self, name):
     path = self.path
     file_type = get_type(path / name)
     mod_time = convert_date((path / name).stat().st_mtime)
     size = (path / name).stat().st_size
     size_str = utils.sizeof_fmt(size)
     return file_type, name, mod_time, size_str
Code Example #11
    def print_stats():
        data = (utils.num_fmt(files_in_storage), utils.num_fmt(files_uploaded),
                1 - files_in_storage / files_uploaded,
                utils.sizeof_fmt(data_in_storage),
                utils.sizeof_fmt(data_uploaded),
                1 - data_in_storage / data_uploaded, utils.get_mem_info(),
                tmr.elapsed_str)

        tmpl = ("Statistics: \n"
                "  Files: files_in_storage=%s, files_uploaded=%s, DDP=%s\n"
                "  Data: data_in_storage=%s, data_uploaded=%s, DDP=%s\n"
                "  Execution: memory[%s], chunk_time=%s")

        tmr.reset()

        print(tmpl % data, file=sys.stderr)
Code Example #12
File: model.py Project: myelintek/lib-mlsteam
def versions(tag, offset, limit):
    results = []
    try:
        api = MyelindlApi()
        results = api.model_versions(
            tag,
            offset=offset,
            limit=limit,
        )
    except MyelindlApiError as e:
        click.echo('Fail due to {}'.format(str(e)))
        raise

    longest = 10
    if results:
        longest = max(longest, max(len(d['tag']) for d in results))
    template = '| {:>%d} | {:>20} | {:>10} | {:>10} | {:>10} | {:>10} |' % longest
    header = template.format('tag', 'name', 'version', 'size', 'type', 'user')

    click.echo('=' * len(header))
    click.echo(header)
    click.echo('=' * len(header))

    for ds in results:
        line = template.format(
            ds['tag'],
            ds['name'],
            ds['version'],
            sizeof_fmt(ds['size']),
            ds['type'],
            ds['username'],
        )
        click.echo(line)
    click.echo('=' * len(header))
Code Example #13
File: datasets.py Project: clarkkev/deep-coref
 def write(self, path):
     if self.active:
         self.data = np.array(self.data, dtype='bool') \
             if self.name == 'y' or self.name == 'pf' else np.vstack(self.data)
         print "Writing {:}, dtype={:}, size={:}".format(self.name, str(self.data.dtype),
                                                         utils.sizeof_fmt(self.data.nbytes))
         np.save(path + self.name, self.data)
Code Example #14
def list():
    api = MyelindlApi()
    try:
        result = api.dataset_list()
    except MyelindlApiError as e:
        click.echo('Fail due to {}'.format(str(e)))
        raise
    longest = 10
    if result['datasets']:
        longest = max(longest, max(len(d['name']) for d in result['datasets']))
    template = '| {:>10} | {:>%d} | {:>30} | {:>10} | {:>10} | {:>10} |' % longest
    header = template.format('id', 'name', 'description', 'size', 'type',
                             'user')

    click.echo('=' * len(header))
    click.echo(header)
    click.echo('=' * len(header))

    for ds in result['datasets']:
        line = template.format(
            ds['id'],
            ds['name'],
            ds['description'],
            sizeof_fmt(ds['size']),
            ds['type'],
            ds['username'],
        )
        click.echo(line)
    click.echo('=' * len(header))
Code Example #15
File: sansa.py Project: wiehan-a/spectral-toolkit
 def callback(self, buffer):
     
     self.size += len(buffer)
     
     self.progress_update.emit({'overall_downloaded': utils.sizeof_fmt(self.size),
                                'overall_bytes': self.size
                                })
Code Example #16
def main(network_path, image_batch_size, stride=25,
         images_folder='roadC621/'):
    """
    Train a neural network with patches of patch_size x patch_size
    (as given via the module network_path).

    Parameters
    ----------
    network_path : str
        Path to a Python script with a function generate_nnet(feats) which
        returns a neural network
    image_batch_size : int
    stride : int
    """
    assert image_batch_size >= 1
    assert stride >= 1
    features, labels = load_data_raw_images(train_images_folder=images_folder)
    mem_size = (sys.getsizeof(42) * len(features) * features[0].size +
                sys.getsizeof(42) * len(labels) * labels[0].size)
    logging.info("Loaded %i data images with their labels (approx %s)",
                 len(features),
                 utils.sizeof_fmt(mem_size))
    nn_params = {'training': {'image_batch_size': image_batch_size,
                              'stride': stride}}

    logging.info("## Network: %s", network_path)
    network = imp.load_source('sst.network', network_path)
    logging.info("Fully network: %s", str(network.fully))
    nn_params['code'] = inspect.getsource(network)
    nn_params['fully'] = network.fully
    nn_params['patch_size'] = network.patch_size
    assert nn_params['patch_size'] > 0

    labeled_patches = get_patches(features[:1],
                                  labels[:1],
                                  nn_params=nn_params)

    feats, _ = get_features(labeled_patches, fully=nn_params['fully'])
    net1 = network.generate_nnet(feats)
    for block in range(0, len(features), image_batch_size):
        from_img = block
        to_img = block + image_batch_size
        logging.info("Training on batch %i - %i of %i total",
                     from_img,
                     to_img,
                     len(features))
        labeled_patches = get_patches(features[from_img:to_img],
                                      labels[from_img:to_img],
                                      nn_params=nn_params,
                                      stride=stride)
        logging.info(("labeled_patches[0].shape: %s , "
                      "labeled_patches[1].shape: %s"),
                     labeled_patches[0].shape,
                     labeled_patches[1].shape)
        net1 = train_nnet(labeled_patches, net1, fully=nn_params['fully'])

    model_pickle_name = 'nnet1-trained.pickle'
    utils.serialize_model(net1,
                          filename=model_pickle_name,
                          parameters=nn_params)
Code Example #17
    def callback(self, buffer):

        self.cur_size += len(buffer)

        self.progress_update.emit({
            'cur_file_number': str(self.count),
            'cur_downloaded': utils.sizeof_fmt(self.cur_size),
            'overall_downloaded': utils.sizeof_fmt(self.size + self.cur_size),
            'cur_file_bytes': self.cur_size,
            'overall_bytes': self.size + self.cur_size
        })
Code Example #18
File: views.py Project: jdrew1303/accountifie
def index(request):
    pip = os.path.join(sys.exec_prefix, 'bin', 'pip')
    if not os.path.isfile(pip):
        pip = 'pip'
    SHELL_COMMANDS = (
        ('Hostname','hostname'),
        ('hg version', 'hg id'),
        ('git version', "git log --pretty=format:'%h' -n 1"),
        ('hg branch', 'hg branch'),
        ('git branch', 'git rev-parse --abbrev-ref HEAD'),
        ('MySQL version', 'mysql --version'),
        ('Local Packages', '%s freeze -l' % pip)
    )
    SD = OrderedDict()
    for k,v in sorted(settings_list(), key=lambda x: x[0]):
        SD[k] = v
    context = RequestContext(request, {
        'args': sys.argv,
        'exe': sys.executable,
        'settings': SD,
        })

    context['versions'] = OrderedDict()
    # get versions
    curr_dir = os.path.realpath(os.path.dirname(__file__))
    for name, shell_command in SHELL_COMMANDS:
        try:
            result = utils.run_shell_command(shell_command, curr_dir)
            if result:
                if isinstance(result, list):
                    result = '<br>'.join(result)
                context['versions'][name] = result
        except Exception:
            pass
    # machine status    
    context['machine'] = OrderedDict()
    if sys.platform == 'darwin':
        context['machine']['Uptime'] = 'not done yet on MacOS'
        context['machine']['Disk Space'] = 'not done yet on MacOS'
    elif sys.platform == 'win32':
        context['machine']['Uptime'] = 'not done yet on Windows'
        context['machine']['Disk Space'] = 'not done yet on Windows'
    else:
        context['machine']['Uptime'] = utils.server_uptime()
        context['machine']['Disk Space'] = utils.disk_usage('/')._asdict()
    if os.path.exists(settings.MEDIA_ROOT):
        context['machine']['Media Folder'] = utils.sizeof_fmt(utils.folder_size(settings.MEDIA_ROOT))

    context['stats'] = utils.get_available_stats()
    context['apps'] = [(app.__name__, ', '.join([model.__name__ for model in models])) for app, models in all_concrete_models()]
    context['relations'] = [[(model.__name__, ', '.join(['%s (%s) through %s' % (relation.__name__, relation.__module__, field.__class__.__name__)
                                                        for field, relation in relations]), app.__name__) 
                                                            for model, relations in rel_info] 
                                                                for app, rel_info in all_relations()]
    #context['rel_graph'] = 
    
    context['config_warnings'] = utils.get_configuration_warnings()

    return render_to_response('dashboard/index.html', context)
Code Example #19
File: views.py Project: stavenko/holin.pro
    def post(self, request):
        response = {'success': False, }
        img = None
        if request.user.is_authenticated() or not settings.GALLERY_LOGIN_REQUIRED:
            file = (change_form_filenames(request.FILES)).get('image_gallery-file')
            if file:
                img = GalleryImage()
                img.image = file
                if img.image.width > img.image.height:
                    # LANDSCAPE
                    orientation = False
                else:
                    orientation = True
                img.orientation = orientation
                
                if request.user.is_authenticated():
                    img.owner = request.user
                img.save()

                img.thumbnail.save(os.path.basename(img.image.path), File(open(img.image.path)))
                thumb = Image.open(img.image.path)
                (w, h) = thumb.size
                side = min(w, h)
                thumb = thumb.crop([(w - side) / 2, (h - side) / 2, (w + side) / 2, (h + side) / 2])
                thumb.thumbnail(settings.GALLERY_THUMBNAILS_SIZE, Image.ANTIALIAS)
                thumb.save(img.thumbnail.path, quality=100)
                img.save()

                fsize = img.image.size
                
                max_fsize = settings.GALLERY_MAX_FILE_SIZE
                if fsize <= max_fsize:
                    type = imghdr.what(img.image.path)
                    type = type.upper() if type else ''  # imghdr.what returns None for unknown formats
                    allowed_types = settings.GALLERY_IMAGE_TYPES
                    if type in allowed_types:
                        pil_img = Image.open(img.image.path)
                        min_size = settings.GALLERY_MIN_IMAGE_SIZE
                        max_size = settings.GALLERY_MAX_IMAGE_SIZE
                        if pil_img.size[0] >= min_size[0] and pil_img.size[1] >= min_size[1]:
                            if pil_img.size[0] <= max_size[0] and pil_img.size[1] <= max_size[1]:
                                response['success'] = True
                                response['id'] = img.id
                                response['url'] = img.thumbnail.url
                            else:
                                response['message'] = _('Maximal image size is %(w)sx%(h)s (Your image size is %(w_)sx%(h_)s).') % {'w': max_size[0], 'h': max_size[1], 'w_': pil_img.size[0], 'h_': pil_img.size[1]}
                        else:
                            response['message'] = _('Minimal image size is %(w)sx%(h)s (Your image size is %(w_)sx%(h_)s).') % {'w': min_size[0], 'h': min_size[1], 'w_': pil_img.size[0], 'h_': pil_img.size[1]}
                    else:
                        response['message'] = _('Unsupported image format. Supported formats are %s') % ', '.join(allowed_types).upper()
                else:
                    response['message'] = _('Maximal image file size is %s') % sizeof_fmt(max_fsize)
            else:
                response['message'] = _('Can not upload image. Please try again.')
        else:
            response['message'] = _('You must authorize to upload image.')

        if not response['success'] and img is not None:
            img.delete()
        return HttpResponse(json.dumps(response))
Code Example #20
    def all_done(self):
        if self._done_called:
            return

        print "Done with file " + self._org_filename, " - Uploaded ", utils.sizeof_fmt(self.len), "in", \
            utils.format_time(time.time() - self._start_time), "\r",
        print
        self._done_called = True
Code Example #21
File: server.py Project: priestc/LibraryDSS
def home(user=LOGGED_IN_USER):
    item_count = Item.objects.count()
    total_size = 0 #session.query(func.sum(Item.size))[0][0] or 0
    return {
        'user': user,
        'item_count': item_count,
        'total_size': sizeof_fmt(total_size)
    }
Code Example #22
File: datasets.py Project: clarkkev/deep-coref
 def write(self, path):
     if self.active:
         self.data = np.array(self.data, dtype='bool') \
             if self.name == 'y' or self.name == 'pf' else np.vstack(self.data)
         print("Writing {:}, dtype={:}, size={:}".format(
             self.name, str(self.data.dtype),
             utils.sizeof_fmt(self.data.nbytes)))
         np.save(path + self.name, self.data)
Code Example #23
    def get_info(self, file):
        file_type = file.type
        name = file.name
        public_url = file.public_url
        mod_time = file.modified.strftime(magic_const.DATETIME_FORMAT)
        size = utils.sizeof_fmt(file.size)

        return file_type, name, public_url, mod_time, size
Code Example #24
    def _handleDirectory(self, dirname):
        """ Create a HTML page using template injection and render a tablular view of the directory. """

        entry = "<tr><td>[{{-EXTENSION-}}]</td><td><a href='{{-HREF-}}'>{{-FILE_NAME-}}</a></td><td align='right'>{{-DATE_MODIFIED-}}</td><td align='right'>{{-FILE_SIZE-}}</td></tr>"

        all_entries = ""
        template = self._readFile(self.config['OTHER_TEMPLATES'] + '/' +
                                  "dir.html")
        for ent in os.listdir(dirname):
            variables = {
                'EXTENSION': "DIR",
                'HREF': self._toHREF(dirname + "/" + ent),
                'FILE_NAME': ent,
                'DATE_MODIFIED': datetime.fromtimestamp(
                    os.stat(dirname + "/" + ent).st_mtime).strftime("%A %d, %B %Y, %H:%M:%S"),
                'FILE_SIZE': "-"
            }

            # if the "ent" is a file
            if utils.isvalidFile(dirname + "/" + ent):
                if len(ent.split('.')) > 1:
                    variables['EXTENSION'] = ent.split('.')[-1]
                else:
                    variables['EXTENSION'] = "---"
                variables['FILE_SIZE'] = utils.sizeof_fmt(
                    os.stat(dirname + "/" + ent).st_size)

            all_entries += self._inject_variables(entry, variables)

        dicto = {
            'ENTRIES': all_entries,
            'SERVER_DETAILS': self.config['SERVER_SHORT_NAME'] + " Server at " +
                              self.config['HOST_NAME'] + " Port " + str(self.config['BIND_PORT']),
            'PATH': self._toHREF(dirname) + "/",
            'BACK_HREF': "/".join((self._toHREF(dirname) + "/").split('/')[:-2])
        }
        if dicto['BACK_HREF'] == "":
            dicto['BACK_HREF'] = "/"

        return {
            'DIRECTORY': {
                'directory': self._inject_variables(template, dicto).encode('utf-8'),
                'status_code': 200
            }
        }
Code Example #25
    def setup(self):

        data_engine = self.parameters['access_engine']

        self.loading_box = QVBoxLayout()
        self.main_vbox.addLayout(self.loading_box)

        # compute the size estimate once and reuse it for both labels
        estimated_size = utils.sizeof_fmt(data_engine.calculate_size(self.parameters))

        self.movie_label = QLabel(
            'Estimated download size: <b>' + estimated_size + '</b>')
        self.loading_box.addWidget(self.movie_label)

        self.loading_label = QLabel(
            'Local storage required: <b>' + estimated_size + '</b>')
        self.loading_box.addWidget(self.loading_label)
Code Example #26
File: core.py Project: fpenarru/gvsig-online
 def _helper(name, filetype, extension):
     return {
         'filepath': os.path.join(self.path, name),
         'extension': extension,
         'filetype': filetype,
         'filename': name,
         'filedate': FILEMANAGER_STORAGE.modified_time(os.path.join(self.path, name)),
         'filesize': sizeof_fmt(FILEMANAGER_STORAGE.size(os.path.join(self.path, name))),
     }
Code Example #27
File: block_manager.py Project: Nosfe/monitoring
 def report(self, indent):
     pfx = " " * indent
     s = pfx + "Block manager\n"
     indent += 1
     pfx = " " * indent
     s += pfx + "Executor ID: {}\n".format(self.executor_id)
     s += pfx + "Time added: {}\n".format(datetime.fromtimestamp(self.add_timestamp/1000))
     s += pfx + "Maximum memory: {}\n".format(sizeof_fmt(self.maximum_memory))
     return s
Code Example #28
File: torrent.py Project: alonewolfx2/seedmage
    def __str__(self):
        announce = self.torrent_header[b"announce"].decode("utf-8")
        result = "Announce: %s\n" % announce

        if b"creation date" in self.torrent_header:
            creation_date = self.torrent_header[b"creation date"]
            creation_date = datetime.fromtimestamp(creation_date)
            result += "Date: %s\n" % creation_date.strftime(
                "%Y/%m/%d %H:%M:%S")

        if b"created by" in self.torrent_header:
            created_by = self.torrent_header[b"created by"].decode("utf-8")
            result += "Created by: %s\n" % created_by

        if b"encoding" in self.torrent_header:
            encoding = self.torrent_header[b"encoding"].decode("utf-8")
            result += "Encoding:   %s\n" % encoding

        torrent_info = self.torrent_header[b"info"]
        piece_len = torrent_info[b"piece length"]
        result += "Piece len: %s\n" % utils.sizeof_fmt(piece_len)
        pieces = len(torrent_info[b"pieces"]) // 20  # each piece hash is a 20-byte SHA-1
        result += "Pieces: %d\n" % pieces

        torrent_name = torrent_info[b"name"].decode("utf-8")
        result += "Name: %s\n" % torrent_name

        if b"files" in torrent_info:
            # Multiple File Mode
            result += "Files:\n"
            for file_info in torrent_info[b"files"]:
                fullpath = "/".join(
                    [x.decode("utf-8") for x in file_info[b"path"]])
                result += "  '%s' (%s)\n" % (
                    fullpath, utils.sizeof_fmt(file_info[b"length"]))
        else:
            # Single File Mode
            result += "Length: %s\n" % utils.sizeof_fmt(
                torrent_info[b"length"])
            if b"md5sum" in torrent_info:
                result += "Md5: %s\n" % torrent_info[b"md5sum"]

        return result
Code Example #29
File: core.py Project: fpenarru/gvsig-online
 def file_details(self):
     filename = self.path.rsplit('/', 1)[-1]
     return {
         'directory': os.path.dirname(self.path),
         'filepath': self.path,
         'filename': filename,
         'filesize': sizeof_fmt(FILEMANAGER_STORAGE.size(self.location)),
         'filedate': FILEMANAGER_STORAGE.modified_time(self.location),
         'fileurl': self.url,
     }
Code Example #30
    def callback(self, buffer):

        self.size += len(buffer)

        self.progress_update.emit({
            'overall_downloaded': utils.sizeof_fmt(self.size),
            'overall_bytes': self.size
        })
Code Example #31
 def report(self, indent):
     pfx = " " * indent
     s = pfx + "Block manager\n"
     indent += 1
     pfx = " " * indent
     s += pfx + "Executor ID: {}\n".format(self.executor_id)
     s += pfx + "Time added: {}\n".format(
         datetime.fromtimestamp(self.add_timestamp / 1000))
     s += pfx + "Maximum memory: {}\n".format(
         sizeof_fmt(self.maximum_memory))
     return s
Code Example #32
File: downloadworker.py Project: jamestyj/benchmark
    def get_file(self):
        if self.opts.verbose:
            log('%s: Downloading...' % self.file_name)

        start    = time.time()
        contents = self.s3_key.get_contents_as_string()
        elapsed  = time.time() - start
        speed    = len(contents) / elapsed
        log('%s: Downloaded %s in %.1fs (%s/s)' % (self.file_name,
                sizeof(contents), elapsed, sizeof_fmt(speed)))
        return self.file_name, self.data_set_name, contents
Code Example #33
def main():
    args = engine.parser.parse_args()
    filenames = utils.get_filenames(args.input)
    files_exist = len(filenames) != 0
    stopwords_exist = os.path.isfile(args.stopwords)
    if files_exist and stopwords_exist:
        used_tokenizer = engine.tokenizers[args.tokenizer]
        if used_tokenizer.has_rule(rules.stopping):
            used_tokenizer.make_rule(rules.stopping, args.stopwords)
        values = ['store_positions', 'calculate_tfidf']
        combinations = [{
            key: value
            for key, value in zip(values, option)
        } for option in product([True, False], repeat=len(values))]
        for combination in combinations:
            (indexer,
             max_memory), interval = utils.timeit(utils.profileit,
                                                  engine.indexit,
                                                  used_tokenizer,
                                                  filenames,
                                                  memory_usage=args.memory,
                                                  **combination)
            indexer.save(args.output)
            print('Answers({}):'.format(', '.join([
                '{} = {}'.format(key, value)
                for key, value in combination.items()
            ])))
            print('Time taken: {}s'.format(interval))
            print('Max memory usage: {}'.format(utils.sizeof_fmt(max_memory)))
            print('Disk size: {}'.format(
                utils.sizeof_fmt(os.path.getsize(args.output))))
            indexer.dispose()
            del indexer
    else:
        if not files_exist:
            print(
                'Error: File or directory (with files) to index doesn\'t exist!'
            )
        if not stopwords_exist:
            print('Error: Stopwords\' file doesn\'t exist!')
Code Example #34
def adm_stat(bot, update):
    utils.incStatTG("adm_stat")

    currentDataPackage = {
        "action" : "adm_stat",
        "chat_id" : update.message.chat_id,
    }

    try:

        langCode = db.userHandle.get_user(update.message.chat_id).lang
        lang = language.getLang(langCode)
        
        pid = os.getpid()
        py = psutil.Process(pid)
        mem = psutil.virtual_memory()

        bot.send_message(
            chat_id = update.message.chat_id, 
            text = lang["stat"].format(
                    psutil.cpu_percent(), 
                    utils.sizeof_fmt(mem.total), 
                    utils.sizeof_fmt(mem.available), 
                    utils.sizeof_fmt(mem.free), 
                    utils.sizeof_fmt(mem.used), 
                    mem.percent, 
                    language.display_time(time.time() - psutil.boot_time(), langCode, 5), 
                    language.display_time(time.time() - py.create_time(), langCode, 5),
                    cfg.globalStat.postSent,
                    cfg.globalStat.postRecieved,
                    cfg.globalStat.forcedRequests,
                    lang["stat_empty"] if len(cfg.globalStat.postAttachments) == 0 else '\n' + "\n".join([lang["stat_list_item"].format(utils.escape_string(k, utils.ef_bold), utils.escape_string(v, utils.ef_italic)) for k, v in iter(cfg.globalStat.postAttachments.items())]),
                    lang["stat_empty"] if len(cfg.globalStat.vkRequests) == 0 else '\n' +"\n".join([lang["stat_list_item"].format(utils.escape_string(k, utils.ef_bold), utils.escape_string(v, utils.ef_italic)) for k, v in iter(cfg.globalStat.vkRequests.items())]),
                    lang["stat_empty"] if len(cfg.globalStat.tgRequests) == 0 else '\n' +"\n".join([lang["stat_list_item"].format(utils.escape_string(k, utils.ef_bold), utils.escape_string(v, utils.ef_italic)) for k, v in iter(cfg.globalStat.tgRequests.items())])),
            
            parse_mode = telegram.ParseMode.MARKDOWN,
            reply_markup = { "remove_keyboard" : True })
    
    except Exception as ex:
        postSender.notify_admin(ex, currentDataPackage)
Code Example #35
File: tgcore.py Project: Coestaris/ReplyItBot
def adm_stat(bot, update):

    try:
        pid = os.getpid()
        py = psutil.Process(pid)
        mem = psutil.virtual_memory()

        bot.send_message(
            chat_id=update.message.chat_id,
            text=
            u"*CPU*: {}_%_\n\n*Mem*:\n_Total_: {}\n_Available_: {}\n_Free_: {}\n_Used_: {} ({}%)\n\n*Server uptime*: {}\n\n*Bot uptime*: {}"
            .format(psutil.cpu_percent(), utils.sizeof_fmt(mem.total),
                    utils.sizeof_fmt(mem.available),
                    utils.sizeof_fmt(mem.free), utils.sizeof_fmt(mem.used),
                    mem.percent,
                    utils.display_time(time.time() - psutil.boot_time(), 5),
                    utils.display_time(time.time() - py.create_time(), 5)),
            parse_mode=telegram.ParseMode.MARKDOWN,
            reply_markup={"remove_keyboard": True})

    except Exception as ex:
        notify_admin(ex)
Code Example #36
    def progress(self, file_name, total, progress):
        if total == 0:
            self.last_time = time.time()

        now_time = time.time()

        if now_time - self.last_time >= 1:
            speed = (progress - self.sent_size) / (now_time - self.last_time)
            self.last_time = now_time
            self.sent_size = progress
            sys.stdout.write(file_name + "    %s/%s   %s/s    %f%%   \r" % (
                utils.sizeof_fmt(progress),
                utils.sizeof_fmt(self.total_size),
                utils.sizeof_fmt(speed),
                (float(self.sent_size) / float(self.total_size)) * 100))
            sys.stdout.flush()

        if progress == self.total_size:
            print ""
            print "Done."
            self.sent_size = progress
            return
Code Example #37
File: fs.py Project: y3g0r/filehosting
 def from_root_to_leafs(file_path):
     current = OrderedDict()
     stat_info = os.stat(file_path)
     current['path'] = file_path[len(self.path.base_dir):] or "/"
     current['bytes'] = stat_info.st_size
     current['size'] = utils.sizeof_fmt(stat_info.st_size)
     current['modified'] = d.datetime.fromtimestamp(stat_info.st_mtime).strftime(self.MODIFIED_DATETIME_FORMAT)
     if stat.S_ISDIR(stat_info.st_mode):
         current['is_dir'] = True
         current['children'] = [from_root_to_leafs(os.path.join(file_path, x)) for x in os.listdir(file_path)]
     else:
         current['is_dir'] = False
     return current
Code Example #38
def main():
    parser.add_argument(
        '--store_positions',
        action='store_true',
        help='Indicates if indexer stores positions of terms or not')
    parser.add_argument('--tfidf',
                        action='store_true',
                        help='Indicates if program calculates tfidf or not')
    args = parser.parse_args()
    filenames = utils.get_filenames(args.input)
    files_exist = len(filenames) != 0
    stopwords_exist = os.path.isfile(args.stopwords)
    if files_exist and stopwords_exist:
        used_tokenizer = tokenizers[args.tokenizer]
        if used_tokenizer.has_rule(rules.stopping):
            used_tokenizer.make_rule(rules.stopping, args.stopwords)
        (index, max_memory), interval = utils.timeit(
            utils.profileit,
            indexit,
            used_tokenizer,
            filenames,
            store_positions=args.store_positions,
            calculate_tfidf=args.tfidf,
            memory_usage=args.memory)
        index.save(args.output)
        print('Answers:')
        print('Time taken: {}s'.format(interval))
        print('Max memory usage: {}'.format(utils.sizeof_fmt(max_memory)))
        print('Disk size: {}'.format(
            utils.sizeof_fmt(os.path.getsize('{}.csv'.format(args.output)))))
        shutil.rmtree('index')
    else:
        if not files_exist:
            print(
                'Error: File or directory (with files) to index doesn\'t exist!'
            )
        if not stopwords_exist:
            print('Error: Stopwords\' file doesn\'t exist!')
Code Example #39
File: file_views.py Project: bring52405/bioshare
def upload_file(request, share, subdir=None):
    from os.path import join
    os.umask(settings.UMASK)
    PATH = share.get_path()
    if subdir is not None:
        PATH = join(PATH, subdir)
    data = {
        'share': share.id,
        'subdir': subdir,
        'files': []
    }  #{key:val for key,val in request.POST.iteritems()}
    for name, file in request.FILES.iteritems():
        filename = clean_filename(file.name)
        FILE_PATH = join(PATH, filename)
        handle_uploaded_file(FILE_PATH, file)
        subpath = filename if subdir is None else subdir + filename
        url = reverse('download_file',
                      kwargs={
                          'share': share.id,
                          'subpath': subpath
                      })
        (mode, ino, dev, nlink, uid, gid, size, atime, mtime,
         ctime) = os.stat(FILE_PATH)
        data['files'].append({
            'name': filename,
            'extension': filename.split('.').pop() if '.' in filename else '',
            'size': sizeof_fmt(size),
            'bytes': size,
            'url': url,
            'modified': datetime.datetime.fromtimestamp(mtime).strftime("%m/%d/%Y %H:%M"),
            'isText': istext(FILE_PATH)
        })


#         response['url']=reverse('download_file',kwargs={'share':share.id,'subpath':details['subpath']})
#         url 'download_file' share=share.id subpath=subdir|default_if_none:""|add:file.name
    ShareLog.create(
        share=share,
        user=request.user,
        action=ShareLog.ACTION_FILE_ADDED,
        paths=[clean_filename(file.name) for file in request.FILES.values()],
        subdir=subdir)
    return json_response(data)
Code Example #40
 def update_progress(self, kwparams):
     
     if self.data_engine.SUPPORT_PARTIAL_PROGRESS_REPORTING:
         kwparams['overall_file_size'] = utils.sizeof_fmt(self.file_size)
         kwparams['file_count'] = str(self.file_count)
     
     if 'overall_size' in kwparams:
         self.overall_size = kwparams['overall_size']
         self.overall_progress_bar.setMaximum(self.overall_size)
     
     if 'size_unknown' not in kwparams:
         kwparams['overall_size'] = utils.sizeof_fmt(self.overall_size)
     else:
         kwparams['overall_size'] = '? MB'
     
     if self.data_engine.SUPPORT_PARTIAL_PROGRESS_REPORTING:
         self.current_progress_label.setText('Current file (' + kwparams['cur_file_number'] + ' of ' + 
                                             kwparams['file_count'] + '): ' + kwparams['cur_downloaded'] + 
                                             ' of ' + kwparams['overall_file_size'] + ' downloaded')
         self.current_progress_bar.setValue(kwparams['cur_file_bytes'])
     
     self.overall_progress_label.setText('Overall progress: ' + kwparams['overall_downloaded'] + ' of ' + 
                                          kwparams['overall_size'] + ' downloaded')
     self.overall_progress_bar.setValue(kwparams['overall_bytes'])
Code Example #41
File: main.py Project: rettier/c
def ll(*args):
    files = storage_backend.list(prefix="/".join(args))
    dirs = sorted(filter(lambda x: x["dir"], files), key=lambda x: x["path"])
    files = sorted(filter(lambda x: not x["dir"], files),
                   key=lambda x: x["path"])

    result = ""
    for dir in dirs:
        result += "   (dir) {path}\n".format(**dir)

    for file in files:
        human_size = sizeof_fmt(file["size"])
        result += "{human_size:>8} {path}\n".format(human_size=human_size,
                                                    **file)

    return result
Code Example #42
File: tasks.py Project: pavelvod/kaggle-m5-pipeline
    def run(self):
        # Now we have 3 sets of features
        data = pd.concat([
            self.load('data4'),
            self.load('data2').iloc[:, 2:],
            self.load('data3').iloc[:, 2:]
        ],
                         axis=1)

        data = reduce_mem_usage(data)

        # Let's check again memory usage
        print("{:>20}: {:>8}".format(
            'Full Grid', sizeof_fmt(data.memory_usage(index=True).sum())))
        print('Size:', data.shape)
        self.save(data)
Code Example #43
File: fs.py Project: y3g0r/filehosting
 def from_root_to_leafs(file_path):
     current = OrderedDict()
     stat_info = os.stat(file_path)
     current['path'] = file_path[len(self.path.base_dir):] or "/"
     current['bytes'] = stat_info.st_size
     current['size'] = utils.sizeof_fmt(stat_info.st_size)
     current['modified'] = d.datetime.fromtimestamp(
         stat_info.st_mtime).strftime(self.MODIFIED_DATETIME_FORMAT)
     if stat.S_ISDIR(stat_info.st_mode):
         current['is_dir'] = True
         current['children'] = [
             from_root_to_leafs(os.path.join(file_path, x))
             for x in os.listdir(file_path)
         ]
     else:
         current['is_dir'] = False
     return current
Code Example #44
File: fs.py Project: y3g0r/filehosting
 def from_leaf_to_root():
     children = []
     current = None
     for file_path in Worker.iterate_path(upper, lower):
         current = OrderedDict()
         stat_info = os.stat(file_path)
         current['path'] = file_path[len(self.path.base_dir):] or "/"
         current['bytes'] = stat_info.st_size
         current['size'] = utils.sizeof_fmt(stat_info.st_size)
         current['modified'] = d.datetime.fromtimestamp(stat_info.st_mtime).strftime(
             self.MODIFIED_DATETIME_FORMAT)
         if stat.S_ISDIR(stat_info.st_mode):
             current['is_dir'] = True
             current['children'] = children
         else:
             current['is_dir'] = False
         children = current
     return current
Code Example #45
File: server.py Project: pinkeshbadjatiya/Nex
    def _handleDirectory(self, dirname):
        """ Create a HTML page using template injection and render a tablular view of the directory. """

        entry = "<tr><td>[{{-EXTENSION-}}]</td><td><a href='{{-HREF-}}'>{{-FILE_NAME-}}</a></td><td align='right'>{{-DATE_MODIFIED-}}</td><td align='right'>{{-FILE_SIZE-}}</td></tr>"

        all_entries = ""
        template = self._readFile(self.config['OTHER_TEMPLATES'] + '/' + "dir.html")
        for ent in os.listdir(dirname):
            variables = {
                'EXTENSION': "DIR",
                'HREF': self._toHREF(dirname + "/" + ent),
                'FILE_NAME': ent,
                'DATE_MODIFIED': datetime.fromtimestamp(os.stat(dirname + "/" + ent).st_mtime).strftime("%A %d, %B %Y, %H:%M:%S"),
                'FILE_SIZE': "-"
            }

            # if the "ent" is a file
            if utils.isvalidFile(dirname + "/" + ent):
                if len(ent.split('.')) > 1:
                    variables['EXTENSION'] = ent.split('.')[-1]
                else:
                    variables['EXTENSION'] = "---"
                variables['FILE_SIZE'] = utils.sizeof_fmt(os.stat(dirname + "/" + ent).st_size)

            all_entries += self._inject_variables(entry, variables)

        dicto = {
            'ENTRIES': all_entries,
            'SERVER_DETAILS': self.config['SERVER_SHORT_NAME'] + " Server at " + self.config['HOST_NAME'] + " Port " + str(self.config['BIND_PORT']),
            'PATH': self._toHREF(dirname) + "/",
            'BACK_HREF': "/".join((self._toHREF(dirname) + "/").split('/')[:-2])
        }
        if dicto['BACK_HREF'] == "":
            dicto['BACK_HREF'] = "/"

        return {
            'DIRECTORY': {
                'directory': self._inject_variables(template, dicto).encode('utf-8'),
                'status_code': 200
            }
        }
Code Example #46
File: admin.py Project: chrisspen/django-analyze
 def max_memory_usage_str(self, obj=None):
     if not obj or not obj.max_memory_usage:
         return ''
     return utils.sizeof_fmt(obj.max_memory_usage)
Code Example #47
def categorylinks_sql2csv(inputFileName, pagesFileName, subcatsFileName):
	STATE_OUTSIDE = 1
	STATE_IN_ENTRY = 2
	STATE_IN_STRING = 3
	STATE_IN_STRING_ESCAPED = 4

	neededCategories = {}
	processedLines = 0
	readBytes = 0
	isNextCharEscaped = False

	with open(inputFileName, encoding='latin-1') as inputFile:
		with open(pagesFileName, 'a+', encoding='utf8') as pagesFile:
			with open(subcatsFileName, 'a+', encoding='utf8') as subcatsFile:
				while True:
					# jump to INSERT INTO
					expectedBeginning = "INSERT INTO"
					while True:
						lineBeginning = inputFile.read(len(expectedBeginning))
						#print (lineBeginning)
						if lineBeginning == "" or lineBeginning == expectedBeginning:
							break
						inputFile.readline() # read and throw rest of the line

					# we are now in expected line
					currentState = STATE_OUTSIDE
					entryBuffer = ""
					parts = []

					while True:
						ch = inputFile.read(1)
						readBytes += 1
						if ch == "":
							print ("EOF")
							return

						if currentState == STATE_OUTSIDE:
							entryBuffer = ""
							if ch == "\n":
								processedLines += 1
								print(str(processedLines) + " lines processed, " + utils.sizeof_fmt(readBytes) + " read")
							elif ch == "(":
								currentState = STATE_IN_ENTRY
						elif currentState == STATE_IN_ENTRY:
							if ch == "'":
								currentState = STATE_IN_STRING
							elif ch == ",":
								parts.append(entryBuffer)
								entryBuffer = ""
							elif ch == ")":
								parts.append(entryBuffer)
								entryBuffer = ""
								
								# PRINT
								if parts[6] == "subcat":
									print(";".join([parts[0], parts[1]]), file=subcatsFile)
								else:
									print(";".join([parts[0], parts[1], parts[6]]), file=pagesFile)
								
								parts = []
								currentState = STATE_OUTSIDE
							else:
								entryBuffer += ch
						elif currentState == STATE_IN_STRING:
							if ch == "\\":
								entryBuffer += ch
								currentState = STATE_IN_STRING_ESCAPED
							elif ch == "'":
								currentState = STATE_IN_ENTRY
							else:
								entryBuffer += ch
						elif currentState == STATE_IN_STRING_ESCAPED:
							entryBuffer += ch
							currentState = STATE_IN_STRING
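For context, a sketch of the SQL dump input this state machine walks. The seven-column row layout, with the page/subcat type at index 6, is an assumption inferred from the parts[0], parts[1], and parts[6] accesses above; real categorylinks dumps may differ:

INSERT INTO `categorylinks` VALUES (12,'Physics','sortkey','2015-01-01 00:00:00','','uca-default','page'),(34,'Nature','sortkey','2015-01-01 00:00:00','','uca-default','subcat');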
Code Example #48
 def getSize(self):
     if self.size == -1:
         return 'N/A'
     return sizeof_fmt(self.size)
Code Example #49
File: models.py Project: amschaal/bioshare
 def hr_size(self):
     from utils import sizeof_fmt
     return sizeof_fmt(self.bytes)
Code Example #50
File: file_views.py Project: amschaal/bioshare
def upload_file(request, share, subdir=None):
    from os.path import join
    os.umask(settings.UMASK)
    PATH = share.get_path()
    if subdir is not None:
        PATH = join(PATH,subdir)
    data = {'share': share.id, 'subdir': subdir, 'files': []}  # {key:val for key,val in request.POST.iteritems()}
    for name,file in request.FILES.iteritems():
        FILE_PATH = join(PATH,file.name)
        handle_uploaded_file(FILE_PATH,file)
        subpath = file.name if subdir is None else subdir + file.name
        url = reverse('download_file',kwargs={'share':share.id,'subpath':subpath})
        (mode, ino, dev, nlink, uid, gid, size, atime, mtime, ctime) = os.stat(FILE_PATH)
        data['files'].append({'name': file.name,
                              'extension': file.name.split('.').pop() if '.' in file.name else '',
                              'size': sizeof_fmt(size),
                              'bytes': size,
                              'url': url,
                              'modified': datetime.datetime.fromtimestamp(mtime).strftime("%m/%d/%Y %I:%M %p"),
                              'isText': istext(FILE_PATH)})
#         response['url']=reverse('download_file',kwargs={'share':share.id,'subpath':details['subpath']})
#         url 'download_file' share=share.id subpath=subdir|default_if_none:""|add:file.name 
    ShareLog.create(share=share,user=request.user,action=ShareLog.ACTION_FILE_ADDED,paths=[file.name for file in request.FILES.values()],subdir=subdir)
    return json_response(data)
Code Example #51
File: sansa.py Project: wiehan-a/spectral-toolkit
 def download(self, start_time, end_time):
     print start_time, end_time
     num_samples = int(125 * (end_time - start_time).total_seconds())
     
     if config_db.has_key("proxies"):
         proxy_support = urllib2.ProxyHandler(config_db['proxies'])
         opener = urllib2.build_opener(proxy_support, urllib2.HTTPHandler(debuglevel=1))
         urllib2.install_opener(opener)
     
     request = urllib2.Request(build_request_string({'start_date' : start_time,
                                                     'end_date' : end_time}))
     request.add_header('Accept-encoding', 'gzip,deflate')
     response = urllib2.urlopen(request)
     self.response = response
     
     is_gzipped = response.headers.get('content-encoding', '').find('gzip') >= 0
     d = zlib.decompressobj(16 + zlib.MAX_WBITS)
     buffer = response.read()
     if is_gzipped:
         compressed = len(buffer)
         self.real_size += compressed
         buffer = d.decompress(buffer)
         print "LB", len(buffer) / compressed
         
         lines = buffer.split('<br>')
         if len(buffer) == 0 or len(lines) == 0:
             print "********* No data for block: ", num_samples, "samples missed"
             zeros = np.zeros(shape=(num_samples,), dtype=np.float32)
             zeros.tofile(self.comp_1_file)
             zeros.tofile(self.comp_2_file)
             zeros.tofile(self.comp_3_file)
             
             self.missing_intervals.append([self.sample_count, self.sample_count + num_samples])
             self.sample_count += num_samples
         
         else:
             last_time = start_time
             for x in lines:
                 line = x.split(';')
                 if len(line) > 1:
                     time_ = datetime.datetime.strptime(line[1], "%Y-%m-%d %H:%M:%S").replace(microsecond=int(line[2]))
                     interval = time_ - last_time
                     if interval > datetime.timedelta(microseconds=8100):
                         samples_missed = int(np.round(interval.total_seconds() / (1.0 / 125)))  # ndarray shapes must be ints
                         print "****** A-- we missed some samples ===", samples_missed, last_time, time_
                         zeros = np.zeros(shape=(samples_missed - 1,), dtype=np.float32)
                         zeros.tofile(self.comp_1_file)
                         zeros.tofile(self.comp_2_file)
                         zeros.tofile(self.comp_3_file)
                         self.missing_intervals.append([self.sample_count, self.sample_count + samples_missed])
                         self.sample_count += samples_missed
                     
                     last_time = time_
                     self.comp_1_file.write(struct.pack('f', float(line[3])))
                     self.comp_2_file.write(struct.pack('f', float(line[4])))
                     self.comp_3_file.write(struct.pack('f', float(line[5])))
                     self.sample_count += 1
                     
             interval = end_time - last_time
             if interval > datetime.timedelta(microseconds=16000):
                 samples_missed = int(np.round(interval.total_seconds() / (1.0 / 125))) - 1
                 print "****** B-- we missed some samples ===", samples_missed
                 zeros = np.zeros(shape=(samples_missed,), dtype=np.float32)
                 zeros.tofile(self.comp_1_file)
                 zeros.tofile(self.comp_2_file)
                 zeros.tofile(self.comp_3_file)
                 self.missing_intervals.append([self.sample_count, self.sample_count + samples_missed])
                 self.sample_count += samples_missed
                         
         
     self.size += 10 * 60
     
     print utils.sizeof_fmt(self.real_size)
     
     self.progress_update.emit({'overall_downloaded': utils.sizeof_fmt(self.real_size),
                                'overall_bytes': self.size,
                                'size_unknown' : True
                                })
Code Example #52
File: asset.py Project: zncb/wtrace
 def __str__(self):
     from utils import sizeof_fmt
     return "{hsh} {sz:8} {ct:24} [{h}]".format(h=self.host,\
                                                ct="/".join([str(self.type),str(self.subtype)]),\
                                                sz=sizeof_fmt(self.size),\
                                                hsh=self.hashes['sha1'])
Code Example #53
def main(argv=None):
    if argv is None:
        argv = sys.argv
    if (len(argv) > 6) or (len(argv) < 5):
        print "Usage: " + argv[0] + " <config file> <domain> <v_node> <hostname> [YYYY_MM_DD]"
        print "The config file is the same format as used for backups, backup dir, snapshot name and swift credentials are used"
        print 'The domain is the domain to be restored from swift and the v_node is the vertica node name to restore data for'
        print 'If the year/month/day is specified the most recent backup on that day will be downloaded rather than prompting'
        return 1

    config_file = argv[1]
    domain = argv[2]
    v_node_name = argv[3]
    hostname = argv[4]
    if len(argv) == 6:
        day = argv[5]
    else:
        day = None
    config = yaml.load(open(config_file, 'r'))

    # Setup logging
    logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)
    log = logging.getLogger(__name__)

    with LogTime(log.info, "Restore download completed"):

        # Setup swift/paths
        base_dir, prefix_dir = calculate_paths(config, v_node_name)
        swift_store = SwiftStore(config['swift_key'], config['swift_region'], config['swift_tenant'],
                                 config['swift_url'], config['swift_user'], prefix_dir, domain=domain,
                                 vnode=v_node_name, hostname=hostname)
        fs_store = FSStore(base_dir, prefix_dir)

        # Get the metadata from the last restore (if any)
        current_metadata = DirectoryMetadata(fs_store)

        # Grab the swift metadata we want to restore
        if day is None:
            pickle = choose_one(swift_store.list_pickles(), "Please choose a pickle to restore from")
        else:
            # Since the list is sorted this will find the newest that matches the given day, or None otherwise
            pickle = None
            for option in swift_store.list_pickles():
                if option.startswith(day):
                    pickle = option

        if pickle is None:
            log.error('No backups found in swift.')
            sys.exit(1)
        swift_metadata = DirectoryMetadata.load_pickle(swift_store, pickle)

        # Compare the files in the current restore and swift and download/delete as necessary
        with LogTime(log.debug, "Diff completed", seconds=True):
            to_download, to_del = swift_metadata.diff(current_metadata)

        size_downloaded = 0
        with LogTime(log.info, "Download Completed"):
            for relative_path in to_download:
                size_downloaded += swift_store.download(relative_path, base_dir)
        log.info("\tDownloaded %s in %d items" % (sizeof_fmt(size_downloaded), len(to_download)))

        with LogTime(log.info, "Deleted %d items" % len(to_del)):
            for relative_path in to_del:
                fs_store.delete(relative_path)

        EpochFiles(os.path.join(base_dir, prefix_dir), config['catalog_dir'], config['snapshot_name'], swift_metadata.date).restore()

        # Save the swift metadata to the local fs, to indicate the restore is done
        swift_metadata.save(fs_store)

    delete_pickles(fs_store)
Code Example #54
def main(argv=None):
    if argv is None:
        argv = sys.argv
    if len(argv) != 2:
        print "Usage: " + argv[0] + " <config file> "
        return 1

    config_file = argv[1]
    config = yaml.load(open(config_file, 'r'))

    # Setup logging
    log_path = os.path.join(config['log_dir'], 'backup_' + datetime.today().strftime('%A') + '.log')
    logging.basicConfig(format='%(asctime)s %(message)s', filename=log_path, level=logging.INFO)

    # log_time is not used here so the timing can be reported to nagios
    start = time.time()
    exit_status = 0

    # Run the vbr backup command - The vbr run is quite fast typically completing in less than a minute
    if config['run_vbr']:
        run_vbr(config)  # If this fails it will sys.exit with an appropriately bad nagios error

    try:
        base_dir, prefix_dir = calculate_paths(config)
        swift_store = SwiftStore(config['swift_key'], config['swift_region'], config['swift_tenant'],
                                 config['swift_url'], config['swift_user'], prefix_dir, config['auth_version'])
        fs_store = FSStore(base_dir, prefix_dir)
        upload_time = datetime.today()

        epoch_files = EpochFiles(os.path.join(base_dir, prefix_dir), config['snapshot_name'], upload_time)
        epoch_files.archive()

        # Grab the local and swift metadata
        current_metadata = DirectoryMetadata(fs_store, upload_time)
        current_metadata.save(fs_store)
        swift_metadata = DirectoryMetadata(swift_store)

        # Compare the files in the current backup and swift and upload as necessary, then delete as necessary
        with LogTime(log.debug, "Diff operation completed", seconds=True):
            to_add, do_not_del = current_metadata.diff(swift_metadata)

        size_uploaded = 0
        with LogTime(log.info, "Uploaded Completed"):
            for relative_path in to_add:
                size_uploaded += swift_store.upload(relative_path, base_dir)
        log.info("\tUploaded %s in %d items" % (sizeof_fmt(size_uploaded), len(to_add)))

        with LogTime(log.info, "Determining items to delete, retaining %d backups" % config['retain']):
            # Grab the pickle names I want to combine, relying on these being in order by date, newest first
            pickles = swift_store.list_pickles()
            combine_pickles = pickles[:config['retain']]

            # Take metadata in all these pickles combine.
            # It would be good to check that there is no overlap in filenames with different content.
            combined_metadata = DirectoryMetadata()
            for pickle in combine_pickles:
                pickle_metadata = DirectoryMetadata.load_pickle(swift_store, pickle)
                combined_metadata.metadata.update(pickle_metadata.metadata)

            # Do a diff with all that is in swift, anything in swift but not in the combined set can be deleted.
            should_be_empty, to_del = combined_metadata.diff(swift_metadata)
            if len(should_be_empty) != 0:
                exit_status = 1
                log.error(
                    "ERROR: Found files in the %d combined retained backups that were not in swift.\n%s"
                    % (config['retain'], should_be_empty)
                )

        with LogTime(log.info, "Deleted %d items" % len(to_del)):
            for relative_path in to_del:
                swift_store.delete(relative_path)

        # Upload today's metadata pickle, this is done last so its presence an indication the backup is done.
        current_metadata.save(swift_store)

        # Clean up old pickles
        delete_pickles(fs_store)
        delete_pickles(swift_store, config['retain'])

    except Exception:
        log.exception('Unhandled Exception in Backup upload')
        # Move the Epoch files back to their original names so a retry run does not encounter issues with them
        epoch_files.restore()
        exit_status = 1

    # Status message and exit
    stop = time.time()
    duration = (stop - start) / 60
    duration_msg = "Backup completed in %d minutes total. Thresholds, warn %d.|%d" % \
                   (duration, config['warning'], duration)
    log.info(duration_msg)

    nagios_exit(exit_status, duration_msg, duration, config['warning'])
Code Example #55
File: library.py Project: priestc/LibraryDSS
 def human_size(self):
     return sizeof_fmt(self.size)