Example #1
def custom_progress_text(message):
    import progressbar
    from string import Formatter

    message_ = message.replace('(', '{')
    message_ = message_.replace(')', '}')

    keys = [key[1] for key in Formatter().parse(message_)]

    ids = {}
    for key in keys:
        if key is not None:
            ids[key] = float('nan')

    msg = progressbar.FormatCustomText(message, ids)
    return msg
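
A minimal usage sketch (not part of the original example): the helper expects a %-style template, and since every extracted key starts out as NaN, float fields such as %(loss).3f are the safe choice (NaN cannot be rendered with %d).

import progressbar

text = custom_progress_text('loss: %(loss).3f')  # keys are parsed out of the template
bar = progressbar.ProgressBar(widgets=[text, ' ', progressbar.Bar()])
for i in bar(range(10)):
    text.update_mapping(loss=1.0 / (i + 1))  # dummy loss value, for illustration only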
Example #2
def test_format_custom_text_widget():
    widget = progressbar.FormatCustomText(
        'Spam: %(spam).1f kg, eggs: %(eggs)d',
        dict(
            spam=0.25,
            eggs=3,
        ),
    )

    bar = progressbar.ProgressBar(widgets=[
        widget,
    ])

    for i in bar(range(5)):
        widget.update_mapping(eggs=i * 2)
        assert widget.mapping['eggs'] == bar.widgets[0].mapping['eggs']
Example #3
    def __init__(self, stat_format=''):
        """Construct instance of data pipeline progress bar."""
        self.statuses = StatView()
        self.stat_format = stat_format

        self.format_custom_text = progressbar.FormatCustomText(
            '(%(school)s) ==%(mode)s== %(stats)s', )

        self.bar = progressbar.ProgressBar(redirect_stdout=True,
                                           max_value=progressbar.UnknownLength,
                                           widgets=[
                                               ' [',
                                               ProgressBar.Timer(),
                                               '] ',
                                               self.format_custom_text,
                                           ])
Example #4
	def extractFile(self, filename):
		def on_progress(filename, position, total_size, pb):
			pass

		def get_file_progress_file_object_class(on_progress, pb):
			class FileProgressFileObject(tarfile.ExFileObject):
				def read(self, size, *args):
					on_progress(self.name, self.position, self.size, pb)
					return tarfile.ExFileObject.read(self, size, *args)
			return FileProgressFileObject

		class ProgressFileObject(io.FileIO):
			def __init__(self, path, pb, *args, **kwargs):
				self.pb = pb
				self._total_size = os.path.getsize(path)
				io.FileIO.__init__(self, path, *args, **kwargs)

			def read(self, size):
				self.pb.update(self.tell())
				return io.FileIO.read(self, size)
		#:
		terms = shutil.get_terminal_size((100, 100))
		# filler = 0
		# if terms[0] > 100:
		# 	filler = int(terms[0]/4)
		widgets = [
			progressbar.FormatCustomText("Extracting : {:25.25}".format(os.path.basename(filename))), " ",
			progressbar.Percentage(), " ",
			progressbar.Bar(fill=chr(9617), marker=chr(9608), left="[", right="]"), " ",
			progressbar.DataSize(), "/", progressbar.DataSize(variable="max_value"),
			# " "*filler,
		]

		pbar = progressbar.ProgressBar(widgets=widgets, max_value=os.path.getsize(filename))
		pbar.start()
		tarfile.TarFile.fileobject = get_file_progress_file_object_class(on_progress, pbar)
		tar = tarfile.open(fileobj=ProgressFileObject(filename, pbar), mode="r:*")
		outputPath = os.path.commonprefix(tar.getnames())
		if os.path.isfile(outputPath):
			tar.close()
			pbar.finish()
			return outputPath
		else:
			tar.extractall()
			tar.close()
			pbar.finish()
			return outputPath
Example #5
def get_progress_bar(total):
    format_custom_text = progressbar.FormatCustomText(
        'Loss: %(loss).3f | Acc: %(acc).3f%% (%(c)d/%(t)d)',
        dict(
            loss=0,
            acc=0,
            c=0,
            t=0,
        ),
    )
    prog_bar = progressbar.ProgressBar(0, total, widgets=[
        progressbar.Counter(), ' of {} '.format(total),
        progressbar.Bar(),
        ' ', progressbar.ETA(),
        ' ', format_custom_text
    ])
    return prog_bar, format_custom_text
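
A hypothetical driver loop for get_progress_bar (illustrative only; the metric values below are placeholders, not real measurements):

prog_bar, custom_text = get_progress_bar(total=100)
prog_bar.start()
for step in range(100):
    # placeholder metrics; a real training loop would compute these
    custom_text.update_mapping(loss=0.1, acc=95.0, c=step + 1, t=100)
    prog_bar.update(step + 1)
prog_bar.finish()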
Example #6
 def get_progressbar_widget_list(nepoch: int) -> BarWidgetsReturn:
     epoch_status_fmt_str: str = 'EPOCH: %(epoch_ix)d/%(nepoch)d'
     epoch_status = progressbar.FormatCustomText(
         epoch_status_fmt_str, dict(epoch_ix=0, nepoch=nepoch))
     widgets_list: List[widgets.WidgetBase] = [
         widgets.Percentage(),
         ' ',
         widgets.SimpleProgress(format='(%s)' %
                                widgets.SimpleProgress.DEFAULT_FORMAT),
         ' ',
         epoch_status,
         ' ',
         widgets.Bar(),
         ' ',
         widgets.Timer(),
     ]
     return widgets_list, epoch_status
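
A hypothetical way to wire the returned widgets into a bar (a sketch assuming import progressbar and a 100-step run with 20 steps per epoch):

widgets_list, epoch_status = get_progressbar_widget_list(nepoch=5)
bar = progressbar.ProgressBar(max_value=100, widgets=widgets_list)
for step in bar(range(100)):
    epoch_status.update_mapping(epoch_ix=step // 20 + 1)  # assumed epoch size of 20 steps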
Example #7
def format_custom_text():
    format_custom_text = progressbar.FormatCustomText(
        'Spam: %(spam).1f kg, eggs: %(eggs)d',
        dict(
            spam=0.25,
            eggs=3,
        ),
    )

    bar = progressbar.ProgressBar(widgets=[
        format_custom_text,
        ' :: ',
        progressbar.Percentage(),
    ])
    for i in bar(range(25)):
        format_custom_text.update_mapping(eggs=i * 2)
        time.sleep(0.1)
Example #8
    def __init__(self):

        self.format_custom_text = progressbar.FormatCustomText(
            '(%(school)s) ==%(mode)s== ', )

        self.bar = progressbar.ProgressBar(redirect_stdout=True,
                                           max_value=progressbar.UnknownLength,
                                           widgets=[
                                               ' [',
                                               Timer(),
                                               '] ',
                                               self.format_custom_text,
                                               progressbar.Bar(),
                                               '(',
                                               progressbar.ETA(),
                                               ')',
                                           ])
Example #9
    def __init__(self, stat_format="", statistics=None):
        """Construct instance of data pipeline progress bar."""
        self.statistics = statistics or StatView()
        self.stat_format = stat_format

        self.format_custom_text = progressbar.FormatCustomText(
            "(%(school)s) ==%(mode)s== %(stats)s", )

        self.bar = progressbar.ProgressBar(
            redirect_stdout=True,
            # max_value=progressbar.UnknownLength,
            widgets=[
                " [",
                Timer(),
                "] ",
                self.format_custom_text,
            ],
        )
Example #10
def runner_parallel(inp):
    function, args, name, queue, order_idx = inp
    idx = int(multiprocessing.current_process().name.split("-")[1])
    custom_text = progressbar.FormatCustomText(
        '{} - %(type_op)s: '.format(name), dict(type_op="Start"))
    widget_style = [
        custom_text,
        progressbar.Percentage(), ' (',
        progressbar.SimpleProgress(), ')', ' ',
        progressbar.Bar(), ' ',
        progressbar.Timer(), ' ',
        progressbar.AdaptiveETA()
    ]
    args += [
        ProgressBar(widgets=widget_style, fd=Writer((0, idx - 1), queue)),
        custom_text
    ]
    return (function(*args), order_idx)
Example #11
    def __init__(self, present="", past=None, max_value=1, vars=None,
                 **kwargs):

        self.present = present
        self.sub_bar = None
        self.finished = None

        if past is None:
            past = present

        self.msg_bar = MessageBar(
            msg=present, finish_msg="%s finished in" % past)
        widgets = [self.msg_bar, " "]

        if max_value is None:
            widgets.append(progressbar.Timer(format="%(elapsed)s"))
        else:
            widgets.append(progressbar.ETA(
                format="ETA: %(eta)s",
                format_finished="%(elapsed)s"))

        if vars is not None:
            self.var_vals = progressbar.FormatCustomText(
                " (" + ", ".join("%s: %%(%s)s" % (v, v) for v in vars) + ")",
                {v: "---" for v in vars})
            widgets.append(self.var_vals)
        else:
            self.var_vals = None

        def update_thread():
            while not self.finished:
                if self.sub_bar is None or self.sub_bar.finished:
                    self.update()
                time.sleep(0.001)

        self.thread = threading.Thread(target=update_thread)
        self.thread.daemon = True

        if max_value is None:
            max_value = progressbar.UnknownLength

        super(ProgressBar, self).__init__(
            poll_interval=0.1, widgets=widgets, fd=sys.stdout,
            max_value=max_value, **kwargs)
Example #12
 def _create_progress_bar(cls, n_fitting_tasks, scheduler):
     cls.tasks_text = progressbar.FormatCustomText(
         '(%(tasks)d / %(n_tasks)d)', dict(tasks=0,
                                           n_tasks=n_fitting_tasks))
     cls.progress_bar = progressbar.ProgressBar(
         max_value=n_fitting_tasks,
         widgets=[progressbar.Percentage(), ' ', cls.tasks_text])
     cls.fitting_task = 1
     progress_bar_msg = '' if scheduler == 'multiprocessing' else 'Progress: '
     if all([
             hasattr(cls, attribute)
             for attribute in ['ind', 'dataset_name', 'n_datasets']
     ]):
         progress_bar_msg = 'Current dataset: {} | Completed datasets: {}/{} | ' + progress_bar_msg
         cls.progress_bar.prefix = progress_bar_msg.format(
             cls.dataset_name, cls.ind, cls.n_datasets)
     else:
         cls.progress_bar.prefix = progress_bar_msg
Example #13
    def __init__(self):

        self.format_custom_text = progressbar.FormatCustomText(
            "(%(school)s) ==%(mode)s== ", )

        self.bar = progressbar.ProgressBar(
            redirect_stdout=True,
            max_value=progressbar.UnknownLength,
            widgets=[
                " [",
                Timer(),
                "] ",
                self.format_custom_text,
                progressbar.Bar(),
                "(",
                progressbar.ETA(),
                ")",
            ],
        )
Example #14
    def __init__(self, n_epochs, n_batches):
        self.text = pbar.FormatCustomText(
            'Epoch: %(epoch)d/%(n_epochs)d, Batch: %(batch)d/%(n_batches)d',
            dict(epoch=0, n_epochs=n_epochs, batch=0, n_batches=n_batches),
        )

        self.bar = pbar.ProgressBar(widgets=[
            pbar.Percentage(),
            ' ',
            self.text,
            ' ',
            pbar.Bar(),
            ' ',
            pbar.Timer(),
            ' ',
            pbar.AdaptiveETA(),
            ' ',
        ],
                                    redirect_stdout=True)

        self.bar.start()
Example #15
    def download_file(self,
                      url=None,
                      outputFileName=None,
                      outputPath=None,
                      bytes=False):
        def fmt_size(num, suffix="B"):
            for unit in ["", "Ki", "Mi", "Gi", "Ti", "Pi", "Ei", "Zi"]:
                if abs(num) < 1024.0:
                    return "%3.1f%s%s" % (num, unit, suffix)
                num /= 1024.0
            return "%.1f%s%s" % (num, "Yi", suffix)

        #:
        if not url:
            raise Exception("No URL specified.")

        if outputPath is None:  # Default to current dir.
            outputPath = os.getcwd()
        else:
            if not os.path.isdir(outputPath):
                raise Exception(
                    'Specified path "{0}" does not exist'.format(outputPath))

        fileName = os.path.basename(url)  # Get URL filename
        userAgent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:57.0) Gecko/20100101 Firefox/57.0"

        if 'sourceforge.net' in url.lower():
            userAgent = 'wget/1.18'  # sourceforce <3 wget

        if url.lower().startswith("ftp://"):
            self.log("Requesting : {0}".format(url))
            if outputFileName is not None:
                fileName = outputFileName
            fullOutputPath = os.path.join(outputPath, fileName)
            urllib.request.urlretrieve(url, fullOutputPath)
            return fullOutputPath

        req = requests.get(url, stream=True, headers={"User-Agent": userAgent})

        if req.status_code != 200:
            req.raise_for_status()

        if "content-disposition" in req.headers:
            response = re.findall("filename=(.+)",
                                  req.headers["content-disposition"])
            if not response:
                fileName = os.path.basename(url)
            else:
                fileName = response[0]

        size = None
        compressed = False
        if "Content-Length" in req.headers:
            size = int(req.headers["Content-Length"])

        if "Content-Encoding" in req.headers:
            if req.headers["Content-Encoding"] == "gzip":
                compressed = True

        self.log("Requesting : {0} - {1}".format(
            url,
            fmt_size(size) if size is not None else "?"))

        # terms = shutil.get_terminal_size((100,100))
        # filler = 0
        # if terms[0] > 100:
        # 	filler = int(terms[0]/4)

        widgetsNoSize = [
            progressbar.FormatCustomText("Downloading: {:25.25}".format(
                os.path.basename(fileName))), " ",
            progressbar.AnimatedMarker(markers='|/-\\'), " ",
            progressbar.DataSize()
            # " "*filler
        ]
        widgets = [
            progressbar.FormatCustomText("Downloading: {:25.25}".format(
                os.path.basename(fileName))),
            " ",
            progressbar.Percentage(),
            " ",
            progressbar.Bar(fill=chr(9617),
                            marker=chr(9608),
                            left="[",
                            right="]"),
            " ",
            progressbar.DataSize(),
            "/",
            progressbar.DataSize(variable="max_value"),
            " |",
            progressbar.AdaptiveTransferSpeed(),
            " | ",
            progressbar.ETA(),
            # " "*filler
        ]
        pbar = None
        if size is None:
            pbar = progressbar.ProgressBar(widgets=widgetsNoSize,
                                           max_value=progressbar.UnknownLength)
        else:
            pbar = progressbar.ProgressBar(widgets=widgets, max_value=size)

        if outputFileName is not None:
            fileName = outputFileName
        fullOutputPath = os.path.join(outputPath, fileName)

        updateSize = 0

        if isinstance(pbar.max_value, int):
            updateSize = pbar.max_value if pbar.max_value < 1024 else 1024

        if bytes:
            output = b""
            bytesrecv = 0
            pbar.start()
            for buffer in req.iter_content(chunk_size=1024):
                if buffer:
                    output += buffer
                if compressed:
                    pbar.update(updateSize)
                else:
                    pbar.update(bytesrecv)
                bytesrecv += len(buffer)
            pbar.finish()
            return output
        else:
            with open(fullOutputPath, "wb") as file:
                bytesrecv = 0
                pbar.start()
                for buffer in req.iter_content(chunk_size=1024):
                    if buffer:
                        file.write(buffer)
                        file.flush()
                    if compressed:
                        pbar.update(updateSize)
                    else:
                        pbar.update(bytesrecv)
                    bytesrecv += len(buffer)
                pbar.finish()

                return fullOutputPath
Example #16
    def save_continue_progress(self, info, videofilename, url):

        dl = True
        path01 = os.getcwd()
        while_counter = 0
        slice = 1024 * 1  # chunk size in bytes for reading the video stream

        while dl:
            iterator = 0
            clist = []
            piece_count = 0
            print('Reading data.........')

            with open(videofilename + '.mp4', 'wb') as f:
                try:
                    print('start')

                    r = requests.get(url, stream=True, timeout=15)
                    file_size = int(r.headers['Content-Length'])
                    size = str(file_size)
                    piece = int(int(size) / 1024)
                    print('piece:{}'.format(piece))

                    bar = progressbar.ProgressBar(
                        max_value=file_size,
                        widgets=[
                            progressbar.Percentage(),
                            ' : ',
                            progressbar.Counter(),
                            ' of ',
                            progressbar.FormatCustomText(size),
                            ' bytes',
                            ' [',
                            progressbar.AdaptiveTransferSpeed(),
                            '] ',
                            # progressbar.ETA(),
                            # ' [',progressbar.Timer(), '] ',
                            progressbar.Bar(marker='■'),
                        ]).start()

                    for chunk in r.iter_content(chunk_size=slice):
                        try:

                            def gen():
                                yield chunk

                            ge = gen()
                            if len(chunk) == 1024:
                                f.write(ge.__next__())

                                clist.append(len(chunk))

                                # f.write(chunk)
                                iterator += slice
                                bar.update(iterator)
                                piece_count += 1
                                if int(piece_count) == int(piece):
                                    dl = False
                                    break
                            elif int(piece_count) == int(piece):
                                dl = False
                                break
                            else:
                                print('piece_count :{}'.format(piece_count))
                                print(' Less than 1 MB, re-fetching')
                                f.write(ge.__next__())

                        except Exception:
                            print('Ended up here somehow (utter confusion)')

                except requests.exceptions.ConnectionError:
                    print('\n  Connection dropped ')
                    dl = True
                    while_counter += 1
                    if while_counter == 10:
                        print('Give up...')
                        dl = False
                except Exception:
                    print('Utter confusion')

                finally:
                    if dl:
                        print(piece_count)
                        print(' ======Disconnect ======')
                        print('  Close file ;(')
                        os.remove(self.ori_path + '/' + 'dannyTumblr' + '/' +
                                  'Download' + '/' + info + '/' + 'Videos')
                    else:
                        print('  Download Done')
                        print('  Close file :>')
                        # print(clist)
                        for i in clist:
                            if i < 1024:
                                print(i)
                        print('Video total: {} chunks'.format(len(clist)))
                    f.close()
        return
Example #17
def train(model,
          criterion,
          optimizer,
          dataloaders,
          scheduler=NoneScheduler(None),
          epoch=100,
          device=torch.device('cuda:0'),
          l2=0.0,
          metrics=(mm.Loss(), mm.CIndexForSlide()),
          standard_metric_index=1,
          clip_grad=False):
    best_model_wts = copy.deepcopy(model.state_dict())
    best_metric = 0.0
    best_metric_name = metrics[standard_metric_index].__class__.__name__ + \
        '_valid'
    history = {
        m.__class__.__name__ + p: []
        for p in ['_train', '_valid'] for m in metrics
    }
    model.to(device)

    for e in range(epoch):
        for phase in ['train', 'valid']:
            if phase == 'train':
                scheduler.step()
                model.train()
                prefix = "Train: "
            else:
                model.eval()
                prefix = "Valid: "
            # progressbar
            format_custom_text = pb.FormatCustomText('Loss: %(loss).4f',
                                                     dict(loss=0.))
            widgets = [
                prefix, " ",
                pb.Counter(), ' ',
                pb.Bar(), ' ',
                pb.Timer(), ' ',
                pb.AdaptiveETA(), ' ', format_custom_text
            ]
            iterator = pb.progressbar(dataloaders[phase], widgets=widgets)

            for m in metrics:
                m.reset()
            for batch_x, batch_y, (batch_ids, batch_files) in iterator:
                batch_x = batch_x.to(device)
                batch_y = batch_y.to(device)
                optimizer.zero_grad()
                with torch.set_grad_enabled(phase == 'train'):
                    logit = model(batch_x)
                    loss = criterion(logit, batch_y)
                    # apply L2 regularization to the weights only
                    if l2 > 0.0:
                        for p_n, p_v in model.named_parameters():
                            if p_n == 'weight':
                                loss += l2 * p_v.norm()
                    if phase == 'train':
                        loss.backward()
                        if clip_grad:
                            nn.utils.clip_grad_norm_(model.parameters(),
                                                     max_norm=1)
                        optimizer.step()
                with torch.no_grad():
                    for m in metrics:
                        if isinstance(m, mm.Loss):
                            m.add(loss.cpu().item(), batch_x.size(0))
                            format_custom_text.update_mapping(loss=m.value())
                        else:
                            m.add(logit.squeeze(), batch_y, batch_ids)

            for m in metrics:
                history[m.__class__.__name__ + '_' + phase].append(m.value())
            print("Epoch: %d, Phase:%s, " % (e, phase) + ", ".join([
                '%s: %.4f' % (m.__class__.__name__,
                              history[m.__class__.__name__ + '_' + phase][-1])
                for m in metrics
            ]))

            if phase == 'valid':
                epoch_metric = history[best_metric_name][-1]
                if epoch_metric > best_metric:
                    best_metric = epoch_metric
                    best_model_wts = copy.deepcopy(model.state_dict())

    print("Best metric: %.4f" % best_metric)
    model.load_state_dict(best_model_wts)
    return model, history
Example #18
 def default_widgets(self):
   return [pb.FormatCustomText(format=self.message),
           pb.Percentage(**self.widget_kwargs),
           pb.Bar(**self.widget_kwargs),
           pb.AdaptiveETA(**self.widget_kwargs)]
Example #19
    def _cli_packages_add_category(self, category: str):
        Packages.logger.info("Getting package info from www.whdload.de ...")

        all_packages = self._get_package_list(category, True)

        dupes = set(x for x in all_packages for y in self.data
                    if x == y.list_info)
        changed = set(x for x in all_packages for y in self.data
                      if x[1] == y.list_info[1] and x[3] != y.list_info[3])
        new_packages = sorted(all_packages.difference(dupes),
                              key=lambda x: x[0])

        if len(dupes) > 0:
            if len(dupes) == 1:
                package_text = "package"
            else:
                package_text = "packages"
            Packages.logger.info(
                f"Skipping {len(dupes)} duplicate {package_text}.")

        if len(changed) > 0:
            del_indexes = [
                i for x in changed for i in range(len(self.data))
                if x[1] == self.data[i].list_info[1]
            ]
            for i in sorted(set(del_indexes), reverse=True):
                del self.data[i]

            if len(changed) == 1:
                package_text = "package"
            else:
                package_text = "packages"
            Packages.logger.info(
                f"Removed {len(changed)} outdated {package_text} from WHDLoad Package data file."
            )

        if len(new_packages) > 0:
            if len(new_packages) == 1:
                package_text = "package"
            else:
                package_text = "packages"
            Packages.logger.info(
                f"Adding {len(new_packages)} {package_text} to WHDLoad Package data file."
            )

            if shutil.get_terminal_size().columns >= 120:
                bar_name_width = 35
                bar_name_format = progressbar.FormatCustomText(
                    "%(bar_name)s", {"bar_name": repeat(" ", bar_name_width)})
                bar_widgets = [
                    progressbar.Percentage(), " ",
                    progressbar.Bar(left="[", right="]", fill="."), " ",
                    progressbar.Counter("%(value)5d"),
                    f"/{len(new_packages):d}", " | ",
                    progressbar.Timer(format="Elapsed: %(elapsed)s"), " | ",
                    bar_name_format, " "
                ]
            else:
                bar_name_width = 20
                bar_name_format = progressbar.FormatCustomText(
                    "%(bar_name)s", {"bar_name": repeat(" ", bar_name_width)})
                bar_widgets = [
                    progressbar.Percentage(), " ",
                    progressbar.Bar(left="[", right="]", fill="."), " ",
                    progressbar.Timer(format="%(elapsed)s"), " | ",
                    bar_name_format, " "
                ]
            bar = progressbar.ProgressBar(widgets=bar_widgets,
                                          min_value=0,
                                          max_value=len(new_packages))

            bar_name = pad(truncate("Starting ...", bar_name_width),
                           bar_name_width, Align.Left.value)
            bar_name_format.update_mapping(bar_name=bar_name)
            bar.update(bar.min_value)

            for i in range(len(new_packages)):
                name = new_packages[i][0]
                bar_name = pad(truncate(f"{name}", bar_name_width),
                               bar_name_width, Align.Left.value)
                bar_name_format.update_mapping(bar_name=bar_name)
                DataSet.logger.info(f"Getting info for '{name}'")
                bar.update(i + 1)
                self._add_package(new_packages[i])
                self.save()
            bar_name = pad(truncate("Finished!", bar_name_width),
                           bar_name_width, Align.Left.value)
            bar_name_format.update_mapping(bar_name=bar_name)
            DataSet.logger.info("Finished!")
            bar.update(bar.max_value)
        else:
            Packages.logger.info(
                "No packages to add to WHDLoad Package data file.")
Example #20
#####################################
def custom_progress_text(message):
    import progressbar
    from string import Formatter

    message_ = message.replace('(', '{')
    message_ = message_.replace(')', '}')

    keys = [key[1] for key in Formatter().parse(message_)]

    ids = {}
    for key in keys:
        if key is not None:
            ids[key] = float('nan')

    msg = progressbar.FormatCustomText(message, ids)
    return msg

def create_progress_bar(text=None):
    import progressbar
    if text is None:
        text = progressbar.FormatCustomText('')
    bar = progressbar.ProgressBar(widgets=[
        progressbar.Percentage(),
        progressbar.Bar(),
        progressbar.AdaptiveETA(), '  ',
        text,
    ])
    return bar

def display_model(model):
Example #21
 def track(self,
           files=None,
           startfile=None,
           endfile=None,
           preprocess=True,
           write=True,
           track=True,
           postprocess=True):
     self.progress = 0
     if files is None:
         files = ['generator'] + self.lines if hasattr(
             self, 'generator') else self.lines
     if startfile is not None and startfile in files:
         index = files.index(startfile)
         files = files[index:]
     if endfile is not None and endfile in files:
         index = files.index(endfile)
         files = files[:index + 1]
     if self.verbose:
         format_custom_text = progressbar.FormatCustomText(
             'File: %(running)s', {'running': ''})
         bar = progressbar.ProgressBar(widgets=[
             format_custom_text,
             progressbar.Percentage(),
             progressbar.Bar(),
             progressbar.Percentage(),
         ],
                                       max_value=len(files))
         format_custom_text.update_mapping(running=files[0] + '  ')
         for i in bar(list(range(len(files)))):
             l = files[i]
             self.progress = 100. * (i + 1) / len(files)
             if l == 'generator' and hasattr(self, 'generator'):
                 format_custom_text.update_mapping(running='Generator  ')
                 if write:
                     self.generator.write()
                 if track:
                     self.generator.run()
                 if postprocess:
                     self.generator.astra_to_hdf5()
             else:
                 if i == (len(files) - 1):
                     format_custom_text.update_mapping(running='Finished')
                 else:
                     format_custom_text.update_mapping(
                         running=files[i + 1] + '  ')
                 if preprocess:
                     self.latticeObjects[l].preProcess()
                 if write:
                     self.latticeObjects[l].write()
                 if track:
                     self.latticeObjects[l].run()
                 if postprocess:
                     self.latticeObjects[l].postProcess()
     else:
         for i in range(len(files)):
             l = files[i]
             self.progress = 100. * (i) / len(files)
             if l == 'generator' and hasattr(self, 'generator'):
                 if write:
                     self.generator.write()
                 self.progress = 100. * (i + 0.33) / len(files)
                 if track:
                     self.generator.run()
                 self.progress = 100. * (i + 0.66) / len(files)
                 if postprocess:
                     self.generator.astra_to_hdf5()
             else:
                 if preprocess:
                     self.latticeObjects[l].preProcess()
                 self.progress = 100. * (i + 0.25) / len(files)
                 if write:
                     self.latticeObjects[l].write()
                 self.progress = 100. * (i + 0.5) / len(files)
                 if track:
                     self.latticeObjects[l].run()
                 self.progress = 100. * (i + 0.75) / len(files)
                 if postprocess:
                     self.latticeObjects[l].postProcess()
         self.progress = 100
Example #22
# fetch issue data from the database to render into an HTML file [Images]

import sqlite3
import os
import sys
import query_handler
import urllib
import urllib.request
import time
import progressbar
format_custom_text = progressbar.FormatCustomText(
    ' Checking or downloading: %(current)d out of %(images)d Images',
    dict(
        images=0,
        current=0,
    ),
)
global bar
bar = 0


def startBar(maxval):
    global bar
    bar = progressbar.ProgressBar(
        widgets=[
            progressbar.SimpleProgress(),
            format_custom_text,
            ' :: ',
            progressbar.Bar('█'),
            ' ',
            progressbar.ETA(),
Example #23
# download and decrypt PDF file
import urllib
import urllib.request
import sys
import os
import base64decoding
import PDFdecrypt
import time
import progressbar
global bar, downloaded
bar = 0
downloaded = 0
format_custom_text = progressbar.FormatCustomText(
    ' Downloading: %(current)d Bytes of %(total)d Bytes',
    dict(
        total=0,
        current=0,
    ),
)


def startBar(maxval):
    global bar
    bar = progressbar.ProgressBar(
        widgets=[
            progressbar.SimpleProgress(),
            format_custom_text,
            ' :: ',
            progressbar.Bar('█'),
            ' ',
            progressbar.ETA(),
Example #24
 def train_sparse_dictionary1(self, data, sp_opt_max_iter=200,
                             init_traindata_num=200, incr_rate=2,
                             min_iterations=3, init_codebook_comps=None,
                             debug=False):
     '''
     <data> is a numpy array, holding all the features(of single kind) that
     are required to train the sparse dictionary, with dimensions
     [n_features, n_samples]. The sparse dictionary is trained with a random
     subset of <data>, which is increasing in each iteration with rate
     <incr_rate> , along with the max iterations <sp_opt_max_iter> of feature
     sign search algorithm. <min_iterations> is the least number of
     iterations of the dictionary training, after total data is processed.
     '''
     self.sparse_dim = min(data.shape) * self.sparse_dim_rat
     self.flush_variables()
     try:
         import progressbar
     except ImportError:
         LOG.warning('Install module progressbar2 to get informed about the'
                     ' feature sign search algorithm progress')
     self.initialize(data.shape[0], init_codebook_comps=init_codebook_comps)
     iter_count = 0
     retry_count = 0
     LOG.info('Training dictionary: ' + self.name)
     LOG.info('Minimum Epochs number after total data is processed:' + str(min_iterations))
     reached_traindata_num = False
     reached_traindata_count = 0
     computed = data.shape[1] * [None]
     retry = False
     lar_approx = False
     while True:
         LOG.info('Epoch: ' + str(iter_count))
         loaded = False
         self.sparse_feat_list = None
         self.inp_feat_list = None
         if debug and iter_count == 0:
             LOG.warning('Debug is on, loading data from first FSS execution')
             try:
                 with open(self.name + ' debug_sparse.pkl', 'rb') as inp:
                     (self.codebook_comps,
                      self.sparse_feat_list,
                      self.are_sparsecoded_inp) = pickle.load(inp)
                     loaded = True
             except (IOError, EOFError):
                 LOG.warning('Nonexistent ' + self.name
                             + ' debug_sparse.pkl')
         if not loaded:
             train_num = min(int(init_traindata_num *
                                 (incr_rate) ** iter_count),
                             data.shape[1])
             if train_num == data.shape[1] and not reached_traindata_num:
                 reached_traindata_num = True
                 LOG.info('Total data is processed')
             if reached_traindata_num:
                 reached_traindata_count += 1
             LOG.info('Number of samples used: ' + str(train_num))
             ran = rand.sample(range(data.shape[1]), train_num)
             feat_sign_max_iter = min(1000,
                                      sp_opt_max_iter * incr_rate ** iter_count)
             LOG.info('Feature Sign Search maximum iterations allowed:'
                      + str(feat_sign_max_iter))
             try:
                 format_custom_text = progressbar.FormatCustomText(
                     'Mean Initial Error: %(mean_init_energy).4f,'
                     ' Mean Final Error: %(mean).4f, Valid Samples Ratio: %(valid).2f',
                     dict(
                         mean_init_energy=0,
                         mean=0,
                         valid=0,
                     ),
                 )
                 pbar = progressbar.ProgressBar(
                     max_value=train_num - 1,
                     redirect_stdout=True,
                     widgets=[progressbar.widgets.Percentage(),
                              progressbar.widgets.Bar(),
                              format_custom_text])
                 errors = True
                 sum_error = 0
                 sum_energy = 0
             except UnboundLocalError:
                 pbar = None
                 errors = False
             are_sparsecoded = []
             if pbar is not None:
                 iterat = pbar(enumerate(ran))
             else:
                 iterat = enumerate(ran)
             for count, sample_count in iterat:
                 fin_error, valid, init_energy = self.feature_sign_search_algorithm(
                     data[:, sample_count],
                     max_iter=feat_sign_max_iter,
                     ret_error=errors,training=True,
                     starting_points=computed[sample_count])
                 are_sparsecoded.append(True)
                 try:
                     if iter_count > 0 and valid:
                         #do not trust first iteration sparse features, before
                         #having trained the codebooks at least once
                         computed[sample_count] = self.sparse_feat_list[-1]
                 except (TypeError,AttributeError):
                     pass
                 if valid and pbar and errors:
                     sum_error += fin_error
                     mean_error = sum_error/float(sum(are_sparsecoded))
                     sum_energy += init_energy
                     mean_init_energy = sum_energy/float(sum(are_sparsecoded))
                 if pbar is not None:
                     format_custom_text.update_mapping(
                         mean_init_energy=mean_init_energy,
                         mean=mean_error,
                         valid=sum(are_sparsecoded) / float(len(are_sparsecoded)))
                 self.initialize(data.shape[0])
             self.inp_feat_list = np.transpose(np.array(self.inp_feat_list))
             self.sparse_feat_list = np.array(self.sparse_feat_list).T
             are_sparsecoded = np.array(
                 are_sparsecoded).astype(bool)
             retry = np.sum(are_sparsecoded) < 1 / 3.0 * (are_sparsecoded).size
             self.are_sparsecoded_inp = self.inp_feat_list[:, are_sparsecoded]
             if debug and iter_count == 0:
                 LOG.warning('Debug is on, saving debug_sparse.pkl')
                 with open(self.name + ' debug_sparse.pkl', 'wb') as out:
                     pickle.dump((self.codebook_comps,
                                  self.sparse_feat_list,
                                  self.are_sparsecoded_inp), out)
         prev_error = compute_lineq_error(self.are_sparsecoded_inp, self.codebook_comps,
             self.sparse_feat_list)
         if not lar_approx:
             dictionary = self.conj_grad_dict_compute()
             curr_error = compute_lineq_error(
                 self.are_sparsecoded_inp,
                 dictionary,
                 self.sparse_feat_list)
         LOG.info('Reconstruction Error: ' + str(curr_error))
         if loaded:
             mean_init_energy = 0
             mean_error = 0
         if curr_error > prev_error or mean_error>1000 or retry or lar_approx:
             if (prev_error > 100 or mean_error>1000
                 or retry or lar_approx):
                 if retry_count == 2 or lar_approx:
                     if iter_count != 0:
                         iter_count = 0
                         lar_approx = True
                         init_traindata_num = data.shape[1]
                         continue
                     LOG.warning('Training has high final error but' +
                                 ' reached maximum retries. No codebook can'
                                 + ' be produced with the fast method,'+
                                  ' using Lagrange Dual, as input'+
                                 ' sparsecoded data S is'
                                 +' ill-conditioned (too low' +
                                 ' rank of the STS).'+
                                  ' Least Angle Regression Method '+
                                 ' will be used')
                     self.codebook_comps = DictionaryLearning(
                         self.sparse_dim,
                         fit_algorithm='lars',
                         code_init=self.inp_feat_list.T).fit(
                             self.are_sparsecoded_inp.T).components_.T
                     curr_error = compute_lineq_error(
                                    self.are_sparsecoded_inp,
                                    self.codebook_comps,
                                    self.sparse_feat_list)
                     LOG.info('Reconstruction Error using LARS: '
                              + str(curr_error))
                     if curr_error > 1000:
                         LOG.info('LARS method did not converge,' +
                                  ' no codebook is produced.')
                         self.is_trained = False
                         self.codebook_comps = None
                     else:
                         break
                 LOG.warning('Training of codebook ' + self.name + ' completed with no success,'+
                             ' reinitializing (Retry:' + str(retry_count + 1) + ')')
                 self.flush_variables()
                 self.initialize(data.shape[0])
                 computed = data.shape[1] * [None]
                 retry_count += 1
                 iter_count = -1
                 reached_traindata_count = 0
                 reached_traindata_num = False
             elif (np.isclose(prev_error,curr_error,atol=0.1)
                   and reached_traindata_num and
                   reached_traindata_count > min_iterations):
                 break
         if curr_error < 0.5 and reached_traindata_num:
             break
         if (reached_traindata_num and
             reached_traindata_count > min_iterations and
             iter_count >= 0):
                 break
         iter_count += 1
         self.codebook_comps = dictionary
     self.inp_feat_list = None
     self.sparse_feat_list = None
     self.is_trained = True
Example #25
# decrypt and save PDF file
from PyPDF2 import PdfFileReader, PdfFileWriter
import time
import progressbar

global bar
bar = 0

format_custom_text = progressbar.FormatCustomText(
    ' Decrypting: %(current)d out of %(total)d Pages',
    dict(
        total=0,
        current=0,
    ),
)

def startBar(maxval):
    global bar
    bar = progressbar.ProgressBar(
        widgets=[
            progressbar.SimpleProgress(),
            format_custom_text,
            ' :: ',
            progressbar.Bar('█'),
            ' ',
            progressbar.ETA(),
            ' ',
        ],
        max_value=maxval,
    ).start()

def progressCheck(current, total):
    format_custom_text.update_mapping(current=current, total=total)
    bar.update(current)
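
A hypothetical caller (not in the original source), decrypting a ten-page PDF:

startBar(10)  # assumes a 10-page document
for page in range(1, 11):
    progressCheck(page, 10)
bar.finish()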
Example #26
# net = net.cuda()
optimizer = Adam(params=net.parameters(), lr=0.001)

# loss
loss = nn.NLLLoss()
batch_number = len(loader)
num_epochs = 500
logging_step = 50
logging_text_step = 1000
widgets = [
    progressbar.DynamicMessage("epoch"),
    ' ',
    'Batch: ',
    progressbar.Counter(),
    '/',
    progressbar.FormatCustomText('%(total)s', {"total": batch_number}),
    ' ',
    progressbar.Bar(marker="-", left='[', right=']'),
    ' ',
    progressbar.ETA(),
    ' ',
    progressbar.DynamicMessage('loss'),
    ' ',
    progressbar.DynamicMessage("accuracy"),
]

for i in range(num_epochs):
    progress = progressbar.ProgressBar(min_value=0,
                                       max_value=batch_number,
                                       initial_value=0,
                                       widgets=widgets).start()
Example #27
def train(model,
          criterion,
          optimizer,
          dataloaders,
          scheduler=NoneScheduler(None),
          epoch=100,
          device=torch.device('cuda:0'),
          l2=0.0,
          metrics=(mm.Loss(), ),
          standard_metric_index=1,
          clip_grad=False,
          weighter_multipler=1.0):
    weighter = Weighter(dataloaders['train'].dataset,
                        device,
                        multipler=weighter_multipler)
    # variables for tracking the best model
    best_model_wts = copy.deepcopy(model.state_dict())
    best_metric = 0.0
    best_metric_name = metrics[standard_metric_index].__class__.__name__ + \
        '_valid'
    best_weighter = copy.deepcopy(weighter)  # weighter matching the best model
    # dict for recording metrics over the course of training
    history = {
        m.__class__.__name__ + p: []
        for p in ['_train', '_valid'] for m in metrics
    }
    model.to(device)

    for e in range(epoch):
        for phase in ['train', 'valid']:
            if phase == 'train':
                scheduler.step()
                model.train()
                prefix = "Train: "
            else:
                model.eval()
                prefix = "Valid: "
            # progressbar
            format_custom_text = pb.FormatCustomText('Loss: %(loss).4f',
                                                     dict(loss=0.))
            widgets = [
                prefix, " ",
                pb.Percentage(), ' ',
                pb.SimpleProgress(format='(%s)' %
                                  pb.SimpleProgress.DEFAULT_FORMAT), ' ',
                pb.Bar(), ' ',
                pb.Timer(), ' ',
                pb.AdaptiveETA(), ' ', format_custom_text
            ]
            iterator = pb.progressbar(dataloaders[phase], widgets=widgets)

            for m in metrics:
                m.reset()
            for batch_x, batch_y, bag_ids, inst_ids in iterator:
                batch_x = batch_x.to(device)
                batch_y = batch_y.to(device)
                optimizer.zero_grad()
                with torch.set_grad_enabled(phase == 'train'):
                    proba = model(batch_x).squeeze()  # the model must output probabilities
                    # compute a weight for each sample
                    w = weighter(proba, batch_y, bag_ids, inst_ids)
                    # this criterion must not apply any reduction
                    loss_es = criterion(proba, batch_y.float())
                    # apply the computed weights
                    loss = (loss_es * w).mean()
                    # apply L2 regularization to the weights only
                    if l2 > 0.0:
                        for p_n, p_v in model.named_parameters():
                            if p_n == 'weight':
                                loss += l2 * p_v.norm()
                    if phase == 'train':
                        loss.backward()
                        if clip_grad:
                            nn.utils.clip_grad_norm_(model.parameters(),
                                                     max_norm=1)
                        optimizer.step()
                with torch.no_grad():
                    for m in metrics:
                        if isinstance(m, mm.Loss):
                            m.add(loss.cpu().item(), batch_x.size(0))
                            format_custom_text.update_mapping(loss=m.value())
                        else:
                            m.add(proba.squeeze(), batch_y, bag_ids)

            for m in metrics:
                history[m.__class__.__name__ + '_' + phase].append(m.value())
            print("Epoch: %d, Phase:%s, " % (e, phase) + ", ".join([
                '%s: %.4f' % (m.__class__.__name__,
                              history[m.__class__.__name__ + '_' + phase][-1])
                for m in metrics
            ]))

            if phase == 'valid':
                epoch_metric = history[best_metric_name][-1]
                if epoch_metric > best_metric:
                    best_metric = epoch_metric
                    best_model_wts = copy.deepcopy(model.state_dict())
                    best_weighter = copy.deepcopy(weighter)

    print("Best metric: %.4f" % best_metric)
    model.load_state_dict(best_model_wts)
    return model, history, best_weighter