Example #1
    def _init_pbar(self, ini_val, max_val, label):
        self._pbar = progressbar.ProgressBar(min_value=0,
                                             max_value=max_val,
                                             initial_value=ini_val,
                                             widgets=[
                                                 label,
                                                 progressbar.Percentage(),
                                                 '(',
                                                 progressbar.SimpleProgress(),
                                                 ')',
                                                 progressbar.Bar(),
                                                 progressbar.Timer(),
                                                 ' ',
                                                 '|',
                                                 progressbar.ETA(),
                                             ])
        self._pbar.start()
Example #2
def validation(sess, model, config):
    model.purpose = 1
    right_num = 0
    valid_loss = 0
    VALID_SIZE = config["VALID_SIZE"]
    batch_size = config["batch_size"]

    valid_tfrecords_file_path = config["valid_file_path"]
    iterator = read_data(valid_tfrecords_file_path, batch_size, dataset_type=1)
    next_batch = iterator.get_next()
    sess.run(iterator.initializer)
    # Display a live progress bar
    widgets = [
        'valid: ',
        progressbar.Counter(), ':{} '.format(VALID_SIZE // batch_size),
        progressbar.Percentage(), ' ',
        progressbar.Bar('#'), ' ',
        progressbar.Timer(), ' ',
        progressbar.ETA(), ' '
    ]
    bar = progressbar.ProgressBar(maxval=VALID_SIZE // batch_size,
                                  widgets=widgets)

    print('*' * 66)
    bar.start()
    for step in range(VALID_SIZE // batch_size):
        valid_image, valid_label, valid_level = sess.run(next_batch)
        batch_loss, batch_accuracy = sess.run(
            [model.loss, model.accuracy],
            feed_dict={
                model.image_input: valid_image,
                model.level_output: valid_level,
                model.label_output: valid_label
            })
        right_num += int(batch_size * batch_accuracy)
        valid_loss += batch_loss
        bar.update(step)
    bar.finish()

    # Compute the current network's accuracy and loss on the validation set
    valid_accuracy = right_num / VALID_SIZE
    valid_loss = valid_loss / (VALID_SIZE // batch_size)

    # Reset the network back to training mode
    model.purpose = 0
    return valid_accuracy, valid_loss
Example #3
def show_progress(block_num, block_size, total_size):
    global pbar
    if pbar is None:
        if total_size > 0:
            prefixes = ('', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi')
            power = min(int(math.log(total_size, 2) / 10), len(prefixes) - 1)
            scaled = float(total_size) / (2**(10 * power))
            total_size_str = '{:.1f} {}B'.format(scaled, prefixes[power])
            try:
                # Requires `import sys`: fall back to an ASCII marker when the
                # output encoding cannot represent the block character.
                '█'.encode(sys.stdout.encoding or 'ascii')
                marker = '█'
            except (UnicodeEncodeError, LookupError):
                marker = '*'
            widgets = [
                progressbar.Percentage(),
                ' ',
                progressbar.DataSize(),
                ' / ',
                total_size_str,
                ' ',
                progressbar.Bar(marker=marker),
                ' ',
                progressbar.ETA(),
                ' ',
                progressbar.AdaptiveTransferSpeed(),
            ]
            pbar = progressbar.ProgressBar(widgets=widgets,
                                           max_value=total_size)
        else:
            widgets = [
                progressbar.DataSize(),
                ' ',
                progressbar.Bar(marker=progressbar.RotatingMarker()),
                ' ',
                progressbar.Timer(),
                ' ',
                progressbar.AdaptiveTransferSpeed(),
            ]
            pbar = progressbar.ProgressBar(widgets=widgets,
                                           max_value=progressbar.UnknownLength)

    downloaded = block_num * block_size
    if downloaded < total_size:
        pbar.update(downloaded)
    else:
        pbar.finish()
        pbar = None
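A minimal usage sketch (the URL and filename are placeholders): show_progress matches the reporthook signature of urllib.request.urlretrieve, so it can be passed in directly; math and progressbar are assumed to be imported alongside the function above.

import urllib.request

pbar = None  # module-level handle read and reset by show_progress
url = 'https://example.com/archive.tar.gz'  # placeholder
urllib.request.urlretrieve(url, 'archive.tar.gz', reporthook=show_progress)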
Example #4
def score_docs(csv, dic_type, num_docs):
    '''Wrapper function that runs preprocessing and dictionary scoring.
    dic_type specifies the dictionary with which the documents should be scored.
    Accepted values are: emfd, mfd, mfd2.'''

    nlp = spacy.load('en', disable=['ner', 'parser', 'tagger'])
    nlp.add_pipe(tokenizer, name="mfd_tokenizer")

    if dic_type == 'emfd':
        nlp.add_pipe(score_emfd, name="score_emfd", last=True)
    elif dic_type == 'mfd':
        nlp.add_pipe(score_mfd, name="score_mfd", last=True)
    elif dic_type == 'mfd2':
        nlp.add_pipe(score_mfd2, name="score_mfd2", last=True)
    else:
        print(
            'Dictionary type not recognized. Available values are: emfd, mfd, mfd2'
        )
        return

    scored_docs = []
    widgets = [
        'Processed: ',
        progressbar.Counter(),
        ' ',
        progressbar.Percentage(),
        ' ',
        progressbar.Bar(marker='❤'),
        ' ',
        progressbar.Timer(),
        ' ',
        progressbar.ETA(),
    ]

    with progressbar.ProgressBar(max_value=num_docs, widgets=widgets) as bar:
        for i, row in csv[0].iteritems():
            scored_docs.append(nlp(row))
            bar.update(i)

    df = pd.DataFrame(scored_docs)

    if dic_type == 'emfd':
        df['f_var'] = df[probabilites].var(axis=1)
        df['sent_var'] = df[senti].var(axis=1)

    return df
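A hedged call sketch (the CSV path is a placeholder): the function expects a pandas object whose column 0 holds the raw documents, matching the csv[0].iteritems() call above.

docs = pd.read_csv('documents.csv', header=None)  # placeholder path
scored = score_docs(docs, 'emfd', num_docs=len(docs))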
Example #5
def get_training_progressbar_fn(n_steps, loss_history, opt):
    widgets = [
        progressbar.Bar(),
        progressbar.Percentage(), ' |',
        progressbar.Timer(), '|',
        progressbar.AdaptiveETA(), '|',
        progressbar.Variable('loss', width=6, precision=4), ', ',
        progressbar.Variable('lr', width=8, precision=3)
    ]
    bar = progressbar.ProgressBar(max_value=n_steps, widgets=widgets)

    def update_progressbar(i):
        loss = np.mean(loss_history[-50:])
        lr = float(opt._decayed_lr(tf.float32))
        bar.update(i+1, loss=loss, lr=lr)

    return update_progressbar
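A hypothetical training-loop usage; n_steps, loss_history and opt are the caller's own objects, and train_step is a placeholder for one optimization step.

update_progressbar = get_training_progressbar_fn(n_steps, loss_history, opt)
for step in range(n_steps):
    loss_history.append(train_step())  # placeholder: returns the batch loss
    update_progressbar(step)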
Example #6
    def calculate_limit(self, ll):
        num_steps = 100
        bar = progressbar.ProgressBar(maxval=num_steps,
                                      widgets=[
                                          ' [',
                                          progressbar.Timer(),
                                          '] ',
                                          progressbar.Bar(),
                                          ' (',
                                          progressbar.ETA(),
                                          ') ',
                                      ])
        TS = []
        result = ll.fit()
        bar.start()
        bar.update(0)
        if self.DM_process == "annihilation":
            best_fit = ll.likelihood_model.M49.spectrum.main.DMAnnihilationFlux.sigmav.value
        else:
            best_fit = ll.likelihood_model.M49.spectrum.main.DMDecayFlux.tau.value
        vals = np.logspace(
            np.log10(best_fit) - 1.0,
            np.log10(best_fit) + 1.0, num_steps)
        for ival, val in enumerate(vals):
            if self.DM_process == "annihilation":
                ll.likelihood_model.M49.spectrum.main.DMAnnihilationFlux.sigmav.value = val
            else:
                ll.likelihood_model.M49.spectrum.main.DMDecayFlux.tau.value = val
            curr_LL = ll.data_list.values()[0].inner_fit()
            TS.append(curr_LL)
            bar.update(ival)

        TS = np.array(TS)
        TS = -TS
        TS -= np.min(TS)
        plt.semilogx(vals, TS)
        plt.savefig("example_step1.pdf")
        plt.show()

        if self.DM_process == "annihilation":
            selected_indices = vals > best_fit
        else:
            selected_indices = vals < best_fit

        interpolator = interp1d(TS[selected_indices], vals[selected_indices])
        return (best_fit, interpolator(1.35))
Example #7
def progressbarize(iterable, progress=False):
    """Construct progressbar for loops if progressbar requested, otherwise return directly iterable.

    :param iterable: iterable to use
    :param progress: True to display a progress bar
    """
    if progress:
        # Cast to list because a generator has no length, which would prevent
        # ProgressBar from computing an overall ETA.
        return progressbar.ProgressBar(widgets=[
            progressbar.Timer(), ', ',
            progressbar.Percentage(), ', ',
            progressbar.SimpleProgress(), ', ',
            progressbar.ETA()
        ])(list(iterable))

    return iterable
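Hypothetical usage: the wrapper is transparent, so the loop body stays the same whether or not a bar is requested.

for item in progressbarize(range(1000), progress=True):
    pass  # per-item work goes here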
Example #8
    def __init__(self, config):
        ### general setup
        self.cfg = config
        self.dataset = dataset_protocol[config.data.protocol](config)
        self.dataloader = torch.utils.data.DataLoader(
            self.dataset,
            batch_size=config.data.batch_size,
            num_workers=config.framework.num_thread,
        )
        widgets = [
            "Testing phase [",
            progressbar.SimpleProgress(),
            "] [",
            progressbar.Percentage(),
            "] ",
            progressbar.Bar(marker="█"),
            " (",
            progressbar.Timer(),
            " ",
            progressbar.ETA(),
            ") ",
        ]
        self.bar = progressbar.ProgressBar(
            max_value=config.data.batch_size, widgets=widgets, term_width=100
        )

        ### logging
        self.logger = open("{}/{}.json".format(config.base_dir, "test"), "w")

        ### model
        self.model = model_protocol[config.model.protocol](config)
        self.load_checkpoints()
        if config.framework.num_gpu > 0:
            self.model.to(device=0)
        self.model.eval()
        if config.model.backbone.startswith("d"):
            self.turn_on_dropout()

        ### visualization
        self.vis = Visualizer(
            cartpole_length=1.5,
            x_lim=(0.0, config.data.delta_t * config.data.num_datapoints_per_epoch),
            figsize=(6, 8),
            gt_title=config.test.gt_title,
            model_title=config.test.model_title,
        )
Example #9
def lowercase_corpus(path_in: pathlib.Path, path_out: pathlib.Path) -> None:
    u.assert_folder_is_readable(path_in)
    u.assert_folder_is_writable(path_out)
    i = 1
    widgets = [
        'Pre-Processing Document # ',
        pb.Counter(), ' ',
        pb.Timer(), ' ',
        pb.BouncingBar(marker='.', left='[', right=']')
    ]
    with pb.ProgressBar(widgets=widgets) as bar:
        for file_name in path_in.iterdir():
            if u.is_corpus_document(file_name):
                bar.update(i)
                i = i + 1
                sentences = __lowercase_document(file_name)
                u.write_document(path_out, file_name, sentences)
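A hedged call sketch with placeholder folders; both paths must pass the u.assert_* checks above.

lowercase_corpus(pathlib.Path('corpus/raw'), pathlib.Path('corpus/lower'))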
Example #10
def post_hm_setup(NUM_FRAMES):
    # Set a placeholder dictionary for our outputs.
    pose_frames = {'scores': [], 'keypoints': [], 'bbox': [], 'bscores': []}
    # Set up the command-line progress bar.
    bar = progressbar.ProgressBar(widgets=[
        'Pose ',
        progressbar.Percentage(), ' -- ',
        progressbar.FormatLabel('frame %(value)d'), '/',
        progressbar.FormatLabel('%(max)d'), ' [',
        progressbar.Timer(), '] ',
        progressbar.Bar(), ' (',
        progressbar.ETA(), ') '
    ],
                                  maxval=NUM_FRAMES)
    # Start the progress bar.
    bar.start()
    return pose_frames, bar
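A hypothetical per-frame loop using the returned dictionary and bar:

pose_frames, bar = post_hm_setup(NUM_FRAMES)
for frame_idx in range(NUM_FRAMES):
    # ... run pose estimation here and append results to pose_frames ...
    bar.update(frame_idx + 1)
bar.finish()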
Example #11
    def predict(self):
        with tf.Graph().as_default():
            var = self.add_model()

            saver = tf.train.Saver()

            # config = tf.ConfigProto(allow_soft_placement=True)
            # config.gpu_options.allow_growth = True
            # sess = tf.Session(config=config)

            sess = tf.Session()

            saver.restore(sess, self.args.model)
            logging.info("restore model from: {}".format(self.args.model))

            batch_size = 10000
            total_batch = int(np.ceil(len(self.test_data) / float(batch_size)))
            p = open(self.args.predict, "w")

            total_count = 0

            pbar = pb.ProgressBar(widgets=[
                "[TEST] ",
                pb.FileTransferSpeed(unit="batchs"),
                pb.Percentage(),
                pb.Bar(),
                pb.Timer(), " ",
                pb.ETA()
            ],
                                  maxval=total_batch).start()
            for i in xrange(total_batch):
                batchx = self.next_batch(batch_size, dtype="test")
                preds = sess.run(var['predict'],
                                 feed_dict={
                                     var['x']: batchx,
                                     var['keep_prob']: 1.0
                                 })
                fake_indices = range(0, 10000)
                answer = np.asarray([fake_indices, preds], dtype=int).T
                np.savetxt(self.args.predict,
                           answer,
                           fmt='%d',
                           header='id,label',
                           delimiter=",",
                           comments='')
                # Advance the bar once per processed batch.
                pbar.update(i + 1)
            pbar.finish()
Example #12
def from_sorted_setslist(setslist,
                         distfn,
                         args=(),
                         weight='weight',
                         verbose=False):
    if distfn is None:
        distfn = rtsp.metric.euclidean_fn
    set_sizes = [len(s) for s in setslist]
    num_sets = len(setslist)
    graph = nx.Graph()
    # Generate the list of nodes ids
    num_edges = 0
    start = 0
    sets = []
    for i, size in enumerate(set_sizes):
        stop = start + size
        sets.append(range(start, stop))
        start = stop
        if i < len(set_sizes) - 1:
            num_edges += set_sizes[i] * set_sizes[i + 1]
    # Configure the status bar
    if verbose:
        widgets = ['Populating graph edges: ', pbar.SimpleProgress()]
        widgets += [' ', pbar.Bar(), ' ', pbar.Timer()]
        bar = pbar.ProgressBar(widgets=widgets, maxval=num_edges).start()
        count = 0
    # Add nodes and edges
    for i in xrange(num_sets - 1):
        j = i + 1
        set_i_indices = range(set_sizes[i])
        set_j_indices = range(set_sizes[j])
        for k, l in itertools.product(set_i_indices, set_j_indices):
            if verbose:
                bar.update(count)
                count += 1
            x = setslist[i][k]
            y = setslist[j][l]
            u = sets[i][k]
            v = sets[j][l]
            graph.add_node(u, value=x)
            graph.add_node(v, value=y)
            graph.add_edge(u, v, weight=distfn(x, y, *args))
    if verbose:
        bar.finish()
    return graph, sets
Example #13
def monitor_stack_deployment(cf_session, stack_name):

    stack_in_progress = [
        'CREATE_IN_PROGRESS',
        'ROLLBACK_IN_PROGRESS',
        'DELETE_IN_PROGRESS',
        'UPDATE_IN_PROGRESS',
        'UPDATE_COMPLETE_CLEANUP_IN_PROGRESS',
        'UPDATE_ROLLBACK_IN_PROGRESS',
        'UPDATE_ROLLBACK_COMPLETE_CLEANUP_IN_PROGRESS',
        'REVIEW_IN_PROGRESS',
        'IMPORT_IN_PROGRESS',
        'IMPORT_ROLLBACK_IN_PROGRESS',
    ]

    stack_complete = [
        'CREATE_COMPLETE', 'ROLLBACK_COMPLETE', 'DELETE_COMPLETE',
        'UPDATE_COMPLETE', 'UPDATE_ROLLBACK_COMPLETE', 'IMPORT_COMPLETE',
        'IMPORT_ROLLBACK_COMPLETE'
    ]

    stack_failed = [
        'CREATE_FAILED',
        'ROLLBACK_FAILED',
        'DELETE_FAILED',
        'UPDATE_ROLLBACK_FAILED',
        'IMPORT_ROLLBACK_FAILED',
    ]

    widgets = [progressbar.BouncingBar(), ' ', progressbar.Timer()]
    bar = progressbar.ProgressBar(
        widgets=widgets,
        max_value=progressbar.UnknownLength,
        prefix='Waiting for stack deployment to complete ... ')
    progress_steps = 1
    done = False
    while not done:
        time.sleep(5)
        response = cf_session.describe_stacks(StackName=stack_name)
        bar.update(progress_steps)
        progress_steps += 1
        stack_status = response['Stacks'][0]['StackStatus']
        done = stack_status in stack_complete or stack_status in stack_failed
    bar.finish()
    return stack_status
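A hedged usage sketch assuming boto3; the stack name is a placeholder.

import boto3

cf = boto3.client('cloudformation')
status = monitor_stack_deployment(cf, 'my-stack')
print('Final stack status:', status)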
Example #14
def test_dynamic_message_widget():
    widgets = [
        ' [',
        progressbar.Timer(),
        '] ',
        progressbar.Bar(),
        ' (',
        progressbar.ETA(),
        ') ',
        progressbar.DynamicMessage('loss'),
    ]

    p = progressbar.ProgressBar(widgets=widgets, max_value=1000)
    p.start()
    for i in range(0, 200, 5):
        time.sleep(0.1)
        p.update(i + 1, loss=.5)
    p.finish()
Example #15
File: shared.py  Project: jg8610/jack
    def preprocess(self, questions: List[QASetting],
                   answers: Optional[List[List[Answer]]] = None,
                   is_eval: bool = False) -> List[XQAAnnotation]:

        if answers is None:
            answers = [None] * len(questions)
        preprocessed = []
        if len(questions) > 1000:
            bar = progressbar.ProgressBar(
                max_value=len(questions),
                widgets=[' [', progressbar.Timer(), '] ', progressbar.Bar(), ' (', progressbar.ETA(), ') '])
            for q, a in bar(zip(questions, answers)):
                preprocessed.append(self.preprocess_instance(q, a))
        else:
            for q, a in zip(questions, answers):
                preprocessed.append(self.preprocess_instance(q, a))

        return preprocessed
Example #16
def pbar_loss(n_epochs, loss_getter):

    widget_loss = LossWidget(loss_getter)

    widgets = [
        '[',
        widget_loss,
        ']',
        ' [',
        pbar.Timer(),
        '] ',
        pbar.Bar(),
        ' (',
        pbar.ETA(),
        ') ',
    ]

    return ProgressBar(widgets=widgets, max_value=n_epochs)
Example #17
def runner_parallel(inp):
    function, args, name, queue, order_idx = inp
    idx = int(multiprocessing.current_process().name.split("-")[1])
    custom_text = progressbar.FormatCustomText(
        '{} - %(type_op)s: '.format(name), dict(type_op="Start"))
    widget_style = [
        custom_text,
        progressbar.Percentage(), ' (',
        progressbar.SimpleProgress(), ')', ' ',
        progressbar.Bar(), ' ',
        progressbar.Timer(), ' ',
        progressbar.AdaptiveETA()
    ]
    args += [
        ProgressBar(widgets=widget_style, fd=Writer((0, idx - 1), queue)),
        custom_text
    ]
    return (function(*args), order_idx)
Example #18
def progress_iter(progress):
    """
    Initialize, start, and return a progress bar
    """
    widgets = [
        progressbar.Percentage(),
        " ",
        progressbar.Bar(),
        " ",
        progressbar.Timer(),
        " Returns: [",
        progressbar.Counter(),
        "/{}]".format(progress["minion_count"]),
    ]
    bar = progressbar.ProgressBar(widgets=widgets,
                                  maxval=progress["minion_count"])
    bar.start()
    return bar
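Hypothetical usage, assuming progress carries a 'minion_count' key as above:

bar = progress_iter({'minion_count': 10})
for returned in range(10):
    bar.update(returned + 1)  # one update per minion return
bar.finish()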
Example #19
    def train2(self, Ntrain):
        pbar = progressbar.ProgressBar(widgets=[
            ' [',
            progressbar.Timer(),
            '] ',
            progressbar.Bar(),
            ' (',
            progressbar.ETA(),
            ') ',
        ])
        for i in pbar(range(Ntrain)):
            self.a = 2.0 * (nr.random(self.Nvis) * 2.0 - 1.0)
            self.b = 1.0 * (nr.random(self.Nhid) * 2.0 - 1.0)
            self.W = 1.0 * (nr.random((self.Nvis, self.Nhid)) * 2.0 - 1.0)
            temp = self.Avg2()
            if temp < self.currE:
                self.currE = temp
            self.convTest.append(self.currE)
Example #20
        def __init__(self, size, name=None):
            if name is None:
                name = ""
            widgets = [
                ' [',
                str(name),
                _progressbar.Timer(),
                ', ',
                _progressbar.Counter(),
                '/%s' % size,
                '] ',
                _progressbar.Bar(),
                ' (',
                _progressbar.ETA(),
                ') ',
            ]

            self.bar = _progressbar.ProgressBar(maxval=size, widgets=widgets)
Example #21
    def wrapper(*args, **kwargs):
        if progressbars:
            widgets = [progressbar.AnimatedMarker(),
                       "  ",
                       progressbar.Timer()]
            bar = progressbar.ProgressBar(poll_interval=1, widgets=widgets)

        t = threading.Thread(target=func, args=args, kwargs=kwargs)
        t.start()

        if progressbars:
            while t.is_alive():
                bar.update()
                time.sleep(0.5)

        t.join()
        if progressbars:
            bar.finish()
Example #22
    def generate_embeddings(
            self,
            all_images: ImageGenerator) -> List[Embedding]:
        featurized_batches = cast(List[Embedding], [])
        clean_images = np.array(list(map(utils.fixed_standardize, all_images)))

        widgets = ['Encoding:', pb.Percentage(), ' ',
                   pb.Bar(), ' ', pb.ETA(), ' ', pb.Timer()]
        timer = pb.ProgressBar(
            widgets=widgets,
            max_value=clean_images.shape[0])
        for index in range(0, clean_images.shape[0], self.batch_size):
            end_index = min(index + self.batch_size, clean_images.shape[0])
            timer.update(end_index)
            batch = clean_images[index:end_index, :]
            featurized_batches += self.extract_batch(batch)
        timer.finish()
        return featurized_batches
Example #23
    def __init__(self, present="", past=None, max_value=1, vars=None,
                 **kwargs):

        self.present = present
        self.sub_bar = None
        self.finished = None

        if past is None:
            past = present

        self.msg_bar = MessageBar(
            msg=present, finish_msg="%s finished in" % past)
        widgets = [self.msg_bar, " "]

        if max_value is None:
            widgets.append(progressbar.Timer(format="%(elapsed)s"))
        else:
            widgets.append(progressbar.ETA(
                format="ETA: %(eta)s",
                format_finished="%(elapsed)s"))

        if vars is not None:
            self.var_vals = progressbar.FormatCustomText(
                " (" + ", ".join("%s: %%(%s)s" % (v, v) for v in vars) + ")",
                {v: "---" for v in vars})
            widgets.append(self.var_vals)
        else:
            self.var_vals = None

        def update_thread():
            while not self.finished:
                if self.sub_bar is None or self.sub_bar.finished:
                    self.update()
                time.sleep(0.001)

        self.thread = threading.Thread(target=update_thread)
        self.thread.daemon = True

        if max_value is None:
            max_value = progressbar.UnknownLength

        super(ProgressBar, self).__init__(
            poll_interval=0.1, widgets=widgets, fd=sys.stdout,
            max_value=max_value, **kwargs)
Example #24
def main():
    # This scraper is data specific.
    # Scrape all of the titles and abstracts and store the lines in text files.
    url = 'https://patents.google.com/patent/US'
    url2 = '/en?oq='

    widgets = [
        ' [',
        progressbar.Timer(),
        '] ',
        progressbar.Bar(),
        ' (',
        progressbar.ETA(),
        ') ',
    ]

    with open('../input_data/' + INPUT_FILE, 'r') as input_file:
        patent_ids = input_file.readlines()

    with open('../output_data/tmp/titles.txt', 'w') as output_file:
        with open('../output_data/tmp/abstracts.txt', 'w') as abstract_file:
            for id in progressbar.progressbar(patent_ids, widgets=widgets):
                raw_html = html_get(id, url, url2)
                if raw_html is not None:
                    html = BeautifulSoup(raw_html, 'html.parser')

                    spans = html.findAll('span', {'itemprop': 'title'})
                    title = ''
                    if spans:
                        title = spans[0].get_text()[:-1].strip()
                        if title[-1:] != '.':
                            title += '.'

                    divs = html.findAll('div', {'class': 'abstract'})
                    abstract = ''
                    if divs:
                        abstract += divs[0].get_text()[:-1]
                        if abstract[-1:] != '.':
                            abstract += '.'

                    if title != '':
                        output_file.write(title + '\n')
                    if abstract != '':
                        abstract_file.write(abstract + '\n')
Example #25
def extract(model, dataloaders):
    since = time.time()

    train_features = []
    val_features = []

    y_train = []
    y_val = []
    # Each epoch has a training and validation phase
    for phase in ['train', 'val']:

        widgets = [
            ' [',
            progressbar.Timer(), '] ',
            progressbar.Bar(), ' (',
            progressbar.ETA(), ') '
        ]
        pbar = progressbar.ProgressBar(maxval=len(dataloaders[phase]),
                                       widgets=widgets).start()
        # Iterate over data.
        i = 0
        for inputs, labels in dataloaders[phase]:
            i += 1

            pbar.update(i)
            inputs = inputs.to(device)
            #labels = labels.to(device)

            #cast input to float
            inputs = inputs.float()
            # zero the parameter gradients
            with torch.no_grad():
                # Extract the feature from the image
                feature = model(inputs)
                # Convert to NumPy Array, Reshape it, and save it to features variable
            if phase == "train":

                train_features.append(
                    feature.cpu().detach().numpy().reshape(-1))
                y_train.append(labels)
            elif phase == "val":
                val_features.append(feature.cpu().detach().numpy().reshape(-1))
                y_val.append(labels)
    return train_features, val_features, y_train, y_val
Example #26
    def extract_patch(self, image_files, patch_size, positive_overlap_thd,
                      negative_overlap_thd):

        bar = progressbar.ProgressBar(widgets=[
            ' [',
            progressbar.Timer(),
            '] ',
            progressbar.Bar(),
            ' (',
            progressbar.ETA(),
            ') ',
        ],
                                      maxval=len(image_files)).start()

        for i, image_file in enumerate(image_files):
            image = cv2.imread(image_file)

            # 1. detect regions
            candidate_regions = self._region_proposer.detect(image)
            candidate_patches = candidate_regions.get_patches(
                dst_size=patch_size)
            candidate_boxes = candidate_regions.get_boxes()

            # 2. load ground truth
            true_boxes, true_labels = self._annotator.get_boxes_and_labels(
                image_file)
            true_patches = rp.Regions(
                image, true_boxes).get_patches(dst_size=patch_size)

            # 3. calc overlap
            overlaps = self._overlap_calculator.calc_ious_per_truth(
                candidate_boxes, true_boxes)

            # 4. add patch to the samples
            self._select_positive_patch(candidate_patches, true_labels,
                                        overlaps, positive_overlap_thd)
            self._append_positive_patch(true_patches, true_labels)
            self._select_negative_patch(candidate_patches, overlaps,
                                        negative_overlap_thd)

            bar.update(i)
        bar.finish()

        return self._merge_sample()
Example #27
    def train(self):
        """Run optimization to train the model.
        """
        while self.epoch < self.max_epochs and self.epochs_no_improvement < self.max_epochs_no_improvement:

            print(f'Epoch {self.epoch:3d} | lr={self.optimizer.lr:4.5f}')

            # Progressbar
            widgets = [
                progressbar.FormatLabel(f'Epoch {self.epoch:3d} | Batch '),
                progressbar.SimpleProgress(), ' | ',
                progressbar.Percentage(), ' | ',
                progressbar.FormatLabel(f'Loss N/A'), ' | ',
                progressbar.Timer(), ' | ',
                progressbar.ETA()
            ]
            pbar_train = progressbar.ProgressBar(widgets=widgets)
            pbar_val = progressbar.ProgressBar(widgets=widgets)

            # Execute training on training set
            self.step(pbar_train)

            # Validate model on validation set
            self.validate(pbar_val)

            # Learning rate scheduler
            if self.lr_scheduler is not None:
                self.lr_scheduler.step()

            print(f'Epoch {self.epoch:3d} | Loss (T/V) {self.train_evaluator.loss:5.4f} / {self.val_evaluator.loss:5.4f} | ' \
                  f'{self.train_evaluator.evaluation_metric_name.capitalize()} (T/V) {self.train_evaluator.evaluation_metric:5.4f} / {self.val_evaluator.evaluation_metric:5.4f}')

            # Keep track of the best model
            if self.val_evaluator.evaluation_metric > self.best_val_metric:
                self.best_val_metric = self.val_evaluator.evaluation_metric
                self._save_checkpoint()
                self.epochs_no_improvement = 0
            else:
                self.epochs_no_improvement += 1

            # Update plots
            self._update_plots()

            self.epoch += 1
Example #28
def duration_progress(activity, duration, is_done):
    def progress(activity, iterations=100):
        bar = progressbar.ProgressBar(widgets=[
            activity,
            ' ',
            progressbar.Bar(),
            ' (',
            progressbar.ETA(),
            ') ',
        ])
        return bar(range(iterations))

    if duration is not None:
        iterations = 100
        time_per_iteration = float(duration) / float(iterations)

        bar = progress(activity, iterations)
        cursor = 0
        for cursor in range(iterations):
            bar.next()
            time.sleep(time_per_iteration)

            # If done early.
            if is_done():
                bar.update(100)
                break

    if not is_done():
        # if still not done.
        bar = progressbar.ProgressBar(widgets=[
            activity,
            ' ',
            progressbar.RotatingMarker(),
            ' (',
            progressbar.Timer(),
            ') ',
        ],
                                      max_value=progressbar.UnknownLength)
        i = 0
        while not is_done():
            bar.update(i)
            i += 1

    print("")
Example #29
def cat(list_files, output, title=None):
    """
    Equivalent of 'cat' unix command.

    Concatenate all files in 'list_files' and save the result in the 'output' file.
    Copying is done with shutil.copyfileobj, chunk by chunk, to avoid memory
    problems with large files.

    Parameters
    ----------
    list_files : list
        list of filenames to concatenate
    output : str
        output filename, where all concatenated files will be written
    title : str or None
        if you want to show a progressbar while concatenating files, add a title for this
        progressbar here. If no title, nothing will be shown during concatenation.

    """
    bar = None
    curnum = None
    if title:
        nbfiles = len(list_files)
        widgets = [
            title + ': ',
            progressbar.Bar(marker='█', left='', right='', fill=' '), ' ',
            progressbar.Counter(), f"/{nbfiles}"
            ' (',
            progressbar.Percentage(), ") - ",
            progressbar.Timer()
        ]
        bar = progressbar.ProgressBar(widgets=widgets,
                                      max_value=nbfiles,
                                      term_width=79).start()
        curnum = 1
    with open(output, "w") as outf:
        for file in list_files:
            if title:
                bar.update(curnum)
                curnum += 1
            with open(file, "r") as inf:
                shutil.copyfileobj(inf, outf)
    if title:
        bar.finish()
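Hypothetical usage with placeholder file names:

cat(['part1.fna', 'part2.fna', 'part3.fna'], 'merged.fna', title='Concatenating')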
Example #30
def optimize_K_param(model,hid_var,cell_exps):
    print('M-step: optimizing K param......')
    K_param=model['K_param']
    new_K_param=np.zeros(K_param.shape)
    w_nz=model['w_nz']
    n_path,n_gene=K_param.shape
    cell_path=hid_var['cell_path']
    cell_time=hid_var['cell_time']
    path_info=model['path_info']
    g_param=model['g_param']
    sigma_param=model['sigma_param']
    k_split=n_split
    path_gene_k_table=np.zeros((n_path,n_gene,k_split))
    if progress_bar:
        bar = progressbar.ProgressBar(
            maxval=n_path*n_gene,
            widgets=[' [', progressbar.Timer(), '] ',
                     progressbar.Bar('=', '[', ']'), ' ',
                     progressbar.Percentage(), ' (',
                     progressbar.ETA(), ') '])
        bar.start()
    count=0
    for p in range(n_path):
        Sp_idx=path_info[p]['Sp_idx']
        Sc_idx=path_info[p]['Sc_idx']
        g_a=g_param[Sp_idx]
        g_b=g_param[Sc_idx]
        p_idx=(cell_path==p)
        cell_exps_p=cell_exps[p_idx]
        cell_time_p=cell_time[p_idx]
        for j in range(n_gene):
            x_js=cell_exps_p[:,j]
            for ks in range(1,k_split+1):
                k=K_param_range/float(k_split)*ks
                mu_x_js=g_b[j]+(g_a[j]-g_b[j])*np.exp(-k*cell_time_p)
                tmp=((x_js-mu_x_js)**2./(2.*sigma_param[j]**2.)+np.log((sigma_param[j]*np.sqrt(2.*np.pi)) ))
                prob2 = np.where(x_js!=0.,0.,drop_out_param)
                mix_prob=w_nz[p,j]*np.exp(-tmp)+(1-w_nz[p,j])*prob2
                sum_log_prob=np.sum(np.log(mix_prob))
                path_gene_k_table[p,j,ks-1]=sum_log_prob
            max_ks=np.argmax(path_gene_k_table[p,j,:])+1
            max_k=K_param_range/float(k_split)*max_ks
            K_param[p,j]=max_k
            count+=1
            if progress_bar:
                bar.update(count)
    if progress_bar:
        bar.finish()