Example #1
    def get_cells_contours(self, precision):
        pb = ProgressBar(total=100,
                         decimals=2,
                         prefix="getting cells contours",
                         length=terminal_size.get_terminal_size()[0] - 40,
                         fill='#',
                         zfill='-')
        contours = {}
        l = 1
        step = 100 / len(self.struct)
        for i in self.struct.values():

            x0, x1 = i[0][0], i[0][1]
            y0, y1 = i[1][0], i[1][1]

            bottom_dots = self.section_divide((x0, y0), (x1, y0), precision)
            right_dots = self.section_divide((x1, y0), (x1, y1), precision)
            top_dots = self.section_divide((x1, y1), (x0, y1), precision)
            left_dots = self.section_divide((x0, y1), (x0, y0), precision)

            inner_dots = []
            for k in bottom_dots:
                for j in left_dots:
                    inner_dots.append((k[0], j[1]))

            contours[str(l)] = ([(x0, y0)] + bottom_dots + [(x1, y0)] +
                                right_dots + [(x1, y1)] + top_dots +
                                [(x0, y1)] + left_dots + inner_dots)

            pb.print_progress_bar(step * l)

            l += 1
        self.contours = contours
Example #2
    def fetch_despesas_deputados(self, anos, filepath='db.json.gz'):
        d_ids = self.get_ids()
        pbar_dep = ProgressBar(len(d_ids), prefix='Total Geral', suffix='', length=100)
        self._create_temp_dataframe()

        for d_id in d_ids:
            pbar_dep.next()
            self.set_anos(anos)
            self.set_itens(100)
            json_data = self.busca_despesas(d_id)
            total = self.get_total_paginas()
            if total == 0:
                continue
            json_filename = self._create_temp_data_file(total)
            #pbar = ProgressBar(total, prefix='Despesas Deputado {}'.format(d_id), length=50)
            while True:
                json_str = json.dumps(json_data['dados'])
                self._add_data_record(json_str[1:-1])
                #pbar.next()
                if not self.has_next():
                    break
                json_data = self.next()

            df_tmp = self.to_pandas(json_filename)
            df_tmp['idDeputado'] = str(d_id)
            self._add_df_record(df_tmp)
            self._clear_temp_data_file()

        self._save_temp_dataframe(filepath)
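
Note that, unlike most of the examples in this listing, Examples #2 and #8 advance the bar with pbar.next() instead of print_progress_bar(value), which points to a small counter-style wrapper around the bar. A minimal sketch of such a wrapper, assuming the console-progressbar package used by the other examples (the wrapper class itself is hypothetical):

from console_progressbar import ProgressBar


class CountingProgressBar:
    """Hypothetical wrapper: keeps its own counter so callers can just call next()."""

    def __init__(self, total, prefix='', suffix='', length=50):
        self._bar = ProgressBar(total=total, prefix=prefix, suffix=suffix,
                                length=length)
        self._count = 0

    def next(self):
        # advance one step and redraw the bar
        self._count += 1
        self._bar.print_progress_bar(self._count)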
Example #3
def save_test_data(fnames, bboxes):
    src_folder = 'cars_test'
    dst_folder = 'data/test'
    num_samples = len(fnames)

    pb = ProgressBar(total=100,
                     prefix='Save test data',
                     suffix='',
                     decimals=3,
                     length=50,
                     fill='=')

    for i in range(num_samples):
        fname = fnames[i]
        (x1, y1, x2, y2) = bboxes[i]
        src_path = os.path.join(src_folder, fname)
        src_image = cv.imread(src_path)
        height, width = src_image.shape[:2]
        # margins of 16 pixels
        margin = 16
        x1 = max(0, x1 - margin)
        y1 = max(0, y1 - margin)
        x2 = min(x2 + margin, width)
        y2 = min(y2 + margin, height)
        # print(fname)
        pb.print_progress_bar((i + 1) * 100 / num_samples)

        dst_path = os.path.join(dst_folder, fname)
        crop_image = src_image[y1:y2, x1:x2]
        # cv.resize expects dsize as (width, height)
        dst_img = cv.resize(src=crop_image, dsize=(img_width, img_height))
        cv.imwrite(dst_path, dst_img)
Example #4
def process_sentiment_news():
    from analytics.SentimentAnalyser import SentimentAnalyser
    from time import time

    stime = time()

    sa = SentimentAnalyser(True)
    d = DB()

    x = d.get_news({"polarity": -2})
    res = sa.get_polarity([i['text'] for i in x])
    r = len(res)

    print("Started for", r, "posts")
    pb = ProgressBar(total=r - 1,
                     prefix='Processed',
                     decimals=3,
                     length=100,
                     fill='=',
                     zfill='-')

    for i in range(r):
        d.update_news(
            x[i]['_id'], {
                'polarity': float(res[i]),
                'query': '-'.join(x[i]['query'].lower().split())
            })
        pb.print_progress_bar(i)

    print("Processing finished in", time() - stime)
Example #5
def build_trie_with_terms_and_definitions(data_path: str, resources_path: str):
    data = read_data_file(f"{resources_path}/json/{data_path}", 'json')

    total = len(data)

    print(f"\n\tSize of word document: {total}\n\n")

    trie = create_root_node()

    pb = ProgressBar(total=total,
                     prefix='Start',
                     suffix='Complete',
                     decimals=2,
                     length=50)

    current = 0
    for word in data:
        add_word_to_trie(trie, word, data[word]['definition'],
                         data[word]['count'])
        current += 1
        pb.print_progress_bar(current)

    print('''\tFinished inserting words into tree \n \
        \tCreating and saving pickle...
        ''')

    output_file = f"{resources_path}/pickles/korean_pickle.pkl"

    with open(f"{output_file}", 'wb') as file:
        pickle.dump(trie, file)

    print("\tDone!\n")
Example #6
    def zipit(self, folders, dst):

        pb = ProgressBar(total=100,
                         prefix='Zip Progress',
                         suffix='Completed',
                         decimals=2,
                         length=50,
                         fill='>',
                         zfill='-')
        count = 0
        progress = 0

        self.logger.info("Counting Files...")

        zip_file = zipfile.ZipFile(dst, 'w', zipfile.ZIP_DEFLATED)
        for folder in folders:
            for dirpath, dirnames, filenames in os.walk(folder):
                for filename in filenames:
                    count += 1

        self.logger.info("%s files found to be zipped" % count)

        for folder in folders:
            for dirpath, dirnames, filenames in os.walk(folder):
                for filename in filenames:
                    zip_file.write(
                        os.path.join(dirpath, filename),
                        os.path.relpath(os.path.join(dirpath, filename),
                                        os.path.join(folders[0], '../..')))
                    progress += 1
                    pb.print_progress_bar((progress / count) * 100)

        zip_file.close()
Example #7
    def _print_progress(self, event):
        """Print a progress based on the information on a GeneralEvent."""
        if event.indeterminate:
            # indeterminate, no progress to be shown
            return

        prefix = """
    --> {phase_name}

    {current_phase}/{target}""".format(
            phase_name=event.phase_name,
            current_phase=min(event.total_phases, event.current_phase + 1),
            target=event.total_phases,
        )

        elapsed = datetime.now() - self._start_time

        statuses = self._get_legends()
        suffix = """{runtime}

{statuses}""".format(statuses=statuses,
                     runtime=format_running_time(elapsed.seconds))

        pb = ProgressBar(
            total=100,
            prefix=prefix,
            suffix=suffix,
            decimals=0,
            length=self.bar_length,
            fill=self.filled_bar_char,
            zfill=self.empty_bar_char,
            file=self._out,
        )
        pb.print_progress_bar(event.progress * 100)
Example #8
    def fetch_dados_proposicoes(self, filepath='db.json'):
        d_ids = self.get_ids()
        total = len(d_ids)
        print('Obtendo dados de %i proposições' % total)
        pbar = ProgressBar(total - 1,
                           prefix='Dados Proposições',
                           suffix='obtidos')
        json_file = tempfile.NamedTemporaryFile(mode='a',
                                                encoding='utf-8',
                                                delete=False)
        json_file.write('[')
        count = 0
        for d_id in d_ids:
            json_data = self.busca_por_id(d_id)
            json_file.write(json.dumps(json_data['dados']))
            pbar.next()

            if count != total - 1:
                json_file.write(",\n")
            if count % 50 == 0:  # flush to disk every 50 records
                json_file.flush()
            count += 1

        json_file.write(']')
        json_file.flush()
        json_file.close()
        print('%i dados de proposições obtidos' % total)
        print('Convertendo para pandas Dataframe')
        self.to_pandas_json_file(json_file.name, filepath)
        os.remove(json_file.name)
Example #9
def main():
    """Load data, display data"""
    args = arg_parser_setup()
    clubhouse_apikey = get_clubhouse_apikey_or_exit()
    ch_api = ClubHouseAPI(clubhouse_apikey)
    cycle_logic = CycleLogic()

    if args.googlesheets:
        cycle_logic.enable_google_sheets_output(
            sheet_id=SHEET_ID,
            scopes=SCOPES,
            service_account_file=GOOGLE_SERVICE_ACCOUNT_FILE
        )

    members = ch_api.get_active_members()
    progress_bar = ProgressBar(total=len(members))

    for i, member in enumerate(members):
        progress_bar.print_progress_bar(i+1)
        cycle_logic.add_member(member)
        stories = ch_api.stories_by_mention(member['profile']['mention_name'])
        for ch_story in stories:
            story = Story(load_from_dict=ch_story, owner_id=member['id'])
            cycle_logic.add_story(story)


    print('\n\n')
    print(cycle_logic.tabulate_result(debug_member=args.debugmember))
Example #10
    def _print_progress(self, event):
        """Print a progress based on the information on a GeneralEvent."""
        if event.indeterminate:
            # indeterminate, no progress to be shown
            return

        prefix = """
    --> {phase_name}

    {current_phase}/{target}""".format(
            phase_name=event.phase_name,
            current_phase=min(event.total_phases, event.current_phase + 1),
            target=event.total_phases,
        )

        statuses = ""
        legends = self._get_legends(event.sim_states)
        for state in event.sim_states:
            statuses += "    {}\n".format(legends[state])

        suffix = """{runtime}

{statuses}""".format(statuses=statuses,
                     runtime=format_running_time(event.runtime))

        pb = ProgressBar(total=100,
                         prefix=prefix,
                         suffix=suffix,
                         decimals=0,
                         length=self.bar_length,
                         fill=self.filled_bar_char,
                         zfill=self.empty_bar_char,
                         file=self._out)
        pb.print_progress_bar(event.progress * 100)
Example #11
def get_yf_data(stock_index, index_file):
    print(stock_index)
    i = 0
    pb = ProgressBar(total=100,
                     prefix='Downloading',
                     suffix='',
                     decimals=3,
                     length=50,
                     fill='*',
                     zfill='-')
    num = len(stock_index)
    ydata = {}
    for stock in stock_index:
        ticker = yf.Ticker(stock)
        ydata[ticker.info['symbol']] = ticker.info
        i = i + 1
        pb.print_progress_bar((i / num) * 100)

    ydata_frame = pd.DataFrame(ydata).T[[
        'longName', 'regularMarketPreviousClose', 'fiftyTwoWeekHigh',
        'fiftyTwoWeekLow', 'forwardPE', 'marketCap'
    ]]
    # Chinese column labels: name, latest price, 52-week high, 52-week low,
    # P/E ratio, market cap
    ydata_frame.columns = ['名称', '最新价', '52周最高', '52周最低', '市盈率', '总市值']
    # convert market cap to units of 100 million (亿)
    ydata_frame['总市值'] = ydata_frame['总市值'] / 100000000
    return ydata_frame
Example #12
def ransac_error_calc_optimized(size, calculation_model_func):
    print("Training using RANSAC")
    pb = ProgressBar(total=ITERATIONS)
    best_score = 0
    best_model = None  # avoid an UnboundLocalError if no model is ever accepted
    i = 0
    while i < ITERATIONS:
        samples = choice_random(size)
        model = calculation_model_func(samples)
        if model is None:
            # retry this draw; i has not been incremented yet, so a plain
            # continue repeats the iteration without skewing the count
            continue

        consensus = 0
        b = 0
        for points in neighbors:
            error = calculate_error(model, points)
            if error < MAX_ERROR:
                consensus += 1
            # early exit: even if every remaining point agreed, this model
            # could no longer beat the best consensus found so far
            if consensus + (len(neighbors) - b) < best_score:
                break
            b += 1

        if consensus > best_score:
            best_score = consensus
            best_model = model
        i += 1
        pb.print_progress_bar(i)
    return best_model
Example #13
def extract(usage, package, image_path, json_path):
    filename = 'data/{}.zip'.format(package)
    print('Extracting {}...'.format(filename))
    with zipfile.ZipFile(filename, 'r') as zip_ref:
        zip_ref.extractall('data')

    pb = ProgressBar(total=100,
                     prefix='Save {} data'.format(usage),
                     suffix='',
                     decimals=3,
                     length=50,
                     fill='=')
    if not os.path.exists('data/{}'.format(usage)):
        os.makedirs('data/{}'.format(usage))
    with open('data/{}/{}'.format(package, json_path)) as json_data:
        data = json.load(json_data)
    num_samples = len(data)
    print("num_samples: " + str(num_samples))
    for i in range(num_samples):
        item = data[i]
        image_name = item['image_id']
        label_id = item['label_id']
        src_folder = 'data/{}/{}'.format(package, image_path)
        src_path = os.path.join(src_folder, image_name)
        dst_folder = 'data/{}'.format(usage)
        label = "%02d" % (int(label_id), )
        dst_path = os.path.join(dst_folder, label)
        if not os.path.exists(dst_path):
            os.makedirs(dst_path)
        dst_path = os.path.join(dst_path, image_name)
        src_image = cv.imread(src_path)
        # cv.resize takes dsize as (width, height); the interpolation flag
        # must be passed by keyword (the third positional argument is dst)
        dst_image = cv.resize(src_image, (img_width, img_height),
                              interpolation=cv.INTER_CUBIC)
        cv.imwrite(dst_path, dst_image)
        pb.print_progress_bar((i + 1) * 100 / num_samples)
Example #14
def main():

    with open(INPUT_FILE) as file:
        data = json.load(file)

    total = len(data)

    print(f"\n\tSize of word document: {total}\n\n")

    trie = create_root_node()

    pb = ProgressBar(total=total,
                     prefix='Start',
                     suffix='Complete',
                     decimals=2,
                     length=50)

    current = 0
    for word in data:
        add_word_to_trie(trie, word, data[word])
        current += 1
        pb.print_progress_bar(current)

    print('''\tFinished inserting words into tree \n \
        \tCreating and saving pickle...
        ''')

    with open(f"{OUTPUT_FILE}", 'wb') as file:
        pickle.dump(trie, file)

    print("\tDone!\n")
Example #15
def do_composite_test():
    num_bgs = 20

    with open('Combined_Dataset/Test_set/test_bg_names.txt') as f:
        bg_files = f.read().splitlines()
    with open('Combined_Dataset/Test_set/test_fg_names.txt') as f:
        fg_files = f.read().splitlines()

    # a_files = os.listdir(a_path)
    num_samples = len(fg_files) * num_bgs

    pb = ProgressBar(total=100,
                     prefix='Compose test images',
                     suffix='',
                     decimals=3,
                     length=50,
                     fill='=')
    start = time.time()
    bcount = 0
    for fcount in range(len(fg_files)):
        im_name = fg_files[fcount]

        for i in range(num_bgs):
            bg_name = bg_files[bcount]
            process(im_name, bg_name, fcount, bcount)
            bcount += 1

            pb.print_progress_bar(bcount * 100.0 / num_samples)

    end = time.time()
    elapsed = end - start
    print('elapsed: {} seconds'.format(elapsed))
Example #16
def test_digits(model, digits, labels, ensemble_size, reshape_fun):
    steps_results = {'c_error': {}, 'entropy': {}}

    dnum = 200

    pb = ProgressBar(total=100,
                     prefix='Sim trial progress',
                     length=25,
                     fill='=',
                     zfill='_')
    for i in range(1, 101):
        dnoice = salt_and_pepper(digits, i * dnum)

        d = utils.normalize_data(reshape_fun(dnoice))
        entropy = ann.test_model(model, [d] * ensemble_size,
                                 labels,
                                 metric='entropy')
        c_error = ann.test_model(model, [d] * ensemble_size,
                                 labels,
                                 metric='c_error')
        steps_results['entropy'][i] = entropy
        steps_results['c_error'][i] = c_error
        pb.print_progress_bar(i)

    return steps_results
Example #17
def save_test_data(fnames, bboxes):
    """
    Saves test data in a separate folder
    Args:
        fnames: The filenames of the files to save in the seperate folder.
        bboxes: Bounding boxes

    Returns: 
    """
    src_folder = "cars_test"
    dst_folder = "data/test"
    num_samples = len(fnames)

    pb = ProgressBar(total=100, prefix="Save test data", suffix="", decimals=3, length=50, fill="=")

    for i in range(num_samples):
        fname = fnames[i]
        (x1, y1, x2, y2) = bboxes[i]
        src_path = os.path.join(src_folder, fname)
        src_image = cv.imread(src_path)
        height, width = src_image.shape[:2]
        # margins of 16 pixels
        margin = 16
        x1 = max(0, x1 - margin)
        y1 = max(0, y1 - margin)
        x2 = min(x2 + margin, width)
        y2 = min(y2 + margin, height)
        pb.print_progress_bar((i + 1) * 100 / num_samples)

        dst_path = os.path.join(dst_folder, fname)
        # note: the margin-adjusted crop (x1, y1, x2, y2) is computed above
        # but never applied; the full source image is written out
        cv.imwrite(dst_path, src_image)
Example #18
def compute_class_prior(do_plot=False):
    categories_folder = 'data/instance-level_human_parsing/Training/Category_ids'
    names = [
        f for f in os.listdir(categories_folder) if f.lower().endswith('.png')
    ]
    num_samples = len(names)
    prior_prob = np.zeros(num_classes)
    pb = ProgressBar(total=num_samples,
                     prefix='Compute class prior',
                     suffix='',
                     decimals=3,
                     length=50,
                     fill='=')
    for i in range(num_samples):
        name = names[i]
        filename = os.path.join(categories_folder, name)
        category = np.ravel(cv.imread(filename, 0))
        counts = np.bincount(category)
        idxs = np.nonzero(counts)[0]
        prior_prob[idxs] += counts[idxs]
        pb.print_progress_bar(i + 1)

    prior_prob = prior_prob / (1.0 * np.sum(prior_prob))

    # Save
    np.save(os.path.join(data_dir, "prior_prob.npy"), prior_prob)

    if do_plot:
        plt.hist(prior_prob, bins=100)
        plt.yscale("log")
        plt.show()
Example #19
def predict(img_dir, model):
    img_files = []
    for root, dirs, files in os.walk(img_dir, topdown=False):
        for name in files:
            img_files.append(os.path.join(root, name))
    img_files = sorted(img_files)

    y_pred = []
    y_test = []
    pb = ProgressBar(total=100,
                     prefix='Predict data',
                     suffix='',
                     decimals=3,
                     length=50,
                     fill='=')
    for img_path in img_files:
        # print(img_path)
        img = image.load_img(img_path, target_size=(224, 224))
        x = image.img_to_array(img)
        preds = model.predict(x[None, :, :, :])
        decoded = decode_predictions(preds, top=1)
        pred_label = decoded[0][0][0]
        # print(pred_label)
        y_pred.append(pred_label)
        tokens = img_path.split('/')
        class_id = int(tokens[-2])
        # print(str(class_id))
        y_test.append(class_id)
        pb.print_progress_bar(len(y_pred) * 100 / len(img_files))

    return y_pred, y_test
Example #20
    def clear_table(self):
        pb = ProgressBar(total=100,
                         decimals=2,
                         prefix="clearing table",
                         length=terminal_size.get_terminal_size()[0] - 40,
                         fill='#',
                         zfill='-')
        connected = tuple(nx.strongly_connected_components(
            self.symbolic_image))
        remember = set()
        rest = {}
        for i in connected:
            if len(i) > 1:
                remember.update(i)
            # else:
            #     try: nx.find_cycle(self.symbolic_image, i)
            #     except nx.exception.NetworkXNoCycle: pass
            #     else: remember.update(i)

        step = 100 / len(self.struct)
        k = 1
        m = 1
        for i in self.struct:
            if i in remember:
                rest[str(k)] = self.struct[i]
                k += 1
            pb.print_progress_bar(m * step)
            m += 1

        self.struct = rest
Example #21
def ransac_error_optimized(size, calculation_model_func):
    print("Training using RANSAC")
    pb = ProgressBar(total=ITERATIONS)
    lowest_error = float('inf')
    best_model = None  # avoid an UnboundLocalError if no model improves the error
    i = 0
    while i < ITERATIONS:
        samples = choice_random(size)
        model = calculation_model_func(samples)
        if model is None:
            # retry this draw; i has not been incremented yet
            continue

        consensus = 0
        iter_error = 0
        for points in neighbors:
            error = calculate_error(model, points)
            if error < lowest_error:
                consensus += 1
            iter_error += error

        if lowest_error > iter_error:
            lowest_error = iter_error
            best_model = model
        i += 1
        pb.print_progress_bar(i)
    return best_model
Example #22
    def normalize_background(cls, stack, radius=None):
        """
            Normalize background to mean 0 and std 1.
            Estimate the mean and std of each image in the stack using pixels
            outside radius r (pixels), and normalize the image such that the
            background has mean 0 and std 1. Each image in the stack is corrected
            separately.

            :param radius: radius for normalization (default is half the side of image)

            Example:
            normalized_stack = cryo_normalize_background(stack,55);
        """
        validate_square_projections(stack)
        num_images = stack.shape[0]  # assuming C-contiguous array
        side = stack.shape[1]

        if radius is None:
            radius = np.floor(side / 2)

        # find indices of background pixels in the images
        ctr = (side + 1) / 2
        x_axis, y_axis = np.meshgrid(range(1, side + 1), range(1, side + 1))
        radiisq = (x_axis.flatten() - ctr)**2 + (y_axis.flatten() - ctr)**2
        background_pixels_idx = radiisq > radius * radius

        if AspireConfig.verbosity == 1:
            pb = ProgressBar(total=100,
                             prefix='normalizing background',
                             suffix='completed',
                             decimals=0,
                             length=100,
                             fill='%')
        else:
            pb = None

        normalized_stack = np.ones(stack.shape)
        for i in range(num_images):
            if pb:
                pb.print_progress_bar((i + 1) / num_images * 100)

            proj = stack[i, :, :]
            background_pixels = proj.flatten() * background_pixels_idx
            background_pixels = background_pixels[background_pixels != 0]

            # compute mean and standard deviation of background pixels
            proj_mean = np.mean(background_pixels)
            std = np.std(background_pixels, ddof=1)

            # normalize the projections
            if std < 1.0e-5:
                logger.warning(
                    f'Variance of background of image {i} is too small (std={std}). '
                    'Cannot normalize!')

            normalized_stack[i, :, :] = (proj - proj_mean) / std

        return normalized_stack
Example #23
def percentage(now, total):
    value = (now * 100) / total
    pb = ProgressBar(total=100,
                     prefix='Here',
                     decimals=2,
                     length=50,
                     fill='#',
                     zfill='-')
    pb.print_progress_bar(value)
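
Example #23 constructs a fresh ProgressBar on every call, so each update pays the construction cost and repeats the configuration. A sketch of the same helper with the bar created once and reused (names mirror Example #23; this is an assumption about the intent, not the original code):

pb = ProgressBar(total=100,
                 prefix='Here',
                 decimals=2,
                 length=50,
                 fill='#',
                 zfill='-')


def percentage(now, total):
    pb.print_progress_bar((now * 100) / total)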
Example #24
    def analyzeAllDomains(self):
        pb = ProgressBar(total=int(len(self.domains) - 1),
                         prefix='Domain analysis in progress',
                         suffix='',
                         decimals=3,
                         length=50,
                         fill='X',
                         zfill='-')
        data = self.getFrames()
        for idx, domain in enumerate(self.getDomains()):
            topic = data[domain]
            arr = topic.to_numpy()
            self.analyzeByDomain(domain, arr)
            self.calculateGlobalData(arr)
            pb.print_progress_bar(idx)
        self.serializeResultsDict()
Example #25
def PrintStatus(SyosetuInfo, AllList, Status):
    prgsbar = ProgressBar(total=AllList,
                          prefix="ダウンロード : ",
                          suffix=" ({}/{})\n".format(Status, AllList),
                          decimals=2,
                          length=50,
                          fill="#",
                          zfill=" ")
    ClearWindow()
    print(SyosetuInfo)
    prgsbar.print_progress_bar(Status)
Example #26
def save_train_and_test_data(fnames, fnamesPaste, imagesPaths, dict_labels):

    # src_folder = 'data/DeepLearningFilesPosAug'
    # src_folder = '/home/ml/datasets/DeepLearningFiles'

    num_samples = len(fnames)
    # print 'num_samples: ', num_samples #1903

    train_split = 0.8
    num_train = int(round(num_samples * train_split))
    # print 'num_train: ', num_train #1522

    train_indexes = random.sample(range(num_samples), num_train)
    # print 'train_indexes: ', train_indexes

    pb = ProgressBar(total=100,
                     prefix='Save train and test data',
                     suffix='',
                     decimals=3,
                     length=50,
                     fill='=')

    for i in range(num_samples):
        fname = fnames[i]
        fnamePaste = fnamesPaste[i]
        image_path = imagesPaths[i]
        # print 'fname: ', fname
        # print 'fnamePaste', fnamePaste

        # src_path = os.path.join(src_folder, fnamePaste)

        # Make the original images the same size
        im_data = load_image_data(image_path)

        # src_image = cv.imread(im_data)
        # print 'src_path: ', src_path
        # print 'src_image: ', src_image

        pb.print_progress_bar((i + 1) * 100 / num_samples)

        if i in train_indexes:
            dst_folder = 'data/pre_train'
        else:
            dst_folder = 'data/test'

        label = dict_labels[fnamePaste[:4]]
        # print 'label: ', label

        dst_path = os.path.join(dst_folder, label)
        if not os.path.exists(dst_path):
            os.makedirs(dst_path)
        dst_path = os.path.join(dst_path, fname)

        cv.imwrite(dst_path, im_data)
Example #27
    def __init__(self, port):
        self.ip = ""
        self.port = port
        self.s = socket.socket()
        self.pb = ProgressBar(total=100,
                              prefix='',
                              suffix='Now',
                              decimals=3,
                              length=50,
                              fill='X',
                              zfill='-')
Example #28
def test(test_data, labels):
    # with open(conf.readValue("lstm_model_path"), "rb") as file:
    #     model = pickle.load(file)
    model = PolarityLSTM(embedding_dim, vocab_size, hidden_dim, output_size,
                         n_layers)
    model.load_state_dict(torch.load(conf.readValue("lstm_model_path")))

    if ("-gpu" in sys.argv):
        model.cuda(device)

    pb = ProgressBar(total=int((len(test_data['embedding']) - 1) / batch_size),
                     prefix='Evaluation in progress',
                     suffix='',
                     decimals=3,
                     length=50,
                     fill='X',
                     zfill='-')

    # seq_tensor and seq_lengths are assumed to be prepared at module scope
    test_generator = DataSampler(seq_tensor, seq_lengths, labels, batch_size)
    model.eval()
    evaluator = Evaluator()

    outputs = []
    labels = []
    counter = 0

    LOGGER.debug("Evaluation in progress")

    for subset_input_tensor, subset_input_lengths, subset_labels_tensor in iter(
            test_generator):
        pb.print_progress_bar(counter)

        subset_input_tensor = subset_input_tensor.to(device)
        subset_input_lengths = subset_input_lengths.to(device)
        subset_labels_tensor = subset_labels_tensor.to(device)

        if ("-gpu" in sys.argv):
            model.lstm.flatten_parameters()

        try:
            output = model(subset_input_tensor, subset_input_lengths)
        except RuntimeError as ex:
            print(counter)
            print(ex)
            print(subset_input_tensor)
            print(subset_input_lengths)
            continue

        binary_output = (output >= 0.5).short()
        outputs.extend(binary_output.cpu().detach().numpy())
        labels.extend(subset_labels_tensor.cpu().detach().numpy())
        counter += 1

    return evaluator.evaluate(labels, outputs)
Example #29
def import_data(data_directory, size=(50, 50), max_samples=0):
    # Define X is features, y is label.
    X = []
    y = []

    # Define data-set directory.
    dir_labels = data_directory

    # We use the folder name as a label name.
    labels = os.listdir(dir_labels)

    # Search all folder in data-set directory.
    for i, label in enumerate(labels):

        # Get all image name in each folder.
        dir_images = dir_labels + os.sep + label
        image_names = os.listdir(dir_images)

        # Treat max_samples=0 (the default) as "no limit"; otherwise the
        # min() below would be 0 and the loop would stop after one image.
        limit = max_samples if max_samples > 0 else len(image_names)

        # Create a new progress bar.
        progress_bar = ProgressBar(total=min(len(image_names), limit),
                                   prefix="Label(%s):" % label,
                                   suffix="%d/%d" % (i + 1, len(labels)),
                                   length=50)

        # Search all image in each folder.
        n = 0
        for j, image_name in enumerate(image_names):

            # Read image from file.
            filename = dir_images + os.sep + image_name
            image = Image.imread(filename)
            image = cv2.resize(image, size)

            # Add into X, y.
            X.append(image)
            y.append(i)

            # Update progress bar.
            progress_bar.print_progress_bar(j + 1)

            n += 1
            if n >= limit:
                break

    # Shuffle data.
    data = list(zip(X, y))
    random.shuffle(data)
    X, y = zip(*data)

    # Final X, y.
    X = np.array(X)
    y = np.array(y)

    return X, y, labels
Example #30
def progressbar(prefix: str, m_track: str, wait: int):
    pb = ProgressBar(total=100,
                     prefix=prefix,
                     suffix=m_track,
                     decimals=0,
                     length=10,
                     fill='|',
                     zfill=' '
                     )
    for i in range(1, wait + 1):
        time.sleep(1)
        pb.print_progress_bar(int(100 * i // wait))
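
Taken together, the examples share one pattern: build a ProgressBar once, then feed print_progress_bar() either a running count (with total=N) or a percentage (with total=100). A minimal self-contained sketch of that pattern, assuming the console-progressbar package (pip install console-progressbar), whose constructor arguments match those used throughout this listing:

import time

from console_progressbar import ProgressBar

items = range(120)
pb = ProgressBar(total=100, prefix='Processing', suffix='done',
                 decimals=1, length=50, fill='=', zfill='-')

for i, _ in enumerate(items, start=1):
    time.sleep(0.01)  # stand-in for real work
    pb.print_progress_bar(i * 100 / len(items))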