def _eraseSectors(self, offset, length, tries=3):
        """Clears one or more sectors"""
        assert offset % self.sector_size == 0
        sectors_offset = offset // self.sector_size

        assert length % self.sector_size == 0
        sector_count = length // self.sector_size

        with progress.Bar(expected_size=sector_count) as bar:
            for sector in range(sector_count):
                sector_index = sectors_offset + sector

                bar.show(sector)

                # Erase sector up to 'tries' times
                for _ in range(tries):
                    if self._eraseSector(sector_index):
                        break
                else:  # No erase was successful
                    logError('Could not erase sector 0x%08x' % sector_index)
                    return False

            bar.show(sector_count)

        return True
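The erase loop above relies on Python's for/else: the else branch runs only when the loop finishes without a break, i.e. when no attempt succeeded. A minimal standalone sketch of that retry idiom, with erase_sector standing in for a call such as self._eraseSector:

def erase_with_retry(erase_sector, sector_index, tries=3):
    # Attempt the erase up to 'tries' times.
    for _ in range(tries):
        if erase_sector(sector_index):
            break
    else:  # loop ended without a break: every attempt failed
        return False
    return True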
Example #2
    def copyAllImages(self, origin, v=False):
        if self.closed:
            print(' [!] This environment is closed')
            return

        self.imagesPath = os.path.join(self.name, 'images')
        os.makedirs(self.imagesPath)

        self.imagesList, n = selectImages(origin, v=v)

        if v:
            print(" [x] Copying {} images from {}".format(n, origin))

        if v:
            with progress.Bar(label="    [o] Copying ...",
                              expected_size=len(self.imagesList)) as bar:
                val = 0
                for image in self.imagesList:
                    scopy(image, self.imagesPath)
                    val += 1
                    bar.show(val)
        else:
            for image in self.imagesList:
                scopy(image, self.imagesPath)

        self.originImagesList = copy.copy(self.imagesList)
        self.imagesList, n = selectImages(self.imagesPath)
Example #3
def siftExtraction(imagesList, v=False):

	sift = cv2.xfeatures2d.SIFT_create()

	xdes = []
	xkpt = []

	if v == True:
		with progress.Bar(label=" [x] Sift extraction ...", expected_size=len(imagesList)) as bar:
			val = 0
			for i in imagesList:
				img = cv2.imread(i)
				gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
				kpt, des = sift.detectAndCompute(gray, None)
				xdes.append(des)
				xkpt.append([k.pt for k in kpt])
				val += 1
				bar.show(val)
	else:
		for i in imagesList:
			img = cv2.imread(i)
			gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
			kpt, des = sift.detectAndCompute(gray, None)
			xdes.append(des)
			xkpt.append([k.pt for k in kpt])

	return np.array(xdes), np.array(xkpt)
Example #4
    def download(self, record, path):
        assert record['type'] == FILE, 'Not a file'

        file_name = path
        assert record['blob'], 'No blob data for {}'.format(file_name)

        length = record['blob']['length']
        url = DOWNLOAD.format(**record['blob'])
        result = self.get(url=url, stream=True)
        assert result.ok, 'Problem loading file {}'.format(file_name)

        total_length = length
        result.raw.decode_content = True
        it = result.iter_content(chunk_size=1024)
        label = "{:20.20}".format(file_name)
        count = total_length // 1024 + 1
        bar = progress.Bar(label=label,
                           width=32,
                           empty_char='.',
                           filled_char='>',
                           expected_size=count,
                           every=3)
        with open(file_name, 'wb') as f:
            with bar:
                for i, item in enumerate(it):
                    bar.show(i + 1)
                    if item:
                        f.write(item)
                        f.flush()
Example #5
def progress_bar(label, size=10):
    with progress.Bar(label=colored.yellow(label), expected_size=size) as bar:
        last_val = 0
        for val in range(size + 1):
            time.sleep(0.1 * (val - last_val))
            bar.show(val)
            last_val = val
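The same module also offers a generator-style helper, clint.textui.progress.bar, which wraps an iterable and advances the bar automatically (Example #26 below falls back to it on older clint versions). A minimal sketch of the equivalent loop, with the sleep standing in for real work:

import time
from clint.textui import progress

def progress_bar_iter(label, size=10):
    # progress.bar yields each item and redraws the bar as it goes.
    for _ in progress.bar(range(size), label=label, expected_size=size):
        time.sleep(0.1)  # stand-in for real work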
Example #6
def get_profile(args):

    # getting the latest archive from the server
    if TASK_CLUSTER:
        url = TC_LINK % args.scenarii
        basename = "today-%s.tgz" % args.scenarii
    else:
        basename = "%s-latest.tar.gz" % args.scenarii
        url = args.archives_server + "/%s" % basename
    exists, __ = check_exists(url)

    if not exists:
        return None

    target = os.path.join(args.archives_dir, basename)
    archive = download_file(url, target=target, check_file=False)
    with tarfile.open(archive, "r:gz") as tar:
        logger.msg("Checking the tarball content...")
        size = len(list(tar))
        with progress.Bar(expected_size=size) as bar:

            def _extract(self, *args, **kw):
                if not TASK_CLUSTER:
                    bar.show(bar.last_progress + 1)
                try:
                    return self.old(*args, **kw)
                finally:
                    pass

            tar.old = tar.extract
            tar.extract = functools.partial(_extract, tar)
            tar.extractall(args.profile)

    return args.profile
Example #7
    def _create_archive(self, when, iterator=None):
        if iterator is None:

            def _files(tar):
                files = glob.glob(os.path.join(self.profile_dir, "*"))
                yield len(files)
                for filename in files:
                    try:
                        tar.add(filename, os.path.basename(filename))
                        yield filename
                    except FileNotFoundError:
                        # locks and such
                        pass

            iterator = _files

        if isinstance(when, str):
            archive = when
        else:
            archive, __ = self._get_archive_path(when)

        with tarfile.open(archive, "w:gz", dereference=True) as tar:
            it = iterator(tar)
            size = next(it)
            with progress.Bar(expected_size=size) as bar:
                for filename in it:
                    if not TASK_CLUSTER:
                        bar.show(bar.last_progress + 1)

        self._checksum(archive)
        return archive
Example #8
def processing_pic(operation, params, pic_list):
    """
    Apply an operation on all the pictures in the 'processed' directory
    according to the parameters retrieved from the command line

    Arguments:
    ----------
    operation: str
        Which operation to perform ('Resizing', 'Rotating')
    params: Namespace object
        Parameters as retrieved from :py:func:`get_params`
    pic_list: list
        List of all the pictures to process (file names)
    """
    puts(colored.cyan(operation + ' pictures'))
    op_bar = progress.Bar(label=operation, expected_size=len(pic_list))
    # Open and transform all pictures
    for i, pic in enumerate(pic_list):
        with Image(filename=pic) as img:
            if operation == 'Resizing':
                img.resize(params.width, params.height)
            elif operation == 'Rotating':
                img.rotate(params.rotation)
            img.save(filename=pic)
            op_bar.show(i + 1)
    puts('\n')
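A hedged invocation sketch for processing_pic; the Namespace fields match what the function reads (width, height, rotation), but the values and file names are hypothetical:

from argparse import Namespace

params = Namespace(width=800, height=600, rotation=90)            # hypothetical values
processing_pic('Resizing', params, ['photo1.jpg', 'photo2.jpg'])  # hypothetical files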
Example #9
    def _deliver_data(self, url, cookies, progress_bar):
        headers = self.getDefaultHeaders()
        if not progress_bar:
            return requests.get(url,
                                headers=headers,
                                cookies=cookies,
                                verify=ssl_verify,
                                timeout=60,
                                proxies=self.proxies).content
        response_content = bytes()
        response = requests.get(url,
                                headers=headers,
                                cookies=cookies,
                                verify=ssl_verify,
                                stream=True,
                                timeout=60,
                                proxies=self.proxies)
        total_length = int(response.headers.get('content-length'))
        chunk_size = 32 * (1 << 10)  # 32 KB
        bar = progress.Bar(expected_size=(total_length >> 10))
        for index, chunk in enumerate(
                response.iter_content(chunk_size=chunk_size)):
            response_content += chunk
            bar.show(index * chunk_size >> 10)
        bar.done()
        return response_content
def _get_players_from_api(premier_league_only=True, add_team_names=True):
    all_cutdown_player_data = FantasyPremierLeagueApi.get_cutdown_player_data()
    players = []
    with progress.Bar(label="Loading Player Data: ",
                      expected_size=len(all_cutdown_player_data)) as bar:
        for count, cutdown_player_data in enumerate(all_cutdown_player_data):
            bar.show(count + 1)
            if PlayerCache.in_cache(cutdown_player_data['id']):
                player_data = PlayerCache.get_full_player_data(
                    cutdown_player_data)
            else:
                player_data = FantasyPremierLeagueApi.get_full_player_data(
                    cutdown_player_data)
                PlayerCache.save_full_player_data(player_data['id'],
                                                  player_data)
            players.append(Player(player_data))

    team_names = FantasyPremierLeagueApi.get_team_name_mapping()

    if premier_league_only:
        players = [p for p in players if p.team_code in team_names.keys()]

    if add_team_names:
        for player in players:
            player.team = team_names[player.team_code]

    return players
Example #11
def main():
    last_host_list = []

    while True:
        new_host_list = []

        all_peers = get_peer_list(last_host_list)
        with progress.Bar(label="Fetching data",
                          expected_size=len(all_peers)) as bar:
            for i, (host, port) in enumerate(all_peers):
                height = get_peer_height(host, port)
                new_host_list.append((host, port, height))
                bar.show(i + 1)

        new_host_list = sorted(new_host_list,
                               key=lambda t: -t[-1]
                               if t[-1] is not None else 0)

        last_host_list = new_host_list

        for (host, port, height) in new_host_list:

            reader = geolite2.reader()
            match = reader.get(host2ip(host))
            if match is not None:
                if 'country' in match:
                    country = match['country']['iso_code']
                elif 'continent' in match:
                    country = match['continent']['code']
                else:
                    country = "??"
            else:
                country = "??"

            print("%3s %30s %6s %7s" %
                  (country, host, port,
                   height if height is not None else "??????"))
Example #12
def download(url=None, dest=None):
    if url is None:
        print "Error: Missing url."
        return

    if dest is None:
        dest = url.split('/')[-1]

    f = open(dest, 'wb')
    u = urllib2.urlopen(url)
    meta = u.info()

    total_length = int(meta.getheaders("Content-Length")[0])
    chunk = 8000
    toolbar_width = int(math.ceil(total_length / (chunk - 1)))

    count = 0
    with progress.Bar(expected_size=toolbar_width) as bar:
        while True:
            buffer = u.read(chunk)
            if not buffer:
                break
            count += 1
            bar.show(count)
            # Write to file
            f.write(buffer)

    sys.stdout.write("\n")
    f.close()
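The example above is Python 2 only (urllib2, print statement, meta.getheaders). A hedged Python 3 counterpart of the same download loop, assuming clint's progress module as in the rest of this listing:

import math
import sys
import urllib.request
from clint.textui import progress

def download_py3(url, dest=None):
    # Same chunked-read loop as above, using urllib.request instead of urllib2.
    if dest is None:
        dest = url.split('/')[-1]
    with urllib.request.urlopen(url) as u, open(dest, 'wb') as f:
        total_length = int(u.headers.get("Content-Length", 0))
        chunk = 8000
        toolbar_width = int(math.ceil(total_length / (chunk - 1)))
        count = 0
        with progress.Bar(expected_size=toolbar_width) as bar:
            while True:
                buffer = u.read(chunk)
                if not buffer:
                    break
                count += 1
                bar.show(count)
                f.write(buffer)
    sys.stdout.write("\n")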
Example #13
    def __init__(self, buf):
        self._progress = 0
        self._len = len(buf)
        self._bar = None
        if self._len > 4096:
            self._bar = progress.Bar(filled_char='=', every=4096)
        BytesIO.__init__(self, buf)
Example #14
def get_protein_atlas_zip(zipname, clean=False, force=False):
    """Download a Kaggle competition data .zip archive named `zipname` and store it
    in its own directory whose name is the stem of `zipname`.

    Parameters:

        zipname : str

            name of the kaggle .zip file. ***UNDEFINED BEHAVIOR WHEN zipname HAS
            NO FILE EXTENSION***

        clean : bool, default False

            If true, delete the .zip file after expanding

        force : bool, default False

            If true, download and decompress the .zip file even if the output
            directory already exists.

    Throws:

        subprocess.CalledProcessError:

            Subprocess call to kaggle returned with nonzero exit status.

        KeyError :

            PATH lookup failed.

    """
    outputdir_name = Path(zipname).stem
    outputdir = PATH[outputdir_name]
    if outputdir.exists() and not force:
        print("{}: Skipping, output directory {} already exists".format(
            zipname, str(outputdir)))
    else:
        download_cmd = [
            "kaggle", "competitions", "download",
            "human-protein-atlas-image-classification", "-f", zipname, "-p",
            str(PATH[zipname].parent)
        ]
        print(" ".join(download_cmd))
        subprocess.run(download_cmd, check=True)

        with ZipFile(PATH[zipname]) as f:
            # extract into the output directory computed above
            if not outputdir.exists():
                outputdir.mkdir(parents=True, exist_ok=True)
            members = f.infolist()
            label = "unzipping {} ".format(zipname)
            with progress.Bar(label=label, expected_size=len(members)) as bar:
                for i, member in enumerate(members):
                    if not outputdir.joinpath(member.filename).exists():
                        f.extract(member, path=outputdir)
                    bar.show(i)

        if clean:
            PATH[zipname].unlink()
    def __init__(self, data: memoryview, logger: logging.Logger):
        self._file = io.BytesIO(data)
        self._size = len(data)
        self._enabled = logger.isEnabledFor(logging.INFO)
        if self._enabled:
            self._progress = progress.Bar(expected_size=self._size)
        else:
            logger.debug("Progress indication is not enabled")
Example #16
async def build_profile(args):
    scenarii = scenario[args.scenarii]

    # getting the latest archive from the server
    if TASK_CLUSTER:
        url = TC_LINK % args.scenarii
        basename = 'today-%s.tgz' % args.scenarii
    else:
        basename = '%s-latest.tar.gz' % args.scenarii
        url = args.archives_server + '/%s' % basename

    exists, headers = check_exists(url)
    metadata = {}

    if exists:
        target = os.path.join(args.archives_dir, basename)
        archive = download_file(url, target=target, check_file=False)
        with tarfile.open(archive, "r:gz") as tar:
            logger.msg("Checking the tarball content...")
            size = len(list(tar))
            with progress.Bar(expected_size=size) as bar:
                def _extract(self, *args, **kw):
                    if not TASK_CLUSTER:
                        bar.show(bar.last_progress + 1)
                    try:
                        return self.old(*args, **kw)
                    finally:
                        pass
                        # if args[0].name == ".hp.json":
                        #   import pdb; pdb.set_trace()

                tar.old = tar.extract
                tar.extract = functools.partial(_extract, tar)
                tar.extractall(args.profile)

    logger.msg("Updating profile located at %r" % args.profile)

    f_args = ["-profile", args.profile]
    if platform.system() != 'Darwin':
        f_args.append('-headless')

    caps = {"moz:firefoxOptions": {"args": f_args}}
    if args.firefox is not None:
        caps['moz:firefoxOptions']['binary'] = args.firefox

    logger.msg("Starting the Fox...")
    with open('gecko.log', 'a+') as glog:
        async with get_session(CustomGeckodriver(log_file=glog),
                               Firefox(**caps)) as session:
            metadata = await scenarii(session, args)

    # writing metadata
    logger.msg("Creating metadata...")
    metadata['name'] = args.scenarii
    with open(os.path.join(args.profile, '.hp.json'), 'w') as f:
        f.write(json.dumps(metadata))

    logger.msg("Done.")
    def process(self, item):
        #print '%s is sleeping' % item

        bar = progress.Bar(label=item, expected_size=10)

        for i in range(11):
            time.sleep(random() * 1.)
            bar.show(i)

        #print 'wakeup %s!' % item
        return True
Example #18
    def update(self, op_code, cur_count, max_count=None, message=''):
        is_begin = op_code & git.RemoteProgress.BEGIN != 0
        # is_end = op_code & git.RemoteProgress.END != 0
        if is_begin:
            if self.bar is None:
                self.bar = progress.Bar(label="cloning {} repository".format(
                    self.repo_name),
                                        expected_size=max_count)
        else:
            if op_code != cur_count:
                self.bar.show(cur_count)  # update progress bar
def main(_):
    # create global configuration object
    model_config = Configuration(FLAGS.config)
    model = create_model(FLAGS, model_config)
    placeholders = {
        'l': tf.placeholder(tf.float32, (1, None, None, 3)),
        'r': tf.placeholder(tf.float32, (1, None, None, 3)),
        'd': tf.placeholder(tf.float32, (1, None, None, 1)),
    }
    x = {
        'l': tf.placeholder(tf.float32, (1, None, None, 3)),
        'r': tf.placeholder(tf.float32, (1, None, None, 3)),
        'd': tf.placeholder(tf.float32, (1, None, None, 1)),
    }
    p = namedtuple('Placeholders', placeholders.keys())(**placeholders)
    px = namedtuple('Placeholders', x.keys())(**x)
    model.build(px, True, None)
    model.build(p, False, True)
    session = tf.Session()
    saver = tf.train.Saver()
    # init variables
    session.run(tf.local_variables_initializer())
    session.run(tf.global_variables_initializer())
    # restore model if provided a checkpoint
    if model_config.checkpoint is not None:
        print("Restoring model from {}".format(model_config.checkpoint))
        saver.restore(session, model_config.checkpoint)
    # init dataset
    paths = get_paths_for_dataset(FLAGS.dataset)
    if 'test' not in paths:
        paths = {
            'train': [],
            'train_valid': [],
            'valid': [],
            'test': paths,
        }
    dataset = Dataset(get_example_class(FLAGS.dataset), paths, FLAGS.dataset)
    fd = lambda x: {p.l: x.left, p.r: x.right}
    reconstructions = os.path.join(model_config.directory, 'submission')
    os.makedirs(reconstructions, exist_ok=True)
    for generator in dataset:
        rec_dir = os.path.join(reconstructions, generator.name)
        os.makedirs(rec_dir, exist_ok=True)
        with progress.Bar(label=generator.name,
                          expected_size=generator.length) as bar:
            for i, example in enumerate(generator.examples):
                bar.show(i)
                if FLAGS.strided:
                    d = predict_strided(session, model, example, p)
                else:
                    d = session.run(model.outputs[p], fd(example)).squeeze()
                store_disparity(
                    d, os.path.join(rec_dir, '{}.png'.format(example.name)))
    def _writeSectors(self, offset, data, tries=3):
        """Write one or more sectors with data

        This method erases the sectors before writing to them and verifies
        the data by reading each page and comparing checksums.
        """
        assert offset % self.sector_size == 0
        pages_offset = offset // self.page_size
        sectors_offset = offset // self.sector_size

        assert len(data) % self.sector_size == 0
        page_count = len(data) // self.page_size
        sector_count = len(data) // self.sector_size

        with progress.Bar(expected_size=page_count) as bar:
            sector_write_attempt = 0
            sector = 0
            while sector < sector_count:
                sector_index = sectors_offset + sector

                bar.show(sector * self.pages_per_sector)

                # Erase sector up to 'tries' times
                for _ in range(tries):
                    if self._eraseSector(sector_index):
                        break
                else:  # No erase was successful
                    logError('Could not erase sector 0x%08x' % sector_index)
                    return False

                for page in range(self.pages_per_sector):
                    page_data_index = sector * self.pages_per_sector + page
                    data_index = page_data_index * self.page_size
                    page_index = pages_offset + page_data_index

                    if self._writePage(
                            page_index,
                            data[data_index:data_index + self.page_size]):
                        bar.show(page_data_index + 1)
                        continue

                    sector_write_attempt += 1
                    if sector_write_attempt < tries:
                        break  # Retry sector

                    logError('Could not write page 0x%08x' % page_index)
                    return False

                else:  # All pages written normally -> next sector
                    sector += 1

        return True
Example #21
def create_callback(encoder_len):
    total_len = encoder_len
    bar = progress.Bar(expected_size=(total_len // 1024) + 1)

    def my_callback(monitor):
        # Your callback function
        if monitor == -1:
            bar.done()
        else:
            bar.show(monitor.bytes_read // 1024 + 1)
        #print str(monitor.bytes_read/1024)+" "+str(total_len/1024+1)

    return my_callback
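create_callback is shaped like a requests_toolbelt upload-monitor callback: monitor.bytes_read drives the bar and passing -1 marks completion. A hedged wiring sketch, assuming requests_toolbelt is the intended consumer; the endpoint, field name, and file are hypothetical:

import requests
from requests_toolbelt import MultipartEncoder, MultipartEncoderMonitor

encoder = MultipartEncoder(fields={'file': ('data.bin', open('data.bin', 'rb'),
                                            'application/octet-stream')})
callback = create_callback(encoder.len)
monitor = MultipartEncoderMonitor(encoder, callback)
requests.post('https://example.com/upload',  # hypothetical endpoint
              data=monitor,
              headers={'Content-Type': monitor.content_type})
callback(-1)  # signal completion so the bar is closed with done()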
    def verifyWithFile(self,
                       filename,
                       flash_offset=0,
                       file_offset=0,
                       length=DEFAULT_FLASH_SIZE):
        """Verify the flash content by checking against the file

        This method only uses checksums to verify the data integrity.
        """
        if length % self.page_size != 0:
            logError('length must be a multiple of the page size %d' %
                     self.page_size)
            return False

        if flash_offset % self.page_size != 0:
            logError('flash_offset must be a multiple of the page size %d' %
                     self.page_size)
            return False

        page_count = length // self.page_size
        pages_offset = flash_offset // self.page_size

        try:
            with open(filename, 'rb') as file:
                file.seek(file_offset)

                with progress.Bar(expected_size=page_count) as bar:
                    for page in range(page_count):
                        bar.show(page)

                        data = file.read(self.page_size)

                        page_index = pages_offset + page
                        crc = self._loadPageMultiple(page_index)
                        if crc is None:
                            logError('Could not read page 0x%08x' % page_index)
                            return False

                        if crc == binascii.crc32(data):
                            logOk('Page 0x%08x OK' % page_index)
                        else:
                            logError('Page 0x%08x invalid' % page_index)

                    bar.show(page_count)

            logOk('Done')
            return True
        except IOError:
            logError('Could not read file \'%s\'' % filename)
            return False
Example #23
def homographyFilterPairs(kps, matches, v=False):

	n, m = matches.shape

	if v == True:
		with progress.Bar(label=" [x] Filtering matches ...", expected_size=n * m) as bar:
			val = 0
			for i in range(n):
				for j in range(m):
					nmatch = homographyFilter(kps[i], kps[j], matches[i, j])
					matches[i, j] = nmatch
					val += 1
					bar.show(val)
	else:
		for i in range(n):
			for j in range(m):
				nmatch = homographyFilter(kps[i], kps[j], matches[i, j])
				matches[i, j] = nmatch
Example #24
def RequestQueueProgress(request_id):
    request_details = clc.v1.Queue.GetStatus(request_id, silent=True)
    p = progress.Bar(label="%s  " % (request_details['RequestTitle']),
                     expected_size=100)
    while True:
        p.show(request_details['PercentComplete'])
        if request_details['CurrentStatus'] in ('Succeeded', 'Failed'): break
        time.sleep(2)
        request_details = clc.v1.Queue.GetStatus(request_id, silent=True)
    p.done()
    if request_details['CurrentStatus'] == 'Succeeded':
        Status(
            'SUCCESS', 1, "%s - %s" %
            (request_details['RequestTitle'], request_details['ProgressDesc']))
    elif request_details['CurrentStatus'] == 'Failed':
        Status(
            'ERROR', 3, "%s - %s" %
            (request_details['RequestTitle'], request_details['ProgressDesc']))
Example #25
    def removeImages(self, v=False):
        if self.closed:
            print(' [!] This environment is closed')
            return

        if v:
            print(" [x] Removing test environment images")

        if v:
            with progress.Bar(label="    [o] Removing ...",
                              expected_size=len(self.imagesList)) as bar:
                val = 0
                for image in self.imagesList:
                    os.remove(image)
                    val += 1
                    bar.show(val)
        else:
            for image in self.imagesList:
                os.remove(image)
Example #26
    def __init__(self, ranges):
        super(ProgressHandle, self).__init__()

        # Get number of frames to compute
        nbFramesToCompute = 0
        for timeRange in ranges:
            nbFramesToCompute += (timeRange._end - timeRange._begin +
                                  1) / timeRange._step
        expectedSize = (nbFramesToCompute
                        if nbFramesToCompute < samUtils.getMaxInt() else 1)

        # Create progress bar
        if clintVersion >= '0.3.5':
            self._progress = progress.Bar(expected_size=expectedSize)
            self._counter = 1
        else:
            self._it = list(range(0, expectedSize + 1))
            self._progress = progress.bar(self._it, expected_size=expectedSize)
            self._progress.next()
Example #27
    def delivery(self, packageName, versionCode,
                 offerType=1, downloadToken=None, progress_bar=False):
        """Download an already purchased app.

        packageName is the app unique ID (usually starting with 'com.').

        versionCode can be grabbed by using the details() method on the given
        app."""
        path = "delivery"
        params = {'ot': str(offerType),
                  'doc': packageName,
                  'vc': str(versionCode)}
        headers = self.getDefaultHeaders()
        if downloadToken is not None:
            params['dtok'] = downloadToken
        url = "https://android.clients.google.com/fdfe/%s" % path
        response = requests.get(url, headers=headers,
                                params=params, verify=ssl_verify)
        resObj = googleplay_pb2.ResponseWrapper.FromString(response.content)
        if resObj.commands.displayErrorMessage != "":
            raise RequestError(resObj.commands.displayErrorMessage)
        elif resObj.payload.deliveryResponse.appDeliveryData.downloadUrl == "":
            raise RequestError('App not purchased')
        else:
            downloadUrl = resObj.payload.deliveryResponse.appDeliveryData.downloadUrl
            cookie = resObj.payload.deliveryResponse.appDeliveryData.downloadAuthCookie[0]
            cookies = {
                str(cookie.name): str(cookie.value)
            }
            if not progress_bar:
                return requests.get(downloadUrl, headers=headers,
                                    cookies=cookies, verify=ssl_verify).content

            response_content = bytes()
            response = requests.get(downloadUrl, headers=headers, cookies=cookies, verify=ssl_verify, stream=True)
            total_length = int(response.headers.get('content-length'))
            chunk_size = 32 * (1<<10)  # 32 KB
            bar = progress.Bar(expected_size=(total_length >> 10))
            for index, chunk in enumerate(response.iter_content(chunk_size=chunk_size)):
                response_content += chunk
                bar.show(index * chunk_size >> 10)
            bar.done()
            return response_content
Example #28
    def copyAllResizedImages(self, origin, y, v=False):
        if self.closed:
            print(' [!] This environment is closed')
            return

        self.imagesPath = os.path.join(self.name, 'images')
        os.makedirs(self.imagesPath)

        self.pairsPath = os.path.join(self.name, 'pairs')
        os.makedirs(self.pairsPath)

        self.imagesList, n = selectImages(origin, v=v)

        if v:
            print(" [x] Copying {} images from {}".format(n, origin))

        if v:
            with progress.Bar(label="    [o] Copying ...",
                              expected_size=len(self.imagesList)) as bar:
                val = 0
                for image in self.imagesList:
                    img = cv2.imread(image, 0)
                    res = cv2.resize(img, (614, 1024),
                                     interpolation=cv2.INTER_CUBIC)
                    equ = cv2.equalizeHist(res)
                    cv2.imwrite(
                        os.path.join(self.imagesPath, os.path.basename(image)),
                        equ)
                    val += 1
                    bar.show(val)
        else:
            for image in self.imagesList:
                img = cv2.imread(image, 0)
                res = cv2.resize(img, (614, 1024),
                                 interpolation=cv2.INTER_CUBIC)
                equ = cv2.equalizeHist(res)
                cv2.imwrite(
                    os.path.join(self.imagesPath, os.path.basename(image)),
                    equ)

        self.originImagesList = copy.copy(self.imagesList)
        self.imagesList, n = selectImages(self.imagesPath)
Example #29
def RequestBlueprintProgress(request_id, location, alias, quiet=False):
    time_start = time.time()
    time_task_start = time_start
    request_details = clc.v1.Blueprint.GetStatus(request_id,
                                                 location,
                                                 alias,
                                                 silent=True)
    description = request_details['Description']
    Status('SUCCESS', 1, request_details['Description'])
    if not quiet: p = progress.Bar(expected_size=100)
    while True:
        if description != request_details['Description']:
            description = request_details['Description']
            if not quiet: sys.stdout.write("\033[K")  # clear line
            Status(
                'SUCCESS', 1,
                "%s - %s" % (request_details['Description'],
                             sec_to_time(int(time.time() - time_task_start))))
            time_task_start = time.time()
        if not quiet: p.show(request_details['PercentComplete'])
        if request_details['CurrentStatus'] in ('Succeeded', 'Failed'): break
        time.sleep(2)
        request_details = clc.v1.Blueprint.GetStatus(request_id,
                                                     location,
                                                     alias,
                                                     silent=True)
    #p.done()
    if not quiet: sys.stdout.write("\033[K")  # clear line
    duration_secs = int(time.time() - time_start)
    if request_details['CurrentStatus'] == 'Succeeded':
        Status(
            'SUCCESS', 1, "%s - %s" %
            (request_details['Description'], sec_to_time(duration_secs)))
    elif request_details['CurrentStatus'] == 'Failed':
        Status(
            'ERROR', 3, "%s - %s" %
            (request_details['Description'], sec_to_time(duration_secs)))

    servers = []
    for server in request_details['Servers']:
        servers.append({'Server': server})
    return (servers)
Example #30
def side_bars(params, pic_list):
    """
    Add black side bars to vertical pictures

    Arguments:
    ----------
    params: Namespace object
        Parameters as retrieved from :py:func:`get_params`
    pic_list: list
        List of all the pictures to process (file names)
    """
    puts(colored.cyan('Adding side bars'))
    sb_bar = progress.Bar(label='Sidebars', expected_size=len(pic_list))
    side_bars_width = int((params.tot_width - params.width) / 2)
    # Open and transform all pictures
    for i, pic in enumerate(pic_list):
        with Image(filename=pic) as img:
            img.border(Color('black'), side_bars_width, 0)
            img.save(filename=pic)
            sb_bar.show(i + 1)
    puts('\n')