Code Example #1
File: commands.py Project: Web5design/warehouse
def synchronize_by_projects(projects=None, fetcher=None, progress=True,
        download=None):
    if fetcher is None:
        fetcher = PyPIFetcher()

    # Grab the current datetime
    current = fetcher.current()

    # Synchronize all the classifiers with PyPI
    synchronize_classifiers(fetcher)

    if not projects:
        # Grab a list of projects from PyPI
        projects = fetcher.projects()

        # We are not synchronizing a subset of projects, so we can check for
        #   any deletions (if required) and yank them.
        diff.projects(projects)

        # Commit our yanked projects
        db.session.commit()

    if progress:
        bar = ShadyBar("Processing Projects", max=len(projects))
    else:
        bar = DummyBar()

    for project in bar.iter(projects):
        synchronize_project(project, fetcher, download=download)

    logger.info("Finished processing projects at %s", current)

    return current
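
Note: when progress output is disabled, the function above substitutes DummyBar for ShadyBar. DummyBar is defined elsewhere in warehouse; a minimal no-op stand-in satisfying the same iter() interface might look like this (a sketch, not warehouse's actual implementation):

class DummyBar(object):
    """Hypothetical no-op bar: same iter()/finish() surface, renders nothing."""
    def iter(self, iterable):
        # Pass items through without drawing a bar.
        for item in iterable:
            yield item

    def finish(self):
        pass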
Code Example #2
File: umm.py Project: todun/vimana
def download_model(name, url):
    """
    Download a large model file.

    Returns the hash of the newly downloaded model
    and the location of the model in the temp folder.
        :param name: string name of the model
        :param url: string URL of the model's location
    """
    # https://stackoverflow.com/questions/16694907/download-large-file-in-python-with-requests
    local_filename = url.split('/')[-1]

    local_filename = TEMP_LOCATION + local_filename

    full_hash = sha3_256()
    with requests.get(url, stream=True) as r:
        size = r.headers.get('content-length')
        if size:
            p = ShadyBar(local_filename, max=int(size))
        else:
            p = Spinner(local_filename)

        with open(local_filename, 'wb') as f:
            for chunk in r.iter_content(chunk_size=8192):
                if chunk:  # filter out keep-alive chunks
                    p.next(len(chunk))
                    f.write(chunk)
                    full_hash.update(chunk)
                    # f.flush()

    unique_filename = MODEL_LOCATION + name + ".h5"
    os.rename(local_filename, unique_filename)

    return full_hash.hexdigest(), unique_filename
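
A hypothetical call to the function above, assuming TEMP_LOCATION and MODEL_LOCATION are writable directory paths (with trailing separators) defined at module level:

# Hypothetical usage; the name and URL are placeholders.
digest, path = download_model(
    "resnet50",
    "https://example.com/models/resnet50.h5",
)
print("sha3-256:", digest)
print("stored at:", path)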
Code Example #3
File: commands.py Project: jjl/warehouse
def synchronize_by_projects(projects=None,
                            fetcher=None,
                            progress=True,
                            download=None):
    if fetcher is None:
        fetcher = PyPIFetcher()

    # Grab the current datetime
    current = fetcher.current()

    # Synchronize all the classifiers with PyPI
    synchronize_classifiers(fetcher)

    if not projects:
        # Grab a list of projects from PyPI
        projects = fetcher.projects()

        # We are not synchronizing a subset of projects, so we can check for
        #   any deletions (if required) and yank them.
        diff.projects(projects)

        # Commit our yanked projects
        db.session.commit()

    if progress:
        bar = ShadyBar("Processing Projects", max=len(projects))
    else:
        bar = DummyBar()

    for project in bar.iter(projects):
        synchronize_project(project, fetcher, download=download)

    logger.info("Finished processing projects at %s", current)

    return current
Code Example #4
    def _update_with_es(self):
        kw = {
            'index': self.es_index,
            'doc_type': self.query_type,
            'scroll': '1m',
            'search_type': 'scan',
            'size': self.bulk_size,
        }
        scroll = self.es.search(**kw)
        sid = scroll['_scroll_id']
        total_size = scroll['hits']['total']
        hits_size = total_size
        dealt_size = 0
        print("docs: " + str(total_size))
        suffix = '%(percent)d%% - %(index)d [%(elapsed_td)s / %(eta_td)s]'
        bar = ShadyBar("clone", suffix=suffix, max=total_size)
        while hits_size > 0:
            scroll = self.es.scroll(scroll_id=sid, scroll='1m')
            sid = scroll['_scroll_id']
            hits = scroll['hits']['hits']
            hits_size = len(hits)
            # Push this batch of hits into MongoDB
            if hits_size > 0:
                res = self._bulk_es_mongo(hits)
            # Track how many documents have been dealt with so far
            dealt_size += hits_size
            bar.goto(dealt_size)
        # done
        print('\nDone !')
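
Unlike most of the examples here, this one advances the bar with goto(), which jumps to an absolute index, rather than next(), which increments by a step count. A minimal sketch of the difference, using the progress library directly:

from progress.bar import ShadyBar

bar = ShadyBar("demo", max=100)
bar.next()      # advance one step (index 0 -> 1)
bar.next(9)     # advance nine steps (index 1 -> 10)
bar.goto(50)    # jump straight to index 50
bar.finish()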
Code Example #5
    def main(self):
        print("Welcome to the coin flipping simulator!")
        # Infinite loop
        while True:
            # Catch exception if user enters letters or nothing
            try:
                times = input("How many coins do you want to flip?: ")
                # Check if user entered `exit` or `quit` before converting to int
                # (So exception wouldn't trigger)
                if times == "exit" or times == "quit":
                    print("Exiting...")
                    # Exit the loop which quits the program
                    break
                else:
                    times = int(times)
                if times <= 0:
                    # Reuse ZeroDivisionError to signal a non-positive count;
                    # it is caught below with its own message.
                    raise ZeroDivisionError
                # Create progress bar for counting flips
                bar = Bar('Flipping {} coin(s)...'.format(times), max=times,
                          suffix='%(index)d/%(max)d - %(percent).1f%% - %(eta)ds')
                # Define vars for possible options
                heads, tails = 0, 0
                # Loop for the amount of times
                for x in range(times):
                    # Random flip
                    flip = randint(0, 1)
                    # Check results
                    if flip == 0:
                        heads += 1
                    else:
                        tails += 1
                    # Progress bar next
                    bar.next()
                # Finish the progress bar
                bar.finish()
                # Output results
                table = PrettyTable(["Side", "Amount", "Percent"])
                # Define the rows
                rows = [["Heads", heads, f"{round((heads / times * 100), 4)}%"],
                        ["Tails", tails, f"{round((tails / times * 100), 4)}%"]]
                # Loop over the rows instead of calling `add_row()` multiple times
                # Allows for easy expandability
                for row in rows:
                    table.add_row(row)
                # Output the Table
                print(table)
                # Set the config vars for the table
                graphics.data = [heads, tails]
                graphics.labels = ["Heads", "Tails"]
                graphics.title = "Result of {} flipped coin(s)".format(times)
                # Display a vertical bar graph
                print("Generating graph...")
                graphics.plot_x()
            except ValueError:
                print("Please only enter numbers")
            except ZeroDivisionError:
                print("You must flip at least 1 coin")
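
The suffix string in this example uses the progress library's printf-style context keys; index, max, percent, and eta are all attributes the bar exposes to the format string. A minimal standalone sketch of the same suffix:

from progress.bar import Bar
import time

bar = Bar('Flipping', max=20,
          suffix='%(index)d/%(max)d - %(percent).1f%% - %(eta)ds')
for _ in range(20):
    time.sleep(0.05)
    bar.next()
bar.finish()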
Code Example #6
File: __init__.py Project: Erethon/kamaki
class ProgressBarArgument(FlagArgument):
    """Manage a progress bar"""

    def __init__(self, help='', parsed_name='', default=True):
        self.suffix = '%(percent)d%%'
        super(ProgressBarArgument, self).__init__(help, parsed_name, default)

    def clone(self):
        """Get a modifiable copy of this bar"""
        newarg = ProgressBarArgument(
            self.help, self.parsed_name, self.default)
        newarg._value = self._value
        return newarg

    def get_generator(
            self, message, message_len=25, countdown=False, timeout=100):
        """Get a generator to handle progress of the bar (gen.next())"""
        if self.value:
            return None
        try:
            self.bar = KamakiProgressBar(
                message.ljust(message_len), max=timeout or 100)
        except NameError:
            self.value = None
            return self.value
        if countdown:
            bar_phases = list(self.bar.phases)
            self.bar.empty_fill, bar_phases[0] = bar_phases[-1], ''
            bar_phases.reverse()
            self.bar.phases = bar_phases
            self.bar.bar_prefix = ' '
            self.bar.bar_suffix = ' '
            self.bar.suffix = '%(remaining)ds to timeout'
        else:
            self.bar.suffix = '%(percent)d%% - %(eta)ds'
        self.bar.start()

        def progress_gen(n):
            for i in self.bar.iter(range(int(n))):
                yield
            yield
        return progress_gen

    def finish(self):
        """Stop progress bar, return terminal cursor to user"""
        if self.value:
            return
        mybar = getattr(self, 'bar', None)
        if mybar:
            mybar.finish()
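
A hedged usage sketch for the class above, assuming kamaki's KamakiProgressBar is importable and the flag is unset so that get_generator() actually builds a bar:

arg = ProgressBarArgument(help='show progress', parsed_name='--progress')
generator = arg.get_generator('Uploading')
if generator is not None:
    gen = generator(10)       # a bar with ten steps
    for _ in range(10):
        next(gen)             # each next() advances the bar one step
    arg.finish()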
Code Example #7
    def __init__(self, original_dir, blurred_dir, confidence_threshold,
                 input_video):
        self.original_dir = original_dir
        self.blurred_dir = blurred_dir
        self.confidence_threshold = confidence_threshold
        self.input_video = input_video
        self.prepare_frames()
        self.n_frames = self.extract_stills()
        self.bar = ShadyBar("Blurring", max=self.n_frames)
        self.codec = cv2.VideoWriter_fourcc(*'MP4V')
        self.blur_movie(confidence_threshold)
        self.bar.finish()
        print("New video is in {} with {} frames".format(
            self.output_video, self.n_frames))
Code Example #8
File: __init__.py Project: Erethon/kamaki
    def get_generator(
            self, message, message_len=25, countdown=False, timeout=100):
        """Get a generator to handle progress of the bar (gen.next())"""
        if self.value:
            return None
        try:
            self.bar = KamakiProgressBar(
                message.ljust(message_len), max=timeout or 100)
        except NameError:
            self.value = None
            return self.value
        if countdown:
            bar_phases = list(self.bar.phases)
            self.bar.empty_fill, bar_phases[0] = bar_phases[-1], ''
            bar_phases.reverse()
            self.bar.phases = bar_phases
            self.bar.bar_prefix = ' '
            self.bar.bar_suffix = ' '
            self.bar.suffix = '%(remaining)ds to timeout'
        else:
            self.bar.suffix = '%(percent)d%% - %(eta)ds'
        self.bar.start()

        def progress_gen(n):
            for i in self.bar.iter(range(int(n))):
                yield
            yield
        return progress_gen
Code Example #9
File: plot_bar.py Project: JunkaiZhan/PyLearning
def plot_bar():

    # Method 0: Using \r to print
    def view_bar(num, total, bar_title="Processing", bar_word="▓"):
        rate = num / total
        rate_num = round(rate * 100)
        rest_num = 100 - rate_num
        print(("\r\033[1;32m" + bar_title + " \033[0m\033[1;35m|" + bar_word *
               rate_num + " " * rest_num + "| \033[0m\033[1;33m%3d%%\033[0m") %
              (rate_num),
              end="")
        if rate_num == 100: print("\n", end="")

    with open("plot_statistic.py", 'r') as file:
        lines = file.readlines()
        for _ in range(len(lines)):
            time.sleep(0.02)
            view_bar(_, len(lines) - 1)

    # Method 1: Using alive_progress <<<
    with alive_bar(100) as bar:
        for _ in range(100):
            bar()
            time.sleep(0.02)

    # Method 2: Using tqdm <<<
    with open("plot_statistic.py", 'r') as file:
        lines = file.readlines()
        for _ in tqdm(lines):
            time.sleep(0.02)

    # Method 3: Using progress <<<
    with open("plot_statistic.py", "r") as file:
        lines = file.readlines()
        # bar   = IncrementalBar('BarName', max = len(lines))
        # bar   = ChargingBar('BarName', max = len(lines))
        bar = FillingCirclesBar('BarName', max=len(lines))
        # bar   = ShadyBar('BarName', max = len(lines))
        for _ in lines:
            bar.next()
            time.sleep(0.02)
        bar.finish()

    with open("plot_statistic.py", "r") as file:
        lines = file.readlines()
        bar = ChargingBar('BarName', max=len(lines))
        for _ in lines:
            bar.next()
            time.sleep(0.02)
        bar.finish()

    with open("plot_statistic.py", "r") as file:
        lines = file.readlines()
        bar = ShadyBar('BarName', max=len(lines))
        for _ in lines:
            bar.next()
            time.sleep(0.02)
        bar.finish()
Code Example #10
def export_corrected(file_num, is_range=False):
    if is_range:
        bar = ShadyBar(
            message="Loading dataset",
            suffix='%(index)d/%(max)d - %(percent).1f%% - %(eta_td)s',
            max=file_num * 2)
        threads = list()
        for file in range(1, file_num + 1):
            #export_single(file, bar)
            t = SplitThread(file, 'SplitThread-{}'.format(file), file)
            t.start()
            threads.append(t)
        for _t in threads:
            _t.join()
        print('Job complete. {} Threads executed'.format(file_num))
    else:
        bar = ShadyBar(
            message="Loading dataset",
            suffix='%(index)d/%(max)d - %(percent).1f%% - %(eta_td)s',
            max=2)
        export_single(file_num, bar)
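
In the threaded branch above, the bar is created but never advanced, since the export_single(file, bar) call is commented out. If worker threads were ever to share the bar, updates should be serialized; a minimal sketch, assuming the workers are handed the bar plus a shared threading.Lock:

import threading

bar_lock = threading.Lock()

def advance(bar, n=1):
    """Advance a shared progress bar safely from worker threads."""
    with bar_lock:
        bar.next(n)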
Code Example #11
File: nfl-scores-loader.py Project: tic/Scoreagami
def loadAllData(client):
	print('Initiating data load...')
	current_year = int(datetime.datetime.now().year) + 1
	# Wipe the table
	client['scores']['scores'].remove({})
	client['scores']['table'].find_one_and_update({'_id': 0}, {'$set': {'score_table': {} }})
	print('Uploading data from NFL seasons', start_year, '-', (current_year - 1))

	with ShadyBar('', max=(current_year-start_year), suffix='%(percent).1f%% - %(eta)d s  ') as bar:
		for year in range(start_year, current_year): # Loop through every NFL season
			loadOneYear(client, year)
			bar.next()
Code Example #12
File: pynmj.py Project: ericcolleu/python-nmj
def main():
    logging.basicConfig(level=logging.INFO, filename="pynmj.log")
    #_LOGGER.setLevel(logging.INFO)
    options, arguments = parse_options()
    try:
        try:
            get_lock(arguments[0])
            updater = NMJUpdater(arguments[0], "local_directory")
            if options.clean_name:
                updater.clean_names()
            medias = updater.scan_dir()
            _LOGGER.info("Found %s medias", len(medias))
            bar = ShadyBar('Updating database',
                           max=len(medias),
                           suffix='[ETA %(eta_td)s] (%(percent)d%%)')
            for rank, media in enumerate(medias):
                _LOGGER.info("Media %s/%s", rank + 1, len(medias))
                updater.search_media_and_add(media)
                bar.next()
            _LOGGER.info("Cleaning DB...")
            updater.clean()
            _LOGGER.info("Done")
            bar.finish()
        except:
            import traceback
            traceback.print_exc()
    finally:
        release_lock(arguments[0])
Code Example #13
def main():
    bar = ShadyBar('Preparing update', max=100)
    for i in range(100):
        time.sleep(.01)
        bar.next()
    bar.finish()

    subprocess.check_call([sys.executable, "-m", "pip", "install", UPDATE_URL])
Code Example #14
File: commands.py Project: Web5design/warehouse
def synchronize_by_journals(since=None, fetcher=None, progress=True,
        download=None):
    if fetcher is None:
        fetcher = PyPIFetcher()

    # Grab the current datetime
    current = fetcher.current()

    # Synchronize all the classifiers with PyPI
    synchronize_classifiers(fetcher)

    # Grab the journals since `since`
    journals = fetcher.journals(since=since)

    # Storage for projects that have been updated or deleted
    updated = set()
    deleted = set()

    # Check if we have anything to process before attempting to
    if journals:
        if progress:
            bar = ShadyBar("Processing Journals", max=len(journals))
        else:
            bar = DummyBar()

        # Handle renames first. PyPI retroactively rewrites journal names to
        #   the renamed project, so if we processed any other entry before
        #   the rename we would trigger a sync that treats it as a brand new
        #   project instead of a renamed one.
        if since is not None:
            for journal in journals:
                if journal.action.lower().startswith("rename from "):
                    _, _, previous = journal.action.split(" ", 2)

                    proj = Project.get(previous)
                    proj.rename(journal.name)

        # Commit the renames
        db.session.commit()

        for journal in bar.iter(journals):
            if redis.sismember(REDIS_JOURNALS_KEY, journal.id):
                # We've already processed this entry, so skip to the next one
                continue

            created = datetime.datetime.utcfromtimestamp(journal.timestamp)
            Journal.create(
                        name=journal.name,
                        version=journal.version,
                        created=created,
                        action=journal.action,
                        pypi_id=journal.id,
                    )

            if (journal.action.lower() == "remove" and
                    journal.version is None):
                # Delete the entire project
                if journal.name not in deleted:
                    updated.discard(journal.name)
                    deleted.add(journal.name)

                    # Actually yank the project
                    Project.yank(journal.name, synchronize=False)
            elif journal.action.lower().startswith("rename from "):
                _, _, previous = journal.action.split(" ", 2)

                # Do nothing for right now, eventually we'll use this spot for
                #   creating a history event
            else:
                # Process the update
                if journal.name not in updated:
                    deleted.discard(journal.name)
                    updated.add(journal.name)

                    # Actually synchronize the project
                    synchronize_project(journal.name,
                        fetcher,
                        download=download,
                    )

            try:
                # Add this ID to our list of IDs we've processed in Redis
                redis.sadd(REDIS_JOURNALS_KEY, journal.id)

                # Commit any changes made from this journal entry
                db.session.commit()
            except:
                # If any exception occurred while committing, remove the id
                #   from redis
                redis.srem(REDIS_JOURNALS_KEY, journal.id)
                raise

    logger.info(
        "Finished processing journals at %s; updated %s and deleted %s",
        current, len(updated), len(deleted),
    )

    return current
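
The Redis set in this example acts as an idempotency ledger: a journal id is marked before the database commit and unmarked if the commit fails, so a crashed run can be resumed safely. The same pattern in isolation, as a sketch assuming a running Redis and the redis-py client:

import redis

r = redis.Redis()
SEEN_KEY = "journals:processed"  # hypothetical key name

def process_once(journal_id, handler):
    """Run handler for journal_id unless it was already processed."""
    if r.sismember(SEEN_KEY, journal_id):
        return False
    r.sadd(SEEN_KEY, journal_id)
    try:
        handler(journal_id)
    except Exception:
        # Roll back the marker so the entry can be retried.
        r.srem(SEEN_KEY, journal_id)
        raise
    return True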