def mkprogress(encoder):
    """
    Build a progress-reporting callback for a multipart upload.

    :param encoder: instance of :class:`requests_toolbelt.MultipartEncoder`
    :returns: a callable suitable as the callback argument of
        :class:`requests_toolbelt.MultipartEncoderMonitor`
    """
    total = encoder.len
    bar = Bar(expected_size=total, filled_char='=')
    # Boxed flag so the nested function can mutate it (py2-compatible
    # alternative to ``nonlocal``): have we already printed the wait notice?
    finished = [False]

    def callback(monitor):
        """
        Draw the progress bar for the monitor's current byte count.

        :param monitor: instance of
            :class:`requests_toolbelt.MultipartEncoderMonitor`
        """
        if monitor.bytes_read >= total:
            # Upload finished: finalize the bar exactly once, then note
            # that we are waiting on the server.
            if not finished[0]:
                finished[0] = True
                bar.done()
                print('\nwaiting for server-side processing ...')
        else:
            bar.show(monitor.bytes_read)

    return callback
# Example 2
    def create_progress_callback(encoder):
        """Return a (callback, bar) pair for tracking an upload's progress.

        :param encoder: a ``requests_toolbelt.MultipartEncoder``
        :returns: tuple of (callback for a MultipartEncoderMonitor,
            the :class:`Bar` it drives)
        """
        total_bytes = encoder.len
        upload_bar = Bar(expected_size=total_bytes, filled_char='=')

        def on_progress(monitor):
            # Advance the bar to however many bytes have been sent so far.
            upload_bar.show(monitor.bytes_read)

        return on_progress, upload_bar
# Example 3
def _download_with_progressbar(response, content_length):
    """Yield 1 KiB chunks of *response* while drawing a progress bar.

    :type response: requests.Response
    :type content_length: int
    """
    chunk_size = 1024
    with Bar(expected_size=content_length) as progress:
        bytes_seen = 0
        for chunk in response.iter_content(chunk_size):
            yield chunk
            # Clamp to content_length so the bar never overshoots on the
            # final (possibly short) chunk.
            bytes_seen += chunk_size
            progress.show(min(bytes_seen, content_length))
# Example 4
def create_console_callback(size):
    """Build a console progress callback for a MultipartEncoderMonitor.

    @param size total number of bytes the monitor is expected to read
    """
    # Solid-block fill character; redraw at most once per 1024 bytes.
    progress = Bar(expected_size=size, filled_char='\u25A0', every=1024)

    def on_read(monitor):
        progress.show(monitor.bytes_read)

    return on_read
# Example 5
def course_exports(course_list):
    '''Do course exports for each course in the list'''
    #pprint.pprint(course_list)
    bar = Bar(label='Exporting Courses ', expected_size=len(course_list))
    export_list = []
    for idx, course in enumerate(course_list):
        export_list.append(course_export(course))
        bar.show(idx)

    blen = len(export_list)
    bar.label = "Checking Export Statuses: "
    bar.expected_length = blen
    bar.show(1)
    while export_list:
        if len(export_list) < 10:
            time.sleep(2)
        bar.show(blen - len(export_list))
        #print export_list
        for idx, cm in enumerate(export_list):
            course, res = cm
            url = res.get('progress_url', None)
            if not url:
                del (export_list[idx])

                print 'not able to generate export for course', course
            else:
                download_progress = requests.get(
                    url, headers=config['headers']).json()

                bar.label = "Checking Export Status: {} {}% ".format(
                    course[course_id_field], download_progress['completion'])
                if download_progress['workflow_state'] not in [
                        'queued', 'running'
                ]:
                    if download_progress['workflow_state'] == 'completed':
                        url = "https://{domain}/api/v1/courses/{}/content_exports/{}".format(
                            course[course_id_field], res['id'], **config)
                        export_info = requests.get(
                            url, headers=config['headers']).json()
                        #print 'export_info',export_info
                        export_url = export_info['attachment']['url']
                        course['export_url'] = export_url
                        yield course
                    else:
                        course['export_url'] = None
                    del (export_list[idx])
def runMigrations(copies):
    """Fan the copy jobs in *copies* out across a pool of worker processes.

    Each job is handed to ``massDoCopies`` as a ``(bar, copy)`` tuple sharing
    one progress bar.  ``Pool``, ``num_processes`` and ``massDoCopies`` are
    assumed to be module-level names -- TODO confirm (not visible here).
    """
    pool = Pool(processes=num_processes)
    #copies.reverse()

    bar = Bar()
    #res = pool.map(massDoCopies,((bar,x) for x in copies))
    #for x in copies:
    #  massDoCopies((bar,x))

    # map_async + get(timeout) rather than a plain map: presumably the huge
    # timeout keeps get() interruptible so Ctrl-C reaches this process (a
    # common Python 2 workaround for Pool.map swallowing KeyboardInterrupt).
    res = pool.map_async(massDoCopies, ((bar, x) for x in copies))
    stats = []
    try:
        stats.append(res.get(0xFFFF))
    except KeyboardInterrupt:
        # User abort: report and bail out without waiting for workers.
        print 'kill processes'
        #pool.terminate()
        exit()
    except TypeError, err:
        # NOTE(review): unclear which call raises TypeError here; it is
        # logged and deliberately ignored.
        print 'err', err
        pass
# Example 7
def runMigrations(copies):
    """Distribute every copy job in *copies* to ``massDoCopies`` via a
    process pool, sharing a single progress bar across the jobs."""
    progress = Bar()
    workers = Pool(processes=num_processes)
    jobs = ((progress, item) for item in copies)
    workers.map(massDoCopies, jobs)
# Example 8
 def __init__(self, length):
     """Set up a '='-filled progress bar expecting *length* units."""
     # No progress recorded yet.
     self.status = 0
     self.bar = Bar(expected_size=length, filled_char='=')