Example No. 1
def example8():
    pbar = ProgressBar(max_value=8)  # Progressbar can't guess max_value.
    for i in pbar((i for i in range(8))):
        time.sleep(0.001)
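
When the total really is unknown, progressbar2 (if that is the library in use) can render an unbounded bar instead of requiring max_value; a minimal sketch, assuming the progressbar2 API:

import time
import progressbar

# Sketch: drive the bar manually when the iterable has no known length
# (progressbar.UnknownLength is a progressbar2 feature).
pbar = progressbar.ProgressBar(max_value=progressbar.UnknownLength)
for i, _ in enumerate(x for x in range(8)):
    time.sleep(0.001)
    pbar.update(i + 1)
pbar.finish()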
Example No. 2
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon May 11 13:06:40 2020

@author: mac
"""
import numpy as np
import random
import pandas as pd
from progressbar import ProgressBar

pbar = ProgressBar()

#tasks to do:
#display board()
#Choose move
#play move
#Choose symbols
memory = pd.read_csv('memory.csv')
memory = memory.set_index('state')
if 'Unnamed: 0' in memory.columns:
    memory = memory.drop('Unnamed: 0', axis=1)
#state = '_+_+_+_+_+_+ + + '
#memory = pd.DataFrame(
#   {'TL' : [0], 'TC' : [0], 'TR' : [0],
#  'ML' : [0], 'MC' : [0], 'MR' : [0],
#   'BL' : [0], 'BC' : [0], 'BR' : [0]
#   },
#   index = [state]
#)
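
If memory.csv does not exist yet, the commented-out schema above suggests how it could be bootstrapped; a hypothetical sketch, assuming one row per board state and one column per cell:

import os
import pandas as pd

# Hypothetical bootstrap for a missing memory.csv, mirroring the
# commented-out schema above.
if not os.path.exists('memory.csv'):
    state = '_+_+_+_+_+_+ + + '
    pd.DataFrame(
        {'TL': [0], 'TC': [0], 'TR': [0],
         'ML': [0], 'MC': [0], 'MR': [0],
         'BL': [0], 'BC': [0], 'BR': [0]},
        index=pd.Index([state], name='state')
    ).to_csv('memory.csv')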
Example No. 3
def color_images_full(model, b_size=32):
    """
    Function that colors images with approaches on full images.
    Function is used on reg-full and reg-full-vgg approaches.

    Parameters
    ----------
    model : keras.engine.training.Model
        Model for image colorization
    b_size : int
        Size of batch of images
    """
    abs_file_path = get_abs_path(data_origin)
    images = get_image_list(abs_file_path)

    # get list of images to color
    num_of_images = len(images)

    #progress bar
    pbar = ProgressBar(maxval=num_of_images,
                       widgets=[Percentage(), ' ',
                                Bar(), ' ',
                                ETA()])
    pbar.start()

    # for each batch
    for batch_n in range(int(math.ceil(num_of_images / b_size))):
        # the last batch may be smaller than b_size
        _b_size = min(b_size, num_of_images - batch_n * b_size)

        # load images
        original_size_images = []
        all_images_l = np.zeros((_b_size, 224, 224, 1))
        for i in range(_b_size):
            # get image
            image_lab = load_images(
                os.path.join(abs_file_path, images[batch_n * b_size + i]))
            original_size_images.append(image_lab[:, :, 0])
            image_lab_resized = resize_image_lab(image_lab, (224, 224))
            all_images_l[i, :, :, :] = image_lab_resized[:, :, 0][:, :,
                                                                  np.newaxis]

        # prepare images for a global network
        all_vgg = np.zeros((_b_size, 224, 224, 3))
        for i in range(_b_size):
            all_vgg[i, :, :, :] = np.tile(all_images_l[i], (1, 1, 1, 3))

        # color
        if model.name == "reg_full_vgg":  # vgg has no global network
            color_im = model.predict(all_vgg, batch_size=b_size)
        else:
            color_im = model.predict([all_images_l, all_vgg],
                                     batch_size=b_size)

        # save all images
        abs_save_path = get_abs_path(data_destination)
        for i in range(_b_size):
            # to rgb
            original_im_bw = original_size_images[i]
            h, w = original_im_bw.shape

            # workaround for not suitable shape while resizing
            small_images = np.concatenate((all_images_l[i], color_im[i]),
                                          axis=2)
            colored_im = resize_image_lab(small_images, (h, w))

            lab_im = np.concatenate(
                (original_im_bw[:, :, np.newaxis], colored_im[:, :, 1:]),
                axis=2)
            im_rgb = color.lab2rgb(lab_im)

            # save
            scipy.misc.toimage(im_rgb, cmin=0.0,
                               cmax=1.0).save(abs_save_path + model.name +
                                              "_" +
                                              images[batch_n * b_size + i])

        # update progress bar
        pbar.update(min((batch_n + 1) * b_size, num_of_images))

    # stop progress bar
    pbar.finish()
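
The batching-plus-progress pattern above, stripped of the colorization specifics; a self-contained sketch on dummy data:

import math
import time
from progressbar import Bar, ETA, Percentage, ProgressBar

items = list(range(100))
b_size = 32
pbar = ProgressBar(maxval=len(items),
                   widgets=[Percentage(), ' ', Bar(), ' ', ETA()]).start()
for batch_n in range(int(math.ceil(len(items) / b_size))):
    batch = items[batch_n * b_size:(batch_n + 1) * b_size]
    time.sleep(0.01)  # stand-in for per-batch work
    # report items processed so far, capped at the total
    pbar.update(min((batch_n + 1) * b_size, len(items)))
pbar.finish()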
Example No. 4
    shutil.copy2(
        Path(project_dir) / 'libcblite.so.sym', f'./libcblite-{args.version}')
    os.chdir(workspace)

    package_name = f'{args.product}-{args.edition}-{args.version}-{args.bld_num}-{args.os}.tar.gz'
    print()
    print(f"=== Creating {workspace}/{package_name} package ===")
    print()

    os.chdir(str(workspace_path / 'build_release'))
    shutil.copy2(
        workspace_path / 'product-texts' / 'mobile' / 'couchbase-lite' /
        'license' / f'LICENSE_{args.edition}.txt',
        f'libcblite-{args.version}/LICENSE.txt')

    pbar = ProgressBar(maxval=3)
    pbar.start()
    with tarfile.open(f'{workspace}/{package_name}', 'w:gz') as tar:
        tar.add(f'libcblite-{args.version}/include', recursive=True)
        pbar.update(1)
        tar.add(f'libcblite-{args.version}/lib', recursive=True)
        pbar.update(2)
        tar.add(f'libcblite-{args.version}/LICENSE.txt')
        pbar.update(3)
        pbar.finish()

    symbols_package_name = f'{args.product}-{args.edition}-{args.version}-{args.bld_num}-{args.os}-symbols.tar.gz'
    with tarfile.open(f'{workspace}/{symbols_package_name}', 'w:gz') as tar:
        tar.add(f'libcblite-{args.version}/libcblite.so.sym')

    os.chdir(workspace)
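
Since ProgressBar also supports the context-manager protocol (see Example No. 8 below), the packaging steps could guarantee finish() even on error; a sketch with placeholder paths:

import tarfile
from progressbar import ProgressBar

# Sketch: the same three packaging steps, with the bar as a context
# manager so finish() runs even if tar.add() raises. Paths are
# placeholders, not the real build layout.
members = ['libcblite-1.0.0/include', 'libcblite-1.0.0/lib',
           'libcblite-1.0.0/LICENSE.txt']
with ProgressBar(maxval=len(members)) as pbar:
    with tarfile.open('package.tar.gz', 'w:gz') as tar:
        for step, member in enumerate(members, start=1):
            tar.add(member, recursive=True)
            pbar.update(step)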
Example No. 5
    def scan(self,
             clock_en1=True,
             pixels=512,
             clock_en2=True,
             trigger_en=True,
             measure_direction=True,
             offset=15,
             mask_steps=4,
             PrmpVbpDac=80,
             vthin2Dac=0,
             columns=[True] * 16,
             vthin1Dac=80,
             preCompVbnDac=50,
             mask_filename='',
             **kwargs):
        '''Scan loop
        Parameters
        ----------
        mask_steps : int
            Number of mask steps.
        pixels : int
            Number of pixels to enable.
        '''
        inj_factor = 1.0
        INJ_LO = 0.0
        #try:
        #dut = Dut(ScanBase.get_basil_dir(self)+'/examples/lab_devices/agilent33250a_pyserial.yaml')
        #dut.init()
        #logging.info('Connected to '+str(dut['Pulser'].get_info()))
        #except RuntimeError:
        #INJ_LO = 0.0#0.2
        #inj_factor = 2.0
        #logging.info('External injector not connected. Switch to internal one')
        #self.dut['INJ_LO'].set_voltage(INJ_LO, unit='V')
        offset = offset - 1
        vthin1Dac = vthin1Dac + 1

        self.dut['global_conf']['PrmpVbpDac'] = 80
        self.dut['global_conf']['vthin1Dac'] = 255
        self.dut['global_conf']['vthin2Dac'] = 0
        self.dut['global_conf']['vffDac'] = 24
        self.dut['global_conf']['PrmpVbnFolDac'] = 51
        self.dut['global_conf']['vbnLccDac'] = 1
        self.dut['global_conf']['compVbnDac'] = 25
        self.dut['global_conf']['preCompVbnDac'] = 50

        self.dut.write_global()
        self.dut['control']['RESET'] = 0b01
        self.dut['control']['DISABLE_LD'] = 0
        self.dut['control']['PIX_D_CONF'] = 0
        self.dut['control'].write()

        self.dut['control']['CLK_OUT_GATE'] = 1
        self.dut['control']['CLK_BX_GATE'] = 1
        self.dut['control'].write()
        time.sleep(0.1)

        self.dut['control']['RESET'] = 0b11
        self.dut['control'].write()

        self.dut['global_conf']['OneSr'] = 1

        self.dut['global_conf']['TestHit'] = 0
        self.dut['global_conf']['SignLd'] = 0
        self.dut['global_conf']['InjEnLd'] = 0
        self.dut['global_conf']['TDacLd'] = 0
        self.dut['global_conf']['PixConfLd'] = 0
        self.dut.write_global()

        #self.dut['global_conf']['OneSr'] = 0  #all multi columns in parallel
        self.dut['global_conf']['ColEn'][:] = bitarray.bitarray([True] *
                                                                16)  #(columns)
        self.dut['global_conf']['ColSrEn'][:] = bitarray.bitarray([True] * 16)
        self.dut.write_global()

        self.dut['pixel_conf'].setall(False)
        self.dut.write_pixel()
        self.dut['global_conf']['InjEnLd'] = 1
        self.dut.write_global()
        self.dut['global_conf']['InjEnLd'] = 0

        mask_en = np.full([64, 64], False, dtype=bool)  # np.bool was removed from NumPy
        mask_tdac = np.full([64, 64], 16, dtype=np.uint8)
        ###
        if pixels > 1 and pixels <= 64:
            mask_en[1:2, :] = True
        ###
        if pixels == 1:
            mask_en[1][1] = True

        if mask_filename:
            logging.info('Using pixel mask from file: %s', mask_filename)

            with tb.open_file(mask_filename, 'r') as in_file_h5:
                mask_tdac = in_file_h5.root.scan_results.tdac_mask[:]
                if pixels > 64:
                    mask_en = in_file_h5.root.scan_results.en_mask[:]

        self.dut.write_en_mask(mask_en)
        self.dut.write_tune_mask(mask_tdac)
        self.dut.write_global()

        self.dut['global_conf']['OneSr'] = 0
        self.dut.write_global()

        self.dut['trigger'].set_delay(10000)  #trigger for no injection 10000
        self.dut['trigger'].set_width(16)  #16
        self.dut['trigger'].set_repeat(1)
        self.dut['trigger'].set_en(False)

        logging.debug('Configure TDC')
        self.dut['tdc']['RESET'] = True
        self.dut['tdc']['EN_TRIGGER_DIST'] = True
        self.dut['tdc']['ENABLE_EXTERN'] = False
        self.dut['tdc']['EN_ARMING'] = False
        self.dut['tdc']['EN_INVERT_TRIGGER'] = False
        self.dut['tdc']['EN_INVERT_TDC'] = False
        self.dut['tdc']['EN_WRITE_TIMESTAMP'] = True

        lmask = [1] + ([0] * (mask_steps - 1))
        lmask = lmask * ((64 * 64) // mask_steps + 1)  # integer division for Python 3
        lmask = lmask[:64 * 64]
        ranges = np.arange(0, (vthin1Dac - offset), 1)
        n = 0
        for ni in ranges:
            time.sleep(0.5)
            bv_mask = bitarray.bitarray(lmask)
            if measure_direction:
                vthin1Dac1 = vthin1Dac - n
            else:
                vthin1Dac1 = n + offset
            with self.readout(scan_param_id=vthin1Dac1):  #vthin1Dac-n):
                logging.info('Scan Parameter: %f (%d of %d)', vthin1Dac1,
                             n + 1, vthin1Dac - offset)
                pbar = ProgressBar(maxval=mask_steps).start()

                self.dut['global_conf']['vthin1Dac'] = 255
                self.dut['global_conf']['preCompVbnDac'] = 50
                self.dut['global_conf']['vthin2Dac'] = 0
                self.dut['global_conf']['PrmpVbpDac'] = 80
                self.dut.write_global()
                time.sleep(0.1)

                self.dut['pixel_conf'][:] = bv_mask
                self.dut.write_pixel_col()
                self.dut['global_conf']['InjEnLd'] = 0  #1
                #self.dut['global_conf']['PixConfLd'] = 0b11
                self.dut.write_global()

                bv_mask[1:] = bv_mask[0:-1]
                bv_mask[0] = 0
                self.dut['global_conf']['vthin1Dac'] = vthin1Dac1
                self.dut['global_conf']['preCompVbnDac'] = preCompVbnDac
                self.dut['global_conf']['vthin2Dac'] = vthin2Dac
                self.dut['global_conf']['PrmpVbpDac'] = PrmpVbpDac
                self.dut.write_global()
                time.sleep(0.1)

                #while not self.dut['inj'].is_done():
                #pass

                if trigger_en:
                    self.dut['trigger'].set_repeat(0)
                if not clock_en1:
                    self.dut['control']['CLK_BX_GATE'] = 0
                    self.dut['control'].write()
                if not clock_en2:
                    self.dut['control']['CLK_OUT_GATE'] = 0
                    self.dut['control'].write()
                if trigger_en:
                    self.dut['trigger'].start()

                self.dut['tdc'].ENABLE = True

                time.sleep(5)  #10

                self.dut['tdc'].ENABLE = False

                #n=0
                #if trigger_en == True:
                #while not self.dut['trigger'].is_done():
                #time.sleep(1)
                #n=n+1
                #print(self.dut['trigger'].is_done(), n, "seconds")

                if not clock_en1:
                    self.dut['control']['CLK_BX_GATE'] = 1
                    self.dut['control'].write()
                if not clock_en2:
                    self.dut['control']['CLK_OUT_GATE'] = 1
                    self.dut['control'].write()
                #while not self.dut['trigger'].is_done():
                #pass
                n = n + 1

        scan_results = self.h5_file.create_group("/", 'scan_results',
                                                 'Scan Masks')
        self.h5_file.create_carray(scan_results, 'tdac_mask', obj=mask_tdac)
        self.h5_file.create_carray(scan_results, 'en_mask', obj=mask_en)
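
The injection-mask bookkeeping in the loop above is the only part that is not hardware-specific; a standalone sketch of the pattern:

import bitarray

# Sketch: enable every mask_steps-th pixel, then shift the pattern one
# position per scan step, as the loop above does with bv_mask.
mask_steps = 4
n_pixels = 64 * 64
lmask = ([1] + [0] * (mask_steps - 1)) * (n_pixels // mask_steps + 1)
bv_mask = bitarray.bitarray(lmask[:n_pixels])
for step in range(mask_steps):
    # ... write bv_mask to the pixel configuration and measure here ...
    bv_mask[1:] = bv_mask[0:-1]  # shift by one pixel
    bv_mask[0] = 0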
Example No. 6
def upload_files(binary_filename, updown_client):
    """Upload a binary file to the Store.

    Submit a file to the Store upload service and return the
    corresponding upload_id.
    """
    result = {'success': False, 'errors': []}

    binary_file = None
    try:
        binary_file_size = os.path.getsize(binary_filename)
        binary_file = open(binary_filename, 'rb')
        encoder = MultipartEncoder(
            fields={
                'binary': ('filename', binary_file, 'application/octet-stream')
            }
        )

        # Create a progress bar that looks like: Uploading foo [==  ] 50%
        progress_bar = ProgressBar(
            widgets=['Uploading {} '.format(binary_filename),
                     Bar(marker='=', left='[', right=']'), ' ', Percentage()],
            maxval=os.path.getsize(binary_filename))
        progress_bar.start()
        # Print a newline so the progress bar has some breathing room.
        logger.info('')

        # Create a monitor for this upload, so that progress can be displayed
        monitor = MultipartEncoderMonitor(
            encoder, functools.partial(_update_progress_bar, progress_bar,
                                       binary_file_size))

        # Begin upload
        response = updown_client.upload(monitor)

        # Make sure progress bar shows 100% complete
        progress_bar.finish()

        if response.ok:
            response_data = response.json()
            result.update({
                'success': response_data.get('successful', True),
                'upload_id': response_data['upload_id'],
                'binary_filesize': os.path.getsize(binary_filename),
                'source_uploaded': False,
            })
        else:
            logger.error(
                'There was an error uploading the package.\n'
                'Reason: %s\n'
                'Text: %s',
                response.reason, response.text)
            result['errors'] = [response.text]
    except Exception as err:
        logger.exception(
            'An unexpected error was found while uploading files.')
        result['errors'] = [str(err)]
    finally:
        # Close the file if it was opened
        if binary_file is not None:
            binary_file.close()

    return result
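
_update_progress_bar is referenced via functools.partial but not shown here; a minimal sketch consistent with that call and with requests_toolbelt's monitor API (the callback receives the monitor, which exposes bytes_read):

def _update_progress_bar(progress_bar, total_file_size, monitor):
    # Hypothetical helper: bytes_read includes the multipart framing,
    # so it can slightly exceed the raw file size; cap it at maxval.
    progress_bar.update(min(monitor.bytes_read, total_file_size))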
Example No. 7
def rest_allspec(overwrite=False):
    """Load and interpolate *ALL* HST FOS/GHRS starburst spectra
    on to the same rest-frame wavelength grid
    """

    path = join(datapath.hstfos_path(), _subpath, 'corrected')

    # check output files
    bands = _allinone_rest_bands
    for thisband in bands:
        # check outfiles
        outfile = allinone_rest_filename(thisband)
        if isfile(outfile) and not overwrite:
            print("File {0} exists. Use overwrite to overwrite it.".format(outfile))
            return -1
        # print "Will write into these files: {0}".format(outfile)

    # read in the starburst catalog
    objs_ori = starburst_readin()
    nobj = objs_ori.size

    # make a temporary new catalog
    objs_dtype = [('RA', 'f8'),
                  ('DEC', 'f8'),
                  ('Z', 'f8'),
                  ('gal', 'S15')]
    objs = np.zeros(nobj, dtype=objs_dtype)
    objs['RA'] = 0.
    objs['DEC'] = 0.
    objs['Z'] = 0.
    objs['gal'] = objs_ori['gal']

    # read in master wavelength grid
    master_wave = (aio.allinone_wave_readin())[0]['WAVE']
    master_loglam = np.log10(master_wave)
    nwave = master_wave.size

    # initialization, nobj second dimension because of NMF traditions
    rest_allflux = np.zeros((nwave, nobj))
    rest_allivar = np.zeros((nwave, nobj))

    # Wavelength
    wave_pos = np.array([1000., 3300.])
    rest_loc = np.searchsorted(master_wave, wave_pos)
    newloglam = master_loglam[rest_loc[0]:rest_loc[1]]
    flux = np.zeros((objs.size, newloglam.size))
    ivar = np.zeros((objs.size, newloglam.size))

    pbar = ProgressBar(maxval=nobj).start()
    # Progress bar
    for iobj, thisobj in enumerate(objs):
        pbar.update(iobj)
        thisdata = readspec_rest(thisobj)
        inloglam = np.log10(thisdata['wave'])
        influx = thisdata['flux']
        inivar = 1./np.power(thisdata['error'], 2)
        (rest_allflux[rest_loc[0]:rest_loc[1], iobj],
         rest_allivar[rest_loc[0]:rest_loc[1], iobj]) = specutils.interpol_spec(
             inloglam, influx, inivar, newloglam)

    #Progress bar
    pbar.finish()

    # write out
    print "Now I am writing everything out..."
    allinone_rest_writeout(objs, master_wave, rest_allflux, rest_allivar, overwrite=overwrite)
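
specutils.interpol_spec is project code that is not shown; a rough, hypothetical stand-in that linearly interpolates flux and inverse variance onto the new log-wavelength grid and zero-weights points outside the input coverage:

import numpy as np

def interpol_spec_sketch(inloglam, influx, inivar, newloglam):
    # Hypothetical stand-in; the real routine may propagate the
    # variance more carefully than plain linear interpolation.
    flux = np.interp(newloglam, inloglam, influx)
    ivar = np.interp(newloglam, inloglam, inivar)
    outside = (newloglam < inloglam[0]) | (newloglam > inloglam[-1])
    ivar[outside] = 0.
    return flux, ivar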
Example No. 8
def with_example18():
    with ProgressBar(maxval=10, term_width=20, left_justify=False) as \
            progress:
        assert progress._env_size() is not None
        for i in range(10):
            progress.update(i)
Example No. 9
def with_example19():
    with ProgressBar(maxval=1) as progress:
        try:
            progress.update(2)
        except ValueError:
            pass
Example No. 10
def example12():
    widgets = ['Balloon: ', AnimatedMarker(markers='.oO@* ')]
    pbar = ProgressBar(widgets=widgets)
    for i in pbar((i for i in range(24))):
        time.sleep(0.001)
Example No. 11
def example16():
    widgets = [FormatLabel('Bouncer: value %(value)d - '), BouncingBar()]
    pbar = ProgressBar(widgets=widgets)
    for i in pbar((i for i in range(100))):
        time.sleep(0.001)
Example No. 12
def example11():
    widgets = [FormatLabel('Processed: %(value)d lines (in: %(elapsed)s)')]
    pbar = ProgressBar(widgets=widgets)
    for i in pbar((i for i in range(15))):
        time.sleep(0.001)
Example No. 13
def example10():
    widgets = ['Processed: ', Counter(), ' lines (', Timer(), ')']
    pbar = ProgressBar(widgets=widgets)
    for i in pbar((i for i in range(15))):
        time.sleep(0.001)
Example No. 14
def example9():
    pbar = ProgressBar(widgets=['Working: ', AnimatedMarker()])
    for i in pbar((i for i in range(5))):
        time.sleep(0.001)
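
Examples No. 10 through No. 14 each demonstrate one widget; they compose freely into a single bar, for instance:

import time
from progressbar import (AnimatedMarker, Bar, ETA, FormatLabel,
                         Percentage, ProgressBar, Timer)

# Sketch: the widgets from the preceding examples combined in one bar.
widgets = [AnimatedMarker(), ' ',
           FormatLabel('Processed: %(value)d lines ('), Timer(), ') ',
           Bar(), ' ', Percentage(), ' ', ETA()]
pbar = ProgressBar(widgets=widgets, maxval=15).start()
for i in range(15):
    time.sleep(0.001)
    pbar.update(i + 1)
pbar.finish()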
Example No. 15
else:
    fname = sys.argv[1]
    if len(sys.argv) >= 4:
        acctid = sys.argv[2]
        secret = sys.argv[3]

if acctid is None:
    acctid = input("AWS_ACCESS_KEY_ID: ").strip()

if secret is None:
    secret = input("AWS_SECRET_ACCESS_KEY: ").strip()

bucket = "kroll.appcelerator.com"
key = os.path.basename(fname)
conn = S3Connection(acctid, secret)
bucket = conn.get_bucket(bucket)
k = bucket.new_key(key)

pbar = ProgressBar().start()
try:

    def progress_callback(current, total):
        pbar.update(int(100 * (float(current) / float(total))))

    k.set_contents_from_filename(fname,
                                 cb=progress_callback,
                                 num_cb=100,
                                 policy='public-read')
finally:
    pbar.finish()
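
The callback above rescales bytes to a percentage because a bare ProgressBar() defaults to a 0-100 range in the classic progressbar module; sizing the bar to the file instead lets the callback pass byte counts through unchanged. A sketch with a placeholder path:

import os
from progressbar import ProgressBar

fname = 'payload.bin'  # placeholder path
pbar = ProgressBar(maxval=os.path.getsize(fname)).start()

def progress_callback(current, total):
    # current is the byte count boto reports; no rescaling needed
    pbar.update(current)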
Example No. 16
def with_example20():
    progress = ProgressBar(maxval=1)
    try:
        progress.update(1)
    except RuntimeError:
        pass
Example No. 17
def _upload_files(sca_client, name, data, result):
    data['name'] = name
    response = sca_client.snap_upload(data)
    if response.ok:
        response_data = response.json()
        status_url = response_data['status_url']

        # This is just a waiting game, so we'll show an indeterminate
        # AnimatedMarker for it.
        progress_indicator = ProgressBar(
            widgets=['Checking package status... ', AnimatedMarker()],
            maxval=7)
        progress_indicator.start()

        # Execute the package scan in another thread so we can update the
        # progress indicator.
        with ThreadPoolExecutor(max_workers=1) as executor:
            future = executor.submit(get_scan_data, sca_client, status_url)

            count = 0
            while not future.done():
                # Annoyingly, there doesn't seem to be a way to actually
                # make a progress indicator that will go on forever, so we
                # need to restart this one each time we reach the end of
                # its animation.
                if count >= 7:
                    progress_indicator.start()
                    count = 0

                # Actually update the progress indicator
                progress_indicator.update(count)
                count += 1
                time.sleep(0.15)

            # Grab the results from the package scan
            completed, data = future.result()

        progress_indicator.finish()

        if completed:
            message = data.get('message', '')
            if not message:
                result['success'] = True
                result['revision'] = data.get('revision')
            else:
                result['errors'] = [message]
        else:
            result['errors'] = [
                'Package scan took too long.',
            ]
            status_web_url = response_data.get('web_status_url')
            if status_web_url:
                result['errors'].append(
                    'Please check the status later at: {}.'.format(
                        status_web_url),
                )
        result['application_url'] = data.get('application_url', '')
    else:
        logger.error(
            'There was an error uploading the application.\n'
            'Reason: {}\n'
            'Text: {}'.format(response.reason, response.text))
        result['errors'] = [response.text]
    return result
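
The restart-the-spinner trick above generalizes to any background task; a self-contained sketch of the same polling pattern:

import time
from concurrent.futures import ThreadPoolExecutor
from progressbar import AnimatedMarker, ProgressBar

def wait_with_spinner(fn, *args, cycle=7, interval=0.15):
    # Sketch: spin an indeterminate marker while fn runs in another
    # thread, restarting the bar whenever the animation cycle ends.
    bar = ProgressBar(widgets=['Working... ', AnimatedMarker()],
                      maxval=cycle)
    bar.start()
    with ThreadPoolExecutor(max_workers=1) as executor:
        future = executor.submit(fn, *args)
        count = 0
        while not future.done():
            if count >= cycle:
                bar.start()
                count = 0
            bar.update(count)
            count += 1
            time.sleep(interval)
    bar.finish()
    return future.result()

result = wait_with_spinner(time.sleep, 1.0)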
Example No. 18
def with_example21b():
    with ProgressBar(maxval=1, redirect_stderr=True) as progress:
        print('', file=sys.stderr)
        progress.update(0)
Example No. 19
            # fill inbox
            done = s.query(tables.clangs.program_id) \
              .filter(tables.clangs.clang == args.clang)
            todo = s.query(tables.programs) \
              .filter(~tables.programs.id.in_(done)) \
              .order_by(tables.programs.date_added) \
              .limit(BATCH_SIZE)

            for program in todo:
                inbox.append(program)

        for tables in tablesets:
            if args.recheck:
                q = s.query(tables.clang_stderrs)
                for stderr in ProgressBar(max_value=q.count())(q):
                    assertion_ = util.get_assertion(s, tables.clang_assertions,
                                                    stderr.stderr)
                    unreachable_ = util.get_unreachable(
                        s, tables.clang_unreachables, stderr.stderr)
                    terminate_ = util.get_terminate(s, tables.clang_terminates,
                                                    stderr.stderr)

                    errs = sum(1 if x else 0
                               for x in [assertion_, unreachable_, terminate_])
                    if errs > 1:
                        raise LookupError(
                            f"Multiple errors types found in: {stderr}\n\n" +
                            f"Assertion: {assertion_}\n" +
                            f"Unreachable: {unreachable_}\n" +
                            f"Terminate: {terminate_}")
Example No. 20
def with_example22():
    try:
        with ProgressBar(maxval=-1) as progress:
            progress.start()
    except ValueError:
        pass
Example No. 21
st_dt = int(input("Set starting date\n"))
end_dt = int(input("Set ending date\n"))

# Create a folder to save Mack270 files
try:
    os.mkdir(outpath)
except OSError:
    print("Creation of the directory %s failed" % outpath)
else:
    print("Successfully created the directory %s" % outpath)

# Read file names in the llc270 directory
# (Make sure there are only llc270 files to reshape in the folder)
yer_ls = np.arange(st_dt, end_dt + 1)
itr_nb = len(yer_ls)
itr = 0

# Read and reshape llc270 file
with ProgressBar(max_value=itr_nb) as bar:
    for i in range(itr_nb):
        # Leap year verification
        ndy = leap_year(yer_ls[i])
        # Read and zoom llc270 files
        Mac_data = RnConv(inpath + fnm + str(yer_ls[i]), ndy)
        # Save JRA55 Mack270 files
        bin_save(outpath + fnm_out + str(yer_ls[i]), Mac_data)
        # Update progressbar
        time.sleep(0.1)
        itr += 1
        bar.update(itr)
Example No. 22
def generate_subtitles(  # pylint: disable=too-many-locals,too-many-arguments
    source_path,
    output=None,
    concurrency=DEFAULT_CONCURRENCY,
    src_language=DEFAULT_SRC_LANGUAGE,
    dst_language=DEFAULT_DST_LANGUAGE,
    subtitle_file_format=DEFAULT_SUBTITLE_FORMAT,
    api_key=None,
):
    """
    Given an input audio/video file, generate subtitles in the specified language and format.
    """
    audio_filename, audio_rate = extract_audio(source_path)

    regions = find_speech_regions(audio_filename)

    pool = multiprocessing.Pool(concurrency)
    converter = FLACConverter(source_path=audio_filename)
    recognizer = SpeechRecognizer(language=src_language,
                                  rate=audio_rate,
                                  api_key=GOOGLE_SPEECH_API_KEY)

    transcripts = []
    if regions:
        try:
            widgets = [
                "Converting speech regions to FLAC files: ",
                Percentage(), ' ',
                Bar(), ' ',
                ETA()
            ]
            pbar = ProgressBar(widgets=widgets, maxval=len(regions)).start()
            extracted_regions = []
            for i, extracted_region in enumerate(pool.imap(converter,
                                                           regions)):
                extracted_regions.append(extracted_region)
                pbar.update(i)
            pbar.finish()

            widgets = [
                "Performing speech recognition: ",
                Percentage(), ' ',
                Bar(), ' ',
                ETA()
            ]
            pbar = ProgressBar(widgets=widgets, maxval=len(regions)).start()

            for i, transcript in enumerate(
                    pool.imap(recognizer, extracted_regions)):
                transcripts.append(transcript)
                pbar.update(i)
            pbar.finish()

            if src_language.split("-")[0] != dst_language.split("-")[0]:
                if api_key:
                    google_translate_api_key = api_key
                    translator = Translator(dst_language,
                                            google_translate_api_key,
                                            dst=dst_language,
                                            src=src_language)
                    prompt = "Translating from {0} to {1}: ".format(
                        src_language, dst_language)
                    widgets = [prompt, Percentage(), ' ', Bar(), ' ', ETA()]
                    pbar = ProgressBar(widgets=widgets,
                                       maxval=len(regions)).start()
                    translated_transcripts = []
                    for i, transcript in enumerate(
                            pool.imap(translator, transcripts)):
                        translated_transcripts.append(transcript)
                        pbar.update(i)
                    pbar.finish()
                    transcripts = translated_transcripts
                else:
                    print(
                        "Error: Subtitle translation requires specified Google Translate API key. "
                        "See --help for further information.")
                    return 1

        except KeyboardInterrupt:
            pbar.finish()
            pool.terminate()
            pool.join()
            print("Cancelling transcription")
            raise

    timed_subtitles = [(r, t) for r, t in zip(regions, transcripts) if t]
    formatter = FORMATTERS.get(subtitle_file_format)
    formatted_subtitles = formatter(timed_subtitles)

    dest = output

    if not dest:
        base = os.path.splitext(source_path)[0]
        dest = "{base}.{format}".format(base=base, format=subtitle_file_format)

    with open(dest, 'wb') as output_file:
        output_file.write(formatted_subtitles.encode("utf-8"))

    os.remove(audio_filename)

    return dest
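
The convert/recognize/translate stages above repeat the same imap-with-progress loop three times; it could be factored into a helper, sketched here under the same widget conventions:

from progressbar import Bar, ETA, Percentage, ProgressBar

def imap_with_progress(pool, func, items, label):
    # Sketch of the repeated pattern: drain pool.imap while advancing
    # a Percentage/Bar/ETA progress bar sized to the input.
    widgets = [label, Percentage(), ' ', Bar(), ' ', ETA()]
    pbar = ProgressBar(widgets=widgets, maxval=len(items)).start()
    results = []
    for i, result in enumerate(pool.imap(func, items)):
        results.append(result)
        pbar.update(i + 1)
    pbar.finish()
    return results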
Example No. 23
    def progress_bar(self, total):
        widget = ["AutoCutMovie: ", Percentage(), Bar("#"), Timer(), " ", ETA()]
        bar = ProgressBar(widgets=widget, maxval=total).start()
        return bar
Example No. 24
def svg_heatmap(data, filename, row_labels=None, box_size=4,
                index=None,
                cmap=ISH, norm_rows_by=None, draw_row_labels=False,
                col_sep='', box_height=None, total_width=None,
                draw_box=False, draw_name=False, data_names=None,
                progress_bar = False,
                max_width=np.inf,
                spacers=None,
                cmap_by_prefix=None,
                split_columns=False,
                vspacer=30,
                hatch_nan=True, hatch_size=20,
                first_col='', last_col=''):
    """
    Draw heatmap as an SVG file stored in filename

    *data* can be either a 2D array-like type (list of lists, numpy array,
    pandas DataFrame, etc), or a tuple of 2D array-likes, in which case a
    separator will be added between each one in the output

    *cmap* is a matplotlib-like colormap (i.e. a callable that expects floats
    in the range 0.0-1.0.), or an iterable of the same length as the tuple
    *data* containing colormaps

    *row_labels* can be supplied, otherwise they will be detected from the
    first item in *data*, if available, and if not they will be blank.

    If *total_width* is supplied, width of each dataset in *data* will be
    scaled to that constant. If *box_height* is supplied, the height of each
    row will be *box_height*, otherwise it will be equal to the width of each
    element. If neither are supplied, elements will be squares equal to
    *box_size*. IT IS STRONGLY RECOMMENDED that, if supplying *total_width*,
    *box_height* also be specified, but this is not enforced.

    *draw_row_labels*, if True, will label the rows on the right hand side. As
    of 2013/09/03, this won't scale the SVG properly, so including the
    resulting file in an html element won't display properly.

    *spacers* is the distance between adjacent datasets.  Can either be a
    number, in which case it will apply to all datasets, or an iterable for
    different distances. If the iterable is shorter than the number of
    datasets, the last value will be repeated.

    """
    import svgwrite as svg
    import pandas as pd

    if split_columns and isinstance(data, pd.DataFrame):
        from Utils import sel_startswith
        colnames = list(sorted(
            {col.split(col_sep)[0] for col in data.columns}))
        data = tuple(
            data.select(**sel_startswith(colname)) for colname in colnames
        )
    elif not isinstance(data, tuple):
        data = (data,)

    rows, cols = np.shape(data[0])
    if index is not None:
        rows = len(index)
    if box_height is None:
        box_height = box_size

    if total_width is not None and max_width is not np.inf:
        dwg = svg.Drawing(filename,
                          size=(max_width,
                                np.ceil((len(data) * total_width)/max_width)
                                * (box_height+vspacer)))
    else:
        dwg = svg.Drawing(filename)
    dwg.add(svg.base.Title(path.basename(filename)))

    pat = dwg.pattern(id='hatch', insert=(0, 0), size=(hatch_size, hatch_size),
                      patternUnits='userSpaceOnUse')
    g = pat.add(dwg.g(style="fill:none; stroke:#B0B0B0; stroke-width:1"))
    g.add(dwg.path(('M0,0', 'l{hatch},{hatch}'.format(hatch=hatch_size))))
    g.add(dwg.path(('M{hatch2},0 l{hatch2},{hatch2}'.format(hatch2=hatch_size/2).split())))
    g.add(dwg.path(('M0,{hatch2} l{hatch2},{hatch2}'.format(hatch2=hatch_size/2).split())))

    dwg.add(pat)

    if row_labels is None:
        if index is not None:
            row_labels = index
        elif hasattr(data[0], 'index'):
            row_labels = data[0].index
        else:
            row_labels = ['' for row in range(rows)]

    if box_height is None:
        box_height = box_size

    if not hasattr(cmap, "__len__"):
        cmap = [cmap for frame in data]

    if data_names is None:
        data_names = ["" for frame in data]

    if len(cmap) != len(data):
        raise ValueError("cmap and data should be the same length")

    if not hasattr(spacers, "__len__"):
        spacers = [spacers]
    else:
        spacers = list(spacers)
    while len(spacers) < len(data):
        spacers.append(spacers[-1])

    if not isinstance(norm_rows_by, tuple):
        norm_rows_by = repeat(norm_rows_by)

    x_start = 0
    y_start = 0
    y_diff = 0
    if progress_bar:
        from progressbar import ProgressBar
        iterator = list(zip(data, cmap, data_names, norm_rows_by, spacers))
        pbar = ProgressBar(maxval=len(iterator) * rows).start()
        pbar_val = 0
    else:
        iterator = zip(data, cmap, data_names, norm_rows_by, spacers)

    for frame, c_cmap, name, normer, spacer in iterator:
        if frame is None:
            if total_width is not None:
                if spacer is None:
                    x_start += total_width * 1.1
                else:
                    x_start += total_width + spacer
            else:
                if spacer is None:
                    x_start += box_size
                else:
                    x_start += spacer
            if x_start > max_width:
                x_start = 0
                y_start += y_diff
            continue
        frame = pd.DataFrame(frame)
        if normer is None:
            norm_data = frame.copy()
        elif normer == 'mean':
            norm_data = frame.divide(frame.dropna(axis=1).mean(axis=1)+10, axis=0)
        elif normer == 'max':
            norm_data = frame.divide(frame.dropna(axis=1).max(axis=1)+10, axis=0)
        elif normer == 'center0':
            norm_data = (0.5 +
                         0.5 * frame.divide(frame.dropna(axis=1).abs().max(axis=1),
                                            axis=0)
                        )
        elif index is not None and hasattr(normer, "loc"):
            norm_data = frame.divide(normer.loc[index], axis=0)
        elif hasattr(normer, "__len__") and len(normer) == rows:
            norm_data = frame.divide(normer, axis=0)

        elif hasattr(normer, "__len__"):
            raise TypeError("norm_rows_by should be the same shape "
                            "as the number of rows")
        else:
            norm_data = frame.divide(normer, axis=0)

        if not c_cmap or str(c_cmap).lower() == 'default':
            c_cmap = ISH

        new_rows, new_cols = np.shape(frame)
        if hasattr(frame, 'index'):
            col_labels = frame.columns
        else:
            col_labels = ['' for col in range(new_cols)]
        if new_rows != rows:
            raise ValueError("All input elements must have the same number of"
                             " rows (and same row meanings --unchecked)")

        if total_width is not None:
            box_size = total_width / float(new_cols)

        for i in range(rows):
            if progress_bar:
                pbar.update(pbar_val)
                pbar_val += 1
            prefix = col_labels[0][:col_labels[0].find(col_sep)]
            if cmap_by_prefix:
                c_cmap = cmap_by_prefix(prefix)
            for j in range(new_cols):
                g = dwg.g()
                g.add(svg.base.Title("{}, {}: {:.2f}".format(row_labels[i],
                                                             col_labels[j],
                                                             frame.iloc[i, j])))
                hatch = not isfinite(norm_data.iloc[i, j])
                if hatch:
                    n = 0
                    norm_data.iloc[i, j] = 0
                    if j > 0:
                        norm_data.iloc[i, j] += norm_data.iloc[i, j-1]
                        n += 1
                    if j + 1 < len(norm_data.columns):
                        norm_data.iloc[i, j] += norm_data.iloc[i, j+1]
                        n += 1
                    norm_data.iloc[i, j] /= n
                g.add(dwg.rect((x_start + box_size*j, y_start + i*box_height),
                               (box_size, box_height),
                               style="fill:#{:02x}{:02x}{:02x}"
                               .format(*[int(255*x) for x in
                                         c_cmap(norm_data.iloc[i, j])])))
                dwg.add(g)
                if hatch_nan and hatch:
                    g.add(dwg.rect((x_start + box_size*j,
                                    y_start + i*box_height),
                                   (box_size, box_height),
                                   style="fill:url(#hatch)"
                                  )
                         )
                col_base = col_labels[j][:col_labels[j].find(col_sep)]
                if col_base != prefix:
                    prefix = col_base
                    if cmap_by_prefix:
                        c_cmap = cmap_by_prefix(prefix)
                    g.add(dwg.line((x_start + box_size * j,
                                    y_start + i * box_height),
                                   (x_start + box_size * j,
                                    y_start + (i + 1) * box_height),
                                   style="stroke-width:{}; stroke:#000000"
                                   .format(.1 * box_size)))
        dwg.add(dwg.text(first_col, (x_start,
                                     y_start + (i + 1) * box_height)))
        dwg.add(dwg.text(last_col, (x_start + (new_cols - 1) * box_size,
                                    y_start + (i + 1) * box_height)))
        if draw_box:
            dwg.add(dwg.rect((x_start, y_start + 0),
                             (new_cols*box_size, rows*box_height),
                             style="stroke-width:1; "
                             "stroke:#000000; fill:none"))
        if draw_name:
            dwg.add(dwg.text(name,
                             (x_start + box_size * new_cols / 2.0,
                              y_start + box_height * (rows) + 13),
                             style="text-anchor: middle;"))

        if total_width is not None:
            if spacer is None:
                x_start += total_width * 1.1
            else:
                x_start += total_width + spacer
        else:
            if spacer is None:
                x_start += new_cols * box_size + box_size
            else:
                x_start += new_cols * box_size + spacer

        y_diff = new_rows * box_height + 30
        if total_width is not None and x_start + total_width >= max_width:
            x_start = 0
            y_start += new_rows*box_height + vspacer

    if draw_row_labels:
        for i in range(rows):
            dwg.add(dwg.text(row_labels[i],
                             (x_start, y_start + i*box_height+box_height),
                             style='font-size:{}'.format(box_height),
                            ))
    if progress_bar:
        pbar.finish()
    dwg.saveas(filename)
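
A hypothetical invocation, assuming a pandas DataFrame with prefixed column names and a matplotlib colormap as the callable *cmap*:

import numpy as np
import pandas as pd
from matplotlib import cm

# Hypothetical call: 5 rows, two column groups ('A', 'B'), fixed
# geometry, no progress bar. Output lands in heatmap.svg.
df = pd.DataFrame(np.random.rand(5, 6),
                  columns=['A_1', 'A_2', 'A_3', 'B_1', 'B_2', 'B_3'])
svg_heatmap(df, 'heatmap.svg', cmap=cm.viridis, col_sep='_',
            total_width=120, box_height=12, draw_row_labels=True,
            progress_bar=False)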
Example No. 25
def run():
    import sys
    if sys.argv[1] == 'dhfr':
        [system, positions, testsystem_name] = dhfr()
    elif sys.argv[1] == 'tip3p':
        [system, positions, testsystem_name] = tip3p()

    precision = sys.argv[2]
    platform_name = sys.argv[3]

    print('%s %s : contains %d particles' %
          (testsystem_name, precision, system.getNumParticles()))
    print('')

    # Remove CMMotionRemover and barostat
    indices_to_remove = list()
    for index in range(system.getNumForces()):
        force = system.getForce(index)
        force_name = force.__class__.__name__
        print(force_name)
        if force_name in ['MonteCarloBarostat', 'CMMotionRemover']:
            print('Removing %s (force index %d)' % (force_name, index))
            indices_to_remove.append(index)
    indices_to_remove.reverse()
    for index in indices_to_remove:
        system.removeForce(index)
    print('')

    # Add barostat
    barostat = openmm.MonteCarloBarostat(pressure, temperature, frequency)

    # Create OpenMM context
    print('Creating context...')
    from openmmtools import integrators
    integrator = integrators.VelocityVerletIntegrator(timestep)
    integrator.setConstraintTolerance(1.0e-8)
    platform = openmm.Platform.getPlatformByName(platform_name)
    platform.setPropertyDefaultValue('Precision', precision)
    if platform_name == 'CUDA':
        platform.setPropertyDefaultValue('DeterministicForces', 'true')
        print('Using deterministic forces...')
    context = openmm.Context(system, integrator, platform)

    context.setPositions(positions)
    print('')

    # Get PME parameters
    print('Retrieving PME parameters...')
    for force in system.getForces():
        if force.__class__.__name__ == 'NonbondedForce':
            nbforce = force
            break
    pme_parameters = nbforce.getPMEParametersInContext(context)
    print(pme_parameters)

    # Flush
    sys.stdout.flush()

    # Equilibrate with barostat
    print('equilibrating...')
    barostat.setFrequency(frequency)
    from progressbar import Percentage, Bar, ETA, RotatingMarker
    widgets = [
        'equilibration: ',
        Percentage(), ' ',
        Bar(marker=RotatingMarker()), ' ',
        ETA()
    ]
    progress = ProgressBar(widgets=widgets)
    for iteration in progress(range(nequil)):
        context.setVelocitiesToTemperature(temperature)
        integrator.step(nequilsteps)

    # Get positions, velocities, and box vectors
    print('')
    state = context.getState(getPositions=True, getVelocities=True)
    box_vectors = state.getPeriodicBoxVectors()
    positions = state.getPositions(asNumpy=True)
    velocities = state.getVelocities(asNumpy=True)
    del context, integrator

    # Remove CMMotionRemover and barostat
    indices_to_remove = list()
    for index in range(system.getNumForces()):
        force = system.getForce(index)
        force_name = force.__class__.__name__
        print(force_name)
        if force_name in ['MonteCarloBarostat', 'CMMotionRemover']:
            print('Removing %s (force index %d)' % (force_name, index))
            indices_to_remove.append(index)
    indices_to_remove.reverse()
    for index in indices_to_remove:
        system.removeForce(index)

    #
    integrator = integrators.VelocityVerletIntegrator(timestep)
    integrator.setConstraintTolerance(1.0e-8)
    context = openmm.Context(system, integrator, platform)
    context.setPeriodicBoxVectors(*box_vectors)
    context.setPositions(positions)
    context.setVelocities(velocities)

    # Open NetCDF file for writing.
    ncfile = netcdf.Dataset(
        'work-%s-%s-%s.nc' % (testsystem_name, precision, platform_name), 'w')
    ncfile.createDimension('nwork', 0)  # extensible dimension
    ncfile.createDimension('nworkvals', nworkvals + 1)
    ncfile.createVariable('work', np.float32, ('nwork', 'nworkvals'))
    work = np.zeros([nwork, nworkvals + 1], np.float32)
    for i in range(nwork):
        context.setVelocitiesToTemperature(temperature)
        integrator.step(nequilsteps)  # equilibrate
        state = context.getState(getEnergy=True)
        initial_energy = state.getPotentialEnergy() + state.getKineticEnergy()
        widgets = [
            'Work %5d / %5d: ' % (i, nwork),
            Percentage(), ' ',
            Bar(marker=RotatingMarker()), ' ',
            ETA()
        ]
        progress = ProgressBar(widgets=widgets)
        for workval in progress(range(nworkvals)):
            integrator.step(nworksteps)
            state = context.getState(getEnergy=True)
            current_energy = state.getPotentialEnergy(
            ) + state.getKineticEnergy()
            work[i, workval + 1] = (current_energy - initial_energy) / kT
            ncfile.variables['work'][i, workval + 1] = work[i, workval + 1]
        print(work[i, :])
        ncfile.sync()
Example No. 26
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--category',
                        '-c',
                        help='category name',
                        required=True)
    parser.add_argument('--level',
                        '-l',
                        type=int,
                        help='level id',
                        required=True)
    parser.add_argument('--load_ckpt',
                        '-k',
                        help='Path to a check point file for load',
                        required=True)
    parser.add_argument('--model', '-m', help='Model to use', required=True)
    parser.add_argument('--setting',
                        '-x',
                        help='Setting to use',
                        required=True)
    parser.add_argument('--batch_size',
                        '-b',
                        help='Batch size during testing',
                        default=8,
                        type=int)
    parser.add_argument('--save_ply',
                        '-s',
                        help='Save results as ply',
                        action='store_true')
    parser.add_argument('--save_dir',
                        '-o',
                        help='The output directory',
                        type=str,
                        default=None)
    parser.add_argument('--save_num_shapes',
                        '-u',
                        help='how many shapes to visualize',
                        default=20,
                        type=int)
    args = parser.parse_args()
    print(args)

    if args.save_ply:
        if os.path.exists(args.save_dir):
            print('ERROR: folder %s exists! Please check and delete!' %
                  args.save_dir)
            exit(1)
        os.mkdir(args.save_dir)

    model = importlib.import_module(args.model)
    setting_path = os.path.join(os.path.dirname(__file__), args.model)
    sys.path.append(setting_path)
    setting = importlib.import_module(args.setting)

    sample_num = setting.sample_num
    batch_size = args.batch_size

    args.data_folder = '../../data/sem_seg_h5/'

    # Load all test data
    args.filelist = os.path.join(args.data_folder,
                                 '%s-%d' % (args.category, args.level),
                                 'test_files.txt')
    data_test, _, label_gt = data_utils.load_seg(args.filelist)
    num_shape = data_test.shape[0]
    print('Loaded data: %s shapes in total to test.' % num_shape)

    # Load current category + level statistics
    with open(
            '../../stats/after_merging_label_ids/%s-level-%d.txt' %
        (args.category, args.level), 'r') as fin:
        setting.num_class = len(fin.readlines()) + 1  # with "other"
        print('NUM CLASS: %d' % setting.num_class)

    ######################################################################
    # Placeholders
    is_training = tf.placeholder(tf.bool, name='is_training')
    pts_fts = tf.placeholder(tf.float32,
                             shape=(batch_size, sample_num, setting.data_dim),
                             name='points')
    ######################################################################

    ######################################################################
    pts_fts_sampled = pts_fts
    points_sampled = pts_fts_sampled
    features_sampled = None

    net = model.Net(points_sampled, features_sampled, is_training, setting)
    seg_probs_op = tf.nn.softmax(net.logits, name='seg_probs')

    # for restore model
    saver = tf.train.Saver()

    parameter_num = np.sum(
        [np.prod(v.shape.as_list()) for v in tf.trainable_variables()])
    print('{}-Parameter number: {:d}.'.format(datetime.now(), parameter_num))

    # Create a session
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    config.allow_soft_placement = True
    config.log_device_placement = False
    sess = tf.Session(config=config)

    # Load the model
    ckptstate = tf.train.get_checkpoint_state(args.load_ckpt)
    if ckptstate is not None:
        LOAD_MODEL_FILE = os.path.join(
            args.load_ckpt, os.path.basename(ckptstate.model_checkpoint_path))
        saver.restore(sess, LOAD_MODEL_FILE)
        print("Model loaded in file: %s" % LOAD_MODEL_FILE)
    else:
        print("Fail to load modelfile: %s" % args.load_ckpt)

    # Start the testing
    print('{}-Testing...'.format(datetime.now()))

    num_batch = (num_shape - 1) // batch_size + 1
    pts_batch = np.zeros((batch_size, sample_num, 3), dtype=np.float32)

    avg_acc = 0.0
    avg_cnt = 0

    shape_iou_tot = 0.0
    shape_iou_cnt = 0

    part_intersect = np.zeros((setting.num_class), dtype=np.float32)
    part_union = np.zeros((setting.num_class), dtype=np.float32)

    bar = ProgressBar()
    all_seg_probs = []
    for batch_idx in bar(range(num_batch)):
        start_idx = batch_idx * batch_size
        end_idx = min((batch_idx + 1) * batch_size, num_shape)

        pts_batch[:end_idx - start_idx, ...] = data_test[start_idx:end_idx]

        seg_probs = sess.run(seg_probs_op,
                             feed_dict={
                                 pts_fts: pts_batch,
                                 is_training: False
                             })
        seg_probs = seg_probs[:end_idx - start_idx]
        all_seg_probs.append(seg_probs)

        seg_res = np.argmax(seg_probs[:, :, 1:], axis=-1) + 1

        avg_acc += np.sum(
            np.mean((seg_res == label_gt[start_idx:end_idx]) |
                    (label_gt[start_idx:end_idx] == 0),
                    axis=-1))
        avg_cnt += end_idx - start_idx

        seg_gt = label_gt[start_idx:end_idx]
        seg_res[seg_gt == 0] = 0

        for i in range(end_idx - start_idx):
            cur_pred = seg_res[i]
            cur_gt = seg_gt[i]

            cur_shape_iou_tot = 0.0
            cur_shape_iou_cnt = 0
            for j in range(1, setting.num_class):
                cur_gt_mask = (cur_gt == j)
                cur_pred_mask = (cur_pred == j)

                has_gt = (np.sum(cur_gt_mask) > 0)
                has_pred = (np.sum(cur_pred_mask) > 0)

                if has_gt or has_pred:
                    intersect = np.sum(cur_gt_mask & cur_pred_mask)
                    union = np.sum(cur_gt_mask | cur_pred_mask)
                    iou = intersect / union

                    cur_shape_iou_tot += iou
                    cur_shape_iou_cnt += 1

                    part_intersect[j] += intersect
                    part_union[j] += union

            if cur_shape_iou_cnt > 0:
                cur_shape_miou = cur_shape_iou_tot / cur_shape_iou_cnt
                shape_iou_tot += cur_shape_miou
                shape_iou_cnt += 1

        if args.save_ply and start_idx < args.save_num_shapes:
            for i in range(start_idx, min(end_idx, args.save_num_shapes)):
                out_fn = os.path.join(args.save_dir, 'shape-%02d-pred.ply' % i)
                data_utils.save_ply_property(data_test[i],
                                             seg_res[i - start_idx],
                                             setting.num_class, out_fn)
                out_fn = os.path.join(args.save_dir, 'shape-%02d-gt.ply' % i)
                data_utils.save_ply_property(data_test[i], label_gt[i],
                                             setting.num_class, out_fn)

    all_seg_probs = np.vstack(all_seg_probs)
    np.save('out.npy', all_seg_probs)

    print('{}-Done!'.format(datetime.now()))

    print('Average Accuracy: %f' % (avg_acc / avg_cnt))
    print('Shape mean IoU: %f' % (shape_iou_tot / shape_iou_cnt))

    part_iou = np.divide(part_intersect[1:], part_union[1:])
    mean_part_iou = np.mean(part_iou)
    print('Category mean IoU: %f, %s' % (mean_part_iou, str(part_iou)))

    out_list = ['%3.1f' % (item * 100) for item in part_iou.tolist()]
    print('%3.1f;%3.1f;%3.1f;%s' %
          (avg_acc * 100 / avg_cnt, shape_iou_tot * 100 / shape_iou_cnt,
           mean_part_iou * 100, '[' + ', '.join(out_list) + ']'))
Example No. 27
def color_images_part(model):
    """
    Function that colors images with approaches on part of images.
    Function is used on reg-parts, class-wo-weights and class-with-weights approaches.

    Parameters
    ----------
    model : keras.engine.training.Model
        Model for image colorization
    """

    # get images to color
    test_set_dir_path = get_abs_path(data_origin)

    image_list = get_image_list(test_set_dir_path)
    num_of_images = len(image_list)

    # init progress bar
    pbar = ProgressBar(maxval=num_of_images,
                       widgets=[Percentage(), ' ',
                                Bar(), ' ',
                                ETA()])
    pbar.start()

    # repeat for each image
    for i in range(num_of_images):
        # get image
        image_lab = load_images(os.path.join(test_set_dir_path, image_list[i]))
        image_l = image_lab[:, :, 0]
        h, w = image_l.shape

        # split images to list of images
        slices_dim_h = int(math.ceil(h / 32))
        slices_dim_w = int(math.ceil(w / 32))

        slices = np.zeros((slices_dim_h * slices_dim_w * 4, 32, 32, 1))
        for a in range(slices_dim_h * 2 - 1):
            for b in range(slices_dim_w * 2 - 1):
                part = image_l[a * 32 // 2:a * 32 // 2 + 32,
                               b * 32 // 2:b * 32 // 2 + 32]
                # fill with zero on edges
                _part = np.zeros((32, 32))
                _part[:part.shape[0], :part.shape[1]] = part

                slices[a * slices_dim_w * 2 + b] = _part[:, :, np.newaxis]

        # lower the original's dimensions to 224x224 to feed VGG and expand dims
        image_lab_224_b = resize_image_lab(image_lab, (224, 224))
        image_l_224 = np.repeat(image_lab_224_b[:, :, 0, np.newaxis],
                                3,
                                axis=2).astype(float)

        # append both lists together
        input_data = [
            slices,
            np.array([
                image_l_224,
            ] * slices_dim_h * slices_dim_w * 4)
        ]

        # predict
        predictions_ab = model.predict(input_data, batch_size=32)

        # for histograms -> transformation from hist to ab
        if model.name == "class_wo_weights" or model.name == "class_with_weights":
            indices = np.argmax(predictions_ab[:, :, :, :], axis=3)

            predictions_a = indices // 20 * 10 - 100 + 5
            predictions_b = indices % 20 * 10 - 100 + 5  # +5 to set in the middle box
            predictions_ab = np.stack((predictions_a, predictions_b), axis=3)

        # reshape back to original size
        original_size_im = np.zeros((slices_dim_h * 32, slices_dim_w * 32, 2))
        o_h, o_w = original_size_im.shape[:2]

        for n in range(predictions_ab.shape[0]):
            a, b = n // (slices_dim_w * 2) * 16, n % (slices_dim_w * 2) * 16

            if a + 32 > o_h or b + 32 > o_w:
                continue  # it is empty edge

            # weight decision
            if a == 0 and b == 0:
                weight = weight_top_left
            elif a == 0 and b == o_w - 32:
                weight = weight_top_right
            elif a == 0:
                weight = weight_top
            elif a == o_h - 32 and b == 0:
                weight = weight_bottom_left
            elif b == 0:
                weight = weight_left
            elif a == o_h - 32 and b == o_w - 32:
                weight = weight_bottom_right
            elif a == o_h - 32:
                weight = weight_bottom
            elif b == o_w - 32:
                weight = weight_right
            else:
                weight = weight_m

            im_a = predictions_ab[n, :, :, 0] * weight
            im_b = predictions_ab[n, :, :, 1] * weight

            original_size_im[a:a + 32, b:b + 32, :] += np.stack((im_a, im_b),
                                                                axis=2)

        # make original shape image
        original_size_im = original_size_im[:h, :w]

        # to rgb
        color_im = np.concatenate(
            (image_l[:, :, np.newaxis], original_size_im), axis=2)
        im_rgb = color.lab2rgb(color_im)

        # save
        abs_save_path = get_abs_path(data_destination)
        scipy.misc.toimage(im_rgb, cmin=0.0,
                           cmax=1.0).save(abs_save_path + model.name + "_" +
                                          image_list[i])

        # update progress bar
        pbar.update(i + 1)

    # finish progress bar
    pbar.finish()
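The loop above reassembles overlapping 32x32 predictions taken at a 16-pixel stride, weighting each slice by its position so the summed overlaps blend smoothly. A minimal, self-contained sketch of that recomposition, assuming a hypothetical separable make_blend_weight mask in place of the per-position weights used above:

import numpy as np

def make_blend_weight(tile=32, overlap=16):
    # Hypothetical helper: a separable mask that ramps linearly over the
    # overlap region, so overlapping contributions sum to roughly one.
    ramp = np.minimum(np.arange(1, tile + 1), overlap) / overlap
    ramp = np.minimum(ramp, ramp[::-1])  # symmetric rise and fall
    return np.outer(ramp, ramp)

def blend_tiles(tiles, n_rows, n_cols, tile=32, stride=16):
    # Recompose row-major overlapping tiles into one weighted image.
    out = np.zeros(((n_rows - 1) * stride + tile,
                    (n_cols - 1) * stride + tile))
    weight = make_blend_weight(tile, tile - stride)
    for n, t in enumerate(tiles):
        r, c = (n // n_cols) * stride, (n % n_cols) * stride
        out[r:r + tile, c:c + tile] += t * weight
    return out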
    organization_id) + '&y=' + str(year_range)
organizations_url = 'http://api.aiddata.org/data/origin/organizations?'
json_orgs = json.load(urllib2.urlopen(organizations_url))
donating_org = ''

# Iterates over the organizations returned by the API
for org in json_orgs['hits']:
    donating_org = org['name']
    print 'Creating map for ' + donating_org

    json_result = json.load(urllib2.urlopen(url))
    num_projects = json_result['project_count']
    count = 0
    totamt = 0
    country_dict = {}
    pbar = ProgressBar(maxval=num_projects).start()

    # Iterates over the projects from the AidData api in chunks of 50, the max size allowed by the api
    while count < num_projects:
        project_info = getProjectData(count, organization_id, year_range)
        for project in project_info['items']:
            # Only looks at projects that have transaction values
            if 'transactions' in project:
                for transactions in project['transactions']:
                    # Ignores projects that don't indicate a recipient country
                    if 'tr_receiver_country' in transactions and transactions[
                            'tr_receiver_country']['iso3'] != '':
                        donor = transactions['tr_funding_org']['name']
                        receiver = transactions['tr_receiver_country']['iso3']
                        amount = transactions['tr_constant_value']
                        totamt += amount
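The loop above pages through the API in chunks of 50 (the fragment is truncated before count is advanced and the bar updated). A minimal sketch of that pagination-with-progress pattern, using a hypothetical fetch_page(offset, size) stand-in for getProjectData:

from progressbar import ProgressBar

def fetch_page(offset, size, total=137):
    # Stand-in for getProjectData: returns up to `size` dummy items.
    return list(range(offset, min(offset + size, total)))

def fetch_all(total, page_size=50):
    pbar = ProgressBar(maxval=total).start()
    items, offset = [], 0
    while offset < total:
        items.extend(fetch_page(offset, page_size, total))
        offset = min(offset + page_size, total)
        pbar.update(offset)
    pbar.finish()
    return items

projects = fetch_all(137)  # e.g. num_projects from the API response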
Example no. 29
0
def zippy_attack(url):
    global zippy_secret_attempts
    global zippy_conn_attempts
    zippy_req = urllib.request.Request(url)
    try:
        zippy_data = retry_urlopen(zippy_req)
        if zippy_data.status == 200:
            zippy_html = str(zippy_data.read())
            zippy_soup = BeautifulSoup(zippy_html, 'html.parser')
            if zippy_soup.title.text != "Zippyshare.com - ":
                zippy_dl = zippy_soup.find('a', id="dlbutton")
                if zippy_dl is not None:
                    zippy_js = zippy_soup.find_all('script')
                    for js in zippy_js:
                        # str() on the response bytes keeps newlines as
                        # literal backslash-n, hence the doubled escapes
                        if re.match('\\\\n   var somffunction',
                                    js.text) or re.match(
                                        '\\\\n   var otfunction', js.text):
                            a = re.search(r'var a = (\d*);', js.text)
                            if a and a.group(1):
                                if args.v:
                                    print(colors.OKGREEN +
                                          "Attemping to break secret" +
                                          colors.ENDC)
                                secret = int(a.group(1))
                                download_secret = str(
                                    int((secret % 78956) * (secret % 3) + 18))
                                url_info = url.split('/')
                                download_server = str(
                                    url_info[2].split('.')[0])
                                download_file = str(url_info[4])
                                zippy_title = zippy_soup.title.text.split(
                                    ' - ')
                                zippy_title.pop(0)
                                download_name = " ".join(zippy_title)
                                download_name = urllib.parse.quote(
                                    download_name)
                                url = "http://" + download_server + ".zippyshare.com/d/" + download_file + "/" + download_secret + "/" + download_name
                                test_req = urllib.request.Request(
                                    url=url, method='HEAD')
                                test_data = urllib.request.urlopen(test_req)
                                content_type = test_data.headers[
                                    'content-type'].split(';')
                                if content_type[0] == "application/x-download":
                                    if args.v:
                                        print(colors.OKBLUE + "\tSuccess" +
                                              colors.ENDC)
                                    widgets = [
                                        " " + " ".join(zippy_title) + " ",
                                        Percentage(), ' ',
                                        Bar(), ' ',
                                        ETA(), ' ',
                                        FileTransferSpeed()
                                    ]
                                    pbar = ProgressBar(widgets=widgets)

                                    def dlProgress(count, blockSize,
                                                   totalSize):
                                        if pbar.maxval is None:
                                            pbar.maxval = totalSize
                                            pbar.start()

                                        pbar.update(
                                            min(count * blockSize, totalSize))

                                    dl, headers = urllib.request.urlretrieve(
                                        url,
                                        " ".join(zippy_title),
                                        reporthook=dlProgress)
                                    pbar.finish()
                                elif zippy_secret_attempts <= zippy_secret_attempts_max:
                                    if args.v:
                                        print(colors.WARNING + "\tFailed" +
                                              colors.ENDC)
                                    zippy_secret_attempts += 1
                                    zippy_attack(url)
                                else:
                                    print(
                                        colors.FAIL +
                                        "Reached max secret attempts, exiting"
                                        + colors.ENDC)
                                    exit(0)

                else:
                    print(colors.WARNING + "Can't find download button..." +
                          colors.ENDC)
            else:
                print(colors.WARNING + "Dead link" + colors.ENDC)
        else:
            print(colors.WARNING + "Bad status code: " +
                  str(zippy_data.status) + colors.ENDC)

    except URLError:
        if zippy_conn_attempts <= zippy_conn_attempts_max:
            if args.v:
                print(colors.WARNING +
                      "Connection refused, let's wait 5 seconds and retry" +
                      colors.ENDC)
            zippy_conn_attempts += 1
            time.sleep(5)
            zippy_attack(url)
        else:
            print(colors.FAIL + "Reached connection retry limit, exiting" +
                  colors.ENDC)
            exit(0)
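The nested dlProgress above follows the standard urllib.request.urlretrieve reporthook contract: the hook receives (block_count, block_size, total_size), and the total is only known once the first block arrives, hence the lazy pbar.start(). The same pattern as a standalone sketch, with a placeholder URL:

import urllib.request
from progressbar import (ProgressBar, Percentage, Bar, ETA,
                         FileTransferSpeed)

def download_with_progress(url, filename):
    widgets = [Percentage(), ' ', Bar(), ' ', ETA(), ' ',
               FileTransferSpeed()]
    pbar = ProgressBar(widgets=widgets)

    def reporthook(count, block_size, total_size):
        # total_size comes from the Content-Length header and is only
        # available once the first callback fires
        if pbar.maxval is None:
            pbar.maxval = total_size
            pbar.start()
        pbar.update(min(count * block_size, total_size))

    urllib.request.urlretrieve(url, filename, reporthook=reporthook)
    pbar.finish()

# download_with_progress('http://example.com/file.bin', 'file.bin')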
Example no. 30
0
def example7():
    pbar = ProgressBar()  # Progressbar can guess max_value automatically.
    for i in pbar(range(8)):
        time.sleep(0.001)