Example #1
def cmd_reindex(drop_before=False, **kwargs):
    """Reindex collections"""

    ctx = client.Context(**kwargs)
    ctx.log_warn("All Writes operations is blocked pending run !")
    
    if ctx.silent or click.confirm('Do you want to continue?', abort=True):
    
        db = ctx.mongo_database()
        
        if drop_before:
            with click.progressbar(constants.COL_ALL,
                                   length=len(constants.COL_ALL),
                                   label='Drop indexes') as collections:
                for collection in collections:
                    ctx.log_ok(" [%s]" % collection)
                    try:
                        db[collection].drop_indexes()
                    except Exception as err:
                        ctx.log_warn(str(err))

        ctx.log("Create or update all indexes !")
        create_or_update_indexes(db)
        
        if not drop_before:
            with click.progressbar(constants.COL_ALL,
                                   length=len(constants.COL_ALL),
                                   label='Reindex collections') as collections:
                for collection in collections:
                    ctx.log_ok(" [%s]" % collection)
                    try:
                        db[collection].reindex()
                    except Exception as err:
                        ctx.log_warn(str(err))
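Note that newer PyMongo releases (4.x) no longer expose Collection.reindex(), so on current stacks only the drop-and-recreate branch still applies. A minimal standalone sketch of the pattern above (a determinate bar over collection names with per-item error handling), assuming a pymongo Database handle named db and a placeholder collection list:

import click

COLLECTIONS = ["users", "orders", "events"]  # stand-in for constants.COL_ALL

def drop_all_indexes(db):
    # One bar over the collections; a failure on one collection is logged, not fatal.
    with click.progressbar(COLLECTIONS, label='Drop indexes') as bar:
        for name in bar:
            try:
                db[name].drop_indexes()
            except Exception as err:
                click.echo(' [%s] %s' % (name, err))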
Example #2
def process_samples(db, *lims_samples):
    """Process a list of LIMS samples.

    Args:
        db (Brain): Moneypenny database API instance
        lims_samples (List[dict]): list of transformed samples
    """
    active_samples = (sample for sample in lims_samples
                      if not sample['cancelled'])
    new_samples = (build_sample(lims_sample) for lims_sample in active_samples)
    with click.progressbar(new_samples, length=len(lims_samples),
                           label='lims | samples') as bar:
        case_groups = group_records(bar, 'case_id')

    new_cases = (build_case(case_id, samples) for case_id, samples in
                 six.iteritems(case_groups))
    with click.progressbar(new_cases, length=len(case_groups),
                           label='lims | cases') as bar:
        cust_groups = group_records(bar, 'customer_id')

    new_custs = (build_customer(cust_id, cases) for cust_id, cases in
                 six.iteritems(cust_groups))
    with click.progressbar(new_custs, length=len(cust_groups),
                           label='lims | customers') as bar:
        for customer in bar:
            logger.debug("saving %s", customer.id)
            persist(db, customer)
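Generator expressions such as new_samples expose no len(), so each bar is given an explicit length= computed from its inputs; without it click falls back to an indeterminate display. A minimal sketch of the same idea, with a hypothetical transform function:

import time

import click

def transform(item):
    time.sleep(0.01)  # simulate work
    return item * 2

items = list(range(100))
results = (transform(i) for i in items)  # generators have no len()
with click.progressbar(results, length=len(items), label='transforming') as bar:
    transformed = list(bar)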
Example #3
    def load_to_cache():
        with click.progressbar(Locale.get_all_locales(), label="Loading locales to cache") as bar:
            for locale_id in bar:
                Locale.get_items_by_locale(locale_id)

        users = {}
        with click.progressbar(UserLocale.objects.all().values_list("user_id", "locale_id"),
                               label="Loading user locales to cache") as bar:
            for user_id, locale_id in bar:
                try:
                    users[user_id].add(locale_id)
                except KeyError:
                    users[user_id] = {locale_id}
            for user_id, locales in users.items():
                Locale.get_user_locales.lock_this(
                    Locale.get_user_locales.cache.set
                )(Locale.get_user_locales.key(user_id), locales, Locale.get_user_locales.timeout)

        items = {}
        with click.progressbar(ItemLocale.objects.all().values_list("item_id", "locale_id"),
                               label="Loading item locales to cache") as bar:
            for item_id, locale_id in bar:
                try:
                    items[item_id].add(locale_id)
                except KeyError:
                    items[item_id] = {locale_id}
            for item_id, locales in items.items():
                Locale.get_item_locales.lock_this(
                    Locale.get_item_locales.cache.set
                )(Locale.get_item_locales.key(item_id), locales, Locale.get_item_locales.timeout)
Example #4
def progress(count):
    """Demonstrates the progress bar."""
    items = range_type(count)

    def process_slowly(item):
        time.sleep(0.002 * random.random())

    def filter(items):
        for item in items:
            if random.random() > 0.3:
                yield item

    with click.progressbar(items, label='Processing user accounts',
                           fill_char=click.style('#', fg='green')) as bar:
        for item in bar:
            process_slowly(item)

    def show_item(item):
        if item is not None:
            return 'Item #%d' % item

    with click.progressbar(filter(items), label='Committing transaction',
                           fill_char=click.style('#', fg='yellow'),
                           item_show_func=show_item) as bar:
        for item in bar:
            process_slowly(item)

    with click.progressbar(length=count, label='Counting',
                           bar_template='%(label)s  %(bar)s | %(info)s',
                           fill_char=click.style(u'█', fg='cyan'),
                           empty_char=' ') as bar:
        for item in bar:
            process_slowly(item)
Example #5
    def _graph_shodan(self):
        """Convert the Shodan tables with ports added as Neo4j graph nodes linked to hosts."""
        self.c.execute("SELECT ip_address,port,banner_data,os,organization FROM shodan_host_lookup")
        all_shodan_lookup = self.c.fetchall()
        with click.progressbar(all_shodan_lookup,
                               label="Creating Port nodes",
                               length=len(all_shodan_lookup)) as bar:
            for row in bar:
                query = """
                MATCH (a:IP {Address:'%s'})
                CREATE UNIQUE (b:Port {Number:'%s', OS:'%s', Organization:"%s", Hostname:''})<-[r:HAS_PORT]-(a)
                SET a.Organization = "%s"
                RETURN a,b
                """ % (row[0],row[1],row[3],row[4],row[4])
                helpers.execute_query(self.neo4j_driver,query)

        self.c.execute("SELECT domain,ip_address,port,banner_data,os,hostname FROM shodan_search")
        all_shodan_search = self.c.fetchall()
        with click.progressbar(all_shodan_search,
                               label="Creating Port and IP relationships",
                               length=len(all_shodan_search)) as bar:
            for row in bar:
                query = """
                MATCH (a:Port)<-[:HAS_PORT]-(b:IP {Address:'%s'})
                SET a.Hostname = "%s"
                RETURN a
                """ % (row[1],row[5])
                helpers.execute_query(self.neo4j_driver,query)
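Interpolating row values straight into Cypher with % breaks on embedded quotes and invites injection, and CREATE UNIQUE is deprecated in modern Neo4j in favor of MERGE. A hedged sketch of a parameterized rewrite of the first query, assuming a session from the official neo4j Python driver:

# Hypothetical parameterized variant; `session` would come from
# neo4j.GraphDatabase.driver(...).session()
query = """
MATCH (a:IP {Address: $address})
MERGE (b:Port {Number: $port, OS: $os, Organization: $org, Hostname: ''})<-[:HAS_PORT]-(a)
SET a.Organization = $org
RETURN a, b
"""
session.run(query, address=row[0], port=row[1], os=row[3], org=row[4])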
Example #6
def add_refs():
    parser = et.XMLParser(remove_blank_text=True)

    print('adding cites')
    with open(src_file) as f:
        dom = et.parse(f, parser)

    code_node = dom.find('//document[@id="D.C. Code"]')
    nodes = code_node.xpath('//annoGroup[not(heading/text()="History")]/annotation') + code_node.xpath('//text')

    with click.progressbar(nodes) as progress_nodes:

        for node in progress_nodes:
            node_text = et.tostring(node, encoding='utf-8').decode()
            count = 0

            if '1981 Ed.' in node_text or '1973 Ed.' in node_text:
                continue
            for regex, substr in subs:
                node_text, local_count = regex.subn(substr, node_text)
                count += local_count

            if count:
                node.getparent().replace(node, et.fromstring(node_text))

    print('adding pdfs')

    nodes = dom.xpath('//document[starts-with(@id, "D.C. Law")]/cites/law[not(@url)]')
    with click.progressbar(nodes) as progress_nodes:
        for node in progress_nodes:
            if os.path.isfile(pdf_path.format(**node.attrib)):
                node.set('url', './docs/{session}-{lawId}.pdf'.format(**node.attrib))

    with open(dst_file, 'wb') as f:
        f.write(et.tostring(dom, pretty_print=True, encoding="utf-8"))
Example #7
    def write(self):
        # Process cards into a list of svg pages
        label = click.style('Processing Cards', fg='yellow')
        with click.progressbar(self.cards, label=label, show_eta=True) as cards:
            _pages = []
            _page = self.get_new_page()
            for i, card in enumerate(cards):
                svg_card = sg.fromfile(card)
                h = i % self.n
                x = '%spx' % self.get_x_transform(_page, svg_card)
                y = '%spx' % self.get_y_transform(_page, svg_card, h)
                svg_card.root.set("x", x)
                svg_card.root.set("y", y)
                _page.append(svg_card)
                if not h and i:
                    _pages.append(_page)
                    _page = self.get_new_page()
                if i == len(self.cards) - 1:
                    _pages.append(_page)
        # Create the output directory if it does not exist.
        output = self.dir + ".paginated"
        if not os.path.isdir(output):
            os.mkdir(output)
        # Write svg pages to files.
        label = click.style('Writing pages', fg='yellow')
        with click.progressbar(_pages, label=label, show_eta=True) as pages:
            for i, page in enumerate(pages, start=1):
                f = os.path.join(output, '%05d.svg' % i)
                page.save(f)
Example #8
    def stage_data(self):
        self.remote_machine.makedir(self.config.adjoint_dir)
        hpass = 1 / self.iteration_info['highpass']
        lpass = 1 / self.iteration_info['lowpass']


        with open('./lasif_data.p', 'rb') as fh:
            f = cPickle.load(fh)
        if self.config.input_data_type == 'noise':
            f.append({'input_data_type': 'noise'})
        elif self.config.input_data_type == 'earthquake':
            f.append({'input_data_type': 'earthquake'})
        with open('./lasif_data.p', 'wb') as fh:
            cPickle.dump(f, fh)


        with click.progressbar(self.all_events, label="Copying preprocessed data...") as events:
            for event in events:
                try:
                    raw_dir = os.path.join(self.config.lasif_project_path, 'DATA', event, 'preprocessed_{:.1f}_{:.1f}'.format(lpass, hpass), 'preprocessed_data.mseed')
                    event_dir = os.path.join(self.config.adjoint_dir, event)
                    self.remote_machine.makedir(event_dir)
                    self.remote_machine.execute_command('rsync {} {}'.format(raw_dir, event_dir))
                except Exception:
                    print("\nCould not sync preprocessed_data.mseed for: " + event)

        with click.progressbar(self.all_events, label="Copying synthetics...") as events:
            for event in events:
                try:
                    raw_dir = os.path.join(self.config.lasif_project_path, 'SYNTHETICS', event, self.config.base_iteration, 'synthetics.mseed')
                    event_dir = os.path.join(self.config.adjoint_dir, event)
                    self.remote_machine.execute_command('rsync {} {}'.format(raw_dir, event_dir))
                except Exception:
                    print("\nCould not sync synthetics.mseed for: " + event)

        with click.progressbar(self.all_events, label="Copying windows...") as events:
            for event in events:
                try:
                    raw_dir = os.path.join(self.config.lasif_project_path, 'ADJOINT_SOURCES_AND_WINDOWS/WINDOWS', self.config.first_iteration ,event, 'windows.p')
                    event_dir = os.path.join(self.config.adjoint_dir, event)
                    self.remote_machine.execute_command('rsync {} {}'.format(raw_dir, event_dir))
                except Exception:
                    print("\nCould not sync windows.p for: " + event)


        self.remote_machine.put_file('lasif_data.p',
                                     os.path.join(self.config.adjoint_dir, 'lasif_data.p'))

        remote_script = os.path.join(self.config.adjoint_dir, "create_adjoint_sources.py")
        with io.open(utilities.get_script_file('create_adjoint_sources'), 'r') as fh:
            script_string = fh.readlines()
        script_string.insert(0, '#!{}\n'.format(self.config.python_exec))
        self.remote_machine.write_file(remote_script, ''.join(script_string))

        remote_sbatch = os.path.join(self.config.adjoint_dir, 'create_adjoint_sources.sbatch')
        with io.open(utilities.get_template_file('sbatch'), 'r') as fh:
            sbatch_string = fh.read().format(**self.sbatch_dict)
        self.remote_machine.write_file(remote_sbatch, sbatch_string)
Example #9
def k2mosaic_mosaic(tpf_filenames, mission, campaign, channel, cadencelist,
                    output_prefix='', verbose=True, processes=None):
    """Mosaic a set of TPF files for a set of cadences."""
    task = partial(k2mosaic_mosaic_one, tpf_filenames=tpf_filenames,
                   campaign=campaign, channel=channel,
                   output_prefix=output_prefix, verbose=verbose)
    if processes is None or processes > 1:  # Use parallel processing
        from multiprocessing import Pool
        with Pool(processes=processes) as pool:
            with click.progressbar(pool.imap(task, cadencelist), length=len(cadencelist),
                                   label='Mosaicking', show_pos=True) as iterable:
                for _ in iterable:
                    pass
    else:  # Single process
        with click.progressbar(cadencelist, label='Mosaicking', show_pos=True) as iterable:
            for cadence in iterable:
                task(cadence)
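pool.imap returns an iterator with no len(), which is why the parallel branch passes length=len(cadencelist); otherwise the bar can only show a position counter. A self-contained sketch of the pattern with a trivial task:

from multiprocessing import Pool

import click

def square(x):
    return x * x

if __name__ == '__main__':
    jobs = list(range(50))
    with Pool() as pool:
        # length= keeps the bar determinate even though imap has no len()
        with click.progressbar(pool.imap(square, jobs), length=len(jobs),
                               label='Mosaicking', show_pos=True) as bar:
            results = list(bar)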
Example #10
def cli(ctx, url):
    """
    Command to sync notes with Firebase
    """
    firebase_instance = firebase.FirebaseApplication(url + '/notes', None)
    if firebase_instance:
        db = ctx.database()
        cursor = db.cursor()

        query_all = "SELECT * from `notes`"
        cursor.execute(query_all)
        all_notes = cursor.fetchall()
        from_firebase = firebase_instance.get('/notes', None)

        print(from_firebase)
        from_firebase_listed = []
        if not from_firebase:
            from_firebase = []

        for firebase_key in from_firebase:
            # print firebase_note
            from_firebase_listed.append(from_firebase[firebase_key])


        to_download = helpers.to_sync(from_firebase_listed, all_notes)

        to_upload = helpers.to_sync(all_notes, from_firebase_listed)

        print(to_download)

        # combined = to_download + to_upload

        # upload
        with click.progressbar(to_upload, length=len(to_upload),
                               label='Syncing ...', color=True, width=0) as bar:
            for note in bar:
                result = firebase_instance.put(url, 'notes/{}'.format(note['checksum']), note)

        # download
        with click.progressbar(to_download, length=len(to_download),
                               label='Syncing ...', color=True, width=0) as bar:
            for note in bar:
                cursor.execute('''INSERT INTO notes
        (title, content, date_created,date_modified, checksum)
        VALUES(?,?,?,?,?)''',
                               (note['title'], note['content'], note['date_created'], note['date_modified'], note['checksum']))

                db.commit()
    else:
        click.secho('Invalid url provided', fg="white", bg="red")
Example #11
    def download(self, identifier):
        self.verify_configuration()
        template_dir = get_template_dir(self, identifier)
        if os.path.isdir(template_dir):
            shutil.rmtree(template_dir)
        elif os.path.isfile(template_dir):
            os.remove(template_dir)
        # verify release exists:
        click.echo('Fetching release on {} with tag {}'.format(self.config.location, identifier.version))
        r = requests.get('https://api.github.com/repos/{}/releases/tags/{}'.format(self.config.location,
                                                                                   identifier.version),
                         headers=self.create_headers(), verify=get_cert_attr())
        if r.status_code == 200:
            for asset in [a for a in r.json()['assets'] if a['name'] == '{}-template.zip'.format(identifier.name)]:
                # Time to download the file
                proscli.utils.debug('Found {}'.format(asset['url']))
                dr = requests.get(asset['url'], headers=self.create_headers('application/octet-stream'), stream=True,
                                  verify=get_cert_attr())
                if dr.status_code == 200 or dr.status_code == 302:
                    with tempfile.NamedTemporaryFile(delete=False) as tf:
                        # todo: no temp file necessary - go straight from download to zipfile extraction
                        with click.progressbar(length=asset['size'],
                                               label='Downloading {} (v: {})'.format(asset['name'],
                                                                                     identifier.version)) \
                                as progress_bar:
                            for chunk in dr.iter_content(128):
                                tf.write(chunk)
                                progress_bar.update(128)
                        tf.close()  # need to close since opening again as ZipFile
                        with zipfile.ZipFile(tf.name) as zf:
                            with click.progressbar(length=len(zf.namelist()),
                                                   label='Extracting {}'.format(asset['name'])) as progress_bar:
                                for file in zf.namelist():
                                    zf.extract(file, path=template_dir)
                                    progress_bar.update(1)
                        os.remove(tf.name)
                    click.echo('Template downloaded to {}'.format(template_dir))
                    return True
                else:
                    click.echo('Unable to download {} from {} (Status code: {})'.format(asset['name'],
                                                                                        self.config.location,
                                                                                        dr.status_code))
                    proscli.utils.debug(dr.__dict__)
                    return False
        else:
            click.echo('Unable to find {} on {} (Status code: {})'.format(identifier.version,
                                                                          self.config.name,
                                                                          r.status_code))
            proscli.utils.debug(r.__dict__)
            return False
Example #12
def progress(count):
    """Demonstrates the progress bar."""
    items = range_type(count)

    def process_slowly(item):
        time.sleep(0.002 * random.random())

    def filter(items):
        for item in items:
            if random.random() > 0.3:
                yield item

    with click.progressbar(items, label='Processing accounts',
                           fill_char=click.style('#', fg='green')) as bar:
        for item in bar:
            process_slowly(item)

    def show_item(item):
        if item is not None:
            return 'Item #%d' % item

    with click.progressbar(filter(items), label='Committing transaction',
                           fill_char=click.style('#', fg='yellow'),
                           item_show_func=show_item) as bar:
        for item in bar:
            process_slowly(item)

    with click.progressbar(length=count, label='Counting',
                           bar_template='%(label)s  %(bar)s | %(info)s',
                           fill_char=click.style(u'█', fg='cyan'),
                           empty_char=' ') as bar:
        for item in bar:
            process_slowly(item)

    with click.progressbar(length=count, width=0, show_percent=False,
                           show_eta=False,
                           fill_char=click.style('#', fg='magenta')) as bar:
        for item in bar:
            process_slowly(item)

    # Non-linear progress bar
    steps = [math.exp(x * 1. / 20) - 1 for x in range(20)]
    count = int(sum(steps))
    with click.progressbar(length=count, show_percent=False,
                           label='Slowing progress bar',
                           fill_char=click.style(u'█', fg='green')) as bar:
        for item in steps:
            time.sleep(item)
            bar.update(item)
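The final loop drives the bar by hand: bar.update(n) advances the position by n, so uneven step sizes still sum to the declared length. A compact, self-contained illustration:

import time

import click

steps = [1, 4, 2, 8, 5]
with click.progressbar(length=sum(steps), label='uneven work') as bar:
    for step in steps:
        time.sleep(0.05 * step)  # simulate a chunk of work
        bar.update(step)         # advance by the amount just completed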
Example #13
    def run(self):
        keen_client = KeenClient(
            project_id=self.project_id,
            write_key=self.write_key,
        )

        timezone = pytz.timezone(self.data['user']['timezone'])

        self.events = []
        with click.progressbar(self.data['days'],
                               label='Preparing keen.io events',
                               fill_char=click.style('#', fg='blue')) as days:

            for day in days:
                dt = self._parse_date(day['date'], timezone)

                self.append_event(dt, 'total', {
                    'seconds': day['grand_total']['total_seconds'],
                })

                categories = [
                    'editor',
                    'language',
                    'operating_system',
                    'project',
                ]
                for category in categories:
                    for item in day.get(category + 's', []):
                        self.append_event(dt, category, {
                            'seconds': item['total_seconds'],
                            'name': item['name'],
                        })

                files = {}
                for project in day.get('projects', []):
                    for f in project.get('dump', {}).get('files', []):
                        if f['name'] not in files:
                            files[f['name']] = 0
                        files[f['name']] += f['total_seconds']

                for name, seconds in files.items():
                    self.append_event(dt, 'file', {
                        'seconds': seconds,
                        'name': name,
                    })

        if len(self.events) == 0:
            click.echo('No events available for exporting to keen.io')
            return

        click.echo('Uploading events to keen.io...')
        start = datetime.utcfromtimestamp(self.data['range']['start'])
        end = datetime.utcfromtimestamp(self.data['range']['end'])
        collection = 'WakaTime Data Dump from {start} until {end}'.format(
            start=start.strftime('%Y-%m-%d'),
            end=end.strftime('%Y-%m-%d'),
        )
        keen_client.add_events({
            collection: self.events,
        })
Example #14
def backup(ctx, filename, label, port, selection):
    """Request a backup from the Monomachine"""
    connection = ctx.obj["connection"]
    midiin, midiout, port = connect_midi(port, ctx.obj["settings"]["port"])

    data = bytearray([])
    monomachine = MonomachineSysex(midiin, midiout)
    with click.progressbar(BACKUP_MESSAGES[selection]) as bar:
        hashed = md5.new()
        for message in bar:
            result = bytearray(monomachine.send(message))
            data += result
            hashed.update(result)
        hashed = hashed.hexdigest()

    if filename:
        with open(filename, "wb") as fp:
            fp.write(data)

    connection.execute("""
    INSERT INTO backups (selection, label, port, hash, data)
    VALUES (?, ?, ?, ?, ?)
    """, (selection, label, port, hashed, buffer(data)))

    connection.commit()
    connection.close()
Example #15
def compress_gallery(gallery):
    logging.info('Compressing assets for %s', gallery.title)
    compress_settings = gallery.settings.get('compress_assets_options',
                                             DEFAULT_SETTINGS)
    compressor = get_compressor(compress_settings)

    if compressor is None:
        return

    # Collecting theme assets
    theme_assets = []
    for current_directory, _, filenames in os.walk(
            os.path.join(gallery.settings['destination'], 'static')):
        for filename in filenames:
            theme_assets.append(os.path.join(current_directory, filename))

    with progressbar(length=len(gallery.albums) + len(theme_assets),
                     label='Compressing static files') as bar:
        for album in gallery.albums.values():
            compressor.compress(os.path.join(album.dst_path,
                                             album.output_file))
            bar.update(1)

        for theme_asset in theme_assets:
            compressor.compress(theme_asset)
            bar.update(1)
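Declaring length up front lets one bar span two unrelated loops, provided every unit of work calls bar.update(1). A minimal sketch with placeholder work lists standing in for the albums and theme assets:

import click

albums = ['a', 'b']           # placeholder for gallery.albums
assets = ['x', 'y', 'z']      # placeholder for theme_assets

with click.progressbar(length=len(albums) + len(assets),
                       label='Compressing static files') as bar:
    for album in albums:
        # compress the album page here
        bar.update(1)
    for asset in assets:
        # compress the theme asset here
        bar.update(1)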
Example #16
def download_images(image_paths):
    headers = {"User-Agent": smugmug.application}
    cookies = {}
    session_id = getattr(smugmug, 'session_id', None)
    # TODO: This apparently allows downloading of private images, but doesn't
    # actually work in testing.
    if session_id:
        cookies["SMSESS"] = session_id
    session = requests.Session()
    session.headers = headers
    session.cookies = requests.utils.cookiejar_from_dict(cookies)

    with click.progressbar(image_paths, label='Downloading images') as paths:
        for image_path, url, checked_md5sum in paths:
            req = session.get(url)
            if checked_md5sum:
                md5sum = hashlib.md5()
                md5sum.update(req.content)
                if md5sum.hexdigest() != checked_md5sum:
                    click.secho(
                        'Checksum for downloaded image %s incorrect; skipping '
                        'image' % image_path, fg='red')
                    continue

            if not req.content:
                click.secho(
                    'Downloaded image %s is empty; skipping ' % url, fg='red')
                continue

            with open(image_path, 'wb') as f:
                f.write(req.content)
Example #17
def main(log_file, drop_event, drop_logger, replacements, replacements_from_file, output):
    if replacements_from_file:
        replacements = replacements_from_file.read()
    if not replacements:
        replacements = '{}'
    try:
        replacements = json.loads(replacements)
    except (JSONDecodeError, UnicodeDecodeError) as ex:
        raise UsageError(f'Option "--replacements" contains invalid JSON: {ex}') from ex
    log_records, known_fields = parse_log(log_file)
    prog_bar = click.progressbar(log_records, label='Rendering', file=_default_text_stderr())
    with prog_bar as log_records_progr:
        print(
            render(
                log_file.name,
                transform_records(
                    filter_records(
                        log_records_progr,
                        drop_events=set(d.lower() for d in drop_event),
                        drop_loggers=set(l.lower() for l in drop_logger),
                    ),
                    replacements,
                ),
                len(log_records),
                known_fields,
            ),
            file=output,
        )
Example #18
        def _progressBar(*args, **kwargs):
            bar = click.progressbar(*args, **kwargs)
            bar.bar_template = "[%(bar)s]  %(info)s  %(label)s"
            bar.show_percent = True
            bar.show_pos = True

            def formatSize(length):
                if length == 0:
                    return '%.2f' % length
                unit = ''
                # See https://en.wikipedia.org/wiki/Binary_prefix
                units = ['k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']
                while True:
                    if length <= 1024 or len(units) == 0:
                        break
                    unit = units.pop(0)
                    length /= 1024.
                return '%.2f%s' % (length, unit)

            def formatPos(_self):
                pos = formatSize(_self.pos)
                if _self.length_known:
                    pos += '/%s' % formatSize(_self.length)
                return pos

            bar.format_pos = types.MethodType(formatPos, bar)
            return bar
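With format_pos patched onto the instance, positions render as human-readable sizes (for example 512.00M/1.00G) instead of raw byte counts. A hypothetical call site, assuming the factory above is in scope:

with _progressBar(length=1024 ** 3, label='upload.bin') as bar:
    for _ in range(1024):
        bar.update(1024 ** 2)  # one mebibyte per tick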
Example #19
    def write(self, filename, calendar, assets, show_progress=False):
        """
        Parameters
        ----------
        filename : str
            The location at which we should write our output.
        calendar : pandas.DatetimeIndex
            Calendar to use to compute asset calendar offsets.
        assets : pandas.Int64Index
            The assets for which to write data.
        show_progress : bool
            Whether or not to show a progress bar while writing.

        Returns
        -------
        table : bcolz.ctable
            The newly-written table.
        """
        _iterator = self.gen_tables(assets)
        if show_progress:
            pbar = progressbar(
                _iterator,
                length=len(assets),
                item_show_func=lambda i: i if i is None else str(i[0]),
                label="Merging asset files:",
            )
            with pbar as pbar_iterator:
                return self._write_internal(filename, calendar, pbar_iterator)
        return self._write_internal(filename, calendar, _iterator)
Example #20
def get_test_times(num_tests):
    from sauceutils import SauceTools
    sauce = SauceTools("https://saucelabs.com", "polarqa", "d609b648-22e3-44bb-a38e-c28931df837d")
    jobs = []
    last_time = int(time.time())
    test_times = defaultdict(list)
    bar_length = int(math.ceil(num_tests / 500.0))

    with click.progressbar(xrange(bar_length),
                           label="Downloading statistics from Sauce Labs",
                           fill_char=click.style('+', fg='green', bold=True),
                           empty_char=click.style('-', fg='red'),
                           width=40) as bar:
        for i in bar:
            jobs += sauce.get_jobs(num_jobs=500, full=True, end_date=last_time)
            last_time = int(jobs[-1]['start_time'])

    # Only add tests that have passed
    for job in jobs:
        if job['passed'] and job['end_time']:
            test_times[job['name'].lower()].append([float(job['creation_time']),
                                                    float(job['end_time']) - float(job['start_time'])])
    click.secho("Sorted through statistics", fg='white')

    return test_times
Example #21
def check(path, pretend):
    empty, invalid = 0, 0

    with TilesStorage.open(path) as ts:
        tot = len(ts.index)

        with click.progressbar(ts.index, width=0, show_eta=True, show_pos=True,
                               show_percent=True) as bar:
            for col, row in bar:
                try:
                    _, _, size, _ = ts.index.get_entry(col, row)
                    if size:
                        ts.get_tile_image(col, row).load()
                    else:
                        empty += 1
                except Exception as e:
                    click.echo(str(e))
                    invalid += 1
                    if not pretend:
                        ts.set_tile(col, row, '')

    click.echo('Found {} invalid tiles'.format(invalid))
    click.echo('Found {} empty tiles'.format(empty))
    click.echo('{} tiles ({:%}) are now empty'.format(
        invalid + empty, (invalid + empty) * 1.0 / tot))
Example #22
def update():
    """Gather new chapters from followed series."""
    pool = concurrent.futures.ThreadPoolExecutor(config.get().download_threads)
    futures = []
    warnings = []
    aliases = {}
    query = db.session.query(db.Series).filter_by(following=True).all()
    output.series('Updating {} series'.format(len(query)))
    for follow in query:
        fut = pool.submit(series_by_url, follow.url)
        futures.append(fut)
        aliases[fut] = follow.alias
    with click.progressbar(length=len(futures), show_pos=True,
                           fill_char='>', empty_char=' ') as bar:
        for future in concurrent.futures.as_completed(futures):
            try:
                series = future.result()
            except requests.exceptions.ConnectionError:
                warnings.append('Unable to update {} (connection error)'
                                .format(aliases[future]))
            except exceptions.ScrapingError:
                warnings.append('Unable to update {} (scraping error)'
                                .format(aliases[future]))
            else:
                series.update()
            bar.update(1)
    for w in warnings:
        output.warning(w)
    list_new()
Example #23
def download_with_progress(url, chunk_size, **progress_kwargs):
    """
    Download streaming data from a URL, printing progress information to the
    terminal.

    Parameters
    ----------
    url : str
        A URL that can be understood by ``requests.get``.
    chunk_size : int
        Number of bytes to read at a time from requests.
    **progress_kwargs
        Forwarded to click.progressbar.

    Returns
    -------
    data : BytesIO
        A BytesIO containing the downloaded data.
    """
    resp = requests.get(url, stream=True)
    resp.raise_for_status()

    total_size = int(resp.headers['content-length'])
    data = BytesIO()
    with progressbar(length=total_size, **progress_kwargs) as pbar:
        for chunk in resp.iter_content(chunk_size=chunk_size):
            data.write(chunk)
            pbar.update(len(chunk))

    data.seek(0)
    return data
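Because **progress_kwargs is forwarded verbatim to click.progressbar, callers control the label and styling. A hypothetical invocation (the URL is a placeholder):

data = download_with_progress(
    'https://example.com/big-file.bin',
    chunk_size=8192,
    label='Fetching big-file.bin',
)
payload = data.read()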
Example #24
def dl(src, dst, dry_run, delete_after, logfile):
	"""Downloads files from put.io based on SRC pattern and DST folder"""

	move_patterns.append(Pattern(src, dst))
	client = putio.Client(OAUTH_TOKEN)
	click.echo("PutCLI will %s, using first matching put.io paths: \n\t%s"
		% ( 'download' if not dry_run else 'check what to download',
			'\n\t'.join([str(i) for i in move_patterns])))
	for path, dirs, files in walk('/', client.File.get(0)):
		for d in dirs[:]: # iterate on dir copy to avoid changing while iterating
			dirpath = os.path.join(path, d.name)
			# click.echo("Testing dirpath %s from %s" % (dirpath,d))
			match = False
			for p in move_patterns:
				if p.source_re.match(dirpath):
					match = True
					break
			if match:
				dest_dir = os.path.join(p.dest, path[1:])
				# click.echo("Source path %s, source dir %s, dest path %s, dest dir %s" % (path, d.name, dest_dir, d.name))
				click.echo('Matched "put.io:%s", download to "%s"' % (dirpath, dest_dir))
				for f, dest in d._download_directory(dest_dir, delete_after_download=delete_after, iter=True):
					label = f.name
					click.echo(dest)
					if not dry_run:
						chunk_generator = f._download_file(dest, delete_after_download=delete_after, iter=True)
						with click.progressbar(chunk_generator, length=f.size/putio.CHUNK_SIZE + 1, label=label, width=0) as bar:
							for update in bar:
								pass
				dirs.remove(d)
			else:
				logger.debug("No match %s" % dirpath)
Example #25
  def get_files(self, sub_path, pattern='*'):
    def progress_function(item):
      if item:
        return 'Parsing {0}'.format(item)

    with SSHClient() as client:
      client.set_missing_host_key_policy(WarningPolicy())
      client.load_system_host_keys()
      client.connect(**self.connect_settings)
      with SFTPClient.from_transport(client.get_transport()) as ftpclient:
        self.ftpclient = ftpclient
        files = sorted(filter(lambda x: fnmatch(x, pattern), ftpclient.listdir(os.path.join(self.root, sub_path))))
        with click.progressbar(files,
                               show_eta=False,
                               label='Parsing {0} logs from ssh'.format(len(files)),
                               item_show_func=progress_function) as progressbar:
          for filename in progressbar:
            path = os.path.join(self.root, sub_path, filename)
            size = ftpclient.lstat(path).st_size
            key = self.keys.log_file(filename='ssh://' + path)
            prev = self.r.get(key)
            if prev is None:
              pos = 0
            else:
              pos = int(prev)
            if size > pos:
              if progressbar.is_hidden:
                print('Reading {filename} from offset {pos}'.format(filename=path, pos=pos))
              yield path, pos
              self.r.set(key, size)
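click hides the bar entirely when output is not a terminal, which is what the progressbar.is_hidden check above falls back on. The mechanism in isolation, with hypothetical inputs:

import click

paths = ['a.log', 'b.log']  # hypothetical inputs
with click.progressbar(paths, label='Parsing logs') as bar:
    for path in bar:
        if bar.is_hidden:  # stdout redirected, e.g. to a file or a pipe
            print('Reading {0}'.format(path))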
Example #26
def main(birth_date, expected_years, row_label, row_label_period, highlight_date):

    expected_death_date = birth_date + relativedelta(years=expected_years)

    expected_death_year = expected_death_date.year
    birth_year = birth_date.year

    curr_date = date.today()

    with click.progressbar(range(1, 53), label='{}/52 weeks of year'.format(yweek(curr_date))) as bar:
        for i in bar:
            if i == yweek(curr_date):
                break

    # ensures that the formatting won't break for those who are alive
    # between 9999 and 10000 A.D. and still using this for some reason
    max_year_width = len(str(expected_death_year)) + 1
    max_age_width = len(str(expected_years)) + 1

    fmt_dct = dict(age=expected_years,
                   year=expected_death_year,
                   max_year_width=max_year_width,
                   max_age_width=max_age_width)
    row_label_len = len(row_label_formats[row_label].format(**fmt_dct))

    # Normalize set of dates to highlight (using set for constant time lookup)
    highlight_set = set(date(d.year, 1, 1) + timedelta(weeks=yweek(d))
                        for d in highlight_date)

    for year in range(birth_year, expected_death_year + 1):

        if year == birth_year:  # Print header on first iteration in loop
            click.echo(' ' * row_label_len, nl=False)
            click.echo(header())

        age = year - birth_year

        if age % row_label_period:
            click.echo(' ' * row_label_len, nl=False)
        else:
            fmt_dct = dict(age=age,
                           year=year,
                           max_year_width=max_year_width,
                           max_age_width=max_age_width)
            click.echo(row_label_formats[row_label].format(**fmt_dct), nl=False)

        date_iter = date(year, 1, 1)

        while date_iter.year == year:
            if birth_date < date_iter < curr_date:
                if date_iter in highlight_set:
                    click.secho(FILLED, nl=False, fg='red')
                else:
                    click.secho(FILLED, nl=False, fg='green')
            else:
                click.echo(EMPTY, nl=False)
            click.echo(' ', nl=False)
            date_iter += timedelta(weeks=1)

        click.echo('')
Example #27
def run(host, port):

    port = int(port)

    from . import interop_tests
    test_names = [x for x in dir(interop_tests) if x.startswith("test_")]

    tests = [getattr(interop_tests, test_name) for test_name in test_names]

    results = []
    with click.progressbar(tests, label="Running interop tests...") as _tests:
        for test in _tests:
            results.append(test(host, port))

    fmt_results = []
    for r in results:
        fmt_results.append((r.name,
                            "True" if r.success else "False", r.reason if r.reason else "", r.transcript))

    t = Texttable()
    t.set_cols_width([20, 10, 80, 60])
    rows = [["Name", "Successful", "Reason", "Client Transcript"]]
    rows.extend(fmt_results)
    t.add_rows(rows)
    print(t.draw(), file=sys.__stdout__)

    failures = [r for r in results if not r.success]

    if failures:
        sys.exit(len(failures))
    sys.exit(0)
Example #28
def progress():
    """Sample progress bar."""
    i = range(0, 200)
    logging.debug('%s -> %s', i[0], i[-1])
    with click.progressbar(i, width=0, fill_char=red('#')) as items:
        for _ in items:
            time.sleep(.01)
Example #29
    def __init__(self, trading_params, data_proxy, **kwargs):
        """init

        :param Strategy strategy: current user strategy object
        :param TradingParams trading_params: current trading params
        :param DataProxy data_proxy: current data proxy to access data
        """
        self.trading_params = trading_params
        self._data_proxy = data_proxy

        self._strategy_context = kwargs.get("strategy_context")
        if self._strategy_context is None:
            self._strategy_context = StrategyContext()

        self._user_init = kwargs.get("init", dummy_func)
        self._user_handle_bar = kwargs.get("handle_bar", dummy_func)
        self._user_before_trading = kwargs.get("before_trading", dummy_func)

        self._simu_exchange = kwargs.get("simu_exchange")
        if self._simu_exchange is None:
            self._simu_exchange = SimuExchange(data_proxy, trading_params)

        self._event_source = SimulatorAStockTradingEventSource(trading_params)
        self._current_dt = None
        self.current_universe = set()

        self.progress_bar = click.progressbar(length=len(self.trading_params.trading_calendar), show_eta=False)
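Because the bar is created outside a with block and stored on the instance, whoever drives the event loop must advance and finish it explicitly. A minimal sketch of that lifecycle, assuming a 250-step calendar:

import click

bar = click.progressbar(length=250, show_eta=False)
with bar:                    # entering renders the bar; exiting finishes it
    for _ in range(250):
        bar.update(1)        # advance one simulated trading day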
Example #30
def import_system_symbols(bundles, threads, trim_symbols, no_demangle):
    """Imports system symbols from preprocessed zip files into Sentry.

    It takes a list of zip files as arguments that contain preprocessed
    system symbol information.  These zip files contain JSON dumps.  The
    actual zipped up dsym files cannot be used here, they need to be
    preprocessed.
    """
    import zipfile
    from sentry.utils.db import is_mysql
    if threads != 1 and is_mysql():
        warnings.warn(Warning('disabled threading for mysql'))
        threads = 1
    for path in bundles:
        with zipfile.ZipFile(path) as f:
            sdk_info = json.load(f.open('sdk_info'))
            label = ('%s.%s.%s (%s)' % (
                sdk_info['version_major'],
                sdk_info['version_minor'],
                sdk_info['version_patchlevel'],
                sdk_info['version_build'],
            )).ljust(18)
            with click.progressbar(f.namelist(), label=label) as bar:
                process_archive(bar, f, sdk_info, threads,
                                trim_symbols=trim_symbols,
                                demangle=not no_demangle)
Example #31
def SessionsPerStation(station, datefrom, dateto, format, apikey):

    """\b
       -------------------------------------------
       This command displays all charging sessions
       at a specific station, during the given
       period of time.
       -------------------------------------------

       User authorization level: Connected user

       REST API call: /SessionsPerStation

       \b
       -------------------------------------------
       NOTE: Date options must be chronologically
       correct. (--datefrom = starting date ,
       --dateto = ending date)
       -------------------------------------------
       """

    validation.valid(apikey)
    validation.fdatecheck(datefrom)
    validation.tdatecheck(dateto)
    validation.chronological(datefrom, dateto)
    validation.user()

    click.echo()
    click.echo(click.style("Loading...", fg='cyan'))
    with open(paths.token_path, "r") as f:
        token = f.readline()
    p = {'format': format, 'apikey': apikey}
    h = {'Authorization':'Token '+token}
    service = 'SessionsPerStation/'+ station+ '/'+ datefrom+ '/'+ dateto
    response = requests.get(url=f'{paths.baseURL}/{service}/', headers=h, params=p)
    if response.status_code == 200 or response.status_code == 402:
        with click.progressbar([1, 2, 3]) as bar:
            for x in bar:
                time.sleep(0.5)
        if format == 'json':
            res = json.loads(response.text)
            click.echo()
            click.echo('Station ID: '+ str(res['StationID']))
            click.echo('Station operator: '+ str(res['Operator']))
            click.echo('Request Timestamp: '+ str(res['RequestTimestamp']))
            click.echo('Period from: '+ str(res['PeriodFrom']))
            click.echo('Period to: '+ str(res['PeriodTo']))
            click.echo('Total energy delivered: '+ str(res['TotalEnergyDelivered'])+ ' kWh')
            click.echo('Number of charging sessions: '+ str(res['NumberOfChargingSessions']))
            click.echo('Number of active points: '+ str(res['NumberOfActivePoints']))
            if response.status_code == 200:
                click.echo()
                click.echo(click.style('Sessions Summary:', fg='cyan'))
                for session in res['SessionsSummaryList']:
                    click.echo()
                    click.echo('Point ID: '+ str(session['PointID']))
                    click.echo('Point sessions: '+ str(session['PointSessions']))
                    click.echo('Energy delivered: '+ str(session['EnergyDelivered'])+ ' kWh')
            else:
                click.echo()
                click.echo(click.style('Sessions Summary: ', fg='cyan') + 'No data')
        else:
            with open(paths.temporary_path, 'w') as f:
                f.write(response.text)
            with open(paths.temporary_path) as temp:
                res = csv.DictReader(temp)
                for rows in res:
                    click.echo()
                    click.echo('Station ID: '+ str(rows['StationID']))
                    click.echo('Station operator: '+ str(rows['Operator']))
                    click.echo('Request timestamp: '+ str(rows['RequestTimestamp']))
                    click.echo('Period from: '+ str(rows['PeriodFrom']))
                    click.echo('Period to: '+ str(rows['PeriodTo']))
                    click.echo('Total energy delivered: '+ str(rows['TotalEnergyDelivered'])+ ' kWh')
                    click.echo('Number of charging sessions: '+ str(rows['NumberOfChargingSessions']))
                    points_count = rows['NumberOfActivePoints']
                    points_count = int(points_count)
                    click.echo('Number of active points: '+ str(points_count))
                    if response.status_code == 200:
                        click.echo()
                        click.echo(click.style('Sessions Summary: ', fg='cyan'))
                        for i in range(0,points_count):
                            click.echo()
                            click.echo('Point ID: '+ str(rows['SessionsSummaryList.'+str(i)+'.PointID']))
                            click.echo('Point sessions: '+ str(rows['SessionsSummaryList.'+str(i)+'.PointSessions']))
                            click.echo('Energy delivered: '+ str(rows['SessionsSummaryList.'+str(i)+'.EnergyDelivered'])+ ' kWh')
                    else:
                        click.echo()
                        click.echo(click.style('Sessions Summary: ', fg='cyan') + 'No data')
            os.remove(paths.temporary_path)
    else:
        if format == 'json':
            click.echo()
            raise click.ClickException(response.text[2:-2])
        else:
            click.echo()
            raise click.ClickException(response.text[4:-2])
Example #32
def create_low_level_cogs(
    mosaic_definition: Dict,
    output_profile: Dict,
    prefix: str = "mosaic_ovr",
    max_overview_level: int = 6,
    config: Dict = None,
    threads=1,
) -> None:
    """
    Create WebOptimized Overview COG from a mosaic definition file.

    Attributes
    ----------
    mosaic_definition : dict, required
        Mosaic definition.
    prefix : str
    add_mask, bool, optional
        Force output dataset creation with a mask.
    max_overview_level : int
    config : dict
        Rasterio Env options.

    """
    tilesize = 256

    base_zoom = mosaic_definition["minzoom"] - 1
    bounds = mosaic_definition["bounds"]
    asset = _get_asset_example(mosaic_definition)
    info = _get_info(asset)

    extrema = tile_extrema(bounds, base_zoom)
    res = _meters_per_pixel(base_zoom, 0, tilesize=tilesize)

    # Create multiples files if coverage is too big
    extremas = _split_extrema(extrema,
                              max_ovr=max_overview_level,
                              tilesize=tilesize)
    for ix, extrema in enumerate(extremas):
        blocks = list(_get_blocks(extrema))
        random.shuffle(blocks)

        width = (extrema["x"]["max"] - extrema["x"]["min"]) * tilesize
        height = (extrema["y"]["max"] - extrema["y"]["min"]) * tilesize
        w, n = mercantile.xy(*mercantile.ul(extrema["x"]["min"], extrema["y"]
                                            ["min"], base_zoom))

        params = dict(
            driver="GTiff",
            dtype=info[1],
            count=info[0],
            width=width,
            height=height,
            crs="epsg:3857",
            transform=Affine(res, 0, w, 0, -res, n),
            nodata=info[4],
            tiled=True,
            blockxsize=256,
            blockysize=256,
        )

        config = config or {}
        with rasterio.Env(**config):
            with MemoryFile() as memfile:
                with memfile.open(**params) as mem:

                    def _get_tile(wind):
                        idx, window = wind
                        x = extrema["x"]["min"] + idx[1]
                        y = extrema["y"]["min"] + idx[0]
                        assets = list(
                            set(get_assets(mosaic_definition, x, y,
                                           base_zoom)))
                        tile, mask = None, None
                        if assets:
                            tile, mask = mosaic_tiler(
                                assets,
                                x,
                                y,
                                base_zoom,
                                cogeoTiler,
                                tilesize=tilesize,
                                pixel_selection=defaults.FirstMethod(),
                            )
                            if tile is None:
                                raise Exception("Empty")

                        return window, tile, mask

                    with futures.ThreadPoolExecutor(
                            max_workers=threads) as executor:
                        future_work = [
                            executor.submit(_get_tile, item) for item in blocks
                        ]
                        with click.progressbar(
                                futures.as_completed(future_work),
                                length=len(future_work),
                                show_percent=True,
                        ) as future:
                            for _ in future:
                                pass

                    for f in _filter_futures(future_work):
                        window, tile, mask = f
                        mem.write(tile, window=window)
                        if info[5]:
                            mem.write_mask(mask.astype("uint8"), window=window)

                    cog_translate(
                        mem,
                        f"{prefix}_{ix}.tif",
                        output_profile,
                        config=config,
                        in_memory=True,
                    )
Example #33
def SessionsPerProvider(provider, datefrom, dateto, format, apikey):

    """\b
       -------------------------------------------
       This command displays all charging sessions
       from an energy provider, during the given
       period of time.
       -------------------------------------------

       User authorization level: Connected user

       REST API call: /SessionsPerProvider

       \b
       -------------------------------------------
       NOTE: Date options must be chronologically
       correct. (--datefrom = starting date ,
       --dateto = ending date)
       -------------------------------------------
       """

    validation.valid(apikey)
    validation.fdatecheck(datefrom)
    validation.tdatecheck(dateto)
    validation.chronological(datefrom, dateto)
    validation.user()

    click.echo()
    click.echo(click.style("Loading...", fg='cyan'))
    with open(paths.token_path, "r") as f:
        token = f.readline()
    p = {'format': format, 'apikey': apikey}
    h = {'Authorization':'Token '+token}
    service = 'SessionsPerProvider/'+ provider+ '/'+ datefrom+ '/'+ dateto
    with requests.get(url=f'{paths.baseURL}/{service}/', headers=h, params=p, stream=True) as response:    
        if response.status_code == 200 or response.status_code == 402:
            with click.progressbar([1, 2, 3]) as bar:
                for x in bar:
                    time.sleep(0.5)
            if format == 'json':
                res = json.loads(response.text)
                click.echo()
                click.echo('Provider ID: '+ str(res['ProviderID']))
                click.echo('Provider name: '+ str(res['ProviderName']))
                click.echo('Number of provider charging sessions: '+ str(res['NumberOfProviderChargingSessions']))
                if response.status_code == 200:
                    click.echo()
                    click.echo(click.style('Provider Charging Sessions:', fg='cyan'))
                    for session in res['ProviderChargingSessionsList']:
                        click.echo()
                        click.echo('Station ID: '+ str(session['StationID']))
                        click.echo('Session ID: '+ str(session['SessionID']))
                        click.echo('Vehicle ID: '+ str(session['VehicleID']))
                        click.echo('Started on: '+ str(session['StartedOn']))
                        click.echo('Finished on: '+ str(session['FinishedOn']))
                        click.echo('Energy delivered: '+ str(session['EnergyDelivered'])+ ' kWh')
                        click.echo('Price policy reform: '+ str(session['PricePolicyRef']))
                        click.echo('Cost per kWh: '+ str(session['CostPerKWh']))
                        click.echo('Total cost: '+ str(session['TotalCost']))
                else:
                    click.echo()
                    click.echo(click.style('Provider Charging Sessions:', fg='cyan')+ 'No data')
            else:
                with open(paths.temporary_path, 'w') as f:
                    f.write(response.text)
                with open(paths.temporary_path) as temp:
                    res = csv.DictReader(temp)
                    for rows in res:
                        click.echo()
                        click.echo('Provider ID: '+ str(rows['ProviderID']))
                        click.echo('Provider name: '+ str(rows['ProviderName']))
                        sessions_count = rows['NumberOfProviderChargingSessions']
                        sessions_count = int(sessions_count)
                        click.echo('Number of provider charging sessions: '+ str(sessions_count))
                        if response.status_code == 200:
                            click.echo()
                            click.echo(click.style('Provider Charging Sessions: ', fg='cyan'))
                            for i in range(0,sessions_count):
                                click.echo()
                                click.echo('Station ID: '+ str(rows['ProviderChargingSessionsList.'+str(i)+'.StationID']))
                                click.echo('Session ID: '+ str(rows['ProviderChargingSessionsList.'+str(i)+'.SessionID']))
                                click.echo('Vehicle ID: '+ str(rows['ProviderChargingSessionsList.'+str(i)+'.VehicleID']))
                                click.echo('Started on: '+ str(rows['ProviderChargingSessionsList.'+str(i)+'.StartedOn']))
                                click.echo('Finished on: '+ str(rows['ProviderChargingSessionsList.'+str(i)+'.FinishedOn']))
                                click.echo('Energy delivered: '+ str(rows['ProviderChargingSessionsList.'+str(i)+'.EnergyDelivered'])+ ' kWh')
                                click.echo('Price policy reform: '+ str(rows['ProviderChargingSessionsList.'+str(i)+'.PricePolicyRef']))
                                click.echo('Cost per kWh: '+ str(rows['ProviderChargingSessionsList.'+str(i)+'.CostPerKWh'])+ ' $')
                                click.echo('Total cost: '+ str(rows['ProviderChargingSessionsList.'+str(i)+'.TotalCost'])+ ' $')
                        else:
                            click.echo()
                            click.echo(click.style('Provider Charging Sessions: ', fg='cyan')+ 'No data')
                os.remove(paths.temporary_path)
        else:
            if format == 'json':
                click.echo()
                raise click.ClickException(response.text[2:-2])
            else:
                click.echo()
                raise click.ClickException(response.text[4:-2])
Example #34
def TalwegMetrics(axis):
    """
    Calculate median talweg height relative to valley floor

    @api    fct-metrics:talweg

    @input  dem: dem
    @input  talweg: ax_talweg
    @input  swath_bounds: ax_swaths_refaxis_bounds
    @input  swath_raster: ax_swaths_refaxis
    @input  axis_measure: ax_axis_measure
    @input  swath_elevation: ax_swath_elevation_npz

    @output metrics_talweg: metrics_talweg
    """

    elevation_raster = config.tileset().filename('dem')
    talweg_shapefile = config.filename('ax_talweg', axis=axis)
    swath_bounds = config.filename('ax_swaths_refaxis_bounds', axis=axis)
    swath_raster = config.tileset().filename('ax_swaths_refaxis', axis=axis)
    measure_raster = config.tileset().filename('ax_axis_measure', axis=axis)

    # swath => z0, slope

    defs = xr.open_dataset(swath_bounds)
    defs.load()
    defs = defs.sortby('measure')

    estimates = dict()

    with click.progressbar(defs['swath'].values) as iterator:
        for gid in iterator:

            filename = config.filename('ax_swath_elevation_npz',
                                       axis=axis,
                                       gid=gid)

            if os.path.exists(filename):

                data = np.load(filename, allow_pickle=True)
                z0 = data['z0_valley_floor']
                slope = data['slope_valley_floor']

                if not (np.isnan(z0) or np.isnan(slope)):
                    estimates[gid] = (slope, z0)

    # talweg => vertices (x, y, z, swath, axis m)

    swathid = np.array([])
    coordz = np.array([])
    coordm = np.array([])
    coords = np.array([])
    # coordxy = np.zeros((0, 2), dtype='float32')
    s0 = 0.0

    with fiona.open(talweg_shapefile) as fs:
        with click.progressbar(fs, length=len(fs)) as iterator:
            for feature in iterator:

                coordinates = np.array(feature['geometry']['coordinates'],
                                       dtype='float32')
                length = asShape(feature['geometry']).length

                with rio.open(swath_raster) as ds:

                    coordij = fct.worldtopixel(coordinates[:, :2],
                                               ds.transform)
                    pixels = list()
                    # segment_s = list()

                    # we must interpolate segments between vertices,
                    # otherwise we may miss swaths that fall between two vertices

                    for a, b in zip(coordij[:-1], coordij[1:]):
                        for i, j in rasterize_linestring(a, b):

                            pixels.append((i, j))
                            # segment_s.append(s0 + s*length)

                    segment_xy = fct.pixeltoworld(
                        np.array(pixels, dtype='int32'), ds.transform)

                    # coordxy = np.concatenate([
                    #     coordxy,
                    #     segment_xy
                    # ], axis=0)

                    # calculate s coordinate
                    segment_s = s0 + np.cumsum(
                        np.linalg.norm(
                            # segment_xy[:0:-1] - segment_xy[-2::-1],
                            segment_xy[1:] - segment_xy[:-1],
                            axis=1))

                    coords = np.concatenate([coords, [s0], segment_s])

                    s0 = segment_s[-1]

                    segment_swathid = np.array(list(ds.sample(segment_xy, 1)))
                    swathid = np.concatenate([swathid, segment_swathid[:, 0]],
                                             axis=0)

                with rio.open(measure_raster) as measure_ds:

                    segment_m = np.array(list(measure_ds.sample(segment_xy,
                                                                1)))
                    coordm = np.concatenate([coordm, segment_m[:, 0]], axis=0)

                with rio.open(elevation_raster) as ds:

                    segment_z = np.array(list(ds.sample(segment_xy, 1)))
                    coordz = np.concatenate([coordz, segment_z[:, 0]], axis=0)

    indices = sorted(enumerate(swathid), key=itemgetter(1))
    groups = itertools.groupby(indices, key=itemgetter(1))

    swids = list()
    measures = list()
    heights = list()
    talweg_slopes = list()
    talweg_lengths = list()
    talweg_elevations = list()
    # floodplain_slopes = list()

    for swid, group in groups:

        if swid == 0:
            continue

        elements = np.array([k for k, _ in group])

        talweg_length = np.max(coords[elements]) - np.min(coords[elements])

        Y = ztalweg = coordz[elements]
        X = np.column_stack([
            coords[elements],
            np.ones_like(elements),
        ])

        (talweg_slope,
         talweg_z0), sqerror_talweg, _, _ = np.linalg.lstsq(X, Y, rcond=None)

        if swid in estimates:

            floodplain_slope, floodplain_z0 = estimates[swid]
            zvalley = floodplain_slope * coordm[elements] + floodplain_z0

            height_median = np.median(ztalweg - zvalley)
            height_min = np.min(ztalweg - zvalley)

        else:

            # htalweg = -10.0
            height_median = height_min = np.nan
            floodplain_slope = np.nan

        swathm = defs['measure'].sel(swath=swid).values

        measures.append(swathm)
        swids.append(swid)
        heights.append((height_min, height_median))
        # floodplain_slopes.append(floodplain_slope)
        talweg_slopes.append(talweg_slope)
        talweg_lengths.append(talweg_length)
        talweg_elevations.append((np.min(ztalweg), np.median(ztalweg)))

    swids = np.array(swids, dtype='uint32')
    measures = np.array(measures, dtype='float32')

    heights = np.array(heights, dtype='float32')
    interpolated = np.isnan(heights[:, 0])
    heights = InterpolateMissingValues(measures, heights)

    talweg_lengths = np.array(talweg_lengths, dtype='float32')
    talweg_elevations = np.array(talweg_elevations, dtype='float32')
    talweg_slopes = -100 * np.array(talweg_slopes, dtype='float32')
    # floodplain_slopes = np.array(floodplain_slopes, dtype='float32')

    dataset = xr.Dataset(
        {
            'talweg_height_min': ('measure', heights[:, 0]),
            'talweg_height_median': ('measure', heights[:, 1]),
            'talweg_height_is_interpolated': ('measure', interpolated),
            'talweg_length': ('measure', talweg_lengths),
            'talweg_elevation_min': ('measure', talweg_elevations[:, 0]),
            'talweg_elevation_median': ('measure', talweg_elevations[:, 1]),
            'talweg_slope': ('measure', talweg_slopes)
            # 'floodplain_slope': ('measure', floodplain_slopes)
        },
        coords={
            'axis': axis,
            'measure': measures,
            'swath': ('measure', swids),
        })

    # Metadata

    set_metadata(dataset, 'metrics_talweg')

    return dataset
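The per-swath regression above solves z = slope * s + z0 with np.linalg.lstsq over the design matrix [s, 1]. A self-contained sketch of that fitting step on synthetic data (the profile values are made up for illustration):

import numpy as np

# Synthetic talweg profile: curvilinear coordinate s and elevation z
s = np.linspace(0.0, 500.0, 50).astype('float32')
z = -0.002 * s + 120.0

# Design matrix [s, 1] so that z ~ slope * s + z0
X = np.column_stack([s, np.ones_like(s)])
(slope, z0), residuals, _, _ = np.linalg.lstsq(X, z, rcond=None)

print(f"slope = {slope:.5f}, z0 = {z0:.2f}")  # ~ -0.00200 and ~ 120.00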
Exemple #35
0
def download(obj, provider, language, age, directory, encoding, single, force,
             hearing_impaired, min_score, verbose, path):
    """Download best subtitles.

    PATH can be a directory containing videos, a video file path, or a video file name. It can be used multiple times.

    If an existing subtitle is detected (external or embedded) in the correct language, the download is skipped for
    the associated video.

    """
    # process parameters
    language = set(language)

    # scan videos
    videos = []
    ignored_videos = []
    errored_paths = []
    with click.progressbar(path,
                           label='Collecting videos',
                           item_show_func=lambda p: p or '') as bar:
        for p in bar:
            logger.debug('Collecting path %s', p)

            # non-existing
            if not os.path.exists(p):
                try:
                    video = Video.fromname(p)
                except Exception:
                    logger.exception(
                        'Unexpected error while collecting non-existing path %s',
                        p)
                    errored_paths.append(p)
                    continue
                videos.append(video)
                continue

            # directories
            if os.path.isdir(p):
                try:
                    scanned_videos = scan_videos(p,
                                                 subtitles=not force,
                                                 embedded_subtitles=not force)
                except Exception:
                    logger.exception(
                        'Unexpected error while collecting directory path %s',
                        p)
                    errored_paths.append(p)
                    continue
                for video in scanned_videos:
                    if check_video(video,
                                   languages=language,
                                   age=age,
                                   undefined=single):
                        videos.append(video)
                    else:
                        ignored_videos.append(video)
                continue

            # other inputs
            try:
                video = scan_video(p,
                                   subtitles=not force,
                                   embedded_subtitles=not force)
            except Exception:
                logger.exception('Unexpected error while collecting path %s',
                                 p)
                errored_paths.append(p)
                continue
            if check_video(video,
                           languages=language,
                           age=age,
                           undefined=single):
                videos.append(video)
            else:
                ignored_videos.append(video)

    # output errored paths
    if verbose > 0:
        for p in errored_paths:
            click.secho('%s errored' % p, fg='red')

    # output ignored videos
    if verbose > 1:
        for video in ignored_videos:
            click.secho(
                '%s ignored - subtitles: %s / age: %d day%s' %
                (os.path.split(video.name)[1],
                 ', '.join(str(s) for s in video.subtitle_languages)
                 or 'none', video.age.days, 's' if video.age.days > 1 else ''),
                fg='yellow')

    # report collected videos
    click.echo('%s video%s collected / %s video%s ignored / %s error%s' % (
        click.style(
            str(len(videos)), bold=True, fg='green' if videos else None),
        's' if len(videos) > 1 else '',
        click.style(str(len(ignored_videos)),
                    bold=True,
                    fg='yellow' if ignored_videos else None),
        's' if len(ignored_videos) > 1 else '',
        click.style(str(len(errored_paths)),
                    bold=True,
                    fg='red' if errored_paths else None),
        's' if len(errored_paths) > 1 else '',
    ))

    # exit if no video collected
    if not videos:
        return

    # download best subtitles
    downloaded_subtitles = defaultdict(list)
    with ProviderPool(providers=provider,
                      provider_configs=obj['provider_configs']) as pool:
        with click.progressbar(
                videos,
                label='Downloading subtitles',
                item_show_func=lambda v: os.path.split(v.name)[1]
                if v is not None else '') as bar:
            for v in bar:
                subtitles = pool.download_best_subtitles(
                    pool.list_subtitles(v, language - v.subtitle_languages),
                    v,
                    language,
                    min_score=v.scores['hash'] * min_score / 100,
                    hearing_impaired=hearing_impaired,
                    only_one=single)
                downloaded_subtitles[v] = subtitles

    # save subtitles
    total_subtitles = 0
    for v, subtitles in downloaded_subtitles.items():
        saved_subtitles = save_subtitles(v,
                                         subtitles,
                                         single=single,
                                         directory=directory,
                                         encoding=encoding)
        total_subtitles += len(saved_subtitles)

        if verbose > 0:
            click.echo(
                '%s subtitle%s downloaded for %s' %
                (click.style(str(len(saved_subtitles)), bold=True), 's' if
                 len(saved_subtitles) > 1 else '', os.path.split(v.name)[1]))

        if verbose > 1:
            for s in saved_subtitles:
                matches = s.get_matches(v, hearing_impaired=hearing_impaired)
                score = compute_score(matches, v)

                # score color
                score_color = None
                if isinstance(v, Movie):
                    if score < v.scores['title']:
                        score_color = 'red'
                    elif score < v.scores['title'] + v.scores[
                            'year'] + v.scores['release_group']:
                        score_color = 'yellow'
                    else:
                        score_color = 'green'
                elif isinstance(v, Episode):
                    if score < v.scores['series'] + v.scores[
                            'season'] + v.scores['episode']:
                        score_color = 'red'
                    elif score < (v.scores['series'] + v.scores['season'] +
                                  v.scores['episode'] +
                                  v.scores['release_group']):
                        score_color = 'yellow'
                    else:
                        score_color = 'green'

                # scale score from 0 to 100 taking out preferences
                scaled_score = score
                if s.hearing_impaired == hearing_impaired:
                    scaled_score -= v.scores['hearing_impaired']
                scaled_score *= 100 / v.scores['hash']

                # echo some nice colored output
                click.echo(
                    '  - [{score}] {language} subtitle from {provider_name} (match on {matches})'
                    .format(score=click.style('{:5.1f}'.format(scaled_score),
                                              fg=score_color,
                                              bold=score >= v.scores['hash']),
                            language=s.language.name
                            if s.language.country is None else '%s (%s)' %
                            (s.language.name, s.language.country.name),
                            provider_name=s.provider_name,
                            matches=', '.join(
                                sorted(matches, key=v.scores.get,
                                       reverse=True))))

    if verbose == 0:
        click.echo('Downloaded %s subtitle%s' %
                   (click.style(str(total_subtitles), bold=True),
                    's' if total_subtitles > 1 else ''))
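The verbose branch rescales a raw match score to 0-100 by subtracting the hearing-impaired preference bonus and dividing by the video's maximum ('hash') score. A standalone sketch of that arithmetic, with hypothetical numbers standing in for subliminal's real score tables:

# Hypothetical score table for a movie; the real values come from subliminal's Video.scores
scores = {'hash': 46, 'title': 23, 'year': 12, 'release_group': 6, 'hearing_impaired': 1}

def scale_score(raw_score, matched_hearing_impaired, wanted_hearing_impaired):
    """Scale a raw match score to 0-100, excluding the hearing-impaired preference bonus."""
    scaled = raw_score
    if matched_hearing_impaired == wanted_hearing_impaired:
        scaled -= scores['hearing_impaired']
    return scaled * 100 / scores['hash']

print(scale_score(47, True, True))   # perfect hash match plus preference -> 100.0
print(scale_score(35, False, True))  # partial match, no preference bonus -> ~76.1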
Exemple #36
0
    def verifyProm(self):
        # Reset the PROM
        self._resetCmd()

        if self._useVars:
            # Set the data bus
            self.DataWrBus.set(0xFFFFFFFF)
            # Set the block transfer size
            self.TranSize.set(0xFF)
        else:
            # Set the data bus
            self._rawWrite(offset=0x0,
                           data=0xFFFFFFFF,
                           tryCount=self._tryCount)  # Deprecated
            # Set the block transfer size
            self._rawWrite(offset=0x80, data=0xFF,
                           tryCount=self._tryCount)  # Deprecated

        # Setup the status bar
        with click.progressbar(
                length=self._mcs.size,
                label=click.style('Verifying PROM:', fg='green'),
        ) as bar:
            for i in range(self._mcs.size):
                if ((i & 0x1) == 0):
                    # Get the data and address from MCS file
                    addr = int(self._mcs.entry[i]
                               [0]) >> 1  # 16-bit word addressing at the PROM
                    data = int(self._mcs.entry[i][1]) & 0xFF
                    # Check for burst transfer
                    if ((i & 0x1FF) == 0):
                        # Throttle down printf rate
                        bar.update(0x1FF)

                        if self._useVars:
                            # Start a burst transfer
                            self.BurstTran.set(0x80000000 | addr)

                            # Get the data
                            dataArray = self.BurstData.get()

                        else:
                            # Start a burst transfer
                            self._rawWrite(
                                offset=0x84,
                                data=0x80000000 | addr,
                                tryCount=self._tryCount)  # Deprecated
                            # Get the data
                            dataArray = self._rawRead(
                                offset=0x400,
                                numWords=256,
                                tryCount=self._tryCount)  # Deprecated
                else:
                    # Get the data for MCS file
                    data |= (int(self._mcs.entry[i][1]) << 8)
                    # Get the prom data from data array
                    prom = dataArray[(i & 0x1FF) >> 1]
                    # Compare PROM to file
                    if (data != prom):
                        click.secho(
                            ("\nAddr = 0x%x: MCS = 0x%x != PROM = 0x%x" %
                             (addr, data, prom)),
                            fg='red')
                        raise surf.misc.McsException('verifyProm() Failed\n\n')
            # Close the status bar
            bar.update(self._mcs.size)
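verifyProm drives a length-based progressbar by hand, calling bar.update only once every 512 entries so the redraw cost stays negligible. A minimal sketch of the same throttling pattern, with a hypothetical process() helper:

import click

def process(entries):
    """Advance a length-based progressbar in coarse steps instead of per item."""
    with click.progressbar(length=len(entries), label='Processing') as bar:
        for i, entry in enumerate(entries):
            # ... per-entry work goes here ...
            if (i & 0x1FF) == 0:   # every 512 items
                bar.update(0x1FF)  # advance the bar in one coarse step
        bar.update(len(entries))   # make sure the bar reaches 100%

process(list(range(10_000)))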
Exemple #37
0
    def start(self):
        with progressbar(self._unpacker.get_items(), label="Unpacking") as pb:
            for item in pb:
                self._unpacker.extract_item(item, self._dest_dir)
        return True
Exemple #38
0
def simulator(candles: Dict[str, Dict[str, Union[str, np.ndarray]]],
              hyperparameters=None) -> None:
    begin_time_track = time.time()
    key = f"{config['app']['considering_candles'][0][0]}-{config['app']['considering_candles'][0][1]}"
    first_candles_set = candles[key]['candles']
    length = len(first_candles_set)
    # to preset the array size for performance
    store.app.starting_time = first_candles_set[0][0]
    store.app.time = first_candles_set[0][0]

    # initiate strategies
    for r in router.routes:
        StrategyClass = jh.get_strategy_class(r.strategy_name)

        try:
            r.strategy = StrategyClass()
        except TypeError:
            raise exceptions.InvalidStrategy(
                "Looks like the structure of your strategy directory is incorrect. Make sure to include the strategy INSIDE the __init__.py file."
                "\nIf you need working examples, check out: https://github.com/jesse-ai/example-strategies"
            )
        except:
            raise

        r.strategy.name = r.strategy_name
        r.strategy.exchange = r.exchange
        r.strategy.symbol = r.symbol
        r.strategy.timeframe = r.timeframe

        # inject hyperparameters (used for optimize_mode)
        # convert the DNA string into hyperparameters
        if r.dna and hyperparameters is None:
            hyperparameters = jh.dna_to_hp(r.strategy.hyperparameters(), r.dna)

        # inject hyperparameters sent within the optimize mode
        if hyperparameters is not None:
            r.strategy.hp = hyperparameters

        # init a few objects that couldn't be initiated in Strategy __init__
        # it also injects hyperparameters into self.hp in case the route does not use any DNAs
        r.strategy._init_objects()

        selectors.get_position(r.exchange, r.symbol).strategy = r.strategy

    # add initial balance
    save_daily_portfolio_balance()

    with click.progressbar(length=length,
                           label='Executing simulation...') as progressbar:
        for i in range(length):
            # update time
            store.app.time = first_candles_set[i][0] + 60_000

            # add candles
            for j in candles:
                short_candle = candles[j]['candles'][i]
                if i != 0:
                    previous_short_candle = candles[j]['candles'][i - 1]
                    short_candle = _get_fixed_jumped_candle(
                        previous_short_candle, short_candle)
                exchange = candles[j]['exchange']
                symbol = candles[j]['symbol']

                store.candles.add_candle(short_candle,
                                         exchange,
                                         symbol,
                                         '1m',
                                         with_execution=False,
                                         with_generation=False)

                # print short candle
                if jh.is_debuggable('shorter_period_candles'):
                    print_candle(short_candle, True, symbol)

                _simulate_price_change_effect(short_candle, exchange, symbol)

                # generate and add candles for bigger timeframes
                for timeframe in config['app']['considering_timeframes']:
                    # for 1m, no work is needed
                    if timeframe == '1m':
                        continue

                    count = jh.timeframe_to_one_minutes(timeframe)
                    until = count - ((i + 1) % count)

                    if (i + 1) % count == 0:
                        generated_candle = generate_candle_from_one_minutes(
                            timeframe,
                            candles[j]['candles'][(i - (count - 1)):(i + 1)])
                        store.candles.add_candle(generated_candle,
                                                 exchange,
                                                 symbol,
                                                 timeframe,
                                                 with_execution=False,
                                                 with_generation=False)

            # update progressbar
            if not jh.is_debugging() and not jh.should_execute_silently(
            ) and i % 60 == 0:
                progressbar.update(60)

            # now that all new generated candles are ready, execute
            for r in router.routes:
                count = jh.timeframe_to_one_minutes(r.timeframe)
                # 1m timeframe
                if r.timeframe == timeframes.MINUTE_1:
                    r.strategy._execute()
                elif (i + 1) % count == 0:
                    # print candle
                    if jh.is_debuggable('trading_candles'):
                        print_candle(
                            store.candles.get_current_candle(
                                r.exchange, r.symbol, r.timeframe), False,
                            r.symbol)
                    r.strategy._execute()

            # now check to see if there's any MARKET orders waiting to be executed
            store.orders.execute_pending_market_orders()

            if i != 0 and i % 1440 == 0:
                save_daily_portfolio_balance()

    if not jh.should_execute_silently():
        if jh.is_debuggable('trading_candles') or jh.is_debuggable(
                'shorter_period_candles'):
            print('\n')

        # print executed time for the backtest session
        finish_time_track = time.time()
        print('Executed backtest simulation in: ',
              f'{round(finish_time_track - begin_time_track, 2)} seconds')

    for r in router.routes:
        r.strategy._terminate()
        store.orders.execute_pending_market_orders()

    # now that backtest is finished, add finishing balance
    save_daily_portfolio_balance()
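The simulator builds candles for larger timeframes by slicing the trailing count one-minute candles whenever (i + 1) % count == 0. A simplified, self-contained sketch of that aggregation; the column layout [timestamp, open, close, high, low, volume] is an assumption made for illustration:

import numpy as np

def candle_from_one_minutes(window):
    """Aggregate a window of 1m candles [ts, open, close, high, low, volume] into one."""
    return np.array([
        window[0][0],        # timestamp of the first 1m candle
        window[0][1],        # open of the first candle
        window[-1][2],       # close of the last candle
        window[:, 3].max(),  # highest high
        window[:, 4].min(),  # lowest low
        window[:, 5].sum(),  # summed volume
    ])

one_minute = np.random.rand(60, 6)         # 60 fake 1m candles
one_minute[:, 0] = np.arange(60) * 60_000  # minute timestamps

count = 5  # e.g. aggregate into 5m candles
five_minute = []
for i in range(len(one_minute)):
    if (i + 1) % count == 0:
        window = one_minute[(i - (count - 1)):(i + 1)]
        five_minute.append(candle_from_one_minutes(window))

print(len(five_minute))  # 12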
Exemple #39
0
def progress(file):
    stat = os.stat(file.name)
    with click.progressbar(length=stat.st_size, label='Converting') as bar:
        for line in file.readlines():
            bar.update(len(line))
            yield line
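Because progress is a generator, the bar only advances as a consumer pulls lines, so the conversion code never has to know about the progressbar. A usage sketch, where convert is a hypothetical consumer of the generator above:

# 'progress' is the generator defined above; 'convert' is a hypothetical consumer.
def convert(lines):
    return [line.upper() for line in lines]

with open('input.txt') as f:
    converted = convert(progress(f))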
Exemple #40
0
    def set_playlist(self, anime_url: str, verbose: bool):
        if not self.signed_in:
            click.echo("No user logged in. Use login command.")
            exit(1)
        if anime_url.isdecimal():
            anime_url = "https://www.anime-on-demand.de/anime/" + anime_url
        elif re.match("https://(www\.)?anime-on-demand\.de/anime/\d+",
                      anime_url) is None:
            raise AoDDownloaderException(
                "Given url does not match a playlist url")
        if verbose:
            click.echo("Matched playlist url")

        if not self.config.german and not self.config.japanese:
            if verbose:
                click.echo("No language selection found")
            japanese, german = self.config.setLanguages()
            if not german and not japanese:
                raise AoDDownloaderException(
                    "No language chosen. Please choose at least one.")
        else:
            german = self.config.german
            japanese = self.config.japanese

        response = self._validate_response(self.session.get(anime_url),
                                           return_obj='soup')
        streams = {}
        if german:
            if verbose:
                click.echo("Set german stream")
            streams["german"] = response.find(
                'input', {'title': 'Deutschen Stream starten'})
        if japanese:
            if verbose:
                click.echo("Set japanese stream")
            streams["japanese"] = response.find(
                'input',
                {'title': 'Japanischen Stream mit Untertiteln starten'})

        if (not streams.get("german")
                or not streams.get("german")['data-playlist']) and (
                    not streams.get("japanese")
                    or not streams.get("japanese")['data-playlist']):
            raise AoDDownloaderException(
                f"Could not determine stream for {anime_url}. Selected language may not be available"
            )

        playlist_data = []
        for stream in streams:
            playlist_url = f"https://www.anime-on-demand.de{streams[stream]['data-playlist']}"
            for episode in self._validate_response(
                    self.session.get(playlist_url),
                    return_obj='json').get('playlist'):
                if len(streams) > 1:
                    episode['description'] += f"_{stream[:3].upper()}"
                playlist_data.append(episode)
        with click.progressbar(playlist_data,
                               label="Parsing episodes",
                               show_percent=False,
                               show_eta=False,
                               show_pos=True) as pd:
            self.current_playlist = [
                self._parse_episode(episodeData) for episodeData in pd
            ]
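The id-or-URL normalization at the top of set_playlist is easy to exercise in isolation. A standalone sketch, raising ValueError in place of the project's AoDDownloaderException:

import re

def normalize_anime_url(anime_url):
    """Accept a numeric id or a full anime-on-demand playlist URL; reject anything else."""
    if anime_url.isdecimal():
        return "https://www.anime-on-demand.de/anime/" + anime_url
    if re.match(r"https://(www\.)?anime-on-demand\.de/anime/\d+", anime_url):
        return anime_url
    raise ValueError("Given url does not match a playlist url")

print(normalize_anime_url("123"))
print(normalize_anime_url("https://www.anime-on-demand.de/anime/123"))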
Exemple #41
0
def Admin(ctx, usermod, username, passw, healthcheck, resetsessions, users, sessionsupd, source, format, apikey, role):

    """\b
       -------------------------------------------
       System Administration Command
       -------------------------------------------

       User authorization level: Administrator

       \b
       -------------------------------------------
       Options Usage:
       -------------------------------------------

       \b
       --usermod --username username --passw password
       Using this parameter the administrator can
       create a new user or update an existing
       user's password. If the username exists,
       then the passw argument will be the new
       password of the user. If not, then a new
       user with the username and password argu-
       ments will be created.
       There is an optional parameter --role, where
       you can declare the role of a new user.


       \b
       --healthcheck
       Using this parameter the administrator can
       execute a system health check. The function
       returns the system's health status ("OK" on
       succes / "failed" on failure).


       \b
       --resetsessions
       Using this parameter the administrator can
       reset all charging sessions as well as ini-
       tialize the default admin user (username:
       admin, password: petrol4ever). NOTE: The
       reset is final and after the execution of
       the function, all charging records will be
       deleted.


       \b
       --users username
       Using this parameter the administrator can
       see the information of a user.

       \b
       --sessionsupd --source filename
       Using this parameter the administrator can
       "upload" a CSV file with charging sessions
       data. The name of the file is stated in the
       --source argument and the file itself must
       be in multipart/form-data encoding.

       \b
       -------------------------------------------
       NOTE: You can call multiple functions by
       entering more than one option parameter.
       The options do not have to be in order.

       \b
       NOTE: If more than one options are entered,
       /healthcheck (if called) will be executed
       first and /resetsessions (if called) last.

       \b
       NOTE: Options resetsessions and sessionsupd
       cannot be executed in the same command.

       -------------------------------------------
       """

    upd = validation.validsessionsupd(sessionsupd, source)
    umod = validation.validusermod(usermod, username, passw)
    validation.valid(apikey)
    validation.nand(upd,resetsessions)


    #Health check

    if healthcheck:
        ctx.invoke(public_commands.healthcheck, format=format, apikey=apikey)


    #User info

    if users is not None:
        validation.user()
        click.echo()
        click.echo(click.style("Loading user's information...", fg='cyan'))
        f = open(paths.token_path, "r")
        token = f.readline()
        f.close()
        p = {'format': format, 'apikey': apikey}
        h = {'Authorization':'Token '+token}
        service = 'admin/users/'+ users
        response = requests.get(url=f'{paths.baseURL}/{service}/', headers=h, params=p)
        if response.status_code == 200:
            with click.progressbar([1, 2, 3, 4]) as bar:
                for x in bar:
                    time.sleep(0.4)
            if format == 'json':
                res = json.loads(response.text)
                click.echo()
                click.echo('User ID: '+ str(res['ID']))
                click.echo('Username: '+ str(res['Username']))
                click.echo('First name: '+ str(res['FirstName']))
                click.echo('Last name: '+ str(res['LastName']))
                click.echo('Role: '+ str(res['Role']))
                click.echo('Date joined: '+ str(res['DateJoined']))
                click.echo('Last login: '+ str(res['LastLogin']))
            else:
                f = open(paths.temporary_path, 'w')
                f.write(response.text)
                f.close()
                with open(paths.temporary_path) as temp:
                    res = csv.DictReader(temp)
                    for rows in res:
                        click.echo()
                        click.echo('User ID: '+ str(rows['ID']))
                        click.echo('Username: '+ str(rows['Username']))
                        click.echo('First name: '+ str(rows['FirstName']))
                        click.echo('Last name: '+ str(rows['LastName']))
                        click.echo('Role: '+ str(rows['Role']))
                        click.echo('Date joined: '+ str(rows['DateJoined']))
                        click.echo('Last login: '+ str(rows['LastLogin']))
                os.remove(paths.temporary_path)
        else:
            if format == 'json':
                click.echo()
                raise click.ClickException(response.text[2:(len(response.text))-2])
            else:
                click.echo()
                raise click.ClickException(response.text[4:(len(response.text))-2])


    #Password change / New user

    if umod:
        f = open(paths.token_path, "r")
        token = f.readline()
        f.close()
        firstname = str(username)
        lastname = str(username)
        email = str(username) + '@evmail.com'
        click.echo('Is this a new user? If so, you may want to enter a first name, last name and email. If you'
                    +' do not enter this info, it will be filled in automatically. If this is an existing'
                    +' user, then no changes other than the password will be applied.')
        click.echo("Do you want to enter user's info? " + click.style('[y/n]', fg='cyan'), nl=False)
        c = click.getchar()
        if c=='y':
            click.echo()
            flag = True
            while flag:
                firstname = click.prompt('First name')
                flag = validation.namecheck(firstname)
            flag = True
            while flag:
                lastname = click.prompt('Last name')
                flag = validation.namecheck(lastname)
            email = click.prompt('E-mail')
        click.echo()
        click.echo()
        click.echo(click.style("Processing...", fg='cyan'))
        p = {'format': format,  'role': role, 'firstname': firstname, 'lastname': lastname, 'email': email, 'apikey': apikey}
        h = {'Authorization':'Token '+token}
        service = 'admin/usermod/'+ str(username)+ '/'+ str(passw)
        response = requests.post(url=f'{paths.baseURL}/{service}/', headers=h, params=p)
        if response.status_code != 401:
            if format == 'json':
                res = json.loads(response.text)
                with click.progressbar([1, 2, 3]) as bar:
                    for x in bar:
                        time.sleep(0.5)
                if response.status_code == 200:
                    click.echo()
                    click.echo("Usermod status: " + click.style(res['status'], fg='green'))
                else:
                    click.echo()
                    click.echo("Usermod status: " + click.style(res['status'], fg='red'))
            else:
                f = open(paths.temporary_path, 'w')
                f.write(response.text)
                f.close()
                with open(paths.temporary_path) as temp:
                    res = csv.DictReader(temp)
                    for rows in res: 
                        if response.status_code == 200:
                            click.echo()
                            click.echo("Status: " + click.style(rows['status'], fg='green'))
                        else:
                            click.echo()
                            click.echo("Status: " + click.style(rows['status'], fg='red'))
                os.remove(paths.temporary_path)
        else:
            if format == 'json':
                click.echo()
                raise click.ClickException(response.text[2:(len(response.text))-2])
            else:
                click.echo()
                raise click.ClickException(response.text[4:(len(response.text))-2])


    #Sessions update

    if upd:
        click.echo()
        click.echo(click.style("Uploading CSV file...", fg='cyan'))
        ff = open(paths.token_path, "r")
        token = ff.readline()
        ff.close()
        p = {'format': format, 'apikey': apikey}
        h = {'Authorization':'Token '+token}
        f = {'file': open(source,'r')}
        service = 'admin/system/sessionsupd'
        response = requests.post(url=f'{paths.baseURL}/{service}/', files=f, headers=h, params=p)
        if response.status_code == 200:
            if format == 'json':
                res = json.loads(response.text)
                with click.progressbar([1, 2, 3, 4, 5]) as bar:
                    for x in bar:
                        time.sleep(0.3)
                click.echo()
                click.echo('Sessions in uploaded file: ' + str(res['SessionsInUploadedFile']))
                click.echo('Sessions imported: '+ str(res['SessionsImported']))
                click.echo('Total sessions in database: '+ str(res['TotalSessionsInDatabase']))
            else:
                f = open(paths.temporary_path, 'w')
                f.write(response.text)
                f.close()
                with open(paths.temporary_path) as temp:
                    res = csv.DictReader(temp)
                    for rows in res:
                        click.echo()
                        click.echo('Sessions in uploaded file: '+ str(rows['SessionsInUploadedFile']))
                        click.echo('Sessions imported: '+ str(rows['SessionsImported']))
                        click.echo('Total sessions in database: '+ str(rows['TotalSessionsInDatabase']))
                os.remove(paths.temporary_path)
        else:
            click.echo()
            if response.status_code == 401:
                if format == 'json':
                    click.echo()
                    raise click.ClickException('Error: Update ' + response.text[2:(len(response.text))-2])
                else:
                    click.echo()
                    raise click.ClickException('Error: Update ' + response.text[4:(len(response.text))-2])
            else:
                if format == 'json':
                    click.echo()
                    raise click.ClickException(response.text[2:(len(response.text))-2])
                else:
                    click.echo()
                    raise click.ClickException(response.text[4:(len(response.text))-2])


    #Reset sessions

    if resetsessions:
        ctx.invoke(public_commands.resetsessions, format=format, apikey=apikey)
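Several branches above iterate a short fixed list with time.sleep purely to animate a bar after the request has already completed; it is cosmetic rather than a real progress measurement. A minimal sketch of that pattern factored into a hypothetical helper:

import time
import click

def fake_progress(steps=4, delay=0.4, label='Loading'):
    """Animate a cosmetic progressbar; the actual work is already done when this runs."""
    with click.progressbar(range(steps), label=label) as bar:
        for _ in bar:
            time.sleep(delay)

fake_progress()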
Exemple #42
0
def export(
    ctx,
    cli_obj,
    db,
    photos_library,
    keyword,
    person,
    album,
    uuid,
    title,
    no_title,
    description,
    no_description,
    uti,
    ignore_case,
    edited,
    external_edit,
    favorite,
    not_favorite,
    hidden,
    not_hidden,
    shared,
    not_shared,
    verbose,
    overwrite,
    export_by_date,
    export_edited,
    export_bursts,
    export_live,
    original_name,
    sidecar,
    only_photos,
    only_movies,
    burst,
    not_burst,
    live,
    not_live,
    download_missing,
    dest,
):
    """ Export photos from the Photos database.
        Export path DEST is required.
        Optionally, query the Photos database using 1 or more search options; 
        if more than one option is provided, they are treated as "AND" 
        (e.g. search for photos matching all options).
        If no query options are provided, all photos will be exported.
    """

    if not os.path.isdir(dest):
        sys.exit("DEST must be valid path")

    # sanity check input args
    if favorite and not_favorite:
        # can't search for both favorite and notfavorite
        click.echo(cli.commands["export"].get_help(ctx))
        return
    elif hidden and not_hidden:
        # can't search for both hidden and nothidden
        click.echo(cli.commands["export"].get_help(ctx))
        return
    elif title and no_title:
        # can't search for both title and no_title
        click.echo(cli.commands["export"].get_help(ctx))
        return
    elif description and no_description:
        # can't search for both description and no_description
        click.echo(cli.commands["export"].get_help(ctx))
        return
    elif only_photos and only_movies:
        # can't have only photos and only movies
        click.echo(cli.commands["export"].get_help(ctx))
        return
    elif burst and not_burst:
        # can't search for both burst and not_burst
        click.echo(cli.commands["export"].get_help(ctx))
        return
    elif live and not_live:
        # can't search for both live and not_live
        click.echo(cli.commands["export"].get_help(ctx))
        return

    isphoto = ismovie = True  # default searches for everything
    if only_movies:
        isphoto = False
    if only_photos:
        ismovie = False

    db = get_photos_db(*photos_library, db, cli_obj.db)
    if db is None:
        click.echo(cli.commands["export"].get_help(ctx))
        click.echo("\n\nLocated the following Photos library databases: ")
        _list_libraries()
        return

    photos = _query(
        db,
        keyword,
        person,
        album,
        uuid,
        title,
        no_title,
        description,
        no_description,
        ignore_case,
        edited,
        external_edit,
        favorite,
        not_favorite,
        hidden,
        not_hidden,
        None,  # missing -- won't export these but will warn user
        None,  # not-missing
        shared,
        not_shared,
        isphoto,
        ismovie,
        uti,
        burst,
        not_burst,
        live,
        not_live,
        False,  # cloudasset
        False,  # not_cloudasset
        False,  # incloud
        False,  # not_incloud
    )

    if photos:
        if export_bursts:
            # add the burst_photos to the export set
            photos_burst = [p for p in photos if p.burst]
            for burst in photos_burst:
                burst_set = [p for p in burst.burst_photos if not p.ismissing]
                photos.extend(burst_set)

        num_photos = len(photos)
        photo_str = "photos" if num_photos > 1 else "photo"
        click.echo(f"Exporting {num_photos} {photo_str} to {dest}...")
        if not verbose:
            # show progress bar
            with click.progressbar(photos) as bar:
                for p in bar:
                    export_photo(
                        p,
                        dest,
                        verbose,
                        export_by_date,
                        sidecar,
                        overwrite,
                        export_edited,
                        original_name,
                        export_live,
                        download_missing,
                    )
        else:
            for p in photos:
                export_path = export_photo(
                    p,
                    dest,
                    verbose,
                    export_by_date,
                    sidecar,
                    overwrite,
                    export_edited,
                    original_name,
                    export_live,
                    download_missing,
                )
                if export_path:
                    click.echo(f"Exported {p.filename} to {export_path}")
                else:
                    click.echo(f"Did not export missing file {p.filename}")
    else:
        click.echo("Did not find any photos to export")
Exemple #43
0
def prepare(
    data,
    layout,
    recipe,
    output,
    plotting,
    sparse,
    overwrite,
    set_obs_names,
    set_var_names,
    run_qc,
    make_obs_names_unique,
    make_var_names_unique,
):
    """Preprocesses data for use with cellxgene.

    This tool runs a series of scanpy routines for preparing a dataset
    for use with cellxgene. It loads data from different formats
    (h5ad, loom, or a 10x directory), runs dimensionality reduction,
    computes nearest neighbors, computes a layout, performs clustering,
    and saves the results. Includes additional options for naming
    annotations, ensuring sparsity, and plotting results."""

    # collect slow imports here to make CLI startup more responsive
    click.echo("[cellxgene] Starting CLI...")
    import matplotlib

    matplotlib.use("Agg")
    import scanpy as sc

    # scanpy settings
    sc.settings.verbosity = 0
    sc.settings.autosave = True

    # check args
    if sparse and not recipe == "none":
        raise click.UsageError("Cannot use a recipe when forcing sparsity")

    output = expanduser(output)

    if not output:
        click.echo(
            "Warning: No file will be saved, to save the results of cellxgene prepare include "
            "--output <filename> to save output to a new file")
    if isfile(output) and not overwrite:
        raise click.UsageError(
            f"Cannot overwrite existing file {output}, try using the flag --overwrite"
        )

    def load_data(data):
        if isfile(data):
            name, extension = splitext(data)
            if extension == ".h5ad":
                adata = sc.read_h5ad(data)
            elif extension == ".loom":
                adata = sc.read_loom(data)
            else:
                raise click.FileError(
                    data,
                    hint="does not have a valid extension [.h5ad | .loom]")
        elif isdir(data):
            if not data.endswith(sep):
                data += sep
            adata = sc.read_10x_mtx(data)
        else:
            raise click.FileError(data, hint="not a valid file or path")

        if not set_obs_names == "":
            if set_obs_names not in adata.obs_keys():
                raise click.UsageError(
                    f"obs {set_obs_names} not found, options are: {adata.obs_keys()}"
                )
            adata.obs_names = adata.obs[set_obs_names]
        if not set_var_names == "":
            if set_var_names not in adata.var_keys():
                raise click.UsageError(
                    f"var {set_var_names} not found, options are: {adata.var_keys()}"
                )
            adata.var_names = adata.var[set_var_names]
        if make_obs_names_unique:
            adata.obs_names_make_unique()
        if make_var_names_unique:
            adata.var_names_make_unique()
        if not adata._obs.index.is_unique:
            click.echo("Warning: obs index is not unique")
        if not adata._var.index.is_unique:
            click.echo("Warning: var index is not unique")
        return adata

    def calculate_qc_metrics(adata):
        if run_qc:
            sc.pp.calculate_qc_metrics(adata, inplace=True)
        return adata

    def make_sparse(adata):
        if (type(adata.X) is ndarray) and sparse:
            adata.X = csc_matrix(adata.X)

    def run_recipe(adata):
        if recipe == "seurat":
            sc.pp.recipe_seurat(adata)
        elif recipe == "zheng17":
            sc.pp.recipe_zheng17(adata)
        else:
            sc.pp.filter_cells(adata, min_genes=5)
            sc.pp.filter_genes(adata, min_cells=25)
            if sparse:
                sc.pp.scale(adata, zero_center=False)
            else:
                sc.pp.scale(adata)

    def run_pca(adata):
        if sparse:
            sc.pp.pca(adata, svd_solver="arpack", zero_center=False)
        else:
            sc.pp.pca(adata, svd_solver="arpack")

    def run_neighbors(adata):
        sc.pp.neighbors(adata)

    def run_louvain(adata):
        try:
            sc.tl.louvain(adata)
        except ModuleNotFoundError:
            click.echo(
                "\nWarning: louvain module is not installed, no clusters will be calculated. "
                "To fix this please install cellxgene with the optional feature louvain enabled: "
                "`pip install cellxgene[louvain]`")

    def run_layout(adata):
        if len(unique(adata.obs["louvain"].values)) < 10:
            palette = "tab10"
        else:
            palette = "tab20"

        if "umap" in layout:
            sc.tl.umap(adata)
            if plotting:
                sc.pl.umap(adata,
                           color="louvain",
                           palette=palette,
                           save="_louvain")

        if "tsne" in layout:
            sc.tl.tsne(adata)
            if plotting:
                sc.pl.tsne(adata,
                           color="louvain",
                           palette=palette,
                           save="_louvain")

    def show_step(item):
        if run_qc:
            qc_name = "Calculating QC metrics"
        else:
            qc_name = "Skipping QC"
        names = {
            "calculate_qc_metrics": qc_name,
            "make_sparse": "Ensuring sparsity",
            "run_recipe": f'Running preprocessing recipe "{recipe}"',
            "run_pca": "Running PCA",
            "run_neighbors": "Calculating neighbors",
            "run_louvain": "Calculating clusters",
            "run_layout": "Computing layout",
        }
        if item is not None:
            return names[item.__name__]

    steps = [
        calculate_qc_metrics, make_sparse, run_recipe, run_pca, run_neighbors,
        run_louvain, run_layout
    ]

    click.echo(f"[cellxgene] Loading data from {data}, please wait...")
    adata = load_data(data)

    click.echo("[cellxgene] Beginning preprocessing...")
    with click.progressbar(steps,
                           label="[cellxgene] Progress",
                           show_eta=False,
                           item_show_func=show_step) as bar:
        for step in bar:
            step(adata)

    # saving
    if not output == "":
        click.echo(f"[cellxgene] Saving results to {output}...")
        adata.write(output)

    click.echo("[cellxgene] Success!")
Exemple #44
0
    def writeProm(self):
        # Reset the PROM
        self._resetCmd()
        # Create a burst data array
        dataArray = [0] * 256

        # Set the block transfer size
        if self._useVars:
            self.TranSize.set(0xFF)
        else:
            self._rawWrite(0x80, 0xFF, tryCount=self._tryCount)  # Deprecated

        # Setup the status bar
        with click.progressbar(
                length=self._mcs.size,
                label=click.style('Writing PROM:  ', fg='green'),
        ) as bar:
            for i in range(self._mcs.size):
                if ((i & 0x1) == 0):
                    # Check for first byte of burst transfer
                    if ((i & 0x1FF) == 0):
                        # Throttle down printf rate
                        bar.update(0x1FF)
                        # Get the start bursting address
                        addr = int(self._mcs.entry[i][0]
                                   ) >> 1  # 16-bit word addressing at the PROM
                        # Reset the counter
                        cnt = 0
                    # Get the data from MCS file
                    dataArray[cnt] = int(self._mcs.entry[i][1]) & 0xFF
                else:
                    # Get the data from MCS file
                    dataArray[cnt] |= (int(self._mcs.entry[i][1]) << 8)
                    cnt += 1
                    # Check for the last byte
                    if (cnt == 256):

                        if self._useVars:
                            # Write burst data
                            self.BurstData.set(dataArray)
                            # Start a burst transfer
                            self.BurstTran.set(0x7FFFFFFF & addr)

                        else:
                            # Write burst data
                            self._rawWrite(
                                offset=0x400,
                                data=dataArray,
                                tryCount=self._tryCount)  # Deprecated
                            # Start a burst transfer
                            self._rawWrite(
                                offset=0x84,
                                data=0x7FFFFFFF & addr,
                                tryCount=self._tryCount)  # Deprecated

            # Check for leftover data
            if (cnt != 256):
                # Fill the rest of the data array with ones
                for i in range(cnt, 256):
                    dataArray[i] = 0xFFFF

                if self._useVars:
                    # Write burst data
                    self.BurstData.set(dataArray)
                    # Start a burst transfer
                    self.BurstTran.set(0x7FFFFFFF & addr)

                else:
                    # Write burst data
                    self._rawWrite(offset=0x400,
                                   data=dataArray,
                                   tryCount=self._tryCount)  # Deprecated
                    # Start a burst transfer
                    self._rawWrite(offset=0x84,
                                   data=0x7FFFFFFF & addr,
                                   tryCount=self._tryCount)  # Deprecated

            # Close the status bar
            bar.update(self._mcs.size)
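Both writeProm and verifyProm rebuild 16-bit PROM words from consecutive byte entries of the MCS file: even indices carry the low byte, odd indices the high byte, and byte addresses shift right by one for word addressing. A tiny sketch of that packing with hypothetical (address, byte) entries:

# Hypothetical MCS entries: (byte_address, byte_value) pairs
entries = [(0x0, 0x34), (0x1, 0x12), (0x2, 0x78), (0x3, 0x56)]

words = []
for i, (addr, byte) in enumerate(entries):
    if (i & 0x1) == 0:
        word = byte & 0xFF          # even entry: low byte
        word_addr = addr >> 1       # 16-bit word addressing at the PROM
    else:
        word |= (byte & 0xFF) << 8  # odd entry: high byte
        words.append((word_addr, word))

print([(hex(a), hex(w)) for a, w in words])  # [('0x0', '0x1234'), ('0x1', '0x5678')]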
Exemple #45
0
def site_add(config, name, shape):
    if not config.project_config:
        raise click.ClickException('You must be in a CumulusCI configured local git repository.')
   
    verify_overwrite(config)

    # Initialize payload
    payload = {
        'app': {},
        'source_blob': {
            'url': 'https://github.com/SFDO-Tooling/MetaCI/tarball/master/',
        },
    }
    env = {}

    # Heroku API Token
    try:
        token = subprocess.check_output(['heroku','auth:token']).strip()
    except Exception:
        click.echo()
        click.echo(click.style('# Heroku API Token', bold=True, fg='blue'))
        click.echo('Enter your Heroku API Token.  If you do not have a token, go to the Account page in Heroku and use the API Token section: https://dashboard.heroku.com/account')
        click.echo(click.style(
            'NOTE: For security purposes, your input will be hidden.  Paste your API Token and hit Enter to continue.',
            fg='yellow',
        ))
        token = click.prompt('API Token', hide_input=True)

    heroku_api = prompt_heroku_token()

    # App Name
    if not name:
        click.echo()
        click.echo(click.style('# Heroku App Name', bold=True, fg='blue'))
        click.echo('Specify the name of the Heroku app you want to create.')
        payload['app']['name'] = click.prompt('App Name')
    else:
        payload['app']['name'] = name

    app_shape, num_workers = prompt_app_shape(shape)
    # Hirefire Token
    if app_shape == 'prod':
        click.echo()
        click.echo(click.style('# Hirefire Token', bold=True, fg='blue'))
        click.echo('The prod app shape requires the use of Hirefire.io to scale the build worker dynos.  You will need to have an account on Hirefire.io and get the API token from your account.')
        env['HIREFIRE_TOKEN'] = click.prompt('Hirefire API Token')

    # Salesforce DX
    click.echo()
    click.echo(click.style('# Salesforce DX Configuration', bold=True, fg='blue'))
    click.echo('The following prompts collect information from your local Salesforce DX configuration to use to configure MetaCI to use sfdx')

    # Salesforce DX Hub Key
    click.echo()
    click.echo('MetaCI uses JWT to connect to your Salesforce DX devhub.  Please enter the path to your local private key file.  If you have not set up JWT for your devhub, refer to the documentation: https://developer.salesforce.com/docs/atlas.en-us.sfdx_dev.meta/sfdx_dev/sfdx_dev_auth_jwt_flow.htm')
    sfdx_private_key = click.prompt(
        'Path to private key',
        type=click.Path(readable=True, dir_okay=False),
        default=os.path.expanduser('~/.ssh/sfdx_server.key'),
    )
    with open(sfdx_private_key, 'r') as f:
        env['SFDX_HUB_KEY'] = f.read()

    # Salesforce DX Client ID
    click.echo()
    click.echo('Enter the Connected App Client ID you used for the JWT authentication flow to the Salesforce DX devhub.')
    click.echo(click.style(
        'NOTE: For security purposes, your input will be hidden.  Paste your Client ID and hit Enter to continue.',
        fg='yellow',
    ))
    env['SFDX_CLIENT_ID'] = click.prompt('Client ID', hide_input=True)

    # Salesforce DX Username
    try:
        devhub = subprocess.check_output(['sfdx','force:config:get', 'defaultdevhubusername', '--json']).strip()
        devhub = json.loads(devhub)
        devhub = devhub['result'][0]['value']
    except Exception:
        devhub = None
   
    if devhub:
        env['SFDX_HUB_USERNAME'] = devhub
    else: 
        click.echo()
        click.echo('Enter the username MetaCI should use for JWT authentication to the Salesforce DX devhub.')
        env['SFDX_HUB_USERNAME'] = click.prompt('Username')

    # Get connected app info from CumulusCI keychain
    connected_app = config.keychain.get_service('connected_app')
    env['CONNECTED_APP_CALLBACK_URL'] = connected_app.callback_url
    env['CONNECTED_APP_CLIENT_ID'] = connected_app.client_id
    env['CONNECTED_APP_CLIENT_SECRET'] = connected_app.client_secret

    # Set the site url
    env['SITE_URL'] = 'https://{}.herokuapp.com'.format(payload['app']['name'])

    # Get Github credentials from CumulusCI keychain
    github = config.keychain.get_service('github')
    env['GITHUB_USERNAME'] = github.username
    env['GITHUB_PASSWORD'] = github.password
    env['GITHUB_WEBHOOK_BASE_URL'] = '{SITE_URL}/webhook/github'.format(**env)
    env['FROM_EMAIL'] = github.email

    # Prepare the payload 
    payload['overrides'] = {
        'env': env,
    } 

    # Create the new Heroku App
    headers = {
        'Accept': 'application/vnd.heroku+json; version=3',
        'Authorization': 'Bearer {}'.format(token),
    }
    resp = requests.post('https://api.heroku.com/app-setups', json=payload, headers=headers)
    if resp.status_code != 202:
        raise click.ClickException('Failed to create Heroku App.  Response code [{}]: {}'.format(resp.status_code, resp.json()))
    app_setup = resp.json()
    
    # Poll the app-setup resource until its status resolves
    status = app_setup['status']
    check_data = app_setup  # seed in case the setup has already resolved
    click.echo()
    click.echo('Status: {}'.format(status))
    click.echo(click.style('Creating app:', fg='yellow'))
    polls = 0
    build_started = False
    with click.progressbar(length=200) as bar:
        while status == 'pending':
            check_resp = requests.get(
                'https://api.heroku.com/app-setups/{id}'.format(**app_setup),
                headers=headers,
            )
            if check_resp.status_code != 200:
                raise click.ClickException('Failed to check status of app creation.  Response code [{}]: {}'.format(check_resp.status_code, check_resp.json()))
            check_data = check_resp.json()

            # Stream the build log output once the build starts
            if not build_started and check_data['build'] is not None:
                build_started = True
                bar.update(100)
                click.echo()
                click.echo()
                click.echo(click.style('Build {id} Started:'.format(**check_data['build']), fg='yellow'))
                build_stream = requests.get(check_data['build']['output_stream_url'], stream=True, headers=headers)
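                # iter_content() yields raw bytes as Heroku streams the build
                # log; echo without a trailing newline to preserve formatting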
                for chunk in build_stream.iter_content():
                    click.echo(chunk, nl=False)

            # If the app-setup is still pending, advance the bar and poll again
            status = check_data['status']
            if status != 'pending':
                bar.update(100)
                break
            if polls < 98:
                polls += 1
                # click's update() advances by n steps; advance one per poll
                bar.update(1)
            time.sleep(2)

    click.echo()
    # Success
    if check_data['status'] == 'succeeded':
        click.echo(click.style('Heroku app creation succeeded!', fg='green', bold=True))
        render_recursive(check_data)
    # Failed
    elif check_data['status'] == 'failed':
        click.echo(click.style('Heroku app creation failed', fg='red', bold=True))
        render_recursive(check_data)
        if check_data['build']:
            click.echo()
            click.echo('Build Info:')
            resp = requests.get('https://api.heroku.com/builds/{id}'.format(**check_data['build']), headers=headers)
            render_recursive(resp.json())
        return
    else:
        raise click.ClickException('Received an unknown status from the Heroku app-setups API.  Full API response: {}'.format(check_data))

    heroku_app = heroku_api.app(check_data['app']['id'])
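    # heroku_api is assumed to be an authenticated Heroku API client (e.g. a
    # heroku3-style client) set up earlier in this module.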

    # Apply the app shape
    click.echo()
    click.echo(click.style('# Applying App Shape', bold=True, fg='blue'))
    set_app_shape(heroku_app, app_shape, num_workers)

    click.echo()
    click.echo(click.style('# Create Admin User', bold=True, fg='blue'))
    click.echo('You will need an initial admin user to start configuring your new MetaCI site.  Enter a password and the admin user will be created for you.')
    password = click.prompt('Password', hide_input=True, confirmation_prompt=True)

    # Create the admin user
    click.echo()
    click.echo(click.style('Creating admin user:', fg='yellow'))
    command = 'python manage.py autoadminuser {FROM_EMAIL}'.format(**env)
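    # Pass the password through the one-off dyno's environment so it never
    # appears on the command line or in the process list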
    admin_output, dyno = heroku_app.run_command(command, printout=True, env={'ADMINUSER_PASS': password})
    # Echo just the final line of the command output
    click.echo('\n'.join(admin_output.splitlines()[-1:]))
    
    # Create the admin user's API token
    click.echo()
    click.echo(click.style('Generating API token for admin user:', fg='yellow'))
    command = 'python manage.py usertoken admin'
    token_output, dyno = heroku_app.run_command(command)
    api_token = None
    for line in token_output.splitlines():
        if line.startswith('Token: '):
            api_token = line.replace('Token: ', '', 1).strip()

    # Create the scheduled jobs needed by MetaCI
    click.echo()
    click.echo(click.style('Creating scheduled jobs:', fg='yellow'))
    command = 'python manage.py metaci_scheduled_jobs'
    jobs_output, dyno = heroku_app.run_command(command, printout=True)
    click.echo('\n'.join(jobs_output.splitlines()[-1:]))
    
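    # Persist the new site as a 'metaci' service in the CumulusCI keychain so
    # later CLI commands can reach its API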
    if api_token:
        service = ServiceConfig({
            'url': env['SITE_URL'],
            'token': api_token,
            'app_name': check_data['app']['name'],
        })
        config.keychain.set_service('metaci', service)
        click.echo(click.style('Successfully connected metaci to the new site at {SITE_URL}'.format(**env), fg='green', bold=True))
    else:
        click.echo(click.style('Failed to create an API connection to the new MetaCI site at {SITE_URL}.  Try running: metaci site connect'.format(**env), fg='red', bold=True))