Example #1
0
def init(ctx, dev, directory):
    """Initialize a new configuration directory."""
    from sentry.runner.settings import discover_configs, generate_settings

    if directory is not None:
        os.environ['SENTRY_CONF'] = directory

    directory, py, yaml = discover_configs()

    if yaml is None:
        # The config location points directly at a file: either the legacy
        # single-file layout (abort and say so) or a plain bad argument.
        if os.path.isfile(py):
            # TODO: Link to docs explaining about new behavior of SENTRY_CONF?
            raise click.ClickException("Found legacy '%s' file, so aborting." % click.format_filename(py))
        raise click.ClickException("DIRECTORY must not be a file.")

    if directory and not os.path.exists(directory):
        os.makedirs(directory)

    py_contents, yaml_contents = generate_settings(dev)

    # Write the yaml file first, then the py file, prompting before
    # clobbering either one if it already exists.
    for target, contents in ((yaml, yaml_contents), (py, py_contents)):
        if os.path.isfile(target):
            click.confirm("File already exists at '%s', overwrite?" % click.format_filename(target), abort=True)
        with click.open_file(target, 'w') as fh:
            fh.write(contents)
def read(value, split=False):
    '''Get the value of an option interpreting as a file implicitly or
    explicitly and falling back to the value if not explicitly specified.
    If the value is '@name', then a file must exist with name and the returned
    value will be the contents of that file. If the value is '@-' or '-', then
    stdin will be read and returned as the value. Finally, if a file exists
    with the provided value, that file will be read. Otherwise, the value
    will be returned.

    :param value: the raw option value (anything convertible to str).
    :param split: when True, split the resulting text via ``_split``.
    :return: file contents, the original value, or None for an empty tty.
    '''
    v = str(value)
    retval = value
    # Bug fix: v may be the empty string, in which case v[0] raised
    # IndexError; startswith() is safe on '' and behaves identically.
    if v.startswith('@') or v == '-':
        fname = '-' if v == '-' else v[1:]
        try:
            with click.open_file(fname) as fp:
                # Reading from an interactive tty would block; signal
                # "no input" with None instead.
                if not fp.isatty():
                    retval = fp.read()
                else:
                    retval = None
        # @todo better to leave as IOError and let caller handle it
        # to better report in context of call (e.g. the option/type)
        except IOError as ioe:
            # The '@' prefix made the file reference explicit, so a
            # missing/unreadable file is a hard error.
            if v.startswith('@'):
                raise click.ClickException(str(ioe))
    elif path.exists(v) and path.isfile(v):
        with click.open_file(v) as fp:
            retval = fp.read()
    # isinstance is the idiomatic type test (and handles subclasses).
    if retval and split and not isinstance(retval, tuple):
        retval = _split(retval.strip())
    return retval
Example #3
0
def project_config(context):
    """Load the project's YAML config, creating it with defaults if absent.

    :param context: object exposing ``module_name`` (the config directory).
    :return: the parsed config dict, or {} when the file is empty.
    """
    config_path = join(context.module_name, CONFIG_FILE)

    # First run: persist the default configuration so it exists on disk.
    if not exists(config_path):
        config = DEFAULTS.copy()

        with click.open_file(config_path, 'w') as f:
            config_yaml = yaml.dump(config, default_flow_style=False)
            f.write(config_yaml)

    # Bug fix: the read handle was previously passed straight into
    # yaml.safe_load and never closed; use a context manager.
    with click.open_file(config_path) as f:
        config = yaml.safe_load(f)
    return config or {}
Example #4
0
 def idzip_compression(path, output):
     '''Compress a file using  idzip, a gzip-compatible format with random access support.

     path: source file name ('-' reads stdin).
     output: destination file name; None is treated as stdout ('-').
     '''
     # Default to stdout when no explicit output was given.
     if output is None:
         output = '-'
     with click.open_file(output, mode='wb') as outfh:
         # NOTE(review): despite the docstring, this uses _compression.GzipFile;
         # presumably that wrapper produces idzip-compatible output -- confirm.
         writer = _compression.GzipFile(fileobj=outfh, mode='wb')
         with click.open_file(path, 'rb') as infh:
             # Stream in fixed-size chunks to bound memory usage.
             buffer_size = _compression.WRITE_BUFFER_SIZE
             chunk = infh.read(buffer_size)
             while chunk:
                 writer.write(chunk)
                 chunk = infh.read(buffer_size)
         writer.close()
Example #5
0
def normalize_feature_inputs(ctx, param, value):
    """Click callback that normalizes feature input values.

    Returns a generator over features from the input value.

    Parameters
    ----------
    ctx: a Click context
    param: the name of the argument or option
    value: object
        Either a list of paths to files containing GeoJSON feature
        collections or feature sequences, or a list of string-encoded
        coordinate pairs of the form "[lng, lat]", "lng, lat", or
        "lng lat".  When no value is provided, features are read from
        stdin ('-').
    """
    inputs = value if value else ('-',)
    for item in inputs:
        try:
            with click.open_file(item) as handle:
                for feat in iter_features(iter(handle)):
                    yield feat
        except IOError:
            # Not a readable file: treat the string as a coordinate pair
            # and wrap it in a Point feature.
            point = {
                'type': 'Feature',
                'properties': {},
                'geometry': {
                    'type': 'Point',
                    'coordinates': list(coords_from_query(item))}}
            yield point
 def go(check, stash_root, groups_file):
     """
     Read the stash located at stash-root and write the groups data to
     a file at groups-file. Only the fields "group_id" and the corresponding
     list of member ids will be written.
     """
     print("stash-root: {}".format(stash_root))
     print("groups-file: {}".format(groups_file))
     print("check: {}".format(check))
     # NOTE(review): ofp is opened but never used -- write_member_ids() is
     # handed the *path*, so it presumably reopens the file itself; the
     # with-block here only truncates groups_file first. Confirm whether
     # ofp should have been passed instead.
     with click.open_file(groups_file, "w") as ofp:
         g = GroupsData.from_stash(stash_root, fields=["id"])
         g.write_member_ids(groups_file)
     # Optional integrity check: the file just written must contain exactly
     # the gids present in the stash.
     if check:
         g_gids = g.get_gids()
         g2 = GroupsData.from_file(groups_file)
         g2_gids = g2.get_gids()
         len_gids = len(g_gids)
         len_gids2 = len(g2_gids)
         if len_gids == len_gids2 and sorted(g_gids) == sorted(g2_gids):
             print("ok: checked {} gids in stash and groups file".format(
                 len(g_gids)))
         else:
             len_gids = len(g_gids)
             len_gids2 = len(g2_gids)
             print("mismatch: stash and groups file have different gids!")
             sys.exit(1)
def gitignore_creator(output, template_names):
    """Assemble a .gitignore from the named templates in the gitignore
    repository and write the combined result to ``output``."""
    fig = pyfiglet.Figlet()
    log = logging.getLogger('gitignore_creator')

    sections = []
    for name in template_names:
        log.debug(name)

        # Render a figlet banner for this section and comment out each line.
        banner = fig.renderText(name)
        sections.append('\n'.join('# {}'.format(line) for line in banner.split('\n')))

        resp = requests.get('{}{}.gitignore'.format(GITIGNORE_REPO_URL, name))
        if resp.status_code != 200:
            # Fall back to the "Global" templates directory of the repo.
            resp = requests.get('{}{}.gitignore'.format(GITIGNORE_REPO_GLOBAL_URL, name))
            if resp.status_code != 200:
                log.error('Could not find {} template in gitignore repo'.format(name))
                sections.append('# Failed to find {} in gitignore repo'.format(name))
                continue

        sections.append(resp.text)

    body = '\n'.join(sections)
    log.debug(body)

    with click.open_file(output, 'w') as out:
        out.write(body)
Example #8
0
def normalize_feature_inputs(ctx, param, features_like):
    """ Click callback which accepts the following values:
    * Path to file(s), each containing single FeatureCollection or Feature
    * Coordinate pair(s) of the form "[0, 0]" or "0, 0" or "0 0"
    * if not specified or '-', process STDIN stream containing
        - line-delimited features
        - ASCII Record Separator (0x1e) delimited features
        - FeatureCollection or Feature object
    and yields GeoJSON Features.
    """
    if len(features_like) == 0:
        features_like = ('-',)

    for flike in features_like:
        try:
            # It's a file/stream with GeoJSON.  Bug fix: the handle was
            # previously never closed; a with-block releases it promptly.
            with click.open_file(flike, mode='r') as src:
                for feature in iter_features(iter(src)):
                    yield feature
        except IOError:
            # It's a coordinate string
            coords = list(coords_from_query(flike))
            feature = {
                'type': 'Feature',
                'properties': {},
                'geometry': {
                    'type': 'Point',
                    'coordinates': coords}}
            yield feature
Example #9
0
def upload_zipfile(directory, filename, token, pgb_api):
    """Upload a zip archive to the PhoneGap Build API.

    Creates a new app (POST) when no app_id is stored for the project,
    otherwise updates the existing app (PUT).  Persists the resulting app
    id on first upload and echoes the share URL and any API warning.
    """
    pgb_url = pgb_api + "{path}?auth_token={token}"
    app_id = get_pgb_config(directory, 'app_id')
    if app_id:
        url = pgb_url.format(
            path='apps/{app_id}'.format(app_id=app_id),
            token=token,
        )
        method = 'put'
    else:
        url = pgb_url.format(
            path='apps',
            token=token,
        )
        method = 'post'

    # Bug fix: the uploaded file handle was never closed; the context
    # manager releases it even if the request raises.
    with click.open_file(filename, 'rb') as zf:
        response = requests.request(
            method,
            url,
            data={
                'data': json.dumps({'create_method': 'file'}),
            },
            files={
                'file': zf,
            }
        )
    if check_error(response):
        return
    result = response.json()
    if not app_id:
        set_pgb_config(directory, 'app_id', result['id'])
    click.echo("URL: {share_url}".format(share_url=result['share_url']))
    error = result.get('error', None)
    if error:
        click.echo("PGB Warning: {error}".format(error=error))
Example #10
0
def preview(protocol_name):
    '''Preview the Autoprotocol output of a run (without submitting or analyzing)'''
    with click.open_file('manifest.json', 'r') as f:
        try:
            manifest = json.loads(f.read())
        except ValueError:
            click.echo("Error: Your manifest.json file is improperly formatted. "
                       "Please double check your brackets and commas!")
            return
    # Locate the requested protocol entry in the manifest.
    try:
        p = next(p for p in manifest['protocols'] if p['name'] == protocol_name)
    except StopIteration:
        click.echo("Error: The protocol name '%s' does not match any protocols "
                   "that can be previewed from within this directory.  \nCheck "
                   "either your spelling or your manifest.json file and try "
                   "again." % protocol_name)
        return
    try:
        command = p['command_string']
    except KeyError:
        click.echo("Error: Your manifest.json file does not have a \"command_string\""
                   " key.")
        return
    from subprocess import call
    import tempfile
    # Bug fix: NamedTemporaryFile defaults to binary mode ('w+b'), so writing
    # the str returned by json.dumps() raises TypeError on Python 3.  Open
    # the temp file in text mode instead.
    with tempfile.NamedTemporaryFile(mode='w') as fp:
        try:
            fp.write(json.dumps(p['preview']))
        except KeyError:
            click.echo("Error: The manifest.json you're trying to preview doesn't "
                       "contain a \"preview\" section")
            return
        fp.flush()
        # Run the protocol command with the preview config as its argument.
        call(["bash", "-c", command + " " + fp.name])
Example #11
0
def analyze(ctx, file, test):
    '''Analyze your run'''
    # Load the protocol JSON from the given file path.
    with click.open_file(file, 'r') as f:
        protocol = json.loads(f.read())
    response = \
        ctx.obj.post(
            'analyze_run',
            data=json.dumps({"protocol": protocol, "test_mode": test})
        )
    if response.status_code == 200:
        click.echo(u"\u2713 Protocol analyzed")

        # Echo "1 thing" / "N things" with correct pluralization.
        def count(thing, things, num):
            click.echo("  %s %s" % (num, thing if num == 1 else things))
        result = response.json()
        count("instruction", "instructions", len(result['instructions']))
        count("container", "containers", len(result['refs']))
        # NOTE(review): hard-coding this locale raises locale.Error when
        # en_US.UTF-8 is not installed on the host -- confirm acceptable.
        locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
        click.echo("  %s" %
                   locale.currency(float(result['total_cost']), grouping=True))
        for w in result['warnings']:
            message = w['message']
            # Prefer the instruction index as warning context when present.
            if 'instruction' in w['context']:
                context = "instruction %s" % w['context']['instruction']
            else:
                context = json.dumps(w['context'])
            click.echo("WARNING (%s): %s" % (context, message))
    elif response.status_code == 422:
        click.echo("Error in protocol: %s" % response.text)
    else:
        click.echo("Unknown error: %s" % response.text)
Example #12
0
def info(input, verbose, pyformat, **kwargs):
    """
    Provides info about the input. Requires valid input.

    :param input: path of a Gherkin feature file; falsy means stdin ('-').
    :param verbose: also report the parsed object's type and members.
    :param pyformat: pretty-print with pprint instead of JSON.
    """
    if not input:
        input = "-"
    with click.open_file(input, mode="rb") as f:
        parser = Parser()
        feature_text = f.read()
        feature = parser.parse(feature_text)

        metrics = {}
        # Collect Step and Scenario nodes (last ancestor of each match).
        steps = [a[-1] for d, k, v, a in walk_items(feature) if k == "type" and v == "Step"]
        scenarios = [a[-1] for d, k, v, a in walk_items(feature) if k == "type" and v == "Scenario"]
        # tables = [a[-1] for d, k, v, a in walk_items(feature) if k == 'type' and v == 'DataTable']
        # Frequency counts of node types and Gherkin keywords.
        ctr_type = Counter((v for d, k, v in walk_items(feature, ancestors=False) if k == "type"))
        ctr_kw = Counter((v for d, k, v in walk_items(feature, ancestors=False) if k == "keyword"))
        metrics.update({"count": {"Keywords": ctr_kw, "Types": ctr_type}})
        metrics.update({"content": {"Scenarios": [d["name"] for d in scenarios], "Steps": [d["text"] for d in steps]}})
        data = metrics

        if verbose:
            data["_object"] = {"type": type(feature), "members": sorted(varsdict(feature).keys())}
        if pyformat:
            s = pformat(data)
        else:
            s = json.dumps(data, indent=2, sort_keys=True)
        click.echo(s)
Example #13
0
def submit(ctx, file, project, title, test):
    '''Submit your run to the project specified'''
    with click.open_file(file, 'r') as fh:
        protocol = json.loads(fh.read())
    # Normalize any truthy test flag to a literal True.
    if test:
        test = True
    payload = json.dumps({
        "title": title,
        "protocol": protocol,
        "test_mode": test
        })
    response = ctx.obj.post('%s/runs' % project, data=payload)
    status = response.status_code
    if status == 201:
        run_id = response.json()['id']
        click.echo(
            "Run created: %s" %
            ctx.obj.url("%s/runs/%s" % (project, run_id)))
        return run_id
    if status == 404:
        click.echo("Couldn't create run (404). Are you sure the project %s "
                   "exists, and that you have access to it?" %
                   ctx.obj.url(project))
    elif status == 422:
        click.echo("Error creating run: %s" % response.text)
    else:
        click.echo("Unknown error: %s" % response.text)
Example #14
0
def interpolate(ctx, geojson, sampleraster, bidx, outfile):
    """Interpolate raster values onto GeoJSON features and emit the result.

    geojson may be a file path or a literal GeoJSON string; output goes to
    outfile when given, otherwise to stdout (one feature per line).
    """
    # Treat the argument as a file first; fall back to a literal string.
    try:
        inGeoJSON = click.open_file(geojson).readlines()
    except IOError:
        inGeoJSON = [geojson]
    features = list(i for i in rio_interpolate.filterBadJSON(inGeoJSON))

    for feat in features:
        # NOTE(review): getBounds(features) and parseLine(features[0]) use
        # the whole list / first feature rather than the loop variable
        # `feat`, so every iteration samples against the first feature --
        # confirm whether `feat` was intended here.
        bounds = rio_interpolate.getBounds(features)

        ras_vals = rio_interpolate.loadRaster(sampleraster, bounds, bidx)

        output_values = rio_interpolate.interpolatePoints(
        ras_vals,
        rio_interpolate.parseLine(features[0]),
        bounds)

        feat['geometry']['coordinates'] = [f for f in rio_interpolate.addAttr(feat, output_values)]

    if outfile:
        with open(outfile, 'w') as oFile:
            oFile.write(json.dumps({
                "type": "FeatureCollection",
                "features": list(features)
                }))
    else:
        for feat in features:
            # Swallow broken-pipe style errors when streaming to stdout.
            try:
                click.echo(json.dumps(feat).rstrip())
            except IOError as e:
                pass
def go(k, p, n, random_sample, seed, max_null, source, ncol_file):
    """
    Walk the directory tree starting at stash-root, read each group-member
    JSON file found, generate the group graph, and finally write the weighted
    edges of to ncol-file.

    The NCOL output format is:

        \b
        node1 node2 weight
        node3 node4 weight
        ...
    """
    print("source: {!r}".format(source))
    print("random-sample: {}".format(random_sample))
    print("k: {}".format(k))
    print("p: {}".format(p))
    print("n: {}".format(n))
    print("max-null: {}".format(max_null))
    print("seed: {}".format(seed))
    # Choose the clique generator: seeded random sampling (with a cap on
    # null passes) or exhaustive enumeration.
    if random_sample:
        gen = partial(gen_random_3cliques, seed=seed, max_null_passes=max_null)
    else:
        gen = partial(gen_3cliques)
    # Stream one "node1 node2 weight" line per weighted edge.
    with click.open_file(ncol_file, "w") as ofp:
        for n1, n2, w in gen(source, k=k, p=p, n=n):
            print("{} {} {}".format(n1, n2, w), file=ofp)
Example #16
0
def run(protocol_name, args):
    '''Run a protocol by passing it a config file (without submitting or analyzing)'''
    with click.open_file('manifest.json', 'r') as fh:
        try:
            manifest = json.loads(fh.read())
        except ValueError:
            click.echo("Error: Your manifest.json file is improperly formatted. "
                       "Please double check your brackets and commas!")
            return
    # Find the manifest entry whose name matches the requested protocol.
    matches = (entry for entry in manifest['protocols']
               if entry['name'] == protocol_name)
    try:
        p = next(matches)
    except StopIteration:
        click.echo("Error: The protocol name '%s' does not match any protocols "
                   "that can be previewed from within this directory.  \nCheck "
                   "either your spelling or your manifest.json file and try "
                   "again." % protocol_name)
        return
    if 'command_string' not in p:
        click.echo("Error: Your manifest.json file does not have a \"command_string\""
                   " key.")
        return
    command = p['command_string']
    from subprocess import call
    # Hand the protocol command plus any extra args to a shell.
    call(["bash", "-c", command + " " + ' '.join(args)])
def mzml(source, output, ms1_filters=None, msn_filters=None, pick_peaks=False, compress=False,
         correct_precursor_mz=False):
    """Convert `source` into mzML format written to `output`, applying a collection of optional data
    transformations along the way.

    :param source: input mass-spectrometry file readable by ms_deisotope.
    :param output: destination path; '.gz' is appended when compressing.
    :param compress: wrap the output stream in gzip when True.
    """
    reader = ms_deisotope.MSFileLoader(source)
    if compress:
        # Make the gzip suffix explicit in the output file name.
        if not output.endswith(".gz"):
            output += '.gz'
        stream = click.open_file(output, 'wb')
        stream = GzipFile(fileobj=stream, mode='wb')
    else:
        stream = click.open_file(output, 'wb')
    # NOTE(review): when compressing, this with-block closes the GzipFile
    # wrapper but not necessarily the underlying click stream -- confirm.
    with stream:
        to_mzml(reader, stream, pick_peaks=pick_peaks, ms1_filters=ms1_filters,
                msn_filters=msn_filters, correct_precursor_mz=correct_precursor_mz)
Example #18
0
def proselint(files=None, version=None, initialize=None,
              debug=None, score=None, json=None, time=None, demo=None):
    """Define the linter command line API."""
    # Each of these flags short-circuits normal linting.
    if time:
        click.echo(timing_test())
        return

    # Run the intialization.
    if initialize:
        run_initialization()
        return

    if score:
        click.echo(lintscore())
        return

    # In debug mode, delete the cache and *.pyc files before running.
    if debug:
        clear_cache()

    # Use the demo file by default.
    if demo:
        files = [click.open_file(demo_file, encoding='utf8')]

    # Lint each file and report its errors (as JSON when requested).
    for f in files:
        errors = lint(f, debug=debug)
        show_errors(click.format_filename(f.name), errors, json)
Example #19
0
def run(file):
    """Solve the reindeer-race puzzle (Advent of Code 2015 day 14) from `file`.

    Part 1: maximum distance flown after 2503 seconds.
    Part 2: points awarded each second to the current leader(s).
    """
    row_re = re.compile(r'([a-zA-Z]+) can fly ([0-9]+) km/s for ([0-9]+) seconds, but then must rest for ([0-9]+) seconds.*')
    caribou_list = {}
    with click.open_file(file) as strings:
        for row in strings:
            data = re.match(row_re, row.rstrip())
            # [speed, fly_time, rest_time, distance, points]
            caribou_list[data.group(1)] = [
                int(data.group(2)),
                int(data.group(3)),
                int(data.group(4)),
                0,
                0,
            ]
    max_time = 2503
    totals = []
    # Bug fix: this was Python 2 code -- iteritems(), subscripting the
    # result of zip(), and true division where floor division was intended
    # all fail or misbehave on Python 3.  Ported accordingly.
    for k, v in caribou_list.items():
        # Full fly/rest cycles, plus a possibly-truncated final fly phase.
        totals.append(
            ((max_time // (v[1] + v[2])) * (v[1] * v[0])) +
            min([v[1], (max_time % (v[1] + v[2]))]) * v[0]
        )

    click.echo('Part 1 Total is {}'.format(max(totals)))

    m = 0
    # Simulate second by second, awarding a point to each current leader.
    for i in range(max_time):
        for k, v in caribou_list.items():
            v[4] += 1 if v[3] == m and m > 0 else 0
            v[3] += v[0] if i % (v[1] + v[2]) < v[1] else 0
        m = max(list(zip(*caribou_list.values()))[3])

    tot2 = max(list(zip(*caribou_list.values()))[4])

    click.echo('Part 2 Total is {}'.format(tot2))
Example #20
0
def search(dataset, node, aoi, start_date, end_date, longitude, latitude, distance, lower_left, upper_right, where, api_key, geojson):
    """Search a USGS dataset and print the results (optionally as GeoJSON)."""
    node = get_node(dataset, node)

    if aoi == "-":
        # Read a GeoJSON AOI from stdin and derive a bounding box from it.
        src = click.open_file('-').readlines()
        aoi = json.loads(''.join([line.strip() for line in src]))

        # Bug fix: map() returns a lazy iterator on Python 3 and cannot be
        # subscripted; materialize it first.
        bbox = list(map(get_bbox, aoi.get('features')))[0]
        lower_left = bbox[0:2]
        upper_right = bbox[2:4]

    if where:
        # Query the dataset fields endpoint for queryable fields
        fields = api.dataset_fields(dataset, node)

        def format_fieldname(s):
            return ''.join(c for c in s if c.isalnum()).lower()

        field_lut = {format_fieldname(field['name']): field['fieldId'] for field in fields}
        where = {field_lut[format_fieldname(k)]: v for k, v in where if format_fieldname(k) in field_lut}

    if lower_left:
        lower_left = dict(zip(['longitude', 'latitude'], lower_left))
        upper_right = dict(zip(['longitude', 'latitude'], upper_right))

    data = api.search(dataset, node, lat=latitude, lng=longitude, distance=distance, ll=lower_left, ur=upper_right, start_date=start_date, end_date=end_date, where=where, api_key=api_key)

    if geojson:
        # Bug fix: materialize the map() so json.dumps can serialize it.
        features = list(map(to_geojson_feature, data))
        data = {'type': 'FeatureCollection', 'features': features}

    print(json.dumps(data))
def match(ctx, features, profile, gps_precision):
    """Mapbox Map Matching API lets you use snap your GPS traces
to the OpenStreetMap road and path network.

      $ mapbox mapmatching trace.geojson

An access token is required, see `mapbox --help`.
    """
    access_token = (ctx.obj and ctx.obj.get('access_token')) or None

    # The API accepts exactly one LineString trace per request.
    features = list(features)
    if len(features) != 1:
        raise click.BadParameter(
            "Mapmatching requires a single LineString feature")

    service = mapbox.MapMatcher(access_token=access_token)
    try:
        res = service.match(
            features[0],
            profile=profile,
            gps_precision=gps_precision)
    except mapbox.errors.ValidationError as exc:
        # Surface SDK validation problems as CLI parameter errors.
        raise click.BadParameter(str(exc))

    if res.status_code == 200:
        stdout = click.open_file('-', 'w')
        click.echo(res.text, file=stdout)
    else:
        raise MapboxCLIException(res.text.strip())
Example #22
0
def put_feature(ctx, dataset, fid, feature, input):
    """Create or update a dataset feature.

    The semantics of HTTP PUT apply: if the dataset has no feature
    with the given `fid` a new feature will be created. Returns a
    GeoJSON representation of the new or updated feature.

        $ mapbox datasets put-feature dataset-id feature-id 'geojson-feature'

    All endpoints require authentication. An access token with
    `datasets:write` scope is required, see `mapbox --help`.
    """

    if feature is None:
        # Bug fix: the input handle was never closed; use a context manager
        # (click's '-' stdin wrapper is safe to close).
        with click.open_file(input, 'r') as src:
            feature = src.read()

    feature = json.loads(feature)

    service = ctx.obj.get('service')
    res = service.update_feature(dataset, fid, feature)

    if res.status_code == 200:
        click.echo(res.text)
    else:
        raise MapboxCLIException(res.text.strip())
Example #23
0
def texts2json(ids, names, field, text_docs):
    """Convert a set of text documents into a
    JSON array of document objects.

    :param ids: file of ids, one per line (parallel to text_docs).
    :param names: file of names, one per line (parallel to text_docs).
    :param field: key under which each document's text is stored.
    :param text_docs: paths of the text documents to convert.
    """
    docs = []

    names = read_names(names)
    ids = read_names(ids)

    for idx, path in enumerate(text_docs):
        # Bug fix: the file was previously opened twice (a bare open() plus
        # an unused click.open_file() context); open it once and let the
        # context manager close it.
        with click.open_file(path) as tokens_doc:
            content = tokens_doc.read()

        # ordered so that these attributes stay at the top
        doc = OrderedDict()

        # Bug fix: the bound was `len(...) - 1`, which silently skipped the
        # last supplied id/name and fell back to the path.
        if idx < len(ids):
            doc["id"] = ids[idx]
        else:
            doc["id"] = path

        if idx < len(names):
            doc["name"] = names[idx]
        else:
            doc["name"] = path

        doc[field] = content
        docs.append(doc)

    out_content = json.dumps(docs, indent=2)
    output(out_content)
Example #24
0
def flattencommand(input, separator, sort_keys, style, **kwargs):
    """
    Flattens JSON input with nested or hierarchical structure into a flat (depth 1) hierarchy. Requires valid input.

    Examples:

        \b
        Example: Basic usage:
        $ echo '{"a":{"b":null,"c":"null","d":"","e":{"f":null},"g":{},"h":[]}}' | python -mclifunzone.jsontool flatten -c
        {"a__b":null,"a__c":"null","a__d":"","a__e__f":null,"a__h":[]}
    """
    # Map the requested output style onto json.dumps separators/indent.
    if style == "compact":
        dumps_separators = (",", ":")
        dumps_indent = None
    elif style == "pretty":
        dumps_separators = None
        dumps_indent = 2
    elif style == "flat":
        dumps_separators = (",", ": ")
        dumps_indent = 0
    else:
        dumps_separators = None
        dumps_indent = None

    # Default to stdin and the "__" key separator.
    if not input:
        input = "-"
    if separator is None:
        separator = "__"
    with click.open_file(input, mode="rb") as f:
        # load_ordered preserves key order through the flatten step.
        data = json_utils.load_ordered(f)
        data = flatten(data, separator)
        s = json.dumps(data, indent=dumps_indent, separators=dumps_separators, sort_keys=sort_keys)
        click.echo(s)
Example #25
0
def info(input, verbose, pyformat, **kwargs):
    """
    Provides info about the input. Requires valid input.

    :param input: path of a Robot Framework output XML; falsy means stdin.
    :param verbose: also report the root element's type and members.
    :param pyformat: pretty-print with pprint instead of JSON.
    """
    if not input:
        input = '-'
    with click.open_file(input, mode='rb') as f:
        # root = xml_utils.load(f)
        tree = ET.parse(f)
        root = tree.getroot()
        d = {}
        d.update({'xml': xml_utils.element_info(root)})

        # Count the Robot-Framework-specific elements of interest.
        rf_metrics = {
            'suites': xml_utils.count_elements(tree, xpath='//suite'),
            'tests': xml_utils.count_elements(tree, xpath='//test'),
            'messages': xml_utils.count_elements(tree, xpath='//msg')
        }
        d.update({'robot': rf_metrics})

        if verbose:
            d['_object'] = {
                'type': type(root),
                # 'repr': repr(root),
                # 'vars': sorted(vars(root)),
                # 'dir': sorted(dir(root)),
                'members': sorted(varsdict(root).keys())
            }
        # click.echo(d)
        # click.echo(sorted(d.items()))
        if pyformat:
            s = pformat(d)
        else:
            s = json.dumps(d, indent=2, sort_keys=True)
        click.echo(s)
Example #26
0
def proselint(paths=None, version=None, clean=None, debug=None,
              output_json=None, time=None, demo=None, compact=None):
    """A CLI for proselint, a linter for prose."""
    if time:
        click.echo(timing_test())
        return

    # In debug or clean mode, delete cache & *.pyc files before running.
    if debug or clean:
        clear_cache()

    # Use the demo file by default.
    if demo:
        paths = [demo_file]

    # Expand the list of directories and files.
    filepaths = extract_files(list(paths))

    # Lint the files
    num_errors = 0
    for fp in filepaths:
        try:
            f = click.open_file(fp, 'r', encoding="utf-8", errors="replace")
            errors = lint(f, debug=debug)
            num_errors += len(errors)
            print_errors(fp, errors, output_json, compact=compact)
        except Exception:
            # Best-effort: report the traceback and keep linting the rest.
            traceback.print_exc()

    # Return an exit code
    close_cache_shelves()
    if num_errors > 0:
        sys.exit(1)
    else:
        sys.exit(0)
Example #27
0
def normalize_input(input):
    """Normalize file or string input."""
    # Prefer treating the value as a readable file; fall back to the
    # literal string when no such file can be opened.
    try:
        return click.open_file(input).readlines()
    except IOError:
        return [input]
Example #28
0
def strip(input, prune_null, style, **kwargs):
    """
    Removes specified portions of data from the input. Requires valid input.

    Examples:

        \b
        Example: Remove all elements with value=null:
        $ echo '{"a":{"b":null,"c":"null","d":"","e":{"f":null},"g":{},"h":[]}}' | python -mclifunzone.jsontool strip -n
        {"a": {"c": "null", "d": "", "e": {}, "g": {}, "h": []}}
    """
    # Map the requested output style onto json.dumps separators/indent.
    if style == "compact":
        dumps_separators = (",", ":")
        dumps_indent = None
    elif style == "pretty":
        dumps_separators = None
        dumps_indent = 2
    elif style == "flat":
        dumps_separators = (",", ": ")
        dumps_indent = 0
    else:
        dumps_separators = None
        dumps_indent = None

    # Default to stdin when no input path was supplied.
    if not input:
        input = "-"
    with click.open_file(input, mode="rb") as f:
        data = json_utils.load_ordered(f)
        # Optionally drop every None-valued entry, recursively.
        if prune_null:
            data = filter_none_values(data, recursive=True)
        s = json.dumps(data, indent=dumps_indent, separators=dumps_separators)
        click.echo(s)
Example #29
0
def run(file):
    """Solve the cookie-ingredients puzzle (Advent of Code 2015 day 15).

    Part 1: best score over all ingredient allocations.
    Part 2: best score among allocations totalling exactly 500 calories.
    """
    from functools import reduce  # Bug fix: reduce is not a builtin on Python 3.

    row_re = re.compile(r'([a-zA-Z]+): capacity (-?[0-9]+), durability (-?[0-9]+), flavor (-?[0-9]+), texture (-?[0-9]+), calories (-?[0-9]+)')
    ingredients = []
    with click.open_file(file) as strings:
        for row in strings:
            data = re.match(row_re, row.rstrip())
            # [capacity, durability, flavor, texture, calories]
            ingredients.append([
                int(data.group(2)),
                int(data.group(3)),
                int(data.group(4)),
                int(data.group(5)),
                int(data.group(6)),
            ])
    tot = 0
    tot2 = 0
    for e in loop(1, ingredients, []):
        # Scale each ingredient's property vector by its teaspoon count.
        temp_tots = []
        for i, v in enumerate(e):
            # Bug fix: map() results are lazy iterators on Python 3 and
            # cannot be zipped repeatedly or sliced/indexed below; build
            # real lists instead.
            ing_tots = [x * v for x in ingredients[i]]
            temp_tots.append(ing_tots)
        # Per-property totals across all ingredients.
        prop_tots = [sum(col) for col in zip(*temp_tots)]
        if min(prop_tots) > 0:
            # Score is the product of all properties except calories.
            temp_tot = reduce(lambda k, z: k * z, prop_tots[:-1])
            tot = max([tot, temp_tot])
            if prop_tots[4] == 500:
                tot2 = max([tot2, temp_tot])

    click.echo('Part 1 Total is {}'.format(tot))
    click.echo('Part 2 Total is {}'.format(tot2))
 def fetch_feeds(self):
     # Download the RSS feed, persist a copy to "Octocats.xml" under the
     # instance's base path, then parse and cache the XML on self.feeds.
     self.logger.info("fetching RSS feeds ...")
     response = self.session.get(self.feeds_url)
     with click.open_file(self.join_path("Octocats.xml"), "w") as fp:
         fp.write(response.text)
     self.feeds = ET.fromstring(response.text)
     return self.feeds
def duplicates(i, o, delineator):
    """
    Takes the results of parser and outputs a sorted list of duplicated
    test entries.

    :param i: input file path (or '-' for stdin).
    :param o: output file path (or '-' for stdout).
    :param delineator: string separating individual tests in the input.
    """
    from collections import Counter  # stdlib; counts occurrences in one pass

    with click.open_file(i, 'r') as f_in:
        tests = f_in.read().split(delineator)

    # Perf fix: the previous pop-and-membership loop was O(n^2); a Counter
    # finds every entry occurring more than once in O(n), with the same
    # sorted result.
    dupes = sorted(t for t, count in Counter(tests).items() if count > 1)

    with click.open_file(o, 'w+') as f_out:
        f_out.write('\n'.join(dupes))
Example #32
0
 def install_completion(path, script, name):
   # Append a shell-completion script to the file at `path`, creating it
   # if missing and skipping the append when already present.
   try:
     with click.open_file(path) as f:
       contents = f.read()
   except FileNotFoundError:
     # No existing rc/profile file; start from empty contents.
     contents = ''
   if script in contents:
     click.echo('Completion script {} already installed; skipping'.format(name))
     return
   # Ensure the appended script starts on its own line.
   if len(contents) > 0 and not contents.endswith('\n'):
     contents += '\n'
   contents += script + '\n'
   # NOTE(review): dry_run and shell are not parameters -- presumably closed
   # over from an enclosing scope; confirm they are in scope at runtime.
   if dry_run:
     click.secho('Dry run; not writing. Would have appended to {} the following text:'.format(path), bold=True)
     click.echo(script)
   else:
     # atomic=True writes to a temp file and renames it over the original.
     with click.open_file(path, 'w', atomic=True) as f:
       f.write(contents)
     click.secho('{} completion installed for `{}`'.format(shell, name), bold=True)
Example #33
0
def get_vars_from_file(filename):
    """Load a project's hls_config.py and return it as a module object.

    :param filename: path to the config file.
    :raises click.Abort: when the file cannot be opened.
    """
    try:
        with click.open_file(filename) as f:
            # NOTE(review): the imp module is deprecated (removed in Python
            # 3.12); consider importlib.  Also load_source(name, pathname,
            # file) is given an empty pathname here -- confirm intended.
            config = imp.load_source('config', '', f)
        return config
    except OSError:
        click.echo(
            "Error: No hls_config.py found, please create a config file for your project. For an example config file please see the 'examples' folder within the hlsclt install directory."
        )
        raise click.Abort()
Example #34
0
def filter(context, api_client, api_key, input_file, output_file, noise_only):
    """Filter the noise from a log file, stdin, etc."""
    # Fall back to stdin when no input file was given; refuse to run on an
    # interactive terminal with nothing piped in.
    if input_file is None:
        if not sys.stdin.isatty():
            input_file = click.open_file("-")
        else:
            usage = context.command.get_usage(context)
            error_msg = ("Error: at least one text file must be passed "
                         "either through the -i/--input_file option or through a shell pipe."
                         )
            click.echo("\n\n".join([usage, error_msg]))
            context.exit(-1)
    # Default the output to stdout.
    if output_file is None:
        output_file = click.open_file("-", mode="w")

    for chunk in api_client.filter(input_file, noise_only=noise_only):
        output_file.write(ANSI_MARKUP(chunk))
Exemple #35
0
def data(file, output):
    """Export Bayes document data."""
    cleaned = evaluate.clean_text(file.read())
    _, hypotheses = evaluate.run_file(cleaned)

    # Write the hypotheses as one JSON document followed by a newline.
    with cl.open_file(output, "w+") as f:
        json.dump(hypotheses, f)
        f.write("\n")
Exemple #36
0
def cmd(infile: str = '-',
        outfile: str = '-',
        in_type: str = None,
        out_type: str = None):
    """Config File Converter 配置文件互转工具

    supported formats: Java properties (.properties),Json(.json) and yaml(.yml,.yaml)
    """
    # Resolve formats first so a bad type never leaves a handle open.
    in_type = get_type(in_type, infile, 'properties')
    out_type = get_type(out_type, outfile, 'yml')

    decode = decoders[in_type]
    encode = encoders[out_type]

    # Context managers close the streams (the original never closed them);
    # click.open_file maps '-' to stdin/stdout and keeps those open.
    with click.open_file(infile, encoding='utf8') as infp:
        configs = decode(infp)
    with click.open_file(outfile, 'w', encoding='utf8') as outfp:
        encode(configs, outfp)
Exemple #37
0
def load_config(zucchini_state):
    """Load the user's configuration into *zucchini_state*, prompting to
    (re)create it when the config file cannot be read or parsed."""
    config_dir, config_path = get_config_location()

    try:
        with click.open_file(config_path, 'r') as config_file:
            zucchini_state.load_config_from_file(config_file, config_dir)
    # `except Exception` instead of a bare except so KeyboardInterrupt and
    # SystemExit still propagate; any config error falls back to setup.
    # TODO: Maybe better handling here, is it corrupt or nonexistent?
    except Exception:
        click.echo("Need to set up configuration before doing any other work.")
        prompt_for_config(zucchini_state)
Exemple #38
0
def batch(zonelist, output, workdir, overwrite):
    """Run merge() for every (basin, zone) pair listed in *zonelist*."""
    # Each line of the zone list is "<basin> <zone>".
    with click.open_file(zonelist) as fp:
        zones = [line.strip().split(' ') for line in fp]

    with click.progressbar(zones) as progress:
        for basin, zone in progress:
            click.echo('\r')
            merge(basin, zone, workdir)
Exemple #39
0
def summarize(ctx, file):
    """Summarize Autoprotocol as a list of plain English steps."""
    with click.open_file(file, 'r') as f:
        raw = f.read()
    try:
        protocol = json.loads(raw)
    except ValueError:
        click.echo(
            "The autoprotocol you're trying to summarize is invalid.")
        return
    AutoprotocolParser(protocol)
Exemple #40
0
def color_by_age(pattern, date_field, field_sep, infile):
    """Echo each line of *infile*, colorized by the age of its date field
    (1-based column *date_field*, split on *field_sep*)."""
    with click.open_file(infile, "r") as stream:
        for line in stream:
            try:
                field = line.split(field_sep)[date_field - 1]
                age = get_age_timedelta(field, pattern)
                colorize = get_colorfn_from_timedelta(age)
                click.echo(colorize(line), nl=False)
            except:  # noqa: E722 -- any parse failure: echo the line unstyled
                click.echo(line, nl=False)
Exemple #41
0
def parsemtl(mtl):
    """Converts a Landsat 8 text MTL
    to JSON
    """
    # If *mtl* names a readable file, use its contents; otherwise treat the
    # argument itself as the MTL text. The original fallback referenced an
    # undefined name `inputtiles` and raised NameError.
    try:
        with click.open_file(mtl) as src:
            text = str(src.read())
    except IOError:
        text = str(mtl)

    click.echo(json.dumps(_parse_mtl_txt(text)))
Exemple #42
0
def search(content):
    """Case-insensitively search for *content* in every file under
    BASE_PATH, echoing the containing directory and matching line."""
    needle = content.lower()
    for root, dirs, files in os.walk(BASE_PATH, topdown=False):
        for name in files:
            abs_path = os.path.join(root, name)
            with click.open_file(abs_path, 'r') as search_file:
                for line in search_file:
                    if needle in line.lower():
                        location = "/".join(abs_path.split('/')[1:-1])
                        click.echo(Fore.MAGENTA + location + ": " +
                                   Fore.RESET + line.strip())
Exemple #43
0
def main(ctx, cards_file):
    """ Generate Hearthstone card spoilers & checklists """
    # Accept either a URL or a local file path for the card database.
    if cards_file.lower().startswith(('http://', 'https://')):
        resp = requests.get(cards_file)
        resp.raise_for_status()
        data = resp.json()
    else:
        with click.open_file(cards_file) as fp:
            data = json.load(fp)
    ctx.obj = CardDB.from_json(data)
Exemple #44
0
def cls(workspace, output_file, measurement, patch, testpoi, teststat,
        optimizer, optconf):
    """Compute observed and expected CLs for *workspace*.

    Loads the workspace JSON, applies JSON patches, optionally swaps the
    optimizer, runs hypotest at *testpoi*, and prints the result as JSON
    (or writes it to *output_file* when given).
    """
    with click.open_file(workspace, 'r') as specstream:
        wspec = json.load(specstream)

    w = Workspace(wspec)

    is_qtilde = teststat == 'qtilde'

    # Read each patch file with a context manager so the handles are closed
    # (the original list comprehension leaked one open file per patch).
    patches = []
    for pfile in patch:
        with click.open_file(pfile, 'r') as patchstream:
            patches.append(json.load(patchstream))
    p = w.model(
        measurement_name=measurement,
        patches=patches,
        modifier_settings={'normsys': {
            'interpcode': 'code4'
        }},
    )

    # Flatten the list of single-entry dicts into one options dict.
    optconf = {k: v for item in optconf for k, v in item.items()}

    # set the new optimizer
    if optimizer:
        new_optimizer = getattr(optimize, optimizer)
        set_backend(tensorlib, new_optimizer(**optconf))

    result = hypotest(testpoi,
                      w.data(p),
                      p,
                      qtilde=is_qtilde,
                      return_expected_set=True)
    result = {
        'CLs_obs': result[0].tolist()[0],
        'CLs_exp': result[-1].ravel().tolist()
    }

    if output_file is None:
        click.echo(json.dumps(result, indent=4, sort_keys=True))
    else:
        with open(output_file, 'w+') as out_file:
            json.dump(result, out_file, indent=4, sort_keys=True)
        log.debug("Written to {0:s}".format(output_file))
Exemple #45
0
 def read(cls, file):
     """Read an authorized_keys file into a new instance.

     Lines beginning with '#' are skipped; every other line is parsed as
     a public key and added to the returned collection.
     """
     authorized_keys = cls()
     with click.open_file(file) as in_fh:
         for raw_line in in_fh:
             stripped = raw_line.strip()
             if stripped.startswith("#"):
                 continue
             authorized_keys.add_key(sshkeys.Key.from_pubkey_line(stripped))
     return authorized_keys
Exemple #46
0
def process_pipeline(processors, input, ignore_duplicate_keys):
    """Load a city model from *input* (.json/.off/.poly), validate its
    CityJSON version where applicable, then run it through *processors*.

    NOTE(review): the parameter name `input` shadows the builtin.
    """
    extensions = ['.json', '.off', '.poly']  #-- input allowed
    try:
        f = click.open_file(input, mode='r', encoding='utf-8-sig')
        extension = os.path.splitext(input)[1].lower()
        if extension not in extensions:
            raise IOError(
                "File type not supported (only .json, .off, and .poly).")
        #-- OFF file
        if (extension == '.off'):
            utils.print_cmd_status("Converting %s to CityJSON" % (input))
            cm = cityjson.off2cj(f)
        #-- POLY file
        elif (extension == '.poly'):
            utils.print_cmd_status("Converting %s to CityJSON" % (input))
            cm = cityjson.poly2cj(f)
        #-- CityJSON file
        else:
            utils.print_cmd_status("Parsing %s" % (input))
            cm = cityjson.reader(file=f,
                                 ignore_duplicate_keys=ignore_duplicate_keys)
            #-- version must be a string of the exact form "X.Y"
            if not isinstance(cm.get_version(), str):
                str1 = "CityJSON version should be a string 'X.Y' (eg '1.0')"
                raise click.ClickException(str1)
            pattern = re.compile(
                "^(\d\.)(\d)$")  #-- correct pattern for version
            pattern2 = re.compile(
                "^(\d\.)(\d\.)(\d)$")  #-- wrong pattern with X.Y.Z
            if pattern.fullmatch(cm.get_version()) == None:
                #-- distinguish the common X.Y.Z mistake from other garbage
                if pattern2.fullmatch(cm.get_version()) != None:
                    str1 = "CityJSON version should be only X.Y (eg '1.0') and not X.Y.Z (eg '1.0.1')"
                    raise click.ClickException(str1)
                else:
                    str1 = "CityJSON version is wrongly formatted"
                    raise click.ClickException(str1)
            #-- unsupported version is fatal; an old-but-supported one only warns
            if (cm.get_version() not in cityjson.CITYJSON_VERSIONS_SUPPORTED):
                allv = ""
                for v in cityjson.CITYJSON_VERSIONS_SUPPORTED:
                    allv = allv + v + "/"
                str1 = "CityJSON version %s not supported (only versions: %s), not every operators will work.\nPerhaps it's time to upgrade cjio? 'pip install cjio -U'" % (
                    cm.get_version(), allv)
                raise click.ClickException(str1)
            elif (cm.get_version() !=
                  cityjson.CITYJSON_VERSIONS_SUPPORTED[-1]):
                str1 = "v%s is not the latest version, and not everything will work.\n" % cm.get_version(
                )
                str1 += "Upgrade the file with 'upgrade_version' command: 'cjio input.json upgrade_version save out.json'"
                click.echo(click.style(str1, fg='red'))

    except ValueError as e:
        raise click.ClickException('%s: "%s".' % (e, input))
    except IOError as e:
        raise click.ClickException('Invalid file: "%s".\n%s' % (input, e))
    #-- chain the processors: each one receives the previous one's city model
    for processor in processors:
        cm = processor(cm)
Exemple #47
0
def take(ctx, path, kind, force, symlink):
    """Import a file as a document.

    The base filename becomes the document title.

    Should be a text type, but we leave that to user.

    --force will cause a similarly titled document to be overwritten
    in the case of a name conflict.

    """
    yew = ctx.obj["YEW"]
    # import ipdb; ipdb.set_trace()
    # Validate the path before touching the store; exit code 1 on bad input.
    if not os.path.exists(path):
        click.echo(f"path does not exist: {path}")
        sys.exit(1)
    if not os.path.isfile(path):
        click.echo(f"path is not a file: {path}")
        sys.exit(1)

    content = None

    # slurp file
    # Symlinked documents never read the file: the store keeps a reference.
    if not symlink:
        with click.open_file(path, "r", "utf-8") as f:
            content = f.read()

    # get location, filename, etc.
    # NOTE(review): fn/filename/file_extension are computed but not used below.
    fn = os.path.basename(path)
    filename, file_extension = os.path.splitext(fn)
    if not kind:
        kind = "txt"
    # Title is the extension-less path with separators flattened to '-'.
    title = os.path.splitext(path)[0]
    title = title.replace(os.sep, "-")
    # check if we have one with this title
    # the behaviour we want is for the user to continuously
    # ingest the same file that might be updated out-of-band
    # TODO: handle multiple titles of same name
    docs = yew.store.get_docs(name_frag=title, exact=True)
    if docs and not symlink:
        if len(docs) >= 1:
            if not force:
                click.echo("A document with this title exists already")
            # click.confirm(..., abort=True) aborts the command on "no";
            # on overwrite we update the first match and exit successfully.
            if force or click.confirm(
                    f"Overwrite existing document: {docs[0].name} ?",
                    abort=True):
                docs[0].put_content(content)
                sys.exit(0)

    if symlink:
        doc = yew.store.create_document(title, kind, symlink_source_path=path)
        click.echo(f"Symlinked: {doc.uid}")
    else:
        doc = yew.store.create_document(title, kind)
        doc.put_content(content)
Exemple #48
0
def main(code_file, editor):
    '''
    Creates and opens CODE_FILE with template code.

    If CODE_FILE already exists, 'code' just opens it in the default
    default/supplied editor without any change.

    If CODE_FILE is not passed, a default name of file is suggested
    based on current directory, language preferences and existing
    files in directory.

    Default CODE_FILE is <PROBLEM_NAME>.<DEFAULT_EXTENSION> if user is
    in a problem folder and no other supported code file exists.

    TODO [WIP]:
    If other code file(s) exist, it should suggests to open the most
    recently edited one.

    Template for the code is loaded based upon extension.

    See 'termicoder config' for editing default templates,
    editor, and language preferences.
    '''
    if code_file is None:
        default_name = get_default_code_name()
        code_file = click.prompt("Please enter a file name",
                                 default=default_name,
                                 type=click.Path(writable=True,
                                                 dir_okay=False))

    extension = code_file.split('.')[-1]
    template = config.read('lang/%s/template.yml' % extension)
    # `status` and `code_to_write` are read below even when no template
    # exists or substitution fails; the original left them unbound in those
    # cases and crashed with NameError.
    status = False
    code_to_write = None
    if (template is not None):
        try:
            code_to_write = template['code']
            # allow jinja style template substitution in command
            # example {{row_no}} and {{col_no}}
            # see settings.yml for info on usage
            status, editor = substitute(editor, template)
            # useful for sublime's go to line functionality
            # see settings.yml for info on usage
            status, editor = substitute(editor, {r"CODE_FILE": code_file})
            logger.debug(code_to_write)
        except (AttributeError, KeyError):
            logger.error("Problem with template file")
    else:
        logger.warning("You don't have templates setup for extension %s."
                       "Launching empty file " % extension)
    if (not os.path.exists(code_file)):
        # Write the template (if any) and close the handle before launching
        # the editor; the original never closed the file object.
        with click.open_file(code_file, 'w') as code:
            if code_to_write is not None:
                code.write(code_to_write)
    if status:
        code_file = ''
    launch(editor, code_file)
Exemple #49
0
def main(infile, outfile, color, font, font_size, portrait, scale, no_times,
         no_weekends, start_monday):
    """
    Weekly schedule typesetter

    Visit <https://github.com/jwodder/schedule> for more information.
    """
    # Resolve the font: a known installed name is used as-is, anything else
    # is registered as a custom TTF file.
    if font in available_fonts():
        font_name = font
    else:
        # Assume we've been given a path to a .ttf file
        font_name = 'CustomFont'
        ### TODO: Use the basename of the filename as the font name?  (Could
        ### that ever cause problems?)
        pdfmetrics.registerFont(TTFont(font_name, font))
    if portrait:
        page_width, page_height = pagesizes.portrait(pagesizes.letter)
    else:
        page_width, page_height = pagesizes.landscape(pagesizes.letter)
    colors = COLORS if color else [GREY]
    # Pick which days appear, in which order.
    if no_weekends:
        week = WEEKDAYS_EN
    elif start_monday:
        week = FULL_WEEK_MON_EN
    else:
        week = FULL_WEEK_EN
    sched = Schedule(week)
    for ev in read_events(infile, colors=colors):
        sched.add_event(ev)
    # Default output: stdout when reading stdin, otherwise <infile>.pdf.
    if outfile is None:
        if infile is sys.stdin:
            outfile_name = '-'
        else:
            outfile_name = str(Path(infile.name).with_suffix('.pdf'))
        outfile = click.open_file(outfile_name, 'wb')
    c = Canvas(outfile, (page_width, page_height))
    c.setFont(font_name, font_size)
    if scale is not None:
        # Shrink around the page center: translate then scale by 1/scale.
        factor = 1 / scale
        c.translate(
            (1 - factor) * page_width / 2,
            (1 - factor) * page_height / 2,
        )
        c.scale(factor, factor)
    # Render the grid inside a one-inch margin on all sides.
    sched.render(
        c,
        x=inch,
        y=page_height - inch,
        width=page_width - 2 * inch,
        height=page_height - 2 * inch,
        font_size=font_size,
        show_times=not no_times,
    )
    c.showPage()
    c.save()
Exemple #50
0
def proselint(paths=None,
              config=None,
              version=None,
              clean=None,
              debug=None,
              output_json=None,
              time=None,
              demo=None,
              compact=None):
    """Create the CLI for proselint, a linter for prose.

    Lints each file in *paths* (or stdin when none are given), printing the
    errors found, and exits with status 1 if any error was reported.
    """
    if time:
        # click.echo(timing_test())
        print("This option does not work for the time being.")
        return

    # In debug or clean mode, delete cache & *.pyc files before running.
    if debug or clean:
        clear_cache()

    # Use the demo file by default.
    if demo:
        paths = [demo_file]

    # Expand the list of directories and files.
    filepaths = extract_files(list(paths))

    # Lint the files
    num_errors = 0

    # Use stdin if no paths were specified
    if len(paths) == 0:
        filepaths.append('-')

    for fp in filepaths:
        try:
            if fp == '-':
                fp = '<stdin>'
                f = sys.stdin
            else:
                f = click.open_file(fp,
                                    'r',
                                    encoding="utf-8",
                                    errors="replace")
            # Close each opened file after linting (the original leaked the
            # handle); stdin is left open.
            try:
                errors = lint(f, debug=debug, config_file_path=config)
            finally:
                if f is not sys.stdin:
                    f.close()
            num_errors += len(errors)
            print_errors(fp, errors, output_json, compact=compact)
        except Exception:
            # Best-effort per file: report the traceback and keep going.
            traceback.print_exc()

    # Return an exit code
    close_cache_shelves()
    if num_errors > 0:
        sys.exit(1)
    else:
        sys.exit(0)
Exemple #51
0
def build_openapi_spec():
    """Creates an OpenAPI definition of Flask application,
    check conformity of generated definition against OpenAPI 2.0 specification
    and writes it into a file."""

    # Create OpenAPI specification object
    spec = APISpec(title=__title__,
                   version=__api_version__,
                   info=dict(description=__api_description__),
                   plugins=('apispec.ext.flask', 'apispec.ext.marshmallow'))

    # Add marshmallow schemas to the specification here
    # spec.definition('Example', schema=Example_schema)

    # Collect OpenAPI docstrings from Flask endpoints
    for key in current_app.view_functions:
        if key not in ('static', 'get_openapi_spec'):
            spec.add_path(view=current_app.view_functions[key])

    spec_json = json.dumps(spec.to_dict(),
                           indent=2,
                           separators=(',', ': '),
                           sort_keys=True)

    # Output spec to JSON file
    with click.open_file(__output_path__,
                         mode='w+',
                         encoding=None,
                         errors='strict',
                         lazy=False,
                         atomic=False) as output_file:
        output_file.write(spec_json)
        click.echo(
            click.style('OpenAPI specification written to {}'.format(
                output_file.name),
                        fg='green'))

    # Check that generated spec passes validation. Done after writing to file
    # in order to give user easy way to access the possible erroneous spec.
    with open(os.path.join(os.getcwd(), __output_path__)) as output_file:
        validate_json(json.load(output_file), 'schemas/v2.0/schema.json')
        click.echo(
            click.style('OpenAPI specification validated successfully',
                        fg='green'))

    return spec.to_dict()
Exemple #52
0
def normalize_input(input):
    """Normalize file or string input.

    Original code from https://github.com/mapbox/mercantile/blob/71bb3dbdaeb4ccf0e14bfabf1f58d36465cd5289/mercantile/scripts/__init__.py#L34-L40
    License: BSD-3 Original work Copyright 2021 Mapbox

    Returns the lines of *input* when it names a readable file (or '-'),
    otherwise a single-element list containing *input* itself.
    """
    try:
        # Context manager closes the handle; the original leaked it.
        with click.open_file(input) as src_file:
            src = src_file.readlines()
    except IOError:
        src = [input]
    return src
def union(inputtiles, parsenames):
    """
    Returns the unioned shape of a steeam of [<x>, <y>, <z>] tiles in GeoJSON.
    """
    # Accept either a file of tiles or a literal tile string; the context
    # manager closes the handle (the original leaked it).
    try:
        with click.open_file(inputtiles) as src:
            inputtiles = src.readlines()
    except IOError:
        inputtiles = [inputtiles]
    unioned = uniontiles.union(inputtiles, parsenames)
    for u in unioned:
        click.echo(json.dumps(u))
def main(output_filepath):
    """ Fetches data by scraping a website's content and saves raw HTML into data/raw.
    """
    logger = logging.getLogger(__name__)
    logger.info('fetch raw HTML data from website')

    response = requests.get(
        'https://willisau.ch/wirtschaft-entwicklung/wirtschaft/firmenverzeichnis/'
    )
    # Persist the raw HTML verbatim for later parsing.
    with click.open_file(output_filepath, 'w') as f:
        f.write(response.text)
Exemple #55
0
def deepl(ctx, api_key):
    """translate cjk-containing strings with deepl api.

    sadly, this costs $$$ :(
    """

    with click.open_file(ctx.obj['db_path'], 'r') as f:
        db = json.load(f)

    deepl = Deepl(api_key)

    # Only Japanese keys that have no translation yet need work.
    pending = [item for item in db.items()
               if item[1] is None and is_str_jp(item[0])]

    for k, v in tqdm(pending):
        db[k] = deepl.trans(k, preserve_formatting=True).text
        tqdm.write(f'{k} -> {db[k]}')

        # write every time in case we ^C. don't wanna lose progress, deepl is $$$
        with click.open_file(ctx.obj['out_path'], 'w') as f:
            json.dump(db, f, indent=2, ensure_ascii=False)
        time.sleep(0.01)
Exemple #56
0
def AggregateShapefile(zonelist, basename, overwrite):
    """
    Aggregate shapefiles in subdirectories
    """

    output = os.path.join('.', basename)

    # Refuse to clobber an existing output unless --overwrite was given.
    if os.path.exists(output) and not overwrite:
        click.secho('Output already exists : %s' % output, fg='yellow')
        return

    # Each line of the zone list is "<bassin> <zone>".
    with click.open_file(zonelist) as fp:
        zones = [info.strip().split(' ') for info in fp]

    # Output dataset is opened lazily by write() from the first feature,
    # so driver/schema/crs can be copied from the first input shapefile.
    dst = None
    count = 0

    def write(feature, driver, schema, crs):
        """
        Write feature to output
        """

        nonlocal dst

        if dst is None:
            options = dict(driver=driver, schema=schema, crs=crs)
            dst = fiona.open(output, 'w', **options)

        dst.write(feature)

    def progress_status(item):
        # Shown next to the progress bar: the current zone name.
        if item is not None:
            return item[1]
        return '...'

    with click.progressbar(zones,
                           label='Aggregate features',
                           item_show_func=progress_status) as progress:
        for bassin, zone in progress:

            shapefile = os.path.join(bassin, zone, basename)

            # Missing inputs are warned about and skipped, not fatal.
            if not os.path.exists(shapefile):
                click.secho('\rMissing file : %s' % shapefile, fg='yellow')
                continue

            with fiona.open(shapefile) as fs:
                for feature in fs:
                    write(feature, fs.driver, fs.schema, fs.crs)
                    count += 1

    # dst is still None when no feature was ever written.
    if dst:
        click.secho('Wrote %d features to %s' % (count, output), fg='green')
        dst.close()
Exemple #57
0
def words(document):
    """
    List the keywords in the document

    Reads *document*, extracts keywords, and echoes them as a table.
    """
    # Close the handle after reading (the original leaked it) and flatten
    # newlines/tabs so the text is one continuous string.
    with click.open_file(document, errors='ignore') as fh:
        document_text = fh.read().replace('\n', '').replace('\t', '')

    click.echo('....Performing Inference {}'.format(emoji.emojize(':boom:', use_aliases=True)))
    document_keywords = keywords(document_text).split('\n')
    keywords_df = pd.DataFrame({"keywords": document_keywords})
    keywords_table = tabulate(keywords_df, headers=['keyword'], tablefmt="fancy_grid")
    click.echo(keywords_table)
Exemple #58
0
def preview(protocol_name):
    """Run the preview of the named protocol using the command_string from
    manifest.json, feeding the preview JSON via a temporary file."""
    with click.open_file('manifest.json', 'r') as f:
        manifest = json.loads(f.read())
    p = next(p for p in manifest['protocols'] if p['name'] == protocol_name)
    command = p['command_string']
    from subprocess import call
    import tempfile
    # mode='w' is required: NamedTemporaryFile defaults to binary mode,
    # which rejects the str returned by json.dumps (TypeError on Python 3).
    with tempfile.NamedTemporaryFile(mode='w') as fp:
        fp.write(json.dumps(p['preview']))
        fp.flush()
        call(["bash", "-c", command + " " + fp.name])
Exemple #59
0
 def save(self, path):
     """Persist the account credentials to *path* as indented JSON."""
     payload = {
         'email': self.email,
         'token': self.token,
         'organization': self.organization,
         'api_root': self.api_root,
     }
     with click.open_file(expanduser(path), 'w') as f:
         json.dump(payload, f, indent=2)
def cmd_open_fasta(fastas):
    """Loads one or more FASTA files for processing.

    Yields the parsed contents of each file; files that cannot be opened
    or parsed are reported on stderr and skipped.
    """
    for fasta in fastas:
        try:
            click.echo('Opening "%s"' % fasta)
            # Context manager closes each handle (the original leaked them).
            with click.open_file(fasta) as file_handle:
                inf_fasta = FASTA(file_handle)
                yield inf_fasta.read()
        except Exception as e:
            click.echo('Could not open FASTA "%s": %s' % (fasta, e), err=True)