Code example #1
File: execution.py Project: JethroTseng/http-prompt
    def visit_immutation(self, node, children):
        context = self._final_context()
        child_type = children[0].expr_name

        if child_type == 'preview':
            if self.tool == 'httpie':
                command = ['http'] + context.httpie_args(self.method,
                                                         quote=True)
            else:
                assert self.tool == 'curl'
                command = ['curl'] + context.curl_args(self.method, quote=True)
            click.echo(' '.join(command))
        elif child_type == 'action':
            output = BytesIO()
            try:
                env = Environment(stdout=output, is_windows=False)
                httpie_main(context.httpie_args(self.method), env=env)
                content = output.getvalue()
            finally:
                output.close()

            # XXX: Work around a bug of click.echo_via_pager(). When you pass
            # a bytestring to echo_via_pager(), it converts the bytestring with
            # str(b'abc'), which makes it "b'abc'".
            if six.PY2:
                content = unicode(content, 'utf-8')  # noqa
            else:
                content = str(content, 'utf-8')
            click.echo_via_pager(content)

        return node
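
The XXX comment above describes the workaround: click.echo_via_pager() stringifies whatever it receives, so the captured stdout bytes are decoded before paging. A minimal sketch of that decode-before-paging step on its own (the function name and sample bytes are illustrative):

import click

def page_bytes(raw):
    # Decode captured output to text before handing it to the pager,
    # as the comment above recommends.
    click.echo_via_pager(raw.decode('utf-8'))

if __name__ == '__main__':
    page_bytes(b'HTTP/1.1 200 OK\n\nhello\n')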
Code example #2
File: users.py Project: dmwesterhoff/slackd
def users():
    """List all of the team's users and their status"""
    try:
        response = slack_client.users.list(presence=True)
    except Exception as e:
        click.echo(str(e))
        return

    if response.successful:
        users = response.body['members']

        # Collect array of arrays that contain user data in column order
        table_data = []
        for user in users:
            if not user['deleted']:
                user_data = [user['name'],
                             user.get('real_name', None),
                             user.get('presence', 'bot'),
                             user['profile'].get('email', None)]
                table_data.append(user_data)
        table_data.sort(key=operator.itemgetter(2))
        table_headers = [click.style('User', fg='yellow'),
                         click.style('Name', fg='yellow'),
                         click.style('Presence', fg='yellow'),
                         click.style('Email', fg='yellow')]
        click.secho(tabulate.tabulate(table_data,
                                      table_headers,
                                      tablefmt="fancy_grid"))
    else:
        click.secho('Failed to fetch users from Slack', fg='red', err=True)
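
The styled-headers-plus-tabulate pattern above can be tried in isolation; a small sketch with made-up rows (only click and tabulate are assumed to be installed):

import click
import tabulate

rows = [['alice', 'Alice A.', 'active', 'alice@example.com'],
        ['buildbot', 'Build Bot', 'bot', 'bot@example.com']]
headers = [click.style(h, fg='yellow') for h in ('User', 'Name', 'Presence', 'Email')]

# The colour codes live inside the header strings; secho() just prints the grid.
click.secho(tabulate.tabulate(rows, headers, tablefmt='fancy_grid'))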
Code example #3
File: coverme.py Project: spumer/coverme
def nice_echo(msg):
    if msg.startswith('+++'):
        click.secho(msg, fg='green')
    elif msg.startswith('***'):
        click.secho(msg, fg='red', err=True)
    else:
        click.echo(msg)
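
A possible use of nice_echo(), showing how the prefixes route output (the messages are made up): '+++' lines go to stdout in green, '***' lines go to stderr in red, and everything else is echoed unstyled.

nice_echo('+++ backup uploaded')    # green, stdout
nice_echo('*** upload failed')      # red, stderr
nice_echo('3 files processed')      # plain click.echo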
Code example #4
File: hydra-eval-failures.py Project: KaiHa/nixpkgs
def cli(jobset):
    """
    Given a Hydra project, inspect latest evaluation
    and print a summary of failed builds
    """

    url = "http://hydra.nixos.org/jobset/{}".format(jobset)

    # get the last evaluation
    click.echo(click.style(
        'Getting latest evaluation for {}'.format(url), fg='green'))
    d = get_response_text(url)
    evaluations = d('#tabs-evaluations').find('a[class="row-link"]')
    latest_eval_url = evaluations[0].get('href')

    # parse last evaluation page
    click.echo(click.style(
        'Parsing evaluation {}'.format(latest_eval_url), fg='green'))
    d = get_response_text(latest_eval_url + '?full=1')

    # TODO: aborted evaluations
    # TODO: dependency failed without propagated builds
    for tr in d('img[alt="Failed"]').parents('tr'):
        a = pq(tr)('a')[1]
        print("- [ ] [{}]({})".format(a.text, a.get('href')))

        sys.stdout.flush()

        maintainers = get_maintainers(a.text)
        if maintainers:
            print("  - maintainers: {}".format(", ".join(map(lambda u: '@' + u, maintainers))))
        # TODO: print last three persons that touched this file
        # TODO: pinpoint the diff that broke this build, or maybe it's transient or maybe it never worked?

        sys.stdout.flush()
Code example #5
File: dev_tools.py Project: AlinaKay/Flexget
def bump_version(bump_type):
    """Bumps version to the next release, or development version."""
    cur_ver = _get_version()
    click.echo('current version: %s' % cur_ver)
    ver_split = cur_ver.split('.')
    if 'dev' in ver_split[-1]:
        if bump_type == 'dev':
            # If this is already a development version, increment the dev count by 1
            ver_split[-1] = 'dev%d' % (int(ver_split[-1].strip('dev') or 0) + 1)
        else:
            # Just strip off dev tag for next release version
            ver_split = ver_split[:-1]
    else:
        # Increment the revision number by one
        if len(ver_split) == 2:
            # We don't have a revision number, assume 0
            ver_split.append('1')
        else:
            if 'b' in ver_split[2]:
                # beta version
                minor, beta = ver_split[-1].split('b')
                ver_split[-1] = '%sb%s' % (minor, int(beta) + 1)
            else:
                ver_split[-1] = str(int(ver_split[-1]) + 1)
        if bump_type == 'dev':
            ver_split.append('dev')
    new_version = '.'.join(ver_split)
    for line in fileinput.FileInput('flexget/_version.py', inplace=1):
        if line.startswith('__version__ ='):
            line = "__version__ = '%s'\n" % new_version
        print(line, end='')
    click.echo('new version: %s' % new_version)
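
Tracing bump_version() by hand may help; for the illustrative current versions below (not taken from the project), any non-'dev' bump_type takes the release branch of the logic:

# current version    bump_type    resulting version
# '2.10.1'           release      '2.10.2'       (last component incremented)
# '2.10.1'           'dev'        '2.10.2.dev'   (incremented, then 'dev' appended)
# '2.10.2.dev'       'dev'        '2.10.2.dev1'  (dev counter assumed 0, then +1)
# '2.10.2.dev1'      'dev'        '2.10.2.dev2'
# '2.10.2.dev2'      release      '2.10.2'       (dev tag stripped)
# '2.10'             release      '2.10.1'       (missing revision assumed 0)
# '2.11.0b1'         release      '2.11.0b2'     (beta counter incremented)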
Code example #6
File: euler.py Project: JoshKarpel/euler
def timeit(problem):
    """Time the solver for a problem."""
    problem = problem.rjust(3, '0')

    timer = _timeit.Timer('mod.solve()', setup = f'import importlib; mod = importlib.import_module(f"problems.{problem}")')
    loops, total_time = timer.autorange()
    click.echo(f'Time per Solve: {total_time / loops:.6f} seconds')
Code example #7
File: euler.py Project: JoshKarpel/euler
def check(start, end):
    """Solve many problems."""
    start = max(start, 1)

    header = f'  P  │ {"Answer".center(ANSWER_WIDTH)} │ C │ Elapsed Time'
    bar = ''.join('─' if char != '│' else '┼' for char in header)

    click.echo(header)
    click.echo(bar)

    for problem in range(start, end + 1):
        problem = str(problem).rjust(3, '0')
        try:
            mod = importlib.import_module(f'problems.{problem}')
            solver = solve_with_diagnostics(mod.solve)
            answer = solver()

            click.secho(f' {problem} │ {str(answer.answer).center(ANSWER_WIDTH)} │ {CORRECT_TO_STR[answer.correct]} │ {answer.elapsed_time:.6f} seconds',
                        fg = CORRECT_TO_COLOR[answer.correct])
        except (ImportError, ModuleNotFoundError):
            click.secho(f' {problem} │ {"SOLVER NOT FOUND".center(ANSWER_WIDTH)} │ ? │',
                        fg = 'yellow')
        except Exception as e:
            click.secho(f' {problem} │ {"EXCEPTION".center(ANSWER_WIDTH)} │ ? │',
                        fg = 'yellow')
Code example #8
File: cli.py Project: danielfrg/ec2hosts
def run(ctx):
    click.echo("New /etc/hosts file:")
    content = ec2hosts.gen_file()
    click.echo(content)
    if click.confirm('Do you want to continue?'):
        ec2hosts.write(content)
        ec2hosts.move()
Code example #9
File: update.py Project: AerisCloud/AerisCloud
def cli(force):
    """
    Update AerisCloud
    """
    if not force and config.get('github', 'enabled', default=False) == 'true':
        client = Github().gh
        repo = client.repository('aeriscloud', 'aeriscloud')
        latest_release = repo.iter_releases().next()
        latest_version = latest_release.tag_name[1:]

        if semver.compare(version, latest_version) != -1:
            click.secho('AerisCloud is already up to date!', fg='green')
            sys.exit(0)

        click.echo('A new version of AerisCloud is available: %s (%s)' % (
            click.style(latest_version, fg='green', bold=True),
            click.style(latest_release.name, bold=True)
        ))

    # retrieve install script in a tmpfile
    tmp = tempfile.NamedTemporaryFile()
    r = requests.get('https://raw.githubusercontent.com/' +
                     'AerisCloud/AerisCloud/develop/scripts/install.sh')
    if r.status_code != 200:
        fatal('error: update server returned %d (%s)' % (
            r.status_code, r.reason))

    tmp.write(r.content)
    tmp.flush()

    os.environ['INSTALL_DIR'] = aeriscloud_path
    call(['bash', tmp.name])

    tmp.close()
Code example #10
File: bahmni.py Project: Bahmni/bahmni-package
def db_backup(ctx):
    should_copy_to_local_machine = click.prompt('Do you want to copy db backup to local machine in /db-backup directory? y/N', type=bool)
    if should_copy_to_local_machine:
        addExtraVar(ctx, "copy_to_local_machine", "yes")
    command = ctx.obj['ANSIBLE_COMMAND'].format("db-backup.yml", ctx.obj['EXTRA_VARS'])
    click.echo(command)
    subprocess.check_call(command, shell=True)
Code example #11
File: cli.py Project: danielfrg/ec2hosts
def main():
    try:
        cli(obj={})
    except Exception as e:
        import traceback
        click.echo(traceback.format_exc(), err=True)
        sys.exit(1)
Code example #12
File: cli.py Project: DracoBlue/twtxt
def config(ctx, key, value, remove, edit):
    """Get or set config item."""
    conf = ctx.obj["conf"]

    if not edit and not key:
        raise click.BadArgumentUsage("You have to specify either a key or use --edit.")

    if edit:
        return click.edit(filename=conf.config_file)

    if remove:
        try:
            conf.cfg.remove_option(key[0], key[1])
        except Exception as e:
            logger.debug(e)
        else:
            conf.write_config()
        return

    if not value:
        try:
            click.echo(conf.cfg.get(key[0], key[1]))
        except Exception as e:
            logger.debug(e)
        return

    if not conf.cfg.has_section(key[0]):
        conf.cfg.add_section(key[0])

    conf.cfg.set(key[0], key[1], value)
    conf.write_config()
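
The key argument above is treated as a (section, option) pair backed by a ConfigParser-style object. A hedged standalone sketch of the same get/set/remove flow using configparser directly (the file, section, and option names are illustrative):

import configparser

cfg = configparser.ConfigParser()
cfg.read('twtxt.cfg')

if not cfg.has_section('twtxt'):
    cfg.add_section('twtxt')

cfg.set('twtxt', 'nick', 'alice')       # set
print(cfg.get('twtxt', 'nick'))         # get -> 'alice'
cfg.remove_option('twtxt', 'nick')      # remove

with open('twtxt.cfg', 'w') as fh:      # persist, as write_config() does above
    cfg.write(fh)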
Code example #13
File: keenio.py Project: zerin108/wakadump
    def run(self):
        keen_client = KeenClient(
            project_id=self.project_id,
            write_key=self.write_key,
        )

        timezone = pytz.timezone(self.data['user']['timezone'])

        self.events = []
        with click.progressbar(self.data['days'],
                            label='Preparing keen.io events',
                            fill_char=click.style('#', fg='blue')) as days:

            for day in days:
                dt = self._parse_date(day['date'], timezone)

                self.append_event(dt, 'total', {
                    'seconds': day['grand_total']['total_seconds'],
                })

                categories = [
                    'editor',
                    'language',
                    'operating_system',
                    'project',
                ]
                for category in categories:
                    for item in day.get(category + 's', []):
                        self.append_event(dt, category, {
                            'seconds': item['total_seconds'],
                            'name': item['name'],
                        })

                files = {}
                for project in day.get('projects', []):
                    for f in project.get('dump', {}).get('files', []):
                        if f['name'] not in files:
                            files[f['name']] = 0
                        files[f['name']] += f['total_seconds']

                for name, seconds in files.items():
                    self.append_event(dt, 'file', {
                        'seconds': seconds,
                        'name': name,
                    })

        if len(self.events) == 0:
            click.echo('No events available for exporting to keen.io')
            return

        click.echo('Uploading events to keen.io...')
        start = datetime.utcfromtimestamp(self.data['range']['start'])
        end = datetime.utcfromtimestamp(self.data['range']['end'])
        collection = 'WakaTime Data Dump from {start} until {end}'.format(
            start=start.strftime('%Y-%m-%d'),
            end=end.strftime('%Y-%m-%d'),
        )
        keen_client.add_events({
            collection: self.events,
        })
Code example #14
File: msword_cli.py Project: waylan/msword-cli
def new(template, show):
    ''' 
    Create a new document and activate. 
    
    When the template path is relative, an attempt will be made to load 
    the template from the current working directory, and then from the 
    template directory set in Word's File Options dialog. If your 
    template file is in another location, you must specify an absolute 
    path.

    If no template path is provided, a new blank document will be created.
    '''
    try:
        if template:
            click.echo('Opening new document using template: "%s"' % template)
            doc = WORD.Documents.Add(template, Visible=show)
        else:
            click.echo('Opening new blank document.')
            doc = WORD.Documents.Add(Visible=show)
        if show and not WORD.Visible:
            # Only change state to visible if not visible
            # otherwise leave Word's visible state as-is
            WORD.Visible = show
    except com_error as e:
        raise click.ClickException(e.excepinfo[2])
Code example #15
File: msword_cli.py Project: waylan/msword-cli
def close(all, force):
    ''' 
    Close document(s). 
    
    Unless the --force option is used, Word will prompt
    to save any changes.
    
    Will only quit Word if no other documents are open.
    '''
    try:
        if all:
            doc = WORD.Documents
        else:
            doc = WORD.ActiveDocument
        if force:
            click.echo('Force closing document...')
            doc.Close(C.wdDoNotSaveChanges)
        else:
            click.echo('Closing document...')
            doc.Close(C.wdPromptToSaveChanges)

        if not WORD.Documents.Count:
            # Only quit if no other documents are open
            WORD.Quit()
    except com_error as e:
        raise click.ClickException(e.excepinfo[2])
Code example #16
def sane_get(parser, key):
    try:
        return parser.get(section, key)
    except ConfigParser.NoSectionError:
        click.echo('Config section "%s" missing' % section)
    except ConfigParser.NoOptionError:
        click.echo('Config section "%s" missing key "%s"' % (section, key))
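
The snippet above assumes an enclosing scope that supplies the parser and a section name. A hedged sketch of that surrounding context (Python 3; the file, section, and key names are illustrative):

import configparser as ConfigParser  # matches the ConfigParser.* names used above
import click

section = 'credentials'

parser = ConfigParser.ConfigParser()
parser.read('settings.ini')

api_key = sane_get(parser, 'api_key')  # echoes a message and returns None if missing
if api_key is None:
    click.echo('Continuing without an API key')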
Code example #17
File: cmdline.py Project: pombredanne/versionah
def bump(display_format, file_type, shtool, vcs, filename, bump):
    """Bump version in existing file.

    :param str display_format: Format to display output in
    :type filename: `tuple` of `str`
    :param filename: File to operate on
    :type file_type: `tuple` of `str`
    :param file_type: File type to produce
    :param bool shtool: Write shtool_ compatible files
    :param bool vcs: Tag release in version control
    :param str bump: Component to bump

    .. _shtool: http://www.gnu.org/software/shtool/shtool.html
    """
    if file_type and len(file_type) != len(filename):
        raise click.BadParameter("Number of --type options and filename args " "must match!")
    multi = len(filename) != 1
    for ftype, fname in zip(file_type + (None,) * len(filename), filename):
        if not ftype:
            ftype = guess_type(fname)

        version = CliVersion.read(fname)

        if not bump:
            bump = VERSION_COMPS[len(version.components) - 1]

        version.bump(bump)
        version.write(fname, ftype, shtool)

        if multi:
            click.echo("%s: " % fname, nl=False)
        success(version.display(display_format))
    return version
Code example #18
File: cmdline.py Project: ChillarAnand/pyvideo-data
def validate(ctx, paths):
    """Validates JSON file data located in PATHS."""
    if not paths:
        raise click.UsageError('No files or directories specified.')

    error_count = 0

    for path in paths:
        data = load_json_data(path)
        click.echo('Looking at %d items...' % len(data))
        for fn, item in data:
            errors = validate_item(fn, item)
            if errors:
                error_count += len(errors)
                for err in errors:
                    click.echo(
                        '%(fn)s: E:%(name)s:%(msg)s' % {'fn': fn, 'name': err.name, 'msg': err.msg},
                        err=True
                    )

    # FIXME: Validate things that need to be unique across the
    # dataset here.

    # FIXME: Validate file format? i.e. 2-space indents? Sort order?

    print('Done!')
    ctx.exit(code=1 if error_count else 0)
Code example #19
File: backup.py Project: 280185386/sentry
def export(dest, silent, indent, exclude):
    "Exports core metadata for the Sentry installation."

    if exclude is None:
        exclude = ()
    else:
        exclude = exclude.lower().split(',')

    from django.db.models import get_apps
    from django.core import serializers

    def yield_objects():
        app_list = [(a, None) for a in get_apps()]

        # Collate the objects to be serialized.
        for model in sort_dependencies(app_list):
            if (
                not getattr(model, '__core__', True) or
                model.__name__.lower() in exclude or
                model._meta.proxy
            ):
                if not silent:
                    click.echo(">> Skipping model <%s>" % (model.__name__,), err=True)
                continue

            queryset = model._base_manager.order_by(model._meta.pk.name)
            for obj in queryset.iterator():
                yield obj

    if not silent:
        click.echo('>> Beginning export', err=True)
    serializers.serialize("json", yield_objects(), indent=indent, stream=dest,
                          use_natural_keys=True)
Code example #20
File: cli.py Project: gitter-badger/passpie
def init(db, force, no_git, recipient, passphrase):
    if force:
        if os.path.isdir(db.path):
            shutil.rmtree(db.path)
            logging.info('removed directory %s' % db.path)
        elif os.path.isfile(db.path):
            os.remove(db.path)
            logging.info('removed file %s' % db.path)

    try:
        os.makedirs(db.path)
    except (SystemError, OSError):
        message = "Path exists '{}'. `--force` to overwrite".format(db.path)
        raise click.ClickException(click.style(message, fg='yellow'))

    if recipient:
        logging.info('create .passpierc file at %s' % db.path)
        config.create(db.path, defaults=dict(recipient=recipient))
    else:
        logging.info('create .passpierc file at %s' % db.path)
        config.create(db.path, defaults={})
        if not passphrase:
            passphrase = click.prompt('Passphrase',
                                      hide_input=True,
                                      confirmation_prompt=True)
        keys_filepath = os.path.join(db.config['path'], '.keys')
        create_keys(passphrase, keys_filepath, key_length=db.config['key_length'])

    if not no_git:
        logging.info('init git repository in %s' % db.path)
        db.repo.init()
        db.repo.commit(message='Initialized git repository', add=True)

    click.echo("Initialized database in {}".format(db.path))
Code example #21
File: cli.py Project: gitter-badger/passpie
def status(db, full, days, passphrase):
    ensure_passphrase(passphrase, db.config)
    credentials = db.credentials()

    for cred in credentials:
        decrypted = decrypt(cred['password'],
                            recipient=db.config['recipient'],
                            passphrase=passphrase,
                            homedir=db.config['homedir'])
        cred["password"] = decrypted

    if credentials:
        limit = db.config['status_repeated_passwords_limit']
        credentials = checkers.repeated(credentials, limit)
        credentials = checkers.modified(credentials, days)

        for c in credentials:
            if c['repeated']:
                c['repeated'] = click.style(str(c['repeated']), 'red')
            if c['modified']:
                c['modified'] = click.style(str(c['modified']), 'red')

        table = Table(['fullname', 'repeated', 'modified'],
                      table_format=db.config['table_format'],
                      missing=click.style('OK', 'green'))
        click.echo(table.render(credentials))
Code example #22
File: betelgeuse.py Project: mrniranjan/betelgeuse
def test_results(path):
    """Summary of tests from the jUnit XML file."""
    test_summary = parse_test_results(parse_junit(path))
    summary = '\n'.join(
        ['{0}: {1}'.format(*status) for status in test_summary.items()]
    ).title()
    click.echo(summary)
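
A small illustration of the join-and-title() formatting above with a made-up summary dict (parse_test_results() and parse_junit() are project helpers):

test_summary = {'passed': 10, 'failed': 2, 'skipped': 1}
summary = '\n'.join(
    ['{0}: {1}'.format(*status) for status in test_summary.items()]
).title()
# summary == 'Passed: 10\nFailed: 2\nSkipped: 1'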
Code example #23
File: commands.py Project: AndrzejR/cubes
def validate(show_defaults, show_warnings, model_path):
    """Validate model metadata"""

    click.echo("Reading model %s" % model_path)
    model = cubes.read_model_metadata(model_path)

    click.echo("Validating model...")
    result = cubes.providers.validate_model(model)

    error_count = 0
    warning_count = 0
    default_count = 0

    for error in result:
        if error.scope == "model":
            scope = "model"
        else:
            if error.object:
                scope = "%s '%s'" % (error.scope, error.object)
            else:
                scope = "unknown %s" % error.scope

        if error.property:
            scope += " property '%s'" % error.property

        show = True
        if error.severity == "error":
            error_count += 1
        elif error.severity == "warning":
            warning_count += 1
            show = show_warnings
        elif error.severity == "default":
            show = show_defaults
            default_count += 1

        if show:
            print("%s in %s: %s"
                  % (error.severity.upper(), scope, error.message))

    if error_count == 0:
        if warning_count == 0:
            if default_count == 0:
                verdict = "model can be used"
            else:
                verdict = "model can be used, " \
                          "make sure that the defaults reflect reality"
        else:
            verdict = "not recommended to use the model, " \
                      "some issues might emerge"
    else:
        verdict = "model can not be used"

    print("")
    print("Defaults used  %d" % default_count)
    print("Warning        %d" % warning_count)
    print("Errors         %d" % error_count)
    print("Summary        %s" % verdict)

    if error_count > 0:
        exit(1)
Code example #24
File: commands.py Project: istrategylabs/mo-cli
def logs(env, **kwargs):
    """Executes the heroku logs command with the tail flag. The default
    environment is staging but production can be tailed using the environment
    flag
    """
    config = kwargs.get('config')
    envs = config.get('heroku')

    try:
        # If there is no heroku section of the mo.yml file,
        # raise an error
        if envs is None:
            err = 'No heroku apps defined for this project'
            raise MalformedMoYaml(err)

        # Look up the requested environment only after confirming the
        # heroku section exists, so a missing section can't raise here.
        app = envs.get(env)

        if app is None:
            err = 'No {0} heroku environment defined'.format(env)
            raise MalformedMoYaml(err)

    except MalformedMoYaml as err:
        sys.exit(err)

    click.echo("\n   heroku logs --tail --app {0}\n".format(app))
    call(["heroku", "logs", "--tail", "--app", app])
Code example #25
def analyze_content(size):
  """Analyze content for basic size information.

  Gives basic information about the total size of
  content in MegaBytes and the suitable storage medium to copy
  content.

  Args:
    size: Size of intended storage medium in MB
  """
  with open("./config.yaml") as data_file:
    conf_data = yaml.load(data_file)

  src_dir = conf_data["source"]["main_path"]
  total_size = get_size(src_dir) / 1000000.00

  if not size:
    single_layered_disc = int(math.ceil(total_size / 4700))
    dual_layered_disc = int(math.ceil(total_size / 8500))
    flash = int(math.ceil(total_size / 16000))
    click.echo("The total size of content is {0}MB".format(total_size))
    click.echo("You need {0} single-layered DVD disc(s) or {1} dual-layered"
               " DVD disc(s) to copy content".format(single_layered_disc,
                                                     dual_layered_disc))
    click.echo(
        " OR You need {0} (16GB) flash drive(s) to copy content".format(flash))
  else:
    device_number = int(math.ceil(total_size / int(size)))
    click.echo("The total size of content is {0}MB".format(total_size))
    click.echo(
        "You need {0} storage device(s) of this size to copy the content".format(device_number))
Code example #26
File: ingestion_client.py Project: pmars/tools
def postfile(host, port, user, auth_token, db, table, filename, lines, version):
    """
        post data reading from file to server
    """
    if not os.path.isfile(filename):
        click.echo('[ERROR] File does not exist')
        return

    headers = {'X-USERNAME':user, 'X-AUTH-TOKEN':auth_token}
    url = "http://%s:%s/%s/%s/%s" % (host, port, version, db, table)

    total_lines = wc['-l', filename]().split()[0]
    over_lines = 0
    post_data = []
    for data in open(filename):
        js = json.loads(data)
        js['tags']= js['tags'][0] if len(js['tags']) else ''
        post_data.append(js)
        if len(post_data) == lines:
            over_lines = over_lines + lines
            r = requests.post(url, data=json.dumps(post_data), headers=headers)
            if r.status_code != 200:
                click.echo("Invalid response from server")
                return
            click.echo('Posted %s/%s lines of data to the ingestion server' % (over_lines, total_lines))
            post_data = []

    if len(post_data) > 0:
        over_lines = over_lines + len(post_data)
        requests.post(url, data=json.dumps(post_data), headers=headers)
        click.echo('Posted %s/%s lines of data to the ingestion server' % (over_lines, total_lines))

    click.echo("All data posted to the ingestion server")
Code example #27
File: phonegap.py Project: wq/wq.app
def upload_zipfile(directory, filename, token, pgb_api):
    pgb_url = pgb_api + "{path}?auth_token={token}"
    app_id = get_pgb_config(directory, 'app_id')
    if app_id:
        url = pgb_url.format(
            path='apps/{app_id}'.format(app_id=app_id),
            token=token,
        )
        method = 'put'
    else:
        url = pgb_url.format(
            path='apps',
            token=token,
        )
        method = 'post'

    response = requests.request(
        method,
        url,
        data={
            'data': json.dumps({'create_method': 'file'}),
        },
        files={
            'file': click.open_file(filename, 'rb'),
        }
    )
    if check_error(response):
        return
    result = response.json()
    if not app_id:
        set_pgb_config(directory, 'app_id', result['id'])
    click.echo("URL: {share_url}".format(share_url=result['share_url']))
    error = result.get('error', None)
    if error:
        click.echo("PGB Warning: {error}".format(error=error))
Code example #28
File: commands.py Project: AndrzejR/cubes
def main(*args, **kwargs):

    try:
        cli(*args, **kwargs)

    except InconsistencyError as e:
        # Internal Error - error caused by some edge case condition, misbehaved
        # cubes or wrongly categorized error
        #
        # It is very unlikely that the user might fix this error by changing
        # his/her input.
        #
        if os.environ.get("CUBES_ERROR_DEBUG"):
            raise
        else:
            click.echo("\n" \
                  "Error: Internal error occurred.\n"
                  "Reason: {}\n\n" \
                  "Please report the error and information about what you " \
                  "were doing to the Cubes development team.\n"
                  .format(e), err=True)
            sys.exit(1)

    except (InternalError, UserError) as e:
        # Error caused by the user – model or data related.
        #
        # User can fix the error by altering his/her input.
        #
        if os.environ.get("CUBES_ERROR_DEBUG"):
            raise
        else:
            click.echo("\nError: {}".format(e), err=True)
            sys.exit(1)
Code example #29
File: build.py Project: houcy/mitmproxy
def wizard(ctx, version, username, password, repository, projects):
    """
    Interactive Release Wizard
    """
    for project in projects:
        if subprocess.check_output(["git", "status", "--porcelain"], cwd=join(root_dir, project)):
            raise RuntimeError("%s repository is not clean." % project)

    # Build test release
    ctx.invoke(sdist, projects=projects)
    ctx.invoke(test, projects=projects)
    click.confirm("Please test the release now. Is it ok?", abort=True)

    # bump version, update docs and contributors
    ctx.invoke(set_version, version=version, projects=projects)
    ctx.invoke(docs)
    ctx.invoke(contributors)

    # version bump commit + tag
    ctx.invoke(git, args=["commit", "-a", "-m", "bump version"], projects=projects)
    ctx.invoke(git, args=["tag", "v" + version], projects=projects)
    ctx.invoke(git, args=["push"], projects=projects)
    ctx.invoke(git, args=["push", "--tags"], projects=projects)

    # Re-invoke sdist with bumped version
    ctx.invoke(sdist, projects=projects)
    click.confirm("All good, can upload to PyPI?", abort=True)
    ctx.invoke(upload_release, username=username, password=password, repository=repository)
    click.echo("All done!")
Code example #30
File: repair.py Project: duanshuaimin/sentry
def repair_callsigns():
    from sentry.utils.query import RangeQuerySetWrapperWithProgressBar, \
        RangeQuerySetWrapper
    from sentry.models.counter import increment_project_counter
    from sentry.models import Organization, Group, Project

    click.echo('Repairing callsigns')

    queryset = Organization.objects.all()

    for org in RangeQuerySetWrapperWithProgressBar(queryset):
        projects = list(org.project_set.all())
        callsigns = get_callsigns(projects)
        for project in projects:
            if project.callsign is None:
                Project.objects.filter(
                    pk=project.id,
                    callsign=None
                ).update(callsign=callsigns[project.id])
            q = Group.objects.filter(
                project=project,
                short_id=None,
            )
            for group in RangeQuerySetWrapper(q):
                with catchable_atomic():
                    pending_short_id = increment_project_counter(
                        project)
                    updated = Group.objects.filter(
                        pk=group.id,
                        short_id=None
                    ).update(short_id=pending_short_id)
                    if updated == 0:
                        raise RollbackLocally()
Code example #31
File: rsmanage.py Project: phim4viet/RunestoneServer
def initdb(config, list_tables, reset, fake, force):
    """Initialize and optionally reset the database"""
    os.chdir(findProjectRoot())
    if not os.path.exists(DBSDIR):
        click.echo("Making databases folder")
        os.mkdir(DBSDIR)

    if not os.path.exists(PRIVATEDIR):
        click.echo("Making private directory for auth")
        os.mkdir(PRIVATEDIR)

    if reset:
        if not force:
            click.confirm(
                "Resetting the database will delete the database and the contents of the databases folder.  Are you sure?",
                default=False,
                abort=True,
                prompt_suffix=": ",
                show_default=True,
                err=False,
            )
        res = subprocess.call(
            "dropdb --if-exists --host={} --username={} {}".format(
                config.dbhost, config.dbuser, config.dbname
            ),
            shell=True,
        )
        if res == 0:
            res = subprocess.call(
                "createdb --echo --host={} --username={} {}".format(
                    config.dbhost, config.dbuser, config.dbname
                ),
                shell=True,
            )
        else:
            click.echo("Failed to drop the database. Do you have permission?")
            sys.exit(1)

        click.echo("Removing all files in databases/")
        table_migrate_prefix = "runestone_"
        if config.conf == "test":
            table_migrate_prefix = "test_runestone_"
        for the_file in os.listdir(DBSDIR):
            file_path = os.path.join(DBSDIR, the_file)
            try:
                if os.path.isfile(file_path) and file_path.startswith(
                    os.path.join(DBSDIR, table_migrate_prefix)
                ):
                    print("removing ", file_path)
                    os.unlink(file_path)
            except Exception as e:
                print(e)

    if len(os.listdir("{}/databases".format(APP_PATH))) > 1 and not fake and not force:
        click.confirm(
            "It appears you already have database migration information. Do you want to proceed?",
            default=False,
            abort=True,
            prompt_suffix=": ",
            show_default=True,
            err=False,
        )

    click.echo(
        message="Initializing the database", file=None, nl=True, err=False, color=None
    )

    if fake:
        os.environ["WEB2PY_MIGRATE"] = "fake"

    list_tables = "-A --list_tables" if config.verbose or list_tables else ""
    cmd = "python web2py.py --no-banner -S {} -M -R {}/rsmanage/initialize_tables.py {}".format(
        APP, APP_PATH, list_tables
    )
    click.echo("Running: {}".format(cmd))
    res = subprocess.call(cmd, shell=True)

    if res != 0:
        click.echo(message="Database Initialization Failed")
Code example #32
    def init_root(project_name, directory):
        """Takes in project name to create a single directory."""

        # if statement that uses the click.Choice option
        if directory == "project":
            directory = paths["all_projects"]
            os.chdir(directory)

        if directory == "app":
            directory = os.path.join(paths["all_projects"], paths["apps"])
            os.chdir(directory)

        click.echo(sep)
        click.echo()
        click.echo("'ere's where you're at...")
        dir_picker(os.getcwd(), "dir_list")
        click.echo(sep)

        click.echo()
        click.echo(f"So, you wanna create a root for {project_name}?")
        click.echo(sep)
        # TODO: confirmation
        click.echo()
        click.echo("This is where it shall be created...")
        click.echo(os.getcwd())
        click.echo(sep)
Code example #33
File: lint.py Project: marc9595/arrow
def python_numpydoc(symbols=None, whitelist=None, blacklist=None):
    """Run numpydoc linter on python.

    Pyarrow must be available for import.
    """
    logger.info("Running python docstring linters")
    # by default try to run on all pyarrow package
    symbols = symbols or {
        'pyarrow',
        'pyarrow.compute',
        'pyarrow.csv',
        'pyarrow.dataset',
        'pyarrow.feather',
        'pyarrow.flight',
        'pyarrow.fs',
        'pyarrow.gandiva',
        'pyarrow.ipc',
        'pyarrow.json',
        'pyarrow.orc',
        'pyarrow.parquet',
        'pyarrow.plasma',
        'pyarrow.types',
    }
    try:
        numpydoc = NumpyDoc(symbols)
    except RuntimeError as e:
        logger.error(str(e))
        yield LintResult(success=False)
        return

    results = numpydoc.validate(
        # limit the validation scope to the pyarrow package
        from_package='pyarrow',
        rules_whitelist=whitelist,
        rules_blacklist=blacklist
    )

    if len(results) == 0:
        yield LintResult(success=True)
        return

    number_of_violations = 0
    for obj, result in results:
        errors = result['errors']

        # inspect doesn't play nice with cython generated source code,
        # so use a hacky way to represent a proper __qualname__
        doc = getattr(obj, '__doc__', '')
        name = getattr(obj, '__name__', '')
        qualname = getattr(obj, '__qualname__', '')
        module = getattr(obj, '__module__', '')
        instance = getattr(obj, '__self__', '')
        if instance:
            klass = instance.__class__.__name__
        else:
            klass = ''

        try:
            cython_signature = doc.splitlines()[0]
        except Exception:
            cython_signature = ''

        desc = '.'.join(filter(None, [module, klass, qualname or name]))

        click.echo()
        click.echo(click.style(desc, bold=True, fg='yellow'))
        if cython_signature:
            qualname_with_signature = '.'.join([module, cython_signature])
            click.echo(
                click.style(
                    '-> {}'.format(qualname_with_signature),
                    fg='yellow'
                )
            )

        for error in errors:
            number_of_violations += 1
            click.echo('{}: {}'.format(*error))

    msg = 'Total number of docstring violations: {}'.format(
        number_of_violations
    )
    click.echo()
    click.echo(click.style(msg, fg='red'))

    yield LintResult(success=False)
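
python_numpydoc() above is a generator that yields LintResult objects; a hedged sketch of how a caller might drain it and turn the outcome into an exit status (the symbol set is illustrative, and pyarrow must be importable):

results = list(python_numpydoc(symbols={'pyarrow.types'}))
if not all(result.success for result in results):
    raise SystemExit(1)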
Code example #34
File: cleanup.py Project: webZW/sentry
def cleanup(days, project, concurrency, silent, model, router, timed):
    """Delete a portion of trailing data based on creation date.

    All data that is older than `--days` will be deleted.  The default for
    this is 30 days.  In the default setting all projects will be truncated
    but if you have a specific project you want to limit this to this can be
    done with the `--project` flag which accepts a project ID or a string
    with the form `org/project` where both are slugs.
    """
    if concurrency < 1:
        click.echo('Error: Minimum concurrency is 1', err=True)
        raise click.Abort()

    os.environ['_SENTRY_CLEANUP'] = '1'

    # Make sure we fork off multiprocessing pool
    # before we import or configure the app
    from multiprocessing import Process, JoinableQueue as Queue

    pool = []
    task_queue = Queue(1000)
    for _ in xrange(concurrency):
        p = Process(target=multiprocess_worker, args=(task_queue,))
        p.daemon = True
        p.start()
        pool.append(p)

    from sentry.runner import configure
    configure()

    from django.db import router as db_router
    from sentry.app import nodestore
    from sentry.db.deletion import BulkDeleteQuery
    from sentry import models

    if timed:
        import time
        from sentry.utils import metrics
        start_time = time.time()

    # list of models which this query is restricted to
    model_list = {m.lower() for m in model}

    def is_filtered(model):
        if router is not None and db_router.db_for_write(model) != router:
            return True
        if not model_list:
            return False
        return model.__name__.lower() not in model_list

    # Deletions that use `BulkDeleteQuery` (and don't need to worry about child relations)
    # (model, datetime_field, order_by)
    BULK_QUERY_DELETES = [
        (models.EventMapping, 'date_added', '-date_added'),
        (models.EventAttachment, 'date_added', None),
        (models.UserReport, 'date_added', None),
        (models.GroupEmailThread, 'date', None),
        (models.GroupRuleStatus, 'date_added', None),
    ] + EXTRA_BULK_QUERY_DELETES

    # Deletions that use the `deletions` code path (which handles their child relations)
    # (model, datetime_field, order_by)
    DELETES = (
        (models.Event, 'datetime', 'datetime'),
        (models.Group, 'last_seen', 'last_seen'),
    )

    if not silent:
        click.echo('Removing expired values for LostPasswordHash')

    if is_filtered(models.LostPasswordHash):
        if not silent:
            click.echo('>> Skipping LostPasswordHash')
    else:
        models.LostPasswordHash.objects.filter(
            date_added__lte=timezone.now() - timedelta(hours=48)
        ).delete()

    if not silent:
        click.echo('Removing expired values for OrganizationMember')

    if is_filtered(models.OrganizationMember):
        if not silent:
            click.echo('>> Skipping OrganizationMember')
    else:
        expired_threshold = timezone.now() - timedelta(days=days)
        models.OrganizationMember.delete_expired(expired_threshold)

    for model in [models.ApiGrant, models.ApiToken]:
        if not silent:
            click.echo(u'Removing expired values for {}'.format(model.__name__))

        if is_filtered(model):
            if not silent:
                click.echo(u'>> Skipping {}'.format(model.__name__))
        else:
            queryset = model.objects.filter(
                expires_at__lt=(timezone.now() - timedelta(days=API_TOKEN_TTL_IN_DAYS)),
            )

            # SentryAppInstallations are associated to ApiTokens. We're okay
            # with these tokens sticking around so that the Integration can
            # refresh them, but all other non-associated tokens should be
            # deleted.
            if model is models.ApiToken:
                queryset = queryset.filter(sentry_app_installation__isnull=True)

            queryset.delete()

    project_id = None
    if project:
        click.echo(
            "Bulk NodeStore deletion not available for project selection", err=True)
        project_id = get_project(project)
        if project_id is None:
            click.echo('Error: Project not found', err=True)
            raise click.Abort()
    else:
        if not silent:
            click.echo("Removing old NodeStore values")

        cutoff = timezone.now() - timedelta(days=days)
        try:
            nodestore.cleanup(cutoff)
        except NotImplementedError:
            click.echo(
                "NodeStore backend does not support cleanup operation", err=True)

    for bqd in BULK_QUERY_DELETES:
        if len(bqd) == 4:
            model, dtfield, order_by, chunk_size = bqd
        else:
            chunk_size = 10000
            model, dtfield, order_by = bqd

        if not silent:
            click.echo(
                u"Removing {model} for days={days} project={project}".format(
                    model=model.__name__,
                    days=days,
                    project=project or '*',
                )
            )
        if is_filtered(model):
            if not silent:
                click.echo('>> Skipping %s' % model.__name__)
        else:
            BulkDeleteQuery(
                model=model,
                dtfield=dtfield,
                days=days,
                project_id=project_id,
                order_by=order_by,
            ).execute(chunk_size=chunk_size)

    for model, dtfield, order_by in DELETES:
        if not silent:
            click.echo(
                u"Removing {model} for days={days} project={project}".format(
                    model=model.__name__,
                    days=days,
                    project=project or '*',
                )
            )

        if is_filtered(model):
            if not silent:
                click.echo('>> Skipping %s' % model.__name__)
        else:
            imp = '.'.join((model.__module__, model.__name__))

            q = BulkDeleteQuery(
                model=model,
                dtfield=dtfield,
                days=days,
                project_id=project_id,
                order_by=order_by,
            )

            for chunk in q.iterator(chunk_size=100):
                task_queue.put((imp, chunk))

            task_queue.join()

    # Clean up FileBlob instances which are no longer used and aren't super
    # recent (as there could be a race between blob creation and reference)
    if not silent:
        click.echo("Cleaning up unused FileBlob references")
    if is_filtered(models.FileBlob):
        if not silent:
            click.echo('>> Skipping FileBlob')
    else:
        cleanup_unused_files(silent)

    # Shut down our pool
    for _ in pool:
        task_queue.put(_STOP_WORKER)

    # And wait for it to drain
    for p in pool:
        p.join()

    if timed:
        duration = int(time.time() - start_time)
        metrics.timing('cleanup.duration', duration, instance=router, sample_rate=1.0)
        click.echo("Clean up took %s second(s)." % duration)
Code example #35
def main(backup, write_to_disk, password, dump_all, dump_raw):
    """Reads device information out from an sqlite3 DB.
     If the given file is an Android backup (.ab), the database
     will be extracted automatically.
     If the given file is an iOS backup, the tokens will be
     extracted (and decrypted if needed) automatically.
    """
    def read_miio_database(tar):
        DBFILE = "apps/com.xiaomi.smarthome/db/miio2.db"
        try:
            db = tar.extractfile(DBFILE)
        except KeyError as ex:
            click.echo("Unable to find miio database file %s: %s" % (
                DBFILE, ex))
            return []
        if write_to_disk:
            file = write_to_disk
        else:
            file = tempfile.NamedTemporaryFile()
        with file as fp:
            click.echo("Saving database to %s" % fp.name)
            fp.write(db.read())

            return list(reader.read_tokens(fp.name))

    def read_yeelight_database(tar):
        DBFILE = "apps/com.yeelight.cherry/sp/miot.xml"
        _LOGGER.info("Trying to read %s", DBFILE)
        try:
            db = tar.extractfile(DBFILE)
        except KeyError as ex:
            click.echo("Unable to find yeelight database file %s: %s" % (
                DBFILE, ex))
            return []

        return list(read_android_yeelight(db))

    devices = []
    reader = BackupDatabaseReader(dump_raw)
    if backup.endswith(".ab"):
        try:
            from android_backup import AndroidBackup
        except ModuleNotFoundError:
            click.echo("You need to install android_backup to extract "
                       "tokens from Android backup files.")
            return

        with AndroidBackup(backup, stream=False) as f:
            tar = f.read_data(password)

            devices.extend(read_miio_database(tar))

            devices.extend(read_yeelight_database(tar))
    else:
        devices = list(reader.read_tokens(backup))

    for dev in devices:
        if dev.ip or dump_all:
            click.echo("%s\n"
                       "\tModel: %s\n"
                       "\tIP address: %s\n"
                       "\tToken: %s\n"
                       "\tMAC: %s" % (dev.name, dev.model,
                                      dev.ip, dev.token, dev.mac))
            if dump_raw:
                click.echo(dev)
Code example #36
File: TCPServer.py Project: ZhengC1/Networking
#!/usr/local/bin/python

from click import echo
from socket import *
from multiprocessing import Process
import time

server_port = 15020
server_socket = socket(AF_INET, SOCK_STREAM)

server_socket.bind(('',server_port))

server_socket.listen(1)
echo("The Server is ready to receive")

def connection(connection_socket):
    received_data = b''
    while received_data != b'quit':
        received_data = connection_socket.recv(1024)
        connection_socket.send(received_data.upper())
    connection_socket.close()

while True:
    connection_socket, addr = server_socket.accept()
    p = Process(target=connection, args=(connection_socket,))
    p.start()
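
A hedged companion sketch for exercising the echo server above: a client that connects on the same port, sends a line, and reads back the upper-cased reply (host and messages are illustrative).

from socket import socket, AF_INET, SOCK_STREAM

client_socket = socket(AF_INET, SOCK_STREAM)
client_socket.connect(('localhost', 15020))

client_socket.send(b'hello')
print(client_socket.recv(1024))   # b'HELLO'

client_socket.send(b'quit')       # the handler process exits its loop on this message
print(client_socket.recv(1024))   # b'QUIT'
client_socket.close()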


Code example #37
File: cli.py Project: youngminz/pyhometax
def main(args=None):
    """Console script for pyhometax."""
    click.echo("Replace this message by putting your code into "
               "pyhometax.cli.main")
    click.echo("See click documentation at https://click.palletsprojects.com/")
    return 0
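
This is the unmodified cookiecutter stub; a hedged sketch of the kind of command it is meant to be replaced with (the option and greeting are illustrative, not part of pyhometax):

import click

@click.command()
@click.option('--name', default='world', help='Who to greet.')
def main(name):
    """Console script for pyhometax."""
    click.echo(f"Hello, {name}!")
    return 0

if __name__ == '__main__':
    main()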
Code example #38
File: rsmanage.py Project: phim4viet/RunestoneServer
def inituser(
    config,
    instructor,
    fromfile,
    username,
    password,
    first_name,
    last_name,
    email,
    course,
    ignore_dupes,
):
    """Add a user (or users from a csv file)"""
    os.chdir(findProjectRoot())
    mess = [
        "Success",
        "Value Error -- check the format of your CSV file",
        "Duplicate User -- Check your data or use --ignore_dupes if you are adding students to an existing CSV",
        "Unknown Error -- check the format of your CSV file",
    ]
    if fromfile:
        # if fromfile then be sure to get the full path name NOW.
        # csv file should be username, email, first_name, last_name, password, course
        # users from a csv cannot be instructors
        for line in csv.reader(fromfile):
            if len(line) != 6:
                click.echo("Not enough data to create a user.  Lines must be")
                click.echo("username, email, first_name, last_name, password, course")
                exit(1)
            if "@" not in line[1]:
                click.echo("emails should have an @ in them in column 2")
                exit(1)
            userinfo = {}
            userinfo["username"] = line[0]
            userinfo["password"] = line[4]
            userinfo["first_name"] = line[2]
            userinfo["last_name"] = line[3]
            userinfo["email"] = line[1]
            userinfo["course"] = line[5]
            userinfo["instructor"] = False
            os.environ["RSM_USERINFO"] = json.dumps(userinfo)
            res = subprocess.call(
                "python web2py.py --no-banner -S runestone -M -R applications/runestone/rsmanage/makeuser.py",
                shell=True,
            )
            if res != 0:
                click.echo(
                    "Failed to create user {} error {}".format(line[0], mess[res])
                )
                if res == 2 and ignore_dupes:
                    click.echo(f"ignoring duplicate user {userinfo['username']}")
                    continue
                else:
                    exit(res)

    else:
        userinfo = {}
        userinfo["username"] = username or click.prompt("Username")
        userinfo["password"] = password or click.prompt("Password", hide_input=True)
        userinfo["first_name"] = first_name or click.prompt("First Name")
        userinfo["last_name"] = last_name or click.prompt("Last Name")
        userinfo["email"] = email or click.prompt("email address")
        userinfo["course"] = course or click.prompt("course name")
        if not instructor:
            if (
                username and course
            ):  # user has supplied other info via CL parameter safe to assume False
                userinfo["instructor"] = False
            else:
                userinfo["instructor"] = click.confirm(
                    "Make this user an instructor", default=False
                )

        os.environ["RSM_USERINFO"] = json.dumps(userinfo)
        res = subprocess.call(
            "python web2py.py --no-banner -S runestone -M -R applications/runestone/rsmanage/makeuser.py",
            shell=True,
        )
        if res != 0:
            click.echo(
                "Failed to create user {} error {}. Fix your data and try again. Use --verbose for more detail".format(
                    userinfo["username"], res
                )
            )
            exit(1)
        else:
            click.echo("Success")
Code example #39
File: rsmanage.py Project: phim4viet/RunestoneServer
def addcourse(
    config,
    course_name,
    basecourse,
    start_date,
    python3,
    login_required,
    institution,
    language,
    host,
    allow_pairs,
):
    """Create a course in the database"""

    os.chdir(findProjectRoot())  # change to a known location
    eng = create_engine(config.dburl)
    done = False
    if course_name:
        use_defaults = True
    else:
        use_defaults = False
    while not done:
        if not course_name:
            course_name = click.prompt("Course Name")
        if not python3 and not use_defaults:
            python3 = (
                "T" if click.confirm("Use Python3 style syntax?", default="T") else "F"
            )
        else:
            python3 = "T" if python3 else "F"
        if not basecourse and not use_defaults:
            basecourse = click.prompt("Base Course")
        if not start_date and not use_defaults:
            start_date = click.prompt("Start Date YYYY-MM-DD")
        if not institution and not use_defaults:
            institution = click.prompt("Your institution")
        if not login_required and not use_defaults:
            login_required = (
                "T" if click.confirm("Require users to log in", default="T") else "F"
            )
        else:
            login_required = "T" if login_required else "F"
        if not allow_pairs and not use_defaults:
            allow_pairs = (
                "T"
                if click.confirm("Enable pair programming support", default=False)
                else "F"
            )
        else:
            allow_pairs = "T" if allow_pairs else "F"

        res = eng.execute(
            "select id from courses where course_name = '{}'".format(course_name)
        ).first()
        if not res:
            done = True
        else:
            click.confirm(
                "Course {} already exists. Continue with a different name?".format(
                    course_name
                ),
                default=True,
                abort=True,
            )

    eng.execute(
        """insert into courses (course_name, base_course, python3, term_start_date, login_required, institution, allow_pairs)
                values ('{}', '{}', '{}', '{}', '{}', '{}', '{}')
                """.format(
            course_name,
            basecourse,
            python3,
            start_date,
            login_required,
            institution,
            allow_pairs,
        )
    )

    click.echo("Course added to DB successfully")
Code example #40
File: rsmanage.py Project: phim4viet/RunestoneServer
def addeditor(config, username, basecourse):
    """
    Add an existing user as an instructor for a course
    """
    eng = create_engine(config.dburl)
    res = eng.execute("select id from auth_user where username=%s", username).first()
    if res:
        userid = res[0]
    else:
        click.echo("Sorry, that user does not exist", color="red")
        sys.exit(-1)

    res = eng.execute(
        "select id from courses where course_name=%s and base_course=%s",
        basecourse,
        basecourse,
    ).first()
    if not res:
        click.echo("Sorry, that base course does not exist", color="red")
        sys.exit(-1)

    # if needed insert a row into auth_membership
    res = eng.execute("select id from auth_group where role='editor'").first()
    if res:
        role = res[0]
    else:
        click.echo(
            "Sorry, your system does not have the editor role setup -- this is bad",
            color="red",
        )
        sys.exit(-1)

    res = eng.execute(
        "select * from auth_membership where user_id=%s and group_id=%s", userid, role
    ).first()
    if not res:
        eng.execute(
            "insert into auth_membership (user_id, group_id) values (%s, %s)",
            userid,
            role,
        )
        click.echo("made {} an editor".format(username), color="green")
    else:
        click.echo("{} is already an editor".format(username), color="red")

    # if needed insert a row into user_courses
    res = eng.execute(
        "select * from editor_basecourse where editor=%s and base_course=%s ",
        userid,
        basecourse,
    ).first()
    if not res:
        eng.execute(
            "insert into editor_basecourse (editor, base_course) values (%s, %s)",
            userid,
            basecourse,
        )
        click.echo(
            "made {} an editor for {}".format(username, basecourse), color="green"
        )
    else:
        click.echo(
            "{} is already an editor for {}".format(username, basecourse), color="red"
        )
Code example #41
File: util.py Project: code42/code42cli
def _handle_interrupts(self, sig, frame):
    if not self.interrupted:
        self.interrupted = True
        echo(f"\n{self.warning}\n{self.exit_instructions}", err=True)
    else:
        exit()
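
A hedged sketch of how a two-stage SIGINT handler like _handle_interrupts() is typically wired up; the class, messages, and exit code here are illustrative, not code42cli's actual implementation:

import signal
import sys
from click import echo

class InterruptHandler:
    def __init__(self):
        self.interrupted = False
        self.warning = "Cancelling... finishing the current batch first."
        self.exit_instructions = "Press CTRL-C again to exit immediately."

    def _handle_interrupts(self, sig, frame):
        # First CTRL-C warns and keeps going; a second one exits.
        if not self.interrupted:
            self.interrupted = True
            echo(f"\n{self.warning}\n{self.exit_instructions}", err=True)
        else:
            sys.exit(1)

handler = InterruptHandler()
signal.signal(signal.SIGINT, handler._handle_interrupts)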
Code example #42
File: rsmanage.py Project: phim4viet/RunestoneServer
def build(config, course, repo, skipclone):
    """Build the book for an existing course"""
    os.chdir(findProjectRoot())  # change to a known location
    eng = create_engine(config.dburl)
    res = eng.execute(
        "select id from courses where course_name = '{}'".format(course)
    ).first()
    if not res:
        click.echo(
            "Error:  The course {} must already exist in the database -- use rsmanage addcourse".format(
                course
            ),
            color="red",
        )
        exit(1)

    os.chdir(BUILDDIR)
    if not skipclone:
        res = subprocess.call("git clone {}".format(repo), shell=True)
        if res != 0:
            click.echo(
                "Cloning the repository failed, please check the URL and try again"
            )
            exit(1)

    proj_dir = os.path.basename(repo).replace(".git", "")
    click.echo("Switching to project dir {}".format(proj_dir))
    os.chdir(proj_dir)
    paver_file = os.path.join("..", "..", "custom_courses", course, "pavement.py")
    click.echo("Checking for pavement {}".format(paver_file))
    if os.path.exists(paver_file):
        shutil.copy(paver_file, "pavement.py")
    else:
        cont = click.confirm("WARNING -- NOT USING CUSTOM PAVEMENT FILE - continue")
        if not cont:
            sys.exit()

    try:
        if os.path.exists("pavement.py"):
            sys.path.insert(0, os.getcwd())
            from pavement import options, dest
        else:
            click.echo(
                "I can't find a pavement.py file in {} you need that to build".format(
                    os.getcwd()
                )
            )
            exit(1)
    except ImportError as e:
        click.echo("You do not appear to have a good pavement.py file.")
        print(e)
        exit(1)

    if options.project_name != course:
        click.echo(
            "Error: {} and {} do not match.  Your course name needs to match the project_name in pavement.py".format(
                course, options.project_name
            )
        )
        exit(1)

    res = subprocess.call("runestone build --all", shell=True)
    if res != 0:
        click.echo("building the book failed, check the log for errors and try again")
        exit(1)
    click.echo("Build succeedeed... Now deploying to static")
    if dest != "../../static":
        click.echo(
            "Incorrect deployment directory.  dest should be ../../static in pavement.py"
        )
        exit(1)

    res = subprocess.call("runestone deploy", shell=True)
    if res == 0:
        click.echo("Success! Book deployed")
    else:
        click.echo("Deploy failed, check the log to see what went wrong.")

    click.echo("Cleaning up")
    os.chdir("..")
    subprocess.call("rm -rf {}".format(proj_dir), shell=True)
Code example #43
def get_blueprint_list(name, filter_by, limit, offset, quiet, all_items, out):
    """Get the blueprints, optionally filtered by a string"""

    client = get_api_client()

    params = {"length": limit, "offset": offset}
    filter_query = ""
    if name:
        filter_query = get_name_query([name])
    if filter_by:
        filter_query = filter_query + ";(" + filter_by + ")"
    if all_items:
        filter_query += get_states_filter(BLUEPRINT.STATES)
    if filter_query.startswith(";"):
        filter_query = filter_query[1:]

    if filter_query:
        params["filter"] = filter_query

    res, err = client.blueprint.list(params=params)

    if err:
        context = get_context()
        server_config = context.get_server_config()
        pc_ip = server_config["pc_ip"]

        LOG.warning("Cannot fetch blueprints from {}".format(pc_ip))
        return

    if out == "json":
        click.echo(json.dumps(res.json(), indent=4, separators=(",", ": ")))
        return

    json_rows = res.json()["entities"]
    if not json_rows:
        click.echo(highlight_text("No blueprint found !!!\n"))
        return

    if quiet:
        for _row in json_rows:
            row = _row["status"]
            click.echo(highlight_text(row["name"]))
        return

    table = PrettyTable()
    table.field_names = [
        "NAME",
        "BLUEPRINT TYPE",
        "APPLICATION COUNT",
        "PROJECT",
        "STATE",
        "CREATED ON",
        "LAST UPDATED",
        "UUID",
    ]
    for _row in json_rows:
        row = _row["status"]
        metadata = _row["metadata"]
        bp_type = ("Single VM" if "categories" in metadata
                   and "TemplateType" in metadata["categories"]
                   and metadata["categories"]["TemplateType"] == "Vm" else
                   "Multi VM/Pod")

        project = (metadata["project_reference"]["name"]
                   if "project_reference" in metadata else None)

        creation_time = int(metadata["creation_time"]) // 1000000
        last_update_time = int(metadata["last_update_time"]) // 1000000

        table.add_row([
            highlight_text(row["name"]),
            highlight_text(bp_type),
            highlight_text(row["application_count"]),
            highlight_text(project),
            highlight_text(row["state"]),
            highlight_text(time.ctime(creation_time)),
            "{}".format(arrow.get(last_update_time).humanize()),
            highlight_text(row["uuid"]),
        ])
    click.echo(table)
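
The creation and update fields in the metadata are epoch timestamps in microseconds, which is why the snippet divides by 1000000 before formatting. The conversion in isolation, using the same time/arrow calls as above:

import time

import arrow

creation_time_usec = 1609459200000000          # epoch value in microseconds
creation_time = creation_time_usec // 1000000  # truncate to whole seconds

print(time.ctime(creation_time))               # e.g. "Fri Jan  1 00:00:00 2021" (local time)
print(arrow.get(creation_time).humanize())     # e.g. "4 years ago"
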
Code example #44
File: rsmanage.py  Project: phim4viet/RunestoneServer
def echoEnviron(config):
    click.echo("WEB2PY_CONFIG is {}".format(config.conf))
    click.echo("The database URL is configured as {}".format(config.dburl))
    click.echo("DBNAME is {}".format(config.dbname))
Code example #45
File: jeecf.py  Project: cgfly/jeecf-cli
def logout(self):
    config_path = os.environ['HOME'] + "/.jeecf"
    if os.path.exists(config_path):
        os.remove(config_path)
    click.echo("Logout Success!")
Code example #46
def launch_blueprint_simple(
    blueprint_name=None,
    app_name=None,
    blueprint=None,
    profile_name=None,
    patch_editables=True,
    launch_params=None,
    is_brownfield=False,
):
    client = get_api_client()

    if app_name:
        LOG.info("Searching for existing applications with name {}".format(
            app_name))

        res, err = client.application.list(
            params={"filter": "name=={}".format(app_name)})
        if err:
            raise Exception("[{}] - {}".format(err["code"], err["error"]))

        res = res.json()
        total_matches = res["metadata"]["total_matches"]
        if total_matches:
            LOG.debug(res)
            LOG.error(
                "Application Name ({}) is already used.".format(app_name))
            sys.exit(-1)

        LOG.info("No existing application found with name {}".format(app_name))

    if not blueprint:
        if is_brownfield:
            blueprint = get_blueprint(client,
                                      blueprint_name,
                                      is_brownfield=True)
        else:
            blueprint = get_blueprint(client, blueprint_name)

    blueprint_uuid = blueprint.get("metadata", {}).get("uuid", "")
    blueprint_name = blueprint_name or blueprint.get("metadata", {}).get(
        "name", "")

    project_ref = blueprint["metadata"].get("project_reference", {})
    project_uuid = project_ref.get("uuid")
    bp_status = blueprint["status"]["state"]
    if bp_status != "ACTIVE":
        LOG.error(
            "Blueprint is in {} state. Unable to launch it".format(bp_status))
        sys.exit(-1)

    LOG.info("Fetching runtime editables in the blueprint")
    profiles = get_blueprint_runtime_editables(client, blueprint)
    profile = None
    if profile_name is None:
        profile = profiles[0]
    else:
        for app_profile in profiles:
            app_prof_ref = app_profile.get("app_profile_reference", {})
            if app_prof_ref.get("name") == profile_name:
                profile = app_profile

                break
        if not profile:
            raise Exception(
                "No profile found with name {}".format(profile_name))

    runtime_editables = profile.pop("runtime_editables", [])

    launch_payload = {
        "spec": {
            "app_name":
            app_name if app_name else "App-{}-{}".format(
                blueprint_name, int(time.time())),
            "app_description":
            "",
            "app_profile_reference":
            profile.get("app_profile_reference", {}),
            "runtime_editables":
            runtime_editables,
        }
    }

    if runtime_editables and patch_editables:
        runtime_editables_json = json.dumps(runtime_editables,
                                            indent=4,
                                            separators=(",", ": "))
        click.echo(
            "Blueprint editables are:\n{}".format(runtime_editables_json))

        # Check user input
        prompt_cli = bool(not launch_params)
        launch_runtime_vars = parse_launch_runtime_vars(launch_params)
        launch_runtime_substrates = parse_launch_runtime_substrates(
            launch_params)
        launch_runtime_deployments = parse_launch_runtime_deployments(
            launch_params)
        launch_runtime_credentials = parse_launch_runtime_credentials(
            launch_params)

        res, err = client.blueprint.read(blueprint_uuid)
        if err:
            LOG.error("[{}] - {}".format(err["code"], err["error"]))
            sys.exit(-1)

        bp_data = res.json()

        substrate_list = runtime_editables.get("substrate_list", [])
        if substrate_list:
            if not launch_params:
                click.echo("\n\t\t\t", nl=False)
                click.secho("SUBSTRATE LIST DATA", underline=True, bold=True)

            substrate_definition_list = bp_data["status"]["resources"][
                "substrate_definition_list"]
            package_definition_list = bp_data["status"]["resources"][
                "package_definition_list"]
            substrate_name_data_map = {}
            for substrate in substrate_definition_list:
                substrate_name_data_map[substrate["name"]] = substrate

            vm_img_map = {}
            for package in package_definition_list:
                if package["type"] == "SUBSTRATE_IMAGE":
                    vm_img_map[package["name"]] = package["uuid"]

            for substrate in substrate_list:
                if launch_params:
                    new_val = get_val_launch_runtime_substrate(
                        launch_runtime_substrates=launch_runtime_substrates,
                        path=substrate.get("name"),
                        context=substrate.get("context", None),
                    )
                    if new_val:
                        substrate["value"] = new_val

                else:
                    provider_type = substrate["type"]
                    provider_cls = get_provider(provider_type)
                    provider_cls.get_runtime_editables(
                        substrate,
                        project_uuid,
                        substrate_name_data_map[substrate["name"]],
                        vm_img_map,
                    )

        bp_runtime_variables = runtime_editables.get("variable_list", [])

        # Pop out action variables (Day 2 action variables) because they can't be given at blueprint launch time
        variable_list = []
        for _var in bp_runtime_variables:
            _var_context = _var["context"]
            context_list = _var_context.split(".")

            # If variable is defined under runbook(action), ignore it
            if len(context_list) >= 3 and context_list[-3] == "runbook":
                continue

            variable_list.append(_var)

        if variable_list:
            if not launch_params:
                click.echo("\n\t\t\t", nl=False)
                click.secho("VARIABLE LIST DATA", underline=True, bold=True)
            for variable in variable_list:
                context = variable["context"]
                editables = variable["value"]
                hide_input = variable.get("type") == "SECRET"
                get_field_values(
                    editables,
                    context,
                    path=variable.get("name", ""),
                    bp_data=bp_data["status"]["resources"],
                    hide_input=hide_input,
                    prompt=prompt_cli,
                    launch_runtime_vars=launch_runtime_vars,
                )

        deployment_list = runtime_editables.get("deployment_list", [])
        # Deployments can only be supplied non-interactively for now
        if deployment_list and launch_params:
            for deployment in deployment_list:
                new_val = get_val_launch_runtime_deployment(
                    launch_runtime_deployments=launch_runtime_deployments,
                    path=deployment.get("name"),
                    context=deployment.get("context", None),
                )
                if new_val:
                    deployment["value"] = new_val

        credential_list = runtime_editables.get("credential_list", [])
        # Credentials can only be supplied non-interactively for now
        if credential_list and launch_params:
            for credential in credential_list:
                new_val = get_val_launch_runtime_credential(
                    launch_runtime_credentials=launch_runtime_credentials,
                    path=credential.get("name"),
                    context=credential.get("context", None),
                )
                if new_val:
                    credential["value"] = new_val

        runtime_editables_json = json.dumps(runtime_editables,
                                            indent=4,
                                            separators=(",", ": "))
        LOG.info("Updated blueprint editables are:\n{}".format(
            runtime_editables_json))

    res, err = client.blueprint.launch(blueprint_uuid, launch_payload)
    if not err:
        LOG.info("Blueprint {} queued for launch".format(blueprint_name))
    else:
        raise Exception("[{}] - {}".format(err["code"], err["error"]))
    response = res.json()
    launch_req_id = response["status"]["request_id"]

    poll_launch_status(client, blueprint_uuid, launch_req_id)
Code example #47
def print_status(status):
    if status.get("title"):
        click.echo("Title: {}".format(status["title"]))

    if status.get("current_time"):
        current = human_time(status["current_time"])
        if status.get("duration"):
            duration = human_time(status["duration"])
            remaining = human_time(status["remaining"])
            click.echo("Time: {} / {} ({}%)".format(current, duration,
                                                    status["progress"]))
            click.echo("Remaining time: {}".format(remaining))
        else:
            click.echo("Time: {}".format(current))

    if status.get("player_state"):
        click.echo("State: {}".format(status["player_state"]))

    if status.get("volume_level"):
        click.echo("Volume: {}".format(status["volume_level"]))
Code example #48
def describe_bp(blueprint_name, out):
    """Displays blueprint data"""

    client = get_api_client()
    bp = get_blueprint(client, blueprint_name, all=True)

    res, err = client.blueprint.read(bp["metadata"]["uuid"])
    if err:
        raise Exception("[{}] - {}".format(err["code"], err["error"]))

    bp = res.json()

    if out == "json":
        bp.pop("status", None)
        click.echo(json.dumps(bp, indent=4, separators=(",", ": ")))
        return

    click.echo("\n----Blueprint Summary----\n")
    click.echo("Name: " + highlight_text(blueprint_name) + " (uuid: " +
               highlight_text(bp["metadata"]["uuid"]) + ")")
    click.echo("Description: " + highlight_text(bp["status"]["description"]))
    click.echo("Status: " + highlight_text(bp["status"]["state"]))
    click.echo("Owner: " +
               highlight_text(bp["metadata"]["owner_reference"]["name"]),
               nl=False)
    click.echo(" Project: " +
               highlight_text(bp["metadata"]["project_reference"]["name"]))

    created_on = int(bp["metadata"]["creation_time"]) // 1000000
    past = arrow.get(created_on).humanize()
    click.echo("Created: {} ({})".format(
        highlight_text(time.ctime(created_on)), highlight_text(past)))
    bp_resources = bp.get("status").get("resources", {})
    profile_list = bp_resources.get("app_profile_list", [])
    click.echo("Application Profiles [{}]:".format(
        highlight_text(len(profile_list))))
    for profile in profile_list:
        profile_name = profile["name"]
        click.echo("\t" + highlight_text(profile_name))

        substrate_ids = [
            dep.get("substrate_local_reference", {}).get("uuid")
            for dep in profile.get("deployment_create_list", [])
        ]
        substrate_types = [
            sub.get("type")
            for sub in bp_resources.get("substrate_definition_list")
            if sub.get("uuid") in substrate_ids
        ]
        click.echo("\tSubstrates[{}]:".format(
            highlight_text(len(substrate_types))))
        click.echo("\t\t{}".format(highlight_text(", ".join(substrate_types))))

        click.echo("\tActions[{}]:".format(
            highlight_text(len(profile["action_list"]))))
        for action in profile["action_list"]:
            action_name = action["name"]
            if action_name.startswith("action_"):
                prefix_len = len("action_")
                action_name = action_name[prefix_len:]
            click.echo("\t\t" + highlight_text(action_name))

    service_list = (bp.get("status").get("resources",
                                         {}).get("service_definition_list",
                                                 []))
    click.echo("Services [{}]:".format(highlight_text(len(service_list))))
    for service in service_list:
        service_name = service["name"]
        click.echo("\t" + highlight_text(service_name))
Code example #49
File: argument.py  Project: saisai/tutorial
def hello(name):
    click.echo(f'Hello {name}')
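
Only the function itself is shown above; in a runnable Click script the argument is declared with decorators, roughly like this sketch:

import click


@click.command()
@click.argument('name')
def hello(name):
    click.echo(f'Hello {name}')


if __name__ == '__main__':
    hello()
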
Code example #50
File: jeecf.py  Project: cgfly/jeecf-cli
def _get_error_message(self, resp):
    click.echo(f"Error: {resp['errorMessage']}")
Code example #51
File: cleanup.py  Project: zhangmuxi/sentry
def cleanup(days, project, concurrency, silent):
    """Delete a portion of trailing data based on creation date.

    All data that is older than `--days` will be deleted.  The default for
    this is 30 days.  In the default setting all projects will be truncated
    but if you have a specific project you want to limit this to this can be
    done with the `--project` flag which accepts a project ID or a string
    with the form `org/project` where both are slugs.
    """
    from sentry.app import nodestore
    from sentry.db.deletion import BulkDeleteQuery
    from sentry.models import (
        Event, EventMapping, Group, GroupRuleStatus, GroupTagValue,
        LostPasswordHash, TagValue, GroupEmailThread,
    )

    # these models should be safe to delete without cascades, in order
    BULK_DELETES = (
        (GroupRuleStatus, 'date_added'),
        (GroupTagValue, 'last_seen'),
        (TagValue, 'last_seen'),
        (GroupEmailThread, 'date'),
    )

    GENERIC_DELETES = (
        (Event, 'datetime'),
        (Group, 'last_seen'),
    )

    if not silent:
        click.echo("Removing expired values for LostPasswordHash")
    LostPasswordHash.objects.filter(
        date_added__lte=timezone.now() - timedelta(hours=48)
    ).delete()

    project_id = None
    if project:
        click.echo("Bulk NodeStore deletion not available for project selection", err=True)
        project_id = get_project(project)
        if project_id is None:
            click.echo('Error: Project not found', err=True)
            raise click.Abort()
    else:
        if not silent:
            click.echo("Removing old NodeStore values")
        cutoff = timezone.now() - timedelta(days=days)
        try:
            nodestore.cleanup(cutoff)
        except NotImplementedError:
            click.echo("NodeStore backend does not support cleanup operation", err=True)

    for model, dtfield in BULK_DELETES:
        if not silent:
            click.echo("Removing {model} for days={days} project={project}".format(
                model=model.__name__,
                days=days,
                project=project or '*',
            ))
        BulkDeleteQuery(
            model=model,
            dtfield=dtfield,
            days=days,
            project_id=project_id,
        ).execute()

    # EventMapping is fairly expensive and is special cased as it's likely you
    # won't need a reference to an event for nearly as long
    if not silent:
        click.echo("Removing expired values for EventMapping")
    BulkDeleteQuery(
        model=EventMapping,
        dtfield='date_added',
        days=min(days, 7),
        project_id=project_id,
    ).execute()

    # Clean up FileBlob instances which are no longer used and aren't super
    # recent (as there could be a race between blob creation and reference)
    if not silent:
        click.echo("Cleaning up unused FileBlob references")
    cleanup_unused_files(silent)

    for model, dtfield in GENERIC_DELETES:
        if not silent:
            click.echo("Removing {model} for days={days} project={project}".format(
                model=model.__name__,
                days=days,
                project=project or '*',
            ))
        BulkDeleteQuery(
            model=model,
            dtfield=dtfield,
            days=days,
            project_id=project_id,
        ).execute_generic()
Code example #52
def cast(settings, video_url, subtitles, force_default, random_play, no_subs,
         no_playlist, ytdl_option):
    controller = "default" if force_default or ytdl_option else None
    playlist_playback = False
    st_thr = su_thr = subs = None
    cst, stream = setup_cast(settings["device"],
                             video_url=video_url,
                             prep="app",
                             controller=controller,
                             ytdl_options=ytdl_option)
    media_is_image = stream.guessed_content_category == "image"

    if stream.is_local_file:
        st_thr = create_server_thread(video_url,
                                      stream.local_ip,
                                      stream.port,
                                      stream.guessed_content_type,
                                      single_req=media_is_image)
    elif stream.is_playlist and not (no_playlist and stream.video_id):
        if stream.playlist_length == 0:
            cst.kill(idle_only=True)
            raise CliError("Playlist is empty")
        if not random_play and cst.playlist_capability and stream.playlist_all_ids:
            playlist_playback = True
        else:
            if random_play:
                entry = random.randrange(0, stream.playlist_length)
            else:
                warning("Playlist playback not possible, playing first video")
                entry = 0
            stream.set_playlist_entry(entry)

    if playlist_playback:
        click.echo("Casting remote playlist {}...".format(video_url))
        video_id = stream.video_id or stream.playlist_all_ids[0]
        cst.play_playlist(stream.playlist_id, video_id=video_id)
    else:
        if not subtitles and not no_subs and stream.is_local_file:
            subtitles = hunt_subtitles(video_url)
        if subtitles:
            subs = SubsInfo(subtitles, stream.local_ip, stream.port + 1)
            su_thr = create_server_thread(subs.file,
                                          subs.local_ip,
                                          subs.port,
                                          "text/vtt;charset=utf-8",
                                          single_req=True)

        click.echo("Casting {} file {}...".format(
            "local" if stream.is_local_file else "remote", video_url))
        click.echo('{} "{}" on "{}"...'.format(
            "Showing" if media_is_image else "Playing", stream.video_title,
            cst.cc_name))
        if cst.info_type == "url":
            cst.play_media_url(
                stream.video_url,
                title=stream.video_title,
                content_type=stream.guessed_content_type,
                subtitles=subs.url if subs else None,
                thumb=stream.video_thumbnail,
            )
        elif cst.info_type == "id":
            cst.play_media_id(stream.video_id)
        else:
            raise ValueError("Invalid or undefined info type")

        if stream.is_local_file or subs:
            add_msg = ", press Ctrl+C when done" if stream.is_local_file and not media_is_image else ""
            click.echo("Serving local file(s){}.".format(add_msg))
            while (st_thr and st_thr.is_alive()) or (su_thr
                                                     and su_thr.is_alive()):
                time.sleep(1)
Code example #53
File: db.py  Project: pratclot/ws_flask
def init_db_command():
    """Clear the existing data and create new tables."""
    init_db()
    click.echo("Initialized the database.")
Code example #54
File: cli.py  Project: jonathan-marcus/apprise
def main(body, title, config, attach, urls, notification_type, theme, tag,
         input_format, dry_run, recursion_depth, verbose, disable_async,
         debug, version):
    """
    Send a notification to all of the specified servers, identified by their
    URLs, using the content provided in the title, body and notification-type.

    For a list of all of the supported services and information on how to
    use them, see https://github.com/caronc/apprise
    """
    # Note: Click ignores the return values of functions it wraps, If you
    #       want to return a specific error code, you must call sys.exit()
    #       as you will see below.

    debug = True if debug else False
    if debug:
        # Verbosity must be a minimum of 3
        verbose = 3 if verbose < 3 else verbose

    # Logging
    ch = logging.StreamHandler(sys.stdout)
    if verbose > 3:
        # -vvvv: Most Verbose Debug Logging
        logger.setLevel(logging.TRACE)

    elif verbose > 2:
        # -vvv: Debug Logging
        logger.setLevel(logging.DEBUG)

    elif verbose > 1:
        # -vv: INFO Messages
        logger.setLevel(logging.INFO)

    elif verbose > 0:
        # -v: WARNING Messages
        logger.setLevel(logging.WARNING)

    else:
        # No verbosity means we display ERRORS only AND any deprecation
        # warnings
        logger.setLevel(logging.ERROR)

    # Format our logger
    formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
    ch.setFormatter(formatter)
    logger.addHandler(ch)

    # Update our asyncio logger
    asyncio_logger = logging.getLogger('asyncio')
    for handler in logger.handlers:
        asyncio_logger.addHandler(handler)
    asyncio_logger.setLevel(logger.level)

    if version:
        print_version_msg()
        sys.exit(0)

    # Simple Error Checking
    notification_type = notification_type.strip().lower()
    if notification_type not in NOTIFY_TYPES:
        logger.error(
            'The --notification-type (-n) value of {} is not supported.'
            .format(notification_type))
        # 2 is the same exit code returned by Click if there is a parameter
        # issue.  For consistency, we also return a 2
        sys.exit(2)

    input_format = input_format.strip().lower()
    if input_format not in NOTIFY_FORMATS:
        logger.error(
            'The --input-format (-i) value of {} is not supported.'
            .format(input_format))
        # 2 is the same exit code returned by Click if there is a parameter
        # issue.  For consistency, we also return a 2
        sys.exit(2)

    # Prepare our asset
    asset = AppriseAsset(
        # Our body format
        body_format=input_format,

        # Set the theme
        theme=theme,

        # Async mode is only used for Python v3+ and allows a user to send
        # all of their notifications asynchronously.  This was made an option
        # in case there are problems in the future where it's better that
        # everything runs sequentially/synchronously instead.
        async_mode=disable_async is not True,
    )

    # Create our Apprise object
    a = Apprise(asset=asset, debug=debug, location=ContentLocation.LOCAL)

    # The priorities of what is accepted are parsed in order below:
    #    1. URLs by command line
    #    2. Configuration by command line
    #    3. URLs by environment variable: APPRISE_URLS
    #    4. Configuration by environment variable: APPRISE_CONFIG
    #    5. Default Configuration File(s) (if found)
    #
    if urls:
        if tag:
            # Ignore any tags specified
            logger.warning(
                '--tag (-g) entries are ignored when using specified URLs')
            tag = None

        # Load our URLs (if any defined)
        for url in urls:
            a.add(url)

        if config:
            # Provide a warning to the end user if they specified both
            logger.warning(
                'You defined both URLs and a --config (-c) entry; '
                'Only the URLs will be referenced.')

    elif config:
        # We load our configuration file(s) now only if no URLs were specified
        # Specified config entries trump all
        a.add(AppriseConfig(
            paths=config, asset=asset, recursion=recursion_depth))

    elif os.environ.get('APPRISE_URLS', '').strip():
        logger.debug('Loading provided APPRISE_URLS environment variable')
        if tag:
            # Ignore any tags specified
            logger.warning(
                '--tag (-g) entries are ignored when using specified URLs')
            tag = None

        # Attempt to use our APPRISE_URLS environment variable (if populated)
        a.add(os.environ['APPRISE_URLS'].strip())

    elif os.environ.get('APPRISE_CONFIG', '').strip():
        logger.debug('Loading provided APPRISE_CONFIG environment variable')
        # Fall back to config environment variable (if populated)
        a.add(AppriseConfig(
            paths=os.environ['APPRISE_CONFIG'].strip(),
            asset=asset, recursion=recursion_depth))
    else:
        # Load default configuration
        a.add(AppriseConfig(
            paths=[f for f in DEFAULT_SEARCH_PATHS if isfile(expanduser(f))],
            asset=asset, recursion=recursion_depth))

    if len(a) == 0:
        logger.error(
            'You must specify at least one server URL or populated '
            'configuration file.')
        print_help_msg(main)
        sys.exit(1)

    # Each --tag entry comprises a comma-separated 'AND' list;
    # we OR together each of the --tag AND-sets specified.
    tags = None if not tag else [parse_list(t) for t in tag]

    if not dry_run:
        if body is None:
            logger.trace('No --body (-b) specified; reading from stdin')
            # if no body was specified, then read from STDIN
            body = click.get_text_stream('stdin').read()

        # now print it out
        result = a.notify(
            body=body, title=title, notify_type=notification_type, tag=tags,
            attach=attach)
    else:
        # Number of rows to assume in the terminal.  In future, maybe this can
        # be detected and made dynamic. The actual row count is 80, but 5
        # characters are already reserved for the counter on the left
        rows = 75

        # Initialize our URL response;  This is populated within the for/loop
        # below; but plays a factor at the end when we need to determine if
        # we iterated at least once in the loop.
        url = None

        for idx, server in enumerate(a.find(tag=tags)):
            url = server.url(privacy=True)
            click.echo("{: 3d}. {}".format(
                idx + 1,
                url if len(url) <= rows else '{}...'.format(url[:rows - 3])))
            if server.tags:
                click.echo("{} - {}".format(' ' * 5, ', '.join(server.tags)))

        # Initialize a default response of nothing matched, otherwise
        # if we matched at least one entry, we can return True
        result = None if url is None else True

    if result is None:
        # There were no notifications set.  This is a result of just having
        # empty configuration files and/or being too restrictive when filtering
        # by specific tag(s)

        # Exit code 3 is used since Click uses exit code 2 if there is an
        # error with the parameters specified
        sys.exit(3)

    elif result is False:
        # At least 1 notification service failed to send
        sys.exit(1)

    # else:  We're good!
    sys.exit(0)
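
The tags value built above is a list of AND-groups that are OR'd together, as the comment describes. A small illustrative sketch of that matching rule, independent of Apprise's real parse_list/find implementations:

def tag_matches(server_tags, tag_groups):
    """Return True if any AND-group is fully contained in the server's tags."""
    if tag_groups is None:
        return True  # no tag filtering requested
    server_tags = set(server_tags)
    return any(set(group) <= server_tags for group in tag_groups)


# --tag admin,urgent --tag ops  ->  [["admin", "urgent"], ["ops"]]
print(tag_matches({"admin", "urgent", "email"}, [["admin", "urgent"], ["ops"]]))  # True
print(tag_matches({"email"}, [["admin", "urgent"], ["ops"]]))                     # False
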
Code example #55
def launch_refarch_env(region=None,
                    ami=None,
                    no_confirm=False,
                    node_instance_type=None,
                    gluster_stack=None,
                    keypair=None,
                    public_hosted_zone=None,
                    deployment_type=None,
                    console_port=443,
                    rhsm_user=None,
                    rhsm_password=None,
                    rhsm_pool=None,
                    containerized=None,
                    node_type=None,
                    private_subnet_id1=None,
                    private_subnet_id2=None,
                    private_subnet_id3=None,
                    gluster_volume_type=None,
                    gluster_volume_size=None,
                    node_sg=None,
                    iam_role=None,
                    existing_stack=None,
                    use_cloudformation_facts=False,
                    verbose=0):

  # Need to prompt for the R53 zone:
  if public_hosted_zone is None:
    public_hosted_zone = click.prompt('Hosted DNS zone for accessing the environment')

  if existing_stack is None:
    existing_stack = click.prompt('Specify the name of the existing CloudFormation stack')

  if gluster_stack is None:
    gluster_stack = click.prompt('Specify a unique name for the CNS CloudFormation stack')

  # If no keypair is specified, fail:
  if keypair is None:
    keypair = click.prompt('A SSH keypair must be specified or created')

  # If the user already provided values, don't bother asking again
  if deployment_type in ['openshift-enterprise'] and rhsm_user is None:
    rhsm_user = click.prompt("RHSM username?")
  if deployment_type in ['openshift-enterprise'] and rhsm_password is None:
    rhsm_password = click.prompt("RHSM password?", hide_input=True)
  if deployment_type in ['openshift-enterprise'] and rhsm_pool is None:
    rhsm_pool = click.prompt("RHSM Pool ID or Subscription Name for OpenShift?")

  # Prompt for vars if they are not defined
  if use_cloudformation_facts and iam_role is None:
    iam_role = "Computed by Cloudformations"
  elif iam_role is None:
    iam_role = click.prompt("Specify the IAM Role of the node?")

  if use_cloudformation_facts and node_sg is None:
    node_sg = "Computed by Cloudformations"
  elif node_sg is None:
    node_sg = click.prompt("Specify the Security Group for the nodes?")

  if use_cloudformation_facts and private_subnet_id1 is None:
    private_subnet_id1 = "Computed by Cloudformations"
  elif private_subnet_id1 is None:
    private_subnet_id1 = click.prompt("Specify the first private subnet for the nodes?")

  if use_cloudformation_facts and private_subnet_id2 is None:
    private_subnet_id2 = "Computed by Cloudformations"
  elif private_subnet_id2 is None:
    private_subnet_id2 = click.prompt("Specify the second private subnet for the nodes?")

  if use_cloudformation_facts and private_subnet_id3 is None:
    private_subnet_id3 = "Computed by Cloudformations"
  elif private_subnet_id3 is None:
    private_subnet_id3 = click.prompt("Specify the third private subnet for the nodes?")

  # Hidden facts for infrastructure.yaml
  create_key = "no"
  create_vpc = "no"
  add_node = "yes"
  node_type = "gluster"

  # Display information to the user about their choices
  if use_cloudformation_facts:
      click.echo('Configured values:')
      click.echo('\tami: %s' % ami)
      click.echo('\tregion: %s' % region)
      click.echo('\tgluster_stack: %s' % gluster_stack)
      click.echo('\tnode_instance_type: %s' % node_instance_type)
      click.echo('\tgluster_volume_type: %s' % gluster_volume_type)
      click.echo('\tgluster_volume_size: %s' % gluster_volume_size)
      click.echo('\tkeypair: %s' % keypair)
      click.echo('\tdeployment_type: %s' % deployment_type)
      click.echo('\tpublic_hosted_zone: %s' % public_hosted_zone)
      click.echo('\tconsole port: %s' % console_port)
      click.echo('\trhsm_user: %s' % rhsm_user)
      click.echo('\trhsm_password: *******')
      click.echo('\trhsm_pool: %s' % rhsm_pool)
      click.echo('\tcontainerized: %s' % containerized)
      click.echo('\texisting_stack: %s' % existing_stack)
      click.echo('\tSubnets, Security Groups, and IAM Roles will be gathered from the CloudFormation stack')
      click.echo("")
  else:
      click.echo('Configured values:')
      click.echo('\tami: %s' % ami)
      click.echo('\tregion: %s' % region)
      click.echo('\tgluster_stack: %s' % gluster_stack)
      click.echo('\tnode_instance_type: %s' % node_instance_type)
      click.echo('\tprivate_subnet_id1: %s' % private_subnet_id1)
      click.echo('\tprivate_subnet_id2: %s' % private_subnet_id2)
      click.echo('\tprivate_subnet_id3: %s' % private_subnet_id3)
      click.echo('\tgluster_volume_type: %s' % gluster_volume_type)
      click.echo('\tgluster_volume_size: %s' % gluster_volume_size)
      click.echo('\tkeypair: %s' % keypair)
      click.echo('\tnode_sg: %s' % node_sg)
      click.echo('\tdeployment_type: %s' % deployment_type)
      click.echo('\tpublic_hosted_zone: %s' % public_hosted_zone)
      click.echo('\tconsole port: %s' % console_port)
      click.echo('\trhsm_user: %s' % rhsm_user)
      click.echo('\trhsm_password: *******')
      click.echo('\trhsm_pool: %s' % rhsm_pool)
      click.echo('\tcontainerized: %s' % containerized)
      click.echo('\tiam_role: %s' % iam_role)
      click.echo('\texisting_stack: %s' % existing_stack)
      click.echo("")

  if not no_confirm:
    click.confirm('Continue using these values?', abort=True)

  playbooks = ['playbooks/infrastructure.yaml', 'playbooks/add-node.yaml']

  for playbook in playbooks:

    # hide cache output unless in verbose mode
    devnull='> /dev/null'

    if verbose > 0:
      devnull=''

    # refresh the inventory cache to prevent stale hosts from
    # interfering with re-running
    command='inventory/aws/hosts/ec2.py --refresh-cache %s' % (devnull)
    os.system(command)

    # remove any cached facts to prevent stale data during a re-run
    command='rm -rf .ansible/cached_facts'
    os.system(command)

    if use_cloudformation_facts:
        command='ansible-playbook -i inventory/aws/hosts -e \'region=%s \
        ami=%s \
        keypair=%s \
        gluster_stack=%s \
        add_node=yes \
    	node_instance_type=%s \
    	public_hosted_zone=%s \
    	deployment_type=%s \
        console_port=%s \
    	rhsm_user=%s \
    	rhsm_password=%s \
    	rhsm_pool="%s" \
    	containerized=%s \
    	node_type=gluster \
    	key_path=/dev/null \
    	create_key=%s \
    	create_vpc=%s \
        gluster_volume_type=%s \
        gluster_volume_size=%s \
    	stack_name=%s \' %s' % (region,
                    	ami,
                    	keypair,
                        gluster_stack,
                    	node_instance_type,
                    	public_hosted_zone,
                    	deployment_type,
                        console_port,
                    	rhsm_user,
                    	rhsm_password,
                    	rhsm_pool,
                    	containerized,
                    	create_key,
                    	create_vpc,
                        gluster_volume_type,
                        gluster_volume_size,
                    	existing_stack,
                    	playbook)
    else:
        command='ansible-playbook -i inventory/aws/hosts -e \'region=%s \
        ami=%s \
        keypair=%s \
        gluster_stack=%s \
        add_node=yes \
   	node_sg=%s \
    	node_instance_type=%s \
    	private_subnet_id1=%s \
    	private_subnet_id2=%s \
    	private_subnet_id3=%s \
    	public_hosted_zone=%s \
    	deployment_type=%s \
        console_port=%s \
    	rhsm_user=%s \
    	rhsm_password=%s \
    	rhsm_pool="%s" \
    	containerized=%s \
    	node_type=gluster \
    	iam_role=%s \
    	key_path=/dev/null \
    	create_key=%s \
    	create_vpc=%s \
        gluster_volume_type=%s \
        gluster_volume_size=%s \
    	stack_name=%s \' %s' % (region,
                    	ami,
                    	keypair,
                        gluster_stack,
                    	node_sg,
                    	node_instance_type,
                    	private_subnet_id1,
                    	private_subnet_id2,
                    	private_subnet_id3,
                    	public_hosted_zone,
                    	deployment_type,
                        console_port,
                    	rhsm_user,
                    	rhsm_password,
                    	rhsm_pool,
                    	containerized,
                    	iam_role,
                    	create_key,
                    	create_vpc,
                        gluster_volume_type,
                        gluster_volume_size,
                    	existing_stack,
                    	playbook)

    if verbose > 0:
      command += " -" + "".join(['v']*verbose)
      click.echo('We are running: %s' % command)

    status = os.system(command)
    if os.WIFEXITED(status) and os.WEXITSTATUS(status) != 0:
      return os.WEXITSTATUS(status)
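
Building the ansible-playbook invocation by interpolating values into a single shell string is fragile (quoting, embedded tabs, shell injection). A hedged alternative sketch that passes the extra vars as an argument list without a shell:

import subprocess


def run_playbook(playbook, inventory, extra_vars, verbosity=0):
    """Invoke ansible-playbook with -e key=value pairs, without a shell."""
    command = ["ansible-playbook", "-i", inventory, playbook]
    for key, value in extra_vars.items():
        command += ["-e", "{}={}".format(key, value)]
    if verbosity > 0:
        command.append("-" + "v" * verbosity)
    return subprocess.call(command)


# Illustrative values only; the real call would pass the full variable set above.
rc = run_playbook(
    "playbooks/add-node.yaml",
    "inventory/aws/hosts",
    {"region": "us-east-1", "gluster_stack": "cns-1", "node_type": "gluster"},
    verbosity=1,
)
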
Code example #56
File: run.py  Project: wxnacy/goss
def print_version(ctx, param, value):
    if not value or ctx.resilient_parsing:
        return
    click.echo(__version__.__version__)
    ctx.exit()
Code example #57
def tiles(ctx, zoom, input, bounding_tile, with_bounds, seq, x_json_seq):
    """Lists Web Mercator tiles at ZOOM level intersecting
    GeoJSON [west, south, east, north] bounding boxes, features, or
    collections read from stdin. Output is a JSON
    [x, y, z [, west, south, east, north -- optional]] array.

    Example:

    $ echo "[-105.05, 39.95, -105, 40]" | mercantile tiles 12

    Output:

    [852, 1550, 12]
    [852, 1551, 12]
    [853, 1550, 12]
    [853, 1551, 12]

    """
    verbosity = ctx.obj['verbosity']
    logger = logging.getLogger('mercantile')
    try:
        src = click.open_file(input).readlines()
    except IOError:
        src = [input]

    src = iter(src)
    first_line = next(src)

    # If input is RS-delimited JSON sequence.
    if first_line.startswith(u'\x1e'):
        def feature_gen():
            buffer = first_line.strip(u'\x1e')
            for line in src:
                if line.startswith(u'\x1e'):
                    if buffer:
                        yield json.loads(buffer)
                    buffer = line.strip(u'\x1e')
                else:
                    buffer += line
            else:
                yield json.loads(buffer)
    else:
        def feature_gen():
            yield json.loads(first_line)
            for line in src:
                yield json.loads(line)

    try:
        source = feature_gen()
        # Detect the input format
        for obj in source:
            if isinstance(obj, list):
                bbox = obj
                if len(bbox) == 2:
                    bbox += bbox
                if len(bbox) != 4:
                    raise ValueError("Invalid input.")
            elif isinstance(obj, dict):
                if 'bbox' in obj:
                    bbox = obj['bbox']
                else:
                    box_xs = []
                    box_ys = []
                    for feat in obj.get('features', [obj]):
                        lngs, lats = zip(*list(coords(feat)))
                        box_xs.extend([min(lngs), max(lngs)])
                        box_ys.extend([min(lats), max(lats)])
                    bbox = min(box_xs), min(box_ys), max(box_xs), max(box_ys)
            west, south, east, north = bbox
            if bounding_tile:
                vals = mercantile.bounding_tile(
                        west, south, east, north, truncate=False)
                output = json.dumps(vals)
                if seq:
                    click.echo(u'\x1e')
                click.echo(output)
            else:
                # shrink the bounds a small amount so that
                # shapes/tiles round trip.
                epsilon = 1.0e-10
                west += epsilon
                south += epsilon
                east -= epsilon
                north -= epsilon
                for tile in mercantile.tiles(
                        west, south, east, north, [zoom], truncate=False):
                    vals = (tile.x, tile.y, zoom)
                    if with_bounds:
                        vals += mercantile.bounds(tile.x, tile.y, zoom)
                    output = json.dumps(vals)
                    if seq:
                        click.echo(u'\x1e')
                    click.echo(output)

        sys.exit(0)
    except Exception:
        logger.exception("Failed. Exception caught")
        sys.exit(1)
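
The command is a thin wrapper around the mercantile library; for a plain bounding box the core call it makes is roughly this sketch:

import mercantile

# Tiles at zoom 12 covering the bbox from the docstring example above.
west, south, east, north = -105.05, 39.95, -105.0, 40.0
for tile in mercantile.tiles(west, south, east, north, [12]):
    print([tile.x, tile.y, tile.z])
    # [852, 1550, 12], [852, 1551, 12], [853, 1550, 12], [853, 1551, 12]
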
Code example #58
def set_support_value(key, entries, reason):
    """Set a support info variable.

    Examples:
    support.py set coin:BTC trezor1=1.10.5 trezor2=2.4.7 suite=yes connect=no
    support.py set coin:LTC trezor1=yes connect=

    Setting a variable to "yes", "true" or "1" sets support to true.
    Setting a variable to "no", "false" or "0" sets support to false.
    (or null, in case of trezor1/2)
    Setting variable to empty ("trezor1=") will set to null, or clear the entry.
    Setting a variable to a particular version string (e.g., "2.4.7") will set that
    particular version.
    """
    defs, _ = coin_info.coin_info_with_duplicates()
    coins = defs.as_dict()
    if key not in coins:
        click.echo(f"Failed to find key {key}")
        click.echo("Use 'support.py show' to search for the right one.")
        sys.exit(1)

    if coins[key].get("duplicate") and coin_info.is_token(coins[key]):
        shortcut = coins[key]["shortcut"]
        click.echo(f"Note: shortcut {shortcut} is a duplicate.")

    for entry in entries:
        try:
            device, value = entry.split("=", maxsplit=1)
        except ValueError:
            click.echo(f"Invalid entry: {entry}")
            sys.exit(2)

        if device not in SUPPORT_INFO:
            raise click.ClickException(f"unknown device: {device}")

        if value in ("yes", "true", "1"):
            set_supported(device, key, True)
        elif value in ("no", "false", "0"):
            if device in coin_info.MISSING_SUPPORT_MEANS_NO:
                click.echo(f"Setting explicitly unsupported for {device}.")
                click.echo(f"Perhaps you meant removing support, i.e., '{device}=' ?")
            if not reason:
                reason = click.prompt(f"Enter reason for not supporting on {device}:")
            set_unsupported(device, key, reason)
        elif value == "":
            clear_support(device, key)
        else:
            # arbitrary string
            set_supported(device, key, value)

    print_support(coins[key])
    write_support_info()
Code example #59
File: scripts.py  Project: wwxFromTju/ray
def get_worker_ips(cluster_config_file, cluster_name):
    worker_ips = get_worker_node_ips(cluster_config_file, cluster_name)
    click.echo("\n".join(worker_ips))
Code example #60
def shapes(
        ctx, input, precision, indent, compact, projected,
        seq, output_mode, collect, extents, buffer):

    """Reads one or more Web Mercator tile descriptions
    from stdin and writes either a GeoJSON feature collection (the
    default) or a JSON sequence of GeoJSON features/collections to
    stdout.

    Tile descriptions may be either an [x, y, z] array or a JSON
    object of the form

      {"tile": [x, y, z], "properties": {"name": "foo", ...}}

    In the latter case, the properties object will be used to update
    the properties object of the output feature.
    """
    verbosity = ctx.obj['verbosity']
    logger = logging.getLogger('mercantile')
    dump_kwds = {'sort_keys': True}
    if indent:
        dump_kwds['indent'] = indent
    if compact:
        dump_kwds['separators'] = (',', ':')

    try:
        src = click.open_file(input).__iter__()
    except IOError:
        src = [input]

    try:
        features = []
        col_xs = []
        col_ys = []
        for i, line in enumerate(src):
            line = line.strip()
            obj = json.loads(line)
            if isinstance(obj, dict):
                x, y, z = obj['tile'][:3]
                props = obj.get('properties')
                fid = obj.get('id')
            elif isinstance(obj, list):
                x, y, z = obj[:3]
                props = {}
                fid = None
            else:
                raise ValueError("Invalid input: %r", obj)
            west, south, east, north = mercantile.bounds(x, y, z)
            if projected == 'mercator':
                west, south = mercantile.xy(west, south, truncate=False)
                east, north = mercantile.xy(east, north, truncate=False)
            if buffer:
                west -= buffer
                south -= buffer
                east += buffer
                north += buffer
            if precision and precision >= 0:
                west, south, east, north = (
                    round(v, precision) for v in (west, south, east, north))
            bbox = [
                min(west, east), min(south, north),
                max(west, east), max(south, north)]
            col_xs.extend([west, east])
            col_ys.extend([south, north])
            geom = {
                'type': 'Polygon',
                'coordinates': [[
                    [west, south],
                    [west, north],
                    [east, north],
                    [east, south],
                    [west, south]]]}
            xyz = str((x, y, z))
            feature = {
                'type': 'Feature',
                'bbox': bbox,
                'id': xyz,
                'geometry': geom,
                'properties': {'title': 'XYZ tile %s' % xyz}}
            if props:
                feature['properties'].update(props)
            if fid:
                feature['id'] = fid
            if collect:
                features.append(feature)
            elif extents:
                click.echo(" ".join(map(str, bbox)))
            else:
                if seq:
                    click.echo(u'\x1e')
                if output_mode == 'bbox':
                    click.echo(json.dumps(bbox, **dump_kwds))
                elif output_mode == 'feature':
                    click.echo(json.dumps(feature, **dump_kwds))

        if collect and features:
            bbox = [min(col_xs), min(col_ys), max(col_xs), max(col_ys)]
            click.echo(json.dumps({
                'type': 'FeatureCollection',
                'bbox': bbox, 'features': features},
                **dump_kwds))

        sys.exit(0)
    except Exception:
        logger.exception("Failed. Exception caught")
        sys.exit(1)