import sys

import click
from pyicloud import PyiCloudService


def authenticate(username, password):
    print("Signing in...")

    if password:
        icloud = PyiCloudService(username, password)
    else:
        icloud = PyiCloudService(username)

    if icloud.requires_2fa:
        print("Two-factor authentication required. Your trusted devices are:")

        devices = icloud.trusted_devices
        for i, device in enumerate(devices):
            print("  %s: %s" % (i, device.get('deviceName',
                "SMS to %s" % device.get('phoneNumber'))))

        device = click.prompt('Which device would you like to use?', default=0)
        device = devices[device]
        if not icloud.send_verification_code(device):
            print("Failed to send verification code")
            sys.exit(1)

        code = click.prompt('Please enter validation code')
        if not icloud.validate_verification_code(device, code):
            print("Failed to verify verification code")
            sys.exit(1)

    return icloud
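
A minimal way to exercise the helper above, assuming the pyicloud package and placeholder credentials (not real account data):

if __name__ == '__main__':
    api = authenticate('user@example.com', 'app-specific-password')
    # Once two-factor authentication has completed, the session is usable, e.g.:
    print(api.devices)
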
Example 2
File: base.py Project: ralf57/taxi
def create_config_file(filename):
    """
    Create main configuration file if it doesn't exist.
    """
    import textwrap
    from six.moves.urllib import parse

    if not os.path.exists(filename):
        old_default_config_file = os.path.join(os.path.dirname(filename),
                                               '.tksrc')
        if os.path.exists(old_default_config_file):
            upgrade = click.confirm("\n".join(textwrap.wrap(
                "It looks like you recently updated Taxi. Some "
                "configuration changes are required. You can either let "
                "me upgrade your configuration file or do it "
                "manually.")) + "\n\nProceed with automatic configuration "
                "file upgrade?", default=True
            )

            if upgrade:
                settings = Settings(old_default_config_file)
                settings.convert_to_4()
                with open(filename, 'w') as config_file:
                    settings.config.write(config_file)
                os.remove(old_default_config_file)
                return
            else:
                print("Ok then.")
                sys.exit(0)

        response = click.confirm(
            "The configuration file %s does not exist yet.\nDo you want to"
            " create it now?" % filename, default=True
        )

        if response:
            config = resource_string('taxi',
                                     'etc/taxirc.sample').decode('utf-8')
            available_backends = backends_registry._entry_points.keys()
            context = {}
            context['backend'] = click.prompt(
                "Enter the backend you want to use (choices are %s)" %
                ', '.join(available_backends),
                type=click.Choice(available_backends)
            )
            context['username'] = click.prompt("Enter your username")
            context['password'] = parse.quote(
                click.prompt("Enter your password", hide_input=True),
                safe=''
            )
            context['hostname'] = click.prompt(
                "Enter the hostname of the backend (eg. "
                "timesheets.example.com)", type=Hostname()
            )
            templated_config = config.format(**context)
            with open(filename, 'w') as f:
                f.write(templated_config)
        else:
            print("Ok then.")
            sys.exit(1)
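
The Hostname() prompt type used above is defined by the taxi project itself; a minimal sketch of a custom click.ParamType in that spirit (hypothetical, not taxi's actual implementation):

import click

class Hostname(click.ParamType):
    name = 'hostname'

    def convert(self, value, param, ctx):
        # Accept bare hostnames only; reject values containing a scheme or path.
        value = value.strip()
        if not value or '://' in value or '/' in value:
            self.fail('%s is not a plain hostname' % value, param, ctx)
        return value
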
Example 3
def create(grant_admin):
    """Creates a new user"""
    user_type = 'user' if not grant_admin else 'admin'
    while True:
        email = prompt_email()
        if email is None:
            return
        email = email.lower()
        if not User.query.filter(User.all_emails == email, ~User.is_deleted, ~User.is_pending).has_rows():
            break
        print(cformat('%{red}Email already exists'))
    first_name = click.prompt("First name").strip()
    last_name = click.prompt("Last name").strip()
    affiliation = click.prompt("Affiliation", '').strip()
    print()
    while True:
        username = click.prompt("Enter username").lower().strip()
        if not Identity.find(provider='indico', identifier=username).count():
            break
        print(cformat('%{red}Username already exists'))
    password = prompt_pass()
    if password is None:
        return

    identity = Identity(provider='indico', identifier=username, password=password)
    user = create_user(email, {'first_name': to_unicode(first_name), 'last_name': to_unicode(last_name),
                               'affiliation': to_unicode(affiliation)}, identity)
    user.is_admin = grant_admin
    _print_user_info(user)

    if click.confirm(cformat("%{yellow}Create the new {}?").format(user_type), default=True):
        db.session.add(user)
        db.session.commit()
        print(cformat("%{green}New {} created successfully with ID: %{green!}{}").format(user_type, user.id))
Example 4
def login(ctx, username, api_key):
    """Logs the user in by asking for username and api_key
    """
    if username is None:
        username = click.prompt('Username (netID)')
        click.echo()
    if api_key is None:
        click.echo('Please get your API key from ' +
                   click.style(_api_key_url, underline=True))
        api_key = click.prompt('API key')
        click.echo()
    click.echo('Checking your credentials...', nl=False)

    client = ApiClient(api_server_url=settings.API_SERVER_URL,
                       username=username, api_key=api_key)
    try:
        client.test_api_key()
    except ApiClientAuthenticationError:
        click.secho('invalid', bg='red', fg='black')
        click.echo('Please try again.')
        ctx.exit(code=exit_codes.OTHER_FAILURE)
    else:
        click.secho('OK', bg='green', fg='black')
        user = User(username=username, api_key=api_key)
        save_user(user)
Example 5
def update(db, fullname, name, login, password, random, pattern, comment):
    credential = db.credential(fullname)
    if not credential:
        message = "Credential '{}' not found".format(fullname)
        raise click.ClickException(click.style(message, fg='red'))

    if random or pattern:
        pattern = pattern if pattern else db.config['genpass_pattern']
        password = genpass(pattern=pattern)

    values = credential.copy()
    if any([name, login, password, random, comment]):
        values["name"] = name if name else credential["name"]
        values["login"] = login if login else credential["login"]
        values["password"] = password if password else credential["password"]
        values["comment"] = comment if comment else credential["comment"]
    else:
        values["name"] = click.prompt("Name", default=credential["name"])
        values["login"] = click.prompt("Login", default=credential["login"])
        values["password"] = click.prompt("Password",
                                          hide_input=True,
                                          default=credential["password"],
                                          confirmation_prompt=True,
                                          show_default=False,
                                          prompt_suffix=" [*****]: ")
        values["comment"] = click.prompt("Comment",
                                         default=credential["comment"])

    if values != credential:
        if values["password"] != credential["password"]:
            encrypted = encrypt(password, recipient=db.config['recipient'], homedir=db.config['homedir'])
            values['password'] = encrypted
        db.update(fullname=fullname, values=values)
        db.repo.commit('Updated {}'.format(credential['fullname']))
Example 6
def init(yaml_file):
    '''Initialize a new alert definition YAML file'''
    template = textwrap.dedent('''
    check_definition_id: {check_id}
    id:
    status: ACTIVE
    name: "{name}"
    description: "Example Alert Description"
    team: "{team}"
    responsible_team: "{team}"
    condition: |
      >100
    entities:
    entities_exclude:
    priority: 2
    tags:
    parent_id:
    parameters:
    ''')
    name = click.prompt('Alert name', default='Example Alert')
    check_id = click.prompt('Check ID')
    team = click.prompt('(Responsible-) Team', default='Example Team')
    data = template.format(name=name, team=team, check_id=check_id)
    yaml_file.write(data.encode('utf-8'))
Example 7
    def gather_information(self, race_id, number):
        click.echo('Hello there! We are going to copy a race now.')

        # get race
        if not race_id:
            races = Race.objects.all().order_by('-start_datetime')
            for race in races:
                click.echo('{0}. {1}'.format(race.pk, race.title))

            race = self.get_race(click.prompt("First, give me the ID of the Race object we're gonna copy. If there is more than one race already, give me ID of the latest one."))
        else:
            race = self.get_race(race_id)

        while not race:
            race = self.get_race(click.prompt("Wrong ID! Try again"))

        # get number
        if not number:
            number = click.prompt("What is the number of the race? If this is a second race, write 2. If third, then 3. You got it")

        date = self.prepare_date(click.prompt("What is the date of this new race? (Format: MM/DD/YYYY)"))
        while not date:
            date = self.prepare_date(click.prompt("Wrong format! Provide a date in format: MM/DD/YYYY)"))

        return (race, number, date)
def get_proxy_hosts_excludes():
    message = """
If a proxy is needed to reach HTTP and HTTPS traffic, please enter the
name below. This proxy will be configured by default for all processes
that need to reach systems outside the cluster. An example proxy value
would be:

    http://proxy.example.com:8080/

More advanced configuration is possible if using Ansible directly:

https://docs.openshift.com/enterprise/latest/install_config/http_proxies.html
"""
    click.echo(message)

    message = "Specify your http proxy? (ENTER for none)"
    http_proxy_hostname = click.prompt(message, default='')

    # TODO: Fix this prompt message and behavior. 'ENTER' will default
    # to the http_proxy_hostname if one was provided
    message = "Specify your https proxy? (ENTER for none)"
    https_proxy_hostname = click.prompt(message, default=http_proxy_hostname)

    if http_proxy_hostname or https_proxy_hostname:
        message = """
All hosts in your OpenShift inventory will automatically be added to the NO_PROXY value.
Please provide any additional hosts to be added to NO_PROXY. (ENTER for none)
"""
        proxy_excludes = click.prompt(message, default='')
    else:
        proxy_excludes = ''

    return http_proxy_hostname, https_proxy_hostname, proxy_excludes
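
One way to address the TODO above, sketched here as an assumption rather than the upstream fix, is to offer the HTTP proxy as the HTTPS default only after an explicit confirmation:

import click

def prompt_https_proxy(http_proxy_hostname):
    # Hypothetical helper: reuse the HTTP proxy for HTTPS only if the user agrees.
    default = ''
    if http_proxy_hostname and click.confirm('Use the same proxy for HTTPS traffic?',
                                             default=True):
        default = http_proxy_hostname
    return click.prompt('Specify your https proxy? (ENTER for none)', default=default)
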
Example 9
def get_config_data():
    fn = os.path.expanduser(DEFAULT_CONFIG_FILE)
    data = {}
    try:
        if os.path.exists(fn):
            with open(fn) as fd:
                data = yaml.safe_load(fd)

            if 'password' in data:
                keyring.set_password("zmon-cli", data['user'], data['password'])
                del data['password']
                with open(fn, mode='w') as fd:
                    yaml.dump(data, fd, default_flow_style=False,
                              allow_unicode=True,
                              encoding='utf-8')
        else:
            clickclick.warning("No configuration file found at [{}]".format(DEFAULT_CONFIG_FILE))
            data['url'] = click.prompt("ZMon Base URL (e.g. https://zmon2.local/rest/api/v1)")
            data['user'] = click.prompt("ZMon username", default=os.environ['USER'])

            with open(fn, mode='w') as fd:
                yaml.dump(data, fd, default_flow_style=False,
                          allow_unicode=True,
                          encoding='utf-8')
    except Exception as e:
        error(e)

    return validate_config(data)
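
The password stripped from the config file above is presumably read back from the keyring later; a minimal companion sketch (an assumption, not zmon-cli's actual code):

import click
import keyring

def get_zmon_password(config):
    # Fetch the password stored under the "zmon-cli" service, falling back to a prompt.
    password = keyring.get_password('zmon-cli', config['user'])
    if password is None:
        password = click.prompt('ZMon password', hide_input=True)
    return password
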
Example 10
def collect_hosts():
    """
        Collect host information from user. This will later be filled in using
        ansible.

        Returns: a list of host information collected from the user
    """
    click.clear()
    click.echo('***Host Configuration***')
    message = """
The OpenShift Master serves the API and web console.  It also coordinates the
jobs that have to run across the environment.  It can even run the datastore.
For wizard based installations the database will be embedded.  It's possible to
change this later using etcd from Red Hat Enterprise Linux 7.

Any Masters configured as part of this installation process will also be
configured as Nodes.  This is so that the Master will be able to proxy to Pods
from the API.  By default this Node will be unschedulable but this can be changed
after installation with 'oadm manage-node'.

The OpenShift Node provides the runtime environments for containers.  It will
host the required services to be managed by the Master.

http://docs.openshift.com/enterprise/latest/architecture/infrastructure_components/kubernetes_infrastructure.html#master
http://docs.openshift.com/enterprise/3.0/architecture/infrastructure_components/kubernetes_infrastructure.html#node
    """
    click.echo(message)

    hosts = []
    more_hosts = True
    ip_regex = re.compile(r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$')

    while more_hosts:
        host_props = {}
        hostname_or_ip = click.prompt('Enter hostname or IP address:',
                                      default='',
                                      value_proc=validate_prompt_hostname)

        if ip_regex.match(hostname_or_ip):
            host_props['ip'] = hostname_or_ip
        else:
            host_props['hostname'] = hostname_or_ip

        host_props['master'] = click.confirm('Will this host be an OpenShift Master?')
        host_props['node'] = True

        rpm_or_container = click.prompt('Will this host be RPM or Container based (rpm/container)?',
                                        type=click.Choice(['rpm', 'container']),
                                        default='rpm')
        if rpm_or_container == 'container':
            host_props['containerized'] = True
        else:
            host_props['containerized'] = False

        host = Host(**host_props)

        hosts.append(host)

        more_hosts = click.confirm('Do you want to add additional hosts?')
    return hosts
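
validate_prompt_hostname is referenced above but not shown; a minimal value_proc along those lines (an assumption, not the actual atomic-openshift-utils helper) could look like:

import click

def validate_prompt_hostname(hostname):
    # A value_proc receives the raw input and must return the cleaned value,
    # or raise click.UsageError, in which case click prints the error and re-prompts.
    hostname = hostname.strip()
    if not hostname or ' ' in hostname:
        raise click.UsageError('Please enter a valid hostname or IP address')
    return hostname
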
Example 11
def get_proxy_hostnames_and_excludes():
    message = """
If a proxy is needed to reach HTTP and HTTPS traffic please enter the name below.
This proxy will be configured by default for all processes needing to reach systems outside
the cluster.

More advanced configuration is possible if using ansible directly:

https://docs.openshift.com/enterprise/latest/install_config/http_proxies.html
"""
    click.echo(message)

    message = "Specify your http proxy? (ENTER for none)"
    http_proxy_hostname = click.prompt(message, default='')

    message = "Specify your https proxy? (ENTER for none)"
    https_proxy_hostname = click.prompt(message, default=http_proxy_hostname)

    if http_proxy_hostname or https_proxy_hostname:
        message = """
All hosts in your openshift inventory will automatically be added to the NO_PROXY value.
Please provide any additional hosts to be added to NO_PROXY. (ENTER for none)
"""
        proxy_excludes = click.prompt(message, default='')
    else:
        proxy_excludes = ''

    return http_proxy_hostname, https_proxy_hostname, proxy_excludes
Example 12
def cli():
    with open(VERSION_PATH, 'r') as fp:
        version = fp.read().strip()

    click.echo('Current release: %s' % click.style(version))

    new_version = click.prompt('Enter your new release')
    release_message = click.prompt('Enter your release message')

    current_branch = c('git rev-parse --abbrev-ref HEAD').strip()
    c('git checkout develop')
    with open(VERSION_PATH, 'w') as fp:
        fp.write(new_version)
    c('git add %s' % VERSION_PATH)
    c('git commit -m "update VERSION from %s to %s"' % (version, new_version))
    c('git push')

    c('git checkout master')
    c('git merge develop --commit')
    c('git tag %s -m "%s"' % (new_version, re.sub(r'[\"]', '\\\"', release_message)))
    c('git push')
    c('git push --tags')
    c('python setup.py sdist register upload')

    c('git checkout %s' % current_branch)
def get_user_credentials(two1_dir="~/.two1/two1.json"):
    """ Collect user credentials at CLI.
    """

    with open(os.path.expanduser(two1_dir), "r") as f:
        username = json.load(f)["username"]
    try:
        w = wallet.Wallet()
    except:
        logger.info(click.style("A technical error occurred. Please try the previous command again.", fg="magenta"))
        sys.exit()
    machine_auth = machine_auth_wallet.MachineAuthWallet(w)
    rest_client = _rest_client.TwentyOneRestClient(TWO1_HOST, machine_auth, username)
    address = w.current_address

    correct_password = False
    pw = click.prompt(click.style("Please enter your 21 password", fg=PROMPT_COLOR), hide_input=True)

    while not correct_password:
        try:
            rest_client.login(payout_address=address, password=pw)
            correct_password = True
        except:
            pw = click.prompt(click.style("Incorrect 21 password. Please try again", fg="magenta"),
                              hide_input=True)

    return username, pw
Example 14
File: base.py Project: lvsv/lancet
    def get_credentials(self, service, checker=None):
        url = self.config.get(service, "url")
        username = self.config.get(service, "username")
        key = "lancet+{}".format(url)
        if username:
            password = keyring.get_password(key, username)
            if password:
                return url, username, password

        with taskstatus.suspend():
            while True:
                click.echo("Please provide your authentication information for {}".format(url))
                if not username:
                    username = click.prompt("Username")
                else:
                    click.echo("Username: {}".format(username))
                password = click.prompt("Password", hide_input=True)

                if checker:
                    with taskstatus("Checking provided credentials") as ts:
                        if not checker(url, username, password):
                            ts.fail("Login failed")
                            username, password = None, None
                            continue
                        else:
                            ts.ok("Correctly authenticated to {}", url)

                keyring.set_password(key, username, password)
                return url, username, password
Example 15
def assemble_document(title, fields):
    """
    Prompts to fill in any gaps in form values, then builds document with all fields.
    """
    data = []
    for field in fields:
        entry = {}
        entry['label'] = field['label']
        if 'validator' in field.keys():
            valid = False
            while not valid:
                answer = click.prompt(field['label'])
                valid = field['validator'](answer)
            entry['value'] = answer
        elif 'value' in field.keys():
            entry['value'] = field['value']
        elif 'value_from' in field.keys():
            entry['value'] = field['value_from'][field['label']]
        elif 'not_null' in field.keys() and field['not_null']:
            entry['value'] = field['not_null'][field['label']]
        else:
            entry['value'] = click.prompt(field['label'])
        data.append(entry)
    document = "Rein %s\n" % title
    for entry in data:
        document = document + entry['label'] + ": " + entry['value'] + "\n"
    return document[:-1]
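
For reference, a hypothetical fields list that exercises the branches above (labels and the validator are made up for illustration):

fields = [
    {'label': 'Job name'},                                    # plain prompt
    {'label': 'Amount', 'validator': lambda s: s.isdigit()},  # re-prompt until the validator passes
    {'label': 'Category', 'value': 'general'},                # fixed value, never prompted
]
document = assemble_document('Job', fields)
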
Example 16
def remove(config_file, cluster_name, format, dcs):
    config, dcs, cluster = ctl_load_config(cluster_name, config_file, dcs)

    if not isinstance(dcs, Etcd):
        raise PatroniCtlException('We have not implemented this for DCS of type {}'.format(type(dcs)))

    output_members(cluster, format=format)

    confirm = click.prompt('Please confirm the cluster name to remove', type=str)
    if confirm != cluster_name:
        raise PatroniCtlException('Cluster names specified do not match')

    message = 'Yes I am aware'
    confirm = \
        click.prompt('You are about to remove all information in DCS for {}, please type: "{}"'.format(cluster_name,
                     message), type=str)
    if message != confirm:
        raise PatroniCtlException('You did not exactly type "{}"'.format(message))

    if cluster.leader:
        confirm = click.prompt('This cluster currently is healthy. Please specify the master name to continue')
        if confirm != cluster.leader.name:
            raise PatroniCtlException('You did not specify the current master of the cluster')

    dcs.client.delete(dcs._base_path, recursive=True)
Example 17
def request_access_interactive(region, odd_host):
    region = click.prompt('AWS region', default=region)
    odd_host = click.prompt('Odd SSH bastion hostname', default=odd_host)

    all_instances = piu.utils.list_running_instances(region, [])

    stack_instances = [instance for instance in all_instances
                       if instance.name and instance.stack_name and instance.stack_version]

    instance_count = len(stack_instances)
    if instance_count == 0:
        raise click.ClickException('No running instances were found.')

    stack_instances.sort(key=operator.attrgetter('stack_name', 'stack_version'))

    print()
    table_entries = [dict(index=idx, **instance._asdict()) for idx, instance in enumerate(stack_instances, start=1)]
    print_table(
        'index name stack_name stack_version private_ip instance_id'.split(),
        table_entries)
    print()

    if instance_count > 1:
        allowed_choices = ["{}".format(n) for n in range(1, instance_count + 1)]
        instance_index = int(click.prompt('Choose an instance (1-{})'.format(instance_count),
                                          type=click.Choice(allowed_choices))) - 1
    else:
        click.confirm('Connect to {}?'.format(stack_instances[0].name), default=True, abort=True)
        instance_index = 0

    host = stack_instances[instance_index].private_ip
    reason = click.prompt('Reason', default='Troubleshooting')
    return (host, odd_host, reason)
Example 18
def cli():
    q = click.prompt('请输入线路名', value_proc=str)
    lines = BeijingBus.search_lines(q)
    for index, line in enumerate(lines):
        click.echo()
        click.secho('[%s] %s' % (index+1, line.name), bold=True, underline=True)
        station_names = [s.name for s in line.stations]
        click.echo()
        click.echo('站点列表:%s' % ','.join(station_names))

    click.echo()
    q = click.prompt('请从结果中选择线路编号', type=int)

    line = lines[q-1]
    click.clear()
    click.echo('你选择了 %s,下面请选择站点' % line.name)
    click.echo()
    for index, station in enumerate(line.stations):
        click.echo('[%s] %s' % (index+1, station.name))

    click.echo()
    q = click.prompt('请从结果中选择线路编号', type=int)

    while True:
        echo_realtime_data(line, q)
        time.sleep(5)
Example 19
def login_server(wio):
    while True:
        click.echo("1.) International[https://iot.seeed.cc]")
        click.echo("2.) China[https://cn.iot.seeed.cc]")
        click.echo("3.) Local")
        server = click.prompt("Choose main server", type=int)
        if server == 1:
            wio.set_config("mserver","https://iot.seeed.cc")
            wio.set_config("mserver_ip","45.79.4.239")
            return
        elif server == 2:
            wio.set_config("mserver","https://cn.iot.seeed.cc")
            wio.set_config("mserver_ip","120.25.216.117")
            return
        elif server == 3:
            break
        else:
            continue

    click.secho('> ', fg='green', nl=False)
    mserver_ip = click.prompt("Please enter local main server ip")

    click.secho('> ', fg='green', nl=False)
    mserver = click.prompt("Please enter local main server url")

    wio.set_config("mserver", mserver)
    wio.set_config("mserver_ip", mserver_ip)
Example 20
def login(cfg, password):
    """Log in with your slipstream credentials."""
    should_prompt = True
    api = Api(cfg.settings['endpoint'])
    username = cfg.settings.get('username')

    if username and password:
        try:
            api.login(username, password)
        except HTTPError as e:
            if e.response.status_code != 401:
                raise
            logger.warning("Invalid credentials provided.")
        else:
            should_prompt = False

    while should_prompt:
        logger.notify("Enter your SlipStream credentials.")
        username = click.prompt("Username")
        password = click.prompt("Password (typing will be hidden)",
                                hide_input=True)

        try:
            api.login(username, password)
        except HTTPError as e:
            if e.response.status_code != 401:
                raise
            logger.error("Authentication failed.")
        else:
            cfg.settings['username'] = username
            logger.notify("Authentication successful.")
            should_prompt = False

    cfg.write_config()
    logger.info("Local credentials saved.")
Example 21
def profile_keygen(size):
    userdata = get_userdata()

    if 'rsa' in userdata:
        click.confirm('This profile already has an RSA key pair. Do you want to replace it?', abort=True)
        click.confirm(
            'Are you sure? This may make any existing password databases inaccessible and cannot be undone!',
            abort=True
        )

    password = click.prompt('Password (optional)', hide_input=True, default='')
    if password:
        confirm_password = click.prompt('Again', hide_input=True)
        if password != confirm_password:
            raise click.UsageError("Passwords don't match!")

    click.echo('Generating key pair. This may take a while...')
    key = RSA.generate(size)
    public_key = key.publickey()

    userdata['rsa'] = {
        'key': b64encode(key.exportKey('PEM', passphrase=password or None)).decode(),
        'public_key': b64encode(public_key.exportKey('PEM')).decode()
    }

    write_userdata(userdata)

    click.echo('Key pair created. Your public key is:\n\n{0}'.format(public_key.exportKey('OpenSSH').decode()))
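
A hypothetical counterpart for loading the stored key back, assuming PyCryptodome/PyCrypto and the same userdata layout as above:

from base64 import b64decode
from Crypto.PublicKey import RSA

def load_profile_key(userdata, password=None):
    # Reverse of the export above: decode the stored PEM and import it,
    # supplying the passphrase if one was set.
    pem = b64decode(userdata['rsa']['key'])
    return RSA.importKey(pem, passphrase=password or None)
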
Example 22
def quickstart():
    """Quickstart wizard for setting up twtxt."""
    width = click.get_terminal_size()[0]
    width = width if width <= 79 else 79

    click.secho("twtxt - quickstart", fg="cyan")
    click.secho("==================", fg="cyan")
    click.echo()

    help_text = "This wizard will generate a basic configuration file for twtxt with all mandatory options set. " \
                "Have a look at the README.rst to get information about the other available options and their meaning."
    click.echo(textwrap.fill(help_text, width))

    click.echo()
    nick = click.prompt("➤ Please enter your desired nick", default=os.environ.get("USER", ""))
    twtfile = click.prompt("➤ Please enter the desired location for your twtxt file", "~/twtxt.txt", type=click.Path())
    disclose_identity = click.confirm("➤ Do you want to disclose your identity? Your nick and URL will be shared", default=False)

    click.echo()
    add_news = click.confirm("➤ Do you want to follow the twtxt news feed?", default=True)

    conf = Config.create_config(nick, twtfile, disclose_identity, add_news)
    open(os.path.expanduser(twtfile), "a").close()

    click.echo()
    click.echo("✓ Created config file at '{0}'.".format(click.format_filename(conf.config_file)))
Example 23
def existe_archivo(archivo):
    if os.path.exists(archivo):
        print("Si existe\n")
        opcion = click.prompt('Desea A) Escribir en el archivo B) Leer el archivo')
        if opcion == 'A' or opcion == 'a':
            escritura = click.prompt("Coloque lo que quiere escribir en el archivo")
            f = open(archivo, 'a+')
            f.write(escritura)
            f.close()
            print("Se ha modificado el archivo")

        elif opcion == 'B' or opcion == 'b':
            print("\nMostrando lo que hay en el archivo...\n")
            f = open(archivo)
            for linea in f:
                print(linea)
            f.close()

    else:
        print("El archivo no existe")
        respuesta = click.prompt('\nDesea crear un archivo con ese nombre? (s/n): ')
        if respuesta == "S" or respuesta == "s":
            print("\nCreando un archivo con ese nombre...")
            f = open(archivo, 'a+')
            f.close()
Example 24
def report(context, render, language, samples, group, human, panel):
  """Generate a coverage report from Chanjo SQL output."""
  # get uri + dialect of Chanjo database
  uri, dialect = context.obj.get('db'), context.obj.get('dialect')

  # set the custom option
  context.obj.set('report.human', human)
  context.obj.set('report.panel', panel)
  context.obj.set('report.samples', samples)
  context.obj.set('report.group', group)
  context.obj.set('report.language', language)

  if uri is None:
    # chanjo executed without "--db" set, prompt user input
    click.prompt('Please enter path to database', type=click.Path(exists=True))

  # create instance of Chanjo API "Miner"
  api = Miner(uri, dialect=dialect)

  # determine which render method to use and initialize it
  render_method = load_entry_point(ROOT_PACKAGE, 'chanjo_report.interfaces',
                                   render)

  # run the render_method and print the result to STDOUT
  click.echo(render_method(api, options=context.obj))
Example 25
def update(fullname, name, login, password, comment):
    db = Database(config.path)
    credential = get_credential_or_abort(db, fullname)
    values = credential.copy()

    if any([name, login, password, comment]):
        values["name"] = name if name else credential["name"]
        values["login"] = login if login else credential["login"]
        values["password"] = password if password else credential["password"]
        values["comment"] = comment if comment else credential["comment"]
    else:
        values["name"] = click.prompt("Name", default=credential["name"])
        values["login"] = click.prompt("Login", default=credential["login"])
        values["password"] = click.prompt("Password",
                                          hide_input=True,
                                          default=credential["password"],
                                          confirmation_prompt=True,
                                          show_default=False,
                                          prompt_suffix=" [*****]: ")
        values["comment"] = click.prompt("Comment",
                                         default=credential["comment"])

    if values != credential:
        values["fullname"] = make_fullname(values["login"], values["name"])
        values["modified"] = datetime.now()
        if values["password"] != credential["password"]:
            with Cryptor(config.path) as cryptor:
                values["password"] = cryptor.encrypt(password)
        db = Database(config.path)
        db.update(values, (where("fullname") == credential["fullname"]))
Example 26
def choose_datetime_format():
    """query user for their date format of choice"""
    while True:
        ordering_choices = [
            ('year month day', ['%Y', '%m', '%d']),
            ('day month year', ['%d', '%m', '%Y']),
            ('month day year', ['%m', '%d', '%Y']),
        ]
        separator_choices = ['-', '.', '/']
        validate = partial(validate_int, min_value=0, max_value=2)

        print("What ordering of year, month, date do you want to use? "
              "(You can choose the separator in the next step)")
        print('\n'.join(
            ['[{}] {}'.format(num, one) for num, (one, _) in enumerate(ordering_choices)]))
        ordering_no = prompt("Please choose one of the above options", value_proc=validate)
        print()
        print("Now, please choose a separator")
        print('\n'.join(['[{}] {}'.format(num, one) for num, one in enumerate(separator_choices)]))
        prompt_text = "Please choose one of the above options"
        separator = separator_choices[prompt(prompt_text, value_proc=validate)]
        dateformat = separator.join(ordering_choices[ordering_no][1])
        today = date.today()
        text = ("Does this look sensible to you: {} "
                "(today as an example: {})?".format(dateformat, today.strftime(dateformat)))
        if confirm(text):
            break
    return dateformat
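
validate_int is only referenced via functools.partial above; a plausible sketch of such a value_proc (an assumption, not the project's actual helper):

import click

def validate_int(input_string, min_value, max_value):
    # Return the parsed integer, or raise click.UsageError so click re-prompts.
    try:
        number = int(input_string)
    except ValueError:
        raise click.UsageError('Invalid input, please enter a number')
    if not min_value <= number <= max_value:
        raise click.UsageError(
            'Please enter a number between {} and {}'.format(min_value, max_value))
    return number
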
Example 27
def _get_credentials(jenkins_url):
    click.secho('Authorization error', fg='red', bold=True)
    click.secho('Please supply credentials to access %s' % jenkins_url)
    username = click.prompt('Username')
    password = click.prompt('Password', hide_input=True)
    click.secho('')
    return utils.replace_auth_in_url(jenkins_url, username, password)
Example 28
def configure(config):
    """Configure GitHub access"""
    emails = config.get("emails", [])
    if not emails:
        try:
            emails = [get_git_email()]
        except:
            pass

    emails = click.prompt("Your email addresses (comma separated)", default=",".join(emails) or None)
    token = click.prompt(
        "Your personal GitHub access token", hide_input=True, default=config.get("github_access_token")
    )

    emails = list([mail.strip() for mail in emails.split(",")])
    config = {"emails": emails, "github_access_token": token}

    repositories = {}
    with Action("Scanning repositories..") as act:
        for repo in get_repos(token):
            repositories[repo["url"]] = repo
            act.progress()

    path = os.path.join(CONFIG_DIR, "repositories.yaml")
    os.makedirs(CONFIG_DIR, exist_ok=True)
    with open(path, "w") as fd:
        yaml.safe_dump(repositories, fd)

    with Action("Storing configuration.."):
        stups_cli.config.store_config(config, "github-maintainer-cli")
Example 29
def get_vm_options():
    """ Get user-selected config options for the 21 VM.
    """

    logger.info(click.style("Configure 21 virtual machine.", fg=TITLE_COLOR))
    logger.info("Press return to accept defaults.")

    default_disk = Two1MachineVirtual.DEFAULT_VDISK_SIZE
    default_memory = Two1MachineVirtual.DEFAULT_VM_MEMORY
    default_port = Two1MachineVirtual.DEFAULT_SERVICE_PORT
    default_network = Two1MachineVirtual.DEFAULT_ZEROTIER_INTERFACE

    disk_size = click.prompt("  Virtual disk size in MB (default = %s)" % default_disk,
                             type=int, default=default_disk, show_default=False)
    vm_memory = click.prompt("  Virtual machine memory in MB (default = %s)" % default_memory,
                             type=int, default=default_memory, show_default=False)
    server_port = click.prompt("  Port for micropayments server (default = %s)" % default_port,
                               type=int, default=default_port, show_default=False)
    network_interface = click.prompt("  Network interface (default = %s)" % default_network,
                                     default=default_network, show_default=False)

    return VmConfiguration(disk_size=disk_size,
                           vm_memory=vm_memory,
                           server_port=server_port,
                           network_interface=network_interface)
Example 30
def run(flow, env):
    window = get_window(env)
    window.set_window_option('remain-on-exit', 'on')

    progress_file = tempfile.mktemp()
    with open(progress_file, 'w'):
        pass

    t = threading.Thread(target=tail_f_loop, args=(progress_file,))
    t.daemon = True
    t.start()

    sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
    t0 = time.time()
    # noinspection PyBroadException
    try:
        success = traverse(window, flow, progress_file)
        print_conclusion(flow, success, t0)
    except BaseException:
        success = False
        print_conclusion(flow, success, t0)
        logging.exception('Failed to finish running flow')

    window.set_window_option('remain-on-exit', 'off')
    kill_dead_panes(window)
    click.prompt('Press any key to exit', prompt_suffix='..', default='', show_default=False)
    return success
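
The final click.prompt above is really a "press Enter to exit" gate (default='' means any input, including none, ends the wait). click.pause() is the purpose-built call for that; a minor alternative, not what the original does:

import click

click.pause(info='Press any key to exit..')
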
def upgrade(ctx, latest_minor, next_major):
    oo_cfg = ctx.obj['oo_cfg']

    if len(oo_cfg.deployment.hosts) == 0:
        click.echo("No hosts defined in: %s" % oo_cfg.config_path)
        sys.exit(1)

    variant = oo_cfg.settings['variant']
    if find_variant(variant)[0] is None:
        click.echo("%s is not a supported variant for upgrade." % variant)
        sys.exit(0)

    old_version = oo_cfg.settings['variant_version']

    try:
        mapping = UPGRADE_MAPPINGS[old_version]
    except KeyError:
        click.echo('No upgrades available for %s %s' % (variant, old_version))
        sys.exit(0)

    message = """
        This tool will help you upgrade your existing OpenShift installation.
        Currently running: %s %s
"""
    click.echo(message % (variant, old_version))

    # Map the dynamic upgrade options to the playbook to run for each.
    # Index offset by 1.
    # List contains tuples of booleans for (latest_minor, next_major)
    selections = []
    if not (latest_minor or next_major):
        i = 0
        if 'minor_playbook' in mapping:
            click.echo("(%s) Update to latest %s" % (i + 1, old_version))
            selections.append((True, False))
            i += 1
        if 'major_playbook' in mapping:
            click.echo("(%s) Upgrade to next release: %s" %
                       (i + 1, mapping['major_version']))
            selections.append((False, True))
            i += 1

        response = click.prompt(
            "\nChoose an option from above",
            type=click.Choice(list(map(str, range(1,
                                                  len(selections) + 1)))))
        latest_minor, next_major = selections[int(response) - 1]

    if next_major:
        if 'major_playbook' not in mapping:
            click.echo(
                "No major upgrade supported for %s %s with this version "
                "of atomic-openshift-utils." % (variant, old_version))
            sys.exit(0)
        playbook = mapping['major_playbook']
        new_version = mapping['major_version']
        # Update config to reflect the version we're targeting, we'll write
        # to disk once Ansible completes successfully, not before.
        oo_cfg.settings['variant_version'] = new_version
        if oo_cfg.settings['variant'] == 'enterprise':
            oo_cfg.settings['variant'] = 'openshift-enterprise'

    if latest_minor:
        if 'minor_playbook' not in mapping:
            click.echo(
                "No minor upgrade supported for %s %s with this version "
                "of atomic-openshift-utils." % (variant, old_version))
            sys.exit(0)
        playbook = mapping['minor_playbook']
        new_version = old_version

    click.echo(
        "OpenShift will be upgraded from %s %s to latest %s %s on the following hosts:\n"
        % (variant, old_version, oo_cfg.settings['variant'], new_version))
    for host in oo_cfg.deployment.hosts:
        click.echo("  * %s" % host.connect_to)

    if not ctx.obj['unattended']:
        # Prompt interactively to confirm:
        if not click.confirm("\nDo you want to proceed?"):
            click.echo("Upgrade cancelled.")
            sys.exit(0)

    retcode = openshift_ansible.run_upgrade_playbook(oo_cfg.deployment.hosts,
                                                     playbook,
                                                     ctx.obj['verbose'])
    if retcode > 0:
        click.echo("Errors encountered during upgrade, please check %s." %
                   oo_cfg.settings['ansible_log_path'])
    else:
        oo_cfg.save_to_disk()
        click.echo("Upgrade completed! Rebooting all hosts is recommended.")
    print('Two-factor authentication is required. Please select a device.')

    devices = api.trusted_devices

    for i in range(len(devices)):
        print(str(i) + '. ' + devices[i]['phoneNumber'])

    deviceId = int(input('Enter the device id: '))
    device = devices[deviceId]

    if not api.send_verification_code(device):
        print('Failed to send verification code.')
        exit(1)

    while True:
        code = click.prompt('Enter the verification code: ')
        if api.validate_verification_code(device, code):
            break
        else:
            print('Invalid verification code.')

from datetime import datetime, timedelta
from gspread import authorize
from oauth2client.service_account import ServiceAccountCredentials
from numpy import array
import pytz

# how to change day for next month
next_day = datetime.now()
next_day = datetime.now() + timedelta(days=1)
Example 33
def set_drive(drive_id=None, email=None, local_root=None, ignore_file=None):
    try:
        all_drives, drive_table = print_all_drives()
        click.echo()
    except Exception as e:
        error('Error: %s.' % e)
        return

    interactive = drive_id is None or email is None
    if interactive:
        # Interactive mode to ask for which drive to add.
        index = click.prompt(
            'Please enter row number of the Drive to add or modify (CTRL+C to abort)',
            type=int)
        try:
            email, drive_id = index_to_drive_table_row(index, drive_table)
        except ValueError as e:
            error(str(e))
            return

    try:
        account_id = email_to_account_id(context, email)
    except Exception as e:
        error(str(e))
        return

    # Traverse the Drive objects and see if Drive exists.
    found_drive = False
    for d in all_drives[account_id][2]:
        if drive_id == d.id:
            found_drive = True
            break
    if not found_drive:
        error('Did not find Drive "%s".' % drive_id)
        return

    # Confirm if Drive already exists.
    drive_exists = drive_id in context.all_drives()
    curr_drive_config = None
    if drive_exists:
        if interactive:
            click.confirm(
                'Drive "%s" is already set. Overwrite its existing configuration?'
                % drive_id,
                abort=True)
        curr_drive_config = context.get_drive(drive_id)

    click.echo()
    acc_profile = all_drives[account_id][0]
    click.echo(
        click.style('Going to add/edit Drive "%s" of account "%s"...' %
                    (drive_id, acc_profile.account_email),
                    fg='cyan'))

    if interactive:
        local_root, ignore_file = read_drive_config_interactively(
            drive_exists, curr_drive_config)
    else:
        # Non-interactive mode. The drive may or may not exist in config, and the cmdline args may or may not be
        # specified. If drive exists in config, use existing values for missing args. If drive does not exist,
        # local root is required and ignore file is optional (use default if missing).
        try:
            if local_root is None:
                if drive_exists:
                    local_root = curr_drive_config.localroot_path
                else:
                    raise ValueError(
                        'Please specify the local directory for the Drive with "--local-root" argument.'
                    )
            local_root = os.path.abspath(local_root)
            if not os.path.isdir(local_root):
                raise ValueError('Path "%s" should be an existing directory.' %
                                 local_root)
            if ignore_file is None and drive_exists:
                ignore_file = curr_drive_config.ignorefile_path
            if ignore_file is None or not os.path.isfile(ignore_file):
                click.secho(
                    'Warning: ignore file path does not point to a file. Use default.',
                    fg='yellow')
                ignore_file = context.config_dir + '/' + context.DEFAULT_IGNORE_FILENAME
            if (drive_exists and local_root == curr_drive_config.localroot_path
                    and ignore_file == curr_drive_config.ignorefile_path):
                click.secho('No parameter was changed. Skipped operation.',
                            fg='yellow')
                return
        except ValueError as e:
            error(str(e))
            return

    d = context.add_drive(
        drive_config.LocalDriveConfig(drive_id, account_id, ignore_file,
                                      local_root))
    save_context(context)
    success('\nSuccessfully configured Drive %s of account %s (%s):' %
            (d.drive_id, acc_profile.account_email, d.account_id))
    click.echo('  Local directory: ' + d.localroot_path)
    click.echo('  Ignore file path: ' + d.ignorefile_path)
def _get_user_response(prompt: str) -> str:
    return click.prompt(prompt, show_default=False).strip().lower()
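
A hypothetical call site for this helper (illustrative only; run_cleanup is not from the original project):

answer = _get_user_response('Delete all temporary files? [y/n]')
if answer in ('y', 'yes'):
    run_cleanup()  # hypothetical action taken on confirmation
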
Example 35
def transactions_to_billing(transactions_filename, billing_filename,
                            note_threshold, one_by_one, unambiguous_only):
    """
    Import from the Transaction history into Billing book.

    This command takes year from the billing filename and imports only those transactions
    which correspond to that year.
    """
    click.echo(
        f"Reading the transactions history from '{transactions_filename}'")
    history = TransactionHistory(filename=transactions_filename)
    if history.transactions:
        click.echo(RESULT_OK)

    click.echo(f"Reading the destination billing file '{billing_filename}'")
    billing_book = BillingBook(billing_filename)
    if billing_book.month_billings:
        click.echo(RESULT_OK)

    try:
        for transaction in history.transactions:
            if transaction.has_receipt or transaction.created.year != billing_book.year:
                continue

            click.echo(f"Importing {transaction}...")

            if (not one_by_one or one_by_one
                    and click.confirm(f"Continue?", default=True)):
                result_msg = RESULT_OK
                month_billing = billing_book.get_month_billing(
                    month=transaction.created.month)
                if unambiguous_only and len(transaction.matching_types) != 1:
                    click.echo("Ambiguous type. Skipped.")
                    continue

                if len(transaction.matching_types) > 1:
                    choices = "\n".join(f"{i} - {cell_type.name}"
                                        for i, cell_type in enumerate(
                                            transaction.matching_types))
                    msg = f"Transaction {transaction} can be one of: \n{choices}\n. Leave empty to skip."
                    selected_index = click.prompt(text=msg,
                                                  default="",
                                                  show_choices=True)
                    if not selected_index.strip():
                        click.echo("Skipped.")
                        continue
                    preferred_type = transaction.matching_types[int(
                        selected_index)]

                elif len(transaction.matching_types) < 1:
                    available_types = {
                        i: cell_type
                        for i, cell_type in enumerate(CellType)
                    }
                    choices = "\n".join(
                        f"{i} - {cell_type.name}"
                        for i, cell_type in available_types.items())
                    msg = f"Can't determine good type for {transaction}. Choose one of: \n{choices}\n. Leave empty to skip."
                    selected_index = click.prompt(text=msg,
                                                  default="",
                                                  show_choices=True)
                    if not selected_index.strip():
                        click.echo("Skipped.")
                        continue
                    preferred_type = available_types[int(selected_index)]

                else:
                    preferred_type = transaction.good_type

                try:
                    month_billing.import_transaction(
                        transaction,
                        note_threshold=note_threshold,
                        preferred_type=preferred_type,
                    )
                except ValueError as e:
                    result_msg = RESULT_WARNING.format(e)

                except Exception as e:
                    result_msg = RESULT_ERROR.format(e)

                click.echo(result_msg)
                transaction.has_receipt = True
    finally:
        click.echo("Updating the history spreadsheet...")
        history.post_to_spreadsheet()
        click.echo(RESULT_OK)
Example 36
def up(ctx, targets, run_id, tx_id, inventory_format, ignore_failed_hooks,
       no_hooks, disable_uhash, env_vars, use_shell, no_progress):
    """
    Provisions nodes from the given target(s) in the given PinFile.

    The `run_id` requires an associated target, where the `tx_id` will look up
    the targets from the specified transaction.

    The data from the targets is obtained from the PinFile (default).
    By setting `use_rundb_for_actions = True` in linchpin.conf, any
    up transaction which use the `-r/--run_id` or `-t/--tx_id` option will
    obtain target data from the RunDB.

    targets:    Provision ONLY the listed target(s). If omitted, ALL targets in
    the appropriate PinFile will be provisioned.

    run-id:     Use the data from the provided run_id value
    """

    vault_pass = os.environ.get('VAULT_PASSWORD', '')

    ctx.env_vars = env_vars

    if ctx.ask_vault_pass:
        vault_pass = click.prompt("enter vault password", hide_input=True)

    ctx.set_evar('vault_pass', vault_pass)

    if ignore_failed_hooks:
        ctx.set_cfg("hook_flags", "ignore_failed_hooks", ignore_failed_hooks)
    if no_hooks:
        ctx.set_cfg("hook_flags", "no_hooks", no_hooks)
    if no_progress:
        ctx.set_cfg("progress_bar", "no_progress", str(no_progress))
    if disable_uhash:
        ctx.set_evar("disable_uhash_targets", disable_uhash.split(','))

    if use_shell:
        ctx.set_cfg("ansible", "use_shell", use_shell)

    if tx_id:
        try:
            return_code, results = lpcli.lp_up(targets=targets,
                                               tx_id=tx_id,
                                               inv_f=inventory_format)
            _handle_results(ctx, results, return_code)
        except LinchpinError as e:
            ctx.log_state(e)
            sys.exit(1)
    else:  # if tx_id is not passed, use run_id as a baseline
        if (not len(targets) or len(targets) > 1) and run_id:
            raise click.UsageError("A single target is required when calling"
                                   " up with `--run_id` option")
        try:
            return_code, results = lpcli.lp_up(targets=targets,
                                               run_id=run_id,
                                               tx_id=tx_id,
                                               inv_f=inventory_format,
                                               env_vars=env_vars)
            _handle_results(ctx, results, return_code)
        except LinchpinError as e:
            ctx.log_state(e)
            sys.exit(1)
def collect_hosts(oo_cfg,
                  existing_env=False,
                  masters_set=False,
                  print_summary=True):
    """
        Collect host information from user. This will later be filled in using
        Ansible.

        Returns: a list of host information collected from the user
    """
    click.clear()
    click.echo('*** Host Configuration ***')
    message = """
You must now specify the hosts that will compose your OpenShift cluster.

Please enter an IP address or hostname to connect to for each system in the
cluster. You will then be prompted to identify what role you want this system to
serve in the cluster.

OpenShift masters serve the API and web console and coordinate the jobs to run
across the environment. Optionally, you can specify multiple master systems for
a high-availability (HA) deployment. If you choose an HA deployment, then you
are prompted to identify a *separate* system to act as the load balancer for
your cluster once you define all masters and nodes.

Any masters configured as part of this installation process are also
configured as nodes. This enables the master to proxy to pods
from the API. By default, this node is unschedulable, but this can be changed
after installation with the 'oadm manage-node' command.

OpenShift nodes provide the runtime environments for containers. They host the
required services to be managed by the master.

http://docs.openshift.com/enterprise/latest/architecture/infrastructure_components/kubernetes_infrastructure.html#master
http://docs.openshift.com/enterprise/latest/architecture/infrastructure_components/kubernetes_infrastructure.html#node
    """
    click.echo(message)

    hosts = []
    roles = set(['master', 'node', 'storage', 'etcd'])
    more_hosts = True
    num_masters = 0
    while more_hosts:
        host_props = {}
        host_props['roles'] = []
        host_props['connect_to'] = click.prompt(
            'Enter hostname or IP address',
            value_proc=validate_prompt_hostname)

        if not masters_set:
            if click.confirm('Will this host be an OpenShift master?'):
                host_props['roles'].append('master')
                host_props['roles'].append('etcd')
                num_masters += 1

                if oo_cfg.settings['variant_version'] == '3.0':
                    masters_set = True
        host_props['roles'].append('node')

        host_props['containerized'] = False
        if oo_cfg.settings['variant_version'] != '3.0':
            rpm_or_container = \
                click.prompt('Will this host be RPM or Container based (rpm/container)?',
                             type=click.Choice(['rpm', 'container']),
                             default='rpm')
            if rpm_or_container == 'container':
                host_props['containerized'] = True

        host_props['new_host'] = existing_env

        host = Host(**host_props)

        hosts.append(host)

        if print_summary:
            print_installation_summary(hosts,
                                       oo_cfg.settings['variant_version'])

        # If we have one master, this is enough for an all-in-one deployment,
        # thus we can start asking if you want to proceed. Otherwise we assume
        # you must.
        if masters_set or num_masters != 2:
            more_hosts = click.confirm('Do you want to add additional hosts?')

    if num_masters > 2:
        master_lb = collect_master_lb(hosts)
        if master_lb:
            hosts.append(master_lb)
            roles.add('master_lb')
    else:
        set_cluster_hostname(oo_cfg)

    if not existing_env:
        collect_storage_host(hosts)

    return hosts, roles
Example 38
def dict_prompt(data, key, default=''):
    data[key] = click.prompt(key, default=data.get(key, default))
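
A short usage sketch for dict_prompt (hypothetical data, not from the original project):

data = {'name': 'example-service'}
dict_prompt(data, 'name')           # prompts "name", defaulting to the current value
dict_prompt(data, 'port', '8080')   # prompts "port", defaulting to '8080'
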
Example 39
def uninstall(package_name: str, verbose: bool, debug: bool, no_color: bool,
              logfile: str, yes: bool, silent: bool, python: bool,
              no_cache: bool):

    metadata = generate_metadata(None, silent, verbose, debug, no_color, yes,
                                 logfile, None, None)

    super_cache = check_supercache_valid()

    if no_cache:
        super_cache = False

    if logfile:
        logfile = logfile.replace('.txt', '.log')
        createConfig(logfile, logging.INFO, 'Install')

    if python:

        flags = []

        package_names = package_name.split(',')

        for name in package_names:
            handle_python_package(name, 'uninstall', metadata)

        sys.exit()

    status = 'Initializing'
    setup_name = ''
    keyboard.add_hotkey('ctrl+c',
                        lambda: handle_exit(status, setup_name, metadata))

    packages = package_name.split(',')

    if super_cache:
        res, time = handle_cached_request()

    else:
        status = 'Networking'
        write_verbose('Sending GET Request To /rapidquery/packages', metadata)
        write_debug('Sending GET Request To /rapidquery/packages', metadata)
        log_info('Sending GET Request To /rapidquery/packages', logfile)
        res, time = send_req_all()
        res = json.loads(res)
    correct_names = get_correct_package_names(res)
    corrected_package_names = []

    for name in packages:
        if name in correct_names:
            corrected_package_names.append(name)
        else:
            corrections = difflib.get_close_matches(name, correct_names)
            if corrections:
                if silent:
                    click.echo(
                        click.style(
                            'Incorrect / Invalid Package Name Entered. Aborting Uninstallation.',
                            fg='red'))
                    log_info(
                        'Incorrect / Invalid Package Name Entered. Aborting Uninstallation',
                        logfile)
                    handle_exit(status, setup_name, metadata)

                if yes:
                    write(f'Autocorrecting To {corrections[0]}', 'green',
                          metadata)
                    log_info(f'Autocorrecting To {corrections[0]}', logfile)
                    write(f'Successfully Autocorrected To {corrections[0]}',
                          'green', metadata)
                    log_info(f'Successfully Autocorrected To {corrections[0]}',
                             logfile)
                    corrected_package_names.append(corrections[0])

                else:
                    write(f'Autocorrecting To {corrections[0]}',
                          'bright_magenta', metadata)
                    write_verbose(f'Autocorrecting To {corrections[0]}',
                                  metadata)
                    write_debug(f'Autocorrecting To {corrections[0]}',
                                metadata)
                    log_info(f'Autocorrecting To {corrections[0]}', logfile)
                    if click.prompt(
                            'Would You Like To Continue? [y/n]') == 'y':
                        package_name = corrections[0]
                        corrected_package_names.append(package_name)
                    else:
                        sys.exit()
            else:
                write(f'Could Not Find Any Packages Which Match {name}',
                      'bright_magenta', metadata)
                write_debug(f'Could Not Find Any Packages Which Match {name}',
                            metadata)
                write_verbose(
                    f'Could Not Find Any Packages Which Match {name}',
                    metadata)
                log_info(f'Could Not Find Any Packages Which Match {name}',
                         logfile)

    write_debug(install_debug_headers, metadata)
    for header in install_debug_headers:
        log_info(header, logfile)

    index = 0

    for package in corrected_package_names:
        pkg = res[package]
        packet = Packet(package, pkg['package-name'], pkg['win64'],
                        pkg['darwin'], pkg['debian'], pkg['win64-type'],
                        pkg['darwin-type'], pkg['debian-type'],
                        pkg['custom-location'], pkg['install-switches'],
                        pkg['uninstall-switches'], None)
        proc = None
        keyboard.add_hotkey('ctrl+c', lambda: kill_proc(proc, metadata))

        kill_running_proc(packet.json_name, metadata)

        if super_cache:
            write(
                f'Rapidquery Successfully SuperCached {packet.json_name} in {round(time, 6)}s',
                'bright_yellow', metadata)
            write_debug(
                f'Rapidquery Successfully SuperCached {packet.json_name} in {round(time, 9)}s',
                metadata)
            log_info(
                f'Rapidquery Successfully SuperCached {packet.json_name} in {round(time, 6)}s',
                logfile)
        else:
            write(
                f'Rapidquery Successfully Received {packet.json_name}.json in {round(time, 6)}s',
                'bright_green', metadata)
            log_info(
                f'Rapidquery Successfully Received {packet.json_name}.json in {round(time, 6)}s',
                logfile)

        # Getting UninstallString or QuietUninstallString From The Registry Search Algorithm
        write_verbose("Fetching uninstall key from the registry...", metadata)
        log_info("Fetching uninstall key from the registry...", logfile)

        start = timer()
        key = get_uninstall_key(packet.json_name)
        end = timer()

        # If The UninstallString Or QuietUninstallString Doesn't Exist
        # if not key:
        #     write_verbose('No registry keys found', verbose, no_color, silent)
        #     log_info('No registry keys found', logfile)
        #     if "uninstall-command" in pkg:
        #         if pkg['uninstall-command'] == '':
        #             write(
        #                 f'Could Not Find Any Existing Installations Of {packet.json_name}', 'yellow', no_color, silent)
        #             log_error(
        #                 f'Could Not Find Any Existing Installations Of {packet.json_name}', logfile)
        #             closeLog(logfile, 'Uninstall')
        #             index += 1
        #             continue
        #         else:
        #             write_verbose("Executing the uninstall command",
        #                           verbose, no_color, silent)
        #             log_info("Executing the uninstall command", logfile)

        #             try:
        #                 proc = Popen(shlex.split(
        #                     pkg['uninstall-command']), stdout=PIPE, stdin=PIPE, stderr=PIPE)
        #                 proc.wait()
        #                 if proc.returncode != 0:
        #                     write(f'Installation Failed, Make Sure You Accept All Prompts Asking For Admin permission', 'red', no_color, silent)
        #                     handle_exit(status, 'None', no_color, silent)
        #             except FileNotFoundError:

        #                 proc = Popen(shlex.split(
        #                     pkg['uninstall-command']), stdout=PIPE, stdin=PIPE, stderr=PIPE, shell=True)
        #                 proc.wait()
        #                 if proc.returncode != 0:
        #                     write(f'Installation Failed, Make Sure You Accept All Prompts Asking For Admin permission', 'red', no_color, silent)
        #                     handle_exit(status, 'None', no_color, silent)

        #             write(
        #                 f"Successfully Uninstalled {package_name}", "bright_magenta", no_color, silent)

        #             index += 1
        #             write_debug(
        #                 f'Terminated debugger at {strftime("%H:%M:%S")} on uninstall::completion', debug, no_color, silent)
        #             log_info(
        #                 f'Terminated debugger at {strftime("%H:%M:%S")} on uninstall::completion', logfile)
        #             closeLog(logfile, 'Uninstall')
        #             continue
        #     else:
        # write(
        #     f'Could Not Find Any Existing Installations Of {package_name}', 'yellow', no_color, silent)
        # closeLog(logfile, 'Uninstall')
        # index += 1
        # continue

        if not key:
            write(
                f'Could Not Find Any Existing Installations Of {packet.display_name}',
                'yellow', metadata)
            closeLog(logfile, 'Uninstall')
            index += 1
            continue

        write_verbose("Uninstall key found.", metadata)
        log_info("Uninstall key found.", logfile)

        write(
            f'Successfully Retrieved Uninstall Key In {round(end - start, 4)}s',
            'cyan', metadata)
        log_info(
            f'Successfully Retrieved Uninstall Key In {round(end - start, 4)}s',
            logfile)

        command = ''

        # Key Can Be A List Or A Dictionary Based On Results

        if isinstance(key, list):
            if key:
                key = key[0]

        write(f'Uninstalling {packet.display_name}', 'green', metadata)

        # If QuietUninstallString Exists (Preferable)
        if 'QuietUninstallString' in key:
            command = key['QuietUninstallString']
            command = command.replace('/I', '/X')
            command = command.replace('/quiet', '/passive')

            additional_switches = None
            if packet.uninstall_switches:
                write_verbose("Adding additional uninstall switches", metadata)
                log_info("Adding additional uninstall switches", logfile)
                additional_switches = packet.uninstall_switches

            if additional_switches:
                for switch in additional_switches:
                    command += ' ' + switch

            write_verbose("Executing the quiet uninstall command", metadata)
            log_info("Executing the quiet uninstall command", logfile)

            run_cmd(command, metadata, 'uninstallation')

            write(f"Successfully Uninstalled {packet.display_name}",
                  "bright_magenta", metadata)

            write_verbose("Uninstallation completed.", metadata)
            log_info("Uninstallation completed.", logfile)

            index += 1
            write_debug(
                f'Terminated debugger at {strftime("%H:%M:%S")} on uninstall::completion',
                metadata)
            log_info(
                f'Terminated debugger at {strftime("%H:%M:%S")} on uninstall::completion',
                logfile)
            closeLog(logfile, 'Uninstall')

        # If Only UninstallString Exists (Not Preferable)
        if 'UninstallString' in key and 'QuietUninstallString' not in key:
            command = key['UninstallString']
            command = command.replace('/I', '/X')
            command = command.replace('/quiet', '/passive')
            command = f'"{command}"'
            for switch in packet.uninstall_switches:
                command += f' {switch}'

            # Run The UninstallString
            write_verbose("Executing the Uninstall Command", metadata)
            log_info("Executing the Uninstall Command", logfile)

            run_cmd(command, metadata, 'uninstallation')

            write(f'Successfully Uninstalled {packet.display_name}',
                  'bright_magenta', metadata)
            write_verbose("Uninstallation completed.", metadata)
            log_info("Uninstallation completed.", logfile)
            index += 1
            write_debug(
                f'Terminated debugger at {strftime("%H:%M:%S")} on uninstall::completion',
                metadata)
            log_info(
                f'Terminated debugger at {strftime("%H:%M:%S")} on uninstall::completion',
                logfile)
            closeLog(logfile, 'Uninstall')
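
The flow above depends on get_uninstall_key returning the UninstallString / QuietUninstallString values registered for the package. A rough, Windows-only sketch of that kind of registry lookup with the standard winreg module (the function name and the single HKLM view searched here are simplifying assumptions, not electric's actual implementation):

import winreg

UNINSTALL_PATH = r'SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall'


def find_uninstall_keys(display_name_fragment):
    """Return the value dicts of entries whose DisplayName contains the fragment."""
    results = []
    root = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, UNINSTALL_PATH)
    for index in range(winreg.QueryInfoKey(root)[0]):        # number of subkeys
        subkey = winreg.OpenKey(root, winreg.EnumKey(root, index))
        values = {}
        for value_index in range(winreg.QueryInfoKey(subkey)[1]):  # number of values
            name, data, _type = winreg.EnumValue(subkey, value_index)
            values[name] = data
        if display_name_fragment.lower() in str(values.get('DisplayName', '')).lower():
            results.append(values)
    return results
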
Esempio n. 40
0
    def connect(self,
                database='',
                host='',
                user='',
                port='',
                passwd='',
                dsn='',
                **kwargs):
        # Connect to the database.

        if not user:
            user = getuser()

        if not database:
            database = user

        # If password prompt is not forced but no password is provided, try
        # getting it from environment variable.
        if not self.force_passwd_prompt and not passwd:
            passwd = os.environ.get('PGPASSWORD', '')

        # Prompt for a password immediately if requested via the -W flag. This
        # avoids wasting time trying to connect to the database and catching a
        # no-password exception.
        # If we successfully parsed a password from a URI, there's no need to
        # prompt for it, even with the -W flag
        if self.force_passwd_prompt and not passwd:
            passwd = click.prompt('Password for %s' % user,
                                  hide_input=True,
                                  show_default=False,
                                  type=str)

        # Prompt for a password after 1st attempt to connect without a password
        # fails. Don't prompt if the -w flag is supplied
        auto_passwd_prompt = not passwd and not self.never_passwd_prompt

        # Attempt to connect to the database.
        # Note that passwd may be empty on the first attempt. If connection
        # fails because of a missing password, but we're allowed to prompt for
        # a password (no -w flag), prompt for a passwd and try again.
        try:
            try:
                pgexecute = PGExecute(database, user, passwd, host, port, dsn,
                                      **kwargs)
            except (OperationalError, InterfaceError) as e:
                if ('no password supplied' in utf8tounicode(e.args[0])
                        and auto_passwd_prompt):
                    passwd = click.prompt('Password for %s' % user,
                                          hide_input=True,
                                          show_default=False,
                                          type=str)
                    pgexecute = PGExecute(database, user, passwd, host, port,
                                          dsn, **kwargs)
                else:
                    raise e

        except Exception as e:  # Connecting to a database could fail.
            self.logger.debug('Database connection failed: %r.', e)
            self.logger.error("traceback: %r", traceback.format_exc())
            click.secho(str(e), err=True, fg='red')
            exit(1)

        self.pgexecute = pgexecute
Esempio n. 41
0
def conectar_servidor(servidor):
    servidor = click.prompt("Servidor")
    click.echo("{}".format(servidor))
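
As written, the servidor argument is discarded and replaced by whatever the prompt returns. If the intent is to offer the caller-supplied value as a default, click.prompt supports that directly; a small variant (an assumption about the intended behaviour):

import click


def conectar_servidor(servidor):
    # Offer the value passed in as the default instead of throwing it away.
    servidor = click.prompt("Servidor", default=servidor)
    click.echo("{}".format(servidor))
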
Esempio n. 42
0
def install(
    package_name: str,
    verbose: bool,
    debug: bool,
    no_progress: bool,
    no_color: bool,
    logfile: str,
    yes: bool,
    silent: bool,
    python: bool,
    install_directory: str,
    virus_check: bool,
    no_cache: bool,
    sync: bool,
    reduce: bool,
    rate_limit: int,
    node: bool,
):
    if logfile:
        logfile = logfile.replace('=', '')
    metadata = generate_metadata(no_progress, silent, verbose, debug, no_color,
                                 yes, logfile, virus_check, reduce)

    if logfile:
        logfile = logfile.replace('.txt', '.log')
        createConfig(logfile, logging.INFO, 'Install')

    if python:

        package_names = package_name.split(',')

        for name in package_names:
            handle_python_package(name, 'install', metadata)

        sys.exit()

    if node:
        package_names = package_name.split(',')
        for name in package_names:
            handle_node_package(name, 'install', metadata)

        sys.exit()

    super_cache = check_supercache_valid()
    if no_cache:
        super_cache = False

    status = 'Initializing'
    setup_name = ''
    keyboard.add_hotkey('ctrl+c',
                        lambda: handle_exit(status, setup_name, metadata))

    packages = package_name.strip(' ').split(',')

    if super_cache:
        res, time = handle_cached_request()

    else:
        status = 'Networking'
        write_verbose('Sending GET Request To /packages', metadata)
        write_debug('Sending GET Request To /packages', metadata)
        log_info('Sending GET Request To /packages', logfile)
        res, time = send_req_all()
        res = json.loads(res)
        update_supercache(res)
        del res['_id']

    correct_names = get_correct_package_names(res)
    corrected_package_names = []

    for name in packages:
        if name in correct_names:
            corrected_package_names.append(name)
        else:
            corrections = difflib.get_close_matches(name, correct_names)
            if corrections:
                if silent:
                    click.echo(
                        click.style(
                            'Incorrect / Invalid Package Name Entered. Aborting Installation.',
                            fg='red'))
                    log_info(
                        'Incorrect / Invalid Package Name Entered. Aborting Installation',
                        logfile)
                    handle_exit(status, setup_name, metadata)

                if yes:
                    write(f'Autocorrecting To {corrections[0]}', 'green',
                          metadata)
                    log_info(f'Autocorrecting To {corrections[0]}', logfile)
                    write(f'Successfully Autocorrected To {corrections[0]}',
                          'green', metadata)
                    log_info(f'Successfully Autocorrected To {corrections[0]}',
                             logfile)
                    corrected_package_names.append(corrections[0])

                else:
                    write(f'Autocorrecting To {corrections[0]}',
                          'bright_magenta', metadata)
                    write_verbose(f'Autocorrecting To {corrections[0]}',
                                  metadata)
                    write_debug(f'Autocorrecting To {corrections[0]}',
                                metadata)
                    log_info(f'Autocorrecting To {corrections[0]}', logfile)
                    if click.prompt(
                            'Would You Like To Continue? [y/n]') == 'y':
                        package_name = corrections[0]
                        corrected_package_names.append(package_name)
                    else:
                        sys.exit()
            else:
                write(f'Could Not Find Any Packages Which Match {name}',
                      'bright_magenta', metadata)
                write_debug(f'Could Not Find Any Packages Which Match {name}',
                            metadata)
                write_verbose(
                    f'Could Not Find Any Packages Which Match {name}',
                    metadata)
                log_info(f'Could Not Find Any Packages Which Match {name}',
                         logfile)

    write_debug(install_debug_headers, metadata)
    for header in install_debug_headers:
        log_info(header, logfile)

    index = 0

    if not sync:
        if len(corrected_package_names) > 5:
            write(
                'electric Doesn\'t Support More Than 5 Parallel Downloads At Once Currently. Use The --sync Flag To Synchronously Download The Packages',
                'red', metadata)
        if len(corrected_package_names) > 1:
            packets = []
            for package in corrected_package_names:
                pkg = res[package]
                custom_dir = None
                if install_directory:
                    custom_dir = install_directory + f'\\{pkg["package-name"]}'
                else:
                    custom_dir = install_directory
                packet = Packet(package, pkg['package-name'], pkg['win64'],
                                pkg['darwin'], pkg['debian'],
                                pkg['win64-type'], pkg['darwin-type'],
                                pkg['debian-type'], pkg['custom-location'],
                                pkg['install-switches'],
                                pkg['uninstall-switches'], custom_dir)
                installation = find_existing_installation(
                    package, packet.json_name)
                if installation:
                    write_debug(
                        f"Aborting Installation As {packet.json_name} is already installed.",
                        metadata)
                    write_verbose(
                        f"Found an existing installation of => {packet.json_name}",
                        metadata)
                    write(
                        f"Found an existing installation {packet.json_name}.",
                        'bright_yellow', metadata)
                    installation_continue = click.prompt(
                        f'Would you like to reinstall {packet.json_name} [y/n]'
                    )
                    if installation_continue == 'y' or installation_continue == 'yes' or yes:
                        os.system(f'electric uninstall {packet.json_name}')
                        os.system(f'electric install {packet.json_name}')
                        return
                    else:
                        handle_exit(status, setup_name, metadata)

                write_verbose(f"Package to be installed: {packet.json_name}",
                              metadata)
                log_info(f"Package to be installed: {packet.json_name}",
                         logfile)

                write_verbose(
                    f'Finding closest match to {packet.json_name}...',
                    metadata)
                log_info(f'Finding closest match to {packet.json_name}...',
                         logfile)
                packets.append(packet)

            if super_cache:
                write(
                    f'Rapidquery Successfully SuperCached Packages in {round(time, 6)}s',
                    'bright_yellow', metadata)
                write_debug(
                    f'Rapidquery Successfully SuperCached Packages in {round(time, 9)}s',
                    metadata)
                log_info(
                    f'Rapidquery Successfully SuperCached Packages in {round(time, 6)}s',
                    logfile)
            else:
                write(
                    f'Rapidquery Successfully Received packages.json in {round(time, 6)}s',
                    'bright_yellow', metadata)
                write_debug(
                    f'Rapidquery Successfully Received packages.json in {round(time, 9)}s',
                    metadata)
                log_info(
                    f'Rapidquery Successfully Received packages.json in {round(time, 6)}s',
                    logfile)

                write_verbose('Generating system download path...', metadata)
                log_info('Generating system download path...', logfile)

            manager = PackageManager(packets, metadata)
            paths = manager.handle_multi_download()
            log_info('Finished Rapid Download...', logfile)
            log_info(
                'Using Rapid Install To Complete Setup, Accept Prompts Asking For Admin Permission...',
                logfile)
            manager.handle_multi_install(paths)
            return

    for package in corrected_package_names:
        pkg = res[package]
        packet = Packet(package, pkg['package-name'], pkg['win64'],
                        pkg['darwin'], pkg['debian'], pkg['win64-type'],
                        pkg['darwin-type'], pkg['debian-type'],
                        pkg['custom-location'], pkg['install-switches'],
                        pkg['uninstall-switches'], install_directory)
        installation = find_existing_installation(package, packet.json_name)

        if installation:
            write_debug(
                f"Aborting Installation As {packet.json_name} is already installed.",
                metadata)
            write_verbose(
                f"Found an existing installation of => {packet.json_name}",
                metadata)
            write(f"Found an existing installation {packet.json_name}.",
                  'bright_yellow', metadata)
            installation_continue = click.prompt(
                f'Would you like to reinstall {packet.json_name} [y/n]')
            if installation_continue == 'y' or installation_continue == 'yes' or yes:
                os.system(f'electric uninstall {packet.json_name}')
                os.system(f'electric install {packet.json_name}')
                return
            else:
                handle_exit(status, setup_name, metadata)
        write_verbose(f"Package to be installed: {packet.json_name}", metadata)
        log_info(f"Package to be installed: {packet.json_name}", logfile)

        write_verbose(f'Finding closest match to {packet.json_name}...',
                      metadata)
        log_info(f'Finding closest match to {packet.json_name}...', logfile)

        if index == 0:
            if super_cache:
                write(
                    f'Rapidquery Successfully SuperCached {packet.json_name} in {round(time, 6)}s',
                    'bright_yellow', metadata)
                write_debug(
                    f'Rapidquery Successfully SuperCached {packet.json_name} in {round(time, 9)}s',
                    metadata)
                log_info(
                    f'Rapidquery Successfully SuperCached {packet.json_name} in {round(time, 6)}s',
                    logfile)
            else:
                write(
                    f'Rapidquery Successfully Received {packet.json_name}.json in {round(time, 6)}s',
                    'bright_yellow', metadata)
                write_debug(
                    f'Rapidquery Successfully Received {packet.json_name}.json in {round(time, 9)}s',
                    metadata)
                log_info(
                    f'Rapidquery Successfully Received {packet.json_name}.json in {round(time, 6)}s',
                    logfile)

        write_verbose('Generating system download path...', metadata)
        log_info('Generating system download path...', logfile)

        start = timer()

        status = 'Download Path'
        download_url = get_download_url(packet)
        status = 'Got Download Path'
        end = timer()

        val = round(Decimal(end) - Decimal(start), 6)
        write(f'Electrons Transferred In {val}s', 'cyan', metadata)
        log_info(f'Electrons Transferred In {val}s', logfile)

        write('Initializing Rapid Download...', 'green', metadata)
        log_info('Initializing Rapid Download...', logfile)

        # Downloading The File From Source
        write_verbose(f"Downloading from '{download_url}'", metadata)
        log_info(f"Downloading from '{download_url}'", logfile)
        status = 'Downloading'

        if rate_limit == -1:
            path = download(download_url, no_progress, silent,
                            packet.win64_type)
        else:
            bucket = TokenBucket(tokens=10 * rate_limit, fill_rate=rate_limit)

            limiter = Limiter(
                bucket=bucket,
                filename=f"{tempfile.gettempdir()}\\Setup{packet.win64_type}",
            )

            urlretrieve(
                url=download_url,
                filename=f"{tempfile.gettempdir()}\\Setup{packet.win64_type}",
                reporthook=limiter)

            path = f"{tempfile.gettempdir()}\\Setup{packet.win64_type}"

        status = 'Downloaded'

        write('\nFinished Rapid Download', 'green', metadata)
        log_info('Finished Rapid Download', logfile)

        if virus_check:
            write('Scanning File For Viruses...', 'blue', metadata)
            check_virus(path, metadata)

        write(
            'Using Rapid Install, Accept Prompts Asking For Admin Permission...',
            'cyan', metadata)
        log_info(
            'Using Rapid Install To Complete Setup, Accept Prompts Asking For Admin Permission...',
            logfile)
        if debug:
            click.echo('\n')
        write_debug(
            f'Installing {packet.json_name} through Setup{packet.win64}',
            metadata)
        log_info(f'Installing {packet.json_name} through Setup{packet.win64}',
                 logfile)
        start_snap = get_environment_keys()
        status = 'Installing'
        # Running The Installer silently And Completing Setup
        install_package(path, packet, metadata)

        status = 'Installed'
        final_snap = get_environment_keys()

        if final_snap.env_length > start_snap.env_length or final_snap.sys_length > start_snap.sys_length:
            write('Refreshing Environment Variables...', 'green', metadata)
            start = timer()
            log_info('Refreshing Environment Variables', logfile)
            write_debug(
                'Refreshing Env Variables, Calling Batch Script At scripts/refreshvars.cmd',
                metadata)
            write_verbose('Refreshing Environment Variables', metadata)
            refresh_environment_variables()
            end = timer()
            write_debug(
                f'Successfully Refreshed Environment Variables in {round(end - start)} seconds',
                metadata)

        write(f'Successfully Installed {packet.display_name}!',
              'bright_magenta', metadata)
        log_info(f'Successfully Installed {packet.display_name}!', logfile)

        if metadata.reduce_package:
            write('Successfully Cleaned Up Installer From Temp Directory...',
                  'green', metadata)
            os.remove(path)

        write_verbose('Installation and setup completed.', metadata)
        log_info('Installation and setup completed.', logfile)
        write_debug(
            f'Terminated debugger at {strftime("%H:%M:%S")} on install::completion',
            metadata)
        log_info(
            f'Terminated debugger at {strftime("%H:%M:%S")} on install::completion',
            logfile)
        closeLog(logfile, 'Install')

        index += 1
    end = timer()
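
In the rate-limited branch above, a Limiter instance is handed to urlretrieve as its reporthook. urllib.request.urlretrieve calls that hook as hook(block_count, block_size, total_size) after each chunk, which is also all a plain progress display needs; a minimal hook of that shape (just an illustration of the callback contract, not electric's Limiter):

import sys
from urllib.request import urlretrieve


def progress_hook(block_count, block_size, total_size):
    # total_size is -1 when the server sends no Content-Length header.
    downloaded = block_count * block_size
    if total_size > 0:
        percent = min(100, downloaded * 100 // total_size)
        sys.stdout.write('\rDownloaded %d%%' % percent)
    else:
        sys.stdout.write('\rDownloaded %d bytes' % downloaded)
    sys.stdout.flush()


# urlretrieve('https://example.com/Setup.exe', 'Setup.exe', reporthook=progress_hook)
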
Esempio n. 43
0
def configure():
    """ Perform initial setup. """
    config = {'sync_interval': 300}
    generate_key = not click.confirm("Do you already have an API key for "
                                     "zotero-cli?")
    if generate_key:
        (config['api_key'],
         config['library_id']) = ZoteroBackend.create_api_key()
    else:
        config['api_key'] = click.prompt(
            "Please enter the API key for zotero-cli")
        config['library_id'] = click.prompt("Please enter your library ID")
    sync_method = select([("local", "Local Zotero storage"),
                          ("zotcloud", "Use Zotero file cloud"),
                          ("webdav", "Use WebDAV storage")],
                         default=1,
                         required=True,
                         prompt="How do you want to access files for reading?")
    if sync_method == "local":
        storage_dirs = tuple(find_storage_directories())
        if storage_dirs:
            options = [(name, "{} ({})".format(click.style(name, fg="cyan"),
                                               path))
                       for name, path in storage_dirs]
            config['storage_dir'] = select(
                options,
                required=False,
                prompt="Please select a storage directory (-1 to enter "
                "manually)")
        if config.get('storage_dir') is None:
            click.echo(
                "Could not automatically locate a Zotero storage directory.")
            while True:
                storage_dir = click.prompt(
                    "Please enter the path to your Zotero storage directory",
                    default='')
                if not storage_dir:
                    storage_dir = None
                    break
                elif not os.path.exists(storage_dir):
                    click.echo("Directory does not exist!")
                elif not re.match(r'.*storage/?', storage_dir):
                    click.echo("Path must point to a `storage` directory!")
                else:
                    config['storage_dir'] = storage_dir
                    break
    elif sync_method == "webdav":
        while True:
            if not config.get('webdav_path'):
                config['webdav_path'] = click.prompt(
                    "Please enter the WebDAV URL (without '/zotero'!)")
            if not config.get('webdav_user'):
                config['webdav_user'] = click.prompt(
                    "Please enter the WebDAV user name")
                config['webdav_pass'] = click.prompt(
                    "Please enter the WebDAV password")
            try:
                test_resp = requests.options(config['webdav_path'],
                                             auth=(config['webdav_user'],
                                                   config['webdav_pass']))
            except requests.ConnectionError:
                click.echo("Invalid WebDAV URL, could not reach server.")
                config['webdav_path'] = None
                continue
            if test_resp.status_code == 200:
                break
            elif test_resp.status_code == 404:
                click.echo("Invalid WebDAV path, does not exist.")
                config['webdav_path'] = None
            elif test_resp.status_code == 401:
                click.echo("Bad credentials.")
                config['webdav_user'] = None
            else:
                click.echo("Unknown error, please check your settings.")
                click.echo("Server response code was: {}".format(
                    test_resp.status_code))
                config['webdav_path'] = None
                config['webdav_user'] = None
    config['sync_method'] = sync_method

    markup_formats = pypandoc.get_pandoc_formats()[0]
    config['note_format'] = select(zip(markup_formats, markup_formats),
                                   default=markup_formats.index('markdown'),
                                   prompt="Select markup format for notes")
    save_config(config)
    zot = ZoteroBackend(config['api_key'], config['library_id'], 'user')
    click.echo("Initializing local index...")
    num_synced = zot.synchronize()
    click.echo("Synchronized {} items.".format(num_synced))
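
The configure flow above leans on a select(options, default=..., required=..., prompt=...) helper that is not shown. A plausible minimal version built on click.prompt, matching how it is called here (this is a guess at its behaviour, not zotero-cli's actual helper):

import click


def select(options, default=None, required=True, prompt="Select an option"):
    """Print numbered (key, label) options and return the chosen key (or None)."""
    options = list(options)
    for index, (_key, label) in enumerate(options):
        click.echo("  {}: {}".format(index, label))
    while True:
        choice = click.prompt(prompt, type=int, default=default)
        if choice == -1 and not required:
            return None
        if 0 <= choice < len(options):
            return options[choice][0]
        click.echo("Please pick a number between 0 and {}.".format(len(options) - 1))
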
Esempio n. 44
0
def odoo(crear_usuario):
    email = click.prompt('Usuario o Correo')
    password = click.prompt("Contraseña")
    click.echo("%s %s " % (email, password))
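
Echoing a password back to the terminal is rarely wanted; click.prompt can mask the input and ask for confirmation instead. A small hedged variant of the same prompts:

import click


def odoo(crear_usuario):
    email = click.prompt('Usuario o Correo')
    password = click.prompt("Contraseña", hide_input=True, confirmation_prompt=True)
    click.echo("%s" % email)  # deliberately not echoing the password
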
Esempio n. 45
0
def _execute_backfill_command_at_location(cli_args,
                                          print_fn,
                                          instance,
                                          workspace,
                                          repo_location,
                                          using_graph_job_op_apis=False):
    external_repo = get_external_repository_from_repo_location(
        repo_location, cli_args.get("repository"))

    external_pipeline = get_external_pipeline_or_job_from_external_repo(
        external_repo, cli_args.get("pipeline_or_job"))

    noprompt = cli_args.get("noprompt")

    pipeline_partition_set_names = {
        external_partition_set.name: external_partition_set
        for external_partition_set in
        external_repo.get_external_partition_sets()
        if external_partition_set.pipeline_name == external_pipeline.name
    }

    if not pipeline_partition_set_names:
        raise click.UsageError(
            "No partition sets found for pipeline/job `{}`".format(
                external_pipeline.name))
    partition_set_name = cli_args.get("partition_set")
    if not partition_set_name:
        if len(pipeline_partition_set_names) == 1:
            partition_set_name = next(iter(
                pipeline_partition_set_names.keys()))
        elif noprompt:
            raise click.UsageError(
                "No partition set specified (see option `--partition-set`)")
        else:
            partition_set_name = click.prompt(
                "Select a partition set to use for backfill: {}".format(
                    ", ".join(x for x in pipeline_partition_set_names.keys())))

    partition_set = pipeline_partition_set_names.get(partition_set_name)

    if not partition_set:
        raise click.UsageError(
            "No partition set found named `{}`".format(partition_set_name))

    run_tags = get_tags_from_args(cli_args)

    repo_handle = RepositoryHandle(
        repository_name=external_repo.name,
        repository_location=repo_location,
    )

    try:
        partition_names_or_error = repo_location.get_external_partition_names(
            repo_handle,
            partition_set_name,
        )
    except Exception:
        error_info = serializable_error_info_from_exc_info(sys.exc_info())
        raise DagsterBackfillFailedError(
            "Failure fetching partition names: {error_message}".format(
                error_message=error_info.message),
            serialized_error_info=error_info,
        )

    partition_names = gen_partition_names_from_args(
        partition_names_or_error.partition_names, cli_args)

    # Print backfill info
    print_fn("\n Pipeline/Job: {}".format(external_pipeline.name))
    if not using_graph_job_op_apis:
        print_fn("Partition set: {}".format(partition_set_name))
    print_fn("   Partitions: {}\n".format(
        print_partition_format(partition_names, indent_level=15)))

    # Confirm and launch
    if noprompt or click.confirm(
            "Do you want to proceed with the backfill ({} partitions)?".format(
                len(partition_names))):

        print_fn("Launching runs... ")

        backfill_id = make_new_backfill_id()
        backfill_job = PartitionBackfill(
            backfill_id=backfill_id,
            partition_set_origin=partition_set.get_external_origin(),
            status=BulkActionStatus.REQUESTED,
            partition_names=partition_names,
            from_failure=False,
            reexecution_steps=None,
            tags=run_tags,
            backfill_timestamp=pendulum.now("UTC").timestamp(),
        )
        try:
            partition_execution_data = (
                repo_location.get_external_partition_set_execution_param_data(
                    repository_handle=repo_handle,
                    partition_set_name=partition_set_name,
                    partition_names=partition_names,
                ))
        except Exception:
            error_info = serializable_error_info_from_exc_info(sys.exc_info())
            instance.add_backfill(
                backfill_job.with_status(
                    BulkActionStatus.FAILED).with_error(error_info))
            return print_fn("Backfill failed: {}".format(error_info))

        assert isinstance(partition_execution_data,
                          ExternalPartitionSetExecutionParamData)

        for partition_data in partition_execution_data.partition_data:
            pipeline_run = create_backfill_run(
                instance,
                repo_location,
                external_pipeline,
                partition_set,
                backfill_job,
                partition_data,
            )
            if pipeline_run:
                instance.submit_run(pipeline_run.run_id, workspace)

        instance.add_backfill(
            backfill_job.with_status(BulkActionStatus.COMPLETED))

        print_fn("Launched backfill job `{}`".format(backfill_id))

    else:
        print_fn("Aborted!")
Esempio n. 46
0
def menu():
    """ A simple menu to easily iterate over API """

    # Global Variables
    user = load_user_data()
    TOKEN = user.get('token')

    API = ClientAPI()

    menu = 'main'
    while True:
        if menu == 'main':
            click.clear()
            if TOKEN is None:
                click.echo('Main menu:')
                click.echo('    r: get token')
                click.echo('    q: quit')
            else:
                click.echo('Main menu:')
                click.echo('    s: solve problems')
                click.echo('    g: get status')
                click.echo('    r: get token')
                click.echo('    i: get info')
                click.echo('    q: quit')

            char = click.getchar()
            if char == 'r':
                menu = 'register'
            elif char == 's':
                __phaseI(API, TOKEN)
                __phaseII(API, TOKEN)
                __phaseIII(API, TOKEN)
                __phaseIV(API, TOKEN)
                click.pause()

            elif char == 'g':
                get_status(API, TOKEN)

            elif char == 'i':
                get_info(user)

            elif char == 'q':
                menu = 'quit'

            else:
                click.echo('Invalid input')

        elif menu == 'register':
            click.clear()
            click.echo('Register menu:')
            if TOKEN is None:
                email = click.prompt('   Please enter the registered email')
                github = click.prompt('   Please enter the github repo')
                TOKEN = get_token(API, email, github)
                user = {'email': email, 'github': github, 'token': TOKEN}
                menu = 'main'
            else:
                click.echo('    n: get new token')
                click.echo('    b: back')
                char = click.getchar()
                if char == 'n':
                    TOKEN = None
                elif char == 'b':
                    menu = 'main'
                else:
                    click.echo('Invalid input')

        elif menu == 'quit':
            save_user_data(user)
            click.echo("I'll save your progress! See Ya!")
            return
Esempio n. 47
0
 def decrease_asset_qty(self):
     asset_id = click.prompt("AssetID : asset#domain")
     qty = click.prompt("Qty To Subtract")
     self.iroha_client.subtract_asset_qty(asset_id, qty)
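
click.prompt returns a string unless a type is given, so the quantity above reaches the client as text. If subtract_asset_qty expects a number (an assumption about the Iroha client's API), the conversion can live in the prompt itself, which also makes click re-prompt on non-numeric input:

import click

qty = click.prompt("Qty To Subtract", type=int)  # re-prompts until an integer is entered
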
Esempio n. 48
0
def ask_for_confirm(msg):
    confirm = click.prompt(msg, default='Y')
    return confirm.lower() in ('y', 'yes')
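
click also ships click.confirm, which performs the same yes/no normalisation itself, so this wrapper can be reduced to a one-liner. A roughly equivalent sketch (note click.confirm re-prompts on unrecognised input rather than returning False):

import click


def ask_for_confirm(msg):
    # Empty input falls back to the default, mirroring default='Y' above.
    return click.confirm(msg, default=True)
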
Esempio n. 49
0
 def create_new_asset(self):
     asset = click.prompt("New Asset Name Only")
     domain = click.prompt("Domain Name Only")
     precision = click.prompt("Precision", type=int)
     self.iroha_client.create_new_asset(asset, domain, precision)
Esempio n. 50
0
def __call_for_login(ctx):
    ctx.invoke(login, host=click.prompt('Host'), user=click.prompt('User'), password=click.prompt('Password', hide_input=True))
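
ctx.invoke only works when a click Context is available, i.e. when this helper runs inside another command of the same application. A minimal sketch of how the wiring could look (the group, the shell command and the login signature are assumptions for illustration):

import click


@click.group()
def cli():
    pass


@cli.command()
@click.option('--host')
@click.option('--user')
@click.option('--password')
def login(host, user, password):
    click.echo('Logging in to %s as %s' % (host, user))


@cli.command()
@click.pass_context
def shell(ctx):
    # Prompt for credentials, then delegate to the login command.
    ctx.invoke(login,
               host=click.prompt('Host'),
               user=click.prompt('User'),
               password=click.prompt('Password', hide_input=True))


if __name__ == '__main__':
    cli()
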
Esempio n. 51
0
 def write_account_detail(self):
     account_id = click.prompt("Account To Use : Username@domain")
     key = click.prompt("Enter New Key, existing key entries will be overwritten")
     value = click.prompt("Please enter a value to set")
     self.iroha_client.set_account_detail(account_id, key, value)
Esempio n. 52
0
 def increase_asset_qty(self):
     asset_id = click.prompt("AssetID : asset#domain")
     qty = click.prompt("Qty To Add")
     self.iroha_client.add_asset_qty(asset_id, qty)
Esempio n. 53
0
 def create_new_user_account(self):
     user_name = click.prompt("Username For New Account")
     domain = click.prompt("Domain")
     public_key = click.prompt("Public Key")
     self.iroha_client.create_new_account(user_name, domain, public_key)
Esempio n. 54
0
 def grant_acc_read_permission(self):
     account_id = click.prompt("Account To Use : Username@domain")
     contact = click.prompt("Username@domain Your Write Acc Granting Permission")
     self.iroha_client.grant_account_read_permission(
         account_id=account_id, contact=contact
     )
Esempio n. 55
0
File: main.py Progetto: jn7163/mycli
    def connect(self,
                database='',
                user='',
                passwd='',
                host='',
                port='',
                socket='',
                charset='',
                local_infile='',
                ssl=''):

        cnf = {
            'database': None,
            'user': None,
            'password': None,
            'host': None,
            'port': None,
            'socket': None,
            'default-character-set': None,
            'local-infile': None,
            'loose-local-infile': None,
            'ssl-ca': None,
            'ssl-cert': None,
            'ssl-key': None,
            'ssl-cipher': None,
            'ssl-verify-server-cert': None,
        }

        cnf = self.read_my_cnf_files(self.cnf_files, cnf.keys())

        # Fall back to config values only if user did not specify a value.

        database = database or cnf['database']
        if port or host:
            socket = ''
        else:
            socket = socket or cnf['socket']
        user = user or cnf['user'] or os.getenv('USER')
        host = host or cnf['host'] or 'localhost'
        port = port or cnf['port'] or 3306
        ssl = ssl or {}

        try:
            port = int(port)
        except ValueError as e:
            self.echo("Error: Invalid port number: '{0}'.".format(port),
                      err=True,
                      fg='red')
            exit(1)

        passwd = passwd or cnf['password']
        charset = charset or cnf['default-character-set'] or 'utf8'

        # Favor whichever local_infile option is set.
        for local_infile_option in (local_infile, cnf['local-infile'],
                                    cnf['loose-local-infile'], False):
            try:
                local_infile = str_to_bool(local_infile_option)
                break
            except (TypeError, ValueError):
                pass

        ssl = self.merge_ssl_with_cnf(ssl, cnf)
        # prune lone check_hostname=False
        if not any(v for v in ssl.values()):
            ssl = None

        # Connect to the database.

        try:
            try:
                sqlexecute = SQLExecute(database, user, passwd, host, port,
                                        socket, charset, local_infile, ssl)
            except OperationalError as e:
                if ('Access denied for user' in e.args[1]):
                    passwd = click.prompt('Password',
                                          hide_input=True,
                                          show_default=False,
                                          type=str)
                    sqlexecute = SQLExecute(database, user, passwd, host, port,
                                            socket, charset, local_infile, ssl)
                else:
                    raise e
        except Exception as e:  # Connecting to a database could fail.
            self.logger.debug('Database connection failed: %r.', e)
            self.logger.error("traceback: %r", traceback.format_exc())
            self.echo(str(e), err=True, fg='red')
            exit(1)

        self.sqlexecute = sqlexecute
Esempio n. 56
0
 def detach_role(self):
     account_id = click.prompt("Username For New Account")
     role_name = click.prompt("Domain")
     self.iroha_client.detach_role_tx(account_id, role_name)
Esempio n. 57
0
def import_session(ctx, session_archive, force):
    if not os.path.exists(session_archive):
        click.echo('File {} does not exist.'.format(session_archive),
                   file=sys.stderr)
        sys.exit(1)

    config_file = cli_setup.DEFAULT_CONFIG_LOCATION
    profile = ctx.obj['profile']

    config = configparser.ConfigParser()
    config.read(config_file)
    current_profiles = [key for key, value in config.items()]

    try:
        temp_dir_name = tempfile.mkdtemp()

        with zipfile.ZipFile(session_archive) as zf:
            zf.extractall(temp_dir_name)

            archived_config_location = os.path.join(temp_dir_name, 'config')
            if not os.path.exists(archived_config_location):
                click.echo(
                    "Session archive {} is invalid. Did not contain file 'config'."
                    .format(session_archive),
                    file=sys.stderr)
                sys.exit(1)

            archived_config = configparser.ConfigParser()
            archived_config.read(archived_config_location)
            archived_profiles = [key for key, value in archived_config.items()]

            if len(archived_profiles) < 1:
                click.echo(
                    'ERROR: The archived config does not contain valid profiles.',
                    file=sys.stderr)
                sys.exit(1)

            # Ignore the default DEFAULT section item and pick the last item. If empty or only DEFAULT picks DEFAULT.
            profile_no = len(archived_profiles) - 1
            archived_profile_name = archived_profiles[profile_no]
            archived_profile = archived_config[archived_profile_name]

            if 'security_token_file' not in archived_profile:
                click.echo(
                    'ERROR: Cannot import non token based profile (profile must contain value for security_token_file).',
                    file=sys.stderr)
                sys.exit(1)

            if force:
                cli_setup.remove_profile_from_config(config_file,
                                                     archived_profile_name)

            while archived_profile_name in current_profiles and not force:
                archived_profile_name = click.prompt(
                    "Config already contains a profile with the same name as the archived profile: {}. Provide an alternative name for the imported profile"
                    .format(archived_profile_name))

            imported_resources_dir = os.path.join(
                cli_setup.DEFAULT_TOKEN_DIRECTORY, archived_profile_name)
            if not os.path.exists(imported_resources_dir):
                os.makedirs(imported_resources_dir)

            # copy referenced files from archived config to imported_resources_dir
            for key, value in six.iteritems(archived_profile):
                if key.endswith((CONFIG_KEY_FILE_SUFFIX, TOKEN_FILE_SUFFIX)):
                    # there is no nesting in the archive so the config will always reference the filename directly
                    new_file_location = os.path.join(imported_resources_dir,
                                                     value)

                    existing_file_location = os.path.join(temp_dir_name, value)
                    copy(existing_file_location, imported_resources_dir)

                    cli_setup.apply_user_only_access_permissions(
                        new_file_location)

            # write new profile to existing config file
            archived_profile = translate_config_filepaths_to_prefix(
                archived_profile, imported_resources_dir)
            cli_setup.write_config(filename=config_file,
                                   profile_name=archived_profile_name,
                                   **archived_profile)
    finally:
        rmtree(temp_dir_name)

    click.echo('Imported profile {} written to: {}'.format(
        archived_profile_name, config_file),
               file=sys.stderr)

    click.echo("""
    Try out your newly imported session credentials with the following example command:

    oci iam region list --config-file {config_file} --profile {profile} --auth {auth}
""".format(config_file=config_file,
           profile=archived_profile_name,
           auth=cli_constants.OCI_CLI_AUTH_SESSION_TOKEN))
Esempio n. 58
0
def viewFile(name,types,pid):
    """
    view-files: filtered list of the names and ids of the files the user has access to
    """
    token = os.path.join(dirpath,'token.json')
    store = file.Storage(token)
    creds = store.get()
    service = build('drive', 'v3', http=creds.authorize(Http()))
    page_token = None
    query=""
    if name:
        q_name=click.prompt('enter the search value')
        query="name contains '"+q_name+"' "
    if types:
        mimeTypes = {
            "xls": 'application/vnd.ms-excel',
            "xlsx": 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
            "xml": 'text/xml',
            "ods": 'application/vnd.oasis.opendocument.spreadsheet',
            "csv": 'text/plain',
            "tmpl": 'text/plain',
            "pdf": 'application/pdf',
            "php": 'application/x-httpd-php',
            "jpg": 'image/jpeg',
            "png": 'image/png',
            "gif": 'image/gif',
            "bmp": 'image/bmp',
            "txt": 'text/plain',
            "doc": 'application/msword',
            "js": 'text/js',
            "swf": 'application/x-shockwave-flash',
            "mp3": 'audio/mpeg',
            "zip": 'application/zip',
            "rar": 'application/rar',
            "tar": 'application/tar',
            "arj": 'application/arj',
            "cab": 'application/cab',
            "html": 'text/html',
            "htm": 'text/html',
            "default": 'application/octet-stream',
            "audio": 'application/vnd.google-apps.audio',
            "Google Docs": 'application/vnd.google-apps.document',
            "Google Drawing": 'application/vnd.google-apps.drawing',
            "Google Drive file": 'application/vnd.google-apps.file',
            "Google Forms": 'application/vnd.google-apps.form',
            "Google Fusion Tables": 'application/vnd.google-apps.fusiontable',
            "Google My Maps": 'application/vnd.google-apps.map',
            "Google Photos": 'application/vnd.google-apps.photo',
            "Google Slides": 'application/vnd.google-apps.presentation',
            "Google Apps Scripts": 'application/vnd.google-apps.script',
            "Google Sites": 'application/vnd.google-apps.site',
            "Google Sheets": 'application/vnd.google-apps.spreadsheet',
            "3rd party shortcut": 'application/vnd.google-apps.drive-sdk',
            "folder": 'application/vnd.google-apps.folder'
        }
        promptMessage = 'Choose a media type to filter \n(press SPACE to mark, ENTER to continue, s to stop):'
        title = promptMessage
        options = [x for x in mimeTypes.keys()]
        picker = Picker(options, title, multi_select=True, min_selection_count=1)
        picker.register_custom_handler(ord('s'),go_back)
        selected = picker.start()
        if type(selected) == list:
            query+="and ("
            for types in selected:
                query+="mimeType='"+mimeTypes[types[0]]+"' or "
            query=query[:-3]
            query+=")"
        if (not name) and types:
            query=query[4:]
    if pid:
        parent = click.prompt('enter the fid of parent or sharing link')
        fid = get_fid(parent)
        if name or types:
            query += " and "
        query += "'" + fid + "' in parents"
    i = 1
    while True:
        response = service.files().list(q=query,
                                            spaces='drive',
                                            fields='nextPageToken, files(id, name,mimeType,modifiedTime)',
                                            pageToken=page_token).execute() 
        
        templist = [response.get('files', [])[i:i+25] for i in range(0, len(response.get('files', [])), 25)] #breakdown list to 25 entries at a time
        for item in templist:
            t = PrettyTable(['Sr.','Name','ID','Type','Modified Time'])                                  
            for fils in item: 
                t.add_row([i,fils.get('name')[:25], fils.get('id'),fils.get('mimeType').replace('application/','')[:25],fils.get('modifiedTime')])
                i+=1
            print(t)
            click.confirm('Do you want to continue?',abort=True)
            click.clear()
        page_token = response.get('nextPageToken', None)
        if page_token is None:
            break
Esempio n. 59
0
    def launch_refarch_env(self):

        with open(self.inventory_file, 'r') as f:
            print(yaml.safe_dump(json.load(f), default_flow_style=False))

        if not self.args.no_confirm:
            if not click.confirm('Continue adding nodes with these values?'):
                sys.exit(0)

        if 'cns' in self.container_storage and 'storage' in self.node_type:
            playbooks = ['playbooks/cns-storage.yaml']

        elif 'crs' in self.container_storage and 'storage' in self.node_type:
            if 'None' in self.tag:
                # do the full install and config minus the cleanup
                self.tag = 'vms,crs-node-setup,heketi-setup,heketi-ocp'
            playbooks = ['playbooks/crs-storage.yaml']
            if 'heketi-setup' in self.tag:
                self.admin_key = click.prompt("Admin key password for heketi?",
                                              hide_input=True)
                self.user_key = click.prompt("User key password for heketi?",
                                             hide_input=True)
        else:
            if 'None' in self.tag:
                # do the full install and config minus the cleanup
                self.tag = 'all'
            playbooks = ['playbooks/add-node.yaml']

        for playbook in playbooks:

            devnull = '> /dev/null'

            if self.verbose > 0:
                devnull = ''

            # refresh the inventory cache to prevent stale hosts from
            # interfering with re-running
            command = 'inventory/vsphere/vms/vmware_inventory.py %s' % (
                devnull)
            os.system(command)

            # remove any cached facts to prevent stale data during a re-run
            command = 'rm -rf .ansible/cached_facts'
            os.system(command)

            command = 'ansible-playbook'
            command = command + ' --extra-vars "@./add-node.json" --tags %s -e \' add_node=yes vcenter_host=%s \
            vcenter_username=%s \
            vcenter_password=%s \
            vcenter_template_name=%s \
            vcenter_folder=%s \
            vcenter_datastore=%s \
            vcenter_cluster=%s \
            vcenter_datacenter=%s \
            vcenter_resource_pool=%s \
            public_hosted_zone=%s \
            app_dns_prefix=%s \
            vm_dns=%s \
            vm_gw=%s \
            vm_netmask=%s \
            vm_network=%s \
            wildcard_zone=%s \
            console_port=%s \
            container_storage=%s \
            deployment_type=%s \
            openshift_vers=%s \
            admin_key=%s \
            user_key=%s \
            rhel_subscription_user=%s \
            rhel_subscription_pass=%s \
            rhel_subscription_server=%s \
            rhel_subscription_pool="%s" \
            openshift_sdn=%s \
            lb_host=%s \
            node_type=%s \
            nfs_host=%s \
            nfs_registry_mountpoint=%s \' %s' % (
                self.tag, self.vcenter_host, self.vcenter_username,
                self.vcenter_password, self.vcenter_template_name,
                self.vcenter_folder, self.vcenter_datastore,
                self.vcenter_cluster, self.vcenter_datacenter,
                self.vcenter_resource_pool, self.public_hosted_zone,
                self.app_dns_prefix, self.vm_dns, self.vm_gw, self.vm_netmask,
                self.vm_network, self.wildcard_zone, self.console_port,
                self.container_storage, self.deployment_type,
                self.openshift_vers, self.admin_key, self.user_key,
                self.rhel_subscription_user, self.rhel_subscription_pass,
                self.rhel_subscription_server, self.rhel_subscription_pool,
                self.openshift_sdn, self.lb_host, self.node_type,
                self.nfs_host, self.nfs_registry_mountpoint, playbook)

            if self.verbose > 0:
                command += " -vvvvv"
                click.echo('We are running: %s' % command)

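            # os.system returns a wait status; decode it to get the playbook's exit code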
            status = os.system(command)
            if os.WIFEXITED(status) and os.WEXITSTATUS(status) != 0:
                return os.WEXITSTATUS(status)
            else:
                print "Successful run!"
                if not click.confirm('Update INI?'):
                    sys.exit(0)
                self.update_ini_file()
                if not click.confirm('Delete inventory file?'):
                    sys.exit(0)
                print "Removing the existing %s file" % self.inventory_file
                os.remove(self.inventory_file)
Example n. 60
def main():
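    """Interactive loop: show the speech banana, read a sentence from the user,
    transform it with monify() to show how it is "heard", display the result,
    and record it with update_data()."""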

    cols, rows = get_dims()

    print('Hello Monika!')
    time.sleep(2)
    print('Speech Banana is loading...')
    time.sleep(2)
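    # List input devices so the user can find the keyboard's xinput id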
    os.system("xinput list")
    key_num = input(
        "What is the keyboard's number, xinput list then look for the keyboard id: "
    )
    banana = gen_banana(cols)

    i = 1

    while i != 2:  # i never changes, so loop forever; exit via the quit phrase below

        try:
            os.system('clear')
            print("\n" * 7)
            print(banana)

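            # Re-enable the keyboard (device chosen above) so the user can type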
            subprocess.run('xinput set-prop {} "Device Enabled" 1'.format(key_num),
                           shell=True,
                           check=True)
            print('\n' * 3)
            text = 'Type your sentence below!'
            print('{}Hello!\n{}Do you want to see how I hear?'.format(
                ' ' * (int(cols / 2 - (6 / 2))),
                ' ' * (int(cols / 2 - (29 / 2)))))
            user_input = click.prompt(u'{}{}\n\n\n{}'.format(
                ' ' * (int(cols / 2 - (len(text) / 2))), text,
                ' ' * (int(cols / 4))).center(int(cols / 4)),
                                      prompt_suffix='')
            if user_input == 'quit this f****r':
                exit(0)
            #print(user_input.center(cols))
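            # Disable the keyboard while the "thinking" animation runs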
            subprocess.run('xinput set-prop {} "Device Enabled" 0'.format(key_num),
                           shell=True,
                           check=True)
            n = 0

            # pause with animated dots so the reply feels like a robot thinking
            while n != 5:  # 5 dots, 1.2 seconds apart
                print(".".center(cols))
                time.sleep(1.2)
                n += 1

            print("\n")
            timestamp = get_time()  #yyyy-mm-dd_mm-ss
            #audio_path = 'data/audio/{}.mp3'.format(timestamp)

            monified = monify(user_input)
            #monified_audio = gtts.gTTS(monified)
            #monified_audio.save(audio_path)
            #p = vlc.MediaPlayer(audio_path)
            #p.play()
            make_sexy(monified, cols, rows)
            # try:
            #     encrypted_input = encrypt(user_input)
            # except:
            #     encrypted_input = 'encryption failed'
            update_data(timestamp, monified, user_input)
            #os.system('clear') #clears screen

        except click.exceptions.Abort:
            continue