Exemplo n.º 1
0
    def allocate(self, size):
        """Allocate *size* bytes from the pool, printing a debug report first.

        Records the call stack that requested the allocation, prints a
        histogram of all currently live blocks (grouped by size and
        description) to ``self.logfile``, optionally pauses for the user
        when ``self.interactive`` is set, then delegates to
        ``DeviceMemoryPool.allocate`` and tracks the new block.
        """
        from traceback import extract_stack
        # Function names of every frame on the current stack, used to label
        # where this allocation request came from.
        stack = tuple(frm[2] for frm in extract_stack())
        description = self.describe(stack, size)

        # self.blocks maps allocation -> (size, description) (see the update
        # near the end of this method); count live blocks per pair.
        histogram = {}
        for bsize, descr in six.itervalues(self.blocks):
            histogram[bsize, descr] = histogram.get((bsize, descr), 0) + 1

        from pytools import common_prefix
        # Common prefix shared by all descriptions; stripped from the
        # per-block lines below to keep the report readable.
        cpfx = common_prefix(descr for bsize, descr in histogram)

        print(
                "\n  Allocation of size %d occurring "
                "(mem: last_free:%d, free: %d, total:%d) (pool: held:%d, active:%d):"
                "\n      at: %s" % (
                    (size, self.last_free) + cuda.mem_get_info()
                    + (self.held_blocks, self.active_blocks,
                    description)),
                file=self.logfile)

        hist_items = sorted(list(six.iteritems(histogram)))
        for (bsize, descr), count in hist_items:
            print("  %s (%d bytes): %dx" % (descr[len(cpfx):], bsize, count),
                    file=self.logfile)

        # Pause so the report can be inspected before execution continues.
        if self.interactive:
            input("  [Enter]")

        result = DeviceMemoryPool.allocate(self, size)
        self.blocks[result] = size, description
        # Remember current free device memory for the next report's "last_free".
        self.last_free, _ = cuda.mem_get_info()
        return result
Exemplo n.º 2
0
def print_info(machine_name, connections, width, height, ip_file_filename):
    """ Show the current machine's details in a readable table and block
    until the user presses enter.

    Parameters
    ----------
    machine_name : str
        The machine the job is running on.
    connections : {(x, y): hostname, ...}
        The connections to the boards.
    width, height : int
        The width and height of the machine in chips.
    ip_file_filename : str
    """
    term = Terminal()

    # Build the key/value table in display order.
    info = OrderedDict()
    info["Hostname"] = term.bright(connections[(0, 0)])
    info["Width"] = width
    info["Height"] = height

    # Multi-board machines get two extra rows.
    if len(connections) > 1:
        info["Num boards"] = len(connections)
        info["All hostnames"] = ip_file_filename

    info["Running on"] = machine_name

    print(render_definitions(info))

    # Wait for the user; Ctrl-C / EOF just ends the pause cleanly.
    try:
        input(term.dim("<Press enter when done>"))
    except (KeyboardInterrupt, EOFError):
        print("")
Exemplo n.º 3
0
 def _simple_interactive_config(self):
   """Interactive configuration menu: show config, set server URL, set data dir.

   Fix: the download directory and server URL are no longer lower-cased --
   filesystem paths and URL paths are case-sensitive; only the menu choice
   and the cancel sentinels ('', 'x', 'q') are compared case-insensitively.
   """
   self._show_config()
   while True:
     print()
     self._simple_interactive_menu(
       's) Show Config', 'u) Set Server URL',
       'd) Set Data Dir', 'm) Main Menu')
     user_input = input('Config> ').strip().lower()
     if user_input == 's':
       self._show_config()
     elif user_input == 'd':
       # Keep the path exactly as typed; only the cancel check is folded.
       new_dl_dir = input('  New Directory> ').strip()
       if new_dl_dir.lower() in ('', 'x', 'q'):
         print('  Cancelled!')
       elif os.path.isdir(new_dl_dir):
         self._ds.download_dir = new_dl_dir
       else:
         print(('Directory %r not found!  Create it first.' %
              new_dl_dir))
     elif user_input == 'u':
       # URLs may contain case-sensitive path components; keep as typed.
       new_url = input('  New URL> ').strip()
       if new_url.lower() in ('', 'x', 'q'):
         print('  Cancelled!')
       else:
         if not new_url.lower().startswith('http://'):
           new_url = 'http://'+new_url
         try: self._ds.url = new_url
         except Exception as e:
           print('Error reading <%r>:\n  %s' % (new_url, e))
     elif user_input == 'm':
       break
def produce(topic, conf):
    """Produce User records to *topic* until the user presses ^C.

    Reads name / favorite number / favorite color from stdin for each
    record, serializes it against the module-level ``record_schema`` and
    publishes it through an AvroProducer built from *conf*.
    """

    from confluent_kafka.avro import AvroProducer

    producer = AvroProducer(conf, default_value_schema=record_schema)

    print("Producing user records to topic {}. ^c to exit.".format(topic))
    while True:
        # Instantiate new User, populate fields, produce record, execute callbacks.
        record = User()
        try:
            record.name = input("Enter name: ")
            record.favorite_number = int(input("Enter favorite number: "))
            record.favorite_color = input("Enter favorite color: ")

            # The message passed to the delivery callback will already be serialized.
            # To aid in debugging we provide the original object to the delivery callback.
            producer.produce(topic=topic, value=record.to_dict(),
                             callback=lambda err, msg, obj=record: on_delivery(err, msg, obj))
            # Serve on_delivery callbacks from previous asynchronous produce()
            producer.poll(0)
        except KeyboardInterrupt:
            # ^C ends the input loop; queued messages are flushed below.
            break
        except ValueError:
            # int() failed on the favorite number; drop this record and re-prompt.
            print("Invalid input, discarding record...")
            continue

    print("\nFlushing records...")
    producer.flush()
Exemplo n.º 5
0
Arquivo: ssh.py Projeto: BBOOXX/stash
    def connect(self, host, passwd=None, port=22):
        """Connect to ``user@host``; try SSH keys first, fall back to a password.

        :param host: ``username@hostname`` string (split on '@').
        :param passwd: password; when given, skip key lookup entirely.
        :param port: SSH port (default 22).
        :return: True on success, False on failure (or whatever
            ``_connect_with_passwd`` returns on the password path).

        NOTE(review): the key-auth failure branch was corrupted in the
        original source ('******' masking) and has been reconstructed:
        on SSHException fall back to a password prompt, on any other
        exception report the error and return False.
        """
        print('Connecting...')
        username, host = host.split('@')

        if passwd is not None:
            return self._connect_with_passwd(host, username, passwd, port)

        else:
            print('Looking for SSH keys...')
            key_filename = self.find_ssh_keys()
            if len(key_filename) > 0:
                try:
                    self.client.connect(host,
                                        username=username,
                                        password=passwd,
                                        port=port,
                                        key_filename=key_filename)
                    return True
                except paramiko.SSHException as e:
                    # Keys exist but were rejected -- fall back to a password.
                    print('Failed to login with SSH Keys: {}'.format(repr(e)))
                    print('Trying password ...')
                    passwd = input('Enter password:')
                    return self._connect_with_passwd(host, username, passwd, port)
                except Exception as e:
                    print('Error: {}'.format(e))
                    return False
            else:
                print('No SSH key found. Trying password ...')
                passwd = input('Enter password:')
                return self._connect_with_passwd(host, username, passwd, port)
def main(project, bucket, zone, instance_name, wait=True):
    """Create a GCE instance, list instances, optionally pause, then delete it."""
    compute = googleapiclient.discovery.build('compute', 'v1')

    print('Creating instance.')
    op = create_instance(compute, project, zone, instance_name, bucket)
    wait_for_operation(compute, project, zone, op['name'])

    # Show everything currently running in this project/zone.
    instances = list_instances(compute, project, zone)
    print('Instances in project %s and zone %s:' % (project, zone))
    for inst in instances:
        print(' - ' + inst['name'])

    print("""
Instance created.
It will take a minute or two for the instance to complete work.
Check this URL: http://storage.googleapis.com/{}/output.png
Once the image is uploaded press enter to delete the instance.
""".format(bucket))

    if wait:
        input()

    print('Deleting instance.')
    op = delete_instance(compute, project, zone, instance_name)
    wait_for_operation(compute, project, zone, op['name'])
def run(project, zone, instance_name):
    """Create a GCE instance with default credentials, pause, then delete it."""
    credentials = GoogleCredentials.get_application_default()
    compute = build('compute', 'v1', credentials=credentials)

    print('Creating instance.')
    op = create_instance(compute, project, zone, instance_name)
    wait_for_operation(compute, project, zone, op['name'])

    # Show everything currently running in this project/zone.
    existing = list_instances(compute, project, zone)
    print('Instances in project %s and zone %s:' % (project, zone))
    for vm in existing:
        print(' - ' + vm['name'])

    print("""
Instance created.
It will take a minute or two for the instance to complete work.
Check this URL: http://storage.googleapis.com/%s/output.png
Once the image is uploaded press enter to delete the instance.
""" % project)

    # Block until the user has checked the output image.
    input()

    print('Deleting instance.')
    op = delete_instance(compute, project, zone, instance_name)
    wait_for_operation(compute, project, zone, op['name'])
Exemplo n.º 8
0
def invoke_editor(s, filename="edit.txt", descr="the file"):
    """Let the user edit the text *s* in an external editor and return the result.

    Writes ``str(s)`` to a fresh temp file, opens ``$EDITOR`` on it (or, when
    EDITOR is unset, tells the user where the file is and waits for Enter),
    then reads the file back.

    :param s: initial contents (converted with ``str``).
    :param filename: name of the temp file shown to the user/editor.
    :param descr: human-readable description used in the fallback prompt.
    :return: the (possibly edited) file contents as a string.
    """
    from tempfile import mkdtemp
    tempdir = mkdtemp()

    from os.path import join
    full_name = join(tempdir, filename)

    # Fix: use context managers so the handles are closed even on error
    # (the original left both files open).
    with open(full_name, "w") as outf:
        outf.write(str(s))

    import os
    if "EDITOR" in os.environ:
        from subprocess import Popen
        p = Popen([os.environ["EDITOR"], full_name])
        # Fix: the original evaluated os.waitpid(p.pid, 0)[1] and discarded
        # it; wait() expresses the intent (block until the editor exits).
        p.wait()
    else:
        print("(Set the EDITOR environment variable to be "
                "dropped directly into an editor next time.)")
        input("Edit %s at %s now, then hit [Enter]:"
                % (descr, full_name))

    with open(full_name, "r") as inf:
        result = inf.read()

    return result
Exemplo n.º 9
0
def _CommandLine(args):
    """Handle command-line invocation of the base converter.

    *args* is sys.argv-style (args[0] is the program name). With extra
    arguments: print help (-h/--help), run the test suite (-t/--test), or
    convert ``args[1:]`` via BasCalc. With no extra arguments, run an
    interactive conversion loop until the user types an exit value.
    """
    if 1 < len(args):
        for arg in args[1:]:
            if arg in ('-h', '--help'):
                # Fix: '%' previously bound only to the last '+'-joined
                # literal (which has no specifier), raising TypeError at
                # runtime; use implicit concatenation so '%' formats the
                # whole message.
                print('This program converts integers which may be signed '
                      'between any two number bases %d and over.\n'
                      'Inputs as follows:\n'
                      'inNum = the Input Number\n'
                      'inBas = the Input Base\n'
                      'outBas = the Output Base' % (MINIMUM_BASE,))
                break
            elif arg in ('-t', '--test'):
                import test
                test.RunTests()
                break
        else:
            # No recognized flag: treat the arguments as inNum inBas outBas.
            print(BasCalc(*args[1:]))
    else:
        print('Base Converter')
        exitVals = ('q', 'quit')
        while True:
            try:
                print('Output Number: ' +
                      BasCalc(input('\nEnter an Input Number: ').strip(),
                              input('Enter an Input Base: ').strip(),
                              input('Enter an Output Base: ').strip()))
            except (BaseConvError, ValueError) as e:
                print('Error: ', e)
            # Fix: same '%'-binding bug as the help text above.
            if input('\nEnter any of the following values to exit: %s\n'
                     'or press return to continue: '
                     % (str(exitVals),)).strip().lower() in exitVals:
                break
Exemplo n.º 10
0
def package_manager(config_opts, chroot, plugins):
    """Return the package-manager backend (Yum or Dnf) selected by config_opts."""
    pm = config_opts.get('package_manager', 'yum')
    if pm == 'yum':
        return Yum(config_opts, chroot, plugins)
    if pm != 'dnf':
        # TODO specific exception type
        raise Exception('Unrecognized package manager')
    # pm == 'dnf' from here on.
    if os.path.isfile(config_opts['dnf_command']):
        return Dnf(config_opts, chroot, plugins)
    # RHEL without DNF
    distribution, version = distro.linux_distribution(full_distribution_name=False)[0:2]
    if distribution in ('redhat', 'centos'):
        if int(version.split('.')[0]) < 8:
            if config_opts.get('dnf_warning'):
                print("""WARNING! WARNING! WARNING!
You are building package for distribution which use DNF. However your system
does not support DNF. You can continue with YUM, which will likely succeed,
but the result may be little different.
You can suppress this warning when you put
  config_opts['dnf_warning'] = False
in Mock config.""")
                input("Press Enter to continue.")
            return Yum(config_opts, chroot, plugins)
    # something else then EL, and no dnf_command exist
    # This will likely mean some error later.
    # Either user is smart or let him shot in his foot.
    return Dnf(config_opts, chroot, plugins)
Exemplo n.º 11
0
def test_stdin_from_generator_expression():
    """stdin_from should feed generator items to successive input() calls."""
    expected = ['one', 'two', 'three']

    with stdin_from(word for word in expected):
        for word in expected:
            assert input() == word
Exemplo n.º 12
0
def start(args):
    """Set up the Geofront server URL."""
    # Refuse to clobber an existing configuration unless --force was given.
    for config_dir in load_config_paths(CONFIG_RESOURCE):
        existing = os.path.join(config_dir.decode(), SERVER_CONFIG_FILENAME)
        if os.path.isfile(existing):
            message = 'Geofront server URL is already configured: ' + existing
            if not args.force:
                parser.exit(message)
            print(message + '; overwriting...', file=sys.stderr)
    # Prompt until we get an http(s) URL; plain http needs explicit consent.
    while True:
        server_url = input('Geofront server URL: ')
        if not server_url.startswith(('https://', 'http://')):
            print(server_url, 'is not a valid url.')
        elif server_url.startswith('https://'):
            break
        else:
            cont = input('It is not a secure URL. '
                         'https:// is preferred over http://. '
                         'Continue (y/N)? ')
            if cont.strip().lower() == 'y':
                break
    server_config_filename = os.path.join(
        save_config_path(CONFIG_RESOURCE).decode(),
        SERVER_CONFIG_FILENAME
    )
    with open(server_config_filename, 'w') as f:
        print(server_url, file=f)
    authenticate.call(args)
Exemplo n.º 13
0
    def apply_actions(self, iw, actions):
        """Execute, or interactively confirm, the computed *actions* for issue *iw*.

        Returns an ``action_meta`` dict whose ``REDO`` flag asks the caller to
        re-process the same issue (e.g. after the template wizard changed it).
        """

        action_meta = {'REDO': False}

        if actions.count() > 0:

            if self.dump_actions:
                self.dump_action_dict(iw, actions)

            if self.dry_run:
                print("Dry-run specified, skipping execution of actions")
            else:
                if self.force:
                    # Non-interactive mode: execute and return immediately.
                    print("Running actions non-interactive as you forced.")
                    self.execute_actions(iw, actions)
                    return action_meta
                # y=yes, a=abort, R=redo issue, T=template wizard, DEBUG=breakpoint
                cont = input("Take recommended actions (y/N/a/R/T/DEBUG)? ")
                if cont in ('a', 'A'):
                    sys.exit(0)
                if cont in ('Y', 'y'):
                    self.execute_actions(iw, actions)
                if cont == 'T':
                    self.template_wizard(iw)
                    action_meta['REDO'] = True
                if cont in ('r', 'R'):
                    action_meta['REDO'] = True
                if cont == 'DEBUG':
                    # put the user into a breakpoint to do live debug
                    action_meta['REDO'] = True
                    import epdb; epdb.st()
        elif self.always_pause:
            # No actions computed, but the user asked to pause on every issue.
            print("Skipping, but pause.")
            cont = input("Continue (Y/n/a/R/T/DEBUG)? ")
            if cont in ('a', 'A', 'n', 'N'):
                sys.exit(0)
            if cont == 'T':
                self.template_wizard(iw)
                action_meta['REDO'] = True
            elif cont in ('r', 'R'):
                action_meta['REDO'] = True
            elif cont == 'DEBUG':
                # put the user into a breakpoint to do live debug
                import epdb; epdb.st()
                action_meta['REDO'] = True
        elif self.force_description_fixer:
            # FIXME: self.FIXED_ISSUES not defined since 1cf9674cd38edbd17aff906d72296c99043e5c13
            #        either define self.FIXED_ISSUES, either remove this method
            # FIXME force_description_fixer is not known by DefaultTriager (only
            #       by AnsibleTriage): if not removed, move it to AnsibleTriage
            if iw.html_url not in self.FIXED_ISSUES:
                if self.meta['template_missing_sections']:
                    changed = self.template_wizard(iw)
                    if changed:
                        action_meta['REDO'] = True
                self.FIXED_ISSUES.append(iw.html_url)
        else:
            print("Skipping.")

        # let the upper level code redo this issue
        return action_meta
Exemplo n.º 14
0
 def setup_package_options(self):
     """Interactively build ``self.package_options``.

     Starts from ``DEFAULT_PACKAGE_OPTIONS``, asks the user to pick a
     strategy from ``strategies_class_lookup`` (1-based menu), then prompts
     for each of the strategy's STRATEGY_OPTIONS, coercing the reply through
     the option's type and falling back to the declared default when the
     user just presses Enter.
     """
     self.package_options = self.DEFAULT_PACKAGE_OPTIONS
     print('Choose between the following strategies : ')
     strategies = list(strategies_class_lookup.keys())
     for istrategy, strategy in enumerate(strategies):
         print(' <{}> : {}'.format(str(istrategy + 1), strategy))
     # The menu is 1-based; int(test) - 1 converts back to a list index.
     test = input(' ... ')
     self.package_options['default_strategy'] = {'strategy': strategies[int(test) - 1], 'strategy_options': {}}
     strategy_class = strategies_class_lookup[strategies[int(test) - 1]]
     if len(strategy_class.STRATEGY_OPTIONS) > 0:
         for option, option_dict in strategy_class.STRATEGY_OPTIONS.items():
             while True:
                 print('  => Enter value for option "{}" '
                       '(<ENTER> for default = {})\n'.format(option,
                                                             str(option_dict['default'])))
                 print('     Valid options are :\n')
                 print('       {}'.format(option_dict['type'].allowed_values))
                 test = input('     Your choice : ')
                 if test == '':
                     # Empty answer: coerce the declared default through the
                     # option's type constructor.
                     self.package_options['default_strategy']['strategy_options'][option] = option_dict['type'](strategy_class.STRATEGY_OPTIONS[option]['default'])
                     break
                 try:
                     self.package_options['default_strategy']['strategy_options'][option] = option_dict['type'](test)
                     break
                 except ValueError:
                     # Keep prompting until the value parses.
                     print('Wrong input for option {}'.format(option))
Exemplo n.º 15
0
def main():
    """Drive the non-blocking plotter: ten timed updates, then finish on keypress."""
    plotter = NBPlot()
    for _ in range(10):
        plotter.plot()
        time.sleep(0.5)
    input('press Enter...')
    plotter.plot(finished=True)
Exemplo n.º 16
0
def ask_option(message, options, acceptable_responses=None):
    """Prompt the user until an acceptable response is entered.

    *options* is shown in the prompt; *acceptable_responses* defaults to
    *options* when not given. Returns the accepted response.
    """
    allowed = acceptable_responses or options
    retry_prompt = 'Please enter one of {} :'.format(', '.join(options))

    reply = input('{} [{}]'.format(message, '/'.join(options)))
    while reply not in allowed:
        reply = input(retry_prompt)
    return reply
Exemplo n.º 17
0
def restTest():
    """Manual smoke test for the LBank REST API client."""
    # Create the API object and initialize it
    api = LbankRestApi()
    api.init(API_KEY, SECRET_KEY)
    api.start(1)

    # Ad-hoc tests (kept for reference; uncomment to exercise other endpoints)
    #api.addReq('GET', '/currencyPairs.do', {}, api.onData)
    #api.addReq('GET', '/accuracy.do', {}, api.onData)

    #api.addReq('GET', '/ticker.do', {'symbol': 'eth_btc'}, api.onData)
    #api.addReq('GET', '/depth.do', {'symbol': 'eth_btc', 'size': '5'}, api.onData)

    #api.addReq('post', '/user_info.do', {}, api.onData)

    req = {
        'symbol': 'sc_btc',
        'current_page': '1',
        'page_length': '50'
    }
    api.addReq('POST', '/orders_info_no_deal.do', req, api.onData)

    # Block the main thread so the worker thread can process the request
    input()
Exemplo n.º 18
0
def main():
    """List insync products missing a desktop shortcut, then add one interactively."""
    client = insync.client(os.path.expanduser('~/lib/insync.db'))
    client.login()
    client.desktop()

    # Show every product that is not already on the desktop.
    for prodtype in ('ACCOUNT', 'DEPOSIT', 'CREDIT'):
        for item in client.products(prodtype)['items']:
            if 'onDesktop' not in item or item['onDesktop']:
                continue
            print('------------------------------------')
            print('    title:', item['info']['title'])
            print('  type/id:', item['type'], item['id'])
            if 'amount' in item['info']:
                amount = item['info']['amount']
                print('  balance:', amount['amount'], amount['currency'])

    print()

    type_ = input('Enter item TYPE: ')
    id_ = input('Enter item ID: ')

    # Temporarily enable debug output around the mutating call.
    client.debug = True
    client.add_product_shortcut(type_.strip(), id_.strip())
    client.debug = False
    client.logout()
Exemplo n.º 19
0
def run(argv=None):
    """Run the program

    Usage: des.py [options] <bits>

    It will ask you for further inputs

    Options::
        -h,--help           Show this help
        -v,--verbose        Increase verbosity
        --test              Generate test strings
    """
    # NOTE: the docstring above is parsed by docopt below -- it is part of the
    # program's runtime behavior, not just documentation.
    import sys
    import docopt
    import textwrap
    from binascii import unhexlify, hexlify
    from multiprocessing import Pool

    # The argv parameter is ignored: sys.argv always wins here.
    argv = sys.argv[1:]
    args = docopt.docopt(textwrap.dedent(run.__doc__), argv)

    nbits = int(args['<bits>'])

    # set up logging
    level = logging.WARN
    if args['--verbose']:
        level = logging.INFO
    logging.basicConfig(level=level)

    if args['--test']:
        # Generate a random key pair plus a matching plain/cipher text pair
        # (cipher = double encryption with key1 then key2) for manual testing.
        from random import randint
        key1 = nth_key(randint(0, 2**nbits))
        key2 = nth_key(randint(0, 2**nbits))
        plain_text = bytes((randint(0, 255) for i in range(8)))
        cipher_text = encrypt(key2, encrypt(key1, plain_text))
        print("key: ({}, {})".format(hexlify(key1).decode('utf-8'),
                                     hexlify(key2).decode('utf-8')))
        print("plain text:  {}".format(hexlify(plain_text).decode('utf-8')))
        print("cipher text: {}".format(hexlify(cipher_text).decode('utf-8')))
        return

    # Collect one or more (plain, cipher) pairs from the user, hex-encoded.
    input_more = True
    pairs = []
    while input_more:
        plain_text = unhexlify(
            input("Please input the plain text, hex encoded: "
                  ).strip().encode('utf-8'))
        cipher_text = unhexlify(
            input("Please input the cipher text, hex encoded: "
                  ).strip().encode('utf-8'))
        pairs.append((plain_text, cipher_text))
        if 'y' not in input("Do you want to supply more texts? [y/N]: "):
            input_more = False

    # Run the meet-in-the-middle key search over a process pool.
    with Pool() as p:
        keys = meet_in_the_middle(nbits, pairs, pool=p)
    if keys:
        print("Found keys: ({}, {})".format(*keys))
    else:
        print("Did not find keys!")
Exemplo n.º 20
0
def confirm(args, evil=()):
    """Interactively confirm a run, with an extra hurdle when something scary was found.

    :param args: parsed CLI arguments; ``confirm`` and ``i_fear_no_evil``
        boolean attributes are consulted.
    :param evil: sequence of "scary" findings to show the user. (Fix: the
        default is now an immutable tuple instead of a shared mutable list.)
    :return: always True -- the function blocks until the user confirms.
    """
    if not evil and not args.confirm:
        # NOTE(review): this message reads inverted -- it fires when
        # args.confirm is *false*. Confirm the intended flag semantics
        # before changing the text.
        LOG.warning(color(
            'You used the --confirm flag so we are proceeding '
            'without a review/confimation step. Good luck. ', fg='yellow'))
        return True
    if evil:
        for idx, message in enumerate(evil, start=1):
            # Fix: logging's warn() is a deprecated alias of warning().
            LOG.warning(color(
                'Found something scary. Problem #%d:\n',
                fg='red', style='bold'), idx)
            sys.stderr.write(
                color(message, fg='black', bg='yellow', style='bold') + '\n\n')
        if args.i_fear_no_evil:
            LOG.warning(color(
                'You used the --i-fear-no-evil flag, so we are '
                'proceeding without review/confirmation even though '
                'you\'re trying to do something scary.  I hope your '
                'insurance is fully paid up...', fg='red'))
        else:
            # Force the user to type the exact phrase before proceeding.
            x = 'I FEAR NO EVIL'
            while input(color(
                    '\nType exactly "%s" to proceed --> ' % x, fg='red')) != x:
                pass
    # yes, we do this even if we pass through the evil check;
    # that's intentional
    if args.confirm:
        while input(color('\nType "c" to confirm --> ', fg='cyan')) != 'c':
            pass
    return True
Exemplo n.º 21
0
 def prompt_yesno(self, msg):
     """Ask *msg* as a yes/no question and return True for yes.

     When ``self._prompt`` is preset (non-None) it is returned directly,
     bypassing the interactive prompt.
     """
     if self._prompt is not None:
         return self._prompt
     prompt = '{} (Yes/No)> '.format(msg)
     while True:
         reply = input(prompt).strip().lower()
         if reply in ('yes', 'no', 'y', 'n'):
             return reply in ('yes', 'y')
         # Invalid answer: switch to the retry prompt and ask again.
         prompt = 'Please type (Yes/No)> '
Exemplo n.º 22
0
    def save_config(config):
        """Save the configuration.

        Interactively writes the region settings file and the AWS key file,
        asking for confirmation before overwriting files that already exist.

        :param config: object exposing ``config_path`` and a ``config``
            mapping with a ``key_file`` entry
        :return: None
        """
        config_path = Path(config.config_path)
        if not config_path.exists():
            allow = True
        else:
            allow = _confirm("overwrite %s \nOK?" % str(config_path))
        if allow:
            print("enter your region[us-east-1]:")
            # Empty input falls back to the default region.
            region = moves.input() or "us-east-1"
            with config_path.open("w") as fp:
                print("save to %s" % str(config_path))
                fp.write(DEFAULT_CONFIG.format(region=region))

        key_file = Path(config.config.get('key_file'))
        if key_file.exists():
            allow = _confirm("overwrite %s \nOK?" % str(key_file))
        else:
            allow = True
        if allow:
            print("enter your aws_access_key_id:")
            key_id = moves.input()
            print("enter your aws_secret_access_key:")
            secret_key = moves.input()
            # Only write the key file when both values were provided.
            if key_id and secret_key:
                print("save to %s" % key_file)
                with key_file.open("w") as fp:
                    fp.write(u"%s:%s" % (key_id, secret_key))
Exemplo n.º 23
0
    def make_api_key(self):
        """Obtain a Rattic API key for ``self.host``, handling optional two-factor auth.

        Prompts on stderr for username and password (and for a one-time token
        when the server answers 400), then returns an ``ApiKey user:key``
        header value.

        NOTE(review): the credential-prompt lines were corrupted in the
        original source ('******' masking) and have been reconstructed to
        mirror the intact "token:" prompt below; the password is read with
        getpass so it is not echoed.
        """
        import getpass

        api_key_url = "{0}/account/generate_api_key".format(self.host)
        cookies = requests.get(api_key_url, headers={"Referer": self.host}).cookies
        data = {'rattic_tfa_generate_api_key-current_step': 'auth', 'csrfmiddlewaretoken': cookies['csrftoken']}

        # Prompt on stderr so stdout stays clean for the generated key.
        sys.stderr.write("username: ")
        sys.stderr.flush()
        username = input()
        sys.stderr.write("password: ")
        sys.stderr.flush()
        password = getpass.getpass("")

        data['auth-username'] = username
        data['auth-password'] = password

        res = requests.post(api_key_url, data=data, cookies=cookies, headers={"Referer": self.host})
        if res.status_code not in (200, 400):
            print(res.content.decode('utf-8'))
            raise BespinError("Failed to generate an api token from rattic")

        if res.status_code == 400:
            # 400 signals two-factor auth: ask for the one-time token and retry.
            data['rattic_tfa_generate_api_key-current_step'] = 'token'
            sys.stderr.write("token: ")
            sys.stderr.flush()
            token = input()
            data['token-otp_token'] = token
            res = requests.post(api_key_url, data=data, cookies=cookies, headers={"Referer": self.host})

        return "ApiKey {0}:{1}".format(username, res.content.decode('utf-8'))
Exemplo n.º 24
0
def main():
    """Register an x-card: set a new password, confirm via SMS, print the results.

    NOTE(review): the final 'password:' print line was corrupted in the
    original source ('******' masking) and has been reconstructed as two
    separate prints.
    """
    x = mtb.xcard()

    print('---')
    pwd = input('Enter your new password (digits only, 8 chars: ')
    if len(pwd) != 8 or not pwd.isdigit():
        raise Exception('Bad password, try again')

    pan = input('Enter your x-card number: ')
    cvc = input('Enter your cvc2 code: ')
    r = x.register(pan, cvc)

    # otp request
    otp = input('Enter your SMS code: ')
    o = x.confirmotp(otp.strip())

    # password
    x.setpassword(pwd)

    print('Registered')
    print('---')
    print('password:', pwd)
    print('userId:', r['userId'])
    print('-')
    print('CARDREF (not needed?):', r['CARDREF'])
    print('trsaPub (not nneded?):', o['trsaPub'])
Exemplo n.º 25
0
 def private_key_copy(self):
     """Decrypt and copy the private key, wait for Enter, then re-invoke self.

     The trailing ``self()`` call presumably redisplays the menu (the
     instance is callable) -- verify against the enclosing class.
     """
     log.debug(u'Copied private key')
     self.key_handler.copy_decrypted_private_key()
     msg = u'Private key decrypted. Press enter to continue'
     self.display_msg(msg)
     input()
     self()
Exemplo n.º 26
0
def read_text():
    """Read lines from stdin until a blank line; join them with blank lines."""
    collected = []
    while True:
        entry = input()
        if not entry:
            # Blank line terminates the input.
            return "\n\n".join(collected)
        collected.append(entry)
Exemplo n.º 27
0
def get_correct_answer(question, default=None, required=False,
                       answer=None, is_answer_correct=None):
    """Prompt for an answer and loop until the user confirms it.

    :param question: question text shown to the user.
    :param default: value substituted when the user just presses Enter.
    :param required: when True (and a default exists), an empty response
        forces a re-prompt instead of being accepted.
    :param answer: pre-seeded answer; skips the prompt on the first pass.
    :param is_answer_correct: pre-seeded yes/no for the confirmation step,
        forwarded to ask_yes_no (useful for non-interactive callers).
    :return: the confirmed answer.
    """
    while 1:
        if default is None:
            msg = u' - No Default Available'
        else:
            msg = (u'\n[DEFAULT] -> {}\nPress Enter To '
                   'Use Default'.format(default))
        prompt = question + msg + '\n--> '
        if answer is None:
            answer = input(prompt)
        if answer == '' and required and default is not None:
            print(u'You have to enter a value\n\n')
            input(u'Press enter to continue')
            print('\n\n')
            # Clear the rejected answer so the next pass prompts again.
            answer = None
            continue
        if answer == u'' and default is not None:
            answer = default
        _ans = ask_yes_no(u'You entered {}, is this '
                          'correct?'.format(answer),
                          answer=is_answer_correct)
        if _ans:
            return answer
        else:
            # Not confirmed: forget the answer and ask again.
            answer = None
Exemplo n.º 28
0
    def pre_work(self):
        # this method will be called before the gathering begins
        #
        # Prompts (unless --batch or --quiet was given) for the reporter's
        # name and a case number, sanitizes both, and stores them on self.

        localname = self.get_local_name()

        if not self.commons['cmdlineopts'].batch and not self.commons['cmdlineopts'].quiet:
            try:
                self.report_name = input(_("Please enter your first initial and last name [%s]: ") % localname)

                self.ticket_number = input(_("Please enter the case number that you are generating this report for: "))
                self._print()
            except:
                # NOTE(review): bare except also swallows KeyboardInterrupt/
                # EOFError at the prompt; if the first input() raises,
                # self.ticket_number is never assigned here -- presumably the
                # class predefines it, otherwise the check below would fail.
                self._print()
                self.report_name = localname

        if len(self.report_name) == 0:
            self.report_name = localname

        # Command-line overrides win over anything entered interactively.
        if self.commons['cmdlineopts'].customer_name:
            self.report_name = self.commons['cmdlineopts'].customer_name

        if self.commons['cmdlineopts'].ticket_number:
            self.ticket_number = self.commons['cmdlineopts'].ticket_number

        self.report_name = self.sanitize_report_name(self.report_name)
        if self.ticket_number:
            self.ticket_number = self.sanitize_ticket_number(self.ticket_number)

        if (self.report_name == ""):
            self.report_name = "default"

        return
Exemplo n.º 29
0
def aws_sign_in(aws_profile, duration_minutes=DEFAULT_SIGN_IN_DURATION_MINUTES,
                force_new=False):
    """
    Create (or reuse) an MFA-backed temporary session for an aws profile.

    :param aws_profile: name of an existing aws profile to create a temp session for
    :param duration_minutes: session lifetime used when a new session is created
    :param force_new: when True, always create fresh credentials even if
        valid ones are found
    :return: the temp session profile name
             (always the passed in profile followed by ':session')
    """
    session_profile = '{}:session'.format(aws_profile)
    # Reuse existing credentials unless the caller forced a refresh.
    if not force_new \
            and _has_valid_session_credentials(session_profile):
        return session_profile

    default_username = get_default_username()
    username = default_username
    if default_username.is_guess:
        # The default is only a guess -- let the user correct it.
        entered = input("Enter username associated with credentials [{}]: ".format(
            default_username))
        username = entered or default_username
        print_help_message_about_the_commcare_cloud_default_username_env_var(username)
    mfa_token = input("Enter your MFA token: ")
    generate_session_profile(aws_profile, username, mfa_token, duration_minutes)

    puts(colored.green(u"✓ Sign in accepted"))
    puts(colored.cyan(
        "You will be able to use AWS from the command line for the next {} minutes."
        .format(duration_minutes)))
    puts(colored.cyan(
        "To use this session outside of commcare-cloud, "
        "prefix your command with AWS_PROFILE={}:session".format(aws_profile)))
    return session_profile
Exemplo n.º 30
0
def setup_manager(yes=False, port=23624, domain=None):
	"Setup bench-manager.local site with the bench_manager app installed on it"
	from six.moves import input
	create_new_site = True
	if 'bench-manager.local' in os.listdir('sites'):
		ans = input('Site aleady exists. Overwrite existing new site? [Y/n]: ')
		while ans.lower() not in ['y', 'n', '']:
			ans = input('Please type "y" or "n". Site aleady exists. Overwrite existing new site? [Y/n]: ')
		# Fix: the validation loop above accepts 'N' (it compares lower-cased),
		# but the original `ans == 'n'` check was case-sensitive, so answering
		# 'N' still overwrote the existing site.
		if ans.lower() == 'n':
			create_new_site = False
	if create_new_site:
		exec_cmd("bench new-site --force bench-manager.local")

	if 'bench_manager' in os.listdir('apps'):
		print('App aleady exists. Skipping downloading the app')
	else:
		exec_cmd("bench get-app bench_manager")

	exec_cmd("bench --site bench-manager.local install-app bench_manager")

	from bench.config.common_site_config import get_config
	bench_path = '.'
	conf = get_config(bench_path)
	if conf.get('restart_supervisor_on_update') or conf.get('restart_systemd_on_update'):
		# implicates a production setup or so I presume
		if not domain:
			print("Please specify the site name on which you want to host bench-manager using the 'domain' flag")
			sys.exit(1)

		from bench.utils import get_sites, get_bench_name
		bench_name = get_bench_name(bench_path)

		if domain not in get_sites(bench_path):
			raise Exception("No such site")

		from bench.config.nginx import make_bench_manager_nginx_conf
		make_bench_manager_nginx_conf(bench_path, yes=yes, port=port, domain=domain)
Exemplo n.º 31
0
import getpass
from functools import wraps

from six.moves import input

import rpmfusion_cert
from rfpkgdb2client import PkgDB, PkgDBException

PKGDB_URL = 'http://127.0.0.1:5000'
AUTH = True

# Resolve FAS credentials once at import time; disable authenticated tests
# when no password is supplied.
if AUTH:
    try:
        USERNAME = rpmfusion_cert.read_user_cert()
    except Exception:
        # No usable certificate -- fall back to asking for the username.
        # NOTE(review): this span was corrupted in the original source
        # ('******' masking) and has been reconstructed; the password is
        # read with getpass so it is not echoed.
        USERNAME = input('FAS username: ')
    PASSWORD = getpass.getpass('FAS password: ')
    if not PASSWORD:
        AUTH = False

# Random, unique names so repeated test runs don't collide. (uuid, time and
# datetime are presumably imported in a part of the file not shown -- verify.)
COL_NAME = str(uuid.uuid1())[:30]
PKG_NAME = str(uuid.uuid1())[:30]
# Package "version": the current UTC time as a Unix timestamp.
VERSION = time.mktime(datetime.datetime.utcnow().timetuple())


def auth_only(function):
    """ Decorator to skip tests if AUTH is set to False """
    @wraps(function)
    def decorated_function(*args, **kwargs):
        """ Decorated function, actually does the work. """
        if AUTH:
Exemplo n.º 32
0
from six.moves import input
import example_helpers
import drms

# Print the doc string of this example.
print(__doc__)

# Export request ID; leave empty here to be prompted interactively below.
request_id = ''

# Create DRMS client, use debug=True to see the query URLs.
c = drms.Client(verbose=True)

# Ask for a RequestID, if it is not set yet.
if not request_id:
    request_id = input('Please enter a RequestID: ')
    print()

# Querying the server using the entered RequestID.
print('Looking up export request "%s"...' % request_id)
r = c.export_from_id(request_id)

# Print request URL and number of available files.
# NOTE(review): assumes the request has already completed on the server;
# behavior for a still-pending request is not visible here -- confirm.
print('\nRequest URL: %s' % r.request_url)
print('%d file(s) available for download.\n' % len(r.urls))

# Ask if the files should be downloaded (default is No).
do_download = input('Retrieve all files [y/N]? ')
print()
if do_download.lower() in ['y', 'yes']:
Exemplo n.º 33
0
 def login_prompt(self):  #pragma: no cover
     """Interactively collect credentials and delegate to login().

     Prompts on stdin for the username and (without echoing) the
     password, then returns whatever self.login() returns.
     """
     credentials = {
         "username": input("Username: "),
         "password": getpass.getpass(),
     }
     return self.login(**credentials)
Exemplo n.º 34
0
    def handle(self, *args, **options):
        """
        Drop test database for this project.
        """

        if args:
            raise CommandError("reset_db takes no arguments")

        router = options.get('router')
        dbinfo = settings.DATABASES.get(router)
        if dbinfo is None:
            raise CommandError("Unknown database router %s" % router)

        engine = dbinfo.get('ENGINE').split('.')[-1]

        user = password = database_name = ''
        if engine == 'mysql':
            read_default_file = dbinfo.get('OPTIONS',
                                           {}).get('read_default_file')
            if read_default_file:
                config = configparser.ConfigParser()
                config.read(read_default_file)
                user = config.get('client', 'user')
                password = config.get('client', 'password')
                database_name = config.get('client', 'database')

        user = options.get('user') or dbinfo.get('USER') or user
        password = options.get('password') or dbinfo.get(
            'PASSWORD') or password

        try:
            database_name = dbinfo['TEST']['NAME']
        except KeyError:
            database_name = None

        if database_name is None:
            database_name = TEST_DATABASE_PREFIX + (options.get('dbname')
                                                    or dbinfo.get('NAME'))

        if database_name is None or database_name == '':
            raise CommandError(
                "You need to specify DATABASE_NAME in your Django settings file."
            )

        database_host = dbinfo.get('HOST')
        database_port = dbinfo.get('PORT')

        verbosity = int(options.get('verbosity', 1))
        if options.get('interactive'):
            confirm = input("""
You have requested to drop the test database.
This will IRREVERSIBLY DESTROY
ALL data in the database "%s".
Are you sure you want to do this?

Type 'yes' to continue, or 'no' to cancel: """ % (database_name, ))
        else:
            confirm = 'yes'

        if confirm != 'yes':
            print("Reset cancelled.")
            return

        if engine in ('sqlite3', 'spatialite'):
            import os
            try:
                logging.info("Unlinking %s database" % engine)
                if os.path.isfile(database_name):
                    os.unlink(database_name)
            except OSError:
                pass
        elif engine in ('mysql', ):
            import MySQLdb as Database
            kwargs = {
                'user': user,
                'passwd': password,
            }
            if database_host.startswith('/'):
                kwargs['unix_socket'] = database_host
            else:
                kwargs['host'] = database_host

            if database_port:
                kwargs['port'] = int(database_port)

            connection = Database.connect(**kwargs)
            drop_query = 'DROP DATABASE IF EXISTS `%s`' % database_name
            logging.info('Executing: "' + drop_query + '"')
            connection.query(drop_query)
        elif engine in ('postgresql', 'postgresql_psycopg2', 'postgis'):
            if engine == 'postgresql':
                import psycopg as Database  # NOQA
            elif engine in ('postgresql_psycopg2', 'postgis'):
                import psycopg2 as Database  # NOQA

            conn_string = "dbname=template1"
            if user:
                conn_string += " user=%s" % user
            if password:
                conn_string += " password='******'" % password
            if database_host:
                conn_string += " host=%s" % database_host
            if database_port:
                conn_string += " port=%s" % database_port

            connection = Database.connect(conn_string)
            connection.set_isolation_level(0)  # autocommit false
            cursor = connection.cursor()
            drop_query = "DROP DATABASE IF EXISTS \"%s\";" % database_name
            logging.info('Executing: "' + drop_query + '"')

            try:
                cursor.execute(drop_query)
            except Database.ProgrammingError as e:
                logging.exception("Error: %s" % str(e))
        else:
            raise CommandError("Unknown database engine %s" % engine)

        if verbosity >= 2 or options.get('interactive'):
            print("Reset successful.")
Exemplo n.º 35
0
def continue_maybe(prompt):
    """Ask *prompt* as a y/n question; abort via fail() on anything but 'y'."""
    if input("\n%s (y/n): " % prompt).lower() != "y":
        fail("Okay, exiting")
Exemplo n.º 36
0
    #  Note: Assumes this is a sorted (newest->oldest) list of un-released
    #  versions
    if branch == "master":
        return versions[0]
    else:
        branch_ver = branch.replace("branch-", "")
        return filter(lambda x: x.name.startswith(branch_ver), versions)[-1]


# branches = get_json("%s/branches" % GITHUB_API_BASE)
# branch_names = filter(lambda x: x.startswith("branch-"),
#                       [x['name'] for x in branches])
# Assumes branch names can be sorted lexicographically
# latest_branch = sorted(branch_names, reverse=True)[0]

# Interactively pick the pull request to merge, then fetch its metadata
# from the GitHub API.
pr_num = input("Which pull request would you like to merge? (e.g. 34): ")
pr = get_json("%s/pulls/%s" % (GITHUB_API_BASE, pr_num))

# Fields used later for the merge commit message and target branch.
url = pr["url"]
title = pr["title"]
body = pr["body"]
target_ref = pr["base"]["ref"]
user_login = pr["user"]["login"]
base_ref = pr["head"]["ref"]
pr_repo_desc = "%s/%s" % (user_login, base_ref)

# Already-merged PRs must be backported by hand; bail out early.
if pr["merged"] is True:
    print("Pull request {0} has already been merged, please backport manually".
          format(pr_num))
    sys.exit(0)
Exemplo n.º 37
0
def _Input(prompt):
    """Indirection over the builtin input(); exists so unit tests can mock it."""
    response = input(prompt)
    return response
Exemplo n.º 38
0
    def Run(self, args):
        """This is what gets called when the user runs this command.

    Args:
      args: an argparse namespace. All the arguments that were provided to this
        command invocation.

    Returns:
      Some value that we want to have printed later.
    """
        adapter = self.context['api_adapter']
        location_get = self.context['location_get']
        location = location_get(args)
        cluster_ref = adapter.ParseCluster(args.name, location)
        cluster_name = args.name
        cluster_node_count = None
        cluster_zone = cluster_ref.zone
        cluster_is_required = self.IsClusterRequired(args)
        try:
            # Attempt to get cluster for better prompts and to validate args.
            # Error is a warning but not fatal. Should only exit with a failure on
            # the actual update API calls below.
            cluster = adapter.GetCluster(cluster_ref)
            cluster_name = cluster.name
            cluster_node_count = cluster.currentNodeCount
            cluster_zone = cluster.zone
        except (exceptions.HttpException,
                apitools_exceptions.HttpForbiddenError, util.Error) as error:
            if cluster_is_required:
                raise
            log.warning(
                ('Problem loading details of cluster to update:\n\n{}\n\n'
                 'You can still attempt updates to the cluster.\n').format(
                     console_attr.SafeText(error)))

        # locations will be None if additional-zones was specified, an empty list
        # if it was specified with no argument, or a populated list if zones were
        # provided. We want to distinguish between the case where it isn't
        # specified (and thus shouldn't be passed on to the API) and the case where
        # it's specified as wanting no additional zones, in which case we must pass
        # the cluster's primary zone to the API.
        # TODO(b/29578401): Remove the hasattr once the flag is GA.
        locations = None
        if hasattr(args,
                   'additional_zones') and args.additional_zones is not None:
            locations = sorted([cluster_ref.zone] + args.additional_zones)
        if hasattr(args, 'node_locations') and args.node_locations is not None:
            locations = sorted(args.node_locations)

        flags.LogBasicAuthDeprecationWarning(args)
        if args.IsSpecified('username') or args.IsSpecified(
                'enable_basic_auth'):
            flags.MungeBasicAuthFlags(args)
            options = api_adapter.SetMasterAuthOptions(
                action=api_adapter.SetMasterAuthOptions.SET_USERNAME,
                username=args.username,
                password=args.password)

            try:
                op_ref = adapter.SetMasterAuth(cluster_ref, options)
            except apitools_exceptions.HttpError as error:
                raise exceptions.HttpException(error, util.HTTP_ERROR_FORMAT)
        elif (args.generate_password or args.set_password
              or args.IsSpecified('password')):
            if args.generate_password:
                password = ''
                options = api_adapter.SetMasterAuthOptions(
                    action=api_adapter.SetMasterAuthOptions.GENERATE_PASSWORD,
                    password=password)
            else:
                password = args.password
                if not args.IsSpecified('password'):
                    password = input('Please enter the new password:'******'Enabling/Disabling Network Policy causes a rolling '
                'update of all cluster nodes, similar to performing a cluster '
                'upgrade.  This operation is long-running and will block other '
                'operations on the cluster (including delete) until it has run '
                'to completion.',
                cancel_on_no=True)
            options = api_adapter.SetNetworkPolicyOptions(
                enabled=args.enable_network_policy)
            try:
                op_ref = adapter.SetNetworkPolicy(cluster_ref, options)
            except apitools_exceptions.HttpError as error:
                raise exceptions.HttpException(error, util.HTTP_ERROR_FORMAT)
        elif args.start_ip_rotation or args.start_credential_rotation:
            if args.start_ip_rotation:
                msg_tmpl = """This will start an IP Rotation on cluster [{name}]. The \
master will be updated to serve on a new IP address in addition to the current \
IP address. Kubernetes Engine will then recreate all nodes ({num_nodes} nodes) \
to point to the new IP address. This operation is long-running and will block \
other operations on the cluster (including delete) until it has run to \
completion."""
                rotate_credentials = False
            elif args.start_credential_rotation:
                msg_tmpl = """This will start an IP and Credentials Rotation on cluster\
 [{name}]. The master will be updated to serve on a new IP address in addition \
to the current IP address, and cluster credentials will be rotated. Kubernetes \
Engine will then recreate all nodes ({num_nodes} nodes) to point to the new IP \
address. This operation is long-running and will block other operations on the \
cluster (including delete) until it has run to completion."""
                rotate_credentials = True
            console_io.PromptContinue(message=msg_tmpl.format(
                name=cluster_name,
                num_nodes=cluster_node_count if cluster_node_count else '?'),
                                      cancel_on_no=True)
            try:
                op_ref = adapter.StartIpRotation(
                    cluster_ref, rotate_credentials=rotate_credentials)
            except apitools_exceptions.HttpError as error:
                raise exceptions.HttpException(error, util.HTTP_ERROR_FORMAT)
        elif args.complete_ip_rotation or args.complete_credential_rotation:
            if args.complete_ip_rotation:
                msg_tmpl = """This will complete the in-progress IP Rotation on \
cluster [{name}]. The master will be updated to stop serving on the old IP \
address and only serve on the new IP address. Make sure all API clients have \
been updated to communicate with the new IP address (e.g. by running `gcloud \
container clusters get-credentials --project {project} --zone {zone} {name}`). \
This operation is long-running and will block other operations on the cluster \
(including delete) until it has run to completion."""
            elif args.complete_credential_rotation:
                msg_tmpl = """This will complete the in-progress Credential Rotation on\
 cluster [{name}]. The master will be updated to stop serving on the old IP \
address and only serve on the new IP address. Old cluster credentials will be \
invalidated. Make sure all API clients have been updated to communicate with \
the new IP address (e.g. by running `gcloud container clusters get-credentials \
--project {project} --zone {zone} {name}`). This operation is long-running and \
will block other operations on the cluster (including delete) until it has run \
to completion."""
            console_io.PromptContinue(message=msg_tmpl.format(
                name=cluster_name,
                project=cluster_ref.projectId,
                zone=cluster_zone),
                                      cancel_on_no=True)
            try:
                op_ref = adapter.CompleteIpRotation(cluster_ref)
            except apitools_exceptions.HttpError as error:
                raise exceptions.HttpException(error, util.HTTP_ERROR_FORMAT)
        elif args.update_labels is not None:
            try:
                op_ref = adapter.UpdateLabels(cluster_ref, args.update_labels)
            except apitools_exceptions.HttpError as error:
                raise exceptions.HttpException(error, util.HTTP_ERROR_FORMAT)
        elif args.remove_labels is not None:
            try:
                op_ref = adapter.RemoveLabels(cluster_ref, args.remove_labels)
            except apitools_exceptions.HttpError as error:
                raise exceptions.HttpException(error, util.HTTP_ERROR_FORMAT)
        elif args.logging_service is not None and args.monitoring_service is None:
            try:
                op_ref = adapter.SetLoggingService(cluster_ref,
                                                   args.logging_service)
            except apitools_exceptions.HttpError as error:
                raise exceptions.HttpException(error, util.HTTP_ERROR_FORMAT)
        elif args.maintenance_window is not None:
            try:
                op_ref = adapter.SetDailyMaintenanceWindow(
                    cluster_ref, cluster.maintenancePolicy,
                    args.maintenance_window)
            except apitools_exceptions.HttpError as error:
                raise exceptions.HttpException(error, util.HTTP_ERROR_FORMAT)
        elif getattr(args, 'maintenance_window_start', None) is not None:
            try:
                op_ref = adapter.SetRecurringMaintenanceWindow(
                    cluster_ref, cluster.maintenancePolicy,
                    args.maintenance_window_start, args.maintenance_window_end,
                    args.maintenance_window_recurrence)
            except apitools_exceptions.HttpError as error:
                raise exceptions.HttpException(error, util.HTTP_ERROR_FORMAT)
        elif getattr(args, 'clear_maintenance_window', None):
            try:
                op_ref = adapter.RemoveMaintenanceWindow(
                    cluster_ref, cluster.maintenancePolicy)
            except apitools_exceptions.HttpError as error:
                raise exceptions.HttpException(error, util.HTTP_ERROR_FORMAT)
        elif getattr(args, 'add_maintenance_exclusion_end', None) is not None:
            try:
                op_ref = adapter.AddMaintenanceExclusion(
                    cluster_ref, cluster.maintenancePolicy,
                    args.add_maintenance_exclusion_name,
                    args.add_maintenance_exclusion_start,
                    args.add_maintenance_exclusion_end)
            except apitools_exceptions.HttpError as error:
                raise exceptions.HttpException(error, util.HTTP_ERROR_FORMAT)
        elif getattr(args, 'remove_maintenance_exclusion', None) is not None:
            try:
                op_ref = adapter.RemoveMaintenanceExclusion(
                    cluster_ref, cluster.maintenancePolicy,
                    args.remove_maintenance_exclusion)
            except apitools_exceptions.HttpError as error:
                raise exceptions.HttpException(error, util.HTTP_ERROR_FORMAT)
        elif getattr(args, 'add_cross_connect_subnetworks', None) is not None:
            try:
                op_ref = adapter.ModifyCrossConnectSubnetworks(
                    cluster_ref,
                    cluster.privateClusterConfig.crossConnectConfig,
                    add_subnetworks=args.add_cross_connect_subnetworks)
            except apitools_exceptions.HttpError as error:
                raise exceptions.HttpException(error, util.HTTP_ERROR_FORMAT)
        elif getattr(args, 'remove_cross_connect_subnetworks',
                     None) is not None:
            try:
                op_ref = adapter.ModifyCrossConnectSubnetworks(
                    cluster_ref,
                    cluster.privateClusterConfig.crossConnectConfig,
                    remove_subnetworks=args.remove_cross_connect_subnetworks)

            except apitools_exceptions.HttpError as error:
                raise exceptions.HttpException(error, util.HTTP_ERROR_FORMAT)
        elif getattr(args, 'clear_cross_connect_subnetworks',
                     None) is not None:
            try:
                op_ref = adapter.ModifyCrossConnectSubnetworks(
                    cluster_ref,
                    cluster.privateClusterConfig.crossConnectConfig,
                    clear_all_subnetworks=True)
            except apitools_exceptions.HttpError as error:
                raise exceptions.HttpException(error, util.HTTP_ERROR_FORMAT)
        else:
            if args.enable_legacy_authorization is not None:
                op_ref = adapter.SetLegacyAuthorization(
                    cluster_ref, args.enable_legacy_authorization)
            else:
                options = self.ParseUpdateOptions(args, locations)
                op_ref = adapter.UpdateCluster(cluster_ref, options)

        if not args.async_:
            adapter.WaitForOperation(op_ref,
                                     'Updating {0}'.format(
                                         cluster_ref.clusterId),
                                     timeout_s=args.timeout)

            log.UpdatedResource(cluster_ref)
            cluster_url = util.GenerateClusterUrl(cluster_ref)
            log.status.Print(
                'To inspect the contents of your cluster, go to: ' +
                cluster_url)

            if (args.start_ip_rotation or args.complete_ip_rotation
                    or args.start_credential_rotation
                    or args.complete_credential_rotation):
                cluster = adapter.GetCluster(cluster_ref)
                try:
                    util.ClusterConfig.Persist(cluster, cluster_ref.projectId)
                except kconfig.MissingEnvVarError as error:
                    log.warning(error)
Exemplo n.º 39
0
 def stdin_listener():
     """Read lines from stdin until the user enters Q (either case)."""
     while True:
         choice = input("Press Q to quit\n")
         if choice in ("Q", "q"):
             print("Quitting...")
             return
Exemplo n.º 40
0
def PromptForAuthCode(message, authorize_url):
    """Display *message* plus the authorization URL, then read the code.

    Returns the verification code typed by the user, stripped of
    surrounding whitespace.
    """
    banner = '{message}\n\n    {url}\n\n'.format(
        message=message,
        url=authorize_url,
    )
    log.err.Print(banner)
    code = input('Enter verification code: ')
    return code.strip()
Exemplo n.º 41
0
def git_push(args):
    parser = argparse.ArgumentParser(
        prog='git push',
        usage=
        'git push [http(s)://<remote repo> or remote] [-u username[:password]]',
        description="Push to a remote repository")
    parser.add_argument('url', type=str, nargs='?', help='URL to push to')
    parser.add_argument('-u',
                        metavar='username[:password]',
                        type=str,
                        required=False,
                        help='username[:password]')
    result = parser.parse_args(args)

    user, sep, pw = result.u.partition(':') if result.u else (None, None, None)

    repo = _get_repo()

    origin = 'origin'
    if not result.url:
        result.url = repo.remotes.get('origin', '')
    if result.url in repo.remotes:
        origin = result.url
        result.url = repo.remotes.get(origin)

    branch_name = os.path.join(
        'refs', 'heads',
        repo.active_branch)  #'refs/heads/%s' % repo.active_branch

    print("Attempting to push to: {0}, branch: {1}".format(
        result.url, branch_name))

    netloc = urlparse(result.url).netloc

    keychainservice = 'stash.git.{0}'.format(netloc)

    if sep and not user:
        # -u : clears keychain for this server
        for service in keychain.get_services():
            if service[0] == keychainservice:
                keychain.delete_password(*service)

    #Attempt to retrieve user
    if not user and SAVE_PASSWORDS and result.url.startswith('http'):
        try:
            user = dict(keychain.get_services())[keychainservice]
        except KeyError:
            user = input('Enter username: '******'Enter password: '******'Enter credentials for {0}'.format(netloc))

    outstream = StringIO()
    if user:
        if not pw and SAVE_PASSWORDS:
            pw = keychain.get_password(keychainservice, user)

        #Check again, did we retrieve a password?
        if not pw:
            user, pw = console.login_alert(
                'Enter credentials for {0}'.format(netloc), login=user)
        host_with_auth = '{}:{}@{}'.format(user, pw, netloc)
        url = urlunparse(urlparse(result.url)._replace(netloc=host_with_auth))
        porcelain.push(repo.repo.path, url, branch_name, errstream=outstream)
        keychain.set_password(keychainservice, user, pw)

    else:
        porcelain.push(repo.repo.path,
                       result.url,
                       branch_name,
                       errstream=outstream)

    for line in outstream.getvalue().split('\n'):
        print((line.replace(pw, '*******') if pw else line))

    print('success!')
Exemplo n.º 42
0
 def __obtain_input(self, text):
     """Read a line from the user, portable across Python 2 and 3."""
     if sys.version_info < (3, 0):
         return raw_input(text)  # noqa
     return input(text)
Exemplo n.º 43
0
         from dulwich.client import default_user_agent_string
         from dulwich import porcelain
         from dulwich.index import index_entry_from_stat
         if not dulwich.__version__ == REQUIRED_DULWICH_VERSION:
             print(
                 'Could not find correct version. Will download proper fork now'
             )
             download_dulwich = True
         else:
             print('Correct version loaded.')
 except ImportError as e:
     print('dulwich was not found.  Will attempt to download. ')
     download_dulwich = True
 try:
     if download_dulwich:
         if not input(
                 'Need to download dulwich.  OK to download [y/n]?') == 'y':
             raise ImportError()
         _stash('wget {} -o $TMPDIR/dulwich.zip'.format(DULWICH_URL))
         _stash('unzip $TMPDIR/dulwich.zip -d $TMPDIR/dulwich')
         _stash('rm -r $STASH_ROOT/lib/dulwich.old')
         _stash('mv $STASH_ROOT/lib/dulwich $STASH_ROOT/lib/dulwich.old')
         _stash('mv $TMPDIR/dulwich/dulwich $STASH_ROOT/lib/')
         _stash('rm  $TMPDIR/dulwich.zip')
         _stash('rm -r $TMPDIR/dulwich')
         _stash('rm -r $STASH_ROOT/lib/dulwich.old')
         try:
             # dulwich might have already been in site-packages for instance.
             # So, some acrobatic might be needed to unload the module
             if 'dulwich' in sys.modules:
                 for m in [
                         m for m in sys.modules if m.startswith('dulwich')
Exemplo n.º 44
0
def salt(context, force=False):
    """Bootstrap Servo build dependencies with a local, masterless Salt run.

    Installs Salt into the virtualenv, writes a minion config and pillar
    data under ``context.sharedir``, then applies the
    ``servo-build-dependencies`` state.  Unless *force* is set, a dry run
    is shown first and the user must confirm before the real run.

    Returns 0 on success, or a non-zero code if any step fails.
    """
    # Ensure Salt dependencies are installed
    install_salt_dependencies(context, force)
    # Ensure Salt is installed in the virtualenv
    # It's not installed globally because it's a large, non-required dependency,
    # and the installation fails on Windows
    print("Checking Salt installation...", end='')
    reqs_path = os.path.join(context.topdir, 'python', 'requirements-salt.txt')
    process = subprocess.Popen(["pip", "install", "-q", "-I", "-r", reqs_path],
                               stdout=PIPE,
                               stderr=PIPE)
    # communicate() waits for exit while draining both pipes; the previous
    # wait()-then-communicate() sequence could deadlock once pip's output
    # filled an OS pipe buffer.
    out, err = process.communicate()
    if process.returncode:
        print('failed to install Salt via pip:')
        print('Output: {}\nError: {}'.format(out, err))
        return 1
    print("done")

    salt_root = os.path.join(context.sharedir, 'salt')
    config_dir = os.path.join(salt_root, 'etc', 'salt')
    pillar_dir = os.path.join(config_dir, 'pillars')

    # In order to allow `mach bootstrap` to work from any CWD,
    # the `root_dir` must be an absolute path.
    # We place it under `context.sharedir` because
    # Salt caches data (e.g. gitfs files) in its `var` subdirectory.
    # Hence, dynamically generate the config with an appropriate `root_dir`
    # and serialize it as JSON (which is valid YAML).
    config = {
        'hash_type': 'sha384',
        'master': 'localhost',
        'root_dir': salt_root,
        'state_output': 'changes',
        'state_tabular': True,
    }
    if 'SERVO_SALTFS_ROOT' in os.environ:
        # A local saltfs checkout overrides the default gitfs remote.
        config.update({
            'fileserver_backend': ['roots'],
            'file_roots': {
                'base': [os.path.abspath(os.environ['SERVO_SALTFS_ROOT'])],
            },
        })
    else:
        config.update({
            'fileserver_backend': ['git'],
            'gitfs_env_whitelist':
            'base',
            'gitfs_provider':
            'gitpython',
            'gitfs_remotes': [
                'https://github.com/servo/saltfs.git',
            ],
        })

    if not os.path.exists(config_dir):
        os.makedirs(config_dir, mode=0o700)
    with open(os.path.join(config_dir, 'minion'), 'w') as config_file:
        config_file.write(json.dumps(config) + '\n')

    # Similarly, the pillar data is created dynamically
    # and temporarily serialized to disk.
    # This dynamism is not yet used, but will be in the future
    # to enable Android bootstrapping by using
    # context.sharedir as a location for Android packages.
    pillar = {
        'top.sls': {
            'base': {
                '*': ['bootstrap'],
            },
        },
        'bootstrap.sls': {
            'fully_managed': False,
        },
    }
    if os.path.exists(pillar_dir):
        shutil.rmtree(pillar_dir)
    os.makedirs(pillar_dir, mode=0o700)
    for filename in pillar:
        with open(os.path.join(pillar_dir, filename), 'w') as pillar_file:
            pillar_file.write(json.dumps(pillar[filename]) + '\n')

    cmd = [
        # sudo escapes from the venv, need to use full path
        find_executable('salt-call'),
        '--local',
        '--config-dir={}'.format(config_dir),
        '--pillar-root={}'.format(pillar_dir),
        'state.apply',
        'servo-build-dependencies',
    ]

    if not force:
        print('Running bootstrap in dry-run mode to show changes')
        # Because `test=True` mode runs each state individually without
        # considering how required/previous states affect the system,
        # it will often report states with requisites as failing due
        # to the requisites not actually being run,
        # even though these are spurious and will succeed during
        # the actual highstate.
        # Hence `--retcode-passthrough` is not helpful in dry-run mode,
        # so only detect failures of the actual salt-call binary itself.
        retcode = run_as_root(cmd + ['test=True'])
        if retcode != 0:
            print('Something went wrong while bootstrapping')
            return retcode

        proceed = input(
            'Proposed changes are above, proceed with bootstrap? [y/N]: ')
        if proceed.lower() not in ['y', 'yes']:
            return 0

        print('')

    print('Running Salt bootstrap')
    retcode = run_as_root(cmd + ['--retcode-passthrough'])
    if retcode == 0:
        print('Salt bootstrapping complete')
    else:
        print('Salt bootstrapping encountered errors')
    return retcode
Exemplo n.º 45
0
def run_flow(flow, storage, flags=None, http=None):
    """Core code for a command-line application.

    The ``run()`` function is called from your application and runs
    through all the steps to obtain credentials. It takes a ``Flow``
    argument and attempts to open an authorization server page in the
    user's default web browser. The server asks the user to grant your
    application access to the user's data. If the user grants access,
    the ``run()`` function returns new credentials. The new credentials
    are also stored in the ``storage`` argument, which updates the file
    associated with the ``Storage`` object.

    It presumes it is run from a command-line application and supports the
    following flags:

        ``--auth_host_name`` (string, default: ``localhost``)
           Host name to use when running a local web server to handle
           redirects during OAuth authorization.

        ``--auth_host_port`` (integer, default: ``[8080, 8090]``)
           Port to use when running a local web server to handle redirects
           during OAuth authorization. Repeat this option to specify a list
           of values.

        ``--[no]auth_local_webserver`` (boolean, default: ``True``)
           Run a local web server to handle redirects during OAuth
           authorization.

    The tools module defines an ``ArgumentParser`` the already contains the
    flag definitions that ``run()`` requires. You can pass that
    ``ArgumentParser`` to your ``ArgumentParser`` constructor::

        parser = argparse.ArgumentParser(
            description=__doc__,
            formatter_class=argparse.RawDescriptionHelpFormatter,
            parents=[tools.argparser])
        flags = parser.parse_args(argv)

    Args:
        flow: Flow, an OAuth 2.0 Flow to step through.
        storage: Storage, a ``Storage`` to store the credential in.
        flags: ``argparse.Namespace``, (Optional) The command-line flags. This
               is the object returned from calling ``parse_args()`` on
               ``argparse.ArgumentParser`` as described above. Defaults
               to ``argparser.parse_args()``.
        http: An instance of ``httplib2.Http.request`` or something that
              acts like it.

    Returns:
        Credentials, the obtained credential.
    """
    if flags is None:
        flags = argparser.parse_args()
    logging.getLogger().setLevel(getattr(logging, flags.logging_level))
    if not flags.noauth_local_webserver:
        success = False
        port_number = 0
        # Try each candidate port in order; keep the first server that binds.
        for port in flags.auth_host_port:
            port_number = port
            try:
                httpd = ClientRedirectServer((flags.auth_host_name, port),
                                             ClientRedirectHandler)
            except socket.error:
                pass
            else:
                success = True
                break
        # If no port could be bound, fall back to the manual (no-webserver)
        # flow and tell the user why.
        flags.noauth_local_webserver = not success
        if not success:
            print(_FAILED_START_MESSAGE)

    # Pick the redirect URI: the local server when one is running, otherwise
    # the out-of-band callback URN (user copies the code by hand).
    if not flags.noauth_local_webserver:
        oauth_callback = 'http://{host}:{port}/'.format(
            host=flags.auth_host_name, port=port_number)
    else:
        oauth_callback = client.OOB_CALLBACK_URN
    flow.redirect_uri = oauth_callback
    authorize_url = flow.step1_get_authorize_url()

    if not flags.noauth_local_webserver:
        import webbrowser
        webbrowser.open(authorize_url, new=1, autoraise=True)
        print(_BROWSER_OPENED_MESSAGE.format(address=authorize_url))
    else:
        print(_GO_TO_LINK_MESSAGE.format(address=authorize_url))

    # Obtain the authorization code, either from the OAuth redirect handled
    # by the local server or typed in by the user.
    code = None
    if not flags.noauth_local_webserver:
        httpd.handle_request()  # blocks until the single redirect arrives
        if 'error' in httpd.query_params:
            sys.exit('Authentication request was rejected.')
        if 'code' in httpd.query_params:
            code = httpd.query_params['code']
        else:
            print('Failed to find "code" in the query parameters '
                  'of the redirect.')
            sys.exit('Try running with --noauth_local_webserver.')
    else:
        code = input('Enter verification code: ').strip()

    # Exchange the authorization code for credentials.
    try:
        credential = flow.step2_exchange(code, http=http)
    except client.FlowExchangeError as e:
        sys.exit('Authentication has failed: {0}'.format(e))

    # Persist the credential and wire it back to its store so refreshes
    # are saved automatically.
    storage.put(credential)
    credential.set_store(storage)
    print('Authentication successful.')

    return credential
Exemplo n.º 46
0
    def remove():
        """
swift-ring-builder <builder_file> remove <search-value> [search-value ...]

or

swift-ring-builder <builder_file> search
    --region <region> --zone <zone> --ip <ip or hostname> --port <port>
    --replication-ip <r_ip or r_hostname> --replication-port <r_port>
    --device <device_name> --meta <meta> --weight <weight>

    Where <r_ip>, <r_hostname> and <r_port> are replication ip, hostname
    and port.
    Any of the options are optional in both cases.

    Removes the device(s) from the ring. This should normally just be used for
    a device that has failed. For a device you wish to decommission, it's best
    to set its weight to 0, wait for it to drain all its data, then use this
    remove command. This will not take effect until after running 'rebalance'.
    This is so you can make multiple device changes and rebalance them all just
    once.
        """
        # Not enough arguments: show this command's usage (the docstring
        # above is the runtime help text) plus the search-value syntax.
        if len(argv) < 4:
            print(Commands.remove.__doc__.strip())
            print()
            print(parse_search_value.__doc__.strip())
            exit(EXIT_ERROR)

        # 1. Parse the search arguments into the list of matching devices.
        devs = _parse_remove_values(argv[3:])

        if not devs:
            print('Search value matched 0 devices.\n'
                  'The on-disk ring builder is unchanged.')
            exit(EXIT_ERROR)

        # Multiple matches are potentially destructive; require explicit
        # confirmation before proceeding.
        if len(devs) > 1:
            print('Matched more than one device:')
            for dev in devs:
                print('    %s' % format_device(dev))
            if input('Are you sure you want to remove these %s '
                     'devices? (y/N) ' % len(devs)) != 'y':
                print('Aborting device removals')
                exit(EXIT_ERROR)

        # 2. Remove each matched device from the ring by its dev id; the
        #    builder actually just queues it on its internal _remove_devs
        #    list until the next rebalance.
        for dev in devs:
            try:
                # Queues the device for removal rather than deleting it
                # from the ring immediately.
                builder.remove_dev(dev['id'])
            except exceptions.RingBuilderError as e:
                print('-' * 79)
                print('An error occurred while removing device with id %d\n'
                      'This usually means that you attempted to remove\n'
                      'the last device in a ring. If this is the case,\n'
                      'consider creating a new ring instead.\n'
                      'The on-disk ring builder is unchanged.\n'
                      'Original exception message: %s' % (dev['id'], e))
                print('-' * 79)
                exit(EXIT_ERROR)

            print('%s marked for removal and will '
                  'be removed next rebalance.' % format_device(dev))
        # 3. Persist the updated builder state to the .builder file.
        builder.save(builder_file)
        exit(EXIT_SUCCESS)
Exemplo n.º 47
0
def dictionary(jarvis, s):
    """
    Get meaning, synonym and antonym of any word
    """
    # Fall back to prompting when no word was passed on the command line.
    if len(s) == 0:
        jarvis.say('\nEnter word')
        word = input()
    else:
        word = s

    syns = wordnet.synsets(word)
    if not syns:
        jarvis.say("Don't recognise that word")
        return

    synonyms = set()
    antonyms = set()

    # List every sense of the word, gathering synonyms/antonyms as we go.
    for count, meaning in enumerate(syns, 1):
        jarvis.say("{:>3}. {}".format(count, meaning.definition()))

        for lemma in meaning.lemmas():
            if lemma.name() != word:
                synonyms.add(lemma.name())
            for opposite in lemma.antonyms():
                antonyms.add(opposite.name())

    jarvis.say('\nSynonyms:\n' + ", ".join(synonyms))
    jarvis.say('\nAntonyms:\n' + ", ".join(antonyms))

    # detail loop
    def input_detail_id():
        # Keep asking until we get an empty reply (None => stop) or a
        # valid 1-based sense index.
        while True:
            jarvis.say("")
            synlen = len(syns)
            raw = input("Details of meaning (1-{}): ? ".format(synlen))
            if raw == '':
                return None

            try:
                chosen = int(raw)
            except ValueError:
                continue

            if chosen <= 0 or chosen > synlen:
                jarvis.say("Choose Value between 1 and {}".format(synlen))
                continue

            return chosen

    detail_id = input_detail_id()
    while detail_id is not None:
        meaning = syns[detail_id - 1]

        synonyms = [name for name in meaning.lemma_names() if name != word]
        examples = meaning.examples()
        antonyms = {opposite.name()
                    for lemma in meaning.lemmas()
                    for opposite in lemma.antonyms()}

        jarvis.say('')
        jarvis.say('== {}. =='.format(detail_id))
        jarvis.say("Meaning  : {}".format(meaning.definition()))
        if synonyms:
            jarvis.say("Synonyms : {}".format(", ".join(synonyms)))
        if antonyms:
            jarvis.say("Antonyms : {}".format(", ".join(antonyms)))
        if examples:
            if len(examples) == 1:
                jarvis.say("Examples : {}".format(examples[0]))
            else:
                jarvis.say("Examples :\n-{}".format("\n- ".join(examples)))

        detail_id = input_detail_id()
 def _init_vars(self, conf_file):
     """Load failover/failback settings from *conf_file*, interactively
     prompting the user for any value that is missing or invalid.

     Returns a tuple (target_host, source_map, var_file, vault,
     ansible_play).
     """
     """ Declare constants """
     _SECTION = "failover_failback"
     _TARGET = "dr_target_host"
     _SOURCE = "dr_source_map"
     _VAULT = "vault"
     _VAR_FILE = "var_file"
     _ANSIBLE_PLAY = 'ansible_play'
     setups = ['primary', 'secondary']
     """ Declare variables """
     target_host, source_map, vault, var_file, ansible_play = \
         '', '', '', '', ''
     settings = SafeConfigParser()
     settings.read(conf_file)
     # Make sure the section and every option exist so the .get() calls
     # below cannot raise NoSectionError/NoOptionError.
     if _SECTION not in settings.sections():
         settings.add_section(_SECTION)
     for _option in (_TARGET, _SOURCE, _VAULT, _VAR_FILE, _ANSIBLE_PLAY):
         if not settings.has_option(_SECTION, _option):
             settings.set(_SECTION, _option, '')
     target_host = settings.get(_SECTION,
                                _TARGET,
                                vars=DefaultOption(settings,
                                                   _SECTION,
                                                   target_host=None))
     source_map = settings.get(_SECTION,
                               _SOURCE,
                               vars=DefaultOption(settings,
                                                  _SECTION,
                                                  source_map=None))
     vault = settings.get(_SECTION,
                          _VAULT,
                          vars=DefaultOption(settings, _SECTION,
                                             vault=None))
     var_file = settings.get(_SECTION,
                             _VAR_FILE,
                             vars=DefaultOption(settings,
                                                _SECTION,
                                                var_file=None))
     ansible_play = settings.get(_SECTION,
                                 _ANSIBLE_PLAY,
                                 vars=DefaultOption(settings,
                                                    _SECTION,
                                                    ansible_play=None))
     # Re-prompt until each value is valid.
     while target_host not in setups:
         target_host = input(INPUT + PREFIX +
                             "target host was not defined. "
                             "Please provide the target host "
                             "(primary or secondary): " + END)
     while source_map not in setups:
         source_map = input(INPUT + PREFIX +
                            "source mapping was not defined. "
                            "Please provide the source mapping "
                            "(primary or secondary): " + END)
     while not os.path.isfile(var_file):
         var_file = input("%s%svar file mapping '%s' does not exist. "
                          "Please provide a valid mapping var file: %s" %
                          (INPUT, PREFIX, var_file, END))
     while not os.path.isfile(vault):
         vault = input("%s%spassword file '%s' does not exist. "
                       "Please provide a valid password file:%s " %
                       (INPUT, PREFIX, vault, END))
     while (not ansible_play) or (not os.path.isfile(ansible_play)):
         # An empty reply selects the default play file (PLAY_DEF), as the
         # prompt advertises. BUG FIX: the original attached 'or PLAY_DEF'
         # to the (always-truthy) prompt string inside input()'s argument
         # instead of to input()'s return value, so the default could
         # never be chosen and an empty reply just re-prompted forever.
         ansible_play = input(
             "%s%sansible play '%s' "
             "is not initialized. "
             "Please provide the ansible play file "
             "to generate the mapping var file "
             "with ('%s'):%s " %
             (INPUT, PREFIX, str(ansible_play), PLAY_DEF, END)) or PLAY_DEF
     return target_host, source_map, var_file, vault, ansible_play
# Build the device client from the connection string in the environment.
conn_str = os.getenv("IOTHUB_DEVICE_CONNECTION_STRING")
device_client = IoTHubDeviceClient.create_from_connection_string(conn_str)

# Open the connection to IoT Hub.
device_client.connect()


# Background worker: surface each desired-properties patch as it arrives.
def twin_patch_listener(device_client):
    while True:
        # receive_twin_desired_properties_patch() blocks until IoT Hub
        # pushes the next twin patch.
        patch = device_client.receive_twin_desired_properties_patch()
        print("the data in the desired properties patch was: {}".format(patch))


# The listener runs on a daemon thread so it dies with the main program.
listen_thread = threading.Thread(target=twin_patch_listener, args=(device_client,))
listen_thread.daemon = True
listen_thread.start()


# Block the main thread until the user asks to quit.
while True:
    selection = input("Press Q to quit\n")
    if selection in ("Q", "q"):
        print("Quitting...")
        break


# Tear down the connection before exiting.
device_client.disconnect()
Exemplo n.º 50
0
def _interactive_input_fn(hparams, decode_hp):
  """Generator that reads from the terminal and yields "interactive inputs".

  Due to temporary limitations in tf.learn, if we don't want to reload the
  whole graph, then we are stuck encoding all of the input as one fixed-size
  numpy array.

  We yield int32 arrays with shape [const_array_size].  The format is:
  [num_samples, decode_length, len(input ids), <input ids>, <padding>]

  Args:
    hparams: model hparams
    decode_hp: decode hparams
  Yields:
    numpy arrays

  Raises:
    Exception: when `input_type` is invalid.
  """
  num_samples = decode_hp.num_samples if decode_hp.num_samples > 0 else 1
  decode_length = decode_hp.extra_length
  input_type = "text"
  p_hparams = hparams.problem_hparams
  has_input = "inputs" in p_hparams.input_modality
  # Encode with the input vocabulary when the problem has inputs, otherwise
  # with the target vocabulary (e.g. pure language models).
  vocabulary = p_hparams.vocabulary["inputs" if has_input else "targets"]
  # This should be longer than the longest input.
  const_array_size = 10000
  # Import readline if available for command line editing and recall.
  try:
    import readline  # pylint: disable=g-import-not-at-top,unused-variable
  except ImportError:
    pass
  while True:
    prompt = ("INTERACTIVE MODE  num_samples=%d  decode_length=%d  \n"
              "  it=<input_type>     ('text' or 'image' or 'label', default: "
              "text)\n"
              "  ns=<num_samples>    (changes number of samples, default: 1)\n"
              "  dl=<decode_length>  (changes decode length, default: 100)\n"
              "  <%s>                (decode)\n"
              "  q                   (quit)\n"
              ">" % (num_samples, decode_length, "source_string"
                     if has_input else "target_prefix"))
    input_string = input(prompt)
    # Commands first: quit, or update session settings without decoding.
    if input_string == "q":
      return
    elif input_string[:3] == "ns=":
      num_samples = int(input_string[3:])
    elif input_string[:3] == "dl=":
      decode_length = int(input_string[3:])
    elif input_string[:3] == "it=":
      input_type = input_string[3:]
    else:
      # Anything else is content to decode, interpreted per input_type.
      if input_type == "text":
        input_ids = vocabulary.encode(input_string)
        if has_input:
          input_ids.append(text_encoder.EOS_ID)
        # Pack [num_samples, decode_length, len, ids...] and zero-pad to the
        # fixed array size expected by the graph.
        x = [num_samples, decode_length, len(input_ids)] + input_ids
        assert len(x) < const_array_size
        x += [0] * (const_array_size - len(x))
        features = {
            "inputs": np.array(x).astype(np.int32),
        }
      elif input_type == "image":
        # NOTE(review): assumes the vocabulary turns a file path into a
        # pixel array for image problems — confirm against the problem's
        # encoder implementation.
        input_path = input_string
        img = vocabulary.encode(input_path)
        features = {
            "inputs": img.astype(np.int32),
        }
      elif input_type == "label":
        input_ids = [int(input_string)]
        x = [num_samples, decode_length, len(input_ids)] + input_ids
        features = {
            "inputs": np.array(x).astype(np.int32),
        }
      else:
        raise Exception("Unsupported input type.")
      # Merge in the problem's fixed feature values (e.g. space/problem ids).
      for k, v in six.iteritems(
          problem_lib.problem_hparams_to_features(p_hparams)):
        features[k] = np.array(v).astype(np.int32)
      yield features
Exemplo n.º 51
0
# -*- coding: utf-8 -*-
'''
@authors: Manuel Boissenin, Yann Cointepas, Denis Riviere

@organization: NAO, UNATI, Neurospin, Gif-sur-Yvette, France

'''

from __future__ import absolute_import
from __future__ import print_function
from . import sro
from six.moves import input

# Manual smoke test for the sro proxy layer: connect to a remote object by
# its URI and exercise a couple of remote calls.
object_uri = input("Please enter object URI: ")

test_proxy = sro.Proxy(object_uri)

# A call expected to succeed on the remote object.
result = test_proxy.add(40, 2)
print(type(result))
print(result)

print(test_proxy.print_variable())

# A call expected to fail remotely (mixing int and str); the proxy should
# surface the remote exception locally.
try:
    result = test_proxy.add(40, 'deux')
except Exception as e:
    print("Exception as expected: " + str(e))
Exemplo n.º 52
0
"""
    Developed by Sameera K. Abeykoon (December 5th 2018)
    To extract slice timing infomation from .json files
    and save it a txt file

"""

import os, io
import numpy as np
import json
import sys
from six.moves import input

if len(sys.argv)<2:
    data_path = input("Enter the json files path ? ")
else:
    data_path = sys.argv[1]


json_files = [f for f in os.listdir(data_path) if (f.endswith('.json') and 'fMRI' in f)]

for j_file in json_files:
    # get a new file to save the slice timing information
    file_name = os.path.splitext(os.path.basename(j_file))[0]
    new_file = data_path + "/" + file_name + ".txt"
    print (j_file)
    # open the .json file
    j_filepath = data_path + "/" + j_file
    input_file = open(j_filepath, 'r')
    json_decode = json.load(input_file)
Exemplo n.º 53
0
 def prompt(self, prompt):
     """Display *prompt* on stdin/stdout and return the user's reply."""
     reply = input(prompt)
     return reply
Exemplo n.º 54
0
def purge_computer_with_DC_objects(ucr, binddn, bindpw, computername):
    """Interactively purge a computer account from the Samba 4 SAM database.

    After two confirmation prompts, removes the account's DNS records, the
    objects referenced by its backlink attributes, the computer object with
    all of its sub-objects, and finally (after a third prompt) the matching
    UDM computer object.

    Exits via sys.exit() when the account is not found or the user declines
    one of the confirmation prompts.
    """
    lp = LoadParm()
    lp.load('/etc/samba/smb.conf')

    samdb = SamDB(os.path.join(SAMBA_PRIVATE_DIR, "sam.ldb"),
                  session_info=system_session(lp),
                  lp=lp)

    # Backlink attributes pointing at related objects that must go too.
    backlink_attribute_list = [
        "serverReferenceBL", "frsComputerReferenceBL",
        "msDFSR-ComputerReferenceBL"
    ]
    msgs = samdb.search(base=ucr["samba4/ldap/base"],
                        scope=samba.ldb.SCOPE_SUBTREE,
                        expression=filter_format(
                            "(&(objectClass=computer)(sAMAccountName=%s$))",
                            [computername]),
                        attrs=backlink_attribute_list)
    if not msgs:
        print("Samba 4 computer account '%s' not found." % (computername, ))
        sys.exit(1)

    answer = input("Really remove %s from Samba 4? [y/N]: " % computername)
    if not answer.lower() in ('y', 'yes'):
        print("Ok, stopping as requested.\n")
        sys.exit(2)

    computer_obj = msgs[0]

    # Confirmation check
    answer = input("If you are really sure type YES and hit enter: ")
    if answer != 'YES':
        print("The answer was not 'YES', confirmation failed.\n")
        sys.exit(1)
    else:
        print("Ok, continuing as requested.\n")

    # Determine the NTDS_objectGUID
    NTDS_objectGUID = None
    if "serverReferenceBL" in computer_obj:
        msgs = samdb.search(
            base=computer_obj["serverReferenceBL"][0].decode('UTF-8'),
            scope=samba.ldb.SCOPE_SUBTREE,
            expression="(CN=NTDS Settings)",
            attrs=["objectGUID"])
        if msgs and "objectGUID" in msgs[0]:
            NTDS_objectGUID = str(
                ndr_unpack(misc.GUID, msgs[0]["objectGUID"][0]))

    # Determine the Domain_GUID
    msgs = samdb.search(base=ucr["samba4/ldap/base"],
                        scope=samba.ldb.SCOPE_BASE,
                        attrs=["objectGUID"])
    if not msgs:
        print("Samba 4 Domain_GUID for base dn '%s' not found." %
              (ucr["samba4/ldap/base"], ))
        sys.exit(1)
    Domain_GUID = str(ndr_unpack(misc.GUID, msgs[0]["objectGUID"][0]))

    # Build current site list
    msgs = samdb.search(base="CN=Configuration,%s" % ucr["samba4/ldap/base"],
                        scope=samba.ldb.SCOPE_SUBTREE,
                        expression="(objectClass=site)",
                        attrs=["cn"])
    site_list = [obj["cn"][0].decode('UTF-8') for obj in msgs]

    # Remove Samba 4 DNS records
    purge_s4_dns_records(ucr, binddn, bindpw, computername, NTDS_objectGUID,
                         Domain_GUID, site_list)

    # remove objects from Samba 4 SAM database
    for backlink_attribute in backlink_attribute_list:
        if backlink_attribute in computer_obj:
            backlink_object = computer_obj[backlink_attribute][0]
            try:
                print("Removing %s from SAM database." % (backlink_object, ))
                samdb.delete(backlink_object, ["tree_delete:0"])
            except Exception:
                # Best-effort: report the failure and keep purging the rest.
                print(
                    "Removal of Samba 4 %s objects %s from Samba 4 SAM database failed."
                    % (
                        backlink_attribute,
                        backlink_object,
                    ),
                    file=sys.stderr)
                print(traceback.format_exc())

    # Now delete the Samba 4 computer account and sub-objects
    # Cannot use tree_delete on isCriticalSystemObject, perform recursive delete like ldbdel code does it:
    msgs = samdb.search(base=computer_obj.dn,
                        scope=samba.ldb.SCOPE_SUBTREE,
                        attrs=["dn"])
    obj_dn_list = [obj.dn for obj in msgs]
    # Longest DNs first, so children are deleted before their parents.
    obj_dn_list.sort(key=len)
    obj_dn_list.reverse()
    for obj_dn in obj_dn_list:
        try:
            print("Removing %s from SAM database." % (obj_dn, ))
            samdb.delete(obj_dn)
        except Exception:
            print(
                "Removal of Samba 4 computer account object %s from Samba 4 SAM database failed."
                % (obj_dn, ),
                file=sys.stderr)
            print(traceback.format_exc(), file=sys.stderr)

    answer = input("Really remove %s from UDM as well? [y/N]: " % computername)
    if not answer.lower() in ('y', 'yes'):
        print("Ok, stopping as requested.\n")
        sys.exit(2)

    # Finally, for consistency remove S4 computer object from UDM
    purge_udm_computer(ucr, binddn, bindpw, computername)
def get_next_arg(prompt):
    """Return the next pre-supplied argument, or prompt when exhausted."""
    try:
        arg = next(argv_iter)
    except StopIteration:
        # No more command-line arguments: ask the user interactively.
        arg = input(prompt)
    return arg
Exemplo n.º 56
0
def cli():
    """Interactively merge a GitHub pull request and resolve its JIRA issue.

    JIRA credentials come from the JIRA_USERNAME / JIRA_PASSWORD environment
    variables when set; otherwise the user is prompted for them.
    """
    # Location of your Arrow git clone
    SEP = os.path.sep
    ARROW_HOME = os.path.abspath(__file__).rsplit(SEP, 2)[0]
    PROJECT_NAME = ARROW_HOME.rsplit(SEP, 1)[1]
    print("ARROW_HOME = " + ARROW_HOME)
    print("PROJECT_NAME = " + PROJECT_NAME)

    cmd = CommandInput()

    # ASF JIRA username
    jira_username = os.environ.get("JIRA_USERNAME")

    # ASF JIRA password
    jira_password = os.environ.get("JIRA_PASSWORD")

    # NOTE(review): the following three prompts were corrupted in the source
    # (a redaction artifact fused them into one broken line); reconstructed
    # from the surviving string fragments — confirm against upstream history.
    if not jira_username:
        jira_username = cmd.prompt("Env JIRA_USERNAME not set, "
                                   "please enter your JIRA username:")

    if not jira_password:
        jira_password = cmd.getpass("Env JIRA_PASSWORD not set, "
                                    "please enter "
                                    "your JIRA password:")

    pr_num = input("Which pull request would you like to merge? (e.g. 34): ")

    # Remote name which points to the GitHub site
    git_remote = os.environ.get("PR_REMOTE_NAME", "apache")

    os.chdir(ARROW_HOME)

    jira_con = jira.client.JIRA({'server': JIRA_API_BASE},
                                basic_auth=(jira_username, jira_password))
    github_api = GitHubAPI(PROJECT_NAME)

    pr = PullRequest(cmd, github_api, git_remote, jira_con, pr_num)

    if pr.is_merged:
        # BUG FIX: the original never applied pr_num to the format string,
        # printing the literal '%s' placeholder.
        print("Pull request %s has already been merged" % pr_num)
        sys.exit(0)

    if not pr.is_mergeable:
        msg = ("Pull request %s is not mergeable in its current form.\n" %
               pr_num + "Continue? (experts only!)")
        cmd.continue_maybe(msg)

    pr.show()

    cmd.continue_maybe("Proceed with merging pull request #%s?" % pr_num)

    # merged hash not used
    pr.merge()

    cmd.continue_maybe("Would you like to update the associated JIRA?")
    jira_comment = ("Issue resolved by pull request %s\n[%s/%s]" %
                    (pr_num, "https://github.com/apache/" + PROJECT_NAME +
                     "/pull", pr_num))

    versions, default_fix_versions = pr.jira_issue.get_candidate_fix_versions()

    default_fix_versions = ",".join(default_fix_versions)

    # Empty reply keeps the suggested default fix versions.
    issue_fix_versions = cmd.prompt("Enter comma-separated "
                                    "fix version(s) [%s]: " %
                                    default_fix_versions)
    if issue_fix_versions == "":
        issue_fix_versions = default_fix_versions
    issue_fix_versions = issue_fix_versions.replace(" ", "").split(",")

    def get_version_json(version_str):
        # Map a version name back to its raw JIRA version payload.
        return [x for x in versions if x.name == version_str][0].raw

    fix_versions_json = [get_version_json(v) for v in issue_fix_versions]
    pr.jira_issue.resolve(fix_versions_json, jira_comment)
Exemplo n.º 57
0
def _GetInput():
    """Read one line from stdin; return None when the stream hits EOF."""
    try:
        line = input()
    except EOFError:
        line = None
    return line
Exemplo n.º 58
0
 def get_input(prompt):
     """Show *prompt* and return whatever line the user types."""
     response = input(prompt)
     return response
Exemplo n.º 59
0
    def cmdloop(self, opts):
        """ Interactive mode worker function

        :param opts: command options
        :type opts: options.
        """
        self.interactive = True

        if not opts.nologo:
            sys.stdout.write(FIPSSTR)
            CLI.version(self._progname, versioning.__version__,\
                                versioning.__extracontent__, fileh=sys.stdout)

        if not self.app.typepath.adminpriv:
            UI().user_not_admin()

        if opts.debug:
            LOGGER.setLevel(logging.DEBUG)
            LERR.setLevel(logging.DEBUG)

        #**********Handler for GUI tab tab ***************
        # Collect every command name (skipping private sections) for the
        # tab-completion candidate table.
        for section in self._commands:
            if section.startswith('_'):
                continue

            for command in self._commands[section]:
                self.commlist.append(command.name)

        for item in self.commlist:
            if item == "help":
                self.candidates[item] = self.commlist
            else:
                self.candidates[item] = []

        self._redobj = TabAndHistoryCompletionClass(dict(self.candidates))
        try:
            session = PromptSession(completer=self._redobj, \
                                                        complete_style=CompleteStyle.READLINE_LIKE)

        except Exception:
            # BUG FIX: narrowed from a bare 'except:', which also swallowed
            # SystemExit/KeyboardInterrupt. Tab completion is optional, so
            # any failure just falls back to plain input().
            LOGGER.info("Console error: Tab complete is unavailable.")
            session = None

        while True:
            try:
                if session:
                    line = session.prompt(versioning.__shortname__+ u' > ', \
                                bottom_toolbar=self._redobj.bottom_toolbar)
                else:
                    line = input(versioning.__shortname__ + u' > ')

            except (EOFError, KeyboardInterrupt):
                # Ctrl-D / Ctrl-C terminates the session gracefully.
                line = "quit\n"

            if not line:
                continue
            elif line.endswith(os.linesep):
                # BUG FIX: str.rstrip returns a new string; the original
                # discarded the result, leaving the trailing newline on.
                line = line.rstrip(os.linesep)

            nargv = shlex.split(line, posix=False)

            try:
                # A new login target invalidates the current session, unless
                # the user is only asking for help.
                if not (any(x.startswith("-h") for x in nargv) or \
                    any(x.startswith("--h") for x in nargv) or "help" in line):
                    if "login " in line or line == 'login' or \
                        any(x.startswith("--url") for x in nargv):
                        self.app.logout()
                self.retcode = self._run_command(opts, nargv)
                self.check_for_tab_lists(nargv)
            except Exception as excp:
                self.handle_exceptions(excp)

            if self.opts.verbose:
                sys.stdout.write("iLOrest return code: %s\n" % self.retcode)

        return self.retcode
Exemplo n.º 60
0
def main(mode, save_path, num_batches, data_path=None):
    """Train or interactively query a character-level word reverser.

    Parameters
    ----------
    mode : str
        ``"train"`` fits the model; ``"sample"`` or ``"beam_search"``
        loads a saved model and reverses sentences typed on stdin.
    save_path : str
        Checkpoint path — written during training, read when sampling.
    num_batches : int
        Training stops after this many batches.
    data_path : str, optional
        Plain-text training file; when omitted, the One Billion Word
        corpus is used instead.
    """
    reverser = WordReverser(100, len(char2code), name="reverser")

    if mode == "train":
        # Data processing pipeline
        dataset_options = dict(dictionary=char2code,
                               level="character",
                               preprocess=_lower)
        if data_path:
            dataset = TextFile(data_path, **dataset_options)
        else:
            dataset = OneBillionWord("training", [99], **dataset_options)
        data_stream = dataset.get_example_stream()
        # Drop overly long examples, pair each with its reversed target,
        # batch, pad to equal length, then transpose (presumably to the
        # time-major layout the recurrent bricks expect — see _transpose).
        data_stream = Filter(data_stream, _filter_long)
        data_stream = Mapping(data_stream,
                              reverse_words,
                              add_sources=("targets", ))
        data_stream = Batch(data_stream, iteration_scheme=ConstantScheme(10))
        data_stream = Padding(data_stream)
        data_stream = Mapping(data_stream, _transpose)

        # Initialization settings
        reverser.weights_init = IsotropicGaussian(0.1)
        reverser.biases_init = Constant(0.0)
        reverser.push_initialization_config()
        # Orthogonal init for the recurrent weights overrides the
        # Gaussian default pushed above.
        reverser.encoder.weights_init = Orthogonal()
        reverser.generator.transition.weights_init = Orthogonal()

        # Build the cost computation graph.  The *_mask inputs mark the
        # valid (non-padding) positions of each padded sequence.
        chars = tensor.lmatrix("features")
        chars_mask = tensor.matrix("features_mask")
        targets = tensor.lmatrix("targets")
        targets_mask = tensor.matrix("targets_mask")
        batch_cost = reverser.cost(chars, chars_mask, targets,
                                   targets_mask).sum()
        # Axis 1 is the batch axis, axis 0 the time axis (see max_length
        # below) — i.e. time-major input.
        batch_size = named_copy(chars.shape[1], "batch_size")
        # Mean per-sequence negative log-likelihood.
        cost = aggregation.mean(batch_cost, batch_size)
        cost.name = "sequence_log_likelihood"
        logger.info("Cost graph is built")

        # Give an idea of what's going on
        model = Model(cost)
        params = model.get_params()
        logger.info("Parameters:\n" +
                    pprint.pformat([(key, value.get_value().shape)
                                    for key, value in params.items()],
                                   width=120))

        # Initialize parameters
        for brick in model.get_top_bricks():
            brick.initialize()

        # Define the training algorithm.
        cg = ComputationGraph(cost)
        # Gradient clipping at norm 10 followed by a fixed 0.01 step.
        algorithm = GradientDescent(cost=cost,
                                    params=cg.parameters,
                                    step_rule=CompositeRule(
                                        [StepClipping(10.0),
                                         Scale(0.01)]))

        # Fetch variables useful for debugging
        generator = reverser.generator
        (energies, ) = VariableFilter(applications=[generator.readout.readout],
                                      name_regex="output")(cg.variables)
        (activations, ) = VariableFilter(
            applications=[generator.transition.apply],
            name=generator.transition.apply.states[0])(cg.variables)
        max_length = named_copy(chars.shape[0], "max_length")
        # Cost normalized per character rather than per sequence.
        cost_per_character = named_copy(
            aggregation.mean(batch_cost, batch_size * max_length),
            "character_log_likelihood")
        min_energy = named_copy(energies.min(), "min_energy")
        max_energy = named_copy(energies.max(), "max_energy")
        mean_activation = named_copy(
            abs(activations).mean(), "mean_activation")
        observables = [
            cost, min_energy, max_energy, mean_activation, batch_size,
            max_length, cost_per_character, algorithm.total_step_norm,
            algorithm.total_gradient_norm
        ]
        # Also monitor the L2 norm of every parameter and its gradient.
        for name, param in params.items():
            observables.append(named_copy(param.norm(2), name + "_norm"))
            observables.append(
                named_copy(algorithm.gradients[param].norm(2),
                           name + "_grad_norm"))

        # Construct the main loop and start training!
        average_monitoring = TrainingDataMonitoring(observables,
                                                    prefix="average",
                                                    every_n_batches=10)
        main_loop = MainLoop(
            model=model,
            data_stream=data_stream,
            algorithm=algorithm,
            extensions=[
                Timing(),
                TrainingDataMonitoring(observables, after_batch=True),
                average_monitoring,
                FinishAfter(after_n_batches=num_batches)
                # This shows a way to handle NaN emerging during
                # training: simply finish it.
                .add_condition("after_batch", _is_nan),
                Plot(os.path.basename(save_path),
                     [[average_monitoring.record_name(cost)],
                      [average_monitoring.record_name(cost_per_character)]],
                     every_n_batches=10),
                # Saving the model and the log separately is convenient,
                # because loading the whole pickle takes quite some time.
                Checkpoint(save_path,
                           every_n_batches=500,
                           save_separately=["model", "log"]),
                Printing(every_n_batches=1)
            ])
        main_loop.run()
    elif mode == "sample" or mode == "beam_search":
        # Build the generation graph once and load trained parameters.
        chars = tensor.lmatrix("input")
        generated = reverser.generate(chars)
        model = Model(generated)
        logger.info("Loading the model..")
        model.set_param_values(load_parameter_values(save_path))

        def generate(input_):
            """Generate output sequences for an input sequence.

            Incapsulates most of the difference between sampling and beam
            search.

            Returns
            -------
            outputs : list of lists
                Trimmed output sequences.
            costs : list
                The negative log-likelihood of generating the respective
                sequences.

            """
            if mode == "beam_search":
                samples, = VariableFilter(bricks=[reverser.generator],
                                          name="outputs")(ComputationGraph(
                                              generated[1]))
                # NOTE: this will recompile beam search functions
                # every time user presses Enter. Do not create
                # a new `BeamSearch` object every time if
                # speed is important for you.
                beam_search = BeamSearch(input_.shape[1], samples)
                # Cap generation at 3x the input length; </S> terminates.
                outputs, costs = beam_search.search({chars: input_},
                                                    char2code['</S>'],
                                                    3 * input_.shape[0])
            else:
                # Plain sampling: run the compiled generation function and
                # trim each sample at its first end-of-sequence marker.
                _1, outputs, _2, _3, costs = (
                    model.get_theano_function()(input_))
                outputs = list(outputs.T)
                costs = list(costs.T)
                for i in range(len(outputs)):
                    outputs[i] = list(outputs[i])
                    try:
                        true_length = outputs[i].index(char2code['</S>']) + 1
                    except ValueError:
                        # No </S> emitted; keep the full sequence.
                        true_length = len(outputs[i])
                    outputs[i] = outputs[i][:true_length]
                    # Total cost is the sum over the kept time steps.
                    costs[i] = costs[i][:true_length].sum()
            return outputs, costs

        # Interactive loop: read a sentence, generate candidates, print
        # them worst-first so the best (lowest-cost) ends up last.
        while True:
            line = input("Enter a sentence\n")
            message = ("Enter the number of samples\n"
                       if mode == "sample" else "Enter the beam size\n")
            batch_size = int(input(message))

            # Encode characters, mapping unknowns to <UNK> and wrapping
            # with <S>/</S> boundary markers.
            encoded_input = [
                char2code.get(char, char2code["<UNK>"])
                for char in line.lower().strip()
            ]
            encoded_input = ([char2code['<S>']] + encoded_input +
                             [char2code['</S>']])
            print("Encoder input:", encoded_input)
            target = reverse_words((encoded_input, ))[0]
            print("Target: ", target)

            # Tile the single input across the batch axis so every
            # sample/beam candidate sees the same sentence.
            samples, costs = generate(
                numpy.repeat(numpy.array(encoded_input)[:, None],
                             batch_size,
                             axis=1))
            messages = []
            for sample, cost in equizip(samples, costs):
                message = "({})".format(cost)
                message += "".join(code2char[code] for code in sample)
                if sample == target:
                    message += " CORRECT!"
                messages.append((cost, message))
            # Descending cost: the most likely candidate is printed last.
            messages.sort(key=operator.itemgetter(0), reverse=True)
            for _, message in messages:
                print(message)