Example #1
def generate_temporary_playbook(roles=None, become=True, gather_facts=False, reboot=False, interactive=False):
    jinja_file = 'administration/temporary.yml.j2'
    if interactive:
        jinja_args = {
            "become": query_yes_no(userlog.warn("Become?")),
            "gather_facts": query_yes_no(userlog.warn("Gather Facts?"), default="no"),
            "reboot": query_yes_no(userlog.error("Reboot after the execution?"), default="no")
        }
    else:
        jinja_args = {
            'become': become,
            'gather_facts': gather_facts,
            'reboot': reboot
        }
    if not roles:
        roles = select_roles()

    env = Environment(loader=FileSystemLoader(os.path.dirname(os.path.realpath(__file__))))
    env.trim_blocks = True
    env.lstrip_blocks = True
    temporary_playbook = env.get_template(jinja_file).render(roles=roles,
                                                             become=jinja_args["become"],
                                                             gather_facts=jinja_args["gather_facts"],
                                                             reboot=jinja_args["reboot"])

    with open('administration/temporary.yml', 'w') as f:
        f.write(temporary_playbook)
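
A note on the helper itself: every example on this page assumes a query_yes_no function. Its exact signature differs between the quoted projects (some take default='yes'/'no', others a boolean default, and one compares the result against 'no'), but most follow the well-known interactive-prompt recipe. The sketch below is only a reference implementation under that assumption, not the code of any particular project:

import sys


def query_yes_no(question, default="yes"):
    """Ask a yes/no question on stdin and return the answer as a bool."""
    valid = {"yes": True, "y": True, "no": False, "n": False}
    if default is None:
        prompt = " [y/n] "
    elif default == "yes":
        prompt = " [Y/n] "
    elif default == "no":
        prompt = " [y/N] "
    else:
        raise ValueError("invalid default answer: {!r}".format(default))

    while True:
        sys.stdout.write(question + prompt)
        choice = input().strip().lower()
        if choice == "" and default is not None:
            return valid[default]
        if choice in valid:
            return valid[choice]
        sys.stdout.write("Please respond with 'yes' or 'no' (or 'y'/'n').\n")

Because it returns a plain bool, the result can be used directly in if statements, as the examples below do.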
Example #2
def rebase(args, repo_name):
    target_branch = args.branch
    if is_rebase_active():
        _exit('rebase is already in progress')
    stashed = False
    if is_dirty():
        if utils.query_yes_no('stash modified files and continue?'):
            utils.cmd('git stash')
            stashed = True
        else:
            _exit('working directory is dirty and changes not stashed')
    try:
        utils.cmd(f'git rebase -i --autosquash {target_branch}', None, None)
    except subprocess.CalledProcessError:
        if utils.query_yes_no('rebase failed! abort?', default='no'):
            utils.cmd('git rebase --abort')
            if stashed:
                utils.cmd('git stash pop')
        else:
            utils.cmd('git status')
            print('1. Resolve conflicts in "Unmerged paths" file list')
            print('2. $ git add <conflict-files>')
            print('3. $ git rebase --continue')
            print('Start over using $ git rebase --abort')
            if stashed:
                print('Warning: you have stashed changes!')
        _exit('rebase did not complete!')
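
The git helpers in this and the later sync_upstream / force_push examples go through a utils.cmd wrapper that is not shown here. Judging from the subprocess.CalledProcessError handling and the e.stderr access elsewhere on this page, it most likely wraps subprocess.run with check=True and captured output; the exact signature, including the two extra positional arguments used for the interactive rebase, is an assumption. A minimal sketch:

import subprocess


def cmd(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE):
    """Run a shell command; raise CalledProcessError on a non-zero exit."""
    return subprocess.run(
        command,
        shell=True,
        check=True,
        stdout=stdout,
        stderr=stderr,
        universal_newlines=True,  # captured output as str, not bytes
    )

Passing None for stdout and stderr, as the interactive rebase call does, would let git talk to the terminal directly instead of being captured.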
Example #3
def main(playbook=None, temp_playbook=False):
    roles = None
    # Case 1 -> Playbooks available
    if playbook or query_yes_no(userlog.warn("Select a playbook?")):
        if not playbook:
            playbook = select_playbook()
        try:
            with open(PLAYBOOK_PATH + "/{0}.yml".format(playbook)) as f:
                role_yaml = yaml.safe_load(f)
                roles = role_yaml[1]["roles"]
        except IOError:
            print(userlog.error('Problem extracting roles from playbook "{0}"'.format(playbook)))

    if not roles and not temp_playbook and not query_yes_no("Select Roles?"):
        print(userlog.error("No valid Options. Exiting"))
        return 0
    elif not roles:
        generate_temporary_playbook(roles=roles, become=True, gather_facts=False, reboot=True)
    else:
        pass

    print(userlog.info(json.dumps(roles, indent=4)))

    vm_vars = {}
    for role in roles:
        vm_vars.update(read_role_vars(role=role))

    print(userlog.info("\nContents of {0} are:\n".format(POPULATED_VARS_OUTPUT) + json.dumps(vm_vars, indent=4)))

    with open(POPULATED_VARS_OUTPUT, 'w') as f:
        json.dump(vm_vars, f, indent=4)
    return 0
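
Several of these examples (1, 3, 4, 6 and 28) pass messages through userlog.warn / userlog.info / userlog.error and then hand the result to print or query_yes_no, so those helpers evidently return the decorated string rather than printing it themselves. A minimal sketch of such a helper, with the ANSI colour codes chosen here being an assumption:

class _UserLog(object):
    """Return messages wrapped in ANSI colour codes instead of printing them."""

    _RESET = "\033[0m"

    @staticmethod
    def _wrap(code, message):
        return "\033[{}m{}{}".format(code, message, _UserLog._RESET)

    def info(self, message):
        return self._wrap("32", message)   # green

    def warn(self, message):
        return self._wrap("33", message)   # yellow

    def error(self, message):
        return self._wrap("31", message)   # red


userlog = _UserLog()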
Example #4
def read_sshd_configuration(def_vars):
    read_dict = dict()
    if "privileged_host" in def_vars:
        # VALIDATE Already Present vars
        priv_args = [elem for elem in def_vars["privileged_host"]
                     if HostExemption.validate(raise_f=False, **elem)
                     and query_yes_no(json.dumps(elem, indent=4)+"\nKeep this?")]

        print(userlog.info("\nAll Privileged Hosts:\n"+json.dumps(priv_args, indent=4)))

        if query_yes_no(userlog.warn("Do you want to insert more?")):
            print(userlog.info("\nPlease enter an IP. Or enter empty string to stop\n"))
            for elem in HostExemption.read_priviliged_hosts(priv_len=len(priv_args)+1):
                priv_args.append(elem)

            print("\nAll Hosts:\n" + json.dumps(priv_args, indent=4))

    else:
        priv_args = []
        print(userlog.warn("\nNo privileged host. Please enter IPs. Or enter empty string to stop\n"))
        for elem in HostExemption.read_priviliged_hosts():
            priv_args.append(elem)

    read_dict["privileged_host"] = priv_args

    return read_dict
Example #5
def set_ssh_keys(ssh_keys_path=None, remove_all=False):
    """Install public ssh keys to allow fast authentication
    it Loops in a folder where public ssh kesys are stored looking for *.pub
    files and ask which one needs to be installed remotely.
    The default folder for ssh keys if not specified is ~/.ssh of current user
    """
    if remove_all:
        if utils.query_yes_no("WARNING!! Are you sure to remove all the ssh "
                              "keys?\nYou might not be able to login anymore!",
                              'yes'):
            run('mkdir -p $HOME/.ssh')
            run('cat /dev/null > $HOME/.ssh/authorized_keys')

    if ssh_keys_path is None:
        ssh_keys_path = os.path.join(os.getenv('HOME'), '.ssh/*.pub')
    else:
        ssh_keys_path = os.path.join(ssh_keys_path, '*.pub')
    ids_ssh = glob.glob(ssh_keys_path)
    print ids_ssh
    for id_ssh in ids_ssh:
        with open(id_ssh, 'r') as f:
            # reading the public key for anything after the key to get name and
            # address that normally follow the key
            id_ssh_file = f.read()
        id_ssh_name = ' '.join(id_ssh_file.split()[2:])
        if utils.query_yes_no("CONFIGURE {}:{}'s ssh key for "
                              "authentication?".format(os.path.basename(id_ssh),
                                                       id_ssh_name), 'yes'):
            run('mkdir -p $HOME/.ssh')
            run('touch $HOME/.ssh/authorized_keys')
            append('$HOME/.ssh/authorized_keys', id_ssh_file.rstrip('\n\r'))
Example #6
def read_network_configuration(def_vars):
    read_dict = dict()
    ip_iface = None

    read_dict["RESOLVCONF_FLAG"] = query_yes_no(userlog.warn("Install resolvconf packages?"), default="no")

    # Read interface
    if query_yes_no(userlog.warn("Specify an interface? Else the default as defined in ansible facts will be used."),
                    default="no"):
        read_dict["primary_interface"] = str(input(userlog.info("Please Specify a valid interface: >> ")))

    # Read Primary IP Interface
    if "primary_ip" in def_vars and "primary_netmask" in def_vars:
        print(userlog.info('\nDefault Value:\t{1}\tfor\t"{0}"\n'.
                           format("primary_ip", def_vars["primary_ip"])))
        print(userlog.info('\nDefault Value:\t{1}\tfor\t"{0}"\n'.
                           format("primary_netmask", def_vars["primary_netmask"])))

        if query_yes_no(userlog.warn("Keep the default value?")):
            read_dict["primary_ip"] = def_vars["primary_ip"]
            read_dict["primary_netmask"] = def_vars["primary_netmask"]
            read_dict["primary_network"] = def_vars["primary_network"]
            read_dict["primary_broadcast"] = def_vars["primary_broadcast"]

        else:
            print(userlog.warn('\nOverwriting default value for "primary_interface"'))
            ip_iface = read_ip(custom_message=" for {0}".format("primary_interface"), maskless=False)

    else:
        print(userlog.warn("\nNo default value for \"{0}\".".format("primary_interface")))
        ip_iface = read_ip(custom_message=" for {0}".format("primary_interface"), maskless=False)

    # Infer the "primary_ip", "primary_netmask", "primary_network","primary_broadcast",
    if ip_iface:
        read_dict["primary_ip"] = ip_iface.ip.__str__()
        read_dict["primary_netmask"] = ip_iface.netmask.__str__()
        read_dict["primary_network"] = ip_iface.network.network_address.__str__()
        read_dict["primary_broadcast"] = ip_iface.network.broadcast_address.__str__()

    # Validate IP Settings
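    # NOTE: ip_args is not defined in this excerpt; it is assumed to be a
    # module-level list of the remaining IP-related variable names to read.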
    for ip_arg in ip_args:
        if ip_arg in def_vars:
            print(userlog.info("\nDefault Value:\t{1}\tfor\t\"{0}\"\n".format(ip_arg, def_vars[ip_arg])))
            if query_yes_no(userlog.warn("Keep the default value?")):
                read_dict[ip_arg] = def_vars[ip_arg]
                continue
            else:
                print(userlog.warn("\nOverwriting default value for \"{0}\".".format(ip_arg)))
                read_dict[ip_arg] = str(read_ip(custom_message=" for {0}".format(ip_arg)))
        else:
            print(userlog.warn("\nNo default value for \"{0}\".".format(ip_arg)))
            read_dict[ip_arg] = str(read_ip(custom_message=" for {0}".format(ip_arg)))

    return read_dict
Example #7
def write_data(data, filename):
    """
    Writes a pandas dataframe to a designated CSV file in a 'processed' folder.
    If the file already exists, the data is added to the file.
    """

    data_COM = data.loc[:, ['ImageURL', 'Data', 'tag']]
    data_HUM = data.drop(['Data'], axis=1)

    fpath_COM = 'C019_processed'
    fpath_HUM = 'C019_to_review'

    for data, fpath in zip([data_COM, data_HUM], [fpath_COM, fpath_HUM]):
        print('\nWriting processed data to {}...\n'.format(fpath))

        if not os.path.isdir(fpath):
            os.makedirs(fpath)

        fpath = '{}/{}'.format(fpath, filename)

        if not os.path.isfile(fpath):
            data.to_csv(fpath, index=False)

        else:
            overwrite = query_yes_no(
                'Data already processed. Overwrite existing file?')

            if overwrite:
                print('overwriting file {}'.format(fpath))
                data.to_csv(fpath, index=False)

            else:
                append = query_yes_no('Add data to existing processed file?')
                if append:
                    # Check for matching columns
                    curr_cols = pd.read_csv(fpath, nrows=1).columns
                    same_length = len(data.columns) == len(curr_cols)
                    assert (
                        same_length
                    ), 'Number of columns of processed data do not match database'

                    same_column = (data.columns == curr_cols).all()
                    assert same_column, 'Column names of processed data do not match database'

                    print('Adding data to file {}'.format(fpath))
                    data.to_csv(fpath, mode='a', index_label='Index')
                else:
                    print(
                        'Please modify name of existing processed file {} and re-run the program.'
                        .format(fpath.split('/')[-1]))
Example #8
def executa_limpeza():
    APAGA = query_yes_no(PERGUNTA)
    if DEBUG:
        PATH = "C:/Users/Neylson/Documents/RAIS"
        arquivos = glob.glob(PATH + "/data/*.csv")
    else:
        arquivos = glob.glob("../data/*.csv")

    print("Aplicando regras de limpeza...")
    limpos = []
    for arq in tqdm(arquivos):
        bd = pd.read_csv(arq, sep='\t', decimal=',')
        limpos.append(limpa_dados(bd, arq))

    # Export the data
    print("Exportando os dados para a pasta data/processed/ ...")
    pd.concat(limpos, sort=False)[[
        'Ano', 'CNAE 2.0 Classe', 'Escolaridade após 2005', 'Qtd Hora Contr',
        'Idade', 'Ind Simples', 'Mun Trab', 'Município', 'Nacionalidade',
        'Brasileiro', 'Natureza Jurídica', 'Ind Portador Defic',
        'Qtd Dias Afastamento', 'Raça Cor', 'Regiões Adm DF',
        'Vl Remun Dezembro Nom', 'Vl Remun Dezembro (SM)',
        'Vl Remun Média Nom', 'Vl Remun Média (SM)', 'CNAE 2.0 Subclasse',
        'Sexo Trabalhador', 'Tamanho Estabelecimento', 'Tempo Emprego',
        'Tipo Admissão', 'Tipo Defic', 'Tipo Salário', 'Tipo Vínculo'
    ]].to_csv("../data/processed/rais_2008_2018.csv", sep='\t', index=False)

    if APAGA:
        [os.remove(i) for i in arquivos]
Example #9
File: cli.py Project: pakesson/cfs
def download(key, password):
    r = requests.get('{baseurl}/api/download?key={key}'.format(
        baseurl=BASE_URL, key=key))

    if r.status_code == 200:
        json_data = r.json()

        url = json_data['url']
        r = requests.get(url)

        metadata = r.headers['x-amz-meta-filename']

        password = hashlib.sha256(password).digest()
        box = nacl.secret.SecretBox(password)

        filename = box.decrypt(base64.b64decode(urllib.unquote_plus(metadata)))

        if query_yes_no("Download the file '{file}'?".format(file=filename)):
            encrypted_data = r.content
            data = box.decrypt(encrypted_data)

            with open(filename, 'wb') as f:
                f.write(data)

            print("File saved as '{file}'".format(file=filename))
    else:
        print("Download failed.")
Example #10
    def _register_computer(self):
        _continue = utils.query_yes_no(
            _('Have you checked the config options on this machine (%s)?') % settings.CONF_FILE
        )
        if _continue == 'no':
            _operation_failed(_('Check %s file and register again') % settings.CONF_FILE)
            sys.exit(errno.EAGAIN)

        _user = raw_input('%s: ' % _('User to register computer at server'))
        if not _user:
            _operation_failed(_('Empty user. Exiting %s.') % self.CMD)
            logging.info('Empty user in register computer option')
            sys.exit(errno.EAGAIN)

        _pass = getpass.getpass('%s: ' % _('Password'))

        _data = {
            'username': _user,
            'password': _pass,
            'version': self.migas_version,
            'platform': platform.system(),  # new for server 3.0
            'pms': str(self.pms),  # new for server 3.0
        }
        self._save_sign_keys(_data)
        _operation_ok(_('Computer registered at server'))
Example #11
def main():
    args = parse_args()
    args.log_dir = os.path.join(args.log_dir, args.name)

    if os.path.exists(args.log_dir):
        if query_yes_no('You already have a run called {}, override?'.format(
                args.name)):
            shutil.rmtree(args.log_dir)
        else:
            exit(0)

    del args.__dict__['name']
    log_dir = args.log_dir
    epochs = args.epochs
    content_dim = args.content_dim
    class_dim = args.class_dim
    batch_size = args.batch_size
    lr = args.lr
    stddev = args.stddev
    num_samples = args.num_samples

    mnist_class = MNIST(num_samples=num_samples)
    mnist_loader = DataLoader(mnist_class, batch_size=batch_size, shuffle=True)
    generator = GeneratorForMnistGLO(content_dim + class_dim)

    writer = SummaryWriter(log_dir=log_dir)
    train(data_loader=mnist_loader,
          glo_generator=generator,
          writer=writer,
          lr=lr,
          content_dim=content_dim,
          class_dim=class_dim,
          epochs=epochs,
          stddev=stddev,
          num_samples=num_samples)
Example #12
File: cli.py Project: pakesson/cfs
def download(key, password):
    r = requests.get('{baseurl}/api/download?key={key}'.format(baseurl=BASE_URL, key=key))

    if r.status_code == 200:
        json_data = r.json()

        url = json_data['url']
        r = requests.get(url)

        metadata = r.headers['x-amz-meta-filename']

        password = hashlib.sha256(password).digest()
        box = nacl.secret.SecretBox(password)

        filename = box.decrypt(base64.b64decode(urllib.unquote_plus(metadata)))

        if query_yes_no("Download the file '{file}'?".format(file=filename)):
            encrypted_data = r.content
            data = box.decrypt(encrypted_data)

            with open(filename, 'wb') as f:
                f.write(data)

            print("File saved as '{file}'".format(file=filename))
    else:
        print("Download failed.")
Example #13
def sync_upstream(args, repo_name):
    upstream_location = f'[email protected]:{args.account}/{repo_name}.git'
    target_branch = args.branch
    remote_name = args.remote
    remotes = get_remotes()
    if upstream_location == remotes[ORIGIN_NAME]['push']['url']:
        _exit(f'upstream identical to origin for {upstream_location}')
    if args.remote not in remotes:
        utils.cmd(f'git remote add {remote_name} {upstream_location}')
        remotes = get_remotes()
    start_branch = get_current_branch()
    try:
        utils.cmd(f'git fetch {remote_name}')
    except subprocess.CalledProcessError:
        utils.cmd(f'git remote remove {remote_name}')
        _exit(f'unable to fetch {remote_name} at {upstream_location}')
    stashed = False
    if start_branch != target_branch:
        try:
            checkout_branch(target_branch)
        except subprocess.CalledProcessError as e:
            error = e.stderr.rstrip()
            if error.startswith('error: Your local changes to the following files would be overwritten by checkout'):  # noqa:E501
                if utils.query_yes_no('stash modified files and continue?'):
                    utils.cmd('git stash')
                    stashed = True
                    checkout_branch(target_branch)
            else:
                raise e
    utils.cmd(f'git merge {remote_name}/{target_branch}')
    if start_branch != target_branch:
        checkout_branch(start_branch)
    if stashed:
        utils.cmd('git stash pop')
Example #14
def force_push(args, repo_name):
    origin_url = get_remote_url(ORIGIN_NAME, 'push')
    branch_name = get_current_branch()
    if utils.query_yes_no(f'force push to {origin_url}?'):
        utils.cmd(f'git push {ORIGIN_NAME} -f {branch_name}')
    else:
        _exit('force push canceled')
Example #15
    def _register_computer(self):
        _continue = utils.query_yes_no(
            _('Have you checked the config options on this machine (%s)?') %
            settings.CONF_FILE)
        if _continue == 'no':
            _operation_failed(
                _('Check %s file and register again') % settings.CONF_FILE)
            sys.exit(errno.EAGAIN)

        _user = raw_input('%s: ' % _('User to register computer at server'))
        if not _user:
            _operation_failed(_('Empty user. Exiting %s.') % self.CMD)
            logging.info('Empty user in register computer option')
            sys.exit(errno.EAGAIN)

        _pass = getpass.getpass('%s: ' % _('Password'))

        _data = {
            'username': _user,
            'password': _pass,
            'version': self.migas_version,
            'platform': platform.system(),  # new for server 3.0
            'pms': str(self.pms),  # new for server 3.0
        }
        self._save_sign_keys(_data)
        _operation_ok(_('Computer registered at server'))
Example #16
def remove_experiment(name):
    experiments = experiment_scheduler.load_experiments(
        cluster=None, filter_eq_dct=dict(name=name))

    if len(experiments) == 0:
        print "No experiments in database %s" % get_db_string("experiments")
        return

    experiment = experiments[0]
    table_name = experiment["table"]
    if query_yes_no("Do you really want to delete experiment %s?" % bold(name)):
        print "Deleting %s..." % name
        experiment_scheduler.delete_experiments([experiment])
    if query_yes_no("Do you want to delete corresponding jobs?"):
        jobs = job_scheduler.load_jobs(table_name)
        print "Deleting %d jobs..." % len(jobs)
        job_scheduler.delete_jobs(table_name, jobs)
Example #17
def _check_column_name(
    actual_value: str, expected_value: str, sheet_name: str, file_name: str
) -> None:
    if actual_value.lower().replace(" ", "_") != expected_value:
        warnings.warn(
            f"Expected column named {expected_value}, got {actual_value} in sheet = {sheet_name}, file = {file_name}"
        )
        if not query_yes_no("Do you want to continue anyway?"):
            raise ValueError("File {file_name} was malformed.")
Example #18
def delete_user(username):
    if query_yes_no("Confirm delete user '{}' from user database.".format(username)):
        try:
            with session_scope() as db_session:
                user = db_session.query(Users).filter(Users.user_name == username).one()
                db_session.delete(user)
        except sqlalchemy.orm.exc.NoResultFound:
            print("No user found with this name.")
            sys.exit(1)
Example #19
def install_packages(packages,
                     what_for='for a complete setup to work properly'):
    '''Try to install .deb packages given by list.

    Return True, if packages could be installed or are installed already, or if
    they cannot be installed but the user gives feedback to continue.

    Else return False.
    '''
    res = True
    non_installed_packages = _non_installed(packages)
    packages_str = '  '.join(non_installed_packages)
    if non_installed_packages:
        with quiet():
            dpkg = _has_dpkg()
        hint = '  (You may have to install them manually)'
        do_install = False
        go_on = True
        if dpkg:
            if _is_sudoer('Want to install dpkg packages'):
                do_install = True
            else:
                do_install = False  # cannot install anything
                info = yellow(' '.join([
                    'These deb packages are missing and need to be installed',
                    flo("{what_for}: "),
                    ', '.join(non_installed_packages),
                ]))
                question = '  Continue anyway?'
                go_on = query_yes_no(info + hint + question, default='no')
        else:
            # dpkg == False, unable to determine if packages are installed
            do_install = False  # cannot install anything
            info = yellow(' '.join([
                flo('Required {what_for}: '),
                ', '.join(non_installed_packages),
            ]))
            go_on = query_yes_no(info + hint + '  Continue?', default='yes')
        if not go_on:
            sys.exit('Abort')
        if do_install:
            command = flo('sudo  apt-get install {packages_str}')
            res = run(command).return_code == 0
    return res
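
The flo() calls in this example (and in examples 20 and 22) appear to come from a "format from locals" helper that predates f-strings: the braces are filled in from the caller's local variables. That behaviour is an assumption based on how it is used here; a minimal sketch of such a helper:

import inspect


def flo(template):
    """Format a template string using the caller's local variables."""
    caller_locals = inspect.currentframe().f_back.f_locals
    return template.format(**caller_locals)

With modern Python the same calls could simply be written as f-strings, e.g. f'sudo  apt-get install {packages_str}'.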
Example #20
    def wrapper(*args, **kwargs):
        if not os.path.exists(FABSETUP_CUSTOM_DIR):
            msg = '''\
Git repository ~/.fabsetup-custom with configurations does not exist.
This configs are required to use fabsetup.

Clone it if you already have your own fabsetup-custom repository:

    git clone <user>@<hostname>:/path/to/fabsetup-custom.git  ~/.fabetup-custom

Else, initialize a new repository.

Init a new repository `~/.fabsetup-custom`?'''
            if not query_yes_no(msg, default='yes'):
                sys.exit('abort')
            custom_dir = FABSETUP_CUSTOM_DIR
            presetting_dir = join(FABFILE_DATA_DIR,
                                  'presetting-fabsetup-custom')
            if not isdir(custom_dir):
                print(yellow('\n** **     Init ') +
                      yellow('~/.fabsetup-custom', bold=True) +
                      yellow('      ** **\n'))
                print(yellow(flo('** Create files in dir {custom_dir} **')))
                local(flo('mkdir -p {custom_dir}'))
                local(flo('cp -r --no-clobber {presetting_dir}/. {custom_dir}'))
                import_fabsetup_custom(globals())
            else:
                with quiet():
                    local(flo(
                        'cp -r --no-clobber {presetting_dir}/. {custom_dir}'))

            if not isdir(join(custom_dir, '.git')):
                print(yellow(
                    '\n** Git repo ~/.fabsetup-custom: '
                    'init and first commit **'))
                local(flo('cd {custom_dir} && git init'))
                local(flo('cd {custom_dir} && git add .'))
                local(flo('cd {custom_dir} && git commit -am "Initial commit"'))
                print(yellow("** Done. Don't forget to create a backup of your "
                             '~/.fabsetup-custom repo **\n'))
                print(yellow("** But do not make it public, it's custom **\n",
                             bold=True))
            else:
                with quiet():
                    cmd = flo('cd {custom_dir} && git status --porcelain')
                    res = local(cmd, capture=True)
                    if res:
                        print(yellow('\n** git repo  ') +
                              magenta('~/.fabsetup-custom  ') +
                              yellow('has uncommitted changes: **'))
                        print(cmd)
                        print(yellow(res, bold=True))
                        print(yellow(
                            "** Don't forget to commit them and make a "
                            "backup of your repo **\n"))
        return func(*args, **kwargs)
Example #21
    def test_file_hdf5(self, file_name, speaker_index, speaker_index_2):
        """
        Function to extract multi pitch from file. Currently supports only HDF5 files.
        """
        sess = tf.Session()
        self.load_model(sess, log_dir =  config.log_dir)
        mel = self.read_hdf5_file(file_name)

        out_mel = self.process_file(mel[:, :-2], speaker_index, speaker_index_2, sess)

        self.plot_features(mel, out_mel)

        synth = utils.query_yes_no("Synthesize output? ")

        if synth:
            gen_change = utils.query_yes_no("Change in gender? ")
            if gen_change:
                female_male = utils.query_yes_no("Female to male?")
                if female_male:
                    out_featss = np.concatenate((out_mel[:mel.shape[0]], mel[:out_mel.shape[0],-2:-1]-12, mel[:out_mel.shape[0],-1:]), axis = -1)
                else:
                    out_featss = np.concatenate((out_mel[:mel.shape[0]], mel[:out_mel.shape[0],-2:-1]+12, mel[:out_mel.shape[0],-1:]), axis = -1)
            else:
                out_featss = np.concatenate((out_mel[:mel.shape[0]], mel[:out_mel.shape[0],-2:]), axis = -1)

            audio_out = utils.feats_to_audio(out_featss) 

            sf.write('./{}_{}.wav'.format(file_name[:-5], config.singers[speaker_index_2]), audio_out, config.fs)

        synth_ori = utils.query_yes_no("Synthesize ground truth with vocoder? ")

        if synth_ori:
            audio = utils.feats_to_audio(mel) 
            sf.write('./{}_{}_ori.wav'.format(file_name[:-5], config.singers[speaker_index]), audio, config.fs)
Example #22
def install_packages(packages,
                     what_for='for a complete setup to work properly'):
    '''Try to install .deb packages given by list.

    Return True, if packages could be installed or are installed already, or if
    they cannot be installed but the user gives feedback to continue.

    Else return False.
    '''
    res = True
    non_installed_packages = _non_installed(packages)
    packages_str = '  '.join(non_installed_packages)
    if non_installed_packages:
        with quiet():
            dpkg = _has_dpkg()
        hint = '  (You may have to install them manually)'
        do_install = False
        go_on = True
        if dpkg:
            if _is_sudoer('Want to install dpkg packages'):
                do_install = True
            else:
                do_install = False  # cannot install anything
                info = yellow(' '.join([
                    'These deb packages are missing and need to be installed',
                    flo("{what_for}: "), ', '.join(non_installed_packages),
                ]))
                question = '  Continue anyway?'
                go_on = query_yes_no(info + hint + question, default='no')
        else:
            # dpkg == False, unable to determine if packages are installed
            do_install = False  # cannot install anything
            info = yellow(' '.join([
                flo('Required {what_for}: '),
                ', '.join(non_installed_packages),
            ]))
            go_on = query_yes_no(info + hint + '  Continue?', default='yes')
        if not go_on:
            sys.exit('Abort')
        if do_install:
            command = flo('sudo  apt-get install {packages_str}')
            res = run(command).return_code == 0
    return res
Example #23
def transaction_limit_callback(file_analyzer, logger):
    retry = query_yes_no('Daily transaction limit reached. Retry with a different key?')
    if retry:
        key = raw_input('Please enter the new key: ')
        try:
            file_analyzer.api = AlchemyAPI(api_key=key)
            return True
        except BadApiKeyError as e:
            logger.critical('The API key is not in proper format. Cause: ' + str(e))
            exit(-1)
    else:
        return False
Example #24
 def set_config(self, args, config):
   if args.param in utils.CONFIG_PARAMS:
     if args.param == 'refresh_token':
       confirm = utils.query_yes_no(
           'Are you sure you want to update the refresh token?')
       if not confirm:
         return
     setattr(config, args.param, args.value)
     print '%s = %s' % (args.param, getattr(config, args.param))
     config.save()
     print 'Configuration updated.'
   else:
     print 'Unknown configuration parameter: %s' % args.param
Example #25
 def set_config(self, args, config):
     if args.param in utils.CONFIG_PARAMS:
         if args.param == 'refresh_token':
             confirm = utils.query_yes_no(
                 'Are you sure you want to update the refresh token?')
             if not confirm:
                 return
         setattr(config, args.param, args.value)
         print '%s = %s' % (args.param, getattr(config, args.param))
         config.save()
         print 'Configuration updated.'
     else:
         print 'Unknown configuration parameter: %s' % args.param
Example #26
def store_features(filename):

	print('Storing features:')

	if query_yes_no("Drop the features collection?", "no"):
		db.features.drop()
	
	with open(filename) as f:
		features = json.load(f)

	print("Inserting features in bulk")
	result = db.features.insert_many(features)
	print(result)
Example #27
def store_recommendations(filename):

	print('Storing recommendations:')

	if query_yes_no("Drop the recommendations collection?", "no"):
		db.recommendations.drop()
		
	with open(filename) as f:
		recommendations = json.load(f)

	print("Inserting recommendations in bulk")
	result = db.recommendations.insert_many(recommendations)
	print(result)
Example #28
def read_saltstack(def_vars):
    read_dict = dict()
    read_dict["SALT_INSTALL_FLAG"] = query_yes_no(userlog.warn("Install Salt?"), default="no")
    read_dict["SALT_CONFIGURE_FLAG"] = query_yes_no(userlog.info("Configure Salt?"))
    read_dict["SALT_MINION_FLAG"] = query_yes_no(userlog.info("Salt Minion?"))
    read_dict["SALT_MASTER_FLAG"] = query_yes_no(userlog.warn("Salt Master?"), default="no")
    read_dict["SALT_ID"] = read_hostname("to be used for Salt Minion ID")

    if "SALT_MASTER_IP" in def_vars:
        print(userlog.info('\nDefault Value:\t{1}\tfor\t"{0}"\n'.format("SALT_MASTER_IP", def_vars["SALT_MASTER_IP"])))

        if query_yes_no(userlog.warn("Keep the default value?")):
            read_dict["SALT_MASTER_IP"] = def_vars["SALT_MASTER_IP"]

        else:
            print(userlog.warn('\nOverwriting default value for "SALT_MASTER_IP"'))
            read_dict["SALT_MASTER_IP"] = read_ip(custom_message=" for {0}".format("SALT_MASTER_IP")).__str__()

    else:
        print(userlog.warn("\nNo default value for \"{0}\".".format("SALT_MASTER_IP")))
        read_dict["SALT_MASTER_IP"] = read_ip(custom_message=" for {0}".format("SALT_MASTER_IP")).__str__()

    salt_packages = []

    if read_dict["SALT_MASTER_FLAG"]:
        salt_packages.append({
            "name": "salt-master",
            "state": "latest"
        })

    if read_dict["SALT_MINION_FLAG"]:
        salt_packages.append({
            "name": "salt-minion",
            "state": "latest"
        })

    read_dict["salt_packages"] = salt_packages
    return read_dict
Example #29
def bootstrap(ubuntu_only):
    print(utils.make_bright("<bootstrap>"))

    if not utils.query_yes_no(
            "This operation will download a bunch of libcs into"
            f" {utils.make_bright(utils.get_libcs_dirpath())}. Proceed?"):
        utils.abort("Aborted by user.")

    _add_ubuntu_libcs()
    if not ubuntu_only:
        _add_debian_libcs()
        _add_arch_linux_libcs()

    print(utils.make_bright("</bootstrap>"))
Example #30
    def test_file_hdf5(self, file_name, singer_index):
        """
        Function to extract multi pitch from file. Currently supports only HDF5 files.
        """
        sess = tf.Session()
        self.load_model(sess, log_dir = config.log_dir)
        feats, f0_nor, pho_target = self.read_hdf5_file(file_name)

        out_feats = self.process_file(f0_nor, pho_target, singer_index,  sess)

        self.plot_features(feats, out_feats)

        synth = utils.query_yes_no("Synthesize output? ")

        if synth:

            out_featss = np.concatenate((out_feats[:feats.shape[0]], feats[:out_feats.shape[0],-2:]), axis = -1)

            utils.feats_to_audio(out_featss,file_name[:-4]+'output') 
        synth_ori = utils.query_yes_no("Synthesize gorund truth with vocoder? ")

        if synth_ori:
            utils.feats_to_audio(feats,file_name[:-4]+'ground_truth') 
Example #31
def branch(args, repo_name):
    if args.warn_base:
        current_branch = get_current_branch()
        if current_branch not in args.warn_base:
            message = f'Current branch {current_branch} is not in {args.warn_base}, proceed?'  # noqa:E501
            if not utils.query_yes_no(message, default='no'):
                exit('Exited without creating branch!')
    branches = list_branches(pattern=f'{args.prefix}[0-9]*')
    new_index = max([
        int(branch.split(f'{args.prefix}')[1])
        for branch in branches
    ] + [0]) + 1
    new_branch = f'{args.prefix}{new_index}'
    utils.cmd(f'git checkout -b {new_branch}')
Example #32
    def find_inject(self):
        """Look for injection."""
        insert_word = self.target_url.find('*')
        inject = False
        if insert_word != -1:
            inject = query_yes_no('* Custom injection found, continue? ')
            if not inject:
                index = self.target_url.find('*')
                url = self.target_url[:index] + self.target_url[index + 1:]
            else:
                url = self.target_url
        else:
            url = self.target_url

        return url
Example #33
def query_students():
    students = {}

    while True:
        student_id = query('Student ID:')
        student_name = query('Full name:')
        student_email = query('E-mail address:')

        students[student_id] = {
            'name': student_name,
            'e-mail': student_email,
        }

        if not query_yes_no('Do you want to add another student?',
                            default=False):
            return students
Example #34
    def secure_delete_index(self, index_name):
        
        if not self.is_connected():
            logger.error('Error. Not connected to Elasticsearch')
            return

        if type(index_name) is not str:
            logger.error('Error. Index name must be a str')
            return

        msg = "Do you want to delete the index '{}'?".format(index_name)
        if self.es.indices.exists(index_name):
            if utils.query_yes_no(msg, False):
                res = self.es.indices.delete(index=index_name)
                logger.info("The index {} was deleted successfully".
                             format(index_name))
        return True
Example #35
def set_authentication_keys():
    """Loops in current user .ssh looking for certificates and ask which one
    needs to be installed remotely"""

    ssh_path = os.path.join(os.getenv('HOME'), '.ssh/')
    ids_ssh = glob('{}id*.pub'.format(ssh_path))
    for id_ssh in ids_ssh:
        with open(id_ssh, 'r') as f:
            # reading the public key for anything after the key to get name and
            # address that normally follow the key
            id_ssh_file = f.read()
        id_ssh_name = ' '.join(id_ssh_file.split()[2:])
        if query_yes_no("CONFIGURE {}'s ssh key for "
                        "authentication?".format(id_ssh_name), 'yes'):
            run('mkdir -p $HOME/.ssh')
            run('touch $HOME/.ssh/authorized_keys')
            append('$HOME/.ssh/authorized_keys', id_ssh_file.rstrip('\n\r'))
Example #36
def main():
    # TODO: Better CLI Help
    # TODO: split into parse_args() and main(args) (see dwca2shp)
    parser = argparse.ArgumentParser(
        description="Import a DarwinCore Archive file into CartDB.")

    parser.add_argument('--domain',
                        help='Your CartoDB domain (without .cartodb.com).',
                        required=True)
    parser.add_argument('--api-key', 
                        dest='apikey',
                        help='Your CartoDB API key.',
                        required=True)
    parser.add_argument('--table',
                        help="CartoDB destination table name",
                        required=True)
    parser.add_argument('--truncate-table',
                        action='store_true',
                        dest='truncate',
                        help="Truncate destination table prior to import.")
    parser.add_argument('source_file',
                        help="Source DwC-A file", 
                        type=argparse.FileType('r'))

    args = parser.parse_args()

    target_table = args.table

    out = CartoDBOutput(args.apikey, args.domain, args.table)

    if args.truncate:
        if query_yes_no("Are you sure you want to truncate the database ? Data will be LOST !", default="no"):
            out.truncate_table()

    with DwCAReader(args.source_file) as dwca:
        if valid_dwca(dwca):    
            for line in dwca.each_line():
                try:
                    out.insert_line(**dwcaline_to_epsg4326(line))
                    sys.stdout.write('.')
                except CartoDBException as e:
                    print ("CartoDB error: ", e)
        else:
            # TODO: more detailed message
            print "Invalid source DwC-A file."
Example #37
def gen_all_binvec(width):
    if width > 20:
        proceed = utils.query_yes_no(
            'You are generating {} (a large amount of) '
            'samples. Proceed?'.format(2**width))
        if not proceed:
            return []

    all_bin_vecs = []
    print(width)
    for num in range(2**width):
        bin_vec = width * [0]
        for i, bit in enumerate(bin(num)[2:][::-1]):
            bin_vec[i] = 0 if bit == '0' else 1

        all_bin_vecs.append(bin_vec)

    return all_bin_vecs
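
As an aside on the construction above: the vectors are built least-significant bit first (index 0 takes the last character of bin(num)), so the same table can be produced with itertools.product. This equivalent one-liner is only an illustration, not part of the original project:

import itertools

width = 4  # example width
all_bin_vecs = [list(bits)[::-1]
                for bits in itertools.product([0, 1], repeat=width)]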
Example #38
    def run(self):
        utils.new_line()
        utils.info('Parse and group DICOM directory')
        self._parser.parse_acquisitions()

        utils.new_line()
        utils.info('Sort and set up acquisitions')
        self._parser.sort_acquisitions()

        #utils.new_line()
        #utils.ok('Acquisitions of interest:')
        #for _ in self._parser.caught: utils.info(_)

        utils.new_line()
        utils.warning('Acquisitions excluded:')
        for _ in self._parser._excluded:
            utils.info(_)

        utils.new_line()
        utils.info('Create YAML file for dcm2niibatch')
        for acq in self._parser.acquisitions:
            self._batch.add(acq)
        self._batch.write()

        utils.new_line()
        utils.ok('Batch file:')
        self._batch.show()

        if self._yes:
            launchBatch = True
        else:
            msg = "Do you want to launch dcm2niibatch ?"
            launchBatch = utils.query_yes_no(msg)

        if launchBatch:
            self._batch.launch()
            for acq in self._parser.acquisitions:
                acq.update_json()
        else:
            utils.new_line()
            utils.ok("To launch dcm2niibatch later:")
            utils.info("cd {}".format(self._codeDir))
            utils.info(self._batch.command)
        return 0
Example #39
    def setup_output_path(self):
        try:
            os.mkdir(self.output_dir)
            os.mkdir(self.model_dir)
            os.mkdir(self.eval_dir)
        except OSError:
            print 'output directory already exists...'
            if (utils.query_yes_no('Wipe data and continue?')):
                shutil.rmtree(self.output_dir)
                os.mkdir(self.output_dir)
                os.mkdir(self.model_dir)
                os.mkdir(self.eval_dir)
            else:
                return False

        os.chmod(self.output_dir, 0766)
        os.chmod(self.model_dir, 0766)
        os.chmod(self.eval_dir, 0766)
        return True
Example #40
async def run(midiout):
    try_again = True
    while try_again:
        print("Trying to connect to pulse sensor...")
        try:
            async with BleakClient(address) as client:
                model = await client.read_gatt_char(model_uid)
                model = model.decode("utf-8")

                battery = await client.read_gatt_char(battery_uid)
                battery = battery[0]

                print("Connected to pulse sensor: ", end="")
                print("Model: {:s}, battery state: {:d}% \n".format(
                    model, battery))

                def callback(sender, data):
                    bpm = data[1]
                    print("\r\rCurrent heart rate: {:d} bpm".format(bpm),
                          end="")
                    midi_val = hr2midi(bpm)
                    channel_volume_course = [176, 7, midi_val]
                    midiout.send_message(channel_volume_course)
                    print(", MIDI output value: {:d}".format(midi_val), end="")
                    print(" (approx volume: {:.2f} %).".format(
                        (midi_val / 127) * 100),
                          end="")

                await client.start_notify(heartbeat_uid, callback)
                while client.is_connected and running:
                    try:
                        await asyncio.sleep(1)
                    except KeyboardInterrupt:
                        await client.disconnect()
                        del midiout
                        print('Program ended by interrupt, bye bye! :-)')
        except BleakError as e:
            print(
                "Ooops, could not connect to the pulse sensor... This went wrong:"
            )
            print(str(e))
            try_again = query_yes_no("Try again?")
Example #41
def main(argv):
    parser = argparse.ArgumentParser(description='Create a skeleton application using some command line options.')
    parser.add_argument('appname', help='The application name')
    parser.add_argument('-b', '--bower', help='Dependencies installed using bower')
    parser.add_argument('-n', '--no-debug', action='store_false')
    parser.add_argument('-v', '--virtualenv', action='store_true')
    parser.add_argument('-d', '--database', action='store_true')
    parser.add_argument('-g', '--git', action='store_true')
    args = parser.parse_args()

    bower = None
    if args.bower:
        bower = args.bower.split(',')
    virtualenv = args.virtualenv
    debug = args.no_debug
    appname = args.appname
    database = args.database
    git = args.git

    if database:
        project = FlaskDbProject(appname)
    else:
        project = FlaskProject(appname)

    project.debug = debug

    project.bower = bower
    project.virtualenv = virtualenv
    project.git = git

    print(generate_brief(project.brief_var))
    errors = External().errors
    if len(errors) > 0:
        print(generate_errorlist({'errors': errors, }))
        sys.exit(1)

    if query_yes_no("Is this correct ?"):
        project.install()
    else:
        print("Aborting")
        sys.exit(0)
Example #42
def terminate_and_clean(self, confirm=True, debug=False):
    """
    Terminates an instance and deletes the associated security group and private key if they are named after the name
    tag of the instance.

    For example if an instance has the following security groups ['standard', 'instance_name'] and the following keypair
    ['instance_name'], then only the security group 'instance_name' and the keypair 'instance_name' will be deleted.

    This method will also ask for confirmation, except if the argument 'confirm' is set to False.

    :param boto.ec2.instance.Instance self:
        Current instance.
    :param bool confirm:
        Whether or not to ask for confirmation when executing this method. Default : True.
    :param bool debug:
        Displays debug information. Default : False.
    """
    if 'name' not in self.tags:
        print("This instance doesn't have a name tag. Aborting.")
        return
    print("Please wait.")
    sgs = [
        sg for sg in self.get_all_security_groups()
        if sg.name == self.tags['name'] and len(sg.instances()) == 1
    ]
    kp = self.connection.get_all_key_pairs(self.key_name)[0]
    print("SG : {}".format(", ".join(
        ["{} {}".format(sg.name, sg.id) for sg in sgs])))
    print("KeyPair : {}".format(kp.name))
    if confirm:
        if not query_yes_no("Are you sure ?"):
            print("Aborting")
            return
    self.terminate()
    self.wait_for('terminated')
    print("Instance is terminated.")
    for sg in sgs:
        sg.delete()
    print("Security Group(s) are deleted.")
    kp.delete()
    print("KeyPair is deleted.")
Example #43
def setup_directories(data_dir, output_dir):
    os.chdir(data_dir)
    
    try:
        os.mkdir(output_dir)
        os.mkdir('%s/models' % output_dir)
        os.mkdir('%s/eval' % output_dir)
    except OSError:
        print 'output directory already exists...'
        if (utils.query_yes_no('Wipe data and continue?')):
            shutil.rmtree(output_dir)
            os.mkdir(output_dir)
            os.mkdir('%s/models' % output_dir)
            os.mkdir('%s/eval' % output_dir)
        else:
            return False
    
    os.chmod(output_dir, 0766)
    os.chmod('%s/models' % output_dir, 0766)
    os.chmod('%s/eval' % output_dir, 0766)
    return True
Example #44
    def handle_noargs(self, **options):
        self.logger.debug("Tagging audio files")
        #mixes = Mix.objects.filter(uid='1348680a-507d-4a1e-a828-dffc90191c5b')
        mixes = Mix.objects.filter(mp3tags_updated=False)
        try:
            for mix in mixes:
                self.logger.debug("Tagging: %s" % mix.title)
                mix_file = mix.get_absolute_path()
                if not os.path.isfile(mix_file):
                    result = query_yes_no("Mix file %s\n\t(%s - %s)\ndoes not exist, delete mix entry?" % (
                                            mix_file, mix.title, mix.slug))
                    if result:
                        mix.delete()
                else:
                    mix.create_mp3_tags()
                    mix.mp3tags_updated = True
                    mix.save()
        except Exception:
            self.logger.exception("Error tagging mix: %s" % mix.uid)

        print "Finished tagging"
Example #45
def file_exist_check(new_filename, to_append):
    """
    Checks if a given file exists on disk. If so, prompts the user to
    overwrite it. If the user selects 'no', attempts to create a new file with
    <to_append> appended to the end of the filename.
    @params:
        new_filename    - Required : Name of file to check
        to_append       - Required : String to append if file exists
    """
    while exists(new_filename):
        print("WARNING: " + str(new_filename) + " already exists")
        print(new_filename)
        resp = utils.query_yes_no("Overwrite file?")
        if resp:
            print("Overwriting file")
            remove(new_filename)
            break
        else:
            new_filename += to_append
            print("Trying:", new_filename)
    return new_filename
Example #46
def terminate_and_clean(self, confirm=True, debug=False):
    """
    Terminates an instance and deletes the associated security group and private key if they are named after the name
    tag of the instance.

    For example if an instance has the following security groups ['standard', 'instance_name'] and the following keypair
    ['instance_name'], then only the security group 'instance_name' and the keypair 'instance_name' will be deleted.

    This method will also ask for confirmation, except if the argument 'confirm' is set to False.

    :param boto.ec2.instance.Instance self:
        Current instance.
    :param bool confirm:
        Whether or not to ask for confirmation when executing this method. Default : True.
    :param bool debug:
        Displays debug information. Default : False.
    """
    if 'name' not in self.tags:
        print("This instance doesn't have a name tag. Aborting.")
        return
    print("Please wait.")
    sgs = [sg for sg in self.get_all_security_groups() if sg.name == self.tags['name'] and len(sg.instances()) == 1]
    kp = self.connection.get_all_key_pairs(self.key_name)[0]
    print("SG : {}".format(", ".join(["{} {}".format(sg.name, sg.id) for sg in sgs])))
    print("KeyPair : {}".format(kp.name))
    if confirm:
        if not query_yes_no("Are you sure ?"):
            print("Aborting")
            return
    self.terminate()
    self.wait_for('terminated')
    print("Instance is terminated.")
    for sg in sgs:
        sg.delete()
    print("Security Group(s) are deleted.")
    kp.delete()
    print("KeyPair is deleted.")
Example #47
def store_students(filename):

	print('Storing students:')

	# MongoDB Drop Collection
	if query_yes_no("Drop the students collection?", "no"):
		db.students.drop()

	with open(os.path.join(main_dir, filename)) as f:
		reader = csv.DictReader(f)
		students = [ 
			{ 
				'name': row['Name'], 
				'surname': row['Surname'], 
				'email': row['Email'],
				'username': row['Username'],
				'token': str(uuid.uuid4()),
				'status': 0, # Status: "Created"
			} for row in reader ]

	# MongoDB Insert in Bulk
	# print("Inserting students in bulk")
	# result = db.students.insert_many(students)
	# print(result)

	# MongoDB Insert one by one
	for student in students:
		candidate = db.students.find(
			{ 'username': student['username'] },
			{ 'username': 1 },
		)
		if candidate.count() > 0:
			print('Student %s is already in the database' % (student['username']))
		else:
			print('Inserting %s in the database' % (student['username']))
			result = db.students.insert_one(student)
			print('Inserted: %s' % (result.inserted_id))
Example #48
def modify_csv(EXP):
    response = query_yes_no("Do you want to comment one of these experiments?",
                            default="no")

    choice = ["General_impression", "Suggested_changes"]

    if response:
        while True:
            try:
                name = input("Please enter the name of the experiment: ")
                variable = input(
                    "Please enter the name of the variable (General_impression or Suggested_changes): "
                )
                if variable not in choice:
                    print("Error with the arguments, try again.\n")
                    continue
                comment = input("Please enter the comment: ")
            except:
                name = raw_input("Please enter the name of the experiment: ")
                variable = raw_input(
                    "Please enter the name of the variable (General_impression or Suggested_changes): "
                )
                if variable not in choice:
                    print("Error with the arguments, try again.\n")
                    continue
                comment = raw_input("Please enter the comment: ")

            try:
                EXP[name].loc['Comments', variable] = comment
                print(EXP)
                NEW_EXP = modify_csv(EXP)
                return NEW_EXP
            except:
                print("Error with the arguments, try again.\n")
    else:
        return EXP
Example #49
def main():
    if utils.DATA['version'] != VERSION:
        print('Your version of Launchcraft ({}) does not match the minimum version of Launchcraft ({}). Please update.'.format(VERSION, utils.DATA['version']))
        utils.exit()

    print('This script will ask you yes or no questions.')
    print('Any answers in square brackets (e.g. [1.7.10]), or that are capitalized (e.g. [Y/n]) are the default answers, and will be selected when you press enter.')
    utils.print_separator()

    version = raw_input('Which version of Minecraft would you like to use? [1.7.10]: ').lower()
    if version == '':
        version = '1.7.10'

    if version not in utils.DATA['versions']:
        print("Invalid version selected.")
        utils.exit()

    utils.MODS = utils.DATA['versions'][version]

    JAR_DIR = os.path.join(VERSIONS_DIR, version)

    FORGE_VERSION = '{}-Forge{}'.format(version, utils.MODS['mods']['forge']['version'])
    FORGE_DIR = os.path.join(VERSIONS_DIR, FORGE_VERSION)

    print('Entering directory "{}".'.format(MINECRAFT_DIR))
    try:
        os.chdir(MINECRAFT_DIR)
    except:
        print('Failed to enter minecraft directory, please install minecraft first.')
        utils.exit()
    utils.print_separator()

    # Set the directory to which the custom profile will be installed.
    profile_name = raw_input('What would you like to call the profile being created? [launchcraft]: ').lower()
    if profile_name == '':
        profile_name = 'launchcraft'
    PROFILE_DIR = os.path.join(VERSIONS_DIR, profile_name)
    print('Creating profile {}'.format(profile_name))

    # Delete the old profile directory so we can start from scratch.
    try:
        shutil.rmtree(PROFILE_DIR)
        print('Removed old profile directory.')
    except OSError as ex:
        if ex.errno == errno.ENOENT:
            print('No old profile directory found.')
        else:
            print(ex)
            print('Failed to remove old profile directory, exiting...')
            utils.exit()
    utils.print_separator()

    forge = utils.query_yes_no('Would you like to use Forge?', default='no')
    if forge:
        if os.path.exists(FORGE_DIR):
            print('The required Forge version has been detected on your system.')
            message = 'reinstall'
        else:
            print('The required Forge version has not been detected on your system.')
            message = 'install'
        # Ask the user whether or not they need Forge.
        if utils.query_yes_no('Do you need to {} Forge?'.format(message), default='no'):
            forge = utils.MODS['mods']['forge']
            name = forge['name']
            version = forge['version']
            jarName = 'forge.jar'

            if sys.platform == 'win32' or sys.platform == 'cygwin':
                os.chdir(BASE_DIR)

            # Download the Forge installer.
            print('Downloading {} version {}'.format(name, version))
            utils.downloadFile(forge['url'], jarName)

            if sys.platform == 'win32' or sys.platform == 'cygwin':
                print('You must now run the {} that has been downloaded to your Launchcraft directory.'.format(jarName))
                utils.exit()
            else:
                # Run the installer so the user can install Forge.
                print('You will now be asked to install Forge version {}.'.format(version))
                with open(os.devnull, 'w') as devnull:
                    subprocess.call('java -jar {}'.format(jarName), shell=True, stdout=devnull)

                os.remove(jarName)
    utils.print_separator()

    JAR_FILE = os.path.join(PROFILE_DIR, '{}.jar'.format(profile_name))
    JSON_FILE = os.path.join(PROFILE_DIR, '{}.json'.format(profile_name))

    if forge:
        print('Using Forge {} as the base for the profile'.format(utils.MODS['mods']['forge']['version']))
        if not os.path.exists(MOD_DIR):
            os.makedirs(MOD_DIR)

        utils.INSTALLED_MODS.append('forge')
        JAR_DIR = FORGE_DIR
        print('Creating new profile directory.')
        shutil.copytree(FORGE_DIR, PROFILE_DIR)
        print('Renaming Forge jar.')
        shutil.move(os.path.join(PROFILE_DIR, '{}.jar'.format(FORGE_VERSION)), JAR_FILE)
        SOURCE_JSON_FILE = '{}.json'.format(FORGE_VERSION)

        print('Entering newly created profile directory.')
        os.chdir(PROFILE_DIR)
    else:
        print('Using Minecraft {} as the base for the profile'.format(version))
        # Create the profile directory.
        try:
            print('Creating new profile directory.')
            os.makedirs(PROFILE_DIR)
        except OSError as ex:
            print(ex)
            print('Failed to create new profile directory, exiting...')
            utils.exit()

        print('Entering newly created profile directory.')
        os.chdir(PROFILE_DIR)

        print('Downloading "{0}.jar" and "{0}.json".'.format(version))
        utils.downloadFile('https://s3.amazonaws.com/Minecraft.Download/versions/{0}/{0}.jar'.format(version), '{}.jar'.format(profile_name))
        utils.downloadFile('https://s3.amazonaws.com/Minecraft.Download/versions/{0}/{0}.json'.format(version), '{}.json'.format(version))
        SOURCE_JSON_FILE = '{}.json'.format(version)

    print('Creating "{}.json".'.format(profile_name))
    with open(SOURCE_JSON_FILE, 'r') as json_file:
        data = json.load(json_file)
    data['id'] = profile_name
    with open(JSON_FILE, 'w') as json_file:
        json.dump(data, json_file, indent=4)

    print('Deleting "{}".'.format(SOURCE_JSON_FILE))
    os.remove(SOURCE_JSON_FILE)
    utils.print_separator()

    if utils.query_yes_no('Do you want to install mods?', default='no'):
        print('Which mods would you like to install?')
        toInstall = utils.printAskOptions(utils.MODS['mods'])
        print('Installing mods.')
        print('')
        for mod in toInstall:
            modData = utils.MODS['mods'][mod]
            skip = False

            conflicts = [i for i in modData['conflicts'] if i in utils.INSTALLED_MODS]

            if mod == 'forge':
                continue

            # Do not install Forge-dependent mods if Forge is not installed.
            if 'forge' in modData['deps'] and 'forge' not in utils.INSTALLED_MODS:
                print('Skipping {} due to missing Forge'.format(modData['name']))
                skip = True
            # Skip conflicting mods
            elif conflicts:
                conflicting_mods = ""
                for i in conflicts:
                    conflicting_mods += utils.MODS['mods'][i]['name'] + ", "
                print('Skipping {} because it conflicts with {}'.format(modData['name'], conflicting_mods[:-2]))
                skip = True

            if skip:
                print('')
                continue

            utils.installDep(mod, JAR_FILE)
            print('')

    utils.removeMETAINF(JAR_FILE)
    utils.print_separator()

    if utils.query_yes_no('Do you want to install texture packs?', default='no'):
        if not os.path.exists(RESOURCEPACK_DIR):
            os.makedirs(RESOURCEPACK_DIR)
        print("What texture packs would you like to install?")
        toInstall = utils.printAskOptions(utils.MODS['resourcepacks'])
        print('Installing resourcepacks.')
        print('')
        for pack in toInstall:
            packData = utils.MODS['resourcepacks'][pack]

            utils.installResourcePack(pack)
            print('')
    utils.print_separator()

    if utils.query_yes_no('Do you want to install shader packs?', default='no'):
        if not os.path.exists(SHADERPACK_DIR):
            os.makedirs(SHADERPACK_DIR)
        print("What shader packs would you like to install?")
        toInstall = utils.printAskOptions(utils.MODS['shaderpacks'])
        print('Installing shaderpacks.')
        print('')
        for pack in toInstall:
            packData = utils.MODS['shaderpacks'][pack]

            utils.installShaderPack(pack)
            print('')
    utils.print_separator()

    print('Completed successfully!')

    # Pause before exiting so the window does not close immediately when double-clicked.
    try:
        raw_input('Press any key to exit...')
    except Exception:
        pass
    utils.exit()
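
Every snippet in this collection calls a query_yes_no helper without defining it. Below is a minimal sketch of what such a prompt helper could look like, assuming the interface used above (a question string plus an optional default of 'yes' or 'no', returning a boolean); it is an illustration, not the actual utils implementation.

def query_yes_no(question, default='yes'):
    """Ask a yes/no question on stdin and return the answer as a boolean."""
    valid = {'yes': True, 'y': True, 'no': False, 'n': False}
    if default is None:
        prompt = ' [y/n] '
    elif default == 'yes':
        prompt = ' [Y/n] '
    elif default == 'no':
        prompt = ' [y/N] '
    else:
        raise ValueError('invalid default answer: {!r}'.format(default))
    while True:
        choice = input(question + prompt).strip().lower()
        if choice == '' and default is not None:
            return valid[default]
        if choice in valid:
            return valid[choice]
        print("Please answer 'yes' or 'no' (or 'y'/'n').")
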
Example No. 50
import os
import sys
import logging
import shutil
import urllib
import tarfile
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import init
import config
import utils

gtzan_origin = "http://opihi.cs.uvic.ca/sound/genres.tar.gz"

if __name__ == '__main__':
    init.init_logger()
    conf = config.get_config()
    audio_folder = os.path.expanduser(conf.get('Input', 'AudioFolder'))
    if os.path.isdir(audio_folder):
        if utils.query_yes_no("Audio folder %s exists. Do you want to overwrite it?" % audio_folder):
            shutil.rmtree(audio_folder)
            os.makedirs(audio_folder)
    else:
        logging.debug("Audio Folder %s not found. Creating..." % audio_folder)
        os.makedirs(audio_folder)

    gtzan_dest = os.path.join(audio_folder, os.path.basename(gtzan_origin))
    if os.path.isfile(gtzan_dest):
        if utils.query_yes_no("GTZAN dataset already downloaded in %s. Do you want to download it again?" % gtzan_dest):
            os.remove(gtzan_dest)
            logging.info("Downloading GTZAN dataset from %s to %s" % (gtzan_origin, gtzan_dest))
            urllib.urlretrieve(gtzan_origin, gtzan_dest)
    else:
        logging.info("Downloading GTZAN dataset from %s to %s" % (gtzan_origin, gtzan_dest))
        urllib.urlretrieve(gtzan_origin, gtzan_dest)
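
The snippet above targets Python 2: urllib.urlretrieve was moved in Python 3. Under Python 3 the download step might look like the sketch below; the destination directory is a hypothetical placeholder, not a path taken from the script.

import os
from urllib.request import urlretrieve  # Python 3 location of urlretrieve

gtzan_origin = "http://opihi.cs.uvic.ca/sound/genres.tar.gz"
gtzan_dest = os.path.join("/tmp/gtzan", os.path.basename(gtzan_origin))  # hypothetical destination

os.makedirs(os.path.dirname(gtzan_dest), exist_ok=True)
urlretrieve(gtzan_origin, gtzan_dest)
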
Example No. 51
if len(trained_models) == 0:
    print('Error: no pretrained model was found in the models directory -', args.models_dir)
    print('Please change the models directory using the --models_dir option')
    exit(1)

title = 'Please choose pretrained model to use: '
model_file, _ = pick(trained_models, title)

with open(args.category_names, 'r') as f:
    cat_to_name = json.load(f)

check_gpu = 'cuda:0' if args.gpu and torch.cuda.is_available() else 'cpu'

if args.gpu and check_gpu == 'cpu':
    confirm_cpu = query_yes_no('No CUDA-compatible GPU was found. Do you want to continue using the CPU?')
    if not confirm_cpu:
        exit(0)

print('Running prediction, please wait...')

device = torch.device(check_gpu)

state = torch.load(model_file, map_location=check_gpu)


model = getattr(models, state['arch'])(pretrained=True)
for param in model.parameters():
    param.requires_grad = False
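
The snippet ends right after freezing the pretrained feature extractor. A typical continuation, assuming the checkpoint dictionary also stores the trained weights under a 'state_dict' key (an assumption; that key is not shown above), might be:

# Assumption: the checkpoint was saved with its weights under 'state_dict'.
model.load_state_dict(state['state_dict'])
model.to(device)
model.eval()  # switch to inference mode before predicting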

Example No. 52
#!/usr/bin/env python
import os
import shutil
import urllib
import tarfile
import utils

gtzan_origin = "http://opihi.cs.uvic.ca/sound/genres.tar.gz"
gtzan_base_path = os.path.join(os.environ["PYLEARN2_DATA_PATH"], "GTZAN")

if __name__ == '__main__':
    if os.path.isdir(gtzan_base_path):
        if utils.query_yes_no(
                "GTZAN base path %s exists. \
Do you want to overwrite it? (this will delete all the contents)"
                % gtzan_base_path):
            shutil.rmtree(gtzan_base_path)
            os.makedirs(gtzan_base_path)
    else:
        print("GTZAN base path %s not found. Creating..." % gtzan_base_path)
        os.makedirs(gtzan_base_path)

    gtzan_dest = os.path.join(gtzan_base_path, os.path.basename(gtzan_origin))
    if os.path.isfile(gtzan_dest):
        if utils.query_yes_no(
                "GTZAN dataset already downloaded in %s. \
Do you want to download it again?"
                % gtzan_dest):
            os.remove(gtzan_dest)
            print(
                "Downloading GTZAN dataset from %s to %s"
                % (gtzan_origin, gtzan_dest))
            urllib.urlretrieve(gtzan_origin, gtzan_dest)
Example No. 53
def read_iptables(def_vars):
    # TODO: fix the duplicate-addition bug; do not allow two rules on the same port
    read_dict = dict()
    # Filter old rules in public_services
    try:
        read_dict["public_services"] = [elem for elem in def_vars["public_services"]
                                        if (elem["service"] == "ssh" and elem["port"] == 22)
                                        or query_yes_no(userlog.warn("Keep this public service?\n" +
                                                                     json.dumps(elem, indent=4)))]
    except KeyError:
        read_dict["public_services"] = list()

    try:
        # Filter old rules in restricted_services
        read_dict["restricted_services"] = [elem for elem in def_vars["restricted_services"]
                                            if (elem["service"] == "ssh" and elem["port"] == 22)
                                            or query_yes_no(userlog.warn("Keep this restricted service?\n" +
                                                                         json.dumps(elem, indent=4)))]
    except KeyError:
        read_dict["restricted_services"] = list()

    iface = "{{ iface }}"

    # Read interface
    if query_yes_no(userlog.warn("Specify an interface? Else the default as defined in ansible facts will be used."),
                    default="no"):
        iface = str(input(userlog.info("Please Specify a valid interface: >> ")))
        read_dict['iface'] = iface

    if query_yes_no(userlog.warn("Enable SSH?")):
        # DEFINE SSH SERVICE
        ssh_service = dict()
        ssh_service["port"] = 22
        ssh_service["service"] = "ssh"
        ssh_service["protocol"] = "tcp"
        ssh_service["iface"] = iface
        ssh_service["direction"] = "in"

        # REMOVE OLD SSH RULES (and anything else bound to port 22) from public_services
        read_dict["public_services"] = [elem for elem in read_dict["public_services"]
                                        if elem["service"] != "ssh"
                                        and elem["port"] != 22]

        # REMOVE OLD SSH RULES (and anything else bound to port 22) from restricted_services
        read_dict["restricted_services"] = [elem for elem in read_dict["restricted_services"]
                                            if elem["service"] != "ssh"
                                            and elem["port"] != 22]

        if query_yes_no(userlog.warn("Restrict SSH?")):
            ssh_service["sources"] = list()
            while True:
                ip_addr = read_ip(custom_message=" to allow SSH from", accept_none=True)
                if not ip_addr:
                    break
                else:
                    ssh_service["sources"].append(str(ip_addr))

            read_dict["restricted_services"].append(ssh_service)
        else:
            read_dict["public_services"].append(ssh_service)

    while True:
        print(userlog.info("\nPlease Enter New Service\n"))
        service = dict()
        try:
            service["port"] = int(input(userlog.info("Enter Port number: >>") + ' '))
        except ValueError:
            break
        service["service"] = input(userlog.info("Enter Service Name: >> ") + ' ')
        if not service["service"]:
            break

        if query_yes_no("TCP?"):
            service["protocol"] = "tcp"
        elif query_yes_no("UDP?"):
            service["protocol"] = "udp"
        else:
            print(userlog.error("Ignoring Service\n"))
            continue

        service["iface"] = iface
        if query_yes_no("Ingress?"):
            service["direction"] = "in"
        else:
            service["direction"] = "out"

        if query_yes_no("Restrict Service?"):
            service["sources"] = list()
            while True:
                ip_addr = read_ip(custom_message=" to allow {0} from".format(service["service"]), accept_none=True)
                if not ip_addr:
                    break
                else:
                    service["sources"].append(str(ip_addr))

            read_dict["restricted_services"].append(service)
        else:
            read_dict["public_services"].append(service)

            # TODO Refactor this to a class, like Packages-Hosts

    read_dict["RELOAD_FLAG"] = query_yes_no(userlog.error("ATTENTION!\nReload the rules immediately?\n"
                                                          "This might result in a loss of connectivity"),
                                            default="no")

    # TODO Ask for application of FW rules
    # READ Template for Rules
    # READ allow out ?
    # TODO Implement more services
    # READ BASE services
    # READ RESTRICTED services
    return read_dict
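
For reference, the dictionary returned by read_iptables() has roughly the shape sketched below; the services and addresses are made-up examples, not output from a real run (the "iface" key is only added when the user overrides the interface).

example_result = {
    "public_services": [
        {"port": 80, "service": "http", "protocol": "tcp",
         "iface": "{{ iface }}", "direction": "in"},
    ],
    "restricted_services": [
        {"port": 22, "service": "ssh", "protocol": "tcp",
         "iface": "{{ iface }}", "direction": "in",
         "sources": ["192.0.2.10"]},
    ],
    "RELOAD_FLAG": False,
}
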
Example No. 54
def main():
    parser = OptionParser("usage: luchizz.py --host hosts [options]",
                          version="luchizz {}".format(__version__))
    parser.add_option("-H", "--hosts", dest="HOSTS",
                      help="comma-separated list of hosts to operate on",
                      type='string', metavar="HOSTS")
    parser.add_option("-p", "--packages-file", dest="PKGS_FILE",
                      help="yaml file for the debian packages you want to "
                      "install via apt-get",
                      type='string', metavar="PKGS_FILE")
    parser.add_option("-d", "--debug", dest="DEBUG",
                      help="all output from fabric", action='store_true',
                      default=False)
    (options, args) = parser.parse_args()

    # Setting up the target hosts
    if options.HOSTS:
        env.host_string = options.HOSTS.split(',')[0]

    # Setting up the default path for the packages yaml
    if not options.PKGS_FILE:
        options.PKGS_FILE = os.path.join(LUCHIZZ_DIR, 'packages.yaml')
    # Make sure the package file exists
    if os.path.isfile(options.PKGS_FILE):
        # get the packages dictionary from the yaml file
        with open(options.PKGS_FILE, 'r') as p:
            packages = yaml.safe_load(p.read())
    else:
        print("IOError: packages file not found {}".format(options.PKGS_FILE))
        sys.exit(1)

    # Setting up fabric output for debug
    # FIXME: there is a problem with the overridden output options; the
    # context managers probably need to be used everywhere
    if options.DEBUG:
        to_set = {'aborts': True,
                  'debug': True,
                  'running': True,
                  'status': True,
                  'stderr': True,
                  'stdout': True,
                  'user': True,
                  'warnings': True}
    # Setting up fabric output for normal usage
    else:
        to_set = {'aborts': True,
                  'debug': False,
                  'running': False,
                  'status': False,
                  'stderr': False,
                  'stdout': True,
                  'user': False,
                  'warnings': True}
    # Apply the dictionary structure to the output handler of fabric
    for key, value in to_set.items():
        fabric_output[key] = value

    print_splash(__version__)
    with quiet():
        check_root()
    print("\nReady to luchizz: {}?\n"
          "CTRL-C to abort\n".format(env.host_string))
    time.sleep(1)

    # Setup etckeeper
    if not is_installed('etckeeper'):
        if query_yes_no("SETUP etckeeper to track changes in /etc "
                        "using git?", 'yes'):
            setup_etckeeper()

    # Luchizz the shell
    if query_yes_no("Do you want to `luchizz` root and all users "
                    "with a home folder in /home?", 'yes'):
        with quiet():
            luchizz_shell()
        # If the luchizz shell is applied, a dedicated etckeeper commit is made
        # Return code 127 is accepted so this won't fail if etckeeper is not installed
        with settings(ok_ret_codes=(0, 1, 127)), quiet():
            sudo('etckeeper commit -m "luchizzed shell"')

    # Install luchizz scripts
    if query_yes_no("INSTALL luchizz scripts in /usr/local/bin?", 'yes'):
        with quiet():
            luchizz_scripts()

    # Copy ssh keys
    if query_yes_no("CONFIGURE local ssh keys as authorized for "
                    "authentication?", 'yes'):
        with quiet():
            set_authentication_keys()

    # Copy .gitconfig
    if os.path.isfile(os.path.join(os.getenv('HOME'), '.gitconfig')):
        if query_yes_no("CONFIGURE .gitconfig file from the local user?",
                        'yes'):
            with quiet():
                set_gitconfig()

    if query_yes_no("CONFIGURE do you want to luchizz the gitconfig for"
                    "local user?", 'yes'):
            with quiet():
                luchizz_gitconfig()

    # Disable backports
    if query_yes_no("DISABLE backports repositories?", 'yes'):
        with quiet():
            set_disable_backports()

    # Disable automatic installation of suggested and recommended packages
    if query_yes_no("DISABLE automatic installation of recommended packages?",
                    'yes'):
        with quiet():
            set_disable_recommended()

    for pkg_section in packages.keys():
        if query_yes_no("INSTALL {} packages?".format(pkg_section),
                        'yes'):
            install_packages(packages[pkg_section])

    # ~shorewall = query_yes_no("Do you want to install shorewall and setup "
    # ~"as one interface server?""", 'no')
    # ~if shorewall:
    # ~setup_shorewall_one_interface()

    # Catch-all commit for etckeeper
    # Return code 127 is accepted so this won't fail if etckeeper is not installed
    with settings(ok_ret_codes=(0, 1, 127)), quiet():
        sudo('etckeeper commit -m "final luchizz commit"')

    print "\nluchizz done"
    return 0
Example No. 55
def read_ssh_keys(def_vars):
    read_dict = dict()

    if not def_vars:
        def_vars = dict()

    # Validate Key Path
    if "exec_user_keys" in def_vars:
        exec_user_keys = [key for key in def_vars["exec_user_keys"] if Path(os.path.expanduser(key["file"])).is_file()
                          and query_yes_no(userlog.warn("Keep this Key Option? ---> {0}").format(key))]

    else:
        exec_user_keys = []

    print(userlog.info("Current Public Keys that will be installed for Ansible runner:\n"
          + json.dumps(exec_user_keys, indent=4))+"\n")

    print(userlog.warn("Reading Additional Keys. Enter \"empty\" string to stop."))

    while True:
        key = read_pub_key(key="exec user")

        if not key:
            break

        if query_yes_no(userlog.warn("=== Present? ===")):
            state = "present"
        else:
            state = "absent"

        exec_user_keys.append({"file": key, "state": state})

    # Keys for Root
    root_keys = []
    if query_yes_no(userlog.warn("Will you execute as ROOT?")):
        print(userlog.error("Beware! The keys you have specified for the exec user will be installed to Root")+"\n")

    elif query_yes_no(userlog.error("Install a key to ROOT?")):

        if "root_keys" in def_vars:
            root_keys = [key for key in def_vars["root_keys"] if Path(os.path.expanduser(key["file"])).is_file()
                         and query_yes_no(userlog.warn("Keep this Key? ---> {0}").format(key))]

        print(userlog.info("Current Public Keys that will be installed for ROOT:\n"
                           + json.dumps(root_keys, indent=4)))

        print(userlog.warn("\nReading Additional Keys. Enter \"empty\" string to stop."))

        while True:
            key = read_pub_key(key="root user")

            if not key:
                break

            if query_yes_no(userlog.warn("=== Present? ===")):
                state = "present"
            else:
                state = "absent"

            root_keys.append({"file": key, "state": state})

    else:
        pass

    custom_user_keys = []
    if "custom_user_keys" in def_vars:
        custom_user_keys = [key for key in def_vars["custom_user_keys"]
                            if Path(os.path.expanduser(key["file"])).is_file()
                            and query_yes_no(userlog.warn("Keep this Key? ---> {0}").format(key))]

    read_dict["exec_user_keys"] = exec_user_keys
    read_dict["root_keys"] = root_keys

    # TODO this part need a bit of refinement

    if query_yes_no(userlog.info("Enter keys for another user?"), default="no"):

        custom_user = None
        if "custom_user" in def_vars:
            custom_user = def_vars["custom_user"]

        while True:
            print(userlog.info("Custom User value is:") + " {0}".format(custom_user))

            if custom_user and query_yes_no(userlog.warn("Keep the value?")):
                break
            else:
                custom_user = input(userlog.info("Enter the desired username!\n"))

        print(userlog.info("Current Public Keys that will be installed for the user:"******" {0}\n".format(custom_user)
                           + json.dumps(custom_user_keys, indent=4)))

        if query_yes_no(userlog.warn("Enter additional Keys?")):
            while True:
                key = read_pub_key(key="Custom user")

                if not key:
                    break

                if query_yes_no(userlog.warn("=== Present? ===")):
                    state = "present"
                else:
                    state = "absent"

                custom_user_keys.append({"file": key, "state": state})

        read_dict["custom_user_keys"] = custom_user_keys

    else:
        read_dict["custom_user_keys"] = []

    return read_dict
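
Each key entry handled by read_ssh_keys() is a small dictionary with a "file" path and a "state". An illustrative (made-up) return value looks like this:

example_result = {
    "exec_user_keys": [{"file": "~/.ssh/id_rsa.pub", "state": "present"}],
    "root_keys": [],
    "custom_user_keys": [],
}
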
Example No. 56
def main(argv):
    parser = argparse.ArgumentParser(description='Create a flask skeleton application using some command line options.')
    parser.add_argument('appname', help='The application name')
    parser.add_argument('-b', '--bower', help='Comma-separated list of dependencies to install using bower')
    parser.add_argument('-n', '--no-debug', action='store_false')
    parser.add_argument('-v', '--virtualenv', action='store_true')
    parser.add_argument('-d', '--database', action='store_true')
    parser.add_argument('-g', '--git', action='store_true')
    args = parser.parse_args()

    errors = []
    bower = None
    bower_exe = None
    if args.bower:
        bower = args.bower.split(',')
        bower_exe = shutil.which('bower')
        if not bower_exe:
            errors.append('Bower executable could not be found.')
    virtualenv = args.virtualenv
    virtualenv_exe = None
    if virtualenv:
        virtualenv_exe = shutil.which('virtualenv')
        if not virtualenv_exe:
            errors.append('Virtualenv executable could not be found.')
            virtualenv = False

    debug = args.no_debug
    appname = args.appname
    fullpath = os.path.join(cwd, appname)
    secret_key = codecs.encode(os.urandom(32), 'hex').decode('utf-8')

    template_var = {
        'pyversion': platform.python_version(),
        'appname': appname,
        'bower': bower,
        'debug': debug,
        'virtualenv': virtualenv,
        'secret_key': secret_key,
        'path': fullpath,
        'require': colors.WARNING,
        'enabled': colors.OKGREEN,
        'disabled': colors.FAIL,
        'end': colors.ENDC,
        'database': args.database,
        'git': args.git
    }

    if virtualenv:
        template_var['virtualenv_exe'] = virtualenv_exe

    if bower:
        template_var['bower_exe'] = bower_exe

    print(generate_brief(template_var))
    if len(errors) > 0:
        template = template_env.get_template('errors.jinja2')
        template_var = {
            'errors': errors,
            'red': colors.FAIL,
            'end': colors.ENDC
        }
        print(template.render(template_var))
        sys.exit(1)

    if query_yes_no("Is this correct ?"):
        if args.database:
            skeleton_dir = 'skel_db'
            config_file = 'config_db.jinja2'
        else:
            skeleton_dir = 'skel'
            config_file = 'config.jinja2'
        # Copying the whole skeleton into the new path. Error if the path already exists
        # TODO error handling here.
        print('Copying Skeleton...\t\t\t', end="", flush=True)
        shutil.copytree(os.path.join(script_dir, skeleton_dir), fullpath)
        print("{green}Ok{end}".format(green=colors.OKGREEN, end=colors.ENDC))
        # Creating the configuration file using the command line arguments
        print('Creating config file...\t\t\t', end="", flush=True)
        template = template_env.get_template(config_file)
        template_var = {
            'secret_key': secret_key,
            'debug': debug,
        }
        with open(os.path.join(fullpath, 'config.py'), 'w') as fd:
            fd.write(template.render(template_var))

        print("{green}Ok{end}".format(green=colors.OKGREEN, end=colors.ENDC))

        if virtualenv:
            # If virtualenv is requested, then create it and install the required libs to work
            print('Creating the virtualenv...\t\t', end="", flush=True)
            output, error = subprocess.Popen(
                [virtualenv_exe, os.path.join(fullpath, 'venv'), '--no-site-packages'],
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE
            ).communicate()
            if error:
                with open('virtualenv_error.log', 'w') as fd:
                    fd.write(error.decode('utf-8'))
                    print("{red}An error occured during the creation of the virtualenv. Please consult {yellow}virtualenv_error.log{red} file for details.{end}".format(
                        red=colors.FAIL,
                        yellow=colors.WARNING,
                        end=colors.ENDC))
                    sys.exit(2)
            venv_bin = os.path.join(fullpath, 'venv/bin')
            print("{green}Ok{end}".format(green=colors.OKGREEN, end=colors.ENDC))
            print("Installing Python Dependencies...\t", end="", flush=True)
            output, error = subprocess.Popen(
                [os.path.join(venv_bin, 'pip'), 'install', '-r', os.path.join(fullpath, 'requirements.txt')],
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE
            ).communicate()
            if error:
                with open('pip_error.log', 'w') as fd:
                    fd.write(error.decode('utf-8'))
                    print("{red}An error occured during the installation of dependencies. Please consult {yellow}pip_error.log{red} file for details.{end}".format(
                        red=colors.FAIL,
                        yellow=colors.WARNING,
                        end=colors.ENDC))
                    sys.exit(2)
            print("{green}Ok{end}".format(green=colors.OKGREEN, end=colors.ENDC))

        if bower:
            os.chdir(os.path.join(fullpath, 'app', 'static'))
            for dependency in bower:
                print("Bower {}...\t\t\t".format(dependency.title()), end="", flush=True)
                output, error = subprocess.Popen(
                    [bower_exe, 'install', dependency],
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE
                ).communicate()
                if error:
                    with open('bower_error.log', 'w') as fd:
                        fd.write(error.decode('utf-8'))
                    print("{red}An error occured during the installation of {dep}. Please consult {yellow}bower_error.log{red} file for details.{end}".format(
                        red=colors.FAIL,
                        yellow=colors.WARNING,
                        end=colors.ENDC,
                        dep=dependency))
                print("{green}Ok{end}".format(green=colors.OKGREEN, end=colors.ENDC))

        if args.git:
            print('Git Init...\t\t\t\t', end="", flush=True)
            output, error = subprocess.Popen(
                ['git', 'init', fullpath],
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE
            ).communicate()
            if error:
                with open('git_error.log', 'w') as fd:
                    fd.write(error.decode('utf-8'))
                    print("{red}An error occured during the creation of the virtualenv. Please consult "
                          "{yellow}virtualenv_error.log{red} file for details.{end}".format(
                              red=colors.FAIL,
                              yellow=colors.WARNING,
                              end=colors.ENDC))
                    sys.exit(2)
            print("{green}Ok{end}".format(green=colors.OKGREEN, end=colors.ENDC))
            print('Generating Gitignore...\t\t\t', end="", flush=True)
            shutil.copyfile(os.path.join(script_dir, 'templates', 'gitignore'), os.path.join(fullpath, '.gitignore'))
            print("{green}Ok{end}".format(green=colors.OKGREEN, end=colors.ENDC))

    else:
        print("Aborting")
        sys.exit(0)
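
The virtualenv step above shells out to a separate virtualenv executable. On Python 3.3+ the standard-library venv module can create the environment directly; a sketch, with a hypothetical project path standing in for fullpath, would be:

import os
import venv

project_path = '/tmp/myflaskapp'  # hypothetical stand-in for fullpath
venv.create(os.path.join(project_path, 'venv'), with_pip=True)
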
Example No. 57
def read_packages(def_vars):
    read_dict = dict()

    # TODO: ditch the current logic. Ask "keep it?" for the entire dictionary at once; otherwise check each entry. Consider BASE packages only?

    # UPGRADE SYSTEM
    read_dict["UPGRADE_DIST_FLAG"] = query_yes_no(userlog.warn("Do you want to update/upgrade the system?"))

    # READ BASE PACKAGES
    if "base_packages" in def_vars:
        # VALIDATE Already Present vars
        base_pack = [elem for elem in def_vars["base_packages"]
                     if Packages.validate(raise_f=False, **elem)
                     and query_yes_no(userlog.info(json.dumps(elem, indent=4)+"\nKeep this?"))]

        print(userlog.warn("\nAll packages:\n"+json.dumps(def_vars["base_packages"], indent=4)))

        if query_yes_no(userlog.info("Do you want to insert more BASE packages?")):
            print(userlog.warn("\nEnter Additional Base Packages.\n"))
            for elem in Packages.read_extra_packages():
                base_pack.append(elem)

        print(userlog.warn("\nAll packages:\n"+json.dumps(def_vars["base_packages"], indent=4)))
    else:
        base_pack = []
        print(userlog.error("\nNo BASE packages. Please enter package name. Or enter empty string to stop\n"))
        for elem in Packages.read_extra_packages():
            base_pack.append(elem)

    if base_pack:
        read_dict["base_packages"] = base_pack
    else:
        if query_yes_no(userlog.warn("Install NO BASE packages?")):
            read_dict["base_packages"] = base_pack

    # READ EXTRA PACKAGES
    if "extra_packages" in def_vars:
        # VALIDATE Already Present vars
        extra_pack = [elem for elem in def_vars["extra_packages"]
                      if Packages.validate(raise_f=False, **elem)
                      and query_yes_no(userlog.info(json.dumps(elem, indent=4)+"\nKeep this?"))]

        print(userlog.warn("\nAll packages:\n"+json.dumps(def_vars["extra_packages"], indent=4)))

        if query_yes_no("Do you want to insert more EXTRA packages?"):
            print("\nEnter Packages.\n")
            for elem in Packages.read_extra_packages():
                extra_pack.append(elem)

        print("\nAll packages:\n"+json.dumps(def_vars["extra_packages"], indent=4))
    else:
        extra_pack = []
        print("\nNo packages. Please enter package name. Or enter empty string to stop\n")
        for elem in Packages.read_extra_packages():
            extra_pack.append(elem)

    if extra_pack:
        read_dict["extra_packages"] = extra_pack
    else:
        if query_yes_no("Install NO extra packages?"):
            read_dict["extra_packages"] = extra_pack

    return read_dict
Example No. 58
def read_hostnames_and_hosts(def_vars):
    """

    :param def_vars:
    :return:
    """

    read_dict = dict()

    # Load Hostname
    try:
        hostname = def_vars["hostname"]

    except KeyError:
        hostname = ''

    # Read Hostname if None or you don't want to keep it
    if not hostname or not query_yes_no("Keep this hostname: {0}?".format(hostname)):
        hostname = read_hostname()

    try:
        # Filter Association Mappings
        host_mappings = [elem for elem in def_vars["hosts"]
                         if query_yes_no(userlog.warn("Keep this IP -> Hostname mapping?\n" +
                                                      json.dumps(elem, indent=4)))]

    except KeyError:
        host_mappings = []

    while True:
        print(userlog.info("\nPlease Enter a New IP - HOSTNAMES mapping. Empty to break\n"))
        ip_addr = read_ip(accept_none=True)

        if not ip_addr:
            break

        # Does not accept_none: at least one hostname is required for a "present" mapping
        if query_yes_no('State Present?'):
            state = 'present'

            assigned_hosts = [read_hostname()]
            print(assigned_hosts)

            while True:
                print(userlog.info("Enter Empty String to finish the mapping."))
                myhost = read_hostname(accept_none=True)
                if myhost:
                    assigned_hosts.append(myhost)
                else:
                    break

        else:
            state = 'absent'
            assigned_hosts = []

        host_mappings.append(
            {
                'ip': str(ip_addr),
                # creates tab-separated string from the list of assigned hosts
                'name': '\t'.join(assigned_hosts),
                'state': state
            }
        )

    read_dict["hosts_mappings"] = host_mappings
    read_dict["hostname"] = hostname

    return read_dict