Exemplo n.º 1
0
def upload_kits(version,
                upload_type="ent",
                remote_dir="/var/www/downloads.voltdb.com/technologies/server",
                kit_dir="/home/test/releases/released",
                dry_run='False'):
    """Upload kits to the voltdb download site.

    Args:
        version - version number such as 7.1.2
        upload_type - upload types are "ent" (only ent kit) and "all" for all kits
        remote_dir - directory to upload to
        kit_dir - directory to pull kits from
        dry_run - string/bool option; if truthy, the upload will not be done
    """
    if not version:
        exit ("FATAL: You must specify a version number that exists in ~tests/releases/released")

    #Any upload_type starting with ent will work
    #This allows the Jenkins selector to be "ent_only"
    upload_type = upload_type[:3]
    if upload_type not in ("ent", "all"):
        exit("FATAL: Upload upload_type must be enterprise or all")

    dry_run = str_option_to_bool(dry_run)
    ent_kits = ["voltdb-ent-%s.tar.gz", "voltdb-ent-%s.SHA256SUM",
                "LINUX-voltdb-ent-%s.tar.gz", "MAC-voltdb-ent-%s.tar.gz"]
    all_kits = ent_kits + ["voltdb-pro-%s.tar.gz", "voltdb-pro-%s.SHA256SUM",
                           "voltdb-%s.tar.gz", "voltdb-community-%s.tar.gz"]
    # Explicit dispatch table instead of the previous eval() on a
    # string-built name -- same behavior, no code execution from data.
    kit_lists = {"ent": ent_kits, "all": all_kits}
    kits_home = kit_dir + "/voltdb-" + version
    count = 0

    with quiet():
        dircheck = local("ls -d " + kits_home, capture=True)
    if dircheck.failed:
        exit ("FATAL: " + dircheck.stderr)

    #For all the files in ent_kits or all_kits upload the files
    for k in kit_lists[upload_type]:
        f = os.path.join(kits_home, k) % version
        with quiet():
            filecheck = local("ls " + f)

        if filecheck.succeeded:
            count += 1
            if dry_run:
                print("DRYRUN:" + f + "  -->  " + remote_dir)
            else:
                with cd(remote_dir):
                    put(f, k % version)

    if not count:
        # Nothing matched: most likely a tag/version.txt mismatch.
        print("Maybe the kits in kits_home do not match version " + version)
        print("This can happen if your tag does not match your version.txt")
        print("")
        print("Maybe these files need to be renamed:")
        local("ls " + kits_home)
        exit ("FATAL: No files uploaded")
Exemplo n.º 2
0
def ssh_cmd(host, cmd, options=None):
    """Run ``cmd`` on ``host`` and return the command output.

    Runs locally (captured) when ``host`` is "localhost"; otherwise runs
    over ssh using the caller's ssh config for that host.

    ``options`` is accepted for backward compatibility but is not used by
    this function.  The previous default was a mutable ``{}``, which is the
    classic shared-mutable-default pitfall; ``None`` is equivalent here.
    """
    if host == "localhost":
        with settings(quiet()):
            output = local(cmd, capture=True)
    else:
        with settings(quiet(), use_ssh_config=True, host_string=host):
            output = run(cmd)
    return output
Exemplo n.º 3
0
 def generate_ssh_keys(self):
     """Create any missing ssh keys from self.ssh_keys and print their fingerprints."""
     for key_name, keygen_args in sorted(self.ssh_keys):
         if not exists(self.custom_template_path):
             os.mkdir(self.custom_template_path)
         key_path = join(self.custom_template_path, key_name)
         if exists(key_path):
             # Key already generated on a previous run.
             continue
         with settings(quiet(), warn_only=True):
             outcome = local("ssh-keygen %s -f %s -N ''" % (keygen_args, key_path), capture=True)
             if outcome.failed:
                 continue
         with settings(quiet()):
             fp = local("ssh-keygen -lf %s" % key_path, capture=True).split()[1]
         print("Generated %s with fingerprint %s." % (key_name, fp))
Exemplo n.º 4
0
def jobs():
    """List scrapy jobs: rsync job-info files locally, mark running containers,
    and print everything as a table to stdout."""
    local_jobs_dir = mkdtemp()
    # Download all job infos  TODO: is there better way?
    with quiet():
        rsync_project(remote_dir=env.jobs_path + '/',
                      local_dir=local_jobs_dir,
                      upload=False)
    table = []
    table.append(['project', 'spider', 'job', 'state', 'items', 'requests',
                  'errors'])

    # Collect (project, spider, job) triples for currently running containers.
    # Container names are expected to be of the form scrapy---project---spider---job.
    running = set()
    with quiet():
        docker_ps = run('sudo docker ps | grep scrapy---')
        for line in docker_ps.splitlines():
            container_id = line.split()[-1]
            _, project, spider, job = container_id.split('---')
            running.add((project, spider, job))

    # Walk the downloaded tree: <project>/<spider>/<numeric job id>.
    for project in sorted(listdir(local_jobs_dir)):
        project_path = join(local_jobs_dir, project)
        for spider in sorted(listdir(project_path)):
            spider_path = join(project_path, spider)
            # Sort job ids numerically, newest first.
            for job in sorted(map(int, listdir(spider_path)), reverse=True):
                job = str(job)
                items = '-'
                requests = '-'
                errors = '-'
                state = 'unknown'
                job_path = join(spider_path, job)
                with open(job_path) as f:
                    try:
                        job_info = json.load(f)
                    except ValueError:
                        # Unparseable/partial job file -- keep the placeholders.
                        pass
                    else:
                        stats = job_info['stats']
                        if 'finish_reason' in stats:
                            state = 'done ({})'.format(stats['finish_reason'])
                        elif (project, spider, job) in running:
                            state = 'running'
                        items = stats.get('item_scraped_count', 0)
                        requests = stats.get('response_received_count', 0)
                        errors = stats.get('log_count/ERROR', 0)
                table.append([project, spider, job, state, items, requests,
                              errors])

    pprint_table(sys.stdout, table)
Exemplo n.º 5
0
def check_java_checkstyle(file_path, config_path=None):
    """Run checkstyle on a .java file; return (passed, formatted_output_or_None)."""
    if not file_path.endswith(".java"):
        return True, None

    # Fetch the checkstyle jar on first use.
    if not os.path.exists(CHECKSTYLE_JAR_PATH):
        with lcd(os.path.join(OTTERTUNE_DIR, "client/controller")):  # pylint: disable=not-context-manager
            local("gradle downloadJars")

    options = '-c ' + config_path if config_path is not None else ''
    with quiet():
        res = local("java -jar {} {} {}".format(CHECKSTYLE_JAR_PATH, options, file_path),
                    capture=True)
    lines = res.stdout.split('\n')
    # Checkstyle wraps its report in fixed first/last lines; sanity-check them.
    assert len(lines) >= 2 and lines[0] == "Starting audit..." and lines[-1] == "Audit done."
    if len(lines) == 2:
        # No violation lines between the wrapper lines.
        return True, None
    messages = []
    for violation in lines[1:-1]:
        fields = violation.strip().split(':')
        lineno = int(fields[1])
        text, code = fields[-1].rsplit('[', 1)
        messages.append(format_message(os.path.basename(file_path), lineno,
                                       text.strip(), code[:-1]))
    return False, ''.join(messages)
Exemplo n.º 6
0
def check_pylint(file_path, config_path=None):
    """Run pylint on a .py file; return (passed, formatted_output_or_None)."""
    if not file_path.endswith(".py"):
        return True, None

    opts = [
        '--output-format=json',
        '--reports=yes',
    ]
    if config_path is not None:
        opts.append('--rcfile=' + config_path)

    with settings(warn_only=True), quiet():
        res = local('pylint {} {}'.format(' '.join(opts), file_path), capture=True)
    if res.stdout == '':
        # No JSON output should only happen on a fully clean run.
        assert res.return_code == 0, 'return_code={}, expected=0\n{}'.format(
            res.return_code, res.stderr)
        return True, None

    messages = []
    for entry in json.loads(res.stdout):
        # Strip pylint's caret/pipe markers and collapse repeated spaces.
        cleaned = entry['message'].replace('^', '').replace('|', '')
        cleaned = re.sub(' +', ' ', cleaned).strip()
        messages.append(format_message(os.path.basename(file_path), entry['line'],
                                       cleaned, entry['symbol']))
    return res.return_code == 0, ''.join(messages)
Exemplo n.º 7
0
    def _validate_puppet(self, recurse=False):
        """ when recurse==True,
              all puppet under _ymir_service_root/puppet will be checked

            otherwise,
              only validate the files mentioned in SETUP_LIST / PROVISION_LIST

            Returns a list of error strings (empty when everything validates).
            NOTE(review): ``recurse`` is not referenced in this body -- confirm
            whether callers still rely on it.
        """
        errs = []
        pdir = self._puppet_dir
        if not os.path.exists(pdir):
            msg = 'puppet directory does not exist @ {0}'
            msg = msg.format(pdir)
            errs.append(msg)
        else:
            with quiet():
                parser = self._template_data().get('puppet_parser', '')
                validation_cmd = 'puppet parser {0} validate '.format(
                    '--parser {0}'.format(parser) if parser else '')
                result = local('find {0}|grep .pp$'.format(pdir), capture=True)
                for filename in result.split('\n'):
                    # The original code had a bare string expression
                    # " .. validating {0}".format(filename) here -- a no-op,
                    # presumably a lost print/log call; removed as dead code.
                    result = local('{1} {0}'.format(
                        filename, validation_cmd), capture=True)
                    error = result.return_code != 0
                    if error:
                        short_fname = filename.replace(os.getcwd(), '.')
                        error = "running `{1} {0}'".format(
                            short_fname, validation_cmd)
                        errs.append(error)
        return errs
Exemplo n.º 8
0
def test():
    'Test differences in rendering.'

    reference = 'reference.txt'
    output = 'output.txt'

    with lcd(TESTS_DIR):
        def curl(path):
            # Append the rendered page to the output file and show progress.
            local('curl http://localhost:5000%s >> %s' % (path, output))
            stdout.write('.')
            stdout.flush()

        with quiet():
            local('echo > ' + output)
            for page in ('/', '/feed', '/posts'):
                curl(page)

            for root, dirnames, filenames in walk(POSTS_DIR):
                for filename in sorted(filenames):
                    if filename.startswith('.'):
                        continue
                    if not filename.endswith('.md'):
                        continue
                    curl('/posts/%s' % filename[:-len('.md')])

            stdout.write('\n')
            stdout.flush()

        # A non-empty diff means rendering changed versus the reference.
        local('git diff --no-index -- %s %s' % (reference, output))
Exemplo n.º 9
0
 def initialize_project(self):
     """Create the local virtualenv and install requirements (with ccache on PATH)."""
     logger.info('Intializing local worker environment')
     commands = (
         'virtualenv -p python2.7 env',
         'PATH=/usr/lib/ccache:/usr/lib64/ccache/bin:$PATH '
         'env/bin/pip install '
         '--download-cache /tmp/pip -r requirements.txt',
     )
     with quiet():
         for command in commands:
             local(command)
Exemplo n.º 10
0
def _hotswap(file, type, keywords):
    """Push the hotswap bundle to the remote host, run the hotswap tool against
    each game server on that host, and return {gameServer: success_bool}.

    Args:
        file: path fragment under /app/online/<GAME>; its directory must hold
            hotswap.zip and md5.txt.  (NOTE(review): shadows the py2 builtin
            ``file``; kept as-is so keyword callers are not broken.)
        type: name of the hotswap executable to run (attach/remote/update --
            see the chmod line below).  (NOTE(review): shadows builtin ``type``.)
        keywords: comma-separated patterns; each must be followed by a
            "reload succ" line in start.out for the swap to count as success.
    """
    file_with_full_path = '/app/online/{}{}'.format(GAME, file)
    file_path = os.path.dirname(file_with_full_path)
    #local('rsync -aqP {}/{{hotswap.zip,md5.txt}} {}@{}:{}/'.format(file_path, env.user, env.host_string, REMOTE_DIR))
    run('mkdir -p {}'.format(REMOTE_DIR))
    # Upload the bundle and its checksum file from the local bundle directory.
    with lcd(file_path):
        put('hotswap.zip', REMOTE_DIR)
        put('md5.txt', REMOTE_DIR)

    # Verify integrity, unpack, and make the hotswap tools executable.
    with cd(REMOTE_DIR):
        run('dos2unix md5.txt && md5sum -c md5.txt')
        run('unzip -q hotswap.zip')
        run('cd hotswap && chmod +x attach remote update')

    ret_value = {}

    for gameServer in LOCATE_GAME_SRVS[env.host_string]:
        # Truncate start.out so only fresh reload output is inspected below.
        with cd('/app/{}/backend/logs'.format(gameServer)):
            run('echo >start.out')

        with cd('{}/hotswap'.format(REMOTE_DIR)):
            pid = gameServer_pid(gameServer)
            run('./{} {}'.format(type, pid))

        with cd('/app/{}/backend/logs'.format(gameServer)):
            # All keywords must succeed; the for/else marks full success.
            for each_keyword in keywords.split(','):
                with quiet():
                    do_hotswap_success = run('grep --color=never -E -A 20 "reload.*{}" {} | grep --color=never "reload succ"'.format(each_keyword, 'start.out')).succeeded
                if not do_hotswap_success:
                    ret_value[gameServer] = False
                    break
            else:
                ret_value[gameServer] = True

    return ret_value
Exemplo n.º 11
0
    def realmem(self):
        """Return physical memory (MB) rounded up to the next power of two."""
        import math

        with settings(quiet()):
            raw = run("sysctl -n hw.realmem").strip()
        megabytes = float(raw) / 1024 / 1024
        return 2 ** int(math.ceil(math.log(megabytes, 2)))
Exemplo n.º 12
0
    def _all_gameServer_info(self):
        """
        Get all self.game info. It will get a dict like:

        { 'astd_17wan_1': '10.6.120.23',
          'astd_37wan_98': '10.4.5.5',
                     .
                     .
                     .
          'astd_37wan_8': '10.4.5.15'}
        """
        with quiet():
            result_info = local('''/app/opbin/work/bible/main.py serverlist -g %s -l %s -s '.*' ''' % (self.game, self.region), capture=True)

        # Each line is "name@ip"; key the result by "<game>_<name>".
        pairs = (line.split('@') for line in result_info.splitlines())
        return {'{}_{}'.format(self.game, name): ip for name, ip in pairs}
Exemplo n.º 13
0
    def run(self, src='prod', dest='local', *args, **kwargs):
        """
        Synchronizes the unversioned folders from one environment to another. (src: prod, dest: local)

        Typically this is used to download the WordPress uploads folder from the production server, as well as any other
        folders defined in the UNVERSIONED_FOLDERS config value. Note that this function DOES NOT copy/transfer any of the
        application code - that must be done instead using the "deploy" task.

        Example usage:

        - `fab rsync             # Default params, same as following command.`
        - `fab rsync:prod,local  # Downloads unversioned files from the production to the local server.`
        - `fab rsync:local,prod  # NOT RECOMMENDED - have not developed/tested this yet.`
        - `fab rsync:local,dev   # NOT RECOMMENDED - have not developed/tested this yet.`
        - `fab rsync:prod,dev    # NOT RECOMMENDED - have not tested this, nor is it necessary UNLESS the dev server is on a different server than the prod server. Also, not sure rsync supports one remote to another.`
        """
        # Loop variable renamed from `dir` to avoid shadowing the builtin.
        for folder in env.conf.unversioned_folders:
            cmd_vars = {
                'src_host': env[src]['hostname'],
                'dest_host': env[dest]['hostname'],
                'root': env[src]['root'],
                'dest_root': env[dest]['root'],
                'dir': folder,
                'extra_options': '--cvs-exclude',
            }
            if src == 'local':
                cmd = 'rsync -ravz %(extra_options)s %(root)s/%(dir)s/ %(dest_host)s:%(dest_root)s/%(dir)s' % cmd_vars
            else:
                cmd = 'rsync -ravz %(extra_options)s %(src_host)s:%(root)s/%(dir)s/ %(dest_root)s/%(dir)s' % cmd_vars
            print('Syncing unversioned files...')
            with quiet():
                local(cmd)
Exemplo n.º 14
0
def _local_resolver(domain):
    """Resolve ``domain`` with nslookup; return the address or raise on failure."""
    with quiet():
        resolved = local('''nslookup %s |grep "^Address" |grep -v '#'|awk '{print $2}' ''' % domain, capture=True)
    if not resolved:
        raise Exception('Fail to resolve {} to ip address'.format(domain))
    return resolved
Exemplo n.º 15
0
def main():
    """Parse pyrsnapshot CLI arguments and run the backup against the remote host."""
    parser = argparse.ArgumentParser(
        prog='pyrsnapshot',
        description='''\
    Pyrsnapshot will back up the contents of the current directory to the
    remote server and save it under the path given. Use the executing user's
    .ssh/ssh_config to configure the user and private key for the connection.
    ''',
    )
    # Renamed from `help` to avoid shadowing the builtin.
    help_template = 'Keep how many {} backups (default: {})'
    for adjective, default in zip(remote.ADJECTIVES, remote.DEFAULTS):
        parser.add_argument(
            '--{}'.format(adjective),
            type=int,
            default=default,
            help=help_template.format(adjective, default)
        )
    parser.add_argument(
        'remote',
        metavar='host:path',
        help='Backup to the remote host at the given path'
    )

    try:
        args = parser.parse_args()
        host, root = args.remote.split(':')
        if not root:
            raise Exception("You must specify the remote path to backup to.")
        with quiet():
            env.use_ssh_config = True
            execute(remote.pyrsnapshot, root, args, hosts=[host])
    finally:
        # Always tear down fabric's ssh connections, even on error.
        disconnect_all()
Exemplo n.º 16
0
def check_working_copy():
    """Abort with an error if the git working copy has uncommitted changes."""
    with quiet():
        dirty = local('git diff --quiet').failed
        if dirty:
            abort(red('Working copy is not clean'))
Exemplo n.º 17
0
def release(new_version):
    """Commit, tag and push ``new_version``, then register and upload to PyPI."""
    git_steps = (
        'git commit -am "new version {}"'.format(new_version),
        'git tag -a v{0} -m \'new version {0}\''.format(new_version),
        'git push origin master --tags',
    )
    with quiet():
        for step in git_steps:
            local(step)
    local("python setup.py register")
    local("python setup.py sdist upload -r pypi")
Exemplo n.º 18
0
def check_bucket_permissions():
    """Return True when the lambda's policy contains the expected bucket Sid."""
    with quiet():
        result = local(
            'aws lambda get-policy' +
            '    --function-name ' + LAMBDA_FUNCTION_NAME +
            AWS_CLI_STANDARD_OPTIONS,
            capture=True
        )

    if result.failed or result == '':
        return False

    result_decoded = json.loads(result)
    if not isinstance(result_decoded, dict):
        return False

    # The Policy field is itself a JSON-encoded document.
    policy = json.loads(result_decoded.get('Policy', '{}'))
    if not isinstance(policy, dict):
        return False

    return any(statement.get('Sid', '') == BUCKET_PERMISSION_SID
               for statement in policy.get('Statement', []))
def own_django_log():
    """ make sure logs/django.log is owned by www-data"""
    require('srvr', 'path', 'within_virtualenv', provided_by=env.servers)

    log_path = os.path.join(env.path, 'logs', 'django.log')
    with quiet():
        if run('ls {}'.format(log_path)).succeeded:
            sudo('chown www-data:www-data {}'.format(log_path))
Exemplo n.º 20
0
def test_enable_password_auth(sshd_config):
    """enable_password_auth should flip PasswordAuthentication to yes."""
    from fabtools.ssh import enable_password_auth

    enable_password_auth(sshd_config=SSHD_CONFIG)
    with quiet():
        has_yes = contains(SSHD_CONFIG, 'PasswordAuthentication yes', exact=True)
        assert has_yes
        has_no = contains(SSHD_CONFIG, 'PasswordAuthentication no', exact=True)
        assert not has_no
Exemplo n.º 21
0
 def decorator(*args, **kwargs):
     """Call the wrapped function ``f`` with all fabric output suppressed.

     A commented-out shell_env/environ-injection variant was removed as
     dead code.
     """
     with quiet():
         return f(*args, **kwargs)
Exemplo n.º 22
0
def sh_mktemp(base_path, directory=True, **kwargs):
    """Create a temp file (or directory, the default) under ``base_path`` via mktemp."""
    flags = ['--tmpdir={0}'.format(base_path)]
    if directory:
        flags.append('-d')
    with quiet():
        return run_cmd('mktemp {0}'.format(' '.join(flags)), **kwargs)
Exemplo n.º 23
0
 def reset(self, remote=None, branch=None, hard=False, files=None):
     """Run ``git reset`` against remote/branch, optionally --hard or limited to files."""
     with quiet():
         reset_args = ["%s/%s" % (remote, branch)]
         if hard:
             reset_args.append("--hard")
         if files:
             # Pass the file list as one space-joined argument, as before.
             reset_args.append(" ".join(files))
         self.git("reset", reset_args)
Exemplo n.º 24
0
def logs():
    '''delete cassandra logs'''
    with quiet():
        sudo("rm -f {0}/*".format(LOG_FOLDER))
        # Remove the YCSB run/load stdout+stderr capture files.
        for capture_file in (YCSB_RUN_OUT_FILE, YCSB_RUN_ERR_FILE,
                             YCSB_LOAD_OUT_FILE, YCSB_LOAD_ERR_FILE):
            sudo("rm {0}".format(capture_file))
        sudo("rm -f {0} {1}".format(DSTAT_SERVER,DSTAT_YCSB))
Exemplo n.º 25
0
 def process_untracked_files(self):
     """Offer to `git add` (and commit) any untracked files in the working copy."""
     with quiet():
         # NOTE(review): here self.git(...) appears to *build* a command
         # string (it is passed to local()), while below it is called for
         # its side effect -- confirm self.git's contract in the class.
         untracked_files = local(self.git("ls-files --other --exclude-standard"), capture=True)
         if untracked_files:
             answer = prompt(cyan("You have untracked files. Add them?(y/n):"), default="n")
             if answer == "y":
                 self.git("add", untracked_files.splitlines())
                 self.commit()
Exemplo n.º 26
0
def ubuntu_update():
    """Refresh package lists and apply pending upgrades on Ubuntu."""
    with quiet():
        run('aptitude update')
        upgrade = run('apt-get -y upgrade')
    # Was `upgrade.failed is False` -- a roundabout identity test; use
    # plain truthiness instead.
    if not upgrade.failed:
        puts('Distribution up to date')
def upload_file(local_file, remote_path, mode=0o644, owner="root:root",
                temp_dir="", changelog=False):
    """Upload a file to a remote host.

    ``local_file`` can be a filename or a seekable file-like object.  Globbing
    is not supported.

    ``remote_file`` should be a full filename, not just the directory.

    ``mode`` can be an integer (e.g. 0o755).

    ``changelog``, if True, adds a changelog message of the form "uploaded
    (unknown)".

    Bug: doesn't handle ``with cd(...):`` or ``with lcd(...):``.  Probably.

    Bug: doesn't set mode/ownership if the file exists and has the same content
    but different mode/ownership.

    Warning: is not suitable for uploading secrets (changes the mode after
    uploading the file), unless you take care to specify ``temp_dir`` to point
    to a non-world-readable area.

    Undocumented features that are subject to change without notice:
    ``mode`` can be a string or None; ``owner`` can be None.

    Returns True when the remote file was replaced, False when it already
    had identical content and was left untouched.
    """
    if isinstance(mode, int):
        # Normalize to the octal string form chmod expects.
        mode = '{:o}'.format(mode)
    assert_shell_safe(remote_path, mode or '', temp_dir)
    assert_shell_safe(owner or '', extra_allow=':')
    # A file-like object is anything with a callable read(); otherwise a path.
    local_is_path = not callable(getattr(local_file, 'read', None))
    if isinstance(local_file, StringIO) and not getattr(local_file, 'name', None):
        local_file.name = os.path.basename(remote_path)
    with closing(SFTP(env.host_string)) as ftp:
        if env.get('cwd'):
            # SFTP paths are relative to $HOME rather than fabric's cwd;
            # anchor the temp dir there explicitly.
            home = ftp.normalize('.')
            temp_dir = posixpath.join(home, temp_dir)
        # Deterministic per-host/per-target temp filename.
        tmp_path = posixpath.join(
            temp_dir, hashlib.sha1(env.host_string + remote_path).hexdigest())
        assert_shell_safe(tmp_path)
        ftp.put(local_file, tmp_path, use_sudo=False, mirror_local_mode=False,
                mode=None, local_is_path=local_is_path, temp_dir="")
        with quiet():
            # Compare against the existing file so unchanged content is a no-op.
            same = sudo("test -f {realfile} && cmp -s {tempfile} {realfile}".format(
                tempfile=tmp_path, realfile=remote_path)).succeeded
        if same:
            sudo("rm {tempfile}".format(tempfile=tmp_path))
            return False
        else:
            # Fix mode/owner on the temp copy first, then atomically move it
            # into place.
            if mode is not None:
                sudo('chmod {mode} {tempfile}'.format(mode=mode, tempfile=tmp_path))
            if owner:
                sudo('chown {owner} {tempfile}'.format(owner=owner, tempfile=tmp_path))
            sudo("mv {tempfile} {realfile}".format(tempfile=tmp_path,
                                                   realfile=remote_path))
            if changelog:
                changelog_append("# updated {}".format(remote_path))
            return True
def get_postfix_setting(setting):
    """Get the current value of a postfix setting"""
    assert_shell_safe(setting)
    with quiet():
        value = run("postconf -h {setting}".format(setting=setting))
    # postconf prints a warning (instead of failing) for unknown parameters;
    # treat that as "no value".
    if value.startswith('postconf: warning:'):
        return ''
    return value
Exemplo n.º 29
0
def prepare_server():
    """Install docker if missing, then create the project directory tree."""
    with quiet():
        has_docker = run('docker --version').succeeded
    if not has_docker:
        run('sudo apt-get install -y docker.io')
    run('mkdir -p {} {} {} {} {}'.format(
        env.project_path, env.items_path, env.logs_path, env.jobs_path,
        env.files_path))
def package_installed(package):
    """Check if the specified packages is installed."""
    assert_shell_safe(package)
    # XXX: doing this in a loop is slow :(
    with quiet():
        # XXX idea: return exists('/var/lib/dpkg/info/{}.list'.format(package))
        # caveats: libnss-myhostname:amd64.list :/
        dpkg_status = run("dpkg-query -W --showformat='${Status}' %s" % package)
    return dpkg_status == "install ok installed"
Exemplo n.º 31
0
    def attach(self, instance, user="******"):
        """ attach volume or snapshot

        Attaches this object's named EBS volume to ``instance`` (creating the
        volume from the latest snapshot if it doesn't exist), waits for it to
        become available, then attaches it as /dev/xvdf and polls until the
        device is visible over ssh.

        NOTE(review): the default for ``user`` looks like a scrubbed/censored
        value -- confirm the intended default username.
        """
        if isinstance(instance, str):
            instance = aws.get(instance, collections=aws.ec2.instances)

        # Point fabric at the target instance for the sudo polling below.
        fab.env.host_string = instance.public_ip_address
        fab.env.user = user
        volume = aws.get(self.name, collections=aws.ec2.volumes)

        if volume:
            # validate volume
            if volume.availability_zone != \
                       instance.placement["AvailabilityZone"]:
                raise Exception("volume and instance must be in same "
                                "availability zone")
        else:
            # create volume from snapshot
            snapshot = self.latest_snapshot()
            if snapshot is None:
                raise Exception("No volume or snapshot found "
                                "for %s" % self.name)
            r = aws.client.create_volume(
                SnapshotId=snapshot.id,
                AvailabilityZone=instance.placement["AvailabilityZone"],
                VolumeType="gp2")
            volume = aws.ec2.Volume(r["VolumeId"])
            aws.set_name(volume, self.name)

        # remove existing attachment
        if volume.attachments:
            self.detach()

        # wait until available
        while True:
            item = aws.client.describe_volumes(
                VolumeIds=[volume.id])["Volumes"][0]
            if item["State"] == "available":
                break
            log.info("waiting until volume available")
            sleep(15)
        log.info("volume available")

        # attach
        instance.attach_volume(VolumeId=volume.id, Device='/dev/xvdf')

        # wait until usable.
        while True:
            with fab.quiet():
                if fab.sudo("ls -l /dev/xvdf").succeeded:
                    break
            log.info("waiting until volume visible")
            sleep(1)
        log.info("volume attached")
Exemplo n.º 32
0
def own_django_log():
    """ make sure logs/django.log is owned by www-data"""
    if is_vagrant():
        # Nothing to do on a local vagrant box.
        return

    require('srvr', 'path', provided_by=env.servers)

    log_path = os.path.join(env.path, 'logs', 'django.log')
    with quiet():
        if exists(log_path):
            sudo('chown www-data:www-data {}'.format(log_path))
            sudo('chmod g+rw {}'.format(log_path))
Exemplo n.º 33
0
def wait_ssh():
    """ wait for ssh server """
    log.info("waiting for ssh server")
    while True:
        with fab.quiet():
            try:
                fab.sudo("ls")
                break
            except Exception:
                # Host not reachable yet; keep polling.  (Was a bare
                # `except:`, which also swallowed KeyboardInterrupt and
                # SystemExit, making the loop hard to interrupt.)
                pass
        sleep(1)
    log.info("ssh connected %s" % fab.env.host_string)
Exemplo n.º 34
0
    def generate_ssl_keystore(self, root_certificate, keystore_file, storepass, worker_home):
        """Upload the root certificate and (re)import it into the worker keystore.

        Args:
            root_certificate: local path of the root CA certificate.
            keystore_file: keystore filename, relative to perfrunner on the worker.
            storepass: keystore password.
            worker_home: remote home directory of the worker.
        """
        logger.info('Generating SSL keystore')
        remote_keystore = "{}/perfrunner/{}".format(worker_home, keystore_file)
        remote_root_cert = "{}/perfrunner/{}".format(worker_home, root_certificate)
        put(root_certificate, remote_root_cert)

        # Best-effort delete of any stale alias; quiet() ignores the failure
        # when the alias doesn't exist yet.
        # Bug fix: the command previously passed the literal word "storepass"
        # as the password instead of the ``storepass`` argument, so the delete
        # always failed against password-protected keystores.
        with quiet():
            run("keytool -delete -keystore {} -alias couchbase -storepass {}"
                .format(remote_keystore, storepass))
        run("keytool -importcert -file {} -storepass {} -trustcacerts "
            "-noprompt -keystore {} -alias couchbase"
            .format(remote_root_cert, storepass, remote_keystore))
Exemplo n.º 35
0
def update_index(force=False):
    """
    Update pkgin package definitions.
    """
    manager = MANAGER
    if not force:
        run_as_root("%(manager)s update" % locals())
        return
    with quiet():
        # clean the package cache
        run_as_root("%(manager)s clean" % locals())
    run_as_root("%(manager)s -f update" % locals())
Exemplo n.º 36
0
def test_abort_message_only_printed_once():
    """
    abort()'s SystemExit should not cause a reprint of the error message
    """
    # No good way to test the implicit stderr print which sys.exit/SystemExit
    # perform when they are allowed to bubble all the way to the top, so run
    # a subprocess and inspect its stderr instead.
    with quiet():
        outcome = local("fab -f tests/support/aborts.py kaboom", capture=True)
    # With the #1318 regression present, stderr gains an extra "It burns!"
    # at the end.
    eq_(outcome.stderr, "Fatal error: It burns!\n\nAborting.")
Exemplo n.º 37
0
def create_virtualenv():
    """Create the project's virtualenv directory unless it already exists."""
    require('srvr', 'path', 'within_virtualenv', provided_by=env.servers)
    env_vpath = get_virtual_env_path()
    with quiet():
        already_there = run('ls {}'.format(env_vpath)).succeeded
    if already_there:
        print(green(
            'virtual environment at [{}] exists'.format(env_vpath)))
        return

    # All we need is a .venv dir in the project folder;
    # 'pipenv install' will set it up first time
    print(yellow('setting up virtual environment in [{}]'.format(env_vpath)))
    run('mkdir {}'.format(env_vpath))
Exemplo n.º 38
0
 def unmount(self):
     """ unmount """
     with fab.quiet():
         if fab.sudo("umount /v1").succeeded:
             log.info("volume dismounted")
             return
         log.warning("dismount failed. trying to force.")
         # Kill whatever is holding the mount, then report the outcome.
         if fab.sudo("fuser -km /v1").succeeded:
             log.info("volume dismounted")
         else:
             log.warning("failed to force dismount")
Exemplo n.º 39
0
def status_rabbitmq():
    """Print and return the rabbitmq service status derived from rabbitmqctl."""
    with settings(warn_only=True), quiet():
        res = local('sudo rabbitmqctl status')
    # rabbitmqctl exits 2 or 69 when the node is down, 0 when running.
    if res.return_code in (2, 69):
        status = STATUS.STOPPED
    elif res.return_code == 0:
        status = STATUS.RUNNING
    else:
        raise Exception("Rabbitmq: unknown status " + str(res.return_code))
    print(status)
    print_status(status, 'rabbitmq')
    return status
Exemplo n.º 40
0
def allow(user, port):
    """Grant ``user`` authbind access to ``port`` (file owned by user, mode 0500)."""
    path = os.path.join('/etc/authbind/byport', str(port))
    needsUpdate = True
    with quiet():
        state = run('/usr/bin/stat -c %U:%a {}'.format(path))
        # Up to date when the file is already user-owned with mode 500.
        needsUpdate = state.strip().split(':') != [user, '500']
    if not needsUpdate:
        return
    if not hasSudoCapabilities():
        abort('Trying to give {} access to port {} but have insufficient '
              'capabilities.'.format(user, port))
    sudo('/bin/touch {}'.format(path))
    sudo('/bin/chown {0}:{0} {1}'.format(user, path))
    sudo('/bin/chmod 0500 {}'.format(path))
Exemplo n.º 41
0
def docker_check_service_base():
    """Build the consul service-base image when `docker inspect` can't find it.

    .. versionadded:: 0.2
    """
    with quiet():
        log.info('Checking if we have a consul image ...')
        env.arch = get_local_arch()
        inspect_result = sudo('%(docker)s inspect service-base:test' % env)

    # docker inspect exits 1 when the image does not exist.
    if inspect_result.return_code == 1:
        docker_generate_service_base()
Exemplo n.º 42
0
def sh_chown(path, user=None, group=None, recursive=False, **kwargs):
    """Change ownership of ``path`` via chown.

    Bug fix: ``mask`` was only assigned when ``user`` was provided, so a
    group-only (or argument-less) call raised UnboundLocalError at the
    format step.  Group-only now uses chown's ``:group`` form, and calling
    with neither user nor group raises ValueError explicitly.
    """
    if not user and not group:
        raise ValueError('sh_chown requires at least one of user or group')
    mask = '{0}'.format(user) if user else ''
    if group:
        mask = '{0}:{1}'.format(mask, group)

    with quiet():
        extra_args = ' -R' if recursive else ''
        return run_cmd('chown {0} {1} {2}'.format(extra_args, mask, path),
                       capture=True,
                       **kwargs)
Exemplo n.º 43
0
def up_to_date(env, cmd, version, args=None, stdout_flag=None):
    """Check if the given command is up to date with the provided version.
    """
    if shared._executable_not_on_path(cmd):
        return False
    full_cmd = cmd + " " + " ".join(args) if args else cmd
    with quiet():
        out = env.safe_run_output(full_cmd)
    # Either parse the version out of a flagged stdout line, or take the
    # whole (stripped) output as the version string.
    installed = (_parse_from_stdoutflag(out, stdout_flag) if stdout_flag
                 else out.strip())
    return LooseVersion(installed) >= LooseVersion(version)
Exemplo n.º 44
0
def get_commit_id():
    '''Find the commit id of the checked out repo we are in.'''
    # Prefer plain svn metadata; fall back to git-svn when svn yields nothing.
    probes = (('svn', 'svn info'), ('git', 'git svn info'))
    for tool, command in probes:
        if not fs.sh_which(tool):
            continue
        with quiet():
            content = run_cmd(command)
            if content:
                revision = text.safefind('(?m)^Revision: ([0-9]+)', content)
                if revision:
                    return revision
    return ''
Exemplo n.º 45
0
    def install_devices(self):
        """Mount the bootstrap CD and USB devices when not already mounted.

        Returns the resolved [cd_device, usb_device] names.
        """
        mounted = self.mounts
        devices = []
        # (config option, default device, mount point, mount command)
        specs = (
            ('bootstrap-cd-device', 'cd0', '/rw/cdrom',
             'test -e /dev/{dev} && mount_cd9660 /dev/{dev} /cdrom || true'),
            ('bootstrap-usb-device', 'da0a', '/rw/media',
             'test -e /dev/{dev} && mount -o ro /dev/{dev} /media || true'),
        )
        with settings(quiet()):
            for option, default, mount_point, mount_cmd in specs:
                dev = env.instance.config.get(option, default)
                devices.append(dev)
                marker = '/dev/{dev} on {mp}'.format(dev=dev, mp=mount_point)
                if marker not in mounted:
                    # `|| true` keeps the run from failing on absent devices.
                    run(mount_cmd.format(dev=dev))
        return devices
    def __init__(self, filename, remote_dir=REMOTE_DIR):
        """Verify *filename* exists in the remote cwd, then back it up.

        The backup lands in ``remote_dir/<first path segment of filename>``.
        Raises Exception when the file is missing.
        """
        self.filename = filename
        self.dir = run('pwd')

        with quiet():
            file_present = run('test -f {}'.format(filename)).succeeded

        if not file_present:
            raise Exception('File {}/{} NOT exists'.format(self.dir, filename))

        # The leading path component names the per-file backup subdirectory.
        tag = self.filename.split('/')[0]
        backup_dir = '{}/{}'.format(remote_dir, tag)
        run('[ -d {0} ] || mkdir -p {0}'.format(backup_dir))
        run('cp {} {}/'.format(self.filename, backup_dir))
Exemplo n.º 47
0
def wait_ssh():
    """Block until the remote ssh server accepts commands.

    Polls with ``sudo ls`` once per second until a run succeeds.
    """
    apps.setdebug()
    log.info("waiting for ssh server")
    while True:
        with fab.quiet():
            # Previously `r` stayed unbound when fab.sudo() raised on the
            # first attempt, turning the probe into a NameError.
            r = None
            try:
                r = fab.sudo("ls")
            except Exception:
                # connection not up yet -- keep polling
                pass
            if r is not None and r.succeeded:
                break
        sleep(1)
    log.info("ssh connected %s"%fab.env.host_string)
Exemplo n.º 48
0
def release():
    """Bump the patch version in setup.py, commit, tag, push and upload."""
    with quiet():
        # Read the current VERSION line, e.g. VERSION = "1.2.3"
        version_row = local("grep 'VERSION = ' setup.py", capture=True)
        current = version_row.split(' = ')[1].strip()[1:-1]
        parts = current.split('.')
        # Increment the last (patch) component only.
        parts[-1] = str(int(parts[-1]) + 1)
        new_version = '.'.join(parts)
        local("sed -isetup.py 's/VERSION =.*/VERSION = \"{0}\"/g' setup.py".
              format(new_version))
        local('git commit -am "new version {0}"'.format(new_version))
        local('git tag -a v{0} -m \'new version {0}\''.format(new_version))
        local('git push origin master --tags')
    local("python setup.py register")
    local("python setup.py sdist upload -r pypi")
Exemplo n.º 49
0
    def __init__(self, filename, remote_dir=REMOTE_DIR):
        """Ensure *filename* exists on the server, then copy it to a backup.

        The backup directory is keyed on the third path segment of the
        remote working directory. Raises Exception when the file is missing.
        """
        self.filename = filename

        with quiet():
            present = run('test -f {}'.format(filename)).succeeded

        if not present:
            raise Exception('File {} NOT exists under backend/apps/'.format(filename))

        self.dir = run('pwd')
        # e.g. /app/<gameServer>/... -> take the server name component.
        server_name = self.dir.split('/')[2]
        backup_dir = '{}/{}'.format(remote_dir, server_name)
        run('[ -d {0} ] || mkdir -p {0}'.format(backup_dir))
        run('cp {} {}/'.format(self.filename, backup_dir))
Exemplo n.º 50
0
def pull_db(dump='backup'):
    """Refresh the local database from a remote dump.

    *dump* selects the source: 'direct' (pg_dump straight from the remote
    DB), 'app' (dump on the app server, then rsync here), 'backup' (fetch
    the latest backup file), or 'local' (reuse an existing stargeo.sql.gz).
    """
    # Close .env promptly instead of leaking the handle from open().read().
    with open('.env') as env_file:
        app_env = honcho.environ.parse(env_file.read())
    remote_db = dj_database_url.parse(app_env['REAL_DATABASE_URL'])
    local_db = dj_database_url.parse(app_env['DATABASE_URL'])

    # Make and download database dump
    if dump == 'direct':
        # Dump directly from remote database with local pg_dump
        print('Making database dump...')
        local(DUMP_COMMAND % remote_db)
    elif dump == 'app':
        # Alternative: dump to app-server than rsync here,
        #              useful with slow or flaky internet connection.
        print('Making database dump...')
        run(DUMP_COMMAND % remote_db)
        print('Downloading dump...')
        local(
            'rsync -avP stargeo:/home/ubuntu/app/stargeo.sql.gz stargeo.sql.gz'
        )
        run('rm stargeo.sql.gz')
    elif dump == 'backup':
        # Alternative: fetch latests db backup
        local(
            'rsync -avP stargeo:/home/ubuntu/db-backups/stargeo.sql.gz stargeo.sql.gz'
        )
    elif dump == 'local':
        print('Using local dump...')
        if not os.path.exists('stargeo.sql.gz'):
            print(
                red('Local database dump not found (stargeo.sql.gz).\n'
                    'Please use "remote" or "app" dump.'))
            return

    print('Dropping %(NAME)s...' % local_db)
    with quiet():
        local('psql -Upostgres -c "drop database if exists %(NAME)s"' %
              local_db)

        # Check if database is deleted
        if local('psql -Upostgres -d %(NAME)s -c ""' % local_db,
                 capture=True).succeeded:
            print(
                red('Database not dropped.\n'
                    'Disconnect all the clients and retry with "fab pull_db:local"'
                    ))
            return

    # Load dump
    local('gzip -cd stargeo.sql.gz | psql -Upostgres -f -')
Exemplo n.º 51
0
def set_docker_folder(folder="/var/lib"):
    """ set location of docker images and containers
    for xdrive volume = "/v1"
    """
    # Point docker's graph directory at the new location via daemon.json.
    daemon_config = '{"graph":"%s/docker"}' % folder
    fab.sudo("mkdir -p /etc/docker")
    fab.put(io.StringIO(daemon_config), "/etc/docker/daemon.json",
            use_sudo=True)

    # Make sure the target folder is present before docker restarts.
    fab.sudo(f"mkdir -p {folder}/docker")

    # Restart quietly so the daemon picks up the new storage location.
    with fab.quiet():
        fab.sudo("service docker restart")
Exemplo n.º 52
0
def run(params):
    """ run container """
    # Probe for a GPU by checking whether nvidia-smi succeeds.
    with fab.quiet():
        gpu_probe = fab.sudo("nvidia-smi")

    if not gpu_probe.succeeded:
        # cpu-only host: plain docker
        fab.run(f"docker run {params}")
        return

    # gpu host: run through nvidia-docker and persist the driver volumes
    fab.run(f"nvidia-docker run {params}")
    volumepath = "/v1/var/lib/nvidia-docker/volumes"
    if exists(volumepath):
        fab.sudo("cp -r --parents /var/lib/nvidia-docker/volumes /v1")
Exemplo n.º 53
0
    def _download_file():
        """Fetch the target file from every game server mapped to this host.

        Raises Exception as soon as one server is missing the file.
        """
        for game_server in locate_game_servers[env.host_string]:
            local_path = '{}/{}/'.format(local_root_path, game_server)
            local('su - astd -c "mkdir -p {}"'.format(local_path))
            target_file = '/app/{}/{}'.format(game_server, remote_file)
            with quiet():
                present = run('test -f {}'.format(target_file)).succeeded

            # Guard clause: abort the whole download on the first miss.
            if not present:
                raise Exception('File {} NOT exists on {}'.format(
                    target_file, game_server))
            get(target_file, local_path)

        local('chown -R astd.astd {}'.format(local_root_path))
Exemplo n.º 54
0
def matchServer_exists(matchServer, ip):
    """Raise if *matchServer* is already in /etc/hosts or has a dir on *ip*."""
    with quiet():
        in_hosts = local(
            '''grep "\\b{}\\b" /etc/hosts '''.format(matchServer)).succeeded
    # Guard clause: a hosts entry means the server is already provisioned.
    if in_hosts:
        raise Exception(
            '''The match server {} already exists in /etc/hosts'''.format(
                matchServer))

    dir_present = execute(remote_dir_exists,
                          '/app/{}'.format(matchServer),
                          hosts=[ip])[ip]
    if dir_present:
        raise Exception(
            '''The match dir: /app/{} already exists on {}'''.format(
                matchServer, ip))
Exemplo n.º 55
0
def start_workers():
    '''starts compute + site workers in the background, use view_workers to look at the output'''
    # NOTE: uses Python 2 print statements -- this fabfile predates Python 3.
    with quiet():
        # Bail out early when tmux is not installed at all.
        if not local('tmux -V').succeeded:
            print 'Please install tmux before running this command, i.e. "brew install tmux"'
            return
        _print("Starting tmux...")

        # An existing session means the workers are already running.
        if local('tmux has-session -t codalab_workers').succeeded:
            print green("session already started! fab view_workers to view it.")
            return

        # tmux.sh presumably creates the session and launches the workers
        # inside it -- the script itself is not visible here.
        if local('./tmux.sh').succeeded:
            print green("done")
        else:
            print red("could not start workers!")
Exemplo n.º 56
0
def start():
    '''start cassandra in all nodes in order'''
    with cd(CODE_DIR):
        # Retry loop: launch cassandra in a detached screen, wait briefly,
        # then poll for the java process. The for/else arm runs abort()
        # only when every attempt failed to produce a running process.
        for i in range(MAX_RETRIES):
            cassandra_bin = path.join(CODE_DIR, "bin", "cassandra")
            sudo("screen -d -m {0} -f".format(cassandra_bin), pty=False)
            time.sleep(2)  # give the JVM a moment to come up before probing
            with quiet():
                # The [a] in the pattern keeps pgrep from matching itself.
                res = sudo("pgrep -f 'java.*c[a]ssandra'")
                if not res.failed:
                    break
                print('Starting cassandra failed at {0}. Retry {1}.'.format(
                    env.host_string, i))
        else:
            abort('starting cassandra failed at node {0}.'.format(
                env.host_string))
Exemplo n.º 57
0
def distroName():
    """
    Get the name of the distro.
    """
    with quiet():
        # lsb_release gives the canonical answer when available.
        lsb = run('/usr/bin/lsb_release --id --short', warn_only=True)
        if lsb.succeeded:
            return lsb.lower()

        # Fallback: probe for well-known per-distro release files.
        for name, marker in (('centos', '/etc/centos-release'),
                             ('fedora', '/etc/fedora-release')):
            if succeeds('/usr/bin/test -f {}'.format(marker)):
                return name
Exemplo n.º 58
0
def uninstall():
    """
    Uninstall Presto after stopping the services on all nodes
    """
    stop()

    # Two rpm package names are in circulation, so try each in turn.
    with quiet():
        first_try = sudo('rpm -e presto')
    if first_try.succeeded:
        print('Package uninstalled successfully on: ' + env.host)
        return

    second_try = sudo('rpm -e presto-server-rpm')
    if second_try.succeeded:
        print('Package uninstalled successfully on: ' + env.host)
Exemplo n.º 59
0
def checksum(filename, *files_or_dirs):
    # Generator used as a change-detection guard: yields True when the
    # tracked paths differ from the recorded checksum, then refreshes the
    # checksum after the caller's work completes. Presumably wrapped with
    # @contextmanager at the definition/call site -- not visible here.
    # NOTE(review): the "(unknown)" tokens below look like scrubbed
    # checksum-file paths -- confirm the real target before relying on this.
    # NOTE(review): the `filename` parameter is accepted but never used in
    # this body.
    paths = ' '.join(files_or_dirs)
    # check whether the files have changed (or the checksum file does not exist at all)
    with quiet():
        if not sudo(
                f'find {paths} -type f -print0 | sort -z | xargs -0 tar cf - | shasum -c (unknown)'
        ).failed:
            modified = False
        else:
            modified = True
    yield modified
    # compute checksum for specified paths
    if modified:
        sudo(
            f'find {paths} -type f -print0 | sort -z | xargs -0 tar cf - | shasum > (unknown)'
        )
Exemplo n.º 60
0
def schedule(spider, args=''):
    """Create the job/items/log/files paths for *spider* and launch a crawl
    in a detached docker container.

    Job numbers are sequential per spider: the next number after the
    highest one found in the jobs directory (1 when none exist).
    """
    jobs_dir = join(env.jobs_path, env.project, spider)
    with quiet():
        run('mkdir -p {}'.format(jobs_dir))
        all_jobs = run('ls -1 {}'.format(jobs_dir))
    # Materialize the job numbers: under Python 3 `map` returns a lazy map
    # object that is ALWAYS truthy, which made the emptiness check below
    # wrong; a list comprehension (and a raw regex string) behaves the same
    # on both Python 2 and 3.
    jobs = [int(j) for j in re.findall(r'\d+', all_jobs)]
    if jobs:
        latest_job = max(jobs)
    else:
        latest_job = 0
    job = latest_job + 1
    job_path = join(jobs_dir, str(job))

    run('touch {}'.format(job_path))

    items_dir = join(env.items_path, env.project, spider)
    run('mkdir -p {}'.format(items_dir))
    items_path = join(items_dir, str(job) + '.jl')
    run('touch {}'.format(items_path))

    logs_dir = join(env.logs_path, env.project, spider)
    run('mkdir -p {}'.format(logs_dir))
    logs_path = join(logs_dir, str(job) + '.log')
    run('touch {}'.format(logs_path))

    files_dir = join(env.files_path, env.project, spider, str(job))
    run('mkdir -p {}'.format(files_dir))

    # Launch the crawl detached; volumes map the per-job paths into /data.
    run('sudo docker run -d -t -i '
        '--name scrapy---{project}---{spider}---{job} '
        '-v {job_path}:/data/job '
        '-v {items_path}:/data/items.jl '
        '-v {logs_path}:/data/log.log '
        '-v {files_dir}:/data/files '
        'scrapy-{project} '
        'scrapy crawl {spider} {args} '
        '-s FILES_DIR=/data/files '
        '-s JOB_PATH=/data/job '
        '-o /data/items.jl '
        '--logfile /data/log.log'.format(job_path=job_path,
                                         items_path=items_path,
                                         logs_path=logs_path,
                                         files_dir=files_dir,
                                         project=env.project,
                                         spider=spider,
                                         job=job,
                                         args=args))