Example #1
def display_command(command):
    """
    Print command function's docstring, then exit. Invoked with -d/--display.
    """
    # Sanity check
    command = command.replace('-', '_')
    cmd_string = command.replace('_', '-')
    if command not in commands:
        abort("Command '%s' not found, exiting." % cmd_string)
    cmd = commands[command]

    # figure out arg spec
    while hasattr(cmd, '_decorated'):
        # descend through decorators
        cmd = cmd._decorated
    argspec = inspect.getargspec(cmd)
    if filter(lambda x: x, argspec):
        args = "Arguments: " + inspect.formatargspec(*argspec)[1:-1]
    else:
        args = "Arguments: None"

    # Print out nicely presented docstring if found
    if cmd.__doc__:
        print("Displaying detailed information for command '%s':" % cmd_string)
        print(indent(args))
        print('')
        print(indent(cmd.__doc__, strip=True))
        print('')
    # Or print notice if not
    else:
        print("No detailed information available for command '%s':" % cmd_string)
        print(indent(args))
    sys.exit(0)
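The `while hasattr(cmd, '_decorated')` loop unwraps Fabric's own decorators before the argspec is inspected. A minimal sketch of where that attribute comes from, assuming a Fabric 1.x-style decorator (this `hosts` is illustrative, not the library's exact source):

from functools import wraps

def hosts(*host_list):
    # Illustrative decorator: wraps the task but keeps a reference to the
    # original function so introspection code can descend to it.
    def attach_hosts(func):
        @wraps(func)
        def inner(*args, **kwargs):
            return func(*args, **kwargs)
        inner.hosts = list(host_list)
        inner._decorated = func  # what display_command() descends through
        return inner
    return attach_hosts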
Example #2
def delete_branch(branch_name):
    """ Delete branch """
    if branch_name == get_current_branch():
        abort('You have to switch to another branch')
    with fab_settings(warn_only=True):
        local("git branch -D %s" % branch_name)
        local("git push origin --delete %s" % branch_name)
Example #3
def ssh_config(host_string=None):
    """
    Return ssh configuration dict for current env.host_string host value.

    Memoizes the loaded SSH config file, but not the specific per-host results.

    This function performs the necessary "is SSH config enabled?" checks and
    will simply return an empty dict if not. If SSH config *is* enabled and the
    value of env.ssh_config_path is not a valid file, it will abort.

    May give an explicit host string as ``host_string``.
    """
    from fabric.state import env
    if not env.use_ssh_config:
        return {}
    if '_ssh_config' not in env:
        try:
            conf = ssh.SSHConfig()
            path = os.path.expanduser(env.ssh_config_path)
            with open(path) as fd:
                conf.parse(fd)
                env._ssh_config = conf
        except IOError:
            abort("Unable to load SSH config file '%s'" % path)
    host = parse_host_string(host_string or env.host_string)['host']
    return env._ssh_config.lookup(host)
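For reference, paramiko's `SSHConfig.lookup()` returns a plain dict of the options matching the host; a hedged usage sketch (the host string is hypothetical):

conf = ssh_config('deploy@web1')
hostname = conf.get('hostname', 'web1')   # canonical HostName, if set
user = conf.get('user')                   # None unless the file sets User
port = int(conf.get('port', 22))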
Example #4
def _setup_path():
    """
    Assigns all the various paths that will be needed (to the env object)
    in deploying code, populating config templates, etc.
    """
    env.sup_template_path = posixpath.join(posixpath.abspath(settings.__file__), settings.SUPERVISOR_TEMPLATE_PATH)
    env.project_root = settings.PROJECT_ROOT
    env.www_root = posixpath.join(env.project_root, 'www', env.environment)
    env.log_dir = posixpath.join(env.www_root, 'log')
    env.code_root = posixpath.join(env.www_root, 'code_root')
    env.project_media = posixpath.join(env.code_root, 'media')
    env.project_static = posixpath.join(env.project_root, 'static')
    # Not a required setting; the default name should be sufficient
    env.virtualenv_name = getattr(settings, 'PYTHON_ENV_NAME', 'python_env')
    env.virtualenv_root = posixpath.join(env.www_root, env.virtualenv_name)
    env.services_root = posixpath.join(env.project_root, 'services')
    env.httpd_services_root = posixpath.join(env.services_root, 'apache')
    env.httpd_services_template_name = '%(project)s.conf' % env
    env.httpd_remote_services_template_path = posixpath.join(env.httpd_services_root, env.httpd_services_template_name)
    env.supervisor_conf_root = posixpath.join(env.services_root, 'supervisor')
    env.supervisor_conf_path = posixpath.join(env.supervisor_conf_root, 'supervisor.conf')
    env.supervisor_init_template_path = settings.SUPERVISOR_INIT_TEMPLATE
    if env.os == 'ubuntu':
        env.httpd_remote_conf_root = '/etc/apache2/sites-enabled'
        env.httpd_user_group = 'www-data'
    elif env.os == 'redhat':
        env.httpd_remote_conf_root = '/etc/httpd/conf.d'
        env.httpd_user_group = 'apache'
    else:
        utils.abort('In Web module. Remote operating system ("%(os)s") not recognized. Aborting.' % env)
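Note the consistent use of posixpath.join rather than os.path.join: the joined paths describe the remote (POSIX) host, so they must keep forward slashes even when the fabfile itself runs on Windows. A quick illustration:

import posixpath

# Always produces forward slashes, regardless of the local OS:
print(posixpath.join('/var/www', 'production', 'log'))  # /var/www/production/log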
Example #5
def checkoutCode(voltdbGit, proGit, rbmqExportGit, gitloc):
    global builddir
    # clean out the existing dir
    run("rm -rf " + builddir)
    # make the build dir again
    run("mkdir -p " + builddir)
    # change to it
    with cd(builddir):
        # do the checkouts, collect checkout errors on both community &
        # pro repos so user gets status on both checkouts
        message = ""
        run("git clone -q %s/voltdb.git" % gitloc)
        result = run("cd voltdb; git checkout %s" % voltdbGit, warn_only=True)
        if result.failed:
            message = "VoltDB checkout failed. Missing branch %s." % rbmqExportGit

        run("git clone -q %s/pro.git" % gitloc)
        result = run("cd pro; git checkout %s" % proGit, warn_only=True)
        if result.failed:
            message += "\nPro checkout failed. Missing branch %s." % rbmqExportGit

        #rabbitmq isn't mirrored internally, so don't use gitloc
        run("git clone -q [email protected]:VoltDB/export-rabbitmq.git")
        result = run("cd export-rabbitmq; git checkout %s" % rbmqExportGit, warn_only=True)
        # Probably ok to use master for export-rabbitmq.
        if result.failed:
            print "\nExport-rabbitmg branch %s checkout failed. Defaulting to master." % rbmqExportGit

        if len(message) > 0:
            abort(message)

        return run("cat voltdb/version.txt").strip()
Example #6
def set_project(project=None):
    """
    Checks if project is set, and if not it will prompt you to enter
    a valid project
    """
    if 'project' not in env.params:
        
        config_folder = "%s/config" % (env.home_path)
        available_projects = []
        for tmp_project in os.listdir(config_folder):
            if os.path.isdir(os.path.join(config_folder, tmp_project)):
                available_projects.append(tmp_project)
        
        if len(available_projects) == 0:
            abort(red("No projects available."))
            
        if not project:
            print(green("Available projects :"))
            print("")
            for project in available_projects:
                print("- %s" % project)
             
            print("")
            project = prompt('Enter project name : ', default=available_projects[0])
        
        if project not in available_projects:
            print(red("`%s` is not a valid project !" % project))
            set_project()
        else:
            env.params['project'] = project
Example #7
def set_environment(environment=None):
    """
    Checks if environment is set, and if not it will prompt you to enter
    a valid environment
    """
    if 'environment' not in env.params:
        if len(env.environments) == 0:
            abort(red("No environmens available."))
        
        if not environment:
            print(green("Available environments :"))
            print("")
            for environment in env.environments:
                print("- %s" % environment)
            
            print("")
            environment = prompt('Enter environment : ', default=env.environments[0])
        
        if environment not in env.environments:
            print("")
            print(red("`%s` is not a valid environment !" % environment))
            set_environment()
        else:
            # Set environment settings
            env.params['environment'] = environment
    
            config.environment()
Example #8
def build(name,
          debs_path=None,
          path_to_missile=None,
          app_version=None,
          env_version=None,
          service_version=None,
          webcallback=None):
    """
    Build the package for the working copy remotely via trebuchet.
    <name> of the working copy folder.
    <debs_path> path to where the DEB package should be created into.
    <path_to_missile> relative path to the missile file for trebuchet
    <app_version> version specific for the application package built.
    <env_version> version specific for the lib/environment package built.
    <service_version> version specific for the services packages built.
    <webcallback> web URI to send Trebuchet callbacks to.
    """
    # Get value from .gachetterc
    debs_path = debs_path if 'debs_path' not in env else env.debs_path
    if debs_path is None:
        abort("""
            Either specify the debs_path in your call or add it to the .gachetterc file.""")

    wc = WorkingCopy(name)
    wc.set_version(app=app_version, env=env_version, service=service_version)
    wc.build(debs_path, path_to_missile, webcallback, trebuchet_bin=trebuchet_bin)
Example #9
def rollback():
    if exists(backup_dir):
        puts('> Rolling back to previous deploy')
        run('mv %s %s' % (target_dir, staging_dir))
        run('mv %s %s' % (backup_dir, target_dir))
    else:
        abort('Rollback failed, no backup exists')
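The deploy-side counterpart is not shown; presumably it is what creates `backup_dir` in the first place. A hypothetical sketch of that step, reusing `target_dir`, `staging_dir` and `backup_dir` from above:

def promote_staging():
    # Hypothetical: keep the previous release around for rollback(),
    # then promote the freshly staged build into place.
    if exists(target_dir):
        run('mv %s %s' % (target_dir, backup_dir))
    run('mv %s %s' % (staging_dir, target_dir))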
Example #10
def sync():
    """Rysnc local states and pillar data to the master, and checkout margarita."""
    # Check for missing local secrets so that they don't get deleted
    # project.rsync_project fails if host is not set
    sudo("mkdir -p /srv")
    if not have_secrets():
        get_secrets()
    else:
        # Check for differences in the secrets files
        for environment in [env.environment]:
            remote_file = os.path.join('/srv/pillar/', environment, 'secrets.sls')
            with lcd(os.path.join(CONF_ROOT, 'pillar', environment)):
                if files.exists(remote_file):
                    get(remote_file, 'secrets.sls.remote')
                else:
                    local('touch secrets.sls.remote')
                with settings(warn_only=True):
                    result = local('diff -u secrets.sls.remote secrets.sls')
                    if result.failed and files.exists(remote_file) and not confirm(
                            red("Above changes will be made to secrets.sls. Continue?")):
                        abort("Aborted. File have been copied to secrets.sls.remote. " +
                              "Resolve conflicts, then retry.")
                    else:
                        local("rm secrets.sls.remote")
    salt_root = CONF_ROOT if CONF_ROOT.endswith('/') else CONF_ROOT + '/'
    project.rsync_project(local_dir=salt_root, remote_dir='/tmp/salt', delete=True)
    sudo('rm -rf /srv/salt /srv/pillar')
    sudo('mv /tmp/salt/* /srv/')
    sudo('rm -rf /tmp/salt/')
    execute(margarita)
Example #11
def setup_minion(*roles):
    """Setup a minion server with a set of roles."""
    require('environment')
    for r in roles:
        if r not in VALID_ROLES:
            abort('%s is not a valid server role for this project.' % r)
    config = {
        'master': 'localhost' if env.master == env.host else env.master,
        'output': 'mixed',
        'grains': {
            'environment': env.environment,
            'roles': list(roles),
        },
        'mine_functions': {
            'network.interfaces': [],
            'network.ip_addrs': []
        },
    }
    _, path = tempfile.mkstemp()
    with open(path, 'w') as f:
        yaml.dump(config, f, default_flow_style=False)
    sudo("mkdir -p /etc/salt")
    put(local_path=path, remote_path="/etc/salt/minion", use_sudo=True)
    # install salt minion if it's not there already
    install_salt(SALT_VERSION, master=False, minion=True, restart=True)
    # queries server for its fully qualified domain name to get minion id
    key_name = run('python -c "import socket; print socket.getfqdn()"')
    execute(accept_key, key_name)
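For illustration, with hypothetical values `env.environment = 'staging'` and `roles = ('web',)` on a non-master host, the `yaml.dump(..., default_flow_style=False)` call writes an `/etc/salt/minion` along these lines (PyYAML sorts top-level keys):

grains:
  environment: staging
  roles:
  - web
master: salt-master.example.com
mine_functions:
  network.interfaces: []
  network.ip_addrs: []
output: mixed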
Example #12
def _add_merged_attributes(node, all_recipes, all_roles):
    """Merges attributes from cookbooks, node and roles

    Chef Attribute precedence:
    http://wiki.opscode.com/display/chef/Attributes#Attributes-AttributeTypeandPrecedence
    LittleChef implements, in precedence order:
        - Cookbook default
        - Role default
        - Node normal
        - Role override

    NOTE: In order for cookbook attributes to be read, they need to be
        correctly defined in its metadata.json

    """
    # Get cookbooks from extended recipes
    attributes = {}
    for recipe in node['recipes']:
        # Find this recipe
        found = False
        for r in all_recipes:
            if recipe == r['name']:
                found = True
                for attr in r['attributes']:
                    if r['attributes'][attr].get('type') == "hash":
                        value = {}
                    else:
                        value = r['attributes'][attr].get('default')
                    # Attribute dictionaries are defined as a single
                    # compound key. Split and build proper dict
                    build_dct(attributes, attr.split("/"), value)
        if not found:
            error = "Could not find recipe '{0}' while ".format(recipe)
            error += "building node data bag for '{0}'".format(node['name'])
            abort(error)

    # Get default role attributes
    for role in node['roles']:
        for r in all_roles:
            if role == r['name']:
                update_dct(attributes, r.get('default_attributes', {}))

    # Get normal node attributes
    non_attribute_fields = [
        'id', 'name', 'role', 'roles', 'recipes', 'run_list', 'ipaddress']
    node_attributes = {}
    for key in node:
        if key in non_attribute_fields:
            continue
        node_attributes[key] = node[key]
    update_dct(attributes, node_attributes)

    # Get override role attributes
    for role in node['roles']:
        for r in all_roles:
            if role == r['name']:
                update_dct(attributes, r.get('override_attributes', {}))
    # Merge back to the original node object
    node.update(attributes)
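`build_dct` is assumed from the surrounding module's helpers; a hypothetical sketch of the compound-key expansion it performs:

def build_dct(dct, keys, value):
    # Expand a compound key such as 'apache/listen/ports' into nested
    # dicts, assigning the value at the leaf.
    key = keys[0]
    if len(keys) == 1:
        dct[key] = value
    else:
        build_dct(dct.setdefault(key, {}), keys[1:], value)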
Example #13
    def run(self, cmd=None):
        super(RemoteDjangoAdmin, self).run()
        if cmd:
            with prefix('workon %(domain)s' % env):
                run('django-admin.py %s' % cmd)
        else:
            abort('You must specify a command (e.g. syncdb --migrate)')
Example #14
    def _read_littlechef_config(self):
        try:
            config = ConfigParser.SafeConfigParser()
            success = config.read(littlechef.CONFIGFILE)
            if success:
                if os.path.isfile("rackspace.yaml"):
                    return yaml.load(file("rackspace.yaml"))
                elif os.path.isfile("rackspace.yml"):
                    return yaml.load(file("rackspace.yml"))
                else:
                    print(
                        "WARNING: Reading configuration from deprecated {0} file, consider "
                        "upgrading to use rackspace.yaml".format(littlechef.CONFIGFILE)
                    )
                    return dict(config.items("rackspace"))

            else:
                abort(
                    "Could not read littlechef configuration file!  "
                    "Make sure you are running in a kitchen (fix new_kitchen)."
                )
        except ConfigParser.ParsingError:
            pass
        except ConfigParser.NoSectionError:
            pass

        return None
Example #15
def _merge(hosts, roles, exclude=[]):
    """
    Merge given host and role lists into one list of deduped hosts.
    """
    # Abort if any roles don't exist
    bad_roles = [x for x in roles if x not in state.env.roledefs]
    if bad_roles:
        abort("The following specified roles do not exist:\n%s" % (
            indent(bad_roles)
        ))

    # Look up roles, turn into flat list of hosts
    role_hosts = []
    for role in roles:
        value = state.env.roledefs[role]
        # Handle "lazy" roles (callables)
        if callable(value):
            value = value()
        role_hosts += value

    # Return deduped combo of hosts and role_hosts, preserving order within
    # them (vs using set(), which may lose ordering) and skipping hosts to be
    # excluded.
    cleaned_hosts = _clean_hosts(list(hosts) + list(role_hosts))
    all_hosts = []
    for host in cleaned_hosts:
        if host not in all_hosts and host not in exclude:
            all_hosts.append(host)
    return all_hosts
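The "lazy roles" branch means `env.roledefs` values may be callables that are resolved only when the host list is actually needed; a hedged usage sketch (the inventory lookup is hypothetical):

def _lookup_db_hosts():
    # Hypothetical: fetched from an inventory service at run time.
    return ['db1.example.com', 'db2.example.com']

state.env.roledefs = {
    'web': ['web1.example.com', 'web2.example.com'],
    'db': _lookup_db_hosts,  # _merge() calls this via value()
}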
Example #16
def import_media(filename=None):
    """
    Extracts media dump into your local media root.

    Please note that this might overwrite existing local files.

    Usage::

        fab import_media
        fab import_media:filename=foobar.tar.gz

    """
    if not filename:
        filename = settings.MEDIA_DUMP_FILENAME

    project_root = os.getcwd()

    with fab_settings(hide('everything'), warn_only=True):
        is_backup_missing = local('test -e "$(echo %s)"' % os.path.join(
            project_root, filename)).failed
    if is_backup_missing:
        abort(red('ERROR: There is no media backup that could be imported in'
                  ' {0}. We need a file called {1} in that folder.'.format(
                      project_root, filename)))

    # copy the dump into the media root folder
    with lcd(project_root):
        local('cp {0} {1}'.format(filename, settings.MEDIA_ROOT))

    # extract and remove media dump
    with lcd(settings.MEDIA_ROOT):
        local('tar -xvf {0}'.format(filename))
        local('rm -rf {0}'.format(filename))
Example #17
def cleanup_db_backups(params):
    """
    Cleanup sql backup files from folder
    """
    print yellow("Warning mysql.cleanup_db_backups is deprecated from version 1.0")
    params = utils.format_params(params)
    
    if 'path' not in params:
        abort(red("No path param set!"))

    if 'max_backup_history' not in params:
        params['max_backup_history'] = 5
            
    with cd(params['path']):
        folder_result = run("ls -tr1 | grep '\.tar.gz$'")
        if len(folder_result) > 0:
            files = folder_result.split('\n')
            
            current_file_count = len(files)
            print("%s backup files found..." % current_file_count)
            
            if len(files) > params['max_backup_history']:
                total_to_remove = len(files) - params['max_backup_history']
                print("Going to remove `%s` files" % total_to_remove)
                for file in files[0:total_to_remove]:
                    file_path = "%s/%s" % (params['path'], file.strip())
                    print("- %s" % file_path)
                    run("rm %s" % (file_path))
                    
            else:
                print("No sql backup files to remove... limit is set to `%s`" % params['max_backup_history'])
        else:
            print(green("No sql backup files available..."))
Example #18
def _configure_node(configfile):
    """Exectutes chef-solo to apply roles and recipes to a node"""
    with hide('running'):
        print "Uploading node.json..."
        remote_file = '/root/{0}'.format(configfile.split("/")[-1])
        # Ensure secure permissions
        put(configfile, remote_file, use_sudo=True, mode=400)
        sudo('chown root:root {0}'.format(remote_file))
        sudo('mv {0} /etc/chef/node.json'.format(remote_file))
        # Remove local temporary node file
        os.remove(configfile)
        # Always configure Chef Solo
        solo.configure()
        print "\n== Cooking ==\n"
        with settings(hide('warnings'), warn_only=True):
            output = sudo(
                'chef-solo -l {0} -j /etc/chef/node.json'.format(env.loglevel))
            if output.failed:
                if 'chef-solo: command not found' in output:
                    print(
                        colors.red(
                            "\nFAILED: Chef Solo is not installed on this node"))
                    print(
                        "Type 'cook nodes:{0} deploy_chef' to install it".format(
                            env.host))
                    abort("")
                else:
                    print(colors.red(
                        "\nFAILED: A problem occurred while executing chef-solo"))
                    abort("")
            else:
                print(colors.green("\nSUCCESS: Node correctly configured"))
Example #19
def syntax_check():
    """Runs flake8 against the codebase."""
    with fab_settings(warn_only=True):
        for file_type in settings.SYNTAX_CHECK:
            needs_to_abort = False
            # because egrep fails with exit code 1, we need to allow this as
            # a successful exit code in our env
            if 1 not in env.ok_ret_codes:
                env.ok_ret_codes.append(1)
            output = local(
                'find -name "{}" -print'.format(file_type),
                capture=True,
            )
            files = output.split()
            for file in files:
                if any(s in file for s in settings.SYNTAX_CHECK_EXCLUDES):
                    continue
                result = local('egrep -i -n "{0}" {1}'.format(
                    settings.SYNTAX_CHECK[file_type], file), capture=True)
                if result:
                    warn(red("Syntax check found in '{0}': {1}".format(
                        file, result)))
                    needs_to_abort = True
            if needs_to_abort:
                abort(red('There have been errors. Please fix them and run'
                          ' the check again.'))
            else:
                puts(green('Syntax check found no errors. Very good!'))
Example #20
def deploy():
    """deploy code to remote host by checking out the latest via git"""
    if not console.confirm('Are you sure you want to deploy {env.environment}?'.format(env=env), default=False) or \
       not console.confirm('Did you run "fab {env.environment} preindex_views"? '.format(env=env), default=False):
        utils.abort('Deployment aborted.')

    _require_target()
    run('echo ping!')  # workaround for delayed console response

    try:
        execute(update_code)
        execute(update_virtualenv)
        execute(clear_services_dir)
        set_supervisor_config()
        if env.should_migrate:
            execute(stop_pillows)
            execute(stop_celery_tasks)
            execute(migrate)
        execute(_do_collectstatic)
        execute(do_update_django_locales)
        execute(version_static)
        if env.should_migrate:
            execute(flip_es_aliases)
    except Exception:
        execute(mail_admins, "Deploy failed", "You had better check the logs.")
        # hopefully bring the server back to life
        execute(services_restart)
        raise
    else:
        execute(services_restart)
        execute(record_successful_deploy)
Example #21
def restart_services():
    _require_target()
    if not console.confirm('Are you sure you want to restart the services on '
                           '{env.environment}?'.format(env=env), default=False):
        utils.abort('Task aborted.')

    execute(services_restart)
Example #22
def atomic_src_update():
    numbers_list = get_src_dir_numbers()
    directory = '{src}.{number:05d}'.format(
        src=SRC_DIR, number=max(numbers_list + [0]) + 1)

    if env.force:
        run('rm -f {lock}'.format(lock=DEPLOYMENT_LOCK))

    with settings(warn_only=True):
        result = run('ln -ns {directory} {lock}'.format(
            directory=directory,
            lock=DEPLOYMENT_LOCK
            ))

    if result.failed:
        with hide('running', 'stdout', 'stderr'):
            current_time = int(run('date +%s'))
            locked_at = int(run('stat -c "%Y" {lock}'.format(lock=DEPLOYMENT_LOCK)))
            locked_for = timedelta(seconds=current_time-locked_at)
            abort(red(
                "Someone else is holding the deployment lock (For {locked_for})."
                " Rerun with force=1 to kick them off (could be dangerous).".format(
                    locked_for=locked_for
                ),
                bold=True
            ))

    try:
        yield directory
    except:
        run('unlink {lock}'.format(lock=DEPLOYMENT_LOCK))
        raise
    else:
        run('mv -f -T {lock} {src}'.format(lock=DEPLOYMENT_LOCK, src=SRC_DIR))
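Since the body ends with `yield`, this is a generator-based context manager; presumably the original module decorates it with `contextlib.contextmanager`. A hedged usage sketch under that assumption (`REPO_URL` is hypothetical):

from contextlib import contextmanager

# Assumed decoration in the original module:
#     @contextmanager
#     def atomic_src_update(): ...

def deploy_src():
    with atomic_src_update() as directory:
        # Populate the new numbered directory; the lock symlink is only
        # renamed onto SRC_DIR if this block succeeds.
        run('git clone -q %s %s' % (REPO_URL, directory))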
Example #23
def restart_kraken(instance, wait='serial'):
    """ Restart all krakens of an instance (using pool), serially or in parallel,
        then test them. Testing serially assures that krakens are restarted serially.
        :param wait: string.
               Possible values=False or None: restart in parallel, no test
               'serial': restart serially and test
               'parallel': restart in parallel and test
               'no_test': explicitly skip tests (faster but dangerous)
        The default value is 'serial' because it is the safest scenario
        to restart the krakens of an instance in production.
    """
    if wait not in ('serial', 'parallel', 'no_test'):
        abort(yellow("Error: wait parameter must be 'serial', 'parallel' or 'no_test', found '{}'".format(wait)))
    instance = get_real_instance(instance)
    excluded = instance.name in env.excluded_instances
    # restart krakens of this instance that are also in the eng role,
    # this works with the "pool" switch mechanism used in upgrade_all()
    for host in set(instance.kraken_engines).intersection(env.roledefs['eng']):
        restart_kraken_on_host(instance, host)
        if wait == 'serial' and not excluded:
            test_kraken(instance, fail_if_error=False, wait=True, hosts=[host])
    if wait == 'parallel' and not excluded:
        test_kraken(instance, fail_if_error=False, wait=True)
    if wait != 'no_test' and excluded:
        print(yellow("Coverage '{}' has no data, not testing it".format(instance.name)))
    if wait == 'no_test':
        print(yellow("Warning Coverage '{}' not tested: parameter wait='no_test'".format(instance.name)))
Example #24
def deploy(rolename=None):
    """
    Deploy configuration on the remote hosts.

    Possible arguments are -
        coordinator - Deploy the coordinator configuration to the coordinator
        node
        workers - Deploy workers configuration to the worker nodes. This will
        not deploy configuration for a coordinator that is also a worker

    If no rolename is specified, then configuration for all roles will be
    deployed

    Parameters:
        rolename - [coordinator|workers]
    """
    if rolename is None:
        _LOGGER.info("Running configuration deploy")
        prestoadmin.deploy.coordinator()
        prestoadmin.deploy.workers()
    else:
        if rolename.lower() == 'coordinator':
            prestoadmin.deploy.coordinator()
        elif rolename.lower() == 'workers':
            prestoadmin.deploy.workers()
        else:
            abort("Invalid Argument. Possible values: coordinator, workers")
Example #25
def show(config_type=None):
    """
    Print to the user the contents of the configuration files deployed

    If no config_type is specified, then all four configurations will be
    printed.  No warning will be printed for a missing log.properties since
    it is not a required configuration file.

    Parameters:
        config_type: [node|jvm|config|log]
    """
    file_name = ''
    if config_type is None:
        configuration_show(NODE_PROPERTIES)
        configuration_show(JVM_CONFIG)
        configuration_show(CONFIG_PROPERTIES)
        configuration_show(LOG_PROPERTIES, should_warn=False)
    else:
        if config_type.lower() == 'node':
            file_name = NODE_PROPERTIES
        elif config_type.lower() == 'jvm':
            file_name = JVM_CONFIG
        elif config_type.lower() == 'config':
            file_name = CONFIG_PROPERTIES
        elif config_type.lower() == 'log':
            file_name = LOG_PROPERTIES
        else:
            abort("Invalid Argument. Possible values: node, jvm, config, log")

        configuration_show(file_name)
Example #26
def load_db(filename=None):
    """Loads a dump into the database"""
    env.box_dump_filename = filename

    if not filename:
        abort(red('Dump missing. "fab server.load_db:filename"', bold=True))

    if not os.path.exists(filename):
        abort(red('"%(box_dump_filename)s" does not exist.' % env, bold=True))

    if not confirm(
            'Completely replace the remote database'
            ' "%(box_database)s" (if it exists)?', default=False):
        return

    run(
        'psql -c "DROP DATABASE IF EXISTS %(box_database)s"')
    run(
        'createdb %(box_database)s --encoding=UTF8 --template=template0'
        ' --owner=%(box_database)s')
    run_local(
        'cat %(box_dump_filename)s |'
        'ssh %(host_string)s "source .profile && psql %(box_database)s"')
    run(
        'psql %(box_database)s -c "REASSIGN OWNED BY admin '
        ' TO %(box_database)s"')
Example #27
def setup_virtualenv():
    """
    Initially creates the virtualenv in the correct places (creating directory structures as necessary) on the
    remote host.
    If necessary, installs setup_tools, then pip, then virtualenv (packages)
    """
    print green('In packages module.  Installing VirtualEnv on host machine...')
    require('virtualenv_root', provided_by=('setup_env'))

    with cd('/tmp'):
        if env.os == 'ubuntu':
            sudo('apt-get install -y python-setuptools python-setuptools-devel')
        elif env.os == 'redhat':
            sudo('yum install -y python-setuptools python-setuptools-devel')
        else:
            utils.abort('Unrecognized OS %s!' % env.os)
        sudo('easy_install pip')
        sudo('pip install virtualenv', pty=True, shell=True)

        print yellow('Require user:%(sudo_user)s password!' % env)
        with fab_settings(user=env.sudo_user, sudo_prompt='ARemind sudo password: '):
            sudo('mkdir -p %(www_root)s' % env)
            sudo('chown -R %(www_root)s %(virtualenv_root)s' % env)
            sudo('chgrp -R %(www_root)s %(virtualenv_root)s' % env)
            args = '--clear --distribute'
            sudo('virtualenv %s %s' % (args, env.virtualenv_root), user=env.sudo_user)
    print green('In packages module. Done installing VirtualEnv...')
Example #28
def delete_kraken_queue_to_rabbitmq(instance, apply_on='reverse'):
    """
    Remove queue for a kraken
    """
    instance = get_real_instance(instance)
    if apply_on == 'engines':
        hosts, exclude_hosts = instance.kraken_engines, ()
    elif apply_on == 'reverse':
        hosts, exclude_hosts = env.roledefs['eng'], instance.kraken_engines
    elif apply_on == 'all':
        hosts, exclude_hosts = env.roledefs['eng'], ()
    else:
        abort("Bad 'apply_on' parameter value: {}".format(apply_on))

    if env.rabbitmq_host_api == 'localhost':
        host_string = env.roledefs['tyr_master'][0]
    else:
        host_string = env.rabbitmq_host_api

    for host in set(hosts) - set(exclude_hosts):
        with settings(host_string=host_string):
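            # "%2F" is the URL-encoded name of RabbitMQ's default vhost ("/")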
            run('curl -i -u {}:{} -XDELETE "http://localhost:{}/api/queues/%2F/kraken_{}_{}_rt"'
                .format(env.rabbitmq_user, env.rabbitmq_pass, env.rabbitmq_port_api,
                        get_host_addr(host).split('.')[0], instance))
            run('curl -i -u {}:{} -XDELETE "http://localhost:{}/api/queues/%2F/kraken_{}_{}_task"'
                .format(env.rabbitmq_user, env.rabbitmq_pass, env.rabbitmq_port_api,
                        get_host_addr(host).split('.')[0], instance))
Example #29
def dotenv(**kwargs):
    """ adds a key value pair to the .env file on a server """
    require('root', provided_by=env.deployments)
    if not len(kwargs):
        utils.abort('missing variable. usage: fab production dotenv:MYVAR=myvalue')
    for key, value in kwargs.items():
        _add_to_dotenv(key, value)
Example #30
def hotfix_deploy():
    """
    deploy ONLY the code with no extra cleanup or syncing

    for small python-only hotfixes

    """
    if not console.confirm('Are you sure you want to deploy {env.environment}?'.format(env=env), default=False) or \
       not console.confirm('Did you run "fab {env.environment} preindex_views"? '.format(env=env), default=False) or \
       not console.confirm('HEY!!!! YOU ARE ONLY DEPLOYING CODE. THIS IS NOT A NORMAL DEPLOY. COOL???', default=False):
        utils.abort('Deployment aborted.')

    _require_target()
    run('echo ping!')  # workaround for delayed console response

    try:
        execute(update_code)
    except Exception:
        execute(mail_admins, "Deploy failed", "You had better check the logs.")
        # hopefully bring the server back to life
        execute(services_restart)
        raise
    else:
        execute(services_restart)
        execute(record_successful_deploy)
Example #31
def publish(beta='True', version=None, channel='browser', pre='True'):
    """Upload extension to s3 (credentials in ~/.s3cfg need to be set to primary)"""
    if not (beta == 'True') and version is not None:
        abort("You should never publish a non-beta package with a fixed version.\n"\
              "Always use git tags (and push them to upstream) so we can keep "\
              "track of all live versions.")



    update_manifest_file_name = "latest.rdf"
    latest_html_file_name = "latest.html"
    icon_name = "icon.png"
    output_file_name = package(beta, version, "True", channel) # !!!! we must publish only signed versions !!!!
    icon_url = "http://cdn2.cliqz.com/update/%s" % icon_name

    folder = get_folder_name(beta=='True', channel)
    upload_folder = folder + ('_pre' if pre == 'True' else '')

    path_to_s3 = PATH_TO_S3_BUCKET + upload_folder + '/'

    local("aws s3 cp %s %s --acl public-read" % (output_file_name, path_to_s3))

    env = Environment(loader=FileSystemLoader('templates'))
    manifest_template = env.get_template(update_manifest_file_name)

    if version is None:
        version = get_version(beta)

    download_link = "https://s3.amazonaws.com/cdncliqz/update/%s/%s" % (folder, output_file_name)
    upload_folder_link = "https://s3.amazonaws.com/cdncliqz/update/%s/%s" % (upload_folder, output_file_name)
    download_link_latest_html = "http://cdn2.cliqz.com/update/%s/%s" % (folder, output_file_name)

    output_from_parsed_template = manifest_template.render(version=version,
                                                           download_link=download_link)
    with open(update_manifest_file_name, "wb") as f:
        f.write(output_from_parsed_template.encode("utf-8"))
    local("aws s3 cp %s %s --acl public-read --content-type 'text/rdf'" % (update_manifest_file_name,
                                                          path_to_s3))
    local("rm  %s" % update_manifest_file_name)

    # Provide a link to the latest stable version
    latest_template = env.get_template(latest_html_file_name)
    output_from_parsed_template = latest_template.render(download_link=download_link_latest_html,
                                                         icon_url=icon_url)
    with open(latest_html_file_name, "wb") as f:
        f.write(output_from_parsed_template.encode("utf-8"))
    local("aws s3 cp %s %s --acl public-read" % (latest_html_file_name,
                                            path_to_s3))

    #replace latest.xpi when everything is done
    local("aws s3 cp latest.xpi %s --acl public-read" % path_to_s3)

    local("rm  %s" % latest_html_file_name)

    credentials = {}
    execfile("../fern/release-creds.txt", credentials)
    auth = (
        'balrogadmin',
        credentials['balrog_credentials']['balrogadmin']
    )

    submitter = Submitter(
        release_name="SystemAddons-"+upload_folder,
        auth=auth,
        api_root="http://balrog-admin.10e99.net/api",
        addon_id="*****@*****.**",
        addon_version=version,
        addon_url=upload_folder_link
    )
    submitter.submit()
Example #32
import getpass
import re
import threading
import select
import socket
import sys

from fabric.auth import get_password, set_password
from fabric.utils import abort, handle_prompt_abort

try:
    import warnings
    warnings.simplefilter('ignore', DeprecationWarning)
    import paramiko as ssh
except ImportError:
    abort("paramiko is a required module. Please install it:\n\t"
          "$ sudo easy_install paramiko")

host_pattern = r'((?P<user>.+)@)?(?P<host>[^:]+)(:(?P<port>\d+))?'
host_regex = re.compile(host_pattern)


class HostConnectionCache(dict):
    """
    Dict subclass allowing for caching of host connections/clients.

    This subclass does not offer any extra methods, but will intelligently
    create new client connections when keys are requested, or return previously
    created connections instead.

    Key values are the same as host specifiers throughout Fabric: optional
    username + ``@``, mandatory hostname, optional ``:`` + port number.
    """
Example #33
def main():
    """
    Main command-line execution loop.
    """
    try:
        # Parse command line options
        parser, options, arguments = parse_options()

        # Handle regular args vs -- args
        arguments = parser.largs
        remainder_arguments = parser.rargs

        # Update env with any overridden option values
        # NOTE: This needs to remain the first thing that occurs
        # post-parsing, since so many things hinge on the values in env.
        for option in env_options:
            state.env[option.dest] = getattr(options, option.dest)

        # Handle --hosts, --roles, --exclude-hosts (comma separated string =>
        # list)
        for key in ['hosts', 'roles', 'exclude_hosts']:
            if key in state.env and isinstance(state.env[key], basestring):
                state.env[key] = state.env[key].split(',')

        # Handle output control level show/hide
        update_output_levels(show=options.show, hide=options.hide)

        # Handle version number option
        if options.show_version:
            print("Fabric %s" % state.env.version)
            sys.exit(0)

        # Load settings from user settings file, into shared env dict.
        state.env.update(load_settings(state.env.rcfile))

        # Find local fabfile path or abort
        fabfile = find_fabfile()
        if not fabfile and not remainder_arguments:
            abort("""Couldn't find any fabfiles!

Remember that -f can be used to specify fabfile path, and use -h for help.""")

        # Store absolute path to fabfile in case anyone needs it
        state.env.real_fabfile = fabfile

        # Load fabfile (which calls its module-level code, including
        # tweaks to env values) and put its commands in the shared commands
        # dict
        if fabfile:
            docstring, callables, default = load_fabfile(fabfile)
            state.commands.update(callables)

        # Handle case where we were called bare, i.e. just "fab", and print
        # a help message.
        actions = (options.list_commands, options.shortlist, options.display,
                   arguments, remainder_arguments, default)
        if not any(actions):
            parser.print_help()
            sys.exit(1)

        # Abort if no commands found
        if not state.commands and not remainder_arguments:
            abort("Fabfile didn't contain any commands!")

        # Now that we're settled on a fabfile, inform user.
        if state.output.debug:
            if fabfile:
                print("Using fabfile '%s'" % fabfile)
            else:
                print("No fabfile loaded -- remainder command only")

        # Shortlist is now just an alias for the "short" list format;
        # it overrides use of --list-format if somebody were to specify both
        if options.shortlist:
            options.list_format = 'short'
            options.list_commands = True

        # List available commands
        if options.list_commands:
            print("\n".join(list_commands(docstring, options.list_format)))
            sys.exit(0)

        # Handle show (command-specific help) option
        if options.display:
            display_command(options.display)

        # If user didn't specify any commands to run, show help
        if not (arguments or remainder_arguments or default):
            parser.print_help()
            sys.exit(0)  # Or should it exit with error (1)?

        # Parse arguments into commands to run (plus args/kwargs/hosts)
        commands_to_run = parse_arguments(arguments)

        # Parse remainders into a faux "command" to execute
        remainder_command = parse_remainder(remainder_arguments)

        # Figure out if any specified task names are invalid
        unknown_commands = []
        for tup in commands_to_run:
            if crawl(tup[0], state.commands) is None:
                unknown_commands.append(tup[0])

        # Abort if any unknown commands were specified
        if unknown_commands:
            abort("Command(s) not found:\n%s" \
                % indent(unknown_commands))

        # Generate remainder command and insert into commands, commands_to_run
        if remainder_command:
            r = '<remainder>'
            state.commands[r] = lambda: api.run(remainder_command)
            commands_to_run.append((r, [], {}, [], [], []))

        # Ditto for a default, if found
        if not commands_to_run and default:
            commands_to_run.append((default.name, [], {}, [], [], []))

        if state.output.debug:
            names = ", ".join(x[0] for x in commands_to_run)
            print("Commands to run: %s" % names)

        # At this point all commands must exist, so execute them in order.
        for name, args, kwargs, cli_hosts, cli_roles, cli_exclude_hosts in commands_to_run:
            # Get callable by itself
            task = crawl(name, state.commands)
            # Set current task name (used for some error messages)
            state.env.command = name
            # Set host list (also copy to env)
            state.env.all_hosts = hosts = get_hosts(task, cli_hosts, cli_roles,
                                                    cli_exclude_hosts)
            # If hosts found, execute the function on each host in turn
            for host in hosts:
                # Preserve user
                prev_user = state.env.user
                # Split host string and apply to env dict
                username, hostname, port = interpret_host_string(host)
                # Log to stdout
                if state.output.running:
                    print("[%s] Executing task '%s'" % (host, name))
                # Actually run command
                _run_task(task, args, kwargs)
                # Put old user back
                state.env.user = prev_user
            # If no hosts found, assume local-only and run once
            if not hosts:
                _run_task(task, args, kwargs)
        # If we got here, no errors occurred, so print a final note.
        if state.output.status:
            print("\nDone.")
    except SystemExit:
        # a number of internal functions might raise this one.
        raise
    except KeyboardInterrupt:
        if state.output.status:
            print >> sys.stderr, "\nStopped."
        sys.exit(1)
    except:
        sys.excepthook(*sys.exc_info())
        # we might leave stale threads if we don't explicitly exit()
        sys.exit(1)
    finally:
        disconnect_all()
    sys.exit(0)
Example #34
def check_pipenv():
    with quiet():
        if run('which pipenv').failed:
            abort('pipenv is missing, '
                  'please install it as root with "pip install pipenv"')
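Fabric's `quiet()` hides output and sets `warn_only=True`, so the failing `which` comes back as a result object with `.failed` set instead of aborting the run. The same pattern generalizes; a small sketch:

from fabric.api import quiet, run

def has_binary(name):
    # True if `name` is on the remote PATH, without noisy output.
    with quiet():
        return not run('which %s' % name).failed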
Example #35
def main(fabfile_locations=None):
    """
    Main command-line execution loop.
    """
    try:
        # Parse command line options
        parser, options, arguments = parse_options()

        # Handle regular args vs -- args
        arguments = parser.largs
        remainder_arguments = parser.rargs

        # Allow setting of arbitrary env keys.
        # This comes *before* the "specific" env_options so that those may
        # override these ones. Specific should override generic, if somebody
        # was silly enough to specify the same key in both places.
        # E.g. "fab --set shell=foo --shell=bar" should have env.shell set to
        # 'bar', not 'foo'.
        for pair in _escape_split(',', options.env_settings):
            pair = _escape_split('=', pair)
            # "--set x" => set env.x to True
            # "--set x=" => set env.x to ""
            key = pair[0]
            value = True
            if len(pair) == 2:
                value = pair[1]
            state.env[key] = value

        # Update env with any overridden option values
        # NOTE: This needs to remain the first thing that occurs
        # post-parsing, since so many things hinge on the values in env.
        for option in env_options:
            state.env[option.dest] = getattr(options, option.dest)

        # Handle --hosts, --roles, --exclude-hosts (comma separated string =>
        # list)
        for key in ['hosts', 'roles', 'exclude_hosts']:
            if key in state.env and isinstance(state.env[key],
                                               six.string_types):
                state.env[key] = state.env[key].split(',')

        # Feed the env.tasks : tasks that are asked to be executed.
        state.env['tasks'] = arguments

        # Handle output control level show/hide
        update_output_levels(show=options.show, hide=options.hide)

        # Handle version number option
        if options.show_version:
            print("Fabric3 %s" % state.env.version)
            print("Paramiko %s" % ssh.__version__)
            sys.exit(0)

        # Load settings from user settings file, into shared env dict.
        state.env.update(load_settings(state.env.rcfile))

        # Find local fabfile path or abort
        fabfile = find_fabfile(fabfile_locations)
        if not fabfile and not remainder_arguments:
            abort("""Couldn't find any fabfiles!

Remember that -f can be used to specify fabfile path, and use -h for help.""")

        # Store absolute path to fabfile in case anyone needs it
        state.env.real_fabfile = fabfile

        # Load fabfile (which calls its module-level code, including
        # tweaks to env values) and put its commands in the shared commands
        # dict
        default = None
        if fabfile:
            docstring, callables, default = load_fabfile(fabfile)
            state.commands.update(callables)

        # Handle case where we were called bare, i.e. just "fab", and print
        # a help message.
        actions = (options.list_commands, options.shortlist, options.display,
                   arguments, remainder_arguments, default)
        if not any(actions):
            parser.print_help()
            sys.exit(1)

        # Abort if no commands found
        if not state.commands and not remainder_arguments:
            abort("Fabfile didn't contain any commands!")

        # Now that we're settled on a fabfile, inform user.
        if state.output.debug:
            if fabfile:
                print("Using fabfile '%s'" % fabfile)
            else:
                print("No fabfile loaded -- remainder command only")

        # Shortlist is now just an alias for the "short" list format;
        # it overrides use of --list-format if somebody were to specify both
        if options.shortlist:
            options.list_format = 'short'
            options.list_commands = True

        # List available commands
        if options.list_commands:
            show_commands(docstring, options.list_format)

        # Handle show (command-specific help) option
        if options.display:
            display_command(options.display)

        # If user didn't specify any commands to run, show help
        if not (arguments or remainder_arguments or default):
            parser.print_help()
            sys.exit(0)  # Or should it exit with error (1)?

        # Parse arguments into commands to run (plus args/kwargs/hosts)
        commands_to_run = parse_arguments(arguments)

        # Parse remainders into a faux "command" to execute
        remainder_command = parse_remainder(remainder_arguments)

        # Figure out if any specified task names are invalid
        unknown_commands = []
        for tup in commands_to_run:
            if crawl(tup[0], state.commands) is None:
                unknown_commands.append(tup[0])

        # Abort if any unknown commands were specified
        if unknown_commands and not state.env.get('skip_unknown_tasks', False):
            warn("Command(s) not found:\n%s" \
                % indent(unknown_commands))
            show_commands(None, options.list_format, 1)

        # Generate remainder command and insert into commands, commands_to_run
        if remainder_command:
            r = '<remainder>'
            state.commands[r] = lambda: api.run(remainder_command)
            commands_to_run.append((r, [], {}, [], [], []))

        # Ditto for a default, if found
        if not commands_to_run and default:
            commands_to_run.append((default.name, [], {}, [], [], []))

        # Initial password prompt, if requested
        if options.initial_password_prompt:
            prompt = "Initial value for env.password: "******"Initial value for env.sudo_password: "******", ".join(x[0] for x in commands_to_run)
            print("Commands to run: %s" % names)

        # At this point all commands must exist, so execute them in order.
        for name, args, kwargs, arg_hosts, arg_roles, arg_exclude_hosts in commands_to_run:
            execute(name,
                    hosts=arg_hosts,
                    roles=arg_roles,
                    exclude_hosts=arg_exclude_hosts,
                    *args,
                    **kwargs)
        # If we got here, no errors occurred, so print a final note.
        if state.output.status:
            print("\nDone.")
    except SystemExit:
        # a number of internal functions might raise this one.
        raise
    except KeyboardInterrupt:
        if state.output.status:
            sys.stderr.write("\nStopped.\n")
        sys.exit(1)
    except:
        sys.excepthook(*sys.exc_info())
        # we might leave stale threads if we don't explicitly exit()
        sys.exit(1)
    finally:
        disconnect_all()
    sys.exit(0)
Example #36
def die(msg):
    """Serious error"""
    abort(red(msg, bold=True))
Example #37
def require_itwewina():
    """
    Check that we're deploying itwewina; abort otherwise.
    """
    if env.get('current_dict') != 'itwewina':
        abort('please run as `fab [server] itwewina [commands ...]`')
Example #38
def main():
    """
    Main command-line execution loop.
    """
    try:
        # Parse command line options
        parser, options, arguments = parse_options()

        # Handle regular args vs -- args
        arguments = parser.largs
        remainder_arguments = parser.rargs

        # Update env with any overridden option values
        # NOTE: This needs to remain the first thing that occurs
        # post-parsing, since so many things hinge on the values in env.
        for option in env_options:
            env[option.dest] = getattr(options, option.dest)

        # Handle --hosts, --roles (comma separated string => list)
        for key in ['hosts', 'roles']:
            if key in env and isinstance(env[key], str):
                env[key] = env[key].split(',')

        # Handle output control level show/hide
        update_output_levels(show=options.show, hide=options.hide)

        # Handle version number option
        if options.show_version:
            print("Fabric %s" % env.version)
            sys.exit(0)

        # Load settings from user settings file, into shared env dict.
        env.update(load_settings(env.rcfile))

        # Find local fabfile path or abort
        fabfile = find_fabfile()
        if not fabfile and not remainder_arguments:
            abort("Couldn't find any fabfiles!")

        # Store absolute path to fabfile in case anyone needs it
        env.real_fabfile = fabfile

        # Load fabfile (which calls its module-level code, including
        # tweaks to env values) and put its commands in the shared commands
        # dict
        if fabfile:
            docstring, callables = load_fabfile(fabfile)
            commands.update(callables)

        # Autocompletion support
        autocomplete_items = [cmd.replace('_', '-') for cmd in commands]
        if 'autocomplete' in env:
            autocomplete_items += env.autocomplete

        autocomplete(parser, ListCompleter(autocomplete_items))

        # Handle hooks related options
        _disable_hooks = options.disable_hooks
        _enable_hooks = options.enable_hooks

        if _disable_hooks:
            for _hook in _disable_hooks.strip().split():
                DISABLED_HOOKS.append(_hook.strip())

        if _enable_hooks:
            for _hook in _enable_hooks.strip().split():
                ENABLED_HOOKS.append(_hook.strip())

        # Handle the non-execution flow
        if not arguments and not remainder_arguments:

            # Non-verbose command list
            if options.shortlist:
                shortlist()

            # Handle show (command-specific help) option
            if options.display:
                display_command(options.display)

            # Else, show the list of commands and exit
            list_commands(docstring)

        # Now that we're settled on a fabfile, inform user.
        if output.debug:
            if fabfile:
                print("Using fabfile '%s'" % fabfile)
            else:
                print("No fabfile loaded -- remainder command only")

        # Parse arguments into commands to run (plus args/kwargs/hosts)
        commands_to_run, env_update = parse_arguments(arguments)
        env.update(env_update)

        # Parse remainders into a faux "command" to execute
        remainder_command = parse_remainder(remainder_arguments)

        # Figure out if any specified task names are invalid
        unknown_commands = []
        for tup in commands_to_run:
            if tup[0] not in commands:
                unknown_commands.append(tup[0])

        # Abort if any unknown commands were specified
        if unknown_commands:
            abort("Command(s) not found:\n%s" \
                % indent(unknown_commands))

        # Generate remainder command and insert into commands, commands_to_run
        if remainder_command:
            r = '<remainder>'
            commands[r] = lambda: api.run(remainder_command)
            commands_to_run.append((r, [], {}, [], []))

        if output.debug:
            names = ", ".join(x[0] for x in commands_to_run)
            print("Commands to run: %s" % names)

        call_hooks('commands.before', commands, commands_to_run)

        # Initialise context runner
        env()

        # Initialise the default stage if none are given as the first command.
        if 'stages' in env:
            if commands_to_run[0][0] not in env.stages:
                execute_command(
                    (env.stages[0], (), {}, None, None, None), commands
                    )
            else:
                execute_command(commands_to_run.pop(0), commands)

        if env.config_file:
            config_path = realpath(expanduser(env.config_file))
            config_path = join(dirname(fabfile), config_path)
            config_file = open(config_path, 'rb')
            config = load_yaml(config_file.read())
            if not config:
                env.config = AttributeDict()
            elif not isinstance(config, dict):
                abort("Invalid config file found at %s" % config_path)
            else:
                env.config = AttributeDict(config)
            config_file.close()

        call_hooks('config.loaded')
        first_time_env_call = 1

        # At this point all commands must exist, so execute them in order.
        for spec in commands_to_run:
            execute_command(spec, commands)

        # If we got here, no errors occurred, so print a final note.
        if output.status:
            msg = "\nDone."
            if env.colors:
                msg = env.color_settings['finish'](msg)
            print(msg)

    except SystemExit:
        # a number of internal functions might raise this one.
        raise
    except KeyboardInterrupt:
        if output.status:
            msg = "\nStopped."
            if env.colors:
                msg = env.color_settings['finish'](msg)
            print >> sys.stderr, msg
        sys.exit(1)
    except:
        sys.excepthook(*sys.exc_info())
        # we might leave stale threads if we don't explicitly exit()
        sys.exit(1)
    finally:
        call_hooks('commands.after')
        disconnect_all()
    sys.exit(0)
Example #39
def execute(task, *args, **kwargs):
    """
    Execute ``task`` (callable or name), honoring host/role decorators, etc.

    ``task`` may be an actual callable object, or it may be a registered task
    name, which is used to look up a callable just as if the name had been
    given on the command line (including :ref:`namespaced tasks <namespaces>`,
    e.g. ``"deploy.migrate"``).

    The task will then be executed once per host in its host list, which is
    (again) assembled in the same manner as CLI-specified tasks: drawing from
    :option:`-H`, :ref:`env.hosts <hosts>`, the `~fabric.decorators.hosts` or
    `~fabric.decorators.roles` decorators, and so forth.

    ``host``, ``hosts``, ``role``, ``roles`` and ``exclude_hosts`` kwargs will
    be stripped out of the final call, and used to set the task's host list, as
    if they had been specified on the command line like e.g. ``fab
    taskname:host=hostname``.

    Any other arguments or keyword arguments will be passed verbatim into
    ``task`` when it is called, so ``execute(mytask, 'arg1', kwarg1='value')``
    will (once per host) invoke ``mytask('arg1', kwarg1='value')``.

    This function returns a dictionary mapping host strings to the given task's
    return value for that host's execution run. For example, ``execute(foo,
    hosts=['a', 'b'])`` might return ``{'a': None, 'b': 'bar'}`` if ``foo``
    returned nothing on host `a` but returned ``'bar'`` on host `b`.

    In situations where a task execution fails for a given host but overall
    progress does not abort (such as when :ref:`env.skip_bad_hosts
    <skip-bad-hosts>` is True) the return value for that host will be the error
    object or message.

    .. seealso::
        :ref:`The execute usage docs <execute>`, for an expanded explanation
        and some examples.

    .. versionadded:: 1.3
    .. versionchanged:: 1.4
        Added the return value mapping; previously this function had no defined
        return value.
    """
    my_env = {'clean_revert': True}
    results = {}
    # Obtain task
    if not (callable(task) or _is_task(task)):
        # Assume string, set env.command to it
        my_env['command'] = task
        task = crawl(task, state.commands)
        if task is None:
            abort("%r is not callable or a valid task name"
                  % (my_env['command'],))
    # Set env.command if we were given a real function or callable task obj
    else:
        dunder_name = getattr(task, '__name__', None)
        my_env['command'] = getattr(task, 'name', dunder_name)
    # Normalize to Task instance if we ended up with a regular callable
    if not _is_task(task):
        from fabric.decorators import task as task_decorator
        task = task_decorator(task)
    # Filter out hosts/roles kwargs
    new_kwargs, hosts, roles, exclude_hosts = parse_kwargs(kwargs)
    # Set up host list
    my_env['all_hosts'] = task.get_hosts(hosts, roles, exclude_hosts, state.env)

    # No hosts, just run once locally
    if not my_env['all_hosts']:
        with settings(**my_env):
            results['<local-only>'] = task.run(*args, **new_kwargs)
        return results

    parallel = requires_parallel(task)
    if parallel:
        # Import multiprocessing if needed, erroring out usefully
        # if it can't.
        try:
            import multiprocessing
        except ImportError:
            import traceback
            tb = traceback.format_exc()
            abort(tb + """
    At least one task needs to be run in parallel, but the
    multiprocessing module cannot be imported (see above
    traceback.) Please make sure the module is installed
    or that the above ImportError is fixed.""")

        # Get max pool size for this task
        pool_size = task.get_pool_size(my_env['all_hosts'], state.env.pool_size)
        # Set up job comms queue
        queue = multiprocessing.Queue()
        role_limits = state.env.get('role_limits', None)
        jobs = JobQueue(pool_size, queue, role_limits=role_limits, debug=state.output.debug)
    else:
        queue = None
        jobs = None

    # Attempt to cycle on hosts, skipping if needed
    for host in my_env['all_hosts']:
        task.role = task.get_role(host, hosts, state.env)
        try:
            results[host] = _execute(task, host, my_env, args, new_kwargs, jobs, queue)
        except NetworkError, e:
            results[host] = e
            # Backwards compat test re: whether to use an exception or
            # abort
            if not state.env.use_exceptions_for['network']:
                func = warn if state.env.skip_bad_hosts else abort
                error(e.message, func=func, exception=e.wrapped)
            else:
                raise
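
A minimal usage sketch for ``execute`` (the host and task names here are hypothetical, not from the source above):

from fabric.api import execute, run, task

@task
def uptime():
    # Runs once per host; per-host return values are collected by execute().
    return run("uptime")

# Per the docstring above, returns a dict keyed by host string,
# e.g. {'web1': '...', 'web2': '...'}.
results = execute(uptime, hosts=['web1', 'web2'])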
Example #40
def dev_server():
    """ use dev environment on remote host to play with code in production-like env"""
    utils.abort('remove this line when the dev server is set up')
    env.environment = 'dev_server'
    env.hosts = ['fen-vz-' + project_settings.project_name + '-dev']
    _local_setup()
Example #41
if 'riapsApps' not in env:
    env.riapsApps = os.getenv('RIAPSAPPS')
    if env.riapsApps is None:
        print("RIAPS Configuration - RIAPSAPPS is not set, using /home/riaps/riaps_apps")
        env.riapsApps = '/home/riaps/riaps_apps'

env.riapsLib = '/usr/local/lib'

# Use RIAPS SSH key
#env.key_filename = os.path.join(env.riapsHome,"keys/" + str(const.ctrlPrivateKey))
validate = 'validate' in env

# If no command line roles or hosts are passed (i.e. -R or -H), only then use listed hosts
# Allows for passing of individual hosts or roles on which to run tasks
env.roledefs = None
if 'hostsFile' in env:
    if os.path.isfile(env.hostsFile):
        sys.hosts(env.hostsFile,validate)
    else:
        print("Given hosts file \"%s\" does not exist, exiting..." % env.hostsFile)
# elif not env.roles and not env.hosts and not [s for s in env.tasks if 'sys.hosts' in s]:
else:
    # Task is not sys.hosts
    riaps_conf = os.path.join(env.riapsHome,'etc/riaps-hosts.conf')
    sys.hosts(riaps_conf,validate)

if env.roledefs is None:
    abort('Bad configuration/hosts/roles')
#
Example #42
    def restart_site(self):
        with hide('output'):
            result = self.process_manager.restart()
            if result.failed:
                print result
                utils.abort(red("{} failed with code {}".format(result.real_command, result.return_code)))
Example #43
def test_abort_with_exception():
    """
    abort() should raise a provided exception
    """
    with settings(abort_exception=TestException):
        abort("Test")
Example #44
def clean_db(revision=None):
    """ delete the entire database """
    if env.environment == 'production':
        utils.abort('do not delete the production database!!!')
    _tasks("clean_db")
Example #45
def check_role_versions():  # type: () -> None
    """
    Usage: fab check_role_versions

    If the wrong versions of any roles are installed, per deployment/requirements.yml,
    fail.

    If any required roles are not installed, install them.

    If env.devflag is true, warns but ignores any locally installed roles. Otherwise,
    locally installed roles are a fatal error. See the `dev` task
    to set env.devflag.
    """

    okay = True  # False if we've spotted any problems
    bad = []  # Paths to where missing roles should be installed, or where bad version roles are installed
    requirements = yaml.load(open("deployment/requirements.yml"))
    requirements = sorted(requirements, key=req_name)
    requirements_to_install = False
    for req in requirements:
        name = req_name(req)
        install_dir = find_install_role(name)
        if not install_dir:
            print(yellow("WARNING: role %s not installed" % (name, )))
            requirements_to_install = True
            continue
        meta_path = os.path.join(install_dir, 'meta/.galaxy_install_info')
        if os.path.exists(meta_path):
            meta = yaml.load(open(meta_path))
            if meta['version'] != req['version']:
                print(
                    red("ERROR: role %s at %s is version %s, should be version %s"
                        %
                        (name, install_dir, meta['version'], req['version'])))
                okay = False
                bad.append(install_dir)
            else:
                print(
                    green("GOOD:  role %s %s at %s" %
                          (name, meta['version'], install_dir)))
        else:
            # User must have installed this locally, don't check version
            if env.devflag:
                print(
                    yellow(
                        "SKIP:  role %s at %s appears to have been locally installed"
                        % (name, install_dir)))
            else:
                okay = False
                print(
                    red("ERROR:  role %s at %s appears to have been locally installed, will not continue"
                        % (name, install_dir)))
                print(
                    red("To ignore this error, add 'dev' argument to fab command before this"
                        ))

    if requirements_to_install and okay:
        execute(install_roles)

    if not okay:
        print(
            red("Ansible galaxy role requirements are not satisfied, quitting.  The simplest fix is to delete "
                "the roles that have wrong versions, then run ``fab install_roles`` again."
                ))
        if bad:
            print("E.g.")
            print("$ rm -r %s" % " ".join(badname for badname in bad))
        abort('check_role_versions failed')
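
For reference, a hedged sketch of what ``yaml.load`` is assumed to return for ``deployment/requirements.yml`` -- an ansible-galaxy style list of role specs (role names and versions are illustrative, and ``req_name`` is presumed to resolve a role's install name from its ``name``/``src`` fields):

requirements = [
    {'src': 'geerlingguy.nginx', 'version': '2.7.0'},
    {'src': 'https://example.com/roles/internal-role.git',
     'name': 'internal-role', 'version': '1.0.0'},
]
# For each installed role, meta/.galaxy_install_info records the installed
# 'version', which the task above compares against req['version'].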
Example #46
def _add_merged_attributes(node, all_recipes, all_roles):
    """Merges attributes from cookbooks, node and roles

    Chef Attribute precedence:
    http://docs.opscode.com/essentials_cookbook_attribute_files.html#attribute-precedence
    LittleChef implements, in precedence order:
        - Cookbook default
        - Environment default
        - Role default
        - Node normal
        - Role override
        - Environment override

    NOTE: In order for cookbook attributes to be read, they need to be
        correctly defined in its metadata.json

    """
    # Get cookbooks from extended recipes
    attributes = {}
    for recipe in node['recipes']:
        # Find this recipe
        found = False
        for r in all_recipes:
            if recipe == r['name']:
                found = True
                for attr in r['attributes']:
                    if r['attributes'][attr].get('type') == "hash":
                        value = {}
                    else:
                        value = r['attributes'][attr].get('default')
                    # Attribute dictionaries are defined as a single
                    # compound key. Split and build proper dict
                    build_dct(attributes, attr.split("/"), value)
        if not found:
            error = "Could not find recipe '{0}' while ".format(recipe)
            error += "building node data bag for '{0}'".format(node['name'])
            abort(error)

    # Get default role attributes
    for role in node['roles']:
        for r in all_roles:
            if role == r['name']:
                update_dct(attributes, r.get('default_attributes', {}))

    # Get default environment attributes
    environment = lib.get_environment(node['chef_environment'])
    update_dct(attributes, environment.get('default_attributes', {}))

    # Get normal node attributes
    non_attribute_fields = [
        'id', 'name', 'role', 'roles', 'recipes', 'run_list', 'ipaddress'
    ]
    node_attributes = {}
    for key in node:
        if key in non_attribute_fields:
            continue
        node_attributes[key] = node[key]
    update_dct(attributes, node_attributes)

    # Get override role attributes
    for role in node['roles']:
        for r in all_roles:
            if role == r['name']:
                update_dct(attributes, r.get('override_attributes', {}))

    # Get override environment attributes
    update_dct(attributes, environment.get('override_attributes', {}))

    # Merge back to the original node object
    node.update(attributes)
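
To illustrate the compound-key handling mentioned above, a minimal re-implementation of what ``build_dct`` is assumed to do (the real helper lives in LittleChef; this sketch is illustrative only):

def build_dct(dct, keys, value):
    # Walk/create nested dicts for every key segment except the last,
    # then assign the value at the leaf.
    key = keys[0]
    if len(keys) > 1:
        dct.setdefault(key, {})
        build_dct(dct[key], keys[1:], value)
    else:
        dct[key] = value

attributes = {}
build_dct(attributes, "mysql/server/port".split("/"), 3306)
# attributes == {'mysql': {'server': {'port': 3306}}}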
Example #47
def execute(task, *args, **kwargs):
    """
    Execute ``task`` (callable or name), honoring host/role decorators, etc.

    ``task`` may be an actual callable object, or it may be a registered task
    name, which is used to look up a callable just as if the name had been
    given on the command line (including :ref:`namespaced tasks <namespaces>`,
    e.g. ``"deploy.migrate"``).

    The task will then be executed once per host in its host list, which is
    (again) assembled in the same manner as CLI-specified tasks: drawing from
    :option:`-H`, :ref:`env.hosts <hosts>`, the `~fabric.decorators.hosts` or
    `~fabric.decorators.roles` decorators, and so forth.

    ``host``, ``hosts``, ``role``, ``roles`` and ``exclude_hosts`` kwargs will
    be stripped out of the final call, and used to set the task's host list, as
    if they had been specified on the command line like e.g. ``fab
    taskname:host=hostname``.

    Any other arguments or keyword arguments will be passed verbatim into
    ``task`` (the function itself -- not the ``@task`` decorator wrapping your
    function!) when it is called, so ``execute(mytask, 'arg1',
    kwarg1='value')`` will (once per host) invoke ``mytask('arg1',
    kwarg1='value')``.

    :returns:
        a dictionary mapping host strings to the given task's return value for
        that host's execution run. For example, ``execute(foo, hosts=['a',
        'b'])`` might return ``{'a': None, 'b': 'bar'}`` if ``foo`` returned
        nothing on host `a` but returned ``'bar'`` on host `b`.

        In situations where a task execution fails for a given host but overall
        progress does not abort (such as when :ref:`env.skip_bad_hosts
        <skip-bad-hosts>` is True) the return value for that host will be the
        error object or message.

    .. seealso::
        :ref:`The execute usage docs <execute>`, for an expanded explanation
        and some examples.

    .. versionadded:: 1.3
    .. versionchanged:: 1.4
        Added the return value mapping; previously this function had no defined
        return value.
    """
    my_env = {'clean_revert': True}
    results = {}
    # Obtain task
    is_callable = callable(task)
    if not (is_callable or _is_task(task)):
        # Assume string, set env.command to it
        my_env['command'] = task
        task = crawl(task, state.commands)
        if task is None:
            msg = "%r is not callable or a valid task name" % (
                my_env['command'], )
            if state.env.get('skip_unknown_tasks', False):
                warn(msg)
                return
            else:
                abort(msg)
    # Set env.command if we were given a real function or callable task obj
    else:
        dunder_name = getattr(task, '__name__', None)
        my_env['command'] = getattr(task, 'name', dunder_name)
    # Normalize to Task instance if we ended up with a regular callable
    if not _is_task(task):
        task = WrappedCallableTask(task)
    # Filter out hosts/roles kwargs
    new_kwargs, hosts, roles, exclude_hosts = parse_kwargs(kwargs)
    # Set up host list
    my_env['all_hosts'], my_env['effective_roles'] = \
        task.get_hosts_and_effective_roles(hosts, roles, exclude_hosts, state.env)

    parallel = requires_parallel(task)
    if parallel:
        # Import multiprocessing if needed, erroring out usefully
        # if it can't.
        try:
            import multiprocessing
        except ImportError:
            import traceback
            tb = traceback.format_exc()
            abort(tb + """
    At least one task needs to be run in parallel, but the
    multiprocessing module cannot be imported (see above
    traceback.) Please make sure the module is installed
    or that the above ImportError is fixed.""")
    else:
        multiprocessing = None

    # Get pool size for this task
    pool_size = task.get_pool_size(my_env['all_hosts'], state.env.pool_size)
    # Set up job queue in case parallel is needed
    queue = multiprocessing.Queue() if parallel else None
    jobs = JobQueue(pool_size, queue)
    if state.output.debug:
        jobs._debug = True

    # Call on host list
    if my_env['all_hosts']:
        # Attempt to cycle on hosts, skipping if needed
        for host in my_env['all_hosts']:
            try:
                results[host] = _execute(task, host, my_env, args, new_kwargs,
                                         jobs, queue, multiprocessing)
            except NetworkError, e:
                results[host] = e
                # Backwards compat test re: whether to use an exception or
                # abort
                if not state.env.use_exceptions_for['network']:
                    func = warn if state.env.skip_bad_hosts else abort
                    error(e.message, func=func, exception=e.wrapped)
                else:
                    raise

            # If requested, clear out connections here and not just at the end.
            if state.env.eagerly_disconnect:
                disconnect_all()

        # If running in parallel, block until job queue is emptied
        if jobs:
            err = "One or more hosts failed while executing task '%s'" % (
                my_env['command'])
            jobs.close()
            # Abort if any children did not exit cleanly (fail-fast).
            # This prevents Fabric from continuing on to any other tasks.
            # Otherwise, pull in results from the child run.
            ran_jobs = jobs.run()
            for name, d in ran_jobs.iteritems():
                if d['exit_code'] != 0:
                    if isinstance(d['results'], NetworkError) and \
                            _is_network_error_ignored():
                        error(d['results'].message,
                              func=warn,
                              exception=d['results'].wrapped)
                    elif isinstance(d['results'], BaseException):
                        error(err, exception=d['results'])
                    else:
                        error(err)
                results[name] = d['results']
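
One behavioral difference from Example #39 is worth calling out: here an unknown task name can merely warn instead of aborting when ``skip_unknown_tasks`` is set. A short sketch (the task name is hypothetical):

from fabric.api import execute, settings

with settings(skip_unknown_tasks=True):
    # Warns "'no.such.task' is not callable or a valid task name"
    # and returns None instead of aborting the run.
    execute("no.such.task")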
Example #48
def test_abort():
    """
    abort() should raise SystemExit
    """
    abort("Test")
Example #49
def wrapper(*args, **kwargs):
    if env.machine is None:
        abort(red('ERROR: You must provide a server name to call this'
                  ' task!'))
    return fn(*args, **kwargs)
Example #50
def executes(task, all_args, *args, **kwargs):
    """
    Run ``task`` once per entry in ``all_args``, passing each entry's dict
    as that run's keyword arguments; an entry's ``host`` key names the host
    for that run.
    """
    hosts = [
        all_args[index].get('host', '') for index in xrange(len(all_args))
    ]

    my_env = {'clean_revert': True}
    results = {}
    # Obtain task
    is_callable = callable(task)
    if not (is_callable or _is_task(task)):
        # Assume string, set env.command to it
        my_env['command'] = task
        task = crawl(task, state.commands)
        if task is None:
            msg = "%r is not callable or a valid task name" % (
                my_env['command'], )
            if state.env.get('skip_unknown_tasks', False):
                warn(msg)
                return
            else:
                abort(msg)
    # Set env.command if we were given a real function or callable task obj
    else:
        dunder_name = getattr(task, '__name__', None)
        my_env['command'] = getattr(task, 'name', dunder_name)
    # Normalize to Task instance if we ended up with a regular callable
    if not _is_task(task):
        task = WrappedCallableTask(task)

    my_env['all_hosts'] = hosts
    my_env['all_args'] = all_args

    parallel = requires_parallel(task)
    if parallel:
        # Import multiprocessing if needed, erroring out usefully
        # if it can't.
        try:
            import multiprocessing
        except ImportError:
            import traceback
            tb = traceback.format_exc()
            abort(tb + """
    At least one task needs to be run in parallel, but the
    multiprocessing module cannot be imported (see above
    traceback.) Please make sure the module is installed
    or that the above ImportError is fixed.""")
    else:
        multiprocessing = None

    # Get pool size for this task
    pool_size = task.get_pool_size(my_env['all_hosts'], state.env.pool_size)
    # Set up job queue in case parallel is needed
    queue = multiprocessing.Queue() if parallel else None
    jobs = JobQueue(pool_size, queue)
    if state.output.debug:
        jobs._debug = True

    has_hosts = len(my_env['all_hosts']) > 0
    for index in xrange(len(my_env['all_args'])):
        host = my_env['all_args'][index].get('host', '')
        if has_hosts:
            host = my_env['all_hosts'][index]

        input_kwargs = my_env['all_args'][index]
        try:
            results[host] = _execute(task, host, my_env, args, input_kwargs,
                                     jobs, queue, multiprocessing)
        except NetworkError, e:
            results[host] = e
            # Backwards compat test re: whether to use an exception or
            # abort
            if not state.env.use_exceptions_for['network']:
                func = warn if state.env.skip_bad_hosts else abort
                error(e.message, func=func, exception=e.wrapped)
            else:
                raise

        # If requested, clear out connections here and not just at the end.
        if state.env.eagerly_disconnect:
            disconnect_all()
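
A hedged sketch of how ``executes`` appears meant to be called -- one kwargs dict per run, each optionally naming its ``host`` (the task and values are hypothetical; note that each dict, including its 'host' key, is passed verbatim as that run's keyword arguments):

all_args = [
    {'host': 'web1', 'version': '1.2.0'},
    {'host': 'web2', 'version': '1.2.1'},
]
# Runs deploy_task(host=..., version=...) once per entry, keyed by host.
results = executes(deploy_task, all_args)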
Example #51
def production():
    """ use production environment on remote host"""
    utils.abort('Production deployment not yet implemented.')
Example #52
def _to_bool(b):
    if b not in ['True', 'False', True, False]:
        abort('boolean expected, got: %s' % b)
    return (b in ['True', True])
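
Fab task arguments arrive from the command line as strings, which is why a coercion helper like this is needed; its behavior at a glance:

_to_bool('True')    # -> True
_to_bool(False)     # -> False
_to_bool('yes')     # aborts: "boolean expected, got: yes"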
Example #53
def room_task(room_name, task_name=None):
    """
    Configures the global fabric env variable for other tasks
    """
    # Load the project.yaml file so we can extract configuration for the given room_name
    project_config = project_yaml()
    config = load_project_config(project_config)

    # Abort if no project name found
    project_name = config.get("name", None)
    if not project_name:
        abort(
            "No name key found in the project.yaml. Please specify a project name"
        )
    env.project_name = project_name
    env.local_project_dir = os.path.dirname(project_config)
    # Abort if we cannot find room_name in rooms
    rooms = config.get("rooms", {})
    room = rooms.get(room_name, None)
    if not room:
        abort("""{0} is not a room name listed in project.yaml\n
              Available room names: {1}""".format(room_name, rooms.keys()))

    # Extract the top-level config sans the rooms config
    config_no_rooms = config.copy()
    config_no_rooms.pop("rooms", None)
    # Merge the config of our room into the top-level
    env.config = config_no_rooms
    env.config.update(room)

    # Calling basename on project_name should be harmless
    # In the case that the user specified target, say, build/foo,
    # then basename gives us foo
    env.target_name = os.path.basename(
        env.config.get("target", env.project_name))

    # Running locally if:
    # - User specified is-local: True
    # - Room name is localhost
    # - Hosts is empty
    if room.get("is-local", room_name == "localhost"
                or not room.get("hosts", [])):
        env.hosts = ['localhost']
        env.use_ssh_config = False
        env.project_dir = env.local_project_dir
        env.file_exists = os.path.exists
        env.rsync = lambda: None  # Don't rsync when running locally -- noop
        env.cd = fabric.context_managers.lcd
        # Generate a shell script that duplicates the task
        task_name = task_name or env.tasks[-1]
        env.run = local
        env.background_run = env.run
        env.relpath = os.path.relpath
        env.launch_format_str = "{0} {1}"
        env.debug_launch_format_str = "{0} {1} {2}"
    else:
        env.user = room.get("user", env.local_user)  # needed for remote run
        env.hosts = room.get("hosts", [])
        env.use_ssh_config = True
        # Default remote project dir is /tmp/localusername/projectname
        env.project_dir = room.get("project-dir",
                                   default_remote_project_folder())
        env.run = run
        env.background_run = lambda cmd: env.run(cmd, pty=False)
        env.file_exists = fabric.contrib.files.exists
        env.rsync = rsync_task
        env.cd = fabric.context_managers.cd
        env.relpath = lambda p: p
        env.launch_format_str = "sh -c '(({0} nohup {1} > {2} 2> {2}) &)'"
        env.debug_launch_format_str = "tmux new -d -s {0} '{1}'".format(
            env.target_name, "{0} {1} {2}")
    env.build_dir = os.path.abspath(
        env.relpath(
            os.path.join(env.project_dir, env.config.get("build-dir",
                                                         "build"))))
Example #54
def build(outdir=None, device_sdk=None, simulator_sdk=None, **kwargs):
    """
    Build card.io SDK.
    """
    print(colors.white("Setup", bold=True))

    to_hide = [] if env.verbose else ["stdout", "stderr", "running"]

    xcode_preprocessor_flags = {}

    if not outdir:
        message = """
                     You must provide outdir=<sdk output parent dir>
                     Example usage:
                       `fab build:outdir=~` - normal build
                       `fab build:outdir=~,SCAN_EXPIRY=0` - to disable the experimental expiry-scan feature
                  """
        abort(textwrap.dedent(message).format(**locals()))

    if _confirm_ready_for_release("assets/strings"):
        sys.exit(1)

    outdir = os.path.abspath(os.path.expanduser(outdir))
    print colors.yellow(
        "Will save release sdk to {outdir}".format(outdir=outdir))
    out_subdir = "card.io_ios_sdk_{0}".format(_version_str(show_dirty=True))

    xcode_preprocessor_flags.update(kwargs)
    formatted_xcode_preprocessor_flags = " ".join(
        "{k}={v}".format(k=k, v=v)
        for k, v in xcode_preprocessor_flags.iteritems())
    extra_xcodebuild_settings = "GCC_PREPROCESSOR_DEFINITIONS='$(value) {formatted_xcode_preprocessor_flags}'".format(
        **locals())

    device_sdk = device_sdk or "iphoneos"
    simulator_sdk = simulator_sdk or "iphonesimulator"

    arch_to_sdk = (("armv7", device_sdk), ("armv7s", device_sdk), ("arm64",
                                                                   device_sdk),
                   ("i386", simulator_sdk), ("x86_64", simulator_sdk))

    with settings(hide(*to_hide)):
        icc_root = local("git rev-parse --show-toplevel", capture=True)

    temp_dir = tempfile.mkdtemp() + os.sep
    atexit.register(shutil.rmtree, temp_dir, True)

    print(colors.white("Preparing dmz", bold=True))
    with settings(hide(*to_hide)):
        with lcd(os.path.join(icc_root, "dmz")):
            dmz_all_filename = os.path.join("dmz", "dmz_all.cpp")
            with open(dmz_all_filename) as f:
                old_dmz_all = f.read()
            local("fab concat")
            with open(dmz_all_filename) as f:
                new_dmz_all = f.read()
            if old_dmz_all != new_dmz_all:
                print(
                    colors.red("WARNING: dmz_all.h was not up to date!",
                               bold=True))

    print(colors.white("Building", bold=True))
    print(colors.white("Using temp dir {temp_dir}".format(**locals())))
    print(
        colors.white(
            "Using extra Xcode flags: {formatted_xcode_preprocessor_flags}".
            format(**locals())))

    with lcd(icc_root):

        with settings(hide(*to_hide)):
            lipo_build_dirs = {}
            build_config = "Release"
            arch_build_dirs = {}
            for arch, sdk in arch_to_sdk:
                print(
                    colors.blue(
                        "({build_config}) Building {arch}".format(**locals())))

                base_xcodebuild_command = "xcrun xcodebuild -target CardIO -arch {arch} -sdk {sdk} -configuration {build_config}".format(
                    **locals())

                clean_cmd = "{base_xcodebuild_command} clean".format(
                    **locals())
                local(clean_cmd)

                build_dir = os.path.join(temp_dir, build_config, arch)
                arch_build_dirs[arch] = build_dir
                os.makedirs(build_dir)
                parallelize = "" if env.verbose else "-parallelizeTargets"  # don't parallelize verbose builds, it's hard to read the output
                build_cmd = "{base_xcodebuild_command} {parallelize} CONFIGURATION_BUILD_DIR={build_dir}  {extra_xcodebuild_settings}".format(
                    **locals())
                local(build_cmd)

            print(colors.blue("({build_config}) Lipoing".format(**locals())))
            lipo_dir = os.path.join(temp_dir, build_config, "universal")
            lipo_build_dirs[build_config] = lipo_dir
            os.makedirs(lipo_dir)
            arch_build_dirs["universal"] = lipo_dir
            # in Xcode 4.5 GM, xcrun selects the wrong lipo to use, so circumventing xcrun for now :(
            lipo_cmd = "`xcode-select -print-path`/Platforms/iPhoneOS.platform/Developer/usr/bin/lipo " \
                       "           {armv7}/{libname}" \
                       "           -arch armv7s {armv7s}/{libname}" \
                       "           -arch arm64 {arm64}/{libname}" \
                       "           -arch i386 {i386}/{libname}" \
                       "           -arch x86_64 {x86_64}/{libname}" \
                       "           -create" \
                       "           -output {universal}/{libname}".format(libname=env.libname, **arch_build_dirs)
            local(lipo_cmd)

            print(
                colors.blue("({build_config}) Stripping debug symbols".format(
                    **locals())))
            strip_cmd = "xcrun strip -S {universal}/{libname}".format(
                libname=env.libname, **arch_build_dirs)
            local(strip_cmd)

            out_subdir_suffix = "_".join("{k}-{v}".format(k=k, v=v)
                                         for k, v in kwargs.iteritems())
            if out_subdir_suffix:
                out_subdir_suffix = "_" + out_subdir_suffix
            out_subdir += out_subdir_suffix
            sdk_dir = os.path.join(outdir, out_subdir)

            print(
                colors.white("Assembling release SDK in {sdk_dir}".format(
                    sdk_dir=sdk_dir),
                             bold=True))
            if os.path.isdir(sdk_dir):
                shutil.rmtree(sdk_dir)
            cardio_dir = os.path.join(sdk_dir, "CardIO")
            os.makedirs(cardio_dir)

            header_files = glob.glob(os.path.join("CardIO_Public_API", "*.h"))
            _copy(header_files, cardio_dir)

            libfile = os.path.join(lipo_build_dirs["Release"], env.libname)
            shutil.copy2(libfile, cardio_dir)

            release_dir = os.path.join(icc_root, "Release")
            shutil.copy2(os.path.join(release_dir, "release_notes.txt"),
                         sdk_dir)
            shutil.copy2(os.path.join(release_dir, "CardIO.podspec"), sdk_dir)
            shutil.copy2(os.path.join(release_dir, "acknowledgments.md"),
                         sdk_dir)
            shutil.copy2(os.path.join(release_dir, "LICENSE.md"), sdk_dir)
            shutil.copy2(os.path.join(release_dir, "README.md"), sdk_dir)
            shutil.copytree(os.path.join(release_dir, "SampleApp"),
                            os.path.join(sdk_dir, "SampleApp"),
                            ignore=shutil.ignore_patterns(".DS_Store"))
            shutil.copytree(os.path.join(release_dir, "SampleApp-Swift"),
                            os.path.join(sdk_dir, "SampleApp-Swift"),
                            ignore=shutil.ignore_patterns(".DS_Store"))
Example #55
def require_isoinstall():
    """
    check if run in the ISO installation
    """
    if system.get_hostname() != "archiso":
        abort("You seem not execute this script on iso install")
Example #56
def awesome_deploy(confirm="yes", resume='no', offline='no'):
    """Preindex and deploy if it completes quickly enough, otherwise abort
    fab <env> deploy:confirm=no  # do not confirm
    fab <env> deploy:resume=yes  # resume from previous deploy
    fab <env> deploy:offline=yes  # offline deploy
    """
    _require_target()
    if strtobool(confirm) and (
            not _confirm_translated() or not console.confirm(
                'Are you sure you want to preindex and deploy to '
                '{env.environment}?'.format(env=env),
                default=False)):
        utils.abort('Deployment aborted.')

    if resume == 'yes':
        try:
            cached_payload = retrieve_cached_deploy_env()
            checkpoint_index = retrieve_cached_deploy_checkpoint()
        except Exception:
            print red('Unable to resume deploy, please start anew')
            raise
        env.update(cached_payload)
        env.resume = True
        env.checkpoint_index = checkpoint_index or 0
        print magenta('You are about to resume the deploy in {}'.format(
            env.code_root))

    if datetime.datetime.now().isoweekday() == 5:
        warning_message = 'Friday'
    else:
        warning_message = ''

    env.offline = offline == 'yes'

    if env.offline:
        print magenta('You are about to run an offline deploy. '
                      'Ensure that you have run `fab prepare_offline_deploy`.')
        offline_ops.check_ready()
        if not console.confirm(
                'Are you sure you want to do an offline deploy?',
                default=False):
            utils.abort('Task aborted')

        # Force ansible user and prompt for password
        env.user = '******'
        env.password = getpass('Enter the password for the ansible user: ')
        print('')
        print('┓┏┓┏┓┃')
        print('┛┗┛┗┛┃\○/')
        print('┓┏┓┏┓┃  /      ' + warning_message)
        print('┛┗┛┗┛┃ノ)')
        print('┓┏┓┏┓┃         deploy,')
        print('┛┗┛┗┛┃')
        print('┓┏┓┏┓┃         good')
        print('┛┗┛┗┛┃')
        print('┓┏┓┏┓┃         luck!')
        print('┃┃┃┃┃┃')
        print('┻┻┻┻┻┻')

    _deploy_without_asking()
Example #57
def _ensure_venv():
    if "VIRTUAL_ENV" not in os.environ:
        abort("No active virtualenv found. Please create / activate one before continuing.")
Example #58
def build_task():
    """
    obi build
    """
    with env.cd(env.project_dir):
        user_specified_build = env.config.get("build-cmd", None)
        if "build-cmd" in env.config:
            if user_specified_build:
                env.run(user_specified_build)
        else:
            if 'meson-args' in env.config:
                # Arguments for the meson step
                meson_args = env.config.get("meson-args", [])
                meson_args = ' '.join(map(shlexquote, meson_args))
                sentinel_hash = hashlib.sha256(meson_args).hexdigest()
            elif 'cmake-args' in env.config:
                # Arguments for the cmake step
                cmake_args = env.config.get("cmake-args", [])
                # workaround for naughty old templates
                buggy_arg = '-G "Unix Makefiles"'
                if buggy_arg in cmake_args:
                    cmake_args.remove(buggy_arg)
                    nag = 'Buggy cmake-arg {} detected in config and ignored.\n' \
                          'To avoid seeing this message, remove this entry from ' \
                          'cmake-args in project.yaml.'.format(buggy_arg)
                    print('!!!!!!!!!!!!!!!!!')
                    print('!!! BEGIN NAG !!!')
                    print('!!!!!!!!!!!!!!!!!')
                    print(nag)
                    print('(Sleeping to give you time to read this)')
                    time.sleep(4.20)
                    print('!!!!!!!!!!!!!!!!!')
                    print('!!!  END NAG  !!!')
                    print('!!!!!!!!!!!!!!!!!')
                cmake_args = ' '.join(map(shlexquote, cmake_args))
                sentinel_hash = hashlib.sha256(cmake_args).hexdigest()
            else:
                abort(
                    "Neither meson-args nor cmake-args were set in project.yaml"
                )
            # Arguments for the build step
            build_args = env.config.get("build-args", [])
            if len(build_args) == 1 and re.match(r"^-(j|l)\d+ -(j|l)\d+$",
                                                 build_args[0]):
                build_args = build_args[0].split(" ")
            build_args = " ".join(map(shlexquote, build_args))
            env.run("mkdir -p {0}".format(shlexquote(env.build_dir)))
            # If running cmake or meson succeeds, we make a file in the build directory
            # to signal to future obi processes that they don't need to re-run
            # cmake or meson (unless *-args, and therefore sentinel_hash, changes).
            # See issue #38 and issue #120
            sentinel_path = env.build_dir + "/hello-obi.txt"
            # this is a work-around for an apple bug to filter out the resulting
            # ugly linker warnings:
            # See issue 150 or:
            # https://forums.developer.apple.com/thread/97850
            warning_filter = "ld: warning: text-based stub file"
            # translation from shell to pseudocode:
            #   * if the contents of SENTINEL_PATH match SENTINEL_HASH, do nothing
            #   * else if meson-args, run meson with meson-args and write SENTINEL_HASH to SENTINEL_PATH
            #   * else, run cmake with cmake-args and write SENTINEL_HASH to SENTINEL_PATH
            # Note: meson needs to be given some hints about how to find
            # a) itself, b) g-speak, and c) boost
            # Hence prepending obi_extra_path to PATH, and invoking meson with obenv.
            if 'meson-args' in env.config:
                env.run(
                    "test $(cat {sentinel_path} 2>/dev/null || echo definitelynotashahash) = {sentinel_hash} "\
                    "  || (PATH={obi_extra_path}:$PATH; obenv meson {build_dir} {meson_args} && " \
                    "      echo {sentinel_hash} > {sentinel_path})".format(
                        project_dir=shlexquote(env.project_dir),
                        build_dir=shlexquote(env.build_dir),
                        obi_extra_path = env.obi_extra_path,
                        meson_args=meson_args,
                        sentinel_path=shlexquote(sentinel_path),
                        sentinel_hash=sentinel_hash))
                env.run(
                    "set -o pipefail; ninja -C {0} {1} 2>&1 | grep -v '{2}'".
                    format(shlexquote(env.build_dir), build_args,
                           warning_filter),
                    shell="/bin/bash")
            elif 'cmake-args' in env.config:
                env.run(
                    "test $(cat {sentinel_path} 2>/dev/null || echo definitelynotashahash) = {sentinel_hash} "\
                    "  || (cmake -H{project_dir} -B{build_dir} {cmake_args} && " \
                    "      echo {sentinel_hash} > {sentinel_path})".format(
                        project_dir=shlexquote(env.project_dir),
                        build_dir=shlexquote(env.build_dir),
                        cmake_args=cmake_args,
                        sentinel_path=shlexquote(sentinel_path),
                        sentinel_hash=sentinel_hash))
                env.run(
                    "set -o pipefail; cmake --build {0} -- {1} 2>&1 | grep -v '{2}'"
                    .format(shlexquote(env.build_dir), build_args,
                            warning_filter),
                    shell="/bin/bash")
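
The configure-step sentinel in isolation, as a plain local sketch (the file name comes from the code above; this is the idea, not obi's implementation):

import hashlib
import os

def needs_configure(build_dir, args_str):
    # Re-run meson/cmake only if the hash of the current *-args differs
    # from what the sentinel recorded on the last successful configure.
    sentinel_path = os.path.join(build_dir, "hello-obi.txt")
    digest = hashlib.sha256(args_str).hexdigest()
    try:
        with open(sentinel_path) as f:
            return f.read().strip() != digest
    except IOError:
        return True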
Example #59
    def __init__(self, name, db_password, db_local='fr_FR.UTF8',
                 is_free=False, chaos_database=None, rt_topics=[],
                 zmq_socket_port=None, db_name=None, db_user=None, source_dir=None,
                 enable_realtime=False, realtime_proxies=[], street_network=None, ridesharing=None,
                 cache_raptor=None, zmq_server=None,
                 kraken_threads=None, autocomplete=None, kraken_prometheus_port=None):
        self.name = name
        self.db_password = db_password
        self.is_free = is_free
        self.zmq_port = zmq_socket_port
        if kraken_prometheus_port == 'auto' and self.zmq_port:
            self.kraken_prometheus_port = self.zmq_port + env.kraken_prometheus_shift
        else:
            self.kraken_prometheus_port = kraken_prometheus_port

        if env.use_zmq_socket_file:
            self.kraken_zmq_socket = 'ipc://{kraken_dir}/{instance}/kraken.sock'.format(
                kraken_dir=env.kraken_basedir, instance=self.name)
            self.jormungandr_zmq_socket_for_instance = self.kraken_zmq_socket
            self.zmq_server, self.kraken_engines = None, list(env.roledefs['eng'])
        elif zmq_socket_port:
            env_zmq_server = getattr(env, 'zmq_server', None)
            if zmq_server:
                if isinstance(zmq_server, basestring):
                    if zmq_server == 'localhost':
                        self.zmq_server = zmq_server
                        self.kraken_engines = list(env.roledefs['ws'])
                    else:
                        self.kraken_engines = [env.make_ssh_url(zmq_server)]
                        self.zmq_server = env_zmq_server or zmq_server
                elif env_zmq_server:  # zmq_server is a list
                    self.zmq_server, self.kraken_engines = env_zmq_server, env.make_ssh_url(zmq_server)
                else:
                    abort('Platform configuration file must include a env.zmq_server specification '
                          '(see fabfile.env.platforms for some instructions)')
            elif env_zmq_server:
                self.zmq_server, self.kraken_engines = env_zmq_server, list(env.roledefs['eng'])
            else:
                abort('Platform configuration file must include a env.zmq_server specification '
                      '(see fabfile.env.platforms for some instructions)')
            self.kraken_zmq_socket = 'tcp://*:{port}'.format(port=zmq_socket_port)
            self.jormungandr_zmq_socket_for_instance = 'tcp://{server}:{port}'.format(
                server=self.zmq_server, port=zmq_socket_port)
        else:
            abort('Instance configuration must include a zmq_socket_port, aborting '
                  '(see fabfile.env.platforms for some instructions)')

        if not kraken_threads:
            self.kraken_nb_threads = env.KRAKEN_NB_THREADS
        else:
            self.kraken_nb_threads = kraken_threads
        self.db_local = db_local
        self.chaos_database = chaos_database
        self.rt_topics = rt_topics
        # postgres doesn't like dash, replace them by underscore
        self.db_name = db_name if db_name else 'ed_' + self.name.replace('-', '_')
        self.db_user = db_user if db_user else 'ed_' + self.name.replace('-', '_')
        self._source_dir = source_dir if source_dir != 'auto' else '/srv/ed/source/{}/{}/FUSIO/EXPORT/'.\
            format(self.name.upper(), (getattr(env, 'fusio_name', None) or env.name).upper())
        self.enable_realtime = enable_realtime
        self.realtime_proxies = realtime_proxies
        self.cache_raptor = cache_raptor
        self.street_network = street_network
        self.autocomplete = autocomplete
        self.ridesharing = ridesharing
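
An illustrative instantiation (the class name and all values are assumptions based on the signature above), showing the 'auto' prometheus-port behavior:

instance = Instance(                  # hypothetical class name
    name='fr-idf',
    db_password='secret',
    zmq_socket_port=30001,
    kraken_prometheus_port='auto',    # resolves to 30001 + env.kraken_prometheus_shift
)
# db_name and db_user default to 'ed_fr_idf' (dashes replaced by underscores).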
Example #60
def require(*keys, **kwargs):
    """
    Check for given keys in the shared environment dict and abort if not found.

    Positional arguments should be strings signifying what env vars should be
    checked for. If any of the given arguments do not exist, Fabric will abort
    execution and print the names of the missing keys.

    The optional keyword argument ``used_for`` may be a string, which will be
    printed in the error output to inform users why this requirement is in
    place. ``used_for`` is printed as part of a string similar to::

        "Th(is|ese) variable(s) (are|is) used for %s"

    so format it appropriately.

    The optional keyword argument ``provided_by`` may be a list of functions or
    function names or a single function or function name which the user should
    be able to execute in order to set the key or keys; it will be included in
    the error output if requirements are not met.

    Note: it is assumed that the keyword arguments apply to all given keys as a
    group. If you feel the need to specify more than one ``used_for``, for
    example, you should break your logic into multiple calls to ``require()``.

    .. versionchanged:: 1.1
        Allow iterable ``provided_by`` values instead of just single values.
    """
    # If all keys exist and are non-empty, we're good, so keep going.
    missing_keys = list(filter(lambda x: x not in env or (x in env and
        isinstance(env[x], (dict, list, tuple, set)) and not env[x]), keys))
    if not missing_keys:
        return
    # Pluralization
    if len(missing_keys) > 1:
        variable = "variables were"
        used = "These variables are"
    else:
        variable = "variable was"
        used = "This variable is"
    # Regardless of kwargs, print what was missing. (Be graceful if used outside
    # of a command.)
    if 'command' in env:
        prefix = "The command '%s' failed because the " % env.command
    else:
        prefix = "The "
    msg = "%sfollowing required environment %s not defined:\n%s" % (
        prefix, variable, indent(missing_keys)
    )
    # Print used_for if given
    if 'used_for' in kwargs:
        msg += "\n\n%s used for %s" % (used, kwargs['used_for'])
    # And print provided_by if given
    if 'provided_by' in kwargs:
        funcs = kwargs['provided_by']
        # non-iterable is given, treat it as a list of this single item
        if not hasattr(funcs, '__iter__'):
            funcs = [funcs]
        if len(funcs) > 1:
            command = "one of the following commands"
        else:
            command = "the following command"
        to_s = lambda obj: getattr(obj, '__name__', str(obj))
        provided_by = [to_s(obj) for obj in funcs]
        msg += "\n\nTry running %s prior to this one, to fix the problem:\n%s"\
            % (command, indent(provided_by))
    abort(msg)
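
A usage sketch for ``require`` (the task and key names are hypothetical):

from fabric.api import env, require, task

@task
def deploy():
    # Aborts with the message constructed above unless both keys are set,
    # pointing users at staging/production as the tasks that provide them.
    require('hosts', 'code_root',
            used_for='deploying the application',
            provided_by=['staging', 'production'])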