Example #1
0
def master_client_with_config(config):
    """Build an SSH client connection to the configured master.

    Resolves the master profile's provider, SSH username, hostname and
    identity from *config*, then opens the connection via ``connect``.

    Raises:
        UnknownConfigProvider: if the profile names an unknown provider.
        MissingProfileKey: if ``ssh_username`` is absent from the profile.
        MissingConfigKey: if ``master`` is absent from the config data.
        MissingIdentity: if neither ``ssh_password`` nor a provider
            identity is available.
    """
    profile = config.profile['master']
    identity = None
    hostname = None
    username = None

    # raises UnknownConfigProvider
    provider = config.provider_for_profile(profile)

    # raises KeyError aka MissingProfileKey
    with profile_key_error():
        username = profile['ssh_username']
        log.debug('SSH Client Username: %s', username)

    # raises KeyError aka MissingConfigKey
    with config_key_error():
        hostname = config.data['master']
        log.debug('SSH Client Hostname: %s', hostname)

    # Prefer an explicit ssh_password; fall back to the provider identity.
    identity = profile.get('ssh_password', provider.ssh_identity())
    log.debug('SSH Client Identity: %s', identity)

    if not identity:
        raise MissingIdentity

    return connect(
        hostname=hostname,
        username=username,
        identity=identity)
Example #2
0
    def load_config(self, local_config=None, project_config=None, global_config=None):
        """Load local, project and global config files and merge them.

        Local data wins over project/global data. Raises ConfigNotFound
        when no local configuration can be loaded, and the project key
        error (via config_key_error) when 'project' is missing.
        """
        self.log.debug('Loading configuration')
        env_config = None

        # Fall back to the default local/env config locations.
        if not local_config:
            local_config = os.path.join(self.local_path(), 'config')
            env_config = os.path.join(self.current_env(), 'config')

        self.log.debug('Loading local config: %s', local_config)
        local_data = self.load_file(local_config, env_config)

        if not local_data:
            raise ConfigNotFound

        with config_key_error():
            project = local_data['project']
            self.log.debug('Project name is: %s', project)

        global_config = global_config or os.path.join(self.user_path(), 'config')
        project_config = project_config or os.path.join(
            self.project_path(project), 'config')

        self.log.debug(
            'Loading global and project configs: %s, %s',
            global_config, project_config)

        merged = self.load_file(global_config, project_config)
        merged.update(local_data)

        return merged
Example #3
0
    def __init__(self, data, session=None, profile=None, providers=None):
        """Store configuration state; *data* must contain a 'project' key."""

        # Fail fast (via config_key_error) when 'project' is missing.
        with config_key_error():
            data['project']

        self.data = data
        self.providers = providers if providers is not None else {}
        self.session = session if session is not None else {}
        self.profile = profile if profile is not None else {}
Example #4
0
    def _connect(self, provider):
        """Open an EC2 connection using the provider's credentials."""

        # 'us-east-1' is the fallback when no region is configured.
        cfg_region = provider.get('region', 'us-east-1')

        with config_key_error():
            access_key = provider['id']
            secret_key = provider['key']

            region = ec2.get_region(
                cfg_region,
                aws_access_key_id=access_key,
                aws_secret_access_key=secret_key)

            self.conn = boto.connect_ec2(
                aws_access_key_id=access_key,
                aws_secret_access_key=secret_key,
                region=region)
Example #5
0
    def load_providers(self, provider_config=None):
        """Return provider data from an explicit file, or from the
        project and local provider files (local entries win)."""
        self.log.debug('Loading provider data')

        # An explicit provider file short-circuits the default lookup.
        if provider_config:
            return self.load_file(provider_config)

        providers = {}

        with config_key_error():
            project = self.data['project']

            project_providers = os.path.join(
                self.project_path(project), 'providers')
            providers = self.load_file(project_providers)

        # Local provider entries override project-level ones.
        local_providers = os.path.join(self.local_path(), 'providers')
        providers.update(self.load_file(local_providers))

        return providers
Example #6
0
    def verify_keys(self):
        """Validate the provider's EC2 key settings.

        Raises NeedsEc2Key when no key settings exist at all, and
        KeyNotFound when the private key file or the EC2 key name
        cannot be verified.
        """
        data = self.provider

        # Neither setting present: the caller must set up a key first.
        if 'keyname' not in data and 'private_key' not in data:
            self.log.debug('EC2 key settings not present')
            raise NeedsEc2Key

        with config_key_error():
            ec2_key_name = data['keyname']
            ec2_key_path = os.path.expanduser(data['private_key'])

        if not os.path.isfile(ec2_key_path):
            self.log.error('Unable to locate key at %s', ec2_key_path)
            raise KeyNotFound

        if not self._ec2_key_exists(ec2_key_name):
            self.log.error('Invalid EC2 key name %s', ec2_key_name)
            raise KeyNotFound
Example #7
0
def connect_master(config):
    """Open an interactive SSH session to the configured master."""

    profile = config.profile['master']

    # raises UnknownConfigProvider
    provider = config.provider_for_profile(profile)

    with profile_key_error():
        username = profile['ssh_username']

    with config_key_error():
        hostname = config.data['master']

    identity = provider.ssh_identity()

    log.debug('Opening SSH to %s@%s using identity %s',
        username, hostname, identity)

    # Host-key checking is disabled: master hosts are ephemeral.
    cmd = ('ssh {0}@{1} -i {2} -o UserKnownHostsFile=/dev/null '
           '-o StrictHostKeyChecking=no -o IdentitiesOnly=yes').format(
        username, hostname, identity)

    call(cmd, shell=True)
Example #8
0
def run(config, argv):
    """Sync the current environment's salt states to the master.

    Archives the local states directory, transfers it over SFTP to the
    master, unpacks it into the salt file root and triggers a highstate.
    Writes progress and error messages to stdout; returns early (None)
    on any recoverable problem.
    """
    args = docopt(__doc__, argv=argv)

    # Resolve connection details from the master profile.
    profile = config.profile['master']
    identity = None
    hostname = None
    username = None

    try:
        provider = config.provider_for_profile(profile)
    except UnknownConfigProvider as e:
        sys.stdout.write(
            'Unknown config provider \'{0}\', unable to continue.\n'\
            .format(e.message))
        return

    with profile_key_error():
        username = profile['ssh_username']

    with config_key_error():
        hostname = config.data['master']

    # Prefer an explicit ssh_password; fall back to the provider identity.
    identity = profile.get('ssh_password', provider.ssh_identity())

    if not identity:
        log.error('No identity specified.\n\
Please set ssh_password on the master profile or provide a private_key \
in your provider for this master')
        return

    current_env = config.environment

    if not current_env:
        sys.stdout.write('No environment available.\n')
        sys.stdout.write('Have you run \'cloudseed init env <environment>\'?\n')
        return

    sys.stdout.write('Syncing states for \'{0}\'\n'.format(current_env))

    env_path = Filesystem.local_env_path(current_env)
    states_path = os.path.join(env_path, 'states')

    if not os.path.isdir(states_path):
        # BUG FIX: sys.stdout.write() takes a single string argument;
        # the old code passed a printf-style extra arg and raised TypeError.
        sys.stdout.write(
            'States dir not found at \'{0}\'\n'.format(states_path))
        return

    master_config = salt_master_config(config)
    sys.stdout.write('Archiving contents at \'{0}\'\n'.format(states_path))

    # Build a gzipped tarball of the states dir in a temp file we keep
    # around (delete=False) until the transfer completes.
    tmp = tempfile.NamedTemporaryFile(delete=False)

    archive = tarfile.open(fileobj=tmp, mode='w:gz')
    archive.add(states_path, '.')
    archive.close()

    tmp.close()
    log.debug('Archive created at %s', tmp.name)

    # Default salt file root when the master config doesn't define one.
    try:
        remote_path = master_config['file_roots']['base'][0]
    except KeyError:
        remote_path = '/srv/salt'

    remote_file = os.path.join('/tmp', os.path.basename(tmp.name))

    log.debug('Initializing SSH Client')
    with ssh_client_error():
        ssh_client = ssh.master_client_with_config(config)

    log.debug('Initializing SFTP Client')
    sftp_client = sftp.connect(
        hostname=hostname,
        username=username,
        identity=identity)

    log.debug(
        'Remotely Executing: sudo sh -c "mkdir -p %s; chown -R root:root %s"',
        remote_path, remote_path)

    ssh.run(
        ssh_client,
        'sudo sh -c "mkdir -p {0}; chown -R root:root {0}"'.format(remote_path))

    sys.stdout.write('Transferring archive to \'{0}\'\n'.format(hostname))
    sftp.put(sftp_client, tmp.name, remote_file)
    sys.stdout.write('Unpacking archive to \'{0}\'\n'.format(remote_path))

    # (Typo fix: log message previously said "chwon"; the executed
    # command below has always been "chown".)
    log.debug(
        'Remotely Executing: sudo sh -c "tar -C %s -xvf %s; chown -R root:root %s"',
        remote_path, remote_file, remote_path)

    ssh.run(ssh_client,
        'sudo sh -c "tar -C {0} -xvf {1}; chown -R root:root {0}"'\
        .format(remote_path, remote_file))

    log.debug(
        'Remotely Executing: rm -rf %s',
        remote_file)

    ssh.run(ssh_client,
        'rm -f {0}'\
        .format(remote_file))

    # Local temp archive is no longer needed.
    os.unlink(tmp.name)

    # debugging command only
    ssh.run(ssh_client,
            'sudo sh -c "salt \'master\' state.highstate --async"')

    sys.stdout.write('Sync complete\n')