Example 1
def _update_remote_bvars(stackname, buildvars):
    LOG.info('updating %r with new vars %r', stackname, buildvars)
    # not all projects have a 'revision'
    #ensure(core_utils.hasallkeys(buildvars, ['revision']), "buildvars missing key 'revision'")

    encoded = encode_bvars(buildvars)
    fid = core_utils.ymd(fmt='%Y%m%d%H%M%S')
    cmds = [
        # make a backup
        'if [ -f /etc/build-vars.json.b64 ]; then cp /etc/build-vars.json.b64 /tmp/build-vars.json.b64.%s; fi;' % fid,
    ]
    lmap(sudo, cmds)
    put(StringIO(encoded), "/etc/build-vars.json.b64", use_sudo=True)
    LOG.info("%r updated", stackname)
Example 2
def download_file(stackname,
                  path,
                  destination='.',
                  node=None,
                  allow_missing="False",
                  use_bootstrap_user="False"):
    """Downloads `path` from `stackname` putting it into the `destination` folder, or the `destination` file if it exists and it is a file.

    If `allow_missing` is "True", a non-existent `path` will be skipped without errors.

    If `use_bootstrap_user` is "True", the owner_ssh user will be used for connecting instead of the standard deploy user.

    Boolean arguments are expressed as strings, as this is the idiomatic way of passing them from the command line.
    """
    allow_missing, use_bootstrap_user = lmap(
        strtobool, [allow_missing, use_bootstrap_user])

    @backoff.on_exception(backoff.expo, NetworkError, max_time=60)
    def _download(path, destination):
        with stack_conn(
                stackname,
                username=BOOTSTRAP_USER if use_bootstrap_user else DEPLOY_USER,
                node=node):
            if allow_missing and not remote_file_exists(path):
                return  # skip download
            download(path, destination, use_sudo=True)

    _download(path, destination)
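The @backoff.on_exception decorator retries _download with exponentially growing waits whenever a NetworkError escapes, giving up once 60 seconds have elapsed. The same pattern in isolation, using a built-in exception so the sketch is self-contained:

import backoff

@backoff.on_exception(backoff.expo, ConnectionError, max_time=60)
def fetch():
    # any ConnectionError raised here triggers a retry; waits grow
    # exponentially (1s, 2s, 4s, ...) until 60 seconds have passed
    raise ConnectionError("still flaky")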
Example 3
def parse_loc_list(loc_list):
    "wrangle the list of paths the user gave us. expand if they specify a directory, etc"
    # give the convenient user-form some structure
    p_loc_list = lmap(_parse_loc, loc_list)

    # do some post processing

    def expand_dirs(triple):
        protocol, host, path = triple
        if protocol in ['dir', 'file'] and not os.path.exists(path):
            LOG.warning("could not resolve %r, skipping", path)
            return [None]
        if protocol == 'dir':
            yaml_files = utils.listfiles(path, ['.yaml'])
            return [('file', host, ppath) for ppath in yaml_files]
        return [triple]

    # we don't want dirs, we want files
    p_loc_list = utils.shallow_flatten(map(expand_dirs, p_loc_list))

    # remove any bogus values
    p_loc_list = lfilter(None, p_loc_list)

    # remove any duplicates. can happen when we expand dir => files
    p_loc_list = utils.unique(p_loc_list)

    return p_loc_list
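For illustration, a hypothetical walk-through (the paths, directory contents, and None host are all assumed), with _parse_loc yielding (protocol, host, path) triples:

# input:        ['/etc/projects/', '/etc/projects/elife.yaml']
# parsed:       [('dir', None, '/etc/projects/'),
#                ('file', None, '/etc/projects/elife.yaml')]
# expanded:     [('file', None, '/etc/projects/elife.yaml'),   # from the dir
#                ('file', None, '/etc/projects/elife.yaml')]   # given directly
# deduplicated: [('file', None, '/etc/projects/elife.yaml')]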
Example 4
def write_missing_keypairs_to_s3():
    "uploads any missing ec2 keys to S3 if they're present locally"
    remote_keys = keypair.all_in_s3()
    local_paths = keypair.all_locally()
    local_keys = lmap(os.path.basename, local_paths)

    to_upload = set(local_keys).difference(set(remote_keys))

    print('remote:', remote_keys)
    print('local:', local_keys)
    print('to upload:', to_upload)

    def write(key):
        stackname = os.path.splitext(key)[0]
        keypair.write_keypair_to_s3(stackname)

    lmap(write, to_upload)
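The upload set is a plain set difference: key files present locally but absent from S3. With hypothetical key names:

# local_keys  = ['journal--prod.pem', 'lax--end2end.pem']
# remote_keys = ['journal--prod.pem']
# to_upload   = {'lax--end2end.pem'}
# write() strips the extension, calling keypair.write_keypair_to_s3('lax--end2end')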
Example 5
# `call` closes over `func`: this is the inner wrapper of a decorator
# that resolves a stack name before invoking the wrapped task.
def call(*args, **kwargs):
    ss = core.steady_aws_stacks(utils.find_region())
    keys = lmap(first, ss)
    idx = dict(zip(keys, ss))
    helpfn = lambda pick: idx[pick][1]
    if not keys:
        print('\nno AWS stacks *in a steady state* exist, cannot continue.')
        return
    stackname = first(args) or os.environ.get('INSTANCE')
    if not stackname or stackname not in keys:
        stackname = utils._pick("stack", sorted(keys), helpfn=helpfn, default_file=deffile('.active-stack'))
    return func(stackname, *args[1:], **kwargs)
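A hedged sketch of the plausible enclosing shape (the decorator's name here is an assumption, not the project's actual identifier):

def requires_steady_stack(func):  # hypothetical name
    def call(*args, **kwargs):
        ...  # body exactly as in the example above
    return call

@requires_steady_stack
def restart(stackname):
    ...  # receives a validated stackname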
Example 6
def download_file(stackname, path, destination='.', node=None, allow_missing="False", use_bootstrap_user="False"):
    """
    Downloads `path` from `stackname` putting it into the `destination` folder, or the `destination` file if it exists and it is a file.

    If `allow_missing` is "True", a non-existent `path` will be skipped without errors.

    If `use_bootstrap_user` is "True", the owner_ssh user will be used for connecting instead of the standard deploy user.

    Boolean arguments are expressed as strings, as this is the idiomatic way of passing them from the command line.
    """
    allow_missing, use_bootstrap_user = lmap(strtobool, [allow_missing, use_bootstrap_user])

    @backoff.on_exception(backoff.expo, fabric.exceptions.NetworkError, max_time=60)
    def _download(path, destination):
        with stack_conn(stackname, username=BOOTSTRAP_USER if use_bootstrap_user else DEPLOY_USER, node=node):
            if allow_missing and not files.exists(path):
                return # skip download
            get(path, destination, use_sudo=True)

    _download(path, destination)
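The string-boolean convention described in the docstring leans on strtobool, which maps truthy/falsy strings to 1/0. Assuming the distutils.util implementation or an equivalent local helper (distutils was removed in Python 3.12, so newer code would inline the mapping):

from distutils.util import strtobool

strtobool("True")   # -> 1
strtobool("False")  # -> 0
strtobool("yes")    # -> 1
strtobool("nope")   # raises ValueError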
Example 7
STACK_DIR = join(CFN, "stacks")  # ll: ./.cfn/stacks
CONTEXT_DIR = join(CFN, "contexts")  # ll: ./.cfn/contexts
SCRIPTS_DIR = "scripts"
PRIVATE_DIR = "private"
KEYPAIR_DIR = join(CFN, "keypairs")  # ll: ./.cfn/keypairs
# the .cfn dir was for cloudformation stuff, but we keep keypairs in there too, so this can't hurt
# perhaps a name change from .cfn to .state or something later
TERRAFORM_DIR = join(CFN, "terraform")

STACK_PATH = join(PROJECT_PATH, STACK_DIR)  # "/.../.cfn/stacks/"
CONTEXT_PATH = join(PROJECT_PATH, CONTEXT_DIR)  # "/.../.cfn/contexts/"
KEYPAIR_PATH = join(PROJECT_PATH, KEYPAIR_DIR)  # "/.../.cfn/keypairs/"
SCRIPTS_PATH = join(PROJECT_PATH, SCRIPTS_DIR)  # "/.../scripts/"

# create all necessary paths and ensure they are writable
lmap(utils.mkdir_p,
     [TEMP_PATH, STACK_PATH, CONTEXT_PATH, SCRIPTS_PATH, KEYPAIR_PATH])

# logging

LOG_DIR = "logs"
LOG_PATH = join(PROJECT_PATH, LOG_DIR)  # /.../logs/
LOG_FILE = join(LOG_PATH, "app.log")  # /.../logs/app.log
utils.mkdir_p(LOG_PATH)

FORMAT = logging.Formatter(
    "%(asctime)s - %(levelname)s - %(processName)s - %(name)s - %(message)s")
CONSOLE_FORMAT = logging.Formatter("%(levelname)s - %(name)s - %(message)s")

# http://docs.python.org/2/howto/logging-cookbook.html
ROOTLOG = logging.getLogger()  # important! this is the *root LOG*
# all other LOGs are derived from this one
ROOTLOG.setLevel(logging.DEBUG) # *default* output level for all LOGs
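The snippet defines FORMAT and CONSOLE_FORMAT but does not show handlers being attached. As a minimal sketch, here is how those formatters would typically be wired to the root logger (the handler choices and levels are assumptions, not the module's actual setup):

# file handler: full format, written to logs/app.log
handler = logging.FileHandler(LOG_FILE)
handler.setFormatter(FORMAT)
handler.setLevel(logging.INFO)
ROOTLOG.addHandler(handler)

# console handler: terser format for interactive use
console = logging.StreamHandler()
console.setFormatter(CONSOLE_FORMAT)
console.setLevel(logging.INFO)
ROOTLOG.addHandler(console)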