Example No. 1
 def _inject_utils(self):
     self._make_asset_bucket()
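     # Each util entry: [parameter to look for, lambda function directory, injected logical-ID suffix,
     # parameter whose 'Auto' default triggers injection, condition name used in the !If rewrite]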
     for util in [
         ['CidrBlocks', 'get_cidrs', 'GetCidrs', 'CidrBlocks', 'AutoCidrs'],
         ['NumberOfAvailabilityZones', 'get_azs', 'GetAzs', 'AvailabilityZones', 'AutoAzs'],
         ['MasterUserPassword', 'generate_password', 'GeneratePassword', 'MasterUserPassword', 'AutoPassword'],
         ['DBName', 'generate_dbname', 'GenerateDBName', 'DBName', 'AutoDBName'],
         ['EMRClusterName', 'generate_emrname', 'GenerateEMRClusterName', 'EMRClusterName', 'AutoEMRClusterName'],
         ['EMRCidr', 'get_emrcidrs', 'GetEMRCidr', 'EMRCidr', 'AutoEMRCidr']
     ]:
         if util[0] in self.template['Parameters']:
             if self.template['Parameters'][util[3]]['Default'] == 'Auto':
                 with open(os.path.dirname(os.path.abspath(__file__)) + "/functions/%s/template.snippet" % util[1], 'r') as stream:
                     snippet = CFNYAMLHandler.ordered_safe_load(stream)
                 if not os.path.isfile(os.path.dirname(os.path.abspath(__file__)) + "/functions/%s/requirements.txt" % util[1]):
                     with open(os.path.dirname(os.path.abspath(__file__)) + "/functions/%s/lambda_function.py" % util[1], 'r') as stream:
                         function_code = stream.read()
                     snippet['Resources']['AWSSBInjected%sLambda' % util[2]]['Properties']['Code']['ZipFile'] = function_code
                 else:
                     self._inject_copy_zips()
                     bucket, key = self._publish_lambda_zip(os.path.dirname(os.path.abspath(__file__)) + "/functions/%s/" % util[1], util[1])
                     snippet['Resources']['AWSSBInjected%sLambda' % util[2]]['Properties']['Code']['S3Bucket'] = '!Ref AWSSBInjectedLambdaZipsBucket'
                     snippet['Resources']['AWSSBInjected%sLambda' % util[2]]['Properties']['Code']['S3Key'] = key
                     snippet['Resources']['AWSSBInjected%sLambda' % util[2]]['Properties']['Handler'] = 'lambda_function.handler'
                     snippet['Resources']['AWSSBInjected%sLambda' % util[2]]['Properties']['Code'].pop('ZipFile')
                     self.template['Resources']['AWSSBInjectedCopyZips']['Properties']['Objects'].append(util[1] + '/lambda_function.zip')
                 temp_template = CFNYAMLHandler.ordered_safe_dump(self.template, default_flow_style=False).replace(
                     "!Ref %s" % util[3],
                     "!If [ %s, !GetAtt AWSSBInjected%s.%s, !Ref %s ]" % (util[4], util[2], util[3], util[3])
                 )
                 self.template = CFNYAMLHandler.ordered_safe_load(temp_template)
                 self.template['Resources'] = OrderedDict({**self.template['Resources'], **snippet['Resources']})
                 self.template['Conditions'] = OrderedDict({**self.template['Conditions'], **snippet['Conditions']})
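
For clarity, a minimal standalone sketch (not part of the class above) of the text-level rewrite _inject_utils performs: after the helper Lambda snippet is injected, every "!Ref <Parameter>" in the dumped template is wrapped in an "!If" that prefers the helper's generated value when the parameter was left at its 'Auto' default. The literals below mirror the first util entry; the input line is illustrative.

    # Illustrative only: mirrors the replace() applied to the dumped template text.
    template_text = "VpcCidr: !Ref CidrBlocks"
    rewritten = template_text.replace(
        "!Ref CidrBlocks",
        "!If [ AutoCidrs, !GetAtt AWSSBInjectedGetCidrs.CidrBlocks, !Ref CidrBlocks ]"
    )
    print(rewritten)
    # VpcCidr: !If [ AutoCidrs, !GetAtt AWSSBInjectedGetCidrs.CidrBlocks, !Ref CidrBlocks ]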
Example No. 2
 def _inject_iam(self, policies=None):
     with open(os.path.dirname(os.path.abspath(__file__)) + "/functions/create_keypair/template.snippet", 'r') as stream:
         snippet = CFNYAMLHandler.ordered_safe_load(stream)
     with open(os.path.dirname(os.path.abspath(__file__)) + "/functions/create_keypair/lambda_function.py", 'r') as stream:
         function_code = stream.read()
     snippet['Resources']['AWSSBInjectedIAMUserLambda']['Properties']['Code']['ZipFile'] = function_code
     policy_template = snippet['Resources'].pop('AWSSBInjectedIAMUserPolicy')
     policy_arns = []
     if policies:
         pnum = 0
         for policy in policies:
             if type(policy) in [dict, OrderedDict]:
                 pnum += 1
                 pname = 'AWSSBInjectedIAMUserPolicy%s' % str(pnum)
                 p = copy.deepcopy(policy_template)
                 p['Properties']['PolicyName'] = pname
                 p['Properties']['PolicyDocument'] = policy['PolicyDocument']
                 snippet['Resources'][pname] = p
             elif policy.startswith('arn:aws:iam'):
                 policy_arns.append(policy)
         if policy_arns:
             snippet['Resources']['AWSSBInjectedIAMUser']['Properties']['ManagedPolicyArns'] = policy_arns
         else:
             snippet['Resources']['AWSSBInjectedIAMUser'].pop('Properties')
     if 'Resources' not in self.template:
         self.template['Resources'] = {}
     if 'Outputs' not in self.template:
         self.template['Outputs'] = {}
     self.template['Resources'] = OrderedDict({**self.template['Resources'], **snippet['Resources']})
     self.template['Outputs'] = OrderedDict({**self.template['Outputs'], **snippet['Outputs']})
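
A quick standalone illustration of the merge pattern used at the end of _inject_iam (and throughout this file): unpacking both mappings into a new OrderedDict keeps the template's existing keys first and appends the injected snippet's keys after them (insertion order is preserved on Python 3.7+).

    from collections import OrderedDict

    template_resources = OrderedDict([('ExistingQueue', {'Type': 'AWS::SQS::Queue'})])
    snippet_resources = OrderedDict([('AWSSBInjectedIAMUser', {'Type': 'AWS::IAM::User'})])
    merged = OrderedDict({**template_resources, **snippet_resources})
    print(list(merged))  # ['ExistingQueue', 'AWSSBInjectedIAMUser']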
Example No. 3
 def _inject_copy_zips(self):
     self._make_asset_bucket()
     if 'AWSSBInjectedCopyZips' not in self.template['Resources'].keys():
         with open(
                 os.path.dirname(os.path.abspath(__file__)) +
                 "/functions/copy_zips/template.snippet", 'r') as stream:
             snippet = CFNYAMLHandler.ordered_safe_load(stream)
         with open(
                 os.path.dirname(os.path.abspath(__file__)) +
                 "/functions/copy_zips/lambda_function.py", 'r') as stream:
             function_code = stream.read()
         snippet['Resources']['AWSSBInjectedCopyZipsLambda']['Properties'][
             'Code']['ZipFile'] = function_code
         p = snippet['Resources']['AWSSBInjectedCopyZipsRole'][
             'Properties']['Policies']
         p[0]['PolicyDocument']['Statement'][0]['Resource'][0] = p[0][
             'PolicyDocument']['Statement'][0]['Resource'][0].replace(
                 '${SourceBucketName}',
                 self.bucket_name).replace('${KeyPrefix}', self.key_prefix)
         p[0]['PolicyDocument']['Statement'][1]['Resource'][0] = p[0][
             'PolicyDocument']['Statement'][1]['Resource'][0].replace(
                 '${KeyPrefix}', self.key_prefix)
         snippet['Resources']['AWSSBInjectedCopyZips']['Properties'][
             'SourceBucket'] = self.bucket_name
         snippet['Resources']['AWSSBInjectedCopyZips']['Properties'][
             'Prefix'] = self.key_prefix + 'functions/'
         self.template['Resources'] = OrderedDict({
             **self.template['Resources'],
             **snippet['Resources']
         })
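
The resource ARNs in the copy_zips role policy ship with ${SourceBucketName} and ${KeyPrefix} placeholders, which the method above fills in from the packager's asset bucket and key prefix. A minimal sketch with made-up values (the real ARN shape lives in the template.snippet file):

    resource = 'arn:aws:s3:::${SourceBucketName}/${KeyPrefix}functions/*'  # illustrative ARN
    bucket_name = 'my-asset-bucket'   # stands in for self.bucket_name
    key_prefix = 'my-service/'        # stands in for self.key_prefix
    resource = resource.replace('${SourceBucketName}', bucket_name).replace('${KeyPrefix}', key_prefix)
    print(resource)  # arn:aws:s3:::my-asset-bucket/my-service/functions/*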
Example No. 4
 def _upload_template(self):
     for k in list(self.template.keys()):
         if not self.template[k]:
             self.template.pop(k)
     tpl = CFNYAMLHandler.ordered_safe_dump(self.template, default_flow_style=False)
     key = os.path.join(self.key_prefix, 'templates/%s/template.yaml' % self.service_name)
     self.s3_client.put_object(Body=tpl, Bucket=self.bucket_name, Key=key, ACL=self.s3acl)
     return self.bucket_name, key
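
The same prune-then-upload flow, sketched with plain boto3 and PyYAML and hard-coded stand-ins for the class attributes (bucket, key prefix, service name, ACL); values are illustrative only.

    import boto3
    import yaml

    template = {'Resources': {'Topic': {'Type': 'AWS::SNS::Topic'}}, 'Outputs': {}}
    template = {k: v for k, v in template.items() if v}  # drop empty top-level sections
    body = yaml.safe_dump(template, default_flow_style=False)

    s3 = boto3.client('s3')
    s3.put_object(Body=body, Bucket='my-asset-bucket',
                  Key='my-service/templates/my-service/template.yaml', ACL='private')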
Example No. 5
 def _fetch_contents(filename):
     """Loads the template to inspect"""
     with open(filename) as f:
         tfdata = f.read()
     stripped_tfdata = tfdata.strip()
     if stripped_tfdata[0] in ['{', '['] and stripped_tfdata[-1] in ['}', ']']:
         filetype = 'json'
         loaded_template_data = json.loads(
             tfdata, object_pairs_hook=collections.OrderedDict)
     else:
         filetype = 'yaml'
         loaded_template_data = cfy.ordered_safe_load(
             tfdata, object_pairs_hook=collections.OrderedDict)
     return filetype, loaded_template_data, tfdata
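
Hypothetical usage of _fetch_contents: the caller gets back how the template was serialized, the parsed data (with key order preserved), and the raw text for later string-level rewrites. The path below is illustrative.

    filetype, data, raw = _fetch_contents('templates/example/template.yaml')
    print(filetype)           # 'json' or 'yaml'
    print(list(data.keys()))  # e.g. ['AWSTemplateFormatVersion', 'Resources', 'Outputs']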
Example No. 6
    def __init__(self, template_path=None, service_spec_path=None):
        """
        Initialise the class, optionally providing paths for the template and a separate service spec. If
        service_spec_path is not specified, we'll look for the spec in the template Metadata.

        :param template_path:
        :param service_spec_path:
        """
        self.template = {}
        self.service_spec = {}
        if template_path:
            self.template_path = os.path.dirname(template_path)
            with open(template_path, 'r') as stream:
                self.template = CFNYAMLHandler.ordered_safe_load(stream)
            if not service_spec_path:
                self.service_spec = self.template['Metadata']['AWS::ServiceBroker::Specification']
        if service_spec_path:
            with open(service_spec_path, 'r') as stream:
                self.service_spec = yaml.safe_load(stream)
        if not self.service_spec:
            raise Exception("cannot continue without either a ['Metadata']['AWS::ServiceBroker::Specification'] section in the template, or a path to a seperate spec using service_spec_path")
Example No. 7
    def create_apb_skeleton(self,
                            apb_spec,
                            prescribed_parameters,
                            bindings,
                            template,
                            service_name,
                            build_path=None):
        if build_path:
            os.makedirs(build_path, exist_ok=True)
            tmpname = os.path.join(build_path, "%s" % service_name)
            os.makedirs(os.path.join(build_path, "%s" % service_name),
                        exist_ok=True)
        else:
            tmpname = '/tmp/AWSSB-' + str(
                b64encode(bytes(str(random()), 'utf8'))).replace(
                    "b'", '').replace("'", '').replace('=', '')
            os.makedirs(tmpname)
        print("build path: %s" % tmpname)
        shutil.copytree(
            os.path.dirname(os.path.abspath(__file__)) + '/data/apb_template/',
            tmpname + '/apb')
        for dname, dirs, files in os.walk(tmpname):
            for fname in files:
                fpath = os.path.join(dname, fname)
                if not fname.endswith('.zip'):
                    with open(fpath) as f:
                        s = f.read()
                    s = s.replace("${SERVICE_NAME}", service_name).replace(
                        "${SERVICE_NAME_UPPER}",
                        service_name.upper()).replace('${CREATE_IAM_USER}',
                                                      str(bindings['IAMUser']))
                    with open(fpath, "w") as f:
                        f.write(s)
        for plan in prescribed_parameters.keys():
            prescribed_parameters[plan][
                'params_string'] = "{{ namespace }}::{{ _apb_plan_id }}::{{ _apb_service_class_id }}::{{ _apb_service_instance_id }}"
            prescribed_parameters[plan][
                'params_hash'] = "{{ params_string | checksum }}"
            with open(
                    tmpname +
                    '/apb/roles/aws-provision-apb/vars/%s.yml' % plan,
                    "w") as f:
                f.write(
                    CFNYAMLHandler.ordered_safe_dump(
                        prescribed_parameters[plan], default_flow_style=False))
            shutil.copy(
                tmpname + '/apb/roles/aws-provision-apb/vars/%s.yml' % plan,
                tmpname + '/apb/roles/aws-deprovision-apb/vars/%s.yml' % plan)
        with open(tmpname + '/apb/apb.yml', "w") as f:
            f.write(
                CFNYAMLHandler.ordered_safe_dump(apb_spec,
                                                 default_flow_style=False))
        with open(tmpname +
                  '/apb/roles/aws-provision-apb/tasks/main.yml') as f:
            main_provision_task = yaml.safe_load(f)
        create_user = False
        try:
            create_user = template['Metadata'][
                'AWS::ServiceBroker::Specification']['Bindings']['IAM'][
                    'AddKeypair']
        except KeyError as e:
            pass
        for t in main_provision_task:
            if 'name' in t.keys():
                if t['name'] == 'Encode bind credentials':
                    if not create_user:
                        aws_key_id = '%s_AWS_ACCESS_KEY_ID' % service_name
                        aws_key = '%s_AWS_SECRET_ACCESS_KEY' % service_name
                        t['asb_encode_binding']['fields'].pop(
                            aws_key_id.upper())
                        t['asb_encode_binding']['fields'].pop(aws_key.upper())

                    for b in bindings['CFNOutputs']:
                        t['asb_encode_binding']['fields'][camel_convert(
                            b).upper()] = "{{ cfn.stack_outputs.%s }}" % b
            elif 'block' in t.keys():
                for it in t['block']:
                    if it['name'] == 'Create Resources':
                        if 'Parameters' in template.keys():
                            for p in template['Parameters'].keys():
                                default = ""
                                if 'Default' in template['Parameters'][p].keys(
                                ):
                                    default = template['Parameters'][p][
                                        'Default']
                                it['cloudformation']['template_parameters'][
                                    p] = '{{ %s | default("%s") | string }}' % (
                                        p, default)
        with open(tmpname + '/apb/roles/aws-provision-apb/tasks/main.yml',
                  'w') as f:
            f.write(
                CFNYAMLHandler.ordered_safe_dump(main_provision_task,
                                                 default_flow_style=False))
        with open(tmpname + '/template.yaml', 'w') as f:
            f.write(
                CFNYAMLHandler.ordered_safe_dump(template,
                                                 default_flow_style=False))
        return tmpname
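
A standalone sketch of the token substitution create_apb_skeleton applies to every non-zip file it copies into the skeleton; the input text and values below are illustrative only.

    s = 'name: ${SERVICE_NAME}\ndisplayName: ${SERVICE_NAME_UPPER}\ncreate_iam_user: ${CREATE_IAM_USER}\n'
    s = (s.replace('${SERVICE_NAME}', 'mydatabase')
          .replace('${SERVICE_NAME_UPPER}', 'MYDATABASE')
          .replace('${CREATE_IAM_USER}', 'True'))
    print(s)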
Example No. 8
 def _copy_file(self, in_file, out_file):
     CFNYAMLHandler.validate_output_dir(os.path.split(out_file)[0])
     # copy only if it's a new location for the output
     if in_file != out_file:
         shutil.copyfile(in_file, out_file)
Example No. 9
    def rewrite_only(self):
        """
        This function searches through all the files and rewrites any references of the production S3 bucket name
         to the target S3 bucket name. This is done by both things like line-by-line basic rewrites or walking the
         tree of a JSON or YAML document to find the references.
        """
        # Create file list and recurse if args._input_path is directory
        file_list = self._get_file_list(self._input_path)
        self.logger.info("Files to be worked on:")
        self.logger.info(file_list)

        # Validate output
        if self._output_directory is not None:
            CFNYAMLHandler.validate_output_dir(self._output_directory)

        self.logger.info("Production S3 bucket name that we are looking for [{}]".format(self._prod_bucket_name))
        self.logger.info("Replacement S3 bucket name that we are rewriting with [{}]".format(self._target_bucket_name))
        self.logger.info("Production S3 key prefix that we are looking for [{}]".format(self._prod_key_prefix))
        self.logger.info("Replacement S3 key prefix that we are rewriting with [{}]".format(self._target_key_prefix))

        # Rewrite files
        for current_file in file_list:
            # Determine output file
            if self._output_directory:
                if len(file_list) == 1:
                    output_file = os.path.join(self._output_directory, os.path.basename(current_file))
                else:
                    output_file = os.path.join(self._output_directory, current_file.replace(self._input_path, '', 1).lstrip('\/'))
            else:
                output_file = current_file

            # Load current file
            if self._rewrite_mode != self.BASIC_REWRITE_MODE \
                    and current_file.endswith(tuple(self._TEMPLATE_EXT)) \
                    and os.path.dirname(current_file).endswith('/templates'):
                self.logger.info("Opening file [{}]".format(current_file))
                with open(current_file, 'r', newline=None) as template:
                    template_raw_data = template.read()
                template_raw_data = template_raw_data.strip()

                if template_raw_data[0] in ['{', '['] and template_raw_data[-1] in ['}', ']']:
                    self.logger.info('Detected JSON. Loading file.')
                    FILE_FORMAT = 'JSON'
                    template_data = json.load(open(current_file, 'r', newline=None), object_pairs_hook=OrderedDict)
                else:
                    self.logger.info('Detected YAML. Loading file.')
                    FILE_FORMAT = 'YAML'
                    template_data = CFNYAMLHandler.ordered_safe_load(open(current_file, 'r', newline=None), object_pairs_hook=OrderedDict)

                if FILE_FORMAT in ['JSON', 'YAML']:
                    # Iterate through every top level node.
                    # This was only added in case we need to examine only parts of the template
                    if type(template_data) in [OrderedDict, dict]:
                        for node_key in template_data.keys():
                            self.logger.debug("Working on node [{}]".format(node_key))
                            self._recurse_nodes(template_data[node_key])
                    elif type(template_data) is list:
                        self._recurse_nodes(template_data)
                    else:
                        if self._dry_run:
                            self.logger.warning("[WHAT IF DRY RUN]: [{0}] Unsupported {1} structure. Skipping but copying.".format(current_file, FILE_FORMAT))
                        else:
                            self.logger.warning("[{0}] Unsupported {1} structure. Skipping but copying.".format(current_file, FILE_FORMAT))
                            if current_file != output_file:
                                shutil.copyfile(current_file, output_file)

                    # Write modified template
                    if self._dry_run:
                        self.logger.info("[WHAT IF DRY RUN]: Writing file [{}]".format(output_file))
                    else:
                        self.logger.info("Writing file [{}]".format(output_file))
                        CFNYAMLHandler.validate_output_dir(os.path.split(output_file)[0])
                        with open(output_file, 'w') as updated_template:
                            if FILE_FORMAT == 'JSON':
                                updated_template.write(json.dumps(template_data, indent=4, separators=(',', ': ')))
                            elif FILE_FORMAT == 'YAML':
                                updated_template.write(
                                    CFNYAMLHandler.ordered_safe_dump(template_data, indent=2, allow_unicode=True, default_flow_style=False, explicit_start=True, explicit_end=True))
                else:
                    if self._dry_run:
                        self.logger.warning("[WHAT IF DRY RUN]: [{}] Unsupported file format. Skipping but copying.".format(current_file))
                    else:
                        self.logger.warning("[{}] Unsupported file format. Skipping but copying.".format(current_file))
                        if current_file != output_file:
                            shutil.copyfile(current_file, output_file)
            else:
                self.logger.info("Opening file [{}]".format(current_file))
                try:
                    with open(current_file, 'r', newline=None) as f:
                        file_data = f.readlines()

                    for index, line in enumerate(file_data):
                        file_data[index] = self._string_rewriter(line)

                    # Write modified file
                    if self._dry_run:
                        self.logger.info("[WHAT IF DRY RUN]: Writing file [{}]".format(output_file))
                    else:
                        self.logger.info("Writing file [{}]".format(output_file))
                        CFNYAMLHandler.validate_output_dir(os.path.split(output_file)[0])
                        with open(output_file, 'w') as updated_file:
                            updated_file.writelines(file_data)
                except UnicodeDecodeError:
                    if self._dry_run:
                        self.logger.info("[WHAT IF DRY RUN]: Ran into a (UnicodeDecodeError) problem trying to read the file [{}]. Skipping but copying.".format(current_file))
                    else:
                        self.logger.warning("Ran into a (UnicodeDecodeError) problem trying to read the file [{}]. Skipping but copying.".format(current_file))
                        self._copy_file(current_file, output_file)
                except TaskCatException:
                    raise
                except Exception as e:
                    raise e
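
For the non-template branch above, _string_rewriter is not shown in this excerpt; the sketch below is only an assumption of what the line-by-line rewrite pass boils down to, namely swapping the production bucket name and key prefix for the target ones on each line. Names and paths are illustrative.

    def _string_rewriter(line):
        # Assumed behaviour: straight substring replacement per line.
        return (line.replace('prod-assets-bucket', 'target-assets-bucket')
                    .replace('prod/key/prefix/', 'target/key/prefix/'))

    with open('docs/README.md', newline=None) as f:  # illustrative file
        lines = f.readlines()
    lines = [_string_rewriter(line) for line in lines]
    with open('docs/README.md', 'w') as f:
        f.writelines(lines)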