def run(temp_folder_name):
    """Generate ssh host key pairs in *temp_folder_name* and dump them to
    stdout as a YAML mapping under the ``ssh_keys`` key."""

    def _make_pair(algorithm):
        # ssh-keygen writes the private key to <folder>/<algorithm> and the
        # matching public key alongside it with a ``.pub`` suffix; all of its
        # own output is routed to stderr so stdout stays pure YAML.
        key_path = os.path.join(temp_folder_name, algorithm)
        subprocess.check_call(
            ['ssh-keygen', '-q', '-N', '', '-t', algorithm, '-f', key_path],
            stdout=sys.stderr,
            stderr=sys.stderr)

        with open(key_path, 'rt') as handle:
            private = handle.read()
        with open(key_path + '.pub', 'rt') as handle:
            public = handle.read()
        return private, public

    keys = {}
    for algorithm in ('ecdsa', 'ed25519', 'rsa'):
        private, public = _make_pair(algorithm)
        # PreservedScalarString keeps the literal block (|) style so the key
        # material stays readable in the emitted YAML.
        keys[algorithm + '_private'] = PreservedScalarString(private)
        keys[algorithm + '_public'] = PreservedScalarString(public)

    ruamel.yaml.YAML().dump({'ssh_keys': keys}, sys.stdout)
 def get_ordered_input_map(self, schema_def_requirement):
     """
     Turn a CommandInputParameter into a CommentedMap with a consistent order.

     Fields are inserted in a fixed order so the emitted YAML is stable
     across runs.
     """
     input_map = CommentedMap()
     if self.label:
         input_map['label'] = self.label
     if self.type:  # type is not required for CommandInputParameter. Never seen this case, but...
         input_map['type'] = self._handle_input_type_field(
             schema_def_requirement)
     # `is not None` rather than truthiness: a default of 0/False/'' is a
     # real default and must still be emitted.
     if self.default is not None:
         input_map['default'] = self.default
     if self.inputBinding:
         input_map[
             'inputBinding'] = self.inputBinding.get_ordered_input_binding(
             )
     if self.format:
         input_map['format'] = self.format
     if self.streamable:
         input_map['streamable'] = self.streamable
     if self.secondaryFiles:
         input_map['secondaryFiles'] = self.secondaryFiles
     if self.doc:
         # Literal block style (|) keeps multi-line docs readable in YAML.
         input_map['doc'] = PreservedScalarString(self.doc)
     return input_map
Exemple #3
0
    def __setitem__(self, index, value):
        """Validate *value* against the validator already attached to
        ``self[index]`` and, only if validation succeeds, splice it into
        both the round-trip document and the parsed representation."""
        existing_validator = self._value[index].validator

        # Re-use the underlying chunk when handed an already-parsed YAML
        # object; otherwise build a chunk from the plain Python value first.
        if isinstance(value, YAML):
            new_value = existing_validator(value._chunk)
        else:
            new_value = existing_validator(YAMLChunk(ruamel_structure(value)))

        # First validate against forked document
        proposed_chunk = self._chunk.fork()
        proposed_chunk.contents[index] = new_value.as_marked_up()
        proposed_chunk.strictparsed()[index] = deepcopy(
            new_value.as_marked_up())

        if self.is_mapping():
            updated_value = existing_validator(proposed_chunk.val(index))
            updated_value._chunk.make_child_of(self._chunk.val(index))
        else:
            updated_value = existing_validator(proposed_chunk.index(index))
            updated_value._chunk.make_child_of(self._chunk.index(index))

        # If validation succeeds, update for real
        marked_up = new_value.as_marked_up()

        # So that the nicer x: | style of text is used instead of
        # x: "text\nacross\nlines"
        if isinstance(marked_up, (str, unicode)):  # NOTE(review): `unicode` implies a py2/compat alias in scope — confirm
            if u"\n" in marked_up:
                marked_up = PreservedScalarString(marked_up)

        self._chunk.contents[index] = marked_up
        self._value[YAML(index) if self.is_mapping() else index] = new_value
Exemple #4
0
def import_mab_summary(input_mabcsv, refid_lookup, output_yaml):
    """Convert a MAb summary CSV into a list of group objects and dump as YAML."""
    rows = load_data(input_mabcsv)
    lookup = load_refid_lookup(refid_lookup)

    groups = []
    for row in rows:
        entry = {
            'references': extract_references(row, lookup),
            'antibodies': get_first(row, 'mab name'),
        }

        source = get_first(row, 'source', 'sources')
        if source:
            # Literal block scalar keeps multi-line source text readable.
            entry['source'] = PreservedScalarString(source)

        for field, column in (('status', 'status'),
                              ('type', 'type'),
                              ('pdb', 'pdb'),
                              ('ighv', 'ighv')):
            entry[field] = get_first(row, column)

        shm = get_first(row, 'shm(%)')
        # Presumably stored as a fraction in the CSV; convert to a whole
        # percentage when present — TODO confirm against the source data.
        entry['shm'] = round(float(shm) * 100) if shm else shm

        entry['cdrh3_len'] = get_first(row, 'cdrh3 length')
        entry['iglv'] = get_first(row, 'iglv')
        entry['ic50'] = get_first(row, 'ic50(live, ng/ml or pv: ng/ml)')
        entry['epitope_class'] = get_first(row, 'epitope class')

        groups.append(entry)

    yaml.dump(groups, output_yaml)
def main():
    """Sample acceptable syntactic mutants for a snapshot and write their
    diffs to a YAML file, one ``{'diff': ...}`` document per mutant."""
    setup_logging()
    args = get_args()
    fn_output = args.output
    num_requested = args.number
    name_snapshot = args.snapshot

    # find and shuffle the set of all candidate syntactic mutations
    with launch_servers() as (client_bugzoo, client_boggart):
        snapshot = client_bugzoo.bugs[name_snapshot]
        is_acceptable = \
            functools.partial(is_acceptable_mutant, client_bugzoo, client_boggart, snapshot)
        candidates = all_mutations(client_bugzoo, client_boggart, snapshot)
        random.shuffle(candidates)

        # Walk the shuffled candidates, keeping diffs until we have enough.
        acceptable = []  # type: List[str]
        for mutation in candidates:
            if len(acceptable) == num_requested:
                break
            if not is_acceptable(mutation):
                continue
            diff = client_boggart.mutations_to_diff(snapshot, [mutation])
            acceptable.append(str(diff))

        # Literal block scalars keep the multi-line diffs readable in YAML.
        documents = [{'diff': PreservedScalarString(d)} for d in acceptable]
        with open(fn_output, 'w') as handle:
            YAML().dump(documents, handle)
def build_file(path, file):
    """Build a manifest entry for one config file.

    Reads ``path/file`` and extracts the ``__perms__`` and ``__ownr__``
    markers from its content, falling back to defaults (with a printed
    notice) when a marker is missing.  Returns a dict with the target
    path, permissions, owner and literal file content.
    """
    with open(path + '/' + file, 'r') as data:
        content = data.read()
    # Split once; both marker searches scan the same lines.
    lines = content.split('\n')
    try:
        perms = [line for line in lines if "__perms__" in line][0].split(' ')[1]
    except IndexError:
        # No marker found: executables get the exec default, everything
        # else the generic default.
        if file.split('.')[-1] in EXEC:
            perms = PERMS['exec']
        else:
            perms = PERMS['other']
        print("Couldn't find perms in file " + file + ", defaulting to " +
              perms)
    try:
        owner = [line for line in lines if "__ownr__" in line][0].split(' ')[1]
    except IndexError:
        print("Couldn't find owner in file " + file + ", defaulting to root")
        owner = "root"
    entry = {
        # Map the on-disk config tree onto the target filesystem root.
        "path": path.replace(MAIN_DIR + '/config/system/', '/') + '/' + file,
        "permissions": perms,
        "owner": owner,
        # Literal block scalar so the content round-trips verbatim.
        "content": PreservedScalarString(content),
    }
    return entry
Exemple #7
0
    def __setitem__(self, index, value):
        """Set ``self[index]`` to *value*, validating it with the validator
        already associated with that slot (falling back to the value's own
        validator for new keys) before committing the change."""
        strictindex = self._strictindex(index)
        try:
            value_validator = self._value[strictindex].validator
        except KeyError:
            # TODO: What if value isn't a YAML object?
            value_validator = value.validator

        # Re-use the chunk of an already-parsed YAML object; otherwise
        # serialize the plain value through the validator first.
        new_value = (value_validator(value._chunk)
                     if isinstance(value, YAML) else value_validator(
                         YAMLChunk(value_validator.to_yaml(value))))

        # Fork the value
        forked_chunk = self._chunk.fork(strictindex, new_value)

        # Validate and attach to current structure
        if self.is_mapping():
            updated_value = value_validator(forked_chunk.val(strictindex))
            updated_value._chunk.make_child_of(self._chunk.val(strictindex))
        else:
            updated_value = value_validator(forked_chunk.index(strictindex))
            updated_value._chunk.make_child_of(self._chunk.index(strictindex))

        marked_up = new_value.as_marked_up()

        # So that the nicer x: | style of text is used instead of
        # x: "text\nacross\nlines"
        if isinstance(marked_up, (str, unicode)):  # NOTE(review): `unicode` implies a py2/compat alias in scope — confirm
            if u"\n" in marked_up:
                marked_up = PreservedScalarString(marked_up)

        self._chunk.contents[self._chunk.ruamelindex(strictindex)] = marked_up
        self._value[YAML(forked_chunk.ruamelindex(strictindex)) if self.
                    is_mapping() else forked_chunk.ruamelindex(strictindex
                                                               )] = new_value
Exemple #8
0
 def _convert_node(self, node, depth=0):
     """Recursively convert a BeautifulSoup node into YAML-friendly
     structures: tags become single-key CommentedMaps, text nodes become
     plain strings or literal block scalars, attributes go under the
     ``.attribute`` key.  Exits the process if BeautifulSoup is missing.
     """
     try:
         import bs4
     except ImportError:
         print("For HTML conversion you need to install BeautifulSoup")
         print("e.g. using (pip install beautifulsoup4)")
         sys.exit(1)
     from ruamel.yaml.comments import CommentedMap
     from ruamel.yaml.scalarstring import PreservedScalarString
     ret_val = []
     if node.attrs:
         ret_val.append({'.attribute': node.attrs})
     for data in node.contents:
         if isinstance(data, bs4.Tag):
             kv = CommentedMap()
             # convert the internals of the tag
             kv[data.name] = self._convert_node(data, depth + 1)
             ret_val.append(kv)
         elif isinstance(data, bs4.NavigableString):
             s, nl = self._strip(data)
             if not s:
                 # whitespace-only text node: skip it entirely
                 continue
             if nl:
                 # multi-line text: preserve as a literal block scalar
                 ret_val.append(PreservedScalarString(s))
                 continue
             ret_val.append(s)
         else:
             # fix: message typo ("unknow" -> "unknown")
             print('unknown type', type(data))
     # Collapse single-element results when flattening is requested.
     if self.flatten and len(ret_val) == 1:
         return ret_val[0]
     return ret_val
def main(input, output, support_pairing):
    """Expand a seed config of RF blinds into a Home Assistant package
    (template covers, customizations, and optionally pairing switches)."""
    seed_config = yaml.load(input)
    tx_device = seed_config['tx_device']

    switches = {}
    covers = {}
    customize = {}

    # Shared by every entity: only available while the transmitter is up.
    availability_template = PreservedScalarString(
        f"{{{{ is_state('binary_sensor.{tx_device}_status', 'on') }}}}")

    for blind in seed_config['blinds']:
        def svc_call(action, _blind=blind):
            # Bind the current blind as a default so each closure keeps
            # its own remote/channel.
            return get_service_call(
                tx_device=tx_device,
                remote_id=_blind['remote'],
                channel_id=_blind['channel'],
                action=action,
            )

        slug = camelize(blind['name'])

        covers[slug] = {
            'friendly_name': blind['name'],
            'device_class': 'blind',
            'open_cover': svc_call("OPEN"),
            'close_cover': svc_call("CLOSE"),
            'stop_cover': svc_call("STOP"),
            'availability_template': availability_template,
        }
        customize['cover.' + slug] = CUSTOMIZE_BASE

        pairing_switch = {
            'friendly_name': blind['name'] + ' Blind Pairing',
            'value_template': 'off',
            'turn_on': svc_call("PAIR"),
            'turn_off': [],
            'availability_template': availability_template,
        }
        switches[camelize(pairing_switch['friendly_name'])] = pairing_switch

    package = {
        'homeassistant': {
            'customize': customize,
        },
        'cover': [{
            'platform': 'template',
            'covers': covers,
        }]
    }

    if support_pairing:
        package['switch'] = {
            'platform': 'template',
            'switches': switches,
        }

    yaml.dump(package, output)
    def apply_config(self, buildfarm_config_root_folder):
        """Merge this object's configuration into the buildfarm hiera files.

        For every known hiera file under *buildfarm_config_root_folder*,
        each key already present in the file is overwritten with the value
        from ``self.to_yaml()``.  SSH git-fetch credentials (which do not
        pre-exist in the files) are appended to master.yaml afterwards.
        """
        custom_yaml_config = self.to_yaml()
        yaml = YAML()
        yaml.preserve_quotes = True
        buildfarm_config_files = [
            Path("hiera/hieradata/common.yaml"),
            Path("hiera/hieradata/buildfarm_role/repo.yaml"),
            Path("hiera/hieradata/buildfarm_role/agent.yaml"),
            Path("hiera/hieradata/buildfarm_role/master.yaml"),
        ]

        for buildfarm_config_file in buildfarm_config_files:
            print('Loading file %s' % buildfarm_config_file)
            with open(
                    str(buildfarm_config_root_folder / buildfarm_config_file),
                    'r') as bcfile:
                hiera_yaml = yaml.load(bcfile)

                # Only keys that already exist in the hiera file are
                # substituted; unknown custom keys are ignored here.
                for hiera_key in hiera_yaml.keys():
                    if hiera_key in custom_yaml_config.keys():
                        print('Substituting field %s in file %s' %
                              (hiera_key, str(buildfarm_config_file)))
                        hiera_yaml[hiera_key] = custom_yaml_config[hiera_key]

            # Re-open for writing so the substituted document replaces the file.
            with open(
                    str(buildfarm_config_root_folder / buildfarm_config_file),
                    'w') as bcfile:
                yaml.dump(hiera_yaml, bcfile)

        # The fields credentials::git-fetch-ssh::XXX are not in the original yaml configuration, so they need to be added separately
        # FIXME: here we are adding the raw passphrase, but it should be the hash instead
        if self.git_fetch_ssh_private_key:
            yaml_str = {
                'credentials::git-fetch-ssh::username':
                SingleQuotedScalarString(self.git_fetch_ssh_username),
                'credentials::git-fetch-ssh::id':
                self.git_fetch_ssh_id,
                'credentials::git-fetch-ssh::passphrase':
                SingleQuotedScalarString(self.git_fetch_ssh_passphrase),
                'credentials::git-fetch-ssh::private_key':
                PreservedScalarString(self.git_fetch_ssh_private_key),
            }
            with open(
                    str(buildfarm_config_root_folder /
                        Path("hiera/hieradata/buildfarm_role/master.yaml")),
                    'r',
            ) as master_file:
                master_yaml = yaml.load(master_file)
            # Add the new keys (update)
            master_yaml.update(yaml_str)
            with open(
                    str(buildfarm_config_root_folder /
                        Path("hiera/hieradata/buildfarm_role/master.yaml")),
                    'w',
            ) as master_file:
                yaml.dump(master_yaml, master_file)
 def json_yaml_adapt(self, data):
     """Replace multi-line string values at the top level of a dict with
     literal block scalars (in place); lists are currently passed through
     unchanged.  Returns the same object.
     """
     if isinstance(data, dict):
         for key, value in data.items():
             if isinstance(value, string_types) and '\n' in value:
                 data[key] = PreservedScalarString(value)
     elif isinstance(data, list):
         pass
     return data
Exemple #12
0
 def to_dict(self) -> Dict[str, Any]:
     """Serialize this result: the diff as a literal block scalar plus the
     oracle/trace pairs that were inconsistent and consistent with it."""
     def _pairs(oracle_traces):
         # Each element is an (oracle, trace) tuple.
         return [{'oracle': oracle, 'trace': trace}
                 for oracle, trace in oracle_traces]

     return {
         'diff': PreservedScalarString(self.diff),
         'inconsistent': _pairs(self.fn_inconsistent_traces),
         'consistent': _pairs(self.fn_consistent_traces),
     }
Exemple #13
0
def save(used: Set[str]):
    """Update the current brick.yaml file."""

    # drop unused bricks
    asked = {b.name for b in brick.MANIFEST.get().require}
    keep: List[brick.Brick] = [brick.TARGET.get()]
    for brk in brick.BUILD_LIST.get([]):
        if brk.name in used:
            keep.append(brk)
        elif brk.name in asked:
            LOGGER.info("unused %s %s", brk.name, brk.version)
    brick.BUILD_LIST.set(keep)
    # Recompute the minimal requirement set from the pruned build list.
    dirs = [d.name for d in directs()]
    mins = mvs.req(brick.TARGET.get(), brick.BUILD_LIST.get(), dirs, Reqs())
    brick.MANIFEST.get().require = mins

    # format and save
    yaml = YAML()
    man = brick.MANIFEST.get()
    file = brick.ROOT.get().joinpath("brick.yaml")
    data: Any = {}
    if file.exists():
        data = yaml.load(file.read_text())
    # Emit well-known keys in a fixed order; None-valued entries are
    # filtered out at dump time below, so optional fields simply vanish.
    order: MutableMapping[str, Any] = OrderedDict()
    order["name"] = man.name
    order["version"] = man.version
    order["license"] = man.license or None
    order["private"] = man.private or None
    desc = man.description
    # NOTE(review): threshold 59 vs split point 70 look inconsistent —
    # confirm the intended wrap width.
    if len(desc) <= 59:
        order["description"] = desc or None
    else:
        order["description"] = PreservedScalarString("\n".join(
            [desc[:70], desc[70:]]))
    order["main"] = man.main
    reqs = sorted(man.require, key=lambda r: r.name)
    order["require"] = dict(OrderedDict(
        (r.name, r.version) for r in reqs)) or None
    repls: MutableMapping[str, Any] = OrderedDict()
    for repl in sorted(man.replace.items(), key=lambda r: r[0]):
        if isinstance(repl[1], pathlib.Path):
            repls[repl[0]] = str(repl[1])
        else:
            repls[repl[0]] = " ".join([repl[1].name, repl[1].version])
    order["replace"] = dict(repls) or None
    excs = sorted(man.exclude, key=lambda r: r.name)
    order["exclude"] = dict(OrderedDict(
        (r.name, r.version) for r in excs)) or None
    # Preserve any extra keys already present in the file, after the known ones.
    order.update({k: v for k, v in data.items() if k not in set(order.keys())})
    with file.open("w") as handle:
        yaml.dump({k: v for k, v in order.items() if v is not None}, handle)
Exemple #14
0
 def update_template(self):
     """
     updates the Code property of a AWS::Lambda::Function resource of name `self.resource` to `self.code`

     Sets ``self.dirty`` when the template changed; warns on resources of
     the wrong type.
     """
     resource = self.template.get('Resources', {}).get(self.resource, None)
     if resource and resource['Type'] == 'AWS::Lambda::Function':
         code = resource.get('Properties', {}).get('Code', {})
         old_code = code['ZipFile'] if 'ZipFile' in code else None
         if old_code != self.code:
             sys.stderr.write(
                 'INFO: updating inline code of lambda {} in {}\n'.format(self.resource, self.filename))
             if 'Properties' not in resource:
                 resource['Properties'] = {}
             # The whole Code mapping is replaced here, so there is no need
             # to pre-create resource['Properties']['Code'] first (the
             # original's extra init block was dead code).
             resource['Properties']['Code'] = {'ZipFile': PreservedScalarString(self.code)}
             self.dirty = True
     elif resource:
         sys.stderr.write(
             'WARN: resource {} in {} is not of type AWS::Lambda::Function\n'.format(self.resource, self.filename))
Exemple #15
0
def gatk_tool_to_cwl(gatk_tool: GATKTool, cmd_line_options,
                     annotation_names: List[str]) -> Dict:
    """
    Return a dictionary representing a CWL file from a given GATKTool.

    The result holds the CWL header (id/baseCommand/requirements) plus
    inputs and outputs derived from the tool's arguments; BAM/VCF outputs
    get index/md5 secondaryFiles keyed off the create-output-* flags.
    """

    version = GATKVersion(cmd_line_options.version)

    if gatk_tool.name in SPECIAL_GATK3_MODULES and not version.is_3():
        _logger.warning(
            f"Tool {gatk_tool.name}'s cwl may be incorrect. The GATK documentation needs to be looked at by a human and hasn't been yet."
        )

    base_command = cmd_line_options.gatk_command.split(" ")

    # GATK3 selects the tool via --analysis_type <name>; GATK4 takes the
    # tool name directly as a subcommand.
    if version.is_3():
        base_command.append("--analysis_type")

    base_command.append(gatk_tool.name)

    cwl = {
        'id':
        gatk_tool.name,
        'cwlVersion':
        'v1.0',
        'baseCommand':
        base_command,
        'class':
        'CommandLineTool',
        "doc":
        PreservedScalarString(gatk_tool.dict.description),
        'requirements': [{
            "class": "ShellCommandRequirement"
        }, {
            "class": "InlineJavascriptRequirement",
            "expressionLib": [PreservedScalarString(JS_LIBRARY)]
        }, {
            "class":
            "SchemaDefRequirement",
            "types": [{
                "type": "enum",
                "name": "annotation_type",
                "symbols": annotation_names
            }]
        }] + ([] if cmd_line_options.no_docker else
              [{
                  "class": "DockerRequirement",
                  "dockerPull": cmd_line_options.docker_image_name
              }])
    }

    # Create and write the cwl file

    outputs = []
    inputs = []

    for argument in gatk_tool.arguments:
        if argument.name not in INVALID_ARGS:
            argument_inputs, argument_outputs = gatk_argument_to_cwl(
                argument, gatk_tool.name, version)

            # Mention the CLI synonym in the doc when it differs from the name.
            synonym = argument.synonym
            if synonym is not None and len(
                    argument_inputs) >= 1 and synonym.lstrip(
                        "-") != argument.name.lstrip("-"):
                argument_inputs[0]["doc"] += f" [synonymous with {synonym}]"

            inputs.extend(argument_inputs)

            # Heuristically classify the output (BAM/CRAM vs VCF vs other)
            # and attach the matching index/md5 secondaryFiles expressions.
            if argument_outputs and any(
                    arg.name.startswith("create-output-")
                    for arg in gatk_tool.arguments):
                # This depends on the first output always being the main one (not a tag).
                assert "tag" not in argument_outputs[0]["doc"]
                argument_outputs[0].setdefault("secondaryFiles", [])
                doc = argument.summary + argument.dict.fulltext
                if (("BAM" in doc or "bam" in argument.name) and
                    ("VCF" not in doc and "variant" not in doc)
                        or gatk_tool.name in ("UnmarkDuplicates",
                                              "FixMisencodedBaseQualityReads",
                                              "RevertBaseQualityScores",
                                              "ApplyBQSR", "PrintReads")):
                    # This is probably the BAM/CRAM output.
                    argument_outputs[0]["secondaryFiles"].extend([
                        "$(inputs['create-output-bam-index']? self.basename + self.nameext.replace('m', 'i') : [])",
                        "$(inputs['create-output-bam-md5']? self.basename + '.md5' : [])"
                    ])
                elif (("VCF" in doc or "variant" in doc) and "BAM" not in doc
                      or gatk_tool.name == "CNNScoreVariants"):
                    # This is probably the VCF output.
                    argument_outputs[0]["secondaryFiles"].extend([
                        # If the extension is .vcf, the index's extension is .vcf.idx;
                        # if the extension is .vcf.gz, the index's extension is .vcf.gz.tbi.
                        "$(inputs['create-output-variant-index']? self.basename + (inputs['output-filename'].endsWith('.gz')? '.tbi':'.idx') : [])",
                        "$(inputs['create-output-variant-md5']? self.basename + '.md5' : [])"
                    ])
                elif "IGV formatted file" in doc or "table" in doc or argument.name in (
                        "graph-output",
                        "activity-profile-out") or gatk_tool.name in (
                            "Pileup", "AnnotateIntervals", "VariantsToTable",
                            "GetSampleName", "PreprocessIntervals",
                            "BaseRecalibrator", "CountFalsePositives",
                            "CollectAllelicCounts", "CalculateMixingFractions",
                            "SplitIntervals", "GenomicsDBImport",
                            "GetPileupSummaries", "VariantRecalibrator",
                            "CollectReadCounts", "CheckPileup",
                            "ASEReadCounter"):
                    # This is not a BAM or VCF output, no need to add secondary files.
                    pass
                else:
                    _logger.warning(
                        f"Ambiguous output argument {argument.name} for {gatk_tool.name}"
                    )

                if not argument_outputs[0]["secondaryFiles"]:
                    del argument_outputs[0]["secondaryFiles"]

            outputs.extend(argument_outputs)

    cwl["inputs"] = inputs
    cwl["outputs"] = outputs

    return cwl
    def to_yaml(self):
        """Build the hiera configuration mapping for this buildfarm.

        Derives the Ubuntu distro list from ``self.distros``, renders the
        reprepro-updater INI sections and the reprepro config, and returns
        a dict of hiera keys to (quote-style-preserving) scalar values.
        """
        self.ubuntu_distros = [x['ubuntu'] for x in self.distros]
        # De-duplicate and sort for a stable, repeatable configuration.
        self.ubuntu_distros = list(set(self.ubuntu_distros))
        self.ubuntu_distros.sort()
        ubuntu_building_config = """\
[ubuntu_building]
architectures: %s
distros: %s
repository_path: /var/repos/ubuntu/building
signing_key: %s
upstream_config: /home/jenkins-agent/reprepro_config
""" % (
            ' '.join(self.architectures),
            ' '.join(self.ubuntu_distros),
            self.gpg_key_id,
        )

        ubuntu_testing_config = """\
[ubuntu_testing]
architectures: %s
distros: %s
repository_path: /var/repos/ubuntu/testing
signing_key: %s
upstream_config: /home/jenkins-agent/reprepro_config
""" % (
            ' '.join(self.architectures),
            ' '.join(self.ubuntu_distros),
            self.gpg_key_id,
        )

        ubuntu_main_config = """\
[ubuntu_main]
architectures: %s
distros: %s
repository_path: /var/repos/ubuntu/main
signing_key: %s
upstream_config: /home/jenkins-agent/reprepro_config
""" % (
            ' '.join(self.architectures),
            ' '.join(self.ubuntu_distros),
            self.gpg_key_id,
        )

        reprepro_config_content = """\
name: ros_bootstrap
method: http://repos.ros.org/repos/ros_bootstrap
suites: [%s]
component: main
architectures: [%s]
verify_release: blindtrust
""" % (
            ', '.join(self.ubuntu_distros),
            ', '.join(self.architectures),
        )

        # SingleQuotedScalarString / PreservedScalarString control the quoting
        # and block style of the values when the dict is dumped to YAML.
        yaml_str = {
            'master::ip':
            self.ip_master,
            'repo::ip':
            self.ip_repo,
            'timezone':
            SingleQuotedScalarString(self.timezone),
            'ssh_keys': {
                SingleQuotedScalarString(self.ssh_name): {
                    'key': SingleQuotedScalarString(self.ssh_public),
                    'type': self.ssh_type,
                    'user': '******',
                    'require': SingleQuotedScalarString('User[jenkins-agent]'),
                }
            },
            'jenkins::slave::ui_user':
            self.jenkins_user,
            'jenkins::slave::ui_pass':
            SingleQuotedScalarString(self.jenkins_password),
            'user::admin::name':
            self.jenkins_user,
            # Jenkins expects a bcrypt hash prefixed with '#jbcrypt:'.
            'user::admin::password_hash':
            '#jbcrypt:' + PreservedScalarString(
                hashpw(self.jenkins_password.encode('UTF-8'),
                       gensalt(10, prefix=b"2a")).decode('UTF-8')),
            'jenkins::private_ssh_key':
            PreservedScalarString(self.ssh_private),
            'ssh_host_keys': {
                'repo': SingleQuotedScalarString(self.repo_hostkey)
            },
            'jenkins-agent::gpg_key_id':
            self.gpg_key_id,
            'jenkins-agent::gpg_private_key':
            PreservedScalarString(self.gpg_private_key),
            'jenkins-agent::gpg_public_key':
            PreservedScalarString(self.gpg_public_key),
            'jenkins-agent::reprepro_updater_config':
            ubuntu_building_config + "\n" + ubuntu_testing_config + "\n" +
            ubuntu_main_config,
            'jenkins-agent::reprepro_config': {
                SingleQuotedScalarString('/home/jenkins-agent/reprepro_config/ros_bootstrap.yaml'):
                {
                    'ensure': SingleQuotedScalarString('present'),
                    'content': PreservedScalarString(reprepro_config_content),
                }
            },
        }
        # If there was an additional hotkey defined, add it to the configuration
        if self.git_fetch_hostkey:
            yaml_str['ssh_host_keys'] = {
                'ssh_host_keys': {
                    'repo':
                    SingleQuotedScalarString(self.repo_hostkey),
                    self.git_fetch_hostkey.split()[0]:
                    SingleQuotedScalarString(self.git_fetch_hostkey),
                }
            }
        return yaml_str
Exemple #17
0
def edit_instance_group(cluster_name, state_store):
    """Edit instance group configuration to disable default mount point creation on ephemeral0,
    and add nvme-cli package, just in case.
    """
    # NOTE: Python 2 module (print statements throughout).
    # Fetch the current 'nodes' instance-group spec from kops as YAML.
    cmd_list = [
        'kops', 'get', 'ig', 'nodes',
        '--name=%s' % cluster_name,
        '--state=%s' % state_store, '-o', 'yaml'
    ]
    print cmd_list

    # Capture output even when kops fails so it can be echoed for debugging.
    retcode, data = 0, ''
    try:
        data = subprocess.check_output(cmd_list)
    except subprocess.CalledProcessError as exc:
        retcode = exc.returncode
        data = exc.output
        if data:
            print data

    print 'kops execution code', retcode
    if retcode != 0:
        return retcode

    yaml = YAML()
    try:
        inst_group = yaml.load(data)
        # A document without 'spec' is treated the same as unparsable yaml.
        if 'spec' not in inst_group:
            raise ValueError
    except ValueError as exc:
        print '%s did not output valid yaml' % cmd_list
        print data
        return 1

    # cloud-init user data: suppress the default ephemeral0 mount and make
    # sure nvme-cli is installed on the nodes.
    inst_group['spec']['additionalUserData'] = [{
        'name':
        'nuvo_customization.txt',
        'type':
        'text/cloud-config',
        'content':
        PreservedScalarString(
            '#cloud-config\nmounts:\n- [ ephemeral0 ]\npackages:\n- nvme-cli\n'
        )
    }]

    # Round-trip through a temp file for `kops replace`; the file is kept on
    # failure to aid debugging.
    data = yaml.dump(inst_group)
    tmpname = ''
    with tempfile.NamedTemporaryFile(prefix='inst_group',
                                     suffix='.yaml',
                                     delete=False) as myfile:
        tmpname = myfile.name
        myfile.write(data)

    cmd_list = ['kops', 'replace', '-f', tmpname, '--state=%s' % state_store]
    print cmd_list

    retcode = subprocess.call(cmd_list)
    print 'kops execution code', retcode

    if retcode == 0:
        os.remove(tmpname)
    else:
        print 'preserved temp file', tmpname
    return retcode
Exemple #18
0
def wrap_yaml_string(s, width=100):
    """Normalise *s* for YAML literal-block output: strip trailing
    whitespace from every line and drop blank lines.  ``width`` is
    currently unused (wrapping is disabled).
    """
    stripped = [line.rstrip() for line in s.splitlines()]
    kept = [line for line in stripped if line]
    return PreservedScalarString('\n'.join(kept))
Exemple #19
0
def preserve_literal(s):
    """Return *s* as a YAML literal block scalar, with CRLF and bare CR
    line endings normalised to LF first."""
    normalized = s.replace('\r\n', '\n').replace('\r', '\n')
    return PreservedScalarString(normalized)
Exemple #20
0
 def to_yaml(self, data):
     """Serialize *data* for YAML output.

     Raises YAMLSerializationError for non-strings; multi-line strings
     become literal block scalars, everything else is returned unchanged.
     """
     if not utils.is_string(data):
         raise YAMLSerializationError("'{}' is not a string".format(data))
     return PreservedScalarString(data) if "\n" in data else data