def get_secret_templates(self):
    def _secret(secret_name, secret):
        template = CommentedMap()
        # Keep 'force' on the template; get_secret_tasks() pops it off later.
        template['force'] = secret.get('force', False)
        template['apiVersion'] = self.DEFAULT_API_VERSION
        template['kind'] = "Secret"
        template['metadata'] = CommentedMap([
            ('name', secret_name),
            ('namespace', self._namespace_name)
        ])
        template['type'] = 'Opaque'
        template['data'] = {}
        for key, vault_variable in iteritems(secret):
            template['data'][key] = "{{ %s | b64encode }}" % vault_variable
        return template

    templates = CommentedSeq()
    if self._secrets:
        for secret_name, secret_config in iteritems(self._secrets):
            secret = _secret(secret_name, secret_config)
            templates.append(secret)
    return templates
def get_pvc_tasks(self, tags=[]):
    module_name = 'k8s_v1_persistent_volume_claim'
    tasks = CommentedSeq()
    for template in self.get_pvc_templates():
        task = CommentedMap()
        task['name'] = 'Create PVC'
        task[module_name] = CommentedMap()
        task[module_name]['state'] = 'present'
        if self._auth:
            for key in self._auth:
                task[module_name][key] = self._auth[key]
        task[module_name]['force'] = template.pop('force', False)
        task[module_name]['resource_definition'] = template
        if tags:
            task['tags'] = copy.copy(tags)
        tasks.append(task)
    if self._volumes:
        # Remove any volumes where state is 'absent'
        for volname, vol_config in self._volumes.items():
            if self.CONFIG_KEY in vol_config:
                if vol_config[self.CONFIG_KEY].get('state', 'present') == 'absent':
                    task = CommentedMap()
                    task['name'] = 'Remove PVC'
                    task[module_name] = CommentedMap()
                    task[module_name]['name'] = volname
                    task[module_name]['namespace'] = self._namespace_name
                    task[module_name]['state'] = 'absent'
                    if self._auth:
                        for key in self._auth:
                            task[module_name][key] = self._auth[key]
                    if tags:
                        task['tags'] = copy.copy(tags)
                    tasks.append(task)
    return tasks
def get_deployment_tasks(self, module_name=None, engine_state=None, tags=[]):
    tasks = CommentedSeq()
    for template in self.get_deployment_templates(engine_state=engine_state):
        task = CommentedMap()
        if engine_state is None:
            task_name = 'Create deployment, and scale replicas up'
        else:
            task_name = 'Stop running containers by scaling replicas down to 0'
        task['name'] = task_name
        task[module_name] = CommentedMap()
        task[module_name]['state'] = 'present'
        if self._auth:
            for key in self._auth:
                task[module_name][key] = self._auth[key]
        task[module_name]['force'] = template.pop('force', False)
        task[module_name]['resource_definition'] = template
        if tags:
            task['tags'] = copy.copy(tags)
        tasks.append(task)
    if engine_state != 'stop':
        for name, service_config in self._services.items():
            # Remove deployment for any services where state is 'absent'
            if service_config.get(self.CONFIG_KEY, {}).get('state', 'present') == 'absent':
                # Build a fresh task rather than mutating the last 'create' task
                task = CommentedMap()
                task['name'] = 'Remove deployment'
                task[module_name] = CommentedMap()
                task[module_name]['state'] = 'absent'
                if self._auth:
                    for key in self._auth:
                        task[module_name][key] = self._auth[key]
                task[module_name]['name'] = name
                task[module_name]['namespace'] = self._namespace_name
                if tags:
                    task['tags'] = copy.copy(tags)
                tasks.append(task)
    return tasks
def get_service_tasks(self, tags=[]):
    module_name = 'k8s_v1_service'
    tasks = CommentedSeq()
    for template in self.get_services_templates():
        task = CommentedMap()
        task['name'] = 'Create service'
        task[module_name] = CommentedMap()
        task[module_name]['state'] = 'present'
        if self._auth:
            for key in self._auth:
                task[module_name][key] = self._auth[key]
        task[module_name]['force'] = template.pop('force', False)
        task[module_name]['resource_definition'] = template
        if tags:
            task['tags'] = copy.copy(tags)
        tasks.append(task)
    if self._services:
        # Remove any services where state is 'absent'
        for name, service in self._services.items():
            if service.get(self.CONFIG_KEY, {}).get('state', 'present') == 'absent':
                task = CommentedMap()
                task['name'] = 'Remove service'
                task[module_name] = CommentedMap()
                task[module_name]['state'] = 'absent'
                task[module_name]['name'] = name
                task[module_name]['namespace'] = self._namespace_name
                if self._auth:
                    for key in self._auth:
                        task[module_name][key] = self._auth[key]
                if tags:
                    task['tags'] = copy.copy(tags)
                tasks.append(task)
    return tasks
def _merge_simple_lists(self, lhs: CommentedSeq, rhs: CommentedSeq, path: YAMLPath, node_coord: NodeCoords) -> CommentedSeq: """ Merge two CommentedSeq-wrapped lists of Scalars or CommentedSeqs. Parameters: 1. lhs (CommentedSeq) The merge target. 2. rhs (CommentedSeq) The merge source. 3. path (YAMLPath) Location within the DOM where this merge is taking place. 4. node_coord (NodeCoords) The RHS root node, its parent, and reference within its parent; used for config lookups. Returns: (list) The merged result. Raises: - `MergeException` when a clean merge is impossible. """ if not isinstance(lhs, CommentedSeq): raise MergeException( "Impossible to add Array data to non-Array destination.", path) merge_mode = self.config.array_merge_mode(node_coord) if merge_mode is ArrayMergeOpts.LEFT: return lhs if merge_mode is ArrayMergeOpts.RIGHT: return rhs tagless_lhs = Nodes.tagless_elements(lhs) for idx, ele in enumerate(rhs): path_next = path + "[{}]".format(idx) self.logger.debug("Processing element {} at {}.".format( idx, path_next), prefix="Merger::_merge_simple_lists: ", data=ele) if merge_mode is ArrayMergeOpts.UNIQUE: cmp_val = ele if isinstance(ele, TaggedScalar): cmp_val = ele.value self.logger.debug( "Looking for comparison value, {}, in:".format(cmp_val), prefix="Merger::_merge_simple_lists: ", data=tagless_lhs) if cmp_val in tagless_lhs: lhs = CommentedSeq([ ele if (e == cmp_val or (isinstance(e, TaggedScalar) and e.value == cmp_val)) else e for e in lhs ]) else: lhs.append(ele) continue lhs.append(ele) return lhs
def cli(sections, input, output, quiet): # Set YAML parameters yaml = YAML(typ="rt") # Round-trip mode allows for comment insertion indent_offset = 2 yaml.indent(offset=indent_offset) # Load environment file template with open(os.path.join("requirements", "environment.in")) as f: env_yml = yaml.load(f.read()) # Extract dependency section contents if not quiet: print(f"Reading dependencies from {input}") setup_config = read_configuration(input) sections = [x.strip() for x in sections.split(",")] section_indices = dict() dep_list = (deepcopy(env_yml["dependencies"]) if "dependencies" in env_yml else []) i = len(dep_list) for section in sections: section_indices[section] = i try: packages = (setup_config["options"]["install_requires"] if section == "main" else setup_config["options"]["extras_require"][section]) except KeyError: raise RuntimeError(f"Cannot fetch dependencies from {input}") for package in packages: if package not in dep_list: # Do not duplicate dep_list.append(package) i += 1 # Format dependency list lst = CS(dep_list) for section, i in section_indices.items(): lst.yaml_set_comment_before_after_key(i, section, indent_offset) env_yml["dependencies"] = lst # Output to terminal if not quiet: yaml.dump(env_yml, sys.stdout) # Output to file if output is not None: with open(output, "w") as outfile: if not quiet: print() print(f"Saving to {output}") yaml.dump(env_yml, outfile)
def get_services_templates(self): """ Generate a service configuration """ def _create_service(name, service): template = CommentedMap() state = service.get(self.CONFIG_KEY, {}).get('state', 'present') if state == 'present': ports = self.get_service_ports(service) if ports: template['apiVersion'] = self.DEFAULT_API_VERSION template['kind'] = 'Service' template['force'] = service.get(self.CONFIG_KEY, {}).get('service', {}).get('force', False) labels = CommentedMap([ ('app', self._namespace_name), ('service', name) ]) template['metadata'] = CommentedMap([ ('name', name), ('namespace', self._namespace_name), ('labels', copy.deepcopy(labels)) ]) template['spec'] = CommentedMap([ ('selector', copy.deepcopy(labels)), ('ports', ports) ]) # Translate options: if service.get(self.CONFIG_KEY): for key, value in service[self.CONFIG_KEY].items(): if key == 'service': for service_key, service_value in value.items(): if service_key == 'force': continue elif service_key == 'metadata': self.copy_attribute(template, service_key, service_value) else: self.copy_attribute(template['spec'], service_key, service_value) return template templates = CommentedSeq() if self._services: for name, service in self._services.items(): template = _create_service(name, service) if template: templates.append(template) if service.get('links'): # create services for aliased links for link in service['links']: if ':' in link: service_name, alias = link.split(':') alias_config = self._services.get(service_name) if alias_config: new_service = _create_service(alias, alias_config) if new_service: templates.append(new_service) return templates
def cmap( d, lc=None, fn=None ): # type: (Union[int, float, str, Text, Dict, List], List[int], Text) -> Union[int, float, str, Text, CommentedMap, CommentedSeq] if lc is None: lc = [0, 0, 0, 0] if fn is None: fn = "test" if isinstance(d, CommentedMap): fn = d.lc.filename if hasattr(d.lc, "filename") else fn for k, v in six.iteritems(d): if k in d.lc.data: d[k] = cmap(v, lc=d.lc.data[k], fn=fn) else: d[k] = cmap(v, lc, fn=fn) return d if isinstance(d, CommentedSeq): fn = d.lc.filename if hasattr(d.lc, "filename") else fn for k, v in enumerate(d): if k in d.lc.data: d[k] = cmap(v, lc=d.lc.data[k], fn=fn) else: d[k] = cmap(v, lc, fn=fn) return d if isinstance(d, dict): cm = CommentedMap() for k in sorted(d.keys()): v = d[k] if isinstance(v, CommentedBase): uselc = [v.lc.line, v.lc.col, v.lc.line, v.lc.col] vfn = v.lc.filename if hasattr(v.lc, "filename") else fn else: uselc = lc vfn = fn cm[k] = cmap(v, lc=uselc, fn=vfn) cm.lc.add_kv_line_col(k, uselc) cm.lc.filename = fn return cm if isinstance(d, list): cs = CommentedSeq() for k, v in enumerate(d): if isinstance(v, CommentedBase): uselc = [v.lc.line, v.lc.col, v.lc.line, v.lc.col] vfn = v.lc.filename if hasattr(v.lc, "filename") else fn else: uselc = lc vfn = fn cs.append(cmap(v, lc=uselc, fn=vfn)) cs.lc.add_kv_line_col(k, uselc) cs.lc.filename = fn return cs else: return d
def cmap( d, # type: Union[int, float, str, Text, Dict[Text, Any], List[Dict[Text, Any]]] lc=None, # type: Optional[List[int]] fn=None, # type: Optional[Text] ): # type: (...) -> Union[int, float, str, Text, CommentedMap, CommentedSeq] if lc is None: lc = [0, 0, 0, 0] if fn is None: fn = "test" if isinstance(d, CommentedMap): fn = d.lc.filename if hasattr(d.lc, "filename") else fn for k, v in six.iteritems(d): if d.lc.data is not None and k in d.lc.data: d[k] = cmap(v, lc=d.lc.data[k], fn=fn) else: d[k] = cmap(v, lc, fn=fn) return d if isinstance(d, CommentedSeq): fn = d.lc.filename if hasattr(d.lc, "filename") else fn for k2, v2 in enumerate(d): if d.lc.data is not None and k2 in d.lc.data: d[k2] = cmap(v2, lc=d.lc.data[k2], fn=fn) else: d[k2] = cmap(v2, lc, fn=fn) return d if isinstance(d, MutableMapping): cm = CommentedMap() for k in sorted(d.keys()): v = d[k] if isinstance(v, CommentedBase): uselc = [v.lc.line, v.lc.col, v.lc.line, v.lc.col] vfn = v.lc.filename if hasattr(v.lc, "filename") else fn else: uselc = lc vfn = fn cm[k] = cmap(v, lc=uselc, fn=vfn) cm.lc.add_kv_line_col(k, uselc) cm.lc.filename = fn return cm if isinstance(d, MutableSequence): cs = CommentedSeq() for k3, v3 in enumerate(d): if isinstance(v3, CommentedBase): uselc = [v3.lc.line, v3.lc.col, v3.lc.line, v3.lc.col] vfn = v3.lc.filename if hasattr(v3.lc, "filename") else fn else: uselc = lc vfn = fn cs.append(cmap(v3, lc=uselc, fn=vfn)) cs.lc.add_kv_line_col(k3, uselc) cs.lc.filename = fn return cs else: return d
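# A minimal usage sketch for cmap() above (not part of the original source; the
# input data is invented for illustration). Wrapping a plain dict/list tree yields
# CommentedMap/CommentedSeq nodes carrying default line/column metadata, which
# downstream error reporting can use even for synthesized documents.
from ruamel.yaml.comments import CommentedMap, CommentedSeq

doc = cmap({"steps": [{"id": "s1"}, {"id": "s2"}]})
assert isinstance(doc, CommentedMap)
assert isinstance(doc["steps"], CommentedSeq)
assert doc.lc.filename == "test"              # default filename for synthesized nodes
assert doc.lc.data["steps"] == [0, 0, 0, 0]   # default line/col placeholder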
def update_environment_yml(): """Update conda_dev_env.yml file for conda.""" import re from ruamel.yaml import YAML from ruamel.yaml.comments import CommentedMap, CommentedSeq environment_filename = "conda_dev_env.yml" cmap = CommentedMap() cmap.yaml_set_start_comment( ("Usage: conda env create -n myenvname -f {} python=3.6\n" " conda activate myenvname\n" " pip install --no-deps -e .".format(environment_filename))) cmap["name"] = "aiida_crystal17" cmap["channels"] = CommentedSeq(["conda-forge", "cjs14"]) cmap["channels"].yaml_add_eol_comment("for sqlalchemy-diff and pgtest", 1) cmap["dependencies"] = dmap = CommentedSeq() # additional packages dmap.append("pip") dmap.append("aiida-core.services") # fix incompatibilities between conda and pypi replacements = {"pre-commit": "pre_commit"} setup_json = get_setup_json() for base, key in [ (None, "install_requires"), ("extras_require", "testing"), ("extras_require", "code_style"), ("extras_require", "docs"), ]: requirements = setup_json.get(base, setup_json)[key] count = 0 for req in sorted(requirements, key=lambda x: x.lower()): # skip packages required for specific python versions < 3 if re.findall("python_version\\s*\\<\\s*\\'?3", req): continue req = req.split(";")[0] for (regex, replacement) in iter(replacements.items()): req = re.sub(regex, replacement, req) count += 1 dmap.append(req.lower()) dmap.yaml_set_comment_before_after_key(len(dmap) - count, before=key) yaml = YAML(typ="rt") yaml.default_flow_style = False yaml.encoding = "utf-8" yaml.allow_unicode = True file_path = os.path.join(ROOT_DIR, environment_filename) with open(file_path, "w") as env_file: yaml.dump(cmap, env_file)
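# A small standalone sketch (not from the original source) of the two comment APIs
# used above: yaml_add_eol_comment attaches an end-of-line comment to a sequence
# item, and yaml_set_comment_before_after_key inserts a comment line before a given
# index. The package names here are placeholders.
import sys
from ruamel.yaml import YAML
from ruamel.yaml.comments import CommentedSeq

deps = CommentedSeq(["pip", "numpy", "pytest"])
deps.yaml_add_eol_comment("runtime", 1)                      # -> "- numpy  # runtime"
deps.yaml_set_comment_before_after_key(2, before="testing")  # comment line before "pytest"
YAML().dump({"dependencies": deps}, sys.stdout)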
def __delitem__(self, i): if isinstance(i, str): r = [ii for ii, x in enumerate(self) if x.get('name', None) == i] if len(r) > 1: raise KeyError( "There are {} elements named {}.".format(len(r), i)) elif len(r) == 0: raise KeyError("No element named {} found.".format(i)) else: CommentedSeq.__delitem__(self, r[0]) else: return CommentedSeq.__delitem__(self, i)
def __setitem__(self, i, v): if isinstance(i, str): r = [(ii, x) for ii, x in enumerate(self) if x.get('name', None) == i] if len(r) > 1: raise KeyError("There are {} elements named {}.".format( len(r), i)) elif len(r) == 0: self.append(v) else: CommentedSeq.__setitem__(self, r[0][0], v) else: CommentedSeq.__setitem__(self, i, v)
def split_container_config(input_data):
    components = sort_components_based_on_execution_id(input_data)
    updated_components = CommentedSeq()
    for component in components:
        container_count = component['deploy']['container_count']
        for i in range(0, container_count):
            temp_component = copy.deepcopy(component)
            temp_component['container_count'] = 1
            # Offset by the replica index so each split-out container gets a unique id
            temp_component['id'] = component['execution_id'] + i
            del temp_component['execution_id']
            updated_components.append(temp_component)
    input_data['lightweight_components'] = updated_components
    return input_data
def create_default_seq(config): if not config.get('config'): config['config'] = CommentedSeq() if not config.get('package'): config['package'] = CommentedSeq() if not config.get('ignore'): config['ignore'] = CommentedSeq() if not isinstance(config['config'], list): config['config'] = list_to_seq(config['config']) if not isinstance(config['package'], list): config['package'] = list_to_seq(config['package']) if not isinstance(config['ignore'], list): config['ignore'] = list_to_seq(config['ignore']) return config
def _resolve_idmap( self, document: CommentedMap, loader: "Loader", ) -> None: # Convert fields with mapSubject into lists # use mapPredicate if the mapped value isn't a dict. for idmapField in loader.idmap: if idmapField in document: idmapFieldValue = document[idmapField] if ( isinstance(idmapFieldValue, MutableMapping) and "$import" not in idmapFieldValue and "$include" not in idmapFieldValue ): ls = CommentedSeq() for k in sorted(idmapFieldValue.keys()): val = idmapFieldValue[k] v = None # type: Optional[CommentedMap] if not isinstance(val, CommentedMap): if idmapField in loader.mapPredicate: v = CommentedMap( ((loader.mapPredicate[idmapField], val),) ) v.lc.add_kv_line_col( loader.mapPredicate[idmapField], document[idmapField].lc.data[k], ) v.lc.filename = document.lc.filename else: raise ValidationException( "mapSubject '{}' value '{}' is not a dict " "and does not have a mapPredicate.".format(k, v) ) else: v = val v[loader.idmap[idmapField]] = k v.lc.add_kv_line_col( loader.idmap[idmapField], document[idmapField].lc.data[k] ) v.lc.filename = document.lc.filename ls.lc.add_kv_line_col(len(ls), document[idmapField].lc.data[k]) ls.lc.filename = document.lc.filename ls.append(v) document[idmapField] = ls
def generate_orchestration_playbook(self, url=None, namespace=None, local_images=True, **kwargs): """ Generate an Ansible playbook to orchestrate services. :param url: registry URL where images will be pulled from :param namespace: registry namespace :param local_images: bypass pulling images, and use local copies :return: playbook dict """ for service_name in self.services: image = self.get_latest_image_for_service(service_name) if local_images: self.services[service_name]['image'] = image.tags[0] else: if namespace is not None: image_url = urljoin('{}/'.format(urljoin(url, namespace)), image.tags[0]) else: image_url = urljoin(url, image.tags[0]) self.services[service_name]['image'] = image_url if kwargs.get('k8s_auth'): self.k8s_client.set_authorization(kwargs['auth']) play = CommentedMap() play['name'] = u'Manage the lifecycle of {} on {}'.format(self.project_name, self.display_name) play['hosts'] = 'localhost' play['gather_facts'] = 'no' play['connection'] = 'local' play['roles'] = CommentedSeq() play['tasks'] = CommentedSeq() role = CommentedMap([ ('role', 'kubernetes-modules') ]) play['roles'].append(role) play.yaml_set_comment_before_after_key( 'roles', before='Include Ansible Kubernetes and OpenShift modules', indent=4) play.yaml_set_comment_before_after_key('tasks', before='Tasks for setting the application state. ' 'Valid tags include: start, stop, restart, destroy', indent=4) play['tasks'].append(self.deploy.get_namespace_task(state='present', tags=['start'])) play['tasks'].append(self.deploy.get_namespace_task(state='absent', tags=['destroy'])) play['tasks'].extend(self.deploy.get_service_tasks(tags=['start'])) play['tasks'].extend(self.deploy.get_deployment_tasks(engine_state='stop', tags=['stop', 'restart'])) play['tasks'].extend(self.deploy.get_deployment_tasks(tags=['start', 'restart'])) play['tasks'].extend(self.deploy.get_pvc_tasks(tags=['start'])) playbook = CommentedSeq() playbook.append(play) logger.debug(u'Created playbook to run project', playbook=playbook) return playbook
def split_component_config(input_data):
    components = input_data['lightweight_components']
    updated_components = CommentedSeq()
    # Deep-copy the component once for each of its 'deploy' entries
    for component in components:
        for idx, val in enumerate(component['deploy']):
            temp_component = copy.deepcopy(component)
            temp_component['deploy'] = copy.deepcopy(component['deploy'][idx])
            # temp_component['execution_id'] = copy.deepcopy(component['execution_id'])
            updated_components.append(temp_component)
    components = updated_components
    input_data['lightweight_components'] = components
    return input_data
def test_recipe(): seq = CommentedSeq(["importlib-metadata >=0.12", "pytest"]) seq.yaml_add_eol_comment("[py<38]", 0) item = RecipeItem(0, seq) assert item.value == "importlib-metadata >=0.12" assert item.selector == "py<38" assert str(item) == "importlib-metadata >=0.12 # [py<38]" item.value = "importlib-metadata" item.selector = "py35" assert item.value == "importlib-metadata" assert item.selector == "py35" assert seq.ca.items[0][0].value == " # [py35]" assert seq[0] == "importlib-metadata" assert str(item) == "importlib-metadata # [py35]"
def update_secondaryFiles(t, top=False): # type: (Any, bool) -> Union[MutableSequence[MutableMapping[str, str]], MutableMapping[str, str]] if isinstance(t, CommentedSeq): new_seq = copy.deepcopy(t) for index, entry in enumerate(t): new_seq[index] = update_secondaryFiles(entry) return new_seq elif isinstance(t, MutableSequence): return CommentedSeq([update_secondaryFiles(p) for p in t]) elif isinstance(t, MutableMapping): return t elif top: return CommentedSeq([CommentedMap([("pattern", t)])]) else: return CommentedMap([("pattern", t)])
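# Illustrative checks (not from the original source) of the wrapping performed by
# update_secondaryFiles() above: a bare secondaryFiles pattern string is wrapped in
# the {"pattern": ...} form, and a top-level string becomes a one-element list.
assert update_secondaryFiles(".bai") == {"pattern": ".bai"}
assert update_secondaryFiles(".bai", top=True) == [{"pattern": ".bai"}]
assert update_secondaryFiles([".bai", ".crai"]) == [{"pattern": ".bai"}, {"pattern": ".crai"}]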
def get_pvc_templates(self):
    def _volume_to_pvc(claim_name, claim):
        template = CommentedMap()
        # Keep 'force' on the template; get_pvc_tasks() pops it off later.
        template['force'] = claim.get('force', False)
        template['apiVersion'] = self.DEFAULT_API_VERSION
        template['kind'] = "PersistentVolumeClaim"
        template['metadata'] = CommentedMap([
            ('name', claim_name),
            ('namespace', self._namespace_name)
        ])
        template['spec'] = CommentedMap()
        template['spec']['resources'] = {'requests': {'storage': '1Gi'}}
        if claim.get('volume_name'):
            template['spec']['volumeName'] = claim['volume_name']
        if claim.get('access_modes'):
            template['spec']['accessModes'] = claim['access_modes']
        if claim.get('requested_storage'):
            template['spec']['resources']['requests']['storage'] = claim['requested_storage']
        if claim.get('storage_class'):
            if not template['metadata'].get('annotations'):
                template['metadata']['annotations'] = {}
            # TODO: verify this annotation syntax
            template['metadata']['annotations']['storageClass'] = claim['storage_class']
        if claim.get('selector'):
            if claim['selector'].get('match_labels'):
                if not template['spec'].get('selector'):
                    template['spec']['selector'] = dict()
                template['spec']['selector']['matchLabels'] = claim['selector']['match_labels']
            if claim['selector'].get('match_expressions'):
                if not template['spec'].get('selector'):
                    template['spec']['selector'] = dict()
                template['spec']['selector']['matchExpressions'] = claim['selector']['match_expressions']
        return template

    templates = CommentedSeq()
    if self._volumes:
        for volname, vol_config in iteritems(self._volumes):
            if self.CONFIG_KEY in vol_config:
                if vol_config[self.CONFIG_KEY].get('state', 'present') == 'present':
                    volume = _volume_to_pvc(volname, vol_config[self.CONFIG_KEY])
                    templates.append(volume)
    return templates
def test_rdf_datetime() -> None: """Affirm that datetime objects can be serialized in makerdf().""" ldr = Loader({}) ctx: ContextType = { "id": "@id", "location": {"@id": "@id", "@type": "@id"}, "bar": "http://example.com/bar", "ex": "http://example.com/", } ldr.add_context(ctx) ra: CommentedMap = cast( CommentedMap, ldr.resolve_all( cmap( { "id": "foo", "bar": {"id": "baz"}, } ), "http://example.com", )[0], ) ra["s:dateCreated"] = datetime.datetime(2020, 10, 8) g = makerdf(None, ra, ctx) g.serialize(destination=stdout(), format="n3") g2 = makerdf(None, CommentedSeq([ra]), ctx) g2.serialize(destination=stdout(), format="n3")
def to_yaml(self, data): self._should_be_list(data) # TODO : Different length string return CommentedSeq([ validator.to_yaml(item) for item, validator in zip(data, self._validators) ])
def ruamel_structure(data, validator=None): """ Take dicts and lists and return a ruamel.yaml style structure of CommentedMaps, CommentedSeqs and data. If a validator is presented and the type is unknown, it is checked against the validator to see if it will turn it back in to YAML. """ if isinstance(data, dict): if len(data) == 0: raise exceptions.CannotBuildDocumentsFromEmptyDictOrList( "Document must be built with non-empty dicts and lists") return CommentedMap([(ruamel_structure(key), ruamel_structure(value)) for key, value in data.items()]) elif isinstance(data, list): if len(data) == 0: raise exceptions.CannotBuildDocumentsFromEmptyDictOrList( "Document must be built with non-empty dicts and lists") return CommentedSeq([ruamel_structure(item) for item in data]) elif isinstance(data, bool): return u"yes" if data else u"no" elif isinstance(data, (int, float)): return str(data) else: if not is_string(data): raise exceptions.CannotBuildDocumentFromInvalidData( ("Document must be built from a combination of:\n" "string, int, float, bool or nonempty list/dict\n\n" "Instead, found variable with type '{}': '{}'").format( type(data).__name__, data)) return data
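# Usage sketch (not from the original source): plain Python data is converted into
# round-trip ruamel.yaml nodes; note that booleans become "yes"/"no" strings and
# numbers become strings, matching the branches coded above.
import sys
from ruamel.yaml import YAML
from ruamel.yaml.comments import CommentedMap

doc = ruamel_structure({"debug": True, "retries": 3, "hosts": ["a", "b"]})
assert isinstance(doc, CommentedMap)
assert doc["debug"] == "yes" and doc["retries"] == "3"
YAML().dump(doc, sys.stdout)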
def build_next_node(yaml_path: YAMLPath, depth: int, value: Any = None) -> Any: """ Get the best default value for the next entry in a YAML Path. Parameters: 1. yaml_path (deque) The pre-parsed YAML Path to follow 2. depth (int) Index of the YAML Path segment to evaluate 3. value (Any) The expected value for the final YAML Path entry Returns: (Any) The most appropriate default value Raises: N/A """ default_value = Nodes.wrap_type(value) segments = yaml_path.escaped if not (segments and len(segments) > depth): return default_value typ = segments[depth][0] if typ == PathSegmentTypes.INDEX: default_value = CommentedSeq() elif typ == PathSegmentTypes.KEY: default_value = CommentedMap() return default_value
def saveenroll(): """Save and Enroll""" global dir try: yaml = YAML() with open(cmd + "/client-config.yaml") as fp: data = yaml.load(fp) data['url'] = "http://" + addr data['mspdir'] = dir + "/msp" data['csr']['cn'] = t5.get() f = CommentedSeq([ CommentedMap([('C', t6.get()), ('ST', t7.get()), ('L', t8.get()), ('O', t9.get()), ('OU', t10.get())]) ]) data['csr']['names'] = f fp = open(cmd + "/client-config.yaml", "w") yaml.dump(data, fp) res = check_output( "cd " + cmd + "; export FABRIC_CA_CLIENT_HOME=" + dir + "; fabric-ca-client enroll -c client-config.yaml -u http://" + t11.get() + ":" + t12.get() + "@" + addr, shell=True) except Exception as e: error(str(e)) print(res) tkMessageBox.showinfo(message="Successfully Enrolled")
def _migrate_decorators(self, element, old_key, new_key): self.log.debug("Converting display_element blink decorator to widget " "animation") decorator = element[old_key] element[new_key] = CommentedMap() element[new_key]['show_slide'] = CommentedSeq() on_dict = CommentedMap() on_dict['property'] = 'opacity' on_dict['value'] = 1 on_dict['duration'] = str(decorator.get('on_secs', .5)) + 's' element[new_key]['show_slide'].append(on_dict) off_dict = CommentedMap() off_dict['property'] = 'opacity' off_dict['value'] = 0 off_dict['duration'] = str(decorator.get('off_secs', .5)) + 's' off_dict['repeat'] = True element[new_key]['show_slide'].append(off_dict) del element[old_key] return element
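# Illustrative before/after (not from the original source) of the conversion above,
# assuming old_key='blink' and new_key='animations' (the real key names are passed
# in by the caller). Durations come from on_secs/off_secs with a 0.5s default:
#
#   blink:                    animations:
#     on_secs: 0.2              show_slide:
#     off_secs: 0.3             - {property: opacity, value: 1, duration: 0.2s}
#                               - {property: opacity, value: 0, duration: 0.3s, repeat: true}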
def FSlist(l):
    # Convert a list into flow style (the default is block style)
    from ruamel.yaml.comments import CommentedSeq
    from ruamel.yaml.scalarstring import DoubleQuotedScalarString

    double_quoted_list = [DoubleQuotedScalarString(x) for x in l]
    cs = CommentedSeq(double_quoted_list)
    cs.fa.set_flow_style()
    return cs
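# Usage sketch (not from the original source): dumping a FSlist() value emits the
# sequence inline with double-quoted items, e.g. zones: ["us-east-1a", "us-east-1b"].
import sys
from ruamel.yaml import YAML

YAML().dump({"zones": FSlist(["us-east-1a", "us-east-1b"])}, sys.stdout)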
def _type_dsl( self, t: Union[str, CommentedMap, CommentedSeq], lc: LineCol, filename: str, ) -> Union[str, CommentedMap, CommentedSeq]: if not isinstance(t, str): return t m = typeDSLregex.match(t) if not m: return t first = m.group(1) assert first second = third = None if bool(m.group(2)): second = CommentedMap((("type", "array"), ("items", first))) second.lc.add_kv_line_col("type", lc) second.lc.add_kv_line_col("items", lc) second.lc.filename = filename if bool(m.group(3)): third = CommentedSeq(["null", second or first]) third.lc.add_kv_line_col(0, lc) third.lc.add_kv_line_col(1, lc) third.lc.filename = filename return third or second or first
def seq(self, *args): # type: (Any) -> Any if self.typ == 'rt': from ruamel.yaml.comments import CommentedSeq return CommentedSeq(*args) else: return list(*args)
def _type_dsl( self, t, # type: Union[Text, Dict, List] lc, filename): # type: (...) -> Union[Text, Dict[Text, Text], List[Union[Text, Dict[Text, Text]]]] if not isinstance(t, (str, six.text_type)): return t m = Loader.typeDSLregex.match(t) if not m: return t first = m.group(1) second = third = None if bool(m.group(2)): second = CommentedMap((("type", "array"), ("items", first))) second.lc.add_kv_line_col("type", lc) second.lc.add_kv_line_col("items", lc) second.lc.filename = filename if bool(m.group(3)): third = CommentedSeq([u"null", second or first]) third.lc.add_kv_line_col(0, lc) third.lc.add_kv_line_col(1, lc) third.lc.filename = filename return third or second or first
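# Illustrative expansion (not from the original source) of the type DSL handled by
# _type_dsl() above; line/column bookkeeping is omitted:
#
#   "File"    -> "File"
#   "File[]"  -> {"type": "array", "items": "File"}
#   "File[]?" -> ["null", {"type": "array", "items": "File"}]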
def wrap_type(value: Any) -> Any: """ Wrap a value in one of the ruamel.yaml wrapper types. Parameters: 1. value (Any) The value to wrap. Returns: (Any) The wrapped value or the original value when a better wrapper could not be identified. Raises: N/A """ wrapped_value = value ast_value = Nodes.typed_value(value) typ = type(ast_value) if typ is list: wrapped_value = CommentedSeq(value) elif typ is dict: wrapped_value = CommentedMap(value) elif typ is str: wrapped_value = PlainScalarString(value) elif typ is int: wrapped_value = ScalarInt(value) elif typ is float: wrapped_value = Nodes.make_float_node(ast_value) elif typ is bool: wrapped_value = ScalarBoolean(bool(value)) return wrapped_value
def update_environment_yml(): """ Updates conda_dev_env.yml file for conda. """ import re from ruamel.yaml.comments import CommentedMap, CommentedSeq from ruamel.yaml import YAML environment_filename = 'conda_dev_env.yml' cmap = CommentedMap() cmap.yaml_set_start_comment( 'Usage: conda env create -n myenvname -f {} python=3.6'.format( environment_filename)) cmap['name'] = 'aiida_icl' cmap['channels'] = CommentedSeq(['conda-forge', 'cjs']) cmap['channels'].yaml_add_eol_comment('for sqlalchemy-diff and pgtest', 1) cmap['dependencies'] = dmap = CommentedSeq() # fix incompatibilities between conda and pypi replacements = {} setup_json = get_setup_json() for base, key in [(None, 'install_requires'), ('extras_require', 'testing'), ('extras_require', 'code_style')]: requirements = setup_json.get(base, setup_json)[key] count = 0 for req in sorted(requirements, key=lambda x: x.lower()): # skip packages required for specific python versions < 3 if re.findall("python_version\\s*\\<\\s*\\'?3", req): continue req = req.split(';')[0] for (regex, replacement) in iter(replacements.items()): req = re.sub(regex, replacement, req) count += 1 dmap.append(req.lower()) dmap.yaml_set_comment_before_after_key(len(dmap) - count, before=key) yaml = YAML(typ='rt') yaml.default_flow_style = False yaml.encoding = 'utf-8' yaml.allow_unicode = True file_path = os.path.join(ROOT_DIR, environment_filename) with open(file_path, 'w') as env_file: yaml.dump(cmap, env_file)
def get_step(tool: Workflow, step_id: str, loading_context: LoadingContext) -> CommentedMap: """Extract a single WorkflowStep for the given step_id.""" extracted = CommentedMap() step = find_step(tool.steps, step_id, loading_context)[0] if step is None: raise Exception(f"Step {step_id} was not found") new_id, step_name = cast(str, step["id"]).rsplit("#") extracted["steps"] = CommentedSeq([step]) extracted["inputs"] = CommentedSeq() extracted["outputs"] = CommentedSeq() for in_port in cast(List[CWLObjectType], step["in"]): name = "#" + cast(str, in_port["id"]).split("#")[-1].split("/")[-1] inp: CWLObjectType = {"id": name, "type": "Any"} if "default" in in_port: inp["default"] = in_port["default"] extracted["inputs"].append(CommentedMap(inp)) in_port["source"] = name if "linkMerge" in in_port: del in_port["linkMerge"] for outport in cast(List[Union[str, Mapping[str, Any]]], step["out"]): if isinstance(outport, Mapping): outport_id = cast(str, outport["id"]) else: outport_id = outport name = outport_id.split("#")[-1].split("/")[-1] extracted["outputs"].append({ "id": name, "type": "Any", "outputSource": f"{new_id}#{step_name}/{name}", }) for f in tool.tool: if f not in ("steps", "inputs", "outputs"): extracted[f] = tool.tool[f] extracted["id"] = new_id if "cwlVersion" not in extracted: extracted["cwlVersion"] = tool.metadata["cwlVersion"] return extracted
def __setitem__(self, key: str, value: Any): if key not in self.yaml_obj: self.add_subsection(key) if isinstance(value, (str, int)): self.yaml_obj[key] = CommentedSeq() self[key].add_item(value) elif isinstance(value, dict): Section(key, self.yaml_obj)
def add_item(self, item: Union[str, int]): """Add a new item to the current section :param item: Receive the value for the current item """ if not isinstance(self.yaml_obj, CommentedSeq): self._get_parent()[self.section_name] = CommentedSeq() RecipeItem(len(self.yaml_obj), self.yaml_obj, item)
def _resolve_idmap(self, document, # type: CommentedMap loader # type: Loader ): # type: (...) -> None # Convert fields with mapSubject into lists # use mapPredicate if the mapped value isn't a dict. for idmapField in loader.idmap: if (idmapField in document): idmapFieldValue = document[idmapField] if (isinstance(idmapFieldValue, dict) and "$import" not in idmapFieldValue and "$include" not in idmapFieldValue): ls = CommentedSeq() for k in sorted(idmapFieldValue.keys()): val = idmapFieldValue[k] v = None # type: Optional[CommentedMap] if not isinstance(val, CommentedMap): if idmapField in loader.mapPredicate: v = CommentedMap( ((loader.mapPredicate[idmapField], val),)) v.lc.add_kv_line_col( loader.mapPredicate[idmapField], document[idmapField].lc.data[k]) v.lc.filename = document.lc.filename else: raise validate.ValidationException( "mapSubject '%s' value '%s' is not a dict" "and does not have a mapPredicate", k, v) else: v = val v[loader.idmap[idmapField]] = k v.lc.add_kv_line_col(loader.idmap[idmapField], document[idmapField].lc.data[k]) v.lc.filename = document.lc.filename ls.lc.add_kv_line_col( len(ls), document[idmapField].lc.data[k]) ls.lc.filename = document.lc.filename ls.append(v) document[idmapField] = ls
def get_secret_tasks(self, tags=[]):
    module_name = 'k8s_v1_secret'
    tasks = CommentedSeq()
    for template in self.get_secret_templates():
        task = CommentedMap()
        task['name'] = 'Create Secret'
        task[module_name] = CommentedMap()
        task[module_name]['state'] = 'present'
        if self._auth:
            for key in self._auth:
                task[module_name][key] = self._auth[key]
        task[module_name]['force'] = template.pop('force', False)
        task[module_name]['resource_definition'] = template
        if tags:
            task['tags'] = copy.copy(tags)
        tasks.append(task)
    return tasks
def get_pvc_templates(self):
    def _volume_to_pvc(claim_name, claim):
        template = CommentedMap()
        # Keep 'force' on the template; get_pvc_tasks() pops it off later.
        template['force'] = claim.get('force', False)
        template['apiVersion'] = self.DEFAULT_API_VERSION
        template['kind'] = "PersistentVolumeClaim"
        template['metadata'] = CommentedMap([
            ('name', claim_name),
            ('namespace', self._namespace_name)
        ])
        template['spec'] = CommentedMap()
        template['spec']['resources'] = {'requests': {'storage': '1Gi'}}
        if claim.get('volume_name'):
            template['spec']['volumeName'] = claim['volume_name']
        if claim.get('access_modes'):
            template['spec']['accessModes'] = claim['access_modes']
        if claim.get('requested_storage'):
            template['spec']['resources']['requests']['storage'] = claim['requested_storage']
        if claim.get('storage_class'):
            if not template['metadata'].get('annotations'):
                template['metadata']['annotations'] = {}
            # TODO: verify this annotation syntax
            template['metadata']['annotations']['storageClass'] = claim['storage_class']
        if claim.get('selector'):
            if claim['selector'].get('match_labels'):
                if not template['spec'].get('selector'):
                    template['spec']['selector'] = dict()
                template['spec']['selector']['matchLabels'] = claim['selector']['match_labels']
            if claim['selector'].get('match_expressions'):
                if not template['spec'].get('selector'):
                    template['spec']['selector'] = dict()
                template['spec']['selector']['matchExpressions'] = claim['selector']['match_expressions']
        return template

    templates = CommentedSeq()
    if self._volumes:
        for volname, vol_config in self._volumes.items():
            if self.CONFIG_KEY in vol_config:
                if vol_config[self.CONFIG_KEY].get('state', 'present') == 'present':
                    volume = _volume_to_pvc(volname, vol_config[self.CONFIG_KEY])
                    templates.append(volume)
    return templates
def _resolve_type_dsl(self, document, # type: CommentedMap loader # type: Loader ): # type: (...) -> None for d in loader.type_dsl_fields: if d in document: datum2 = datum = document[d] if isinstance(datum, (str, six.text_type)): datum2 = self._type_dsl(datum, document.lc.data[ d], document.lc.filename) elif isinstance(datum, CommentedSeq): datum2 = CommentedSeq() for n, t in enumerate(datum): datum2.lc.add_kv_line_col( len(datum2), datum.lc.data[n]) datum2.append(self._type_dsl( t, datum.lc.data[n], document.lc.filename)) if isinstance(datum2, CommentedSeq): datum3 = CommentedSeq() seen = [] # type: List[Text] for i, item in enumerate(datum2): if isinstance(item, CommentedSeq): for j, v in enumerate(item): if v not in seen: datum3.lc.add_kv_line_col( len(datum3), item.lc.data[j]) datum3.append(v) seen.append(v) else: if item not in seen: datum3.lc.add_kv_line_col( len(datum3), datum2.lc.data[i]) datum3.append(item) seen.append(item) document[d] = datum3 else: document[d] = datum2
def get_deployment_templates(self, default_api=None, default_kind=None, default_strategy=None, engine_state=None):
    def _service_to_k8s_container(name, config, container_name=None):
        container = CommentedMap()
        if container_name:
            container['name'] = container_name
        else:
            # Prefer an explicit container_name from the config, otherwise use the service name
            container['name'] = config['container_name'] if config.get('container_name') else name
        container['securityContext'] = CommentedMap()
        container['state'] = 'present'
        volumes = []
        for key, value in iteritems(config):
            if key in self.IGNORE_DIRECTIVES:
                pass
            elif key == 'cap_add':
                if not container['securityContext'].get('Capabilities'):
                    container['securityContext']['Capabilities'] = dict(add=[], drop=[])
                for cap in value:
                    if self.DOCKER_TO_KUBE_CAPABILITY_MAPPING[cap]:
                        container['securityContext']['Capabilities']['add'].append(
                            self.DOCKER_TO_KUBE_CAPABILITY_MAPPING[cap])
            elif key == 'cap_drop':
                if not container['securityContext'].get('Capabilities'):
                    container['securityContext']['Capabilities'] = dict(add=[], drop=[])
                for cap in value:
                    if self.DOCKER_TO_KUBE_CAPABILITY_MAPPING[cap]:
                        container['securityContext']['Capabilities']['drop'].append(
                            self.DOCKER_TO_KUBE_CAPABILITY_MAPPING[cap])
            elif key == 'command':
                if isinstance(value, string_types):
                    container['args'] = shlex.split(value)
                else:
                    container['args'] = copy.copy(value)
            elif key == 'container_name':
                pass
            elif key == 'entrypoint':
                if isinstance(value, string_types):
                    container['command'] = shlex.split(value)
                else:
                    container['command'] = copy.copy(value)
            elif key == 'environment':
                expanded_vars = self.expand_env_vars(value)
                if expanded_vars:
                    if 'env' not in container:
                        container['env'] = []
                    container['env'].extend(expanded_vars)
            elif key in ('ports', 'expose'):
                if not container.get('ports'):
                    container['ports'] = []
                self.add_container_ports(value, container['ports'])
            elif key == 'privileged':
                container['securityContext']['privileged'] = value
            elif key == 'read_only':
                container['securityContext']['readOnlyRootFileSystem'] = value
            elif key == 'stdin_open':
                container['stdin'] = value
            elif key == 'volumes':
                vols, vol_mounts = self.get_k8s_volumes(value)
                if vol_mounts:
                    if 'volumeMounts' not in container:
                        container['volumeMounts'] = []
                    container['volumeMounts'].extend(vol_mounts)
                if vols:
                    volumes += vols
            elif key == 'secrets':
                for secret, secret_config in iteritems(value):
                    if self.CONFIG_KEY in secret_config:
                        vols, vol_mounts, env_variables = self.get_k8s_secrets(secret, secret_config[self.CONFIG_KEY])
                        if vol_mounts:
                            if 'volumeMounts' not in container:
                                container['volumeMounts'] = []
                            container['volumeMounts'].extend(vol_mounts)
                        if vols:
                            volumes += vols
                        if env_variables:
                            if 'env' not in container:
                                container['env'] = []
                            container['env'].extend(env_variables)
            elif key == 'working_dir':
                container['workingDir'] = value
            else:
                container[key] = value
        return container, volumes

    def _update_volumes(existing_volumes, new_volumes):
        existing_names = {}
        for vol in existing_volumes:
            existing_names[vol['name']] = 1
        for vol in new_volumes:
            if vol['name'] not in existing_names:
                existing_volumes.append(vol)

    templates = CommentedSeq()
    for name, service_config in iteritems(self._services):
        containers = []
        volumes = []
        pod = {}
        if service_config.get('containers'):
            for c in service_config['containers']:
                cname = "{}-{}".format(name, c['container_name'])
                k8s_container, k8s_volumes = _service_to_k8s_container(name, c, container_name=cname)
                containers.append(k8s_container)
                _update_volumes(volumes, k8s_volumes)
        else:
            k8s_container, k8s_volumes = _service_to_k8s_container(name, service_config)
            containers.append(k8s_container)
            volumes += k8s_volumes

        if service_config.get(self.CONFIG_KEY):
            for key, value in iteritems(service_config[self.CONFIG_KEY]):
                if key == 'deployment':
                    for deployment_key, deployment_value in iteritems(value):
                        if deployment_key != 'force':
                            self.copy_attribute(pod, deployment_key, deployment_value)

        labels = CommentedMap([
            ('app', self._namespace_name),
            ('service', name)
        ])

        state = service_config.get(self.CONFIG_KEY, {}).get('state', 'present')
        if state == 'present':
            template = CommentedMap()
            template['apiVersion'] = default_api
            template['kind'] = default_kind
            template['force'] = service_config.get(self.CONFIG_KEY, {}).get('deployment', {}).get('force', False)
            template['metadata'] = CommentedMap([
                ('name', name),
                ('labels', copy.deepcopy(labels)),
                ('namespace', self._namespace_name)
            ])
            template['spec'] = CommentedMap()
            template['spec']['template'] = CommentedMap()
            template['spec']['template']['metadata'] = CommentedMap([('labels', copy.deepcopy(labels))])
            template['spec']['template']['spec'] = CommentedMap([
                ('containers', containers)
            ])
            # When the engine requests a 'stop', set replicas to 0, stopping all containers
            template['spec']['replicas'] = 1 if not engine_state == 'stop' else 0
            template['spec']['strategy'] = CommentedMap([('type', default_strategy)])

            if volumes:
                template['spec']['template']['spec']['volumes'] = volumes

            if pod:
                for key, value in iteritems(pod):
                    if key == 'securityContext':
                        template['spec']['template']['spec'][key] = value
                    elif key != 'replicas' or (key == 'replicas' and engine_state != 'stop'):
                        # Leave replicas at 0 when engine_state is 'stop'
                        template['spec'][key] = value

            templates.append(template)

    return templates
def _init_job(self, joborder, runtime_context): # type: (Mapping[Text, Text], RuntimeContext) -> Builder job = cast(Dict[Text, Union[Dict[Text, Any], List[Any], Text, None]], copy.deepcopy(joborder)) make_fs_access = getdefault(runtime_context.make_fs_access, StdFsAccess) fs_access = make_fs_access(runtime_context.basedir) load_listing_req, _ = self.get_requirement( "LoadListingRequirement") if load_listing_req is not None: load_listing = load_listing_req.get("loadListing") else: load_listing = "no_listing" # Validate job order try: fill_in_defaults(self.tool[u"inputs"], job, fs_access) normalizeFilesDirs(job) schema = self.names.get_name("input_record_schema", "") if schema is None: raise WorkflowException("Missing input record schema: " "{}".format(self.names)) validate.validate_ex(schema, job, strict=False, logger=_logger_validation_warnings) if load_listing and load_listing != "no_listing": get_listing(fs_access, job, recursive=(load_listing == "deep_listing")) visit_class(job, ("File",), functools.partial(add_sizes, fs_access)) if load_listing == "deep_listing": for i, inparm in enumerate(self.tool["inputs"]): k = shortname(inparm["id"]) if k not in job: continue v = job[k] dircount = [0] def inc(d): # type: (List[int]) -> None d[0] += 1 visit_class(v, ("Directory",), lambda x: inc(dircount)) if dircount[0] == 0: continue filecount = [0] visit_class(v, ("File",), lambda x: inc(filecount)) if filecount[0] > FILE_COUNT_WARNING: # Long lines in this message are okay, will be reflowed based on terminal columns. _logger.warning(strip_dup_lineno(SourceLine(self.tool["inputs"], i, Text).makeError( """Recursive directory listing has resulted in a large number of File objects (%s) passed to the input parameter '%s'. This may negatively affect workflow performance and memory use. 
If this is a problem, use the hint 'cwltool:LoadListingRequirement' with "shallow_listing" or "no_listing" to change the directory listing behavior: $namespaces: cwltool: "http://commonwl.org/cwltool#" hints: cwltool:LoadListingRequirement: loadListing: shallow_listing """ % (filecount[0], k)))) except (validate.ValidationException, WorkflowException) as err: raise WorkflowException("Invalid job input record:\n" + Text(err)) files = [] # type: List[Dict[Text, Text]] bindings = CommentedSeq() tmpdir = u"" stagedir = u"" docker_req, _ = self.get_requirement("DockerRequirement") default_docker = None if docker_req is None and runtime_context.default_container: default_docker = runtime_context.default_container if (docker_req or default_docker) and runtime_context.use_container: if docker_req is not None: # Check if docker output directory is absolute if docker_req.get("dockerOutputDirectory") and \ docker_req.get("dockerOutputDirectory").startswith('/'): outdir = docker_req.get("dockerOutputDirectory") else: outdir = docker_req.get("dockerOutputDirectory") or \ runtime_context.docker_outdir or random_outdir() elif default_docker is not None: outdir = runtime_context.docker_outdir or random_outdir() tmpdir = runtime_context.docker_tmpdir or "/tmp" stagedir = runtime_context.docker_stagedir or "/var/lib/cwl" else: outdir = fs_access.realpath( runtime_context.outdir or tempfile.mkdtemp( prefix=getdefault(runtime_context.tmp_outdir_prefix, DEFAULT_TMP_PREFIX))) if self.tool[u"class"] != 'Workflow': tmpdir = fs_access.realpath(runtime_context.tmpdir or tempfile.mkdtemp()) stagedir = fs_access.realpath(runtime_context.stagedir or tempfile.mkdtemp()) builder = Builder(job, files, bindings, self.schemaDefs, self.names, self.requirements, self.hints, {}, runtime_context.mutation_manager, self.formatgraph, make_fs_access, fs_access, runtime_context.job_script_provider, runtime_context.eval_timeout, runtime_context.debug, runtime_context.js_console, runtime_context.force_docker_pull, load_listing, outdir, tmpdir, stagedir) bindings.extend(builder.bind_input( self.inputs_record_schema, job, discover_secondaryFiles=getdefault(runtime_context.toplevel, False))) if self.tool.get("baseCommand"): for index, command in enumerate(aslist(self.tool["baseCommand"])): bindings.append({ "position": [-1000000, index], "datum": command }) if self.tool.get("arguments"): for i, arg in enumerate(self.tool["arguments"]): lc = self.tool["arguments"].lc.data[i] filename = self.tool["arguments"].lc.filename bindings.lc.add_kv_line_col(len(bindings), lc) if isinstance(arg, MutableMapping): arg = copy.deepcopy(arg) if arg.get("position"): arg["position"] = [arg["position"], i] else: arg["position"] = [0, i] bindings.append(arg) elif ("$(" in arg) or ("${" in arg): cm = CommentedMap(( ("position", [0, i]), ("valueFrom", arg) )) cm.lc.add_kv_line_col("valueFrom", lc) cm.lc.filename = filename bindings.append(cm) else: cm = CommentedMap(( ("position", [0, i]), ("datum", arg) )) cm.lc.add_kv_line_col("datum", lc) cm.lc.filename = filename bindings.append(cm) # use python2 like sorting of heterogeneous lists # (containing str and int types), if PY3: key = functools.cmp_to_key(cmp_like_py2) else: # PY2 key = lambda d: d["position"] # This awkward construction replaces the contents of # "bindings" in place (because Builder expects it to be # mutated in place, sigh, I'm sorry) with its contents sorted, # supporting different versions of Python and ruamel.yaml with # different behaviors/bugs in CommentedSeq. 
bindings_copy = copy.deepcopy(bindings) del bindings[:] bindings.extend(sorted(bindings_copy, key=key)) if self.tool[u"class"] != 'Workflow': builder.resources = self.evalResources(builder, runtime_context) return builder
def _init_job(self, joborder, runtimeContext): # type: (Dict[Text, Text], RuntimeContext) -> Builder job = cast(Dict[Text, Union[Dict[Text, Any], List[Any], Text, None]], copy.deepcopy(joborder)) make_fs_access = getdefault(runtimeContext.make_fs_access, StdFsAccess) fs_access = make_fs_access(runtimeContext.basedir) # Validate job order try: fill_in_defaults(self.tool[u"inputs"], job, fs_access) normalizeFilesDirs(job) validate.validate_ex(self.names.get_name("input_record_schema", ""), job, strict=False, logger=_logger_validation_warnings) except (validate.ValidationException, WorkflowException) as e: raise WorkflowException("Invalid job input record:\n" + Text(e)) files = [] # type: List[Dict[Text, Text]] bindings = CommentedSeq() tmpdir = u"" stagedir = u"" loadListingReq, _ = self.get_requirement("http://commonwl.org/cwltool#LoadListingRequirement") if loadListingReq: loadListing = loadListingReq.get("loadListing") else: loadListing = "deep_listing" # will default to "no_listing" in CWL v1.1 dockerReq, _ = self.get_requirement("DockerRequirement") defaultDocker = None if dockerReq is None and runtimeContext.default_container: defaultDocker = runtimeContext.default_container if (dockerReq or defaultDocker) and runtimeContext.use_container: if dockerReq: # Check if docker output directory is absolute if dockerReq.get("dockerOutputDirectory") and \ dockerReq.get("dockerOutputDirectory").startswith('/'): outdir = dockerReq.get("dockerOutputDirectory") else: outdir = dockerReq.get("dockerOutputDirectory") or \ runtimeContext.docker_outdir or "/var/spool/cwl" elif defaultDocker: outdir = runtimeContext.docker_outdir or "/var/spool/cwl" tmpdir = runtimeContext.docker_tmpdir or "/tmp" stagedir = runtimeContext.docker_stagedir or "/var/lib/cwl" else: outdir = fs_access.realpath( runtimeContext.outdir or tempfile.mkdtemp( prefix=getdefault(runtimeContext.tmp_outdir_prefix, DEFAULT_TMP_PREFIX))) if self.tool[u"class"] != 'Workflow': tmpdir = fs_access.realpath(runtimeContext.tmpdir or tempfile.mkdtemp()) stagedir = fs_access.realpath(runtimeContext.stagedir or tempfile.mkdtemp()) builder = Builder(job, files, bindings, self.schemaDefs, self.names, self.requirements, self.hints, runtimeContext.eval_timeout, runtimeContext.debug, {}, runtimeContext.js_console, runtimeContext.mutation_manager, self.formatgraph, make_fs_access, fs_access, runtimeContext.force_docker_pull, loadListing, outdir, tmpdir, stagedir, runtimeContext.job_script_provider) bindings.extend(builder.bind_input( self.inputs_record_schema, job, discover_secondaryFiles=getdefault(runtimeContext.toplevel, False))) if self.tool.get("baseCommand"): for n, b in enumerate(aslist(self.tool["baseCommand"])): bindings.append({ "position": [-1000000, n], "datum": b }) if self.tool.get("arguments"): for i, a in enumerate(self.tool["arguments"]): lc = self.tool["arguments"].lc.data[i] fn = self.tool["arguments"].lc.filename bindings.lc.add_kv_line_col(len(bindings), lc) if isinstance(a, dict): a = copy.copy(a) if a.get("position"): a["position"] = [a["position"], i] else: a["position"] = [0, i] bindings.append(a) elif ("$(" in a) or ("${" in a): cm = CommentedMap(( ("position", [0, i]), ("valueFrom", a) )) cm.lc.add_kv_line_col("valueFrom", lc) cm.lc.filename = fn bindings.append(cm) else: cm = CommentedMap(( ("position", [0, i]), ("datum", a) )) cm.lc.add_kv_line_col("datum", lc) cm.lc.filename = fn bindings.append(cm) # use python2 like sorting of heterogeneous lists # (containing str and int types), # TODO: unify for both runtime if 
six.PY3: key = cmp_to_key(cmp_like_py2) else: # PY2 key = lambda dict: dict["position"] bindings.sort(key=key) if self.tool[u"class"] != 'Workflow': builder.resources = self.evalResources(builder, runtimeContext) return builder
def __getitem__(self, key): val = CommentedSeq.__getitem__(self, key) return self._evaluate_item(val)
def __init__(self, init=None): if init is not None: CommentedSeq.__init__(self, init) else: CommentedSeq.__init__(self)
def generate_orchestration_playbook(self, url=None, namespace=None, settings=None, repository_prefix=None, pull_from_url=None, tag=None, vault_files=None, **kwargs): """ Generate an Ansible playbook to orchestrate services. :param url: registry URL where images were pushed. :param namespace: registry namespace :param repository_prefix: prefix to use for the image name :param settings: settings dict from container.yml :param pull_from_url: if url to pull from is different than url :return: playbook dict """ def _update_service(service_name, service_config): if url and namespace: # Reference previously pushed image image_id = self.get_latest_image_id_for_service(service_name) if not image_id: raise exceptions.AnsibleContainerConductorException( u"Unable to get image ID for service {}. Did you forget to run " u"`ansible-container build`?".format(service_name) ) image_tag = tag or self.get_build_stamp_for_image(image_id) if repository_prefix: image_name = "{}-{}".format(repository_prefix, service_name) elif repository_prefix is None: image_name = "{}-{}".format(self.project_name, service_name) else: image_name = service_name repository = "{}/{}".format(namespace, image_name) image_name = "{}:{}".format(repository, image_tag) pull_url = pull_from_url if pull_from_url else url service_config['image'] = "{}/{}".format(pull_url.rstrip('/'), image_name) else: # We're using a local image, so check that the image was built image = self.get_latest_image_for_service(service_name) if image is None: raise exceptions.AnsibleContainerConductorException( u"No image found for service {}, make sure you've run `ansible-container " u"build`".format(service_name) ) service_config['image'] = image.tags[0] for service_name, service in iteritems(self.services): # set the image property of each container if service.get('containers'): for container in service['containers']: if container.get('roles'): container_service_name = "{}-{}".format(service_name, container['container_name']) _update_service(container_service_name, container) else: container['image'] = container['from'] elif service.get('roles'): _update_service(service_name, service) else: service['image'] = service['from'] play = CommentedMap() play['name'] = u'Manage the lifecycle of {} on {}'.format(self.project_name, self.display_name) play['hosts'] = 'localhost' play['gather_facts'] = 'no' play['connection'] = 'local' play['roles'] = CommentedSeq() play['vars_files'] = CommentedSeq() play['tasks'] = CommentedSeq() role = CommentedMap([ ('role', 'ansible.kubernetes-modules') ]) if vault_files: play['vars_files'].extend(vault_files) play['roles'].append(role) play.yaml_set_comment_before_after_key( 'roles', before='Include Ansible Kubernetes and OpenShift modules', indent=4) play.yaml_set_comment_before_after_key('tasks', before='Tasks for setting the application state. 
' 'Valid tags include: start, stop, restart, destroy', indent=4) play['tasks'].append(self.deploy.get_namespace_task(state='present', tags=['start'])) play['tasks'].append(self.deploy.get_namespace_task(state='absent', tags=['destroy'])) play['tasks'].extend(self.deploy.get_secret_tasks(tags=['start'])) play['tasks'].extend(self.deploy.get_service_tasks(tags=['start'])) play['tasks'].extend(self.deploy.get_deployment_tasks(engine_state='stop', tags=['stop', 'restart'])) play['tasks'].extend(self.deploy.get_deployment_tasks(tags=['start', 'restart'])) play['tasks'].extend(self.deploy.get_pvc_tasks(tags=['start'])) playbook = CommentedSeq() playbook.append(play) logger.debug(u'Created playbook to run project', playbook=playbook) return playbook
def get_deployment_templates(self, default_api=None, default_kind=None, default_strategy=None, engine_state=None): def _service_to_container(name, service): container = CommentedMap() container['name'] = name container['securityContext'] = CommentedMap() container['state'] = 'present' volumes = [] pod = {} for key, value in service.items(): if key in self.IGNORE_DIRECTIVES: pass elif key == 'cap_add': if not container['securityContext'].get('Capabilities'): container['securityContext']['Capabilities'] = dict(add=[], drop=[]) for cap in value: if self.DOCKER_TO_KUBE_CAPABILITY_MAPPING[cap]: container['securityContext']['Capabilities']['add'].append( self.DOCKER_TO_KUBE_CAPABILITY_MAPPING[cap]) elif key == 'cap_drop': if not container['securityContext'].get('Capabilities'): container['securityContext']['Capabilities'] = dict(add=[], drop=[]) for cap in value: if self.DOCKER_TO_KUBE_CAPABILITY_MAPPING[cap]: container['securityContext']['Capabilities']['drop'].append( self.DOCKER_TO_KUBE_CAPABILITY_MAPPING[cap]) elif key == 'command': if isinstance(value, string_types): container['args'] = shlex.split(value) else: container['args'] = value elif key == 'container_name': container['name'] = value elif key == 'entrypoint': if isinstance(value, string_types): container['command'] = shlex.split(value) else: container['command'] = copy.copy(value) elif key == 'environment': expanded_vars = self.expand_env_vars(value) if expanded_vars: container['env'] = expanded_vars elif key in ('ports', 'expose'): if not container.get('ports'): container['ports'] = [] self.add_container_ports(value, container['ports']) elif key == 'privileged': container['securityContext']['privileged'] = value elif key == 'read_only': container['securityContext']['readOnlyRootFileSystem'] = value elif key == 'stdin_open': container['stdin'] = value elif key == 'volumes': vols, vol_mounts = self.get_k8s_volumes(value) if vol_mounts: container['volumeMounts'] = vol_mounts if vols: volumes += vols elif key == 'working_dir': container['workingDir'] = value else: container[key] = value # Translate options: if service.get(self.CONFIG_KEY): for key, value in service[self.CONFIG_KEY].items(): if key == 'deployment': for deployment_key, deployment_value in value.items(): if deployment_key != 'force': self.copy_attribute(pod, deployment_key, deployment_value) return container, volumes, pod templates = CommentedSeq() for name, service_config in self._services.items(): container, volumes, pod = _service_to_container(name, service_config) labels = CommentedMap([ ('app', self._namespace_name), ('service', name) ]) state = service_config.get(self.CONFIG_KEY, {}).get('state', 'present') if state == 'present': template = CommentedMap() template['apiVersion'] = default_api template['kind'] = default_kind template['force'] = service_config.get(self.CONFIG_KEY, {}).get('deployment', {}).get('force', False) template['metadata'] = CommentedMap([ ('name', name), ('labels', copy.deepcopy(labels)), ('namespace', self._namespace_name) ]) template['spec'] = CommentedMap() template['spec']['template'] = CommentedMap() template['spec']['template']['metadata'] = CommentedMap([('labels', copy.deepcopy(labels))]) template['spec']['template']['spec'] = CommentedMap([ ('containers', [container]) # TODO: allow multiple pods in a container ]) # When the engine requests a 'stop', set replicas to 0, stopping all containers template['spec']['replicas'] = 1 if not engine_state == 'stop' else 0 template['spec']['strategy'] = CommentedMap([('type', default_strategy)]) if 
volumes: template['spec']['template']['spec']['volumes'] = volumes if pod: for key, value in pod.items(): if key == 'securityContext': template['spec']['template']['spec'][key] = value elif key != 'replicas' or (key == 'replicas' and engine_state != 'stop'): # Leave replicas at 0 when engine_state is 'stop' template['spec'][key] = value templates.append(template) return templates