def main():
    """Entry point: convert a TOSCA CSAR zip (single positional argument)
    into a bundle file.

    Parses -h/--help and -d/--description (both exit immediately), unpacks
    the zip into a temp dir, reads its TOSCA.meta yaml, builds charms and
    relations into a fresh BUNDLE_* directory, writes the bundle file, and
    cleans up both temp directories.
    """
    # setup debug logging on the root logger used by the helpers
    global logger
    logger = logging.getLogger('root')
    FORMAT = "[%(lineno)s-%(funcName)s] %(message)s"
    logging.basicConfig(format=FORMAT)
    logger.setLevel(logging.DEBUG)

    try:
        opts, args = getopt.getopt(sys.argv[1:], "hd", ["help", "description"])
    except getopt.GetoptError as err:
        print(str(err))
        usage()
        sys.exit(2)
    for opt, arg in opts:
        if opt in ("-h", "--help"):
            usage()
            sys.exit()
        elif opt in ("-d", "--description"):
            description()
            sys.exit()
        else:
            assert False, "unhandled option"
    if len(args) != 1:
        usage()
        sys.exit(2)

    # BUG FIX: take the zip name from the positional args left over after
    # option parsing, not sys.argv[1] (which would be an option flag if
    # any option had been supplied).
    zipfn = args[0]

    # Unpack the zip file into a temp directory
    tmpdir = unpack_zip(zipfn)
    try:
        # Read the TOSCA.meta file
        yaml = parse_metafile(tmpdir)
        for t in yaml:
            logger.debug("Found yaml root item:" + t)

        bundledir = tempfile.mkdtemp(prefix="BUNDLE_", dir="./")
        cbundle = create_charms(yaml, tmpdir, bundledir)
        rbundle = create_relations(yaml, tmpdir, bundledir)
        bundlefile = create_bundle(str(cbundle) + "\n" + str(rbundle), bundledir)
        print("Import complete, bundle file is: " + bundlefile)
    finally:
        # BUG FIX: always remove the unpacked zip, even when bundle
        # creation raises; previously an exception leaked tmpdir.
        shutil.rmtree(tmpdir)
    # Should we clean up bundledir? On error only?
    # For now, clean it up always (on the success path).
    shutil.rmtree(bundledir)
def configure_tests(yaml):
    """Instantiate one test object per non-reserved config section.

    Each section name maps to a module ``tests.<section>`` and a class
    whose name is the section in CamelCase; the class is constructed
    with that section's config. Returns the list of instances.
    """
    def _build(section, cfg):
        # 'foo_bar' -> tests.foo_bar module, FooBar class
        module = importlib.import_module(f'tests.{section}')
        class_name = ''.join(word.title() for word in section.split('_'))
        return getattr(module, class_name)(cfg)

    return [
        _build(section, cfg)
        for section, cfg in yaml.items()
        if section not in RESERVED
    ]
def load_qc_input_yaml_flat(path):
    """Flatten a two-level QC yaml into a dict keyed by (cohort, sample).

    The yaml maps cohort -> sample -> {label: value}; each leaf mapping
    is shallow-copied into the result.
    """
    flat = {}
    loaded = load_yaml(path)
    for cohort, samples in loaded.items():
        for sample, labelled in samples.items():
            flat[(cohort, sample)] = dict(labelled)
    return flat
def get_scales_from_fitopt_file(self):
    """Load systematic scale factors from ``self.sys_file_in``.

    Returns an empty dict when no sys file is configured. Otherwise reads
    the yaml, drops the FLAG_USE_SAME_EVENTS marker if present, and maps
    each inner key to the first whitespace-separated token of its value,
    parsed as float. Later duplicate keys overwrite earlier ones.
    """
    if self.sys_file_in is None:
        return {}
    self.logger.debug(f"Loading sys scaling from {self.sys_file_in}")
    yaml = read_yaml(self.sys_file_in)
    # pop with a default avoids the separate membership test
    yaml.pop('FLAG_USE_SAME_EVENTS', None)
    scales = {}
    for entries in yaml.values():
        for key, text in entries.items():
            scales[key] = float(text.split(maxsplit=1)[0])
    return scales
def _pairs(yaml):
    """Yield (key, value) pairs from *yaml*.

    Accepts either a list/tuple of single-pair mappings (an "ordered
    mapping" yaml style, preserving listed order) or a plain dict
    (arbitrary dict iteration order).
    """
    # Normalize both shapes to a sequence of mappings, then walk them.
    mappings = yaml if isinstance(yaml, (list, tuple)) else (yaml,)
    for mapping in mappings:
        for key, value in mapping.items():
            yield key, value
def load_qc_input(path):
    """Load a single-patient QC yaml.

    The yaml maps patient -> sample -> library -> {label: value}. Exactly
    one patient entry is required.

    Returns:
        (data, patient) where data maps (sample, library) to a copy of the
        per-library label mapping.

    Raises:
        AssertionError: if the file does not contain exactly one patient.
    """
    yaml = load_yaml(path)
    # FIX: extract the single patient once instead of pre-reading the key
    # and then re-binding it inside a loop over the same (one-entry) dict.
    assert len(yaml) == 1, "expected exactly one patient entry"
    patient, patient_data = next(iter(yaml.items()))
    data = {}
    for sample, sample_data in patient_data.items():
        for library, library_data in sample_data.items():
            # copy so callers cannot mutate the parsed yaml in place
            data[(sample, library)] = dict(library_data)
    return data, patient
def write_bin_from_yaml(yaml):
    """Serialize a parsed-yaml value as binary on stdout.

    str  -> UTF-8 bytes followed by a NUL terminator
    int  -> one unsigned byte (must be in 0..255)
    list -> each element serialized in order
    dict -> a sized block via write_block(); recognized keys are 'size'
            (int), 'content', and optional 'fill' (byte, default 0xFF)

    Returns the number of bytes written. Exits the process with status 1
    on any malformed input.
    """
    bytes_written = 0
    # type(...) comparisons (not isinstance) mean bool, although an int
    # subclass, falls through to the type error below.
    if type(yaml) is str:
        encoded = yaml.encode('utf-8')
        sys.stdout.buffer.write(encoded)
        sys.stdout.buffer.write(b'\0')
        bytes_written = len(encoded) + 1
    elif type(yaml) is int:
        if yaml > 255 or yaml < 0:
            print("integer value ({}) is outside of byte range".format(yaml), file=sys.stderr)
            sys.exit(1)
        else:
            sys.stdout.buffer.write(struct.pack('B', yaml))
            bytes_written = 1
    elif type(yaml) is list:
        for e in yaml:
            bytes_written += write_bin_from_yaml(e)
    elif type(yaml) is dict:
        size = None
        content = None
        fill = 0xFF
        for (k, v) in yaml.items():
            if k == 'size':
                if type(v) is not int:
                    print("Block size parameter is not an integer", file=sys.stderr)
                    sys.exit(1)
                size = v
            elif k == 'content':
                content = v
            elif k == 'fill':
                if type(v) is not int:
                    print("Block fill parameter is not an integer", file=sys.stderr)
                    sys.exit(1)
                if v < 0 or v > 255:
                    print("Fill value is not a byte", file=sys.stderr)
                    sys.exit(1)
                fill = v
            else:
                print("Dict has unexpected key {}".format(k), file=sys.stderr)
                # BUG FIX: was the bare builtin exit(1); sys.exit is the
                # reliable form (exit() comes from site and may be absent).
                sys.exit(1)
        bytes_written = write_block(size, content, fill)
    else:
        print("Element is not an expected type: {}".format(type(yaml)), file=sys.stderr)
        sys.exit(1)
    return bytes_written
def createActionCategories(self, yaml, root_yaml):
    """Create the actions for every category named in *yaml*.

    Skips '_placeholder' entries; each remaining category must already
    exist in the database or CannotCreateException is raised. Returns the
    accumulated work items from createActions.
    """
    work = []
    for cat_name, cat in yaml.items():
        if cat_name == '_placeholder':
            continue
        cat_id = self.ovtDB.simple.getActionCategoryByName(cat_name)
        if cat_id is None:
            raise CannotCreateException("Action Category - '%s'" % cat_name)
        self.verboseAnalyse("%s (%s)" % (cat_name, cat_id))
        work.extend(self.createActions(cat_id, cat, root_yaml))
    return work
def emulate_conf(self, yaml, merge="replace_all", dpid=""):
    """Build a new mapping configuration by merging *yaml* into
    self.mappings for the dpids selected by *dpid*.

    merge strategies:
      - "replace_all":   start empty; keep existing config only for dpids
                         NOT selected, then apply yaml to the selected ones.
      - "combine":       start from a full copy; yaml adds on top.
      - "replace_ports": start from a full copy, but first drop the entries
                         for every port mentioned in yaml (including the
                         reverse references to it), then apply yaml.

    Returns the new configuration without touching self.mappings.

    NOTE(review): assumes self.mappings is dpid -> {port -> set(ports)};
    deepcopy preserves whatever mapping type it actually is — confirm it
    is a defaultdict(set) per dpid, since the final loop relies on
    nconf[dpid][l] auto-creating a set. Also: an unrecognized *merge*
    value would leave nconf unbound (NameError).
    """
    dpids = self.parse_dpids(dpid)
    if merge == "replace_all":
        nconf = defaultdict(lambda: defaultdict(set))
        for dpid, data in self.mappings.iteritems():
            # keep only the dpids we are NOT replacing
            if dpid not in dpids:
                nconf[dpid] = copy.deepcopy(self.mappings[dpid])
    elif merge == "combine":
        nconf = copy.deepcopy(self.mappings)
    elif merge == "replace_ports":
        nconf = copy.deepcopy(self.mappings)
        for dpid, dconf in yaml.items():
            if dpid in dpids:
                for l, r in dconf.iteritems():
                    # remove reverse references to l before dropping it
                    for i in nconf[dpid][l]:
                        nconf[dpid][i].discard(l)
                    del nconf[dpid][l]
    # apply the yaml-supplied ranges to every selected dpid
    for dpid, dconf in yaml.items():
        if dpid in dpids:
            for l, r in dconf.iteritems():
                nconf[dpid][l].update(self.expand_ranges(r))
    return nconf
def createLinkToAttributes(self, ver_id, type, yaml):
    """Link the versioned action *ver_id* to each attribute in *yaml*.

    Skips '_placeholder' entries; every other attribute must already exist
    for the given type or CannotCreateException is raised. Returns the
    accumulated work items from createLinkToAttributeValue.
    """
    work = []
    for attr_name, attr in yaml.items():
        if attr_name == '_placeholder':
            continue
        attr_name = str(attr_name)
        attr_id = self.ovtDB.simple.getAttributeByName(type, attr_name)
        if attr_id is None:
            raise CannotCreateException("attribute - '%s'" % attr_name)
        self.verboseAnalyse(" %s (%s)" % (attr_name, attr_id))
        work.extend(self.createLinkToAttributeValue(ver_id, attr_id, attr))
    return work
def roll_up_keys(yaml):
    """Recursively expand colon-separated keys into nested dicts, in place.

    {'a:b:c': 1} becomes {'a': {'b': {'c': 1}}}. Lists are processed
    element-wise; any other value is left untouched. Returns *yaml* (the
    same object, mutated).

    NOTE(review): sibling keys sharing a prefix (e.g. 'a:b' and 'a:c')
    clobber each other — the last one processed wins; confirm that input
    never contains such siblings.
    """
    if isinstance(yaml, list):
        for element in yaml:
            roll_up_keys(element)
    elif isinstance(yaml, dict):
        # snapshot the items: keys are added/removed during the walk
        for key, value in list(yaml.items()):
            parts = key.split(':')
            if len(parts) > 1:
                # replace 'head:rest' with head -> {'rest': value}
                yaml[parts[0]] = {':'.join(parts[1:]): value}
                del yaml[key]
            # FIX: dropped the dead local accumulator `f` that mirrored
            # these writes but was never read.
            roll_up_keys(yaml[parts[0]])
    return yaml
def createLinkToResourceTypes(self, ver_id, yaml):
    """Link the versioned action *ver_id* to each resource type in *yaml*.

    Skips '_placeholder' entries; every other resource type must already
    exist or CannotCreateException is raised. Returns the accumulated work
    items from createLinkToAttributes.
    """
    work = []
    for restype_name, restype in yaml.items():
        if restype_name == '_placeholder':
            continue
        restype_name = str(restype_name)
        restype_id = self.ovtDB.simple.getResourceTypeByName(restype_name)
        if restype_id is None:
            raise CannotCreateException("resource type - '%s'" % restype_name)
        self.verboseAnalyse(" %s (%s)" % (restype_name, restype_id))
        work.extend(self.createLinkToAttributes(ver_id, restype_id, restype))
    return work
def createActions(self, in_cat_id, yaml, root_yaml):
    """Create the versioned actions for every action in category *in_cat_id*.

    Skips '_placeholder' entries; every other action must already exist in
    the category or CannotCreateException is raised. Each existing action
    contributes an ActionLocker plus the work from createVersionedActions.
    """
    work = []
    for act_name, act in yaml.items():
        if act_name == '_placeholder':
            continue
        act_name = str(act_name)
        act_id = self.ovtDB.simple.getActionByName(in_cat_id, act_name)
        if act_id is None:
            raise CannotCreateException("Action - '%s'" % act_name)
        self.verboseAnalyse(" %s (%s)" % (act_name, act_id))
        work.append(ActionLocker(act_id))
        work.extend(self.createVersionedActions(act_id, act, root_yaml))
    return work
def createProducerDependencies(self, consumer_id, yaml, root_yaml):
    """Create producer dependencies for *consumer_id* from a nested yaml
    description shaped dependency-group -> action-category -> action ->
    versioned-action.

    '_placeholder' entries are skipped at every level. When a referenced
    versioned action does not exist yet in the database, its id is taken
    from the '_placeholder' recorded under the same path in *root_yaml*
    (i.e. the id of the version being created elsewhere in this run);
    MissingVersionException is raised if that path is absent.

    Returns the list of new dependencies from createProducerDependency.
    """
    newdeps = []
    for depgrp_name, depdepgrp in yaml.items():
        if depgrp_name == '_placeholder':
            continue
        depgrp_name = str(depgrp_name)
        depgrp_id = self.ovtDB.simple.getDependencyGroupByName(depgrp_name)
        for depactgrp_name, depactgrp in depdepgrp.items():
            if depactgrp_name == '_placeholder':
                continue
            depactgrp_name = str(depactgrp_name)
            depactgrp_id = self.ovtDB.simple.getActionCategoryByName(
                depactgrp_name)
            for depact_name, depact in depactgrp.items():
                if depact_name == '_placeholder':
                    continue
                depact_name = str(depact_name)
                depact_id = self.ovtDB.simple.getActionByName(
                    depactgrp_id, depact_name)
                for depver_name, depver in depact.items():
                    if depver_name == '_placeholder':
                        continue
                    depver_name = str(depver_name)
                    self.verboseAnalyse(" %s %s" % (depact_name, depver_name))
                    depver_id = self.ovtDB.simple.getVersionedActionByName(
                        depact_id, depver_name)
                    if depver_id == None:
                        # The intention here is to use the placeholder associated with the full
                        # description of the new version. There is another placeholder in the
                        # producer area of the description but that is irrelevant as it represents
                        # the dependency rather than the versioned action itself
                        if not depactgrp_name in root_yaml \
                           or not depact_name in root_yaml[depactgrp_name] \
                           or not depver_name in root_yaml[depactgrp_name][depact_name]:
                            raise MissingVersionException(
                                "No details found for new version %s:%s:%s"
                                % (depactgrp_name, depact_name, depver_name))
                        depver_id = root_yaml[depactgrp_name][depact_name][
                            depver_name]['_placeholder']
                    newdeps.extend(
                        self.createProducerDependency(
                            consumer_id, depver_id, depgrp_id, depver))
    return newdeps
def parse_type(self, yaml, json):
    """Append type descriptors extracted from *yaml* onto the *json* list.

    If *yaml* itself carries a 'type' key, a single entry named 'default'
    is appended. Otherwise each child mapping that carries a 'type' key
    contributes one entry named after its key. Mutates *json* in place.
    """
    if 'type' in yaml:
        declared = yaml['type']
        json.append({
            'type': self.get_actual_type_name(declared),
            'name': 'default',
            'array': self.check_type_is_array(declared)
        })
        return
    for field_name, field_spec in yaml.items():
        if 'type' in field_spec:
            declared = field_spec['type']
            json.append({
                'type': self.get_actual_type_name(declared),
                'name': field_name,
                'array': self.check_type_is_array(declared)
            })
def _yaml_check_map_with_meta(yaml):
    """Validate a yaml map holding at most one non-metadata field.

    *yaml* must be a dict with string keys. Keys with a leading underscore
    are metadata; only '_description' (string-valued) is recognized. At
    most one key may lack the underscore. Raises SpecParseError on any
    violation; returns None on success.
    """
    if not isinstance(yaml, dict):
        raise SpecParseError('{} must be a map. Use "key: value" syntax.'.format(yaml))
    non_meta_seen = 0
    for key, value in yaml.items():
        if not isinstance(key, str):
            raise SpecParseError('Key "{}" in {} must be a string.'.format(key, yaml))
        if key.startswith('_'):
            if key != '_description':
                raise SpecParseError('Unrecognized metadata field "{}" in {}.'.format(key, yaml))
            if not isinstance(value, str):
                raise SpecParseError('Value of "_description" ({}) in {} must be a string.'.format(
                    value, yaml))
        else:
            non_meta_seen += 1
            if non_meta_seen > 1:
                raise SpecParseError(
                    'More than one non-metadata field (no leading underscore) in {}.'.format(yaml))
def createLinkToConfigOptions(self, from_ver_id, yaml):
    """Link the versioned action *from_ver_id* to the config options (and,
    for list-valued entries, the individual option lookups) described in
    *yaml*, shaped group -> option -> lookup-name list.

    '_placeholder' entries are skipped. Every referenced group must exist
    (asserted); missing options or lookups raise CannotCreateException.
    Returns the accumulated new links.

    NOTE(review): Python 2 only — uses types.ListType.
    """
    newlinks = []
    for cfggrp_name, cfggrp in yaml.items():
        if cfggrp_name == '_placeholder':
            continue
        cfggrp_name = str(cfggrp_name)
        cfggrp_id = self.ovtDB.simple.getConfigOptionGroupByName(
            cfggrp_name)
        assert cfggrp_id != None
        self.verboseAnalyse(" %s (exists)" % cfggrp_name)
        for cfgopt_name, cfglookup in cfggrp.items():
            if cfgopt_name == '_placeholder':
                continue
            cfgopt_name = str(cfgopt_name)
            cfgopt_id = self.ovtDB.simple.getConfigOptionByName(
                cfggrp_id, cfgopt_name)
            if cfgopt_id != None:
                newlinks.extend(
                    self.createLinkToConfigOption(from_ver_id, cfgopt_name,
                                                  cfgopt_id))
                # A list value enumerates the specific lookups to link
                if type(cfglookup) == types.ListType:
                    for cfgoptlookup_name in cfglookup:
                        cfgoptlookup_name = str(cfgoptlookup_name)
                        cfgoptlookup_id = self.ovtDB.simple.getConfigOptionLookupByName(
                            cfgopt_id, cfgoptlookup_name)
                        if cfgoptlookup_id != None:
                            newlinks.extend(
                                self.createLinkToConfigOptionLookup(
                                    from_ver_id, cfgoptlookup_name,
                                    cfgoptlookup_id))
                        else:
                            raise CannotCreateException(
                                "Config Option Lookup - '%s':'%s'" %
                                (cfgopt_name, cfgoptlookup_name))
            else:
                raise CannotCreateException("Config Option - '%s'" % cfgopt_name)
    return newlinks
def __process_extends(self, yaml):
    """Resolve 'extends: <section>' references between top-level sections.

    Every dict-valued section carrying a truthy 'extends' key is rebuilt
    as a deep-copied union of the referenced section and its own data:
    lists are concatenated (base first), dicts are unioned with the
    extending section winning, any other value is overridden. Mutates and
    returns *yaml*. Raises YamlError for a missing/self reference or when
    corresponding items have mismatched types.
    """
    def _combine(base, override):
        # ValueError signals a type mismatch to the caller below.
        if type(base) != type(override):
            raise ValueError()
        if isinstance(base, list):
            return base + override
        if isinstance(base, dict):
            # union; override's entries win on key collisions
            return {**base, **override}
        return override

    # the mapping is mutated while we walk it, hence the list() snapshot
    for section, data in list(yaml.items()):
        if not isinstance(data, dict):
            continue
        referenced = data.get('extends')
        if not referenced:
            continue
        if referenced not in yaml or referenced == section:
            raise YamlError(f'Invalid section for "extends: {referenced}"')
        # Deep copies avoid shared references to nested lists/dicts.
        merged = deepcopy(yaml[referenced])
        for item, value in deepcopy(data).items():
            if item == 'extends':
                continue
            if item not in merged:
                # base has no such key; take the override verbatim
                merged[item] = value
                continue
            try:
                merged[item] = _combine(merged[item], value)
            except ValueError:
                raise YamlError(
                    f'Mismatched types for {item} in {section} vs {referenced}'
                )
        yaml[section] = merged
    return yaml
def _yaml_get_from_map_with_meta(yaml):
    """Return the first (key, value) pair whose key has no leading underscore.

    Raises KeyError when *yaml* holds only metadata (underscore) keys.
    """
    for key, value in yaml.items():
        if key.startswith('_'):
            continue
        return key, value
    raise KeyError('Non-metadata field not found in {}.'.format(yaml))