def mergeConfigurationFiles(base_config_file_path, update_from_config_file_path, merged_save_to_path):
    """Merges two iohub configuration files into one and saves it to a file
    using the path/file name in merged_save_to_path."""
    base_config = yload(open(base_config_file_path, 'r'), Loader=yLoader)
    update_from_config = yload(open(update_from_config_file_path, 'r'), Loader=yLoader)

    def merge(update, base):
        if isinstance(update, dict) and isinstance(base, dict):
            for k, v in base.items():
                if k not in update:
                    update[k] = v
                else:
                    if isinstance(update[k], list):
                        if isinstance(v, list):
                            v.extend(update[k])
                            update[k] = v
                        else:
                            update[k].insert(0, v)
                    else:
                        update[k] = merge(update[k], v)
        return update

    merged = merge(copy.deepcopy(update_from_config), base_config)
    ydump(merged, open(merged_save_to_path, 'w'), Dumper=yDumper)
    return merged

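# A minimal, hypothetical usage sketch for mergeConfigurationFiles above: it
# writes two small configs and merges them. It assumes the same module-level
# yaml aliases (yload, yLoader, ydump, yDumper) and `copy` import that the
# function itself relies on; the file names and keys are made up for the demo.
def _demo_merge_configuration_files(tmpdir):
    import os

    base_path = os.path.join(tmpdir, 'base.yaml')
    update_path = os.path.join(tmpdir, 'session.yaml')
    merged_path = os.path.join(tmpdir, 'merged.yaml')

    with open(base_path, 'w') as f:
        f.write('data_store:\n  enable: False\n  filename: events.hdf5\n')
    with open(update_path, 'w') as f:
        f.write('data_store:\n  enable: True\n')

    merged = mergeConfigurationFiles(base_path, update_path, merged_path)
    # values from the update file win; keys only present in the base are kept
    assert merged['data_store'] == {'enable': True, 'filename': 'events.hdf5'}
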
def gitConfigCache(self, name, url):
    """Precache file for 1 hour from git and use cached file."""
    output = None
    if os.path.isfile('/tmp/dtnrm-no-config-fetch.yaml'):
        filename = '/tmp/dtnrm-link-%s.yaml' % name
        with open(filename, 'r') as fd:
            output = yload(fd.read())
    else:
        datetimeNow = datetime.datetime.now()
        filename = '/tmp/%s-%s.yaml' % (datetimeNow.strftime('%Y-%m-%d-%H'), name)
        if os.path.isfile(filename):
            with open(filename, 'r') as fd:
                output = yload(fd.read())
        else:
            datetimelasthour = datetimeNow - datetime.timedelta(hours=1)
            prevfilename = '/tmp/%s-%s.yaml' % (datetimelasthour.strftime('%Y-%m-%d-%H'), name)
            if os.path.isfile(prevfilename):
                self.logger.debug('Remove previous old cache file %s', prevfilename)
                try:
                    os.remove(prevfilename)
                    os.remove('/tmp/dtnrm-link-%s.yaml' % name)
                except OSError:
                    pass
            self.logger.debug('Receiving new file from GIT for %s', name)
            outyaml = getWebContentFromURL(url).text
            with open(filename, 'w') as fd:
                fd.write(outyaml)
            try:
                os.symlink(filename, '/tmp/dtnrm-link-%s.yaml' % name)
            except OSError:
                pass
            output = yload(outyaml)
    return output

def test_down_the_rabbit_hole(self):
    """ simulate a changed object graph """
    try:
        from yaml import dump as ydump, safe_load as yload

        somethingtotest = SomethingToTest()
        somethingtotest.var3 = Subvar("3")

        yaml_ = ydump(somethingtotest)  # probably not a good idea with untrusted data
        data = yload(yaml_)

        somevar = "somevalue"
        self.assert_exp(data, self.extension)

        somethingtotest.added_this = dict(somevar=somevar)
        somethingtotest.var3.value = "3++"

        yaml_ = ydump(somethingtotest)  # probably not a good idea with untrusted data
        data = yload(yaml_)
        self.assert_exp(data, self.extension)

    except (Exception,) as e:
        if cpdb():
            pdb.set_trace()
        raise

def __init__(self, options):
    try:
        self.options = options
        pwd = os.getcwd()
        self.workdir = self.options.workdir or pwd
        self.config = None

        fnp_config = self.options.config
        if self.options.init:
            fnp_config = self._initialize(fnp_config)

        if not fnp_config:
            for dn in [self.workdir, pwd]:
                fnp_config = os.path.join(dn, self.FN_CONFIG)
                try:
                    with open(fnp_config) as fi:
                        self.config = yload(fi)
                        break
                except (IOError,) as e:
                    pass
            else:
                msg = "missing configuration file. perhaps you wanted to use the --init option to create one?"
                print(msg)
                sys.exit(1)
        else:
            with open(fnp_config) as fi:
                self.config = yload(fi)

        self.scan = not self.options.noscan

        self.vars = dict()
        self.vars["scandir"] = self.workdir

        sectionname = "filenames"
        section = self.config["vars"][sectionname]
        for k, v in section.items():
            self.vars.update(**{"%s_%s" % (sectionname, k): v})

        self.import_classifier = ClassifierImport(self)
        self.scanwriter = ScanWriter(self)
        self.matcher = Matcher()
        self.builder = Builder(self)

    except (ValueError,) as e:
        raise
    except (Exception,) as e:
        if cpdb():
            pdb.set_trace()
        raise

def load_yaml(filename):
    '''Load a yaml file given either an open file object or a path.'''
    # duck-type check: anything with a read() method is treated as a file object
    if hasattr(filename, 'read'):
        return yload(filename, Loader=Loader)
    with open(filename) as datafd:
        data = yload(datafd, Loader=Loader)
    return data

def config(conf):
    collectd.debug('Configuring Stuff')
    global REGION, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, NAMESPACE, METRICS, INSTANCE_ID
    for node in conf.children:
        if node.key == 'region':
            REGION = node.values[0]
        if node.key == 'aws_access_key_id':
            AWS_ACCESS_KEY_ID = node.values[0]
        if node.key == 'aws_secret_access_key':
            AWS_SECRET_ACCESS_KEY = node.values[0]
        if node.key == 'namespace':
            NAMESPACE = node.values[0]
        if node.key == 'metrics_config':
            metrics_config = node.values[0]
            if not metrics_config:
                collectd.warning("Missing YAML plugins configuration; please define metrics_config")
            collectd.debug('Loading YAML plugins configuration')
            try:
                stream = open(metrics_config)
                METRICS = yload(stream)
            except:
                collectd.warning("Couldn't load YAML plugins configuration {0}".format(metrics_config))
    # get instance ID
    INSTANCE_ID = boto.utils.get_instance_metadata()['instance-id']

def getConfig(self, configFile='../../../../auth.yaml'):
    if not os.path.isfile(configFile):
        raise Exception('Config file not found: %s' % configFile)
    with open(configFile, 'r') as fd:
        self.config = yload(fd.read())
    self._validateConfig()
    self._setDefaults()

def parse(self):
    def _verify_columns(stuff):
        if 'columns' in stuff:
            if type(stuff['columns']) != type([]):
                raise TypeError('Wrong columns structure')
            for i in stuff['columns']:
                if type(i) != type({}):
                    raise TypeError("Item is not a dict")
                if 'DPDDname' not in i:
                    raise AttributeError("Entry missing DPDDname")
                if 'RPN' not in i and len(i['NativeInputs']) != 1:
                    raise AttributeError("Missing RPN attribute")
        return

    def _verify_table_spec(stuff):
        if 'table_spec' in stuff:
            if type(stuff['table_spec']) != type([]):
                raise TypeError('Wrong table_spec structure')
            for i in stuff['table_spec']:
                if type(i) != type({}):
                    raise TypeError("table spec item is not a dict")

    y = yload(self.inf, Loader=FullLoader)
    if type(y) != type({}):
        raise TypeError("Input is not a dict")
    _verify_columns(y)
    _verify_table_spec(y)
    return y

def processBookmarks(self):
    """
    Run through all bookmarks found

    :return:
    """
    try:
        with open(self.chromeBookmarks, "rb") as f:
            bk = f.readlines()
        try:
            data = u" ".join([xx.decode(u"ascii", u"ignore") for xx in bk])
            ym = yload(data)
            if isinstance(ym, dict):
                ymd = ym[u"roots"][u"bookmark_bar"][u"children"]
                self.dumpCollection(ymd)
        except Exception as msg:
            logger.error(u"%s" % msg)
            sys.exit(-1)

        logger.info(u"Saving : %s" % self.fileBookmarks)
        saveList(self.bookmarks, self.fileBookmarks)

        fld = sorted(list(set([x.lower() for x in self.folders])))
        logger.info(u"Saving : %s" % self.fileFolders)
        saveList(fld, self.fileFolders)
    except IOError as msg:
        # minimal handler, assumed: the original excerpt ends before the
        # except clause that closes this outer try
        logger.error(u"%s" % msg)

def processBookmarks(self):
    """
    Run through all bookmarks found

    :return:
    """
    try:
        with open(self.chromeBookmarks, "rb") as f:
            bk = f.readlines()
        try:
            data = u" ".join([xx.decode(u"ascii", u"ignore") for xx in bk])
            ym = yload(data)
            if isinstance(ym, dict):
                ymd = ym[u"roots"][u"bookmark_bar"][u"children"]
                self.dumpCollection(ymd)
        except Exception as msg:
            logger.error(u"%s" % msg)
            sys.exit(-1)

        logger.info(u"Saving : %s" % self.fileBookmarks)
        saveList(self.bookmarks, self.fileBookmarks)

        fld = sorted(list(set([x.lower() for x in self.folders])))
        logger.info(u"Saving : %s" % self.fileFolders)
        saveList(fld, self.fileFolders)
    except IOError as msg:
        # minimal handler, assumed: the original excerpt ends before the
        # except clause that closes this outer try
        logger.error(u"%s" % msg)

def pretty_print_list(file_name=None, data_format="JSON"):
    # print YAML or JSON representations of list data
    assert (file_name is not None), "Provide a file name"
    assert (data_format == "JSON" or data_format == "YAML"), ("Format must be 'JSON'"
                                                              " or 'YAML'")
    try:
        formatted_list = []
        with open(file_name, "r") as f:
            if data_format == "JSON":
                some_list = jload(f)
                formatted_list = jdumps(some_list)
            elif data_format == "YAML":
                some_list = yload(f)
                formatted_list = ydump(some_list, default_flow_style=False,
                                       explicit_start=True, width=1, indent=2)
    except IOError as e:
        print "Could not read file: %s" % e
    except Exception as e:
        print "Unexpected exception: %s" % e

    print "======================"
    print "list from file: %s in %s data_format:" % (file_name, data_format)
    print "======================"
    print formatted_list
    print "======================"
    print "list from file: %s in pretty_print native python" % file_name
    print "======================"
    pp(some_list, width=1)

def load(self, formatter=None):
    try:
        fd = open(self.filepath, 'r')
    except IOError:
        if self.strict is True:
            raise
        else:
            return

    _, file_extension = os.path.splitext(self.filepath)
    if file_extension.lower() in JSON_EXTENSIONS:
        import json
        self.data = dict((self.format(k, formatter), v)
                         for k, v in json.load(fd).items())
    elif file_extension.lower() in YAML_EXTENSIONS:
        from yaml import load as yload, dump as ydump
        try:
            from yaml import CLoader as Loader
        except ImportError:
            from yaml import Loader
        self.data = dict((self.format(k, formatter), v)
                         for k, v in yload(fd, Loader=Loader).items())
    elif file_extension.lower() in PYTHON_EXTENSIONS:
        mod = imp.load_source('mod', self.filepath)
        self.data = dict((self.format(k, formatter), v)
                         for k, v in vars(mod).items() if k.isupper())
    else:
        raise ValueError(
            "Unhandled file extension {0}".format(file_extension))
    fd.close()

def load(self):
    _, file_extension = os.path.splitext(self.filepath)
    fd = open(self.filepath, 'r')
    if file_extension.lower() in JSON_EXTENSIONS:
        import json
        self.data = {
            self._format_key(k): v for k, v in json.load(fd).items()
        }
    elif file_extension.lower() in YAML_EXTENSIONS:
        from yaml import load as yload, dump as ydump
        try:
            from yaml import CLoader as Loader
        except ImportError:
            from yaml import Loader
        self.data = {
            self._format_key(k): v for k, v in yload(fd, Loader=Loader).items()
        }
    elif file_extension.lower() in PYTHON_EXTENSIONS:
        mod = imp.load_source('mod', self.filepath)
        self.data = {k: v for k, v in vars(mod).items() if k.isupper()}
    else:
        raise ValueError(
            "Unhandled file extension {0}".format(file_extension))
    fd.close()

def get_newest_config(self):
    try:
        log.debug('send request for newest config: {}'.format(self.raw_json))
        url_ = API_PATH + '/configs?order=time_modified.desc&limit=1'
        response = requests.get(url=url_)
        self.raw_json = response.json()
        log.debug('received config json: {}'.format(self.raw_json))
        # Check if postgrest is up and if a valid config exists
        if not isinstance(self.raw_json, list) or not self.raw_json:
            return False
        if 'config_data' in self.raw_json[0]:
            self.raw_json_config = self.raw_json[0]['config_data']
        if 'raw_config' in self.raw_json[0]:
            self.raw_config = self.raw_json[0]['raw_config']
        if 'time_modified' in self.raw_json[0]:
            self.time_modified = self.raw_json[0]['time_modified']
        if 'comment' in self.raw_json[0]:
            self.config_comment = self.raw_json[0]['comment']
        self.config_yaml = yload(json.dumps(self.raw_json_config))
    except BaseException:
        log.exception('exception')
        return False
    return True

def pretty_print_list(file_name=None, data_format="JSON"):
    # print YAML or JSON representations of list data
    assert (file_name is not None), "Provide a file name"
    assert (data_format == "JSON" or data_format == "YAML"), ("Format must be 'JSON'"
                                                              " or 'YAML'")
    try:
        formatted_list = []
        with open(file_name, "r") as f:
            if data_format == "JSON":
                some_list = jload(f)
                formatted_list = jdumps(some_list)
            elif data_format == "YAML":
                some_list = yload(f)
                formatted_list = ydump(some_list, default_flow_style=False,
                                       explicit_start=True, width=1, indent=2)
    except IOError as e:
        print "Could not read file: %s" % e
    except Exception as e:
        print "Unexpected exception: %s" % e

    print "======================"
    print "list from file: %s in %s data_format:" % (file_name, data_format)
    print "======================"
    print formatted_list
    print "======================"
    print "list from file: %s in pretty_print native python" % file_name
    print "======================"
    pp(some_list, width=1)

def config(conf):
    collectd.debug('Configuring Stuff')
    global REGION, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, NAMESPACE, METRICS, INSTANCE_ID
    for node in conf.children:
        if node.key == 'region':
            REGION = node.values[0]
        if node.key == 'aws_access_key_id':
            AWS_ACCESS_KEY_ID = node.values[0]
        if node.key == 'aws_secret_access_key':
            AWS_SECRET_ACCESS_KEY = node.values[0]
        if node.key == 'namespace':
            NAMESPACE = node.values[0]
        if node.key == 'metrics_config':
            metrics_config = node.values[0]
            if not metrics_config:
                collectd.warning("Missing YAML plugins configuration; please define metrics_config")
            collectd.debug('Loading YAML plugins configuration')
            try:
                stream = open(metrics_config)
                METRICS = yload(stream)
            except:
                collectd.warning("Couldn't load YAML plugins configuration {0}".format(metrics_config))
    # get instance ID
    INSTANCE_ID = boto.utils.get_instance_metadata()['instance-id']

def test_0032_test_bucket(self):
    try:
        if not self.exp_tests_requirements:
            return

        if not self.has_run():
            self.test_001_scan()

        with self.get_file(FN_SCAN) as fi:
            data = yload(fi)

        got_all = data["pips"]["buckets"]["tests"]

        t_msg = "missing %s from %s.%s"
        for exp in self.exp_tests_requirements:
            found = self.mgr.import_classifier.packagetracker.di_packagename[exp]
            msg = t_msg % (exp, FN_SCAN, ":pips/tests/")
            self.assertTrue(exp in got_all, msg)

    except (Exception,) as e:
        if cpdb():
            pdb.set_trace()
        raise

def load(self, formatter=None):
    try:
        fd = open(self.filepath, 'r')
    except IOError:
        if self.strict is True:
            raise
        else:
            return

    _, file_extension = os.path.splitext(self.filepath)
    if file_extension.lower() in JSON_EXTENSIONS:
        import json
        self.data = dict((self.format(k, formatter), v)
                         for k, v in json.load(fd).items())
    elif file_extension.lower() in YAML_EXTENSIONS:
        from yaml import load as yload, dump as ydump
        try:
            from yaml import CLoader as Loader
        except ImportError:
            from yaml import Loader
        self.data = dict((self.format(k, formatter), v)
                         for k, v in yload(fd, Loader=Loader).items())
    elif file_extension.lower() in PYTHON_EXTENSIONS:
        mod = imp.load_source('mod', self.filepath)
        self.data = dict((self.format(k, formatter), v)
                         for k, v in vars(mod).items() if k.isupper())
    else:
        raise ValueError("Unhandled file extension {0}".format(file_extension))
    fd.close()

def __init__(self):
    self.fname = find_config_file()
    if path.isfile(self.fname):
        f = open(self.fname)
        self._config = yload(f, Loader=YLoader)
        f.close()
    else:
        self._config = {"awsom": {"accounts": {}}}

def __init__(self):
    self.fname = find_config_file()
    if path.isfile(self.fname):
        f = open(self.fname)
        self._config = yload(f, Loader=YLoader)
        f.close()
    else:
        self._config = {"awsom": {"accounts": {}}}

def get_all_config():
    try:
        with open(config_file, 'r') as f:
            all_config_info = yload(f)
    except FileNotFoundError:
        raise Exception(
            "Need a valid yaml file as the configuration, %s didn't work" % config_file)
    return all_config_info

def parse_config(cfg):
    config = yload(open(abspath(cfg)))
    assert isinstance(config, dict), "must be dictionary type"
    for groupKey in ['daemon', 'batch', 'software', 'storage']:
        group = config.get(groupKey, {})
        assert isinstance(group, dict), "{group} must be of type dictionary".format(group=groupKey)
    return config

def provisionallyLoadPipeline(self, staged):
    for ext in ['yml', 'yaml', 'json']:
        pipeDef = os.path.join(staged, 'pipeline_definition.' + ext)
        if os.path.isfile(pipeDef):
            break
    with open(pipeDef) as pD:
        pipeDef = pD.read()
    pipeDef = yload(pipeDef)
    return pipeDef

def _get_chunk_intervals(fpath, max_chunks):
    '''
    Parameters:
        fpath (string)     Path to properly-formatted yaml file containing
                           intervals and (optional) max # SNe per chunk
        max_chunks (int)   max # of returned intervals allowed

    Returns:
        intervals - list of (start, end) values for chunks
    '''
    y = yload(open(fpath), Loader=FullLoader)
    if not isinstance(y, dict):
        raise ValueError('variability yaml file is not a dict!')
    if not 'intervals' in y:
        raise ValueError('variability yaml file missing intervals keyword')
    intervals_yaml = y['intervals']
    if not isinstance(intervals_yaml, list):
        raise ValueError('variability yaml file has improper intervals list')
    if len(intervals_yaml) > max_chunks:
        raise ValueError('variability yaml file has too many intervals')

    intervals = []
    lens = []
    ix = 0
    for iy in intervals_yaml:
        start = int(iy['start'])
        end = int(iy['end'])
        if (start < 0) or (start > end):
            raise ValueError('variability yaml file has bad interval')
        intervals.append((start, end))
        lens.append(end + 1 - start)
        ix += 1

    if not 'max_chunk_size' in y:
        return intervals
    mcs = y['max_chunk_size']
    if mcs >= max(lens):
        return intervals

    # Split intervals as needed up to max_chunks
    ret_intervals = []
    remaining = max_chunks - len(intervals)
    if remaining == 0:
        return intervals
    for i in range(len(intervals)):
        if remaining == 0 or lens[i] <= mcs:
            ret_intervals.append(intervals[i])
            continue
        # split
        to_split = int(min(remaining + 1, np.ceil(lens[i] / mcs)))
        add_for_end = int(np.ceil(lens[i] / to_split)) - 1
        start = int(intervals[i][0])
        for k in range(to_split):
            end = min(start + add_for_end, intervals[i][1])
            ret_intervals.append((start, end))
            start = end + 1
        remaining -= (to_split - 1)

    return ret_intervals

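# A hypothetical sketch of the variability yaml file _get_chunk_intervals
# expects and of how the splitting behaves. It assumes numpy is imported as np
# and yaml's FullLoader is available, exactly as the function above does; the
# file name and numbers are made up.
def _demo_get_chunk_intervals(tmpdir):
    import os

    fpath = os.path.join(tmpdir, 'variability.yaml')
    with open(fpath, 'w') as f:
        f.write('intervals:\n'
                '  - {start: 0, end: 99}\n'
                '  - {start: 100, end: 399}\n'
                'max_chunk_size: 150\n')

    # the 300-element interval is split so that no chunk is much larger than
    # max_chunk_size, while staying within the max_chunks=4 budget
    assert _get_chunk_intervals(fpath, 4) == [(0, 99), (100, 249), (250, 399)]
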
def getConfig(self, configFile='/etc/sense-o-auth.yaml'):
    if not os.path.isfile(configFile):
        configFile = os.getenv('HOME') + '/.sense-o-auth.yaml'
        if not os.path.isfile(configFile):
            raise Exception('Config file not found: %s' % configFile)
    with open(configFile, 'r') as fd:
        self.config = yload(fd.read(), Loader=FullLoader)
    self._validateConfig()
    self._setDefaults()

def parse(self):
    """ Store information from yaml file internally """
    if self.parsed:
        return self.parsed
    with open(self.inf) as f:
        self.parsed = yload(f, Loader=FullLoader)
    self._verify()

def _getmaps(self, noask=None):
    if not self.maps:
        maplst = self.gameini
        if len(self.servers[self.srv]) > 1:
            maplst = self.servers[self.srv][1]
        if maplst.startswith('~'):
            maplst = expanduser(maplst)
        if not maplst.startswith('/'):
            print('error: cannot read maplist if no absolute path is provided')
        with open(maplst, 'r') as mfh:
            lines = mfh.readlines()
        try:
            with open(expanduser(self.maptbl), 'r') as mfh:
                self.mapnames = yload(mfh.read(), Loader=Loader)
        except FileNotFoundError:
            with open(expanduser(self.maptbl), 'w+') as mfh:
                mfh.write(ydump({}, Dumper=Dumper))
            self.mapnames = {}
        for l in lines:
            if not l or not l.startswith('MapRotation'):
                continue
            ugcid = l.split('MapId="')[1].split('", ')[0]
            gmmod = l.split('GameMode="')[1].split('")')[0]
            name = self._getmapname(ugcid)
            self.maps[name] = [ugcid, gmmod]
            self.mapnames[ugcid] = name
        with open(expanduser(self.maptbl), 'w+') as mfh:
            mfh.write(ydump(self.mapnames, Dumper=Dumper))
    if noask:
        return
    ask = [
        iList(
            'map',
            carousel=True,
            message='select map',
            choices=[m for m in self.maps.keys()] + ['<Return>'],
        ),
    ]
    mapp = list(prompt(ask).values())[0]
    if mapp == '<Return>':
        return
    mmod = self.maps[mapp][1]
    modes = [mmod] + [s for s in ['SND', 'TDM', 'DM', 'GUN'] if s != mmod]
    ask = [
        iList(
            'mod',
            carousel=True,
            message='select mode (irrelevant if set by map)',
            choices=[m for m in modes] + ['<Return>'],
        ),
    ]
    mode = list(prompt(ask).values())[0]
    if mode != '<Return>':
        return '%s %s' % (self.maps[mapp][0], mode)

def testBookmarks(self):
    bookmarks = os.getcwd() + os.sep + "test" + os.sep + "TestBookmarks"
    # read as bytes so the per-line decode below is valid
    with open(self.chromeBookmarks, "rb") as f:
        bk = f.readlines()
    data = " ".join([xx.decode("utf-8", errors="replace") for xx in bk])
    ym = yload(data)
    assert (ym is not None)

def Main():
    ymlfile = yload(
        open(
            "./ranking/list100/100.yml",
            "r",
            encoding="utf-8-sig",
        ),
        Loader=BaseLoader,
    )
    for x in ymlfile:
        print(x[":name"][2:], 601 - int(x[":rank"]))
        Single(x[":name"][2:], 601 - int(x[":rank"]))

def main(argv=None):
    parser = ArgumentParser(description="Solve a vector packing problem.")
    parser.add_argument('-i', '--input', help='input file')
    parser.add_argument('-o', '--output', default='-', help='output file')
    parser.add_argument('-P', '--pack', default='pack_by_bins', help='packing algorithm')
    parser.add_argument('-I', '--itemsort', default='none', help='item sorting algorithm')
    parser.add_argument('-B', '--binsort', default='none', help='bin sorting algorithm')
    parser.add_argument('-S', '--select', default='none', help='pairwise selection algorithm')
    parser.add_argument('-s', '--split', default=1, type=int, help='split the problem')
    args = parser.parse_args()

    args.problem = {}
    if isfile(args.input):
        args.problem = yload(open(args.input, 'r'), Loader=Loader)
    else:
        raise SystemExit("error: can't find file %s" % args.input)

    solution = pack_vectors(**args.__dict__)

    # FIXME: hacky
    mclient = None
    mcoll = None
    if args.output.startswith("mongodb://"):
        try:
            dbinfo = uri_parser.parse_uri(args.output)
            host, port = dbinfo['nodelist'][0]
            db, collection = dbinfo['database'].split('/')
            username = dbinfo['username']
            password = dbinfo['password']
            connect_url = host + ':' + str(port)
            if username is not None and password is not None:
                connect_url = username + ':' + password + '@' + connect_url
            connect_url = 'mongodb://' + connect_url
        except (AttributeError, ValueError):
            raise SystemExit('Required mongodb output url format is '
                             '"mongodb://[user:pass@]host[:port]/database/collection"')
        mclient = MongoClient(connect_url)
        mcoll = mclient[db][collection]
        if mcoll.find_one(solution) is not None:
            raise SystemExit('Solution To This Problem Already Exists!')

    if mcoll is not None and mclient is not None:
        mcoll.insert(solution)
        mclient.close()
    else:
        print(ydump(solution, Dumper=Dumper))

def test_Bookmarks(self):
    bookmarks = os.getcwd() + os.sep + "test" + os.sep + "TestBookmarks"
    with open(bookmarks, "rb") as f:
        bk = f.readlines()
    data = " ".join([xx.decode("utf-8", errors="replace") for xx in bk])
    ym = yload(data)
    assert (ym is not None)

def get_all_config():
    """ Get all configuration """
    try:
        with open(config_file, 'r') as f:
            # print("Config file %s" % (str(f)))
            # all_config_info = yload(f, Loader=yaml.FullLoader)
            all_config_info = yload(f)
    except IOError:  # FileNotFoundError does not exist on Python 2.7
        raise Exception("Need a valid yaml file as the configuration, %s didn't work" % config_file)
    return all_config_info

def test_001_init(self):
    try:
        print("self.testdir:%s" % (self.testdir))
        fnp = os.path.join(self.mgr.workdir, self.mgr.FN_CONFIG)
        with open(fnp) as fi:
            config = yload(fi)
        ppp(config)
    except (Exception,) as e:
        if cpdb():
            pdb.set_trace()
        raise

def parse(self):
    y = yload(self.inf)
    if type(y) != type([]):
        raise TypeError("Input is not a list")
    dpdd_dict = {}
    for i in y:
        if type(i) != type({}):
            raise TypeError("Item is not a dict")
        if 'DPDDname' not in i:
            raise AttributeError("Entry missing DPDDname")
        if 'RPN' not in i and len(i['NativeInputs']) != 1:
            raise AttributeError("Missing RPN attribute")
    return y

def yaml_read(path):
    """Read a dictionary from a file.

    Args:
        path (str): the input file name.

    Returns:
        dict: the data.
    """
    data = None
    with open(path, "r") as f:
        data = yload(f, Loader=YLoader)
    return data

def Main():
    print(WEEKS)
    ymlfile = yload(
        open(
            f"./ranking/list1/{WEEKS}_3.yml",
            "r",
            encoding="utf-8-sig",
        ),
        Loader=BaseLoader,
    )
    for x in ymlfile:
        print(x[":name"][2:])
        Single(x[":name"][2:])

def load(self, filePath=None):
    self._filePath = expanduser(filePath or self._filePath)
    try:
        with open(self._filePath, 'rt') as f:
            self.append(yload(f))
        self.changed = False
        self.loaded = True
        ok = True
    except:
        warning("File %s can't be read" % self._filePath)
        self.loaded = False
        ok = False
    return ok

def main(argv=None):
    parser = ArgumentParser(description="batch queue workload manager client")
    parser.add_argument('-s', '--server', default='127.0.0.1', help='bqwmd server')
    parser.add_argument('-p', '--port', default='5000', help='bqwmd port')
    # no short flag here: '-s' is already taken by --server
    parser.add_argument('--submit', help='submit jobspec file')
    args = parser.parse_args()

    configs = yload(open(args.submit, 'r'))
    service_url = "http://{}:{}/v2.0/createReservation".format(args.server, args.port)
    response = requests.post(service_url, json=configs)
    print(response.text)

def parse_select_cmdline(sortcmd):
    args = sortcmd.split(":")
    arg = args.pop(0)
    desc = False
    if arg in ["a", "d"]:
        if arg == "d":
            desc = True
        arg = args.pop(0)
    sort_key = get_select_by_name(arg)
    if desc:
        sort_key = negate_func(sort_key)
    if args:
        kwargs = yload("\n".join(arg.replace('=', ': ') for arg in args))
        sort_key = partial(sort_key, **kwargs)
    return sort_key

def load(self):
    _, file_extension = os.path.splitext(self.filepath)
    fd = open(self.filepath, 'r')
    if file_extension.lower() in JSON_EXTENSIONS:
        import json
        self.data = {self._format_key(k): v for k, v in json.load(fd).items()}
    elif file_extension.lower() in YAML_EXTENSIONS:
        from yaml import load as yload, dump as ydump
        try:
            from yaml import CLoader as Loader
        except ImportError:
            from yaml import Loader
        self.data = {self._format_key(k): v for k, v in yload(fd, Loader=Loader).items()}
    elif file_extension.lower() in PYTHON_EXTENSIONS:
        mod = imp.load_source('mod', self.filepath)
        self.data = {k: v for k, v in vars(mod).items() if k.isupper()}
    else:
        raise ValueError("Unhandled file extension {0}".format(file_extension))
    fd.close()

def parse_sort_cmdline(sortcmd):
    args = sortcmd.split(":")
    arg = args.pop(0)
    desc = False
    if arg in ["a", "d"]:
        if arg == "d":
            desc = True
        arg = args.pop(0)
    sort_key = get_sort_key_by_name(arg)
    kwargs = {}
    if args:
        kwargs.update(yload("\n".join(arg.replace('=', ': ') for arg in args)))
    if desc:
        return partial(negate_func, sort_key, **kwargs)
    if kwargs:
        return partial(sort_key, **kwargs)
    return sort_key

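# A small, self-contained sketch of the key=value tail handling shared by
# parse_sort_cmdline and parse_select_cmdline above: each trailing
# "name=value" segment is rewritten to "name: value" and parsed as YAML, so
# values pick up YAML's scalar typing. Only the yload alias is assumed here;
# the sortcmd value and key names are illustrative.
def _demo_sortcmd_kwargs(sortcmd="d:capacity:weight=2:mode=strict"):
    args = sortcmd.split(":")
    args.pop(0)  # optional 'a'/'d' ascending/descending prefix
    args.pop(0)  # sort-key name, e.g. 'capacity'
    kwargs = yload("\n".join(arg.replace('=', ': ') for arg in args))
    # -> {'weight': 2, 'mode': 'strict'}; 2 is an int thanks to YAML typing
    return kwargs
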
def main(argv=None):
    from argparse import ArgumentParser
    from os.path import isfile
    from sys import stdin, stdout
    from yaml import load as yload
    try:
        from yaml import CLoader as YLoader
    except ImportError:
        from yaml import Loader as YLoader

    engines = {"mako": render_mako, "jinja2": render_jinja2}

    parser = ArgumentParser(description="Render a file using templates.")
    parser.add_argument("-i", "--inputfile", help="input file")
    parser.add_argument("-e", "--engine", help="templating engine")
    parser.add_argument("-d", "--template_dirs", help=": delimited template search path")
    parser.add_argument("-t", "--template", help="template to apply to input file")
    parser.add_argument("-b", "--block", help="template block to override")
    parser.add_argument("-m", "--metafile", action="append", help="metadata file in yaml format")
    parser.add_argument("-v", "--var", action="append", default=[],
                        help="name=value pairs to be added to metadata")
    parser.add_argument("-o", "--outputfile", default="-", help="output file")
    parser.add_argument("-ienc", "--input_encoding", help="input encoding")
    parser.add_argument("-oenc", "--output_encoding", help="output encoding")
    args = parser.parse_args()

    meta = dict()
    # defaults...
    meta["inputfile"] = None
    meta["engine"] = "mako"
    meta["template"] = None
    meta["block"] = None
    meta["template_dirs"] = ["."]
    meta["input_encoding"] = "utf-8"
    meta["output_encoding"] = "utf-8"
    meta["output_format"] = "html5"
    meta["outputfile"] = "-"

    metafiles = []
    if isfile(config_file_name):
        metafiles.append(config_file_name)
    if args.metafile:
        metafiles += args.metafile
    if metafiles:
        for metafile in metafiles:
            if isfile(metafile):
                meta.update(yload(open(metafile, "r"), Loader=YLoader))
            else:
                raise SystemExit("error: can't find metafile %s" % metafile)

    if args.var:
        for pair in args.var:
            name, value = pair.split("=")
            meta[name] = value

    if args.inputfile:
        meta["inputfile"] = args.inputfile
    if args.engine:
        meta["engine"] = args.engine
    if args.template:
        meta["template"] = args.template
    if args.block:
        meta["block"] = args.block
    if args.template_dirs:
        meta["template_dirs"] = args.template_dirs.split(":")
    if args.input_encoding:
        meta["input_encoding"] = args.input_encoding
    if args.output_encoding:
        meta["output_encoding"] = args.output_encoding
    if args.outputfile:
        meta["outputfile"] = args.outputfile

    fp = None
    if meta["inputfile"]:
        if meta["inputfile"] == "-":
            fp = stdin
        elif isfile(meta["inputfile"]):
            fp = open(meta["inputfile"], "r")
        else:
            raise SystemExit("error: can't find %s" % args.inputfile)

    data = ""
    if fp:
        data = fp.read()

    if meta["outputfile"] == "-":
        out = stdout
    else:
        out = open(args.outputfile, "w")

    out.write(engines[meta["engine"]](data, meta))

def __init__(self, config):
    '''
    config: can be a .yml or .dfa file
    '''
    self.config = yload(open(config), Loader=Loader)
    # - Setup the Automata
    self._load_tree()

from wtforms.fields import PasswordField

from flask.ext.wtf import Form, validators, TextField

from flask import request, redirect, url_for, session, Markup
from flask.ext import wtf
from flask.ext.admin import Admin, AdminIndexView, BaseView, expose
from flask.ext.admin.contrib.sqlamodel import ModelView
from flask.ext.login import current_user

from labmanager.babel import gettext, lazy_gettext
from labmanager.models import LtUser, Course, Laboratory, PermissionToLt, PermissionToCourse, RequestPermissionLT
from labmanager.views import RedirectView
from labmanager.db import db
from labmanager.rlms import get_manager_class
import labmanager.forms as forms
from labmanager.utils import data_filename

config = yload(open(data_filename('labmanager/config/config.yml')))

#################################################################
#
# Base class
#

class PleAuthManagerMixin(object):
    def is_accessible(self):
        if not current_user.is_authenticated():
            return False
        return session['usertype'] == 'lms' and current_user.access_level == 'admin'


class L4lPleModelView(PleAuthManagerMixin, ModelView):

    def _handle_view(self, name, **kwargs):
        if not self.is_accessible():
            # assumed completion: the original excerpt ends here; redirect
            # unauthenticated users to the login view
            return redirect(url_for('login', next=request.url))

for f in tqdm(files_to_process):
    if not isfile(reco_file(f)):
        _files_to_process.append(f)
files_to_process = _files_to_process
print 'after check: found %i files to process this cycle.' % len(files_to_process)
nfiles = len(files_to_process)

chunks = [files_to_process[x:x + g_maxfiles]
          for x in xrange(0, len(files_to_process), g_maxfiles)]
print 'created %i chunks this cycle' % len(chunks)

for j, ch in enumerate(chunks):
    print '** working on chunk %i, size: %i **' % (j + 1, len(ch))
    ofile = opjoin(wd, "chunk_%i.yaml" % (j + 1))
    inf_c = ch
    out_c = [reco_file(f) for f in inf_c]
    ydump(dict(zip(inf_c, out_c)), open(ofile, 'wb'))
    assert isfile(ofile), "yaml file missing!"
    print 'size of chunk: ', len(out_c)

max_jobs = int(cfg.get("max_jobs", 10))
nch = len(chunks)
sarr = "1-{nchunks}%{jobs}".format(nchunks=nch, jobs=max_jobs) if \
    nch > max_jobs else "1-{nchunks}".format(nchunks=nch)
environ["SARR"] = sarr
# print '*** ENV DUMP ***'
# system("env | sort")

new_wrapper = opjoin(wd, "submit_slurm.sh")
make_wrapper(wrapper, new_wrapper)
system("sbatch {wrapper}".format(wrapper=new_wrapper))
#### DONE


if __name__ == '__main__':
    cfg = yload(open(argv[1], "rb"))
    main(cfg)

def load_yaml_config(filename):
    """
    Open a YAML file, load its content for the Flask config, and return it as
    a Python dict.
    """
    content = io.open(filename, 'r').read()
    return yload(content).get('flask', {})

def readConfig(scr_path):
    '''
    Returns the config dict loaded from scr_path, which must be
    the path to a YAML file.
    '''
    return yload(open(scr_path, 'r'), Loader=yLoader)

# coding: utf-8
"""Pyris configuration

Retrieve user and password from the YAML configuration file for the
database access
"""

import os
import io

from yaml import load as yload

_cfgfile = os.environ.get('PYRIS_APP_SETTINGS')
if _cfgfile is not None:
    with io.open(_cfgfile, 'r') as fobj:
        DATABASE = yload(fobj.read()).get('database', {})
else:
    DATABASE = {"USER": os.environ["USER"], "HOST": "localhost"}

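# A hypothetical sketch of the settings file this module reads when
# PYRIS_APP_SETTINGS is set: a top-level 'database' mapping whose keys end up
# in DATABASE. Only USER and HOST are implied by the fallback branch above;
# the values and file path below are made up.
#
#   export PYRIS_APP_SETTINGS=/etc/pyris/settings.yml
#
#   # settings.yml
#   database:
#     USER: pyris
#     HOST: db.example.org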