def _get_config(self):
    config = None
    possibles = [
        fs.abspath(os.path.join(self.basedir, 'boss.json')),
        fs.abspath(os.path.join(self.basedir, 'boss.yml')),
    ]
    for path in possibles:
        if os.path.exists(path):
            if os.path.basename(path) == 'boss.json':
                config = self._get_json_config(path)
                break
            elif os.path.basename(path) == 'boss.yml':
                config = self._get_yaml_config(path)
                break

    if not config:
        raise boss_exc.BossTemplateError("No supported config found.")

    # fix it up with some defaults
    if 'delimiters' not in config.keys():
        config['delimiters'] = ('@', '@')
        config['start_delimiter'] = config['delimiters'][0]
        config['end_delimiter'] = config['delimiters'][1]
    else:
        config['start_delimiter'] = config['delimiters'][0]
        config['end_delimiter'] = config['delimiters'][1]

    return config
def _get_config(self):
    config = None
    possibles = [
        fs.abspath(os.path.join(self.basedir, "boss.json")),
        fs.abspath(os.path.join(self.basedir, "boss.yml")),
    ]
    for path in possibles:
        if os.path.exists(path):
            if os.path.basename(path) == "boss.json":
                config = self._get_json_config(path)
                break
            elif os.path.basename(path) == "boss.yml":
                config = self._get_yaml_config(path)
                break

    if not config:
        raise boss_exc.BossTemplateError("No supported config found.")

    # fix it up with some defaults
    if "delimiters" not in config.keys():
        config["delimiters"] = ("@", "@")
        config["start_delimiter"] = config["delimiters"][0]
        config["end_delimiter"] = config["delimiters"][1]
    else:
        config["start_delimiter"] = config["delimiters"][0]
        config["end_delimiter"] = config["delimiters"][1]

    return config
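# A minimal sketch (not taken from the original templates) of the kind of
# config dict the two _get_config() variants above produce once defaults are
# applied. The keys are inferred from how the surrounding snippets read them
# ('delimiters', 'external_files', 'injections'); the values are illustrative.
example_config = {
    'delimiters': ('@', '@'),    # default when boss.json/boss.yml omits it
    'start_delimiter': '@',      # filled in by _get_config()
    'end_delimiter': '@',        # filled in by _get_config()
    'external_files': {},        # optional, consumed by copy()
    'injections': {},            # optional, consumed by copy()
}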
def _read_partial(self, part_file):
    # try to read the partial from the template's "partials" directory
    part_path = fs.abspath(os.path.join(self.basedir, 'partials/', part_file))
    try:
        f = open(part_path, 'r')
        print("Reading partial from: %s" % part_path)
    except OSError as e:
        # partial wasn't under the template dir, so try the "partials" source
        try:
            src = self.app.db['sources']['partials']
            if src['is_local']:
                basedir = src['path']
            else:
                basedir = src['cache']
            part_path = fs.abspath(os.path.join(basedir, part_file))
            try:
                f = open(part_path, 'r')
                print("Reading partial from: %s" % part_path)
            except OSError as e:
                print("Unable to read partial %s from boss source 'partials'"
                      % part_file)
                return False
        except KeyError as e:
            return False

    part_data = f.read()
    f.close()
    return part_data
def copy(self, dest_basedir):
    self._populate_vars()
    dest_basedir = fs.abspath(dest_basedir)

    # first handle local files
    for tmpl_path in self._walk_path(self.basedir):
        dest_path = fs.abspath(re.sub(self.basedir, dest_basedir, tmpl_path))
        self._copy_path(tmpl_path, dest_path)

    # second handle external files
    if 'external_files' in self.config.keys():
        for _file, remote_uri in self.config['external_files'].items():
            dest_path = self._sub(os.path.join(dest_basedir, _file))
            remote_uri = self._sub(remote_uri)
            try:
                data = self._sub(urlopen(remote_uri).read().decode('utf8'))
            except HTTPError as e:
                data = ''
            self._write_file(dest_path, data)

    # lastly do injections
    if 'injections' in self.config.keys() \
            and len(self.config['injections']) > 0:
        for dest_path in self._walk_path(dest_basedir):
            self._inject_or_pass(dest_path)
def copy(self, dest_basedir):
    self._populate_vars()
    dest_basedir = fs.abspath(dest_basedir)

    # first handle local files
    for tmpl_path in self._walk_path(self.basedir):
        dest_path = fs.abspath(re.sub(self.basedir, dest_basedir, tmpl_path))
        self._copy_path(tmpl_path, dest_path)

    # second handle external files
    if 'external_files' in self.config.keys():
        for _file, remote_uri in self.config['external_files'].items():
            dest_path = self._sub(os.path.join(dest_basedir, _file))
            remote_uri = self._sub(remote_uri)
            try:
                data = self._sub(urlopen(remote_uri).read().decode('utf8'))
            except HTTPError as e:
                data = ''
            self._write_file(dest_path, data)

    # do injections
    if 'injections' in self.config.keys() \
            and len(self.config['injections']) > 0:
        for dest_path in self._walk_path(dest_basedir):
            self._inject_or_pass(dest_path)

    # do partials
    if 'partials' in self.config.keys() \
            and len(self.config['partials']) > 0:
        for dest_path in self._walk_path(dest_basedir):
            self._embed_or_pass(dest_path)
def _copy_path(self, tmpl_path, dest_path):
    f = open(fs.abspath(tmpl_path), 'r')
    data = f.read()
    f.close()

    dest_path = self._sub(fs.abspath(dest_path))
    dest_data = self._sub_or_pass(tmpl_path, data)
    self._write_file(dest_path, dest_data)
def _walk_path(self, path):
    for items in os.walk(fs.abspath(path)):
        for _file in items[2]:
            if _file == 'boss.yml':
                continue
            elif re.match(r'(.*)\.boss\.bak(.*)', _file):
                continue
            else:
                yield fs.abspath(os.path.join(items[0], _file))
def init_certs(app):
    certs_folder = app.config.get('esper', 'certs_folder')
    path = fs.abspath(certs_folder)

    # Check if path exists
    if not Path(path).exists():
        app.log.debug("[init_certs] Creating Certs folder!")
        fs.ensure_dir_exists(path)

    app.extend('certs_path', path)
    app.extend('local_key', fs.abspath(app.config.get('esper', 'local_key')))
    app.extend('local_cert', fs.abspath(app.config.get('esper', 'local_cert')))
    app.extend('device_cert', fs.abspath(app.config.get('esper', 'device_cert')))
def test_config_files_is_none():
    # verify the autogenerated config files list... create a unique
    # test app here since testapp removes all the config path settings
    class ThisApp(App):
        class Meta:
            argv = []
            exit_on_close = False

    with ThisApp('test-app', config_files=None) as app:
        user_home = fs.abspath(fs.HOME_DIR)
        if platform.system().lower() in ['windows']:
            files = [
                os.path.join('C:\\', 'etc', app.label, '%s.conf' % app.label),
                os.path.join(user_home, '.%s.conf' % app.label),
                os.path.join(user_home, '.%s' % app.label, 'config',
                             '%s.conf' % app.label),
            ]
        else:
            files = [
                os.path.join('/', 'etc', app.label, '%s.conf' % app.label),
                os.path.join(user_home, '.%s.conf' % app.label),
                os.path.join(user_home, '.%s' % app.label, 'config',
                             '%s.conf' % app.label),
            ]
        for f in files:
            assert f in app._meta.config_files
def _setup_file_log(self):
    """Add a file log handler."""
    file_path = os.path.expandvars(
        fs.abspath(self.app.config.get('log', 'file')))
    log_dir = os.path.dirname(file_path)
    if not os.path.exists(log_dir):
        os.makedirs(log_dir)

    if logbook.lookup_level(self.get_level()) == logbook.DEBUG:
        fmt_string = self._meta.debug_format
    else:
        fmt_string = self._meta.file_format

    if self.app.config.get('log', 'rotate'):
        from logbook import RotatingFileHandler
        file_handler = RotatingFileHandler(
            file_path,
            max_size=int(self.app.config.get('log', 'max_bytes')),
            backup_count=int(self.app.config.get('log', 'max_files')),
            format_string=fmt_string,
            level=logbook.lookup_level(self.get_level()),
            bubble=True,
        )
    else:
        from logbook import FileHandler
        file_handler = FileHandler(
            file_path,
            format_string=fmt_string,
            level=logbook.lookup_level(self.get_level()),
            bubble=True,
        )

    self._file_handler = file_handler
    self.backend.handlers.append(file_handler)
def clean(self):
    for items in os.walk(self.app.pargs.project_path):
        for _file in items[2]:
            path = fs.abspath(os.path.join(items[0], _file))
            if re.match(r'(.*)\.boss\.bak(.*)', path):
                self.app.log.warn("Removing: %s" % _file)
                os.remove(path)
def test_config_files_is_none():
    # verify the autogenerated config files list... create a unique
    # test app here since testapp removes all the config path settings
    class ThisApp(App):
        class Meta:
            argv = []
            exit_on_close = False

    with ThisApp("test-app", config_files=None) as app:
        user_home = fs.abspath(fs.HOME_DIR)
        if platform.system().lower() in ["windows"]:
            files = [
                os.path.join("C:\\", "etc", app.label, f"{app.label}.conf"),
                os.path.join(user_home, f".{app.label}.conf"),
                os.path.join(user_home, f".{app.label}", "config",
                             f"{app.label}.conf"),
            ]
        else:
            files = [
                os.path.join("/", "etc", app.label, f"{app.label}.conf"),
                os.path.join(user_home, f".{app.label}.conf"),
                os.path.join(user_home, f".{app.label}", "config",
                             f"{app.label}.conf"),
            ]
        for f in files:
            assert f in app._meta.config_files
def _setup_file_log(self):
    """Add a file log handler."""
    file_path = os.path.expandvars(fs.abspath(self.app.config.get('log', 'file')))
    log_dir = os.path.dirname(file_path)
    if not os.path.exists(log_dir):
        os.makedirs(log_dir)

    if logbook.lookup_level(self.get_level()) == logbook.DEBUG:
        fmt_string = self._meta.debug_format
    else:
        fmt_string = self._meta.file_format

    if self.app.config.get('log', 'rotate'):
        from logbook import RotatingFileHandler
        file_handler = RotatingFileHandler(
            file_path,
            max_size=int(self.app.config.get('log', 'max_bytes')),
            backup_count=int(self.app.config.get('log', 'max_files')),
            format_string=fmt_string,
            level=logbook.lookup_level(self.get_level()),
            bubble=True,
        )
    else:
        from logbook import FileHandler
        file_handler = FileHandler(
            file_path,
            format_string=fmt_string,
            level=logbook.lookup_level(self.get_level()),
            bubble=True,
        )

    self._file_handler = file_handler
    self.backend.handlers.append(file_handler)
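# Hedged sketch of the '[log]' config keys the two _setup_file_log()
# implementations above read. The section name and keys come straight from
# the code; the example values are assumptions.
#
#   [log]
#   file = ~/.myapp/logs/myapp.log   # expanded via fs.abspath + expandvars
#   rotate = true                    # selects RotatingFileHandler
#   max_bytes = 512000               # rotation size
#   max_files = 4                    # number of rotated backups to keep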
def generate(self):
    root_path = find_nearest_root()
    if not root_path:
        self.app.log.fatal(
            "Could not find the root of the project. Please change directories."
        )
        return None

    fs.ensure_dir_exists("%s/jobs" % root_path)
    job_dir = "{root}/jobs/{name}".format(root=root_path, name=self.app.pargs.name)

    if os.path.exists(job_dir):
        self.app.log.fatal(
            "Sorry, there is already a job in this directory; it can't be overwritten."
        )
        return None

    example_path = os.path.join(fs.abspath(__file__), os.pardir, os.pardir,
                                "templates", "word_count")
    data = {"name": self.app.pargs.name}
    self.app.template.copy(example_path, job_dir, data)

    self.app.log.info("""
    A cluster config is not created when you generate a job. Please refer to
    the job README.md for instructions on how to generate a cluster config
    file that can be used to run jobs on EMR.

    Alternatively, if you are familiar with the process, you can type:

        cluster_funk clusters generate-config -h
    """)
def extend_tinydb(app):
    state_file = app.config.get('tank', 'state_file')
    state_file = fs.abspath(state_file)

    state_dir = os.path.dirname(state_file)
    if not os.path.exists(state_dir):
        os.makedirs(state_dir)

    app.extend('state', TinyDB(state_file))
def _load_template_from_file(self, path):
    for templ_dir in self.app._meta.template_dirs:
        full_path = fs.abspath(os.path.join(templ_dir, path))
        if os.path.exists(full_path):
            self.app.log.debug('loading template file %s' % full_path)
            return open(full_path, encoding='utf-8', mode='r').read()
        else:
            continue
def validate_config(self):
    # fix up paths
    self.config.set('boss', 'data_dir',
                    fs.abspath(self.config.get('boss', 'data_dir')))

    # create directories
    if not os.path.exists(self.config.get('boss', 'data_dir')):
        os.makedirs(self.config.get('boss', 'data_dir'))

    # add shortcuts
    pth = os.path.join(self.config.get('boss', 'data_dir'), 'cache')
    if not os.path.exists(fs.abspath(pth)):
        os.makedirs(fs.abspath(pth))
    self.config.set('boss', 'cache_dir', pth)

    pth = os.path.join(self.config.get('boss', 'data_dir'), 'boss.db')
    self.config.set('boss', 'db_path', pth)
def resource_path(*path_parts: str) -> str:
    """
    Resolves path to a resource.
    """
    if '..' in path_parts:
        raise ValueError('parent directory references are forbidden')

    tank_src = os.path.dirname(os.path.dirname(fs.abspath(__file__)))
    return fs.join(tank_src, 'resources', *path_parts)
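# Hypothetical usage of resource_path() above; the resource names are made up
# for illustration, only the join/validation behaviour comes from the code.
#
#   resource_path('configs', 'defaults.yml')  # -> <tank_src>/resources/configs/defaults.yml
#   resource_path('..', 'secrets')            # -> ValueError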
def create_from_template(self, source, template, dest_dir):
    src = self.app.db['sources'][source]
    if src['is_local']:
        basedir = os.path.join(src['path'], template)
    else:
        basedir = os.path.join(src['cache'], template)

    tmpl = Template(self.app, fs.abspath(basedir))
    tmpl.copy(dest_dir)
def _get_json_config(self, path):
    full_path = fs.abspath(path)
    if not os.path.exists(full_path):
        raise boss_exc.BossTemplateError("Invalid template config.")

    self.app.log.debug('loading template config %s' % full_path)

    import json
    return json.load(open(full_path, 'r'))
def clean(self):
    if not len(self.app.pargs.extra) >= 1:
        raise boss_exc.BossArgumentError("Project path required.")

    for items in os.walk(self.app.pargs.extra[0]):
        for _file in items[2]:
            path = fs.abspath(os.path.join(items[0], _file))
            if re.match(r'(.*)\.boss\.bak(.*)', path):
                self.app.log.warn("Removing: %s" % _file)
                os.remove(path)
def extend_tinydb(app):
    db_file = app.config.get('myredpy', 'db_file')

    # expand full path
    db_file = fs.abspath(db_file)
    app.log.debug('Tiny db file is {}'.format(db_file))

    # ensure path exists
    db_dir = os.path.dirname(db_file)
    if not os.path.exists(db_dir):
        os.makedirs(db_dir)

    app.extend('db', TinyDB(db_file))
def add(self, label, path, local=False):
    sources = self.app.db['sources']
    cache_dir = mkdtemp(dir=self.app.config.get('boss', 'cache_dir'))

    if local is True:
        path = fs.abspath(path)

    sources[label] = dict(label=label, path=path, cache=cache_dir,
                          is_local=local, last_sync_time='never')
    self.app.db['sources'] = sources
def _get_yaml_config(self, path):
    full_path = fs.abspath(path)
    if not os.path.exists(full_path):
        raise boss_exc.BossTemplateError("Invalid template config.")

    self.app.log.debug("loading template config %s" % full_path)

    try:
        import yaml
    except ImportError as e:
        raise boss_exc.BossRuntimeError("Unable to import yaml. " +
                                        "Please install PyYAML.")

    # yaml.load() requires an explicit Loader on modern PyYAML; safe_load
    # is sufficient for a plain config document
    return yaml.safe_load(open(full_path, "r"))
def create_from_template(self, source, template, dest_dir):
    try:
        src = self.app.db["sources"][source]
    except KeyError as e:
        raise exc.BossTemplateError("Source repo '%s' " % source +
                                    "does not exist.")

    if src["is_local"]:
        basedir = os.path.join(src["path"], template)
    else:
        basedir = os.path.join(src["cache"], template)

    tmpl = TemplateManager(self.app, fs.abspath(basedir))
    tmpl.copy(dest_dir)
def extend_tinydb(app):
    app.log.info('saving data with tinydb')
    db_file = app.config.get('ccli', 'db_file')

    # ensure that we expand the full path
    db_file = fs.abspath(db_file)
    app.log.info(f'tinydb database file is: {db_file}')

    # ensure our parent directory exists
    db_dir = os.path.dirname(db_file)
    if not os.path.exists(db_dir):
        os.makedirs(db_dir)

    app.extend('db', TinyDB(db_file))
def create_from_template(self, source, template, dest_dir):
    try:
        src = self.app.db['sources'][source]
    except KeyError as e:
        raise exc.BossSourceError("Source repo '%s' " % source +
                                  "does not exist.")

    if src['is_local']:
        basedir = os.path.join(src['path'], template)
    else:
        basedir = os.path.join(src['cache'], template)

    tmpl = TemplateManager(self.app, fs.abspath(basedir))
    tmpl.copy(dest_dir)
def _get_config(self):
    config = None
    possibles = [
        fs.abspath(os.path.join(self.basedir, 'boss.json')),
        fs.abspath(os.path.join(self.basedir, 'boss.yml')),
    ]
    for path in possibles:
        if os.path.exists(path):
            if os.path.basename(path) == 'boss.json':
                config = self._get_json_config(path)
                break
            elif os.path.basename(path) == 'boss.yml':
                config = self._get_yaml_config(path)
                break

    if not config:
        raise boss_exc.BossTemplateError("No supported config found.")

    # fix it up with some defaults (dict.has_key() was removed in Python 3)
    if 'delimiter' not in config:
        config['delimiter'] = '@'

    return config
def _get_yaml_config(self, path):
    full_path = fs.abspath(path)
    if not os.path.exists(full_path):
        raise boss_exc.BossTemplateError("Invalid template config.")

    self.app.log.debug('loading template config %s' % full_path)

    try:
        import yaml
    except ImportError as e:
        raise boss_exc.BossRuntimeError("Unable to import yaml. " +
                                        "Please install PyYAML.")

    # yaml.load() requires an explicit Loader on modern PyYAML; safe_load
    # is sufficient for a plain config document
    return yaml.safe_load(open(full_path, 'r'))
def create_from_template(self, source, template, dest_dir):
    try:
        src = self.app.db['sources'][source]
    except KeyError as e:
        raise exc.BossTemplateError("Source repo '%s' " % source +
                                    "does not exist.")

    if src['is_local']:
        basedir = os.path.join(src['path'], template)
    else:
        basedir = os.path.join(src['cache'], template)

    tmpl = TemplateManager(self.app, fs.abspath(basedir))
    tmpl.copy(dest_dir)
def extend_tinydb(app):
    app.log.info('extending todo application with tinydb')
    db_file = app.config.get('todo', 'db_file')

    # ensure that we expand the full path
    db_file = fs.abspath(db_file)
    app.log.info('tinydb database file is: %s' % db_file)

    # ensure our parent directory exists
    db_dir = os.path.dirname(db_file)
    if not os.path.exists(db_dir):
        os.makedirs(db_dir)

    app.extend('db', TinyDB(db_file))
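# A minimal sketch (assuming Cement 3.x) of how an extend_tinydb-style hook is
# typically wired into an application so it runs after setup. The app label,
# config defaults, and file path below are illustrative, not from the original
# project.
from cement import App, init_defaults

CONFIG = init_defaults('todo')
CONFIG['todo']['db_file'] = '~/.todo/db.json'

class Todo(App):
    class Meta:
        label = 'todo'
        config_defaults = CONFIG
        hooks = [
            ('post_setup', extend_tinydb),
        ]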
def add(self, label, path, local=False):
    sources = self.app.db['sources']
    cache_dir = mkdtemp(dir=self.app.config.get('boss', 'cache_dir'))

    if local is True:
        path = fs.abspath(path)

    sources[label] = dict(
        label=label,
        path=path,
        cache=cache_dir,
        is_local=local,
        last_sync_time='never'
    )
    self.app.db['sources'] = sources
def extend_tinydb(app): """Add support for TinyDB.""" app.log.info('Extending PenIn with TinyDB') db_file = app.config.get('remediar', 'db_file') # Ensure that we expand the full path db_file = fs.abspath(db_file) app.log.info('TinyDB database file is: {}'.format(db_file)) # Ensure our parent directory exists db_dir = os.path.dirname(db_file) if not os.path.exists(db_dir): os.makedirs(db_dir) app.extend('db', TinyDB(db_file))
def extend_tinydb(app):
    db_file = app.config.get('esper', 'creds_file')
    app.log.debug(f"[extend_tinydb] DB File path from config: {db_file}")

    # ensure that we expand the full path
    db_file = fs.abspath(db_file)
    app.log.debug(f"[extend_tinydb] Absolute DB File path: {db_file}")

    # Create the parent folder(s)
    app.log.debug("[extend_tinydb] Creating parent folders for DB File...")
    fs.ensure_parent_dir_exists(db_file)

    # Create and assign the DB file
    app.log.debug("[extend_tinydb] Assigning DB object to app -> app.db")
    app.extend('creds', TinyDB(db_file))
def init_tinydb(self):
    """
    Initialises TinyDB.

    Returns:
        TinyDB: the initialised database handle.
    """
    self.logger.info('extending application with tinydb')
    db_file = self.config.get('venture', 'db_file')
    db_file = fs.abspath(db_file)
    self.logger.info('tinydb database file is: %s' % db_file)

    db_dir = os.path.dirname(db_file)
    if not os.path.exists(db_dir):
        os.makedirs(db_dir)

    return TinyDB(db_file)
def database_initialization_hook(app):
    '''
    Database init hook run before command processing
    '''
    app.log.debug("Initializing the upsolve database file")
    db_absolute_path = fs.abspath(app.config.get('upsolve', 'db'))
    app.log.debug('Database file is: %s' % db_absolute_path)

    db_dir = os.path.dirname(db_absolute_path)
    if not os.path.exists(db_dir):
        app.log.debug("Creating parent directory %s" % db_dir)
        os.makedirs(db_dir)

    tiny_db = TinyDB(db_absolute_path)
    app.extend('db', tiny_db)
    app.log.debug("TinyDB initialization successful, connected to %s"
                  % db_absolute_path)
def setup_db(app):
    app.extend('db', shelve.open(app.config.get('boss', 'db_path')))

    if 'sources' not in app.db.keys():
        cache_dir = fs.abspath(
            mkdtemp(dir=app.config.get('boss', 'cache_dir')))
        app.db['sources'] = dict()
        sources = app.db['sources']
        sources['boss'] = dict(
            label='boss',
            path='https://github.com/datafolklabs/boss-templates.git',
            cache=cache_dir,
            is_local=False,
            last_sync_time='never')
        app.db['sources'] = sources

    if 'templates' not in app.db.keys():
        app.db['templates'] = dict()
def setup_db(app):
    app.extend('db', shelve.open(app.config.get('boss', 'db_path')))

    if 'sources' not in app.db.keys():
        cache_dir = fs.abspath(mkdtemp(dir=app.config.get('boss', 'cache_dir')))
        app.db['sources'] = dict()
        sources = app.db['sources']
        sources['boss'] = dict(
            label='boss',
            path='https://github.com/datafolklabs/boss-templates.git',
            cache=cache_dir,
            is_local=False,
            last_sync_time='never'
        )
        app.db['sources'] = sources

    if 'templates' not in app.db.keys():
        app.db['templates'] = dict()
def sync(self, source):
    sources = self.app.db["sources"]
    src = self.app.db["sources"][source]

    if not src["is_local"]:
        if not os.path.exists(os.path.join(src["cache"], ".git")):
            shell.exec_cmd2(["git", "clone", src["path"], src["cache"]])
        else:
            orig_dir = fs.abspath(os.curdir)
            try:
                os.chdir(src["cache"])
                shell.exec_cmd2(["git", "pull"])
                os.chdir(orig_dir)
            finally:
                os.chdir(orig_dir)

    src["last_sync_time"] = datetime.now()
    sources[source] = src
    self.app.db["sources"] = sources
def sync(self, source):
    sources = self.app.db['sources']
    src = self.app.db['sources'][source]

    if not src['is_local']:
        if not os.path.exists(os.path.join(src['cache'], '.git')):
            shell.exec_cmd2(['git', 'clone', src['path'], src['cache']])
        else:
            orig_dir = fs.abspath(os.curdir)
            try:
                os.chdir(src['cache'])
                shell.exec_cmd2(['git', 'pull'])
                os.chdir(orig_dir)
            finally:
                os.chdir(orig_dir)

    src['last_sync_time'] = datetime.now()
    sources[source] = src
    self.app.db['sources'] = sources
def add_source(self):
    if not self.app.pargs.modifier1 or not self.app.pargs.modifier2:
        raise boss_exc.BossArgumentError("Repository name and path required.")

    sources = self.app.db['sources']
    label = self.app.pargs.modifier1
    path = self.app.pargs.modifier2
    cache_dir = mkdtemp(dir=self.app.config.get('boss', 'cache_dir'))

    if self.app.pargs.local:
        path = fs.abspath(path)

    sources[label] = dict(
        label=label,
        path=path,
        cache=cache_dir,
        is_local=self.app.pargs.local,
        last_sync_time='never'
    )
    self.app.db['sources'] = sources
def sync(self, source):
    sources = self.app.db['sources']
    src = self.app.db['sources'][source]

    if not src['is_local']:
        if not os.path.exists(os.path.join(src['cache'], '.git')):
            shell.exec_cmd2([
                'git', 'clone', src['path'], src['cache']
            ])
        else:
            orig_dir = fs.abspath(os.curdir)
            try:
                os.chdir(src['cache'])
                shell.exec_cmd2(['git', 'pull'])
                os.chdir(orig_dir)
            finally:
                os.chdir(orig_dir)

    src['last_sync_time'] = datetime.now()
    sources[source] = src
    self.app.db['sources'] = sources
def setup_db(app):
    app.extend('db', shelve.open(app.config.get('boss', 'db_path')))

    # Note: Can't use the SourceManager here, because it relies on the db
    # being setup/extended first.
    if 'sources' not in app.db.keys():
        cache_dir = fs.abspath(mkdtemp(dir=app.config.get('boss', 'cache_dir')))
        app.db['sources'] = dict()
        sources = app.db['sources']
        sources['boss'] = dict(
            label='boss',
            path='https://github.com/datafolklabs/boss-templates.git',
            cache=cache_dir,
            is_local=False,
            last_sync_time='never'
        )
        app.db['sources'] = sources

    if 'templates' not in app.db.keys():
        app.db['templates'] = dict()
def setup_db(app):
    app.extend('db', shelve.open(app.config.get('boss', 'db_path')))

    # Note: Can't use the SourceManager here, because it relies on the db
    # being setup/extended first.
    if 'sources' not in app.db.keys():
        cache_dir = fs.abspath(
            mkdtemp(dir=app.config.get('boss', 'cache_dir')))
        app.db['sources'] = dict()
        sources = app.db['sources']
        sources['boss'] = dict(
            label='boss',
            path='https://github.com/datafolklabs/boss-templates.git',
            cache=cache_dir,
            is_local=False,
            last_sync_time='never')
        app.db['sources'] = sources

    if 'templates' not in app.db.keys():
        app.db['templates'] = dict()
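# Hedged sketch of the shelve database layout the setup_db() variants above
# establish. The keys are taken from the code; the cache path is only an
# example of what mkdtemp() under cache_dir might return.
#
#   app.db['sources'] = {
#       'boss': {
#           'label': 'boss',
#           'path': 'https://github.com/datafolklabs/boss-templates.git',
#           'cache': '/path/to/data_dir/cache/tmpXXXXXX',
#           'is_local': False,
#           'last_sync_time': 'never',
#       },
#   }
#   app.db['templates'] = {}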
def test_abspath(self):
    path = fs.abspath('.')
    self.ok(path.startswith('/'))
def __init__(self, app, path):
    self.app = app
    self.basedir = fs.abspath(path)
    self.config = self._get_config()
    self._word_map = dict()
    self._vars = dict()
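# Hedged usage sketch for the template class this __init__ belongs to
# (referred to as Template/TemplateManager elsewhere in these snippets):
# given a directory containing a boss.json or boss.yml, the constructor loads
# the config and copy() renders the template into a destination directory.
# The paths below are illustrative.
#
#   tmpl = TemplateManager(app, fs.abspath('./my-template'))
#   tmpl.copy('./my-new-project')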
def test_abspath():
    path = fs.abspath('.')
    assert path.startswith('/')
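# For reference, a hedged sketch of what cement.utils.fs.abspath() is doing
# throughout these snippets (assumption based on its documented behaviour:
# expand '~' and return an absolute path):
#
#   from cement.utils import fs
#   fs.abspath('~/project')   # e.g. '/home/user/project'
#   fs.abspath('.')           # the current working directory, absolute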