def determine_app_root(name, app_root=None):
    """Determines the root directory of a given application

    Args:
        name (str): application name
        app_root (str): application root path (optional)

    Returns:
        Root directory of app, as a string
    """
    if app_root is None:
        if name == "__main__" or "_mod_wsgi" in name:
            app_mod = imports.import_module(name)
            return abspath(
                os.path.dirname(app_mod.__file__)).rstrip('/')
        else:
            log.error("Unable to determine application root." +
                      " Using current working directory '%s'" % os.getcwd())
            return abspath(os.getcwd()).rstrip('/')
    else:
        if exists(app_root) and not os.path.isfile(app_root):
            return abspath(app_root).rstrip('/')
        else:
            raise FileNotFoundError("Invalid path" +
                                    " for root '%s'" % app_root) from None
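# Usage sketch for determine_app_root() (hypothetical call sites; the module
# name 'wsgi' and the path '/srv/myapp' are assumptions for illustration):
#
#     root = determine_app_root(__name__)              # derive root from the running module
#     root = determine_app_root('wsgi', '/srv/myapp')  # or pin an explicit, existing directory
#
# With app_root=None the root is taken from the module's __file__ when the
# name looks like a main or mod_wsgi entry point; otherwise the current
# working directory is used as a fallback.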
def __init__(self, expire=60):
    # Create initial token dict.
    self.clear()
    self._token_expire = expire
    self._rsakey = rsa.RSAKey()
    if files.exists(g.app.path.rstrip('/') + '/private.pem'):
        password = g.app.config.get('tokens', 'key_password',
                                    fallback=None)
        self._rsakey.load_pem_key_file(
            g.app.path.rstrip('/') + '/private.pem',
            password=password)
    if files.exists(g.app.path.rstrip('/') + '/public.pem'):
        self._rsakey.load_pem_key_file(
            g.app.path.rstrip('/') + '/public.pem')
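# Layout assumed by the token constructor above (a sketch; the file names and
# the [tokens] key_password option come from the code, the example passphrase
# is hypothetical):
#
#     <app_path>/private.pem    # optional signing key, loaded with key_password if set
#     <app_path>/public.pem     # optional verification key, loaded without a password
#
#     # settings.ini
#     [tokens]
#     key_password = secret-passphrase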
def copy(self, src, dst, new_extension=None):
    """Copy the resource

    Args:
        src (str): resource location
        dst (str): destination
        new_extension (str): appended to the destination name when the
            destination already exists, so existing files are not overwritten
    """
    try:
        self.exists(src, True)
        if self.is_dir(src):
            mkdir(dst, recursive=True)
            real_src = src
            walk_files = self.walk(real_src)
            for walk_file in walk_files:
                real_src = src.rstrip('/') + '/' + walk_file.strip('/')
                real_dst = dst.rstrip('/') + '/' + walk_file.strip('/')
                if self.is_dir(real_src):
                    mkdir(real_dst, recursive=True)
                else:
                    content = self.read(real_src)
                    if new_extension is not None and exists(real_dst):
                        real_dst += "." + new_extension.strip('.')
                    with open(real_dst, 'wb') as new_file:
                        new_file.write(content)
        else:
            content = self.read(src)
            src_file = src.strip('/').split('/')[-1]
            if is_dir(dst):
                dst = dst.rstrip('/') + '/' + src_file
            if new_extension is not None and exists(dst):
                dst += "." + new_extension.strip('.')
            with open(dst, 'wb') as new_file:
                new_file.write(content)
    except ImportError:
        raise ImportError(self._module) from None
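# Usage sketch for Module.copy() (hypothetical package name and destination
# paths):
#
#     module = Module('myapp')
#     # Single packaged file; if the destination already exists, the copy is
#     # written next to it with the extra extension (e.g. settings.ini.default).
#     module.copy('settings.ini', '/srv/myapp/settings.ini',
#                 new_extension='default')
#     # Packaged directory tree, copied recursively.
#     module.copy('static', '/srv/myapp/static')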
def clean_sessions(args):
    """Removes all expired session files"""
    path = args.path.rstrip('/')
    tmp_path = os.path.join(path, 'tmp')
    config = Config()
    config.load(path + '/settings.ini')
    expire = config.getint('sessions', 'expire', fallback=86400)
    if exists(tmp_path):
        files = ls(tmp_path)
        for file in files:
            # Indexed fields of each ls() entry, as used below:
            # [1] full path, [2] file name, [8] modified time.
            if file[2].startswith('session_'):
                modified = file[8]
                expired = now() - timedelta(seconds=expire)
                if modified <= expired:
                    rm(file[1])
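# Usage sketch for clean_sessions() (hypothetical argparse wiring; the 'path'
# attribute name follows the function's use of args.path):
#
#     import argparse
#     parser = argparse.ArgumentParser()
#     parser.add_argument('path',
#                         help='application root containing settings.ini and tmp/')
#     clean_sessions(parser.parse_args())
#
# Files named 'session_*' under <path>/tmp that are older than the configured
# [sessions] expire value (default 86400 seconds) are removed.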
def build_doc(root_path, venv_path, src_path, ref, doc_dir, name):
    """Render conf.py and build.sh from templates and build the Sphinx docs."""
    chdir(src_path)
    log.info("Checkout '%s/%s'" % (name, ref))
    execute(["git", "checkout", ref])

    metadata_py = src_path + '/' + name + '/metadata.py'
    exec_globals = {}
    exec(open(metadata_py).read(), exec_globals, exec_globals)

    confpy = venv_path + '/conf.py'
    if exists(confpy):
        rm(confpy)
    with resource_stream('tachweb', 'github/conf.py.tpl') as tpl_file:
        template = Template(if_bytes_to_unicode(tpl_file.read()))
        with open(confpy, 'w') as real_file:
            real_file.write(template.safe_substitute(**exec_globals))

    buildsh = venv_path + '/build.sh'
    if exists(buildsh):
        rm(buildsh)
    with resource_stream('tachweb', 'github/build.sh.tpl') as tpl_file:
        template = Template(if_bytes_to_unicode(tpl_file.read()))
        with open(buildsh, 'w') as real_file:
            real_file.write(
                template.safe_substitute(virtualenv=venv_path,
                                         src_path=src_path,
                                         doc_dir=doc_dir))
    chmod(buildsh, 700)

    return execute([
        "/usr/bin/env", venv_path + "/build.sh",
        venv_path, src_path, doc_dir
    ], check=True)
def setup(args):
    """Setup Tachyonic Package with Luxon

    Creates relevant directories and copies relevant files

    Called when **-i** is used

    Args:
        args (parse_args object): arguments gathered from terminal
    """
    path = args.path.rstrip('/')
    module = Module(args.pkg)

    no_install = ['luxon', 'psychokinetic']
    if args.pkg in no_install:
        print("You're supposed to install Luxon applications, not '%s'"
              % args.pkg)
        exit()

    def copy(name, new_extension=None):
        if module.exists(name):
            module.copy(name, path + '/' + name,
                        new_extension=new_extension)

    mkdir(path)
    if module.exists('etc'):
        mkdir("/etc/tachyonic")
        module.copy('etc', '/etc/tachyonic', 'default')
    mkdir(joinpath(path, 'tmp'))
    copy('policy.json', 'default')
    copy('settings.ini', 'default')
    try:
        if exists(path + '/settings.ini'):
            chmod(path + '/settings.ini', 640)
    except PermissionError:
        pass
    copy('wsgi.py', 'default')
    copy('static')
    mkdir('%s/templates/%s' % (path, args.pkg), recursive=True)
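# Usage sketch for setup() (hypothetical namespace; the 'pkg' and 'path'
# attribute names follow the function's use of args.pkg and args.path):
#
#     class _Args:
#         pkg = 'myapp'        # installed package shipping the resources
#         path = '/srv/myapp'  # target application root
#
#     setup(_Args())
#
# This creates /srv/myapp with tmp/ and templates/myapp/, copies policy.json,
# settings.ini, wsgi.py (falling back to a '.default' suffix when the
# destination already exists) and the static/ tree from the package, and
# installs any packaged 'etc' resources under /etc/tachyonic.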
def github(req, resp):
    root_path = g.app.path
    mkdir(joinpath(root_path, 'github'))
    mkdir(joinpath(root_path, 'docs'))

    try:
        projects = load(root_path + '/projects.pickle')
    except FileNotFoundError:
        projects = {}

    username = g.app.config.get('github', 'username')
    password = g.app.config.get('github', 'password')
    tachyonic = GitHub(auth=(username, password))

    while True:
        try:
            teams = {}
            github_teams = tachyonic.teams('TachyonicProject')
            for github_team in github_teams:
                team = github_team['name']
                if team == "Author":
                    continue
                teams[team] = {}
                github_members = tachyonic.team_members(github_team['id'])
                for github_member in github_members:
                    login = github_member['login']
                    teams[team][login] = {}
                    teams[team][login]['github'] = github_member['html_url']
                    teams[team][login]['avatar'] = github_member['avatar_url']
            save(teams, root_path + '/team.pickle', perms=664)

            save(tachyonic.projects('TachyonicProject'),
                 root_path + '/planning.pickle', perms=664)

            found = []
            log.info("Getting Repos")
            repos = tachyonic.repos('TachyonicProject')
            for repo in repos:
                name = repo['name']
                found.append(name)
                description = repo['description']
                if name not in projects:
                    projects[name] = {}

                log.info("Scanning Repo " + name)
                updated_at = utc(repo['updated_at'])
                created_at = utc(repo['created_at'])
                pushed_at = utc(repo['pushed_at'])

                if (('updated_at' not in projects[name]) or
                        ('updated_at' in projects[name] and
                         updated_at != projects[name]['updated_at']) or
                        ('pushed_at' not in projects[name]) or
                        ('pushed_at' in projects[name] and
                         pushed_at != projects[name]['pushed_at'])):
                    projects[name]['created_at'] = created_at
                    projects[name]['description'] = description
                    projects[name]['clone_url'] = repo['clone_url']
                    log.info("Getting Branches for %s" % name)
                    branches = tachyonic.branches('TachyonicProject', name)
                    branches = [branch['name'] for branch in branches]
                    projects[name]['branches'] = branches
                    log.info("Getting Tags for %s" % name)
                    tags = tachyonic.tags('TachyonicProject', name)
                    tags = [tag['name'] for tag in tags]
                    projects[name]['tags'] = tags
                    projects[name]['refs'] = version_order(branches + tags)
                    projects[name]['doc_refs'] = {}
                else:
                    log.info("Project %s Already up-to-date (%s)" %
                             (name, updated_at))

                projects[name]['updated_at'] = updated_at
                projects[name]['pushed_at'] = pushed_at

                if 'updated_doc' not in projects[name]:
                    projects[name]['updated_doc'] = {}

                for ref in projects[name]['refs']:
                    current_datetime = now()
                    if ref in projects[name]['updated_doc']:
                        commits = tachyonic.commits(
                            'TachyonicProject', name, sha=ref,
                            since=format_iso8601(
                                projects[name]['updated_doc'][ref]))
                        if len(commits) == 0:
                            log.info("Documentation" +
                                     " '%s/%s'" % (name, ref) +
                                     " Already up-to-date (%s)" % updated_at)
                            continue

                    venv_dir = "%s/github/%s_%s" % (root_path, name, ref)
                    doc_dir = "%s/docs/%s_%s" % (root_path, name, ref)
                    src_path = venv_dir + '/' + name
                    log.info("Creating Virtual Environment '%s'" % venv_dir)
                    create_env(str(venv_dir), wipe=True,
                               site_packages=False)
                    clone(projects[name]['clone_url'], src_path)
                    if (exists(src_path + '/docs/source/conf.py') and
                            exists(src_path + '/docs/Makefile')):
                        log.info("Building '%s/%s'" % (name, ref))
                        projects[name]['doc_refs'][ref] = True
                        info = build_doc(root_path, venv_dir, src_path,
                                         ref, doc_dir, name)
                        updated(name, ref, info)
                    else:
                        projects[name]['doc_refs'][ref] = False
                        log.warning("No Sphinx docs found '%s/%s'" %
                                    (name, ref))
                    projects[name]['updated_doc'][ref] = current_datetime

                save(projects, root_path + '/projects.pickle', perms=664)

            events = []
            events_ordered = []
            git_events = tachyonic.events('TachyonicProject')
            for pj in projects.copy():
                if pj not in found:
                    del projects[pj]
                else:
                    for event in git_events:
                        type = event['type']
                        payload = event['payload']
                        if type == 'PullRequestEvent':
                            pr = payload['pull_request']
                            merged = pr['merged']
                            base = pr['base']
                            ref = base['ref']
                            if merged is True:
                                merged_at = utc(pr['merged_at'])
                                events.append((merged_at,
                                               "Code Updated",
                                               "Repo " + pj + "/" + ref))
            for item in sorted(events, key=operator.itemgetter(0)):
                events_ordered.append(item)
            events_ordered = list(reversed(events_ordered))
            save(events_ordered[0:10], root_path + '/events.pickle',
                 perms=664)

            save(projects, root_path + '/projects.pickle', perms=664)
            log.info('Infinite loop sleeping 5 Minutes')
            sleep(300)
        except KeyboardInterrupt:
            print("Interrupted (Control-C / killed), exiting")
            break
        except ExecuteError as e:
            handle_error(e.title, e.description)
        except Exception as e:
            trace = str(traceback.format_exc())
            error = '%s: %s' % (object_name(e), e)
            handle_error(error, trace)