async def setup(self, token=None, id_=None):
    """Validate a Slack token (supplied or cached) and cache it on success.

    Parameters:
        token: optional Slack API token to validate.
        id_: optional Slack user id used to look up a previously cached token.

    Returns a human-readable status string; strings starting with "ERROR:"
    indicate failure.
    """
    if id_:
        # Compute the cache path outside the try so the except handler can
        # always reference it (previously a failure in AppDirs would have
        # raised NameError on `tokencache` inside the handler).
        dirs = appdirs.AppDirs('slackups', 'slackups')
        tokencache = os.path.join(dirs.user_cache_dir, str(id_) + '-slack.json')
        try:
            with open(tokencache, 'r') as f:
                token_ = f.read().strip()
        except OSError:
            # No cached token; fall back to the explicitly supplied one.
            logger.info("No cached token at " + tokencache)
            if token is None:
                return "ERROR: No token"
        else:
            # A successfully read cached token takes precedence.
            token = token_
    self.token = token
    self.client = SlackApi(token)
    self.client.call = self.client.execute_method
    res = await self.client.call('auth.test')
    logger.info(res)
    if 'ok' not in res or not res['ok'] or 'user_id' not in res:
        logger.warning("Invalid slack token!")
        return "ERROR: Invalid token?"
    self.id_ = res['user_id']
    # Cache the validated token under the authenticated user's id.
    dirs = appdirs.AppDirs('slackups', 'slackups')
    tokencache = os.path.join(dirs.user_cache_dir, str(self.id_) + '-slack.json')
    # Ensure the cache directory exists before writing (first run).
    os.makedirs(dirs.user_cache_dir, exist_ok=True)
    with open(tokencache, 'w') as f:
        f.write(token)
    logger.info("Slack user set up! " + self.id_)
    return "Slack token accepted!"
def main():
    """Main entry point: parse arguments, set up logging, run the ChatUI."""
    # Build default paths for files.
    dirs = appdirs.AppDirs('hangups', 'hangups')
    default_log_path = os.path.join(dirs.user_log_dir, 'hangups.log')
    default_cookies_path = os.path.join(dirs.user_cache_dir, 'cookies.json')
    parser = argparse.ArgumentParser(
        prog='hangups',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter
    )
    # NOTE: a second, identical `dirs = appdirs.AppDirs(...)` assignment used
    # to sit here; it was dead code and has been removed.
    parser.add_argument('-d', '--debug', action='store_true',
                        help='log detailed debugging messages')
    parser.add_argument('--log', default=default_log_path,
                        help='log file path')
    parser.add_argument('--cookies', default=default_cookies_path,
                        help='cookie storage path')
    parser.add_argument('--key-next-tab', default='ctrl d',
                        help='keybinding for next tab')
    parser.add_argument('--key-prev-tab', default='ctrl u',
                        help='keybinding for previous tab')
    parser.add_argument('--col-scheme', choices=COL_SCHEMES.keys(),
                        default='default', help='colour scheme to use')
    args = parser.parse_args()
    # Create all necessary directories.
    for path in [args.log, args.cookies]:
        directory = os.path.dirname(path)
        if directory != '' and not os.path.isdir(directory):
            try:
                os.makedirs(directory)
            except OSError as e:
                sys.exit('Failed to create directory: {}'.format(e))
    log_level = logging.DEBUG if args.debug else logging.WARNING
    logging.basicConfig(filename=args.log, level=log_level, format=LOG_FORMAT)
    try:
        ChatUI(args.cookies, {
            'next_tab': args.key_next_tab,
            'prev_tab': args.key_prev_tab,
        }, COL_SCHEMES[args.col_scheme])
    except KeyboardInterrupt:
        pass
    except:
        # urwid will prevent some exceptions from being printed unless we
        # print a newline first.  The bare except is intentional: everything
        # is re-raised immediately.
        print('')
        raise
def login(self):
    """Authenticate with Hangouts using the stored email/password,
    caching the OAuth refresh token in the user cache directory."""
    cache_dir = appdirs.AppDirs('hangups', 'hangups').user_cache_dir
    refresh_token = hangups.RefreshTokenCache(
        os.path.join(cache_dir, 'refresh_token.txt'))
    authenticator = utils.Authenticator(
        self.auth['email'], self.auth['password'])
    return hangups.get_auth(authenticator, refresh_token)
def setup(silentprocesses):
    """Initialise external configuration and, when available, watch the
    whitelist file for changes via watchdog."""
    global silent, configdir, whitelist
    silent = silentprocesses
    try:
        import appdirs, shutil
        configdir = appdirs.AppDirs("pyrsi", "tukkek").user_config_dir
        os.makedirs(configdir, exist_ok=True)
        whitelist = os.path.join(configdir, FILENAME)
        print(f'Reading external configuration: {whitelist}...')
        # Seed the whitelist from the bundled template on first run.
        if not os.path.exists(whitelist):
            shutil.copyfile('whitelist-template.cfg', whitelist)
        update()
    except ModuleNotFoundError:
        print('appdirs module not found, ignoring external configuration...')
        return
    try:
        import watchdog, watchdog.observers, watchdog.events

        class Updater(watchdog.events.FileSystemEventHandler):
            # Reload the whitelist whenever its file is modified.
            def on_modified(self, event):
                if event.src_path == whitelist:
                    update()

        observer = watchdog.observers.Observer()
        observer.schedule(Updater(), configdir)
        observer.start()
    except ModuleNotFoundError:
        print(
            "python3-watchdog not found, whitelist auto-reloading won't be enabled."
        )
        return
def test_load_config_appdir_discovery_compat_found(tmp_path):
    """
    Ensure that a compatible version's config directory will be used.
    """
    compat_major = "COMPAT"  # Doesn't need to be an integer
    compat_appdirs = appdirs.AppDirs(
        "medallion", "oasis-open", version=compat_major,
    )
    compat_cfg_p = pathlib.Path(compat_appdirs.site_config_dir)

    def fake_is_dir(inst):
        # Only the COMPAT candidate "exists" on disk.
        return inst == compat_cfg_p

    # Mock the `sorted()` builtin for the config module when we reload it
    # and return a faked "CURRENT" candidate and a "COMPAT" candidate which
    # our fake_is_dir() knows to return `True` for.
    sorted_patch = mock.patch(
        "medallion.config.sorted",
        return_value=["CURRENT", compat_major],
    )
    isdir_patch = mock.patch("pathlib.Path.is_dir", new=fake_is_dir)
    with sorted_patch as mock_sorted, isdir_patch:
        m_cfg_local = importlib.reload(m_cfg)
    mock_sorted.assert_called_once_with(mock.ANY, reverse=True)
    assert m_cfg_local.DEFAULT_CONFFILE == compat_cfg_p / "medallion.conf"
    assert m_cfg_local.DEFAULT_CONFDIR == compat_cfg_p / "config.d"
def __init__(self, filename: str):
    """Open the settings file in the ActivityBrowser user-data directory,
    creating that directory and migrating old settings if necessary.

    :param filename: name of the settings file inside the data directory.
    """
    ab_dir = appdirs.AppDirs("ActivityBrowser", "ActivityBrowser")
    # exist_ok makes the previous isdir() pre-check redundant (and removes
    # the race between check and creation).
    os.makedirs(ab_dir.user_data_dir, exist_ok=True)
    self.move_old_settings(ab_dir.user_data_dir, filename)
    super().__init__(ab_dir.user_data_dir, filename)
def get_config(author_name='oliver2213', app_name='mqn'):
    """Locate and load the application's TOML config file.

    Searches, in priority order: the current working directory, the user's
    platform config directory, then the application directory (the source
    directory when running from source, or the bundle/temp directory when
    frozen by PyInstaller).

    Returns:
        (config, path): parsed config dict and the path it was loaded from,
        or (None, None) if no config file was found.
    """
    confname = app_name + ".conf"
    # app_dir points at files included with the application, and works
    # whether or not the app is bundled.  (Renamed from `dir`, which
    # shadowed the builtin.)
    if getattr(sys, 'frozen', False):
        # we are frozen
        app_dir = sys._MEIPASS
    else:
        app_dir = os.path.dirname(os.path.abspath(__file__))
    user_config_dir = appdirs.AppDirs(
        appname=app_name, appauthor=author_name).user_config_dir
    # One loop replaces three copy-pasted lookup branches; isfile() alone
    # suffices since it implies existence.
    for candidate_dir in (os.getcwd(), user_config_dir, app_dir):
        path = os.path.join(candidate_dir, confname)
        if os.path.isfile(path):
            with open(path, 'r') as f:
                return toml.load(f), path
    return None, None  # no config found
def load(cls):
    """Load settings.yml from the user config directory into cls.__instance.

    Creates the config and defaults directories on first run.  If the
    settings file cannot be parsed it is renamed aside (with a timestamp)
    and default settings are used instead.
    """
    ad = appdirs.AppDirs(APP_NAME, APP_AUTHOR)
    config_dir = ad.user_config_dir
    # makedirs(exist_ok=True) handles missing parents, unlike os.mkdir.
    os.makedirs(config_dir, exist_ok=True)
    defaults_dir = os.path.join(config_dir, 'defaults')
    os.makedirs(defaults_dir, exist_ok=True)
    file_path = os.path.join(config_dir, 'settings.yml')
    if os.path.isfile(file_path):
        try:
            with open(file_path, 'r') as file:
                # safe_load: yaml.load without a Loader is deprecated and
                # can execute arbitrary constructors.
                settings = yaml.safe_load(file)
        except Exception:
            # Move the unreadable file aside so it can be inspected later.
            # BUG FIX: `ad.site_config_dir()` called a property (TypeError);
            # the invalid file is now kept next to the original in the
            # user's writable config directory.
            time = datetime.datetime.now().isoformat('_', 'seconds').replace(':', '')
            os.rename(file_path, os.path.join(
                config_dir, 'settings_invalid_{0}.yml'.format(time)))
            settings = cls.__Settings()
    else:
        settings = cls.__Settings()
    cls.__appdirs = ad
    cls.__defaults_dir = defaults_dir
    cls.__file_path = file_path
    cls.__instance = settings
async def setup(self, token=None):
    """Authenticate with Hangouts using cached cookies and start the client.

    Sets self.id_ to 'ok' once the client has connected and populated the
    user list; returns early (leaving id_ unset) on auth or connect failure.
    """
    logger.info("setting up hangups...")
    self.token = token
    dirs = appdirs.AppDirs('slackups', 'slackups')
    tokencache = os.path.join(dirs.user_cache_dir,
                              str(self.user.slack.id_) + '-cookies.json')
    logger.info("token cache: " + tokencache)
    try:
        self.cookies = hangups.auth.get_auth(self.auth_code_f, tokencache)
    except Exception:
        # Auth is best-effort here, but log the actual cause instead of
        # silently swallowing it with a bare except.
        logger.exception("No hangouts auth")
        return
    logger.info("Hangouts auth seems ok")
    asyncio.ensure_future(self.run())
    logger.info("hangups scheduled for " + str(self.user.slack.id_))
    # Poll for up to ~10 seconds for the client to report its user list.
    n = 0.0
    while self.userList is None and n < 10:
        await asyncio.sleep(0.1)
        n += 0.1
    if self.userList is None:
        logger.warning("Never connected")
        return
    logger.info("!!!Hangups ok!!!")
    self.id_ = 'ok'
def initialize(self, options: 'argparse.Namespace', config_files: Optional[List[str]] = None):
    """
    Loads all config files, then parses all command line arguments.

    Parameters
    ----------
    options: argparse.Namespace
        Command line options from `setup_arg_parser` to parse.
    config_files: Optional[List[str]]
        If specified, loads only the listed files. Otherwise, loads the
        default config files.
    """
    self.reset()
    if config_files is None:
        # Default search order: site-wide config first, then the user's,
        # so per-user settings override site-wide ones.
        dirs = appdirs.AppDirs('delta', 'nasa')
        config_files = [os.path.join(d, 'delta.yaml')
                        for d in (dirs.site_config_dir, dirs.user_config_dir)]
    for filename in config_files:
        if os.path.exists(filename):
            config.load(filename)
    if options is not None:
        config.parse_args(options)
def fileexcepthook(exception_type, exception_value, traceback_object):
    """Log an unhandled exception and build an HTML crash notice.

    Returns the HTML message containing version, timestamp, reporting
    instructions, and the formatted traceback.
    """
    logger = logging.getLogger(__name__)
    exceptionText = "".join(
        traceback.format_exception(exception_type, exception_value,
                                   traceback_object))
    logger.critical("Unhandled exception: %s", exceptionText)
    versionInfo = "Friture " + friture.__version__
    timeString = time.strftime("%Y-%m-%d, %H:%M:%S")  # same as in analyzer.py
    logFileName = "friture.log.txt"
    dirs = appdirs.AppDirs("Friture", "")
    logDir = dirs.user_data_dir
    email = "*****@*****.**"
    # BUG FIX: user-facing typo "Opps" -> "Oops"; the mailto subject
    # previously rendered as "Friture%20acrash report" (typo plus an
    # unencoded space) -- %% survives the %-formatting below as %20.
    notice = \
        """<h1>Oops! Something went wrong!</h1>\n\n"""\
        """<p>Sorry, there was an error we could not handle.</p>"""\
        """<p>You can choose to abort, or to ignore the error and try to continue """\
        """(this is not guaranteed to work).</p>"""\
        """<h2>Please help us fix it!</h2>\n\n"""\
        """<p>Please contact us directly via email at <a href="mailto:%s?Subject=Friture%%20crash%%20report">%s</a> """\
        """and include the log file named <i>%s</i> from the following folder:</p>"""\
        """<p><a href="file:///%s">%s</a></p>"""\
        """<p>Alternatively, if you have a GitHub account, you can create a new issue on <a href="https://github.com/tlecomte/friture/issues">https://github.com/tlecomte/friture/issues</a></p>"""\
        """<h3>Error details</h3>""" % \
        (email, email, logFileName, logDir, logDir)
    # NOTE(review): the final replace() appears to substitute a space for a
    # space; it looks like a garbled '&nbsp;' replacement -- confirm against
    # upstream before changing it.
    msg = notice + timeString + ' (%s)' % versionInfo + '<br>' + exceptionText.replace(
        "\r\n", "\n").replace("\n", "<br>").replace(" ", ' ')
    return msg
def __init__(self, app, author, file, defaults):
    """Open (or create) the shelve-backed persistent config store.

    Parameters:
        app, author: identify the appdirs directory the data lives under.
        file: base name of the shelve file.
        defaults: dict of defaults deep-merged into the loaded data.

    If the on-disk pickle data is corrupted and only one candidate file
    exists, attempts a best-effort recovery from the shelve's in-memory
    cache; with multiple candidate files it refuses and raises.
    """
    self._file = file
    self._app_dir = appdirs.AppDirs(app, author)
    # Per-key lists of change-listener callbacks.
    self._listeners = defaultdict(list)
    files = self._get_files()
    if len(files):
        self.data = self._get_shelve()
        try:
            # Merging defaults forces a read, which is what surfaces a
            # corrupted pickle as UnpicklingError.
            self._deep_update(self.data, defaults)
        except pickle.UnpicklingError:
            if len(files) > 1:
                # we cannot tell whether the config file is really
                # corrupted if there are multiple possibly non-pickle files
                raise Exception(
                    "Cannot perform recovery, multiple config files found!"
                )
            # The pickle file is corrupted
            logging.error("Config file is corrupted, attempting recovery.")
            # the cache contains the data that was recoverable
            recovered = self.data.cache
            self.data.close()
            # delete all the generated pickle files
            for f in self._get_generated_files():
                Path(f).unlink(True)
            self.data = self._get_shelve()
            self._deep_update(self.data, defaults)
            # restore the little we could recover
            self._deep_update(self.data, recovered)
    else:
        # First run: create the directory and seed with defaults.
        make_path(self.get_dir())
        self.data = self._get_shelve()
        self.data.update(defaults)
    # Ensure the shelve is flushed/closed at interpreter exit.
    atexit.register(self._release)
def __init__(self, app, author, file, defaults):
    """Open (or create) the shelve-backed config store for `file`.

    Parameters:
        app, author: identify the appdirs directory the data lives under.
        file: base name of the shelve file ("<file>.dat" on disk).
        defaults: dict of defaults deep-merged into the loaded data.

    Attempts a best-effort recovery from the shelve's in-memory cache if
    the on-disk pickle data is corrupted.
    """
    self._file = file
    self._app_dir = appdirs.AppDirs(app, author)
    # Per-key lists of change-listener callbacks.
    self._listeners = defaultdict(list)
    path = os.path.join(self.get_dir(), "{}.dat".format(file))
    if os.path.exists(path):
        self.data = self._get_shelve()
        try:
            # Merging defaults forces a read, which is what surfaces a
            # corrupted pickle as UnpicklingError.
            self._deep_update(self.data, defaults)
        except pickle.UnpicklingError:
            # The pickle file is corrupted
            logging.error("Config file is corrupted, attempting recovery.")
            # the cache contains the data that was recoverable
            recovered = self.data.cache
            self.data.close()
            # delete all the generated pickle files usually dir, bak and dat
            for f in Path(self.get_dir()).glob('{}.*'.format(file)):
                f.unlink(True)
            self.data = self._get_shelve()
            self._deep_update(self.data, defaults)
            # restore the little we could recover
            self._deep_update(self.data, recovered)
    else:
        # First run: create the directory and seed with defaults.
        make_path(self.get_dir())
        self.data = self._get_shelve()
        self.data.update(defaults)
    # Ensure the shelve is flushed/closed at interpreter exit.
    atexit.register(self._release)
def from_module_spec(
    module_spec: importlib.machinery.ModuleSpec,
) -> appdirs.AppDirs:
    """Build an AppDirs named after the distribution owning the module."""
    # The distribution is looked up by the module's top-level package name.
    top_level_package = module_spec.name.partition(".")[0]
    metadata = importlib.metadata.distribution(top_level_package).metadata
    return appdirs.AppDirs(appname=metadata["Name"])
def __init__(self, args: Configuration):
    """Initialise the downloader from parsed configuration.

    Stores the configuration, resolves bdfr's per-user directories,
    records a single run timestamp, builds the internal objects, and
    resolves the list of reddit sources to process.
    """
    self.args = args
    # bdfr's config/cache locations ('bdfr' app name, 'BDFR' author).
    self.config_directories = appdirs.AppDirs('bdfr', 'BDFR')
    # Recorded once so the whole run shares a single start time.
    self.run_time = datetime.now().isoformat()
    self._setup_internal_objects()
    self.reddit_lists = self.retrieve_reddit_lists()
def createFileLogHandler(self, filename=None):
    """
    Create a file log handler to store script logs. Called automatically
    by the default :func:`setUp` method if logToFile is True. Removes any
    existing file log handlers.

    :param filename: filename of new log file
    :type filename: str
    """
    if self._handler_file is not None:
        self.__logger.removeHandler(self._handler_file)
    if filename is None:
        filename = time.strftime("%Y%m%d-%H%M%S-" + self.fqn + ".log")
    try:
        # Best effort: place the log file in the per-user Labtronyx log
        # directory, creating it if needed.  appdirs may not be installed.
        import appdirs
        dirs = appdirs.AppDirs("Labtronyx", roaming=True)
        log_path = dirs.user_log_dir
        if not os.path.exists(log_path):
            os.makedirs(log_path)
        filename = os.path.join(log_path, filename)
    except Exception:
        # Deliberately best-effort (fall back to a relative filename), but
        # narrowed from a bare except so KeyboardInterrupt/SystemExit are
        # no longer swallowed.
        pass
    self.logger.info("Logging to file: %s", filename)
    self._handler_file = logging.FileHandler(filename)
    self._handler_file.setFormatter(self._formatter)
    self.__logger.addHandler(self._handler_file)
async def slackSetup(self):
    """Load bot/admin tokens from the user config directory, connect the
    admin Slack API client, enumerate conversations, and start the bot.

    Exits the process if either token file cannot be read.
    """
    dirs = appdirs.AppDirs('slackups', 'slackups')
    botTokenPath = os.path.join(dirs.user_config_dir, 'bot.token')
    adminTokenPath = os.path.join(dirs.user_config_dir, 'admin.token')
    try:
        with open(botTokenPath, 'r') as f:
            botToken = f.read().strip()
    except Exception:
        logger.exception("Loading bot token %s", botTokenPath)
        sys.exit(0)
    try:
        with open(adminTokenPath, 'r') as f:
            adminToken = f.read().strip()
    except Exception:
        logger.exception("Loading admin token %s", adminTokenPath)
        sys.exit(0)
    self.slackAPI = SlackApi(adminToken)
    self.slackAPI.call = self.slackAPI.execute_method
    channels = await self.slackAPI.call('channels.list')
    print("Got channels: " + str(channels))
    groups = await self.slackAPI.call('groups.list')
    # BUG FIX: previously printed `channels` again instead of `groups`.
    print("Got groups: " + str(groups))
    ims = await self.slackAPI.call('im.list')
    print("Got IMs: " + str(ims))
    self.bot = await AdminBot.from_api_token(botToken)
    self.bot.main = self
    await self.bot.setup()
def test_dirs(self):
    """All AppDirs path attributes should be native strings."""
    dirs = appdirs.AppDirs('MyApp', 'MyCompany', version='1.0')
    for attr in ('user_data_dir', 'site_data_dir', 'user_cache_dir',
                 'user_state_dir', 'user_log_dir'):
        self.assertIsInstance(getattr(dirs, attr), STRING_TYPE)
def _get_pdfbox_path(self):
    """
    Return path to local copy of PDFBox jar file.

    Honours the PDFBOX environment variable when set; otherwise downloads
    the jar into a per-user cache directory on first use.

    Raises RuntimeError if PDFBOX points at a missing file or if the
    download fails.
    """
    # Use PDFBOX environmental variable if it exists:
    if 'PDFBOX' in os.environ:
        pdfbox_path = os.environ['PDFBOX']
        if not os.path.exists(pdfbox_path):
            raise RuntimeError('pdfbox not found')
        return pdfbox_path

    # Use platform-specific cache directory:
    a = appdirs.AppDirs('python-pdfbox')
    cache_dir = a.user_cache_dir
    pdfbox_path = os.path.join(cache_dir, os.path.basename(pdfbox_url))

    # Retrieve, cache, and verify PDFBox jar file:
    if not os.path.exists(pdfbox_path):
        try:
            # `with` closes the connection deterministically; chaining the
            # original error (from e) preserves the cause the bare except
            # used to hide.
            with urllib.request.urlopen(pdfbox_url) as r:
                data = r.read()
        except Exception as e:
            raise RuntimeError('error retrieving %s'
                               % os.path.basename(pdfbox_url)) from e
        # makedirs handles missing parents, unlike os.mkdir.
        os.makedirs(cache_dir, exist_ok=True)
        with open(pdfbox_path, 'wb') as f:
            f.write(data)
    return pdfbox_path
def __init__(self, namespace, owner):
    """Load (or start) the INI settings file in the per-user config dir.

    :param namespace: appdirs application name.
    :param owner: appdirs author/owner name.
    """
    user_config_dir = appdirs.AppDirs(namespace, owner).user_config_dir
    # exist_ok makes the previous exists() pre-check redundant (and
    # removes the check-then-create race).
    os.makedirs(user_config_dir, exist_ok=True)
    self._settings_path = os.path.join(user_config_dir, self.FILE_NAME)
    self._config = ConfigParser()
    # ConfigParser.read ignores missing files, so the first run simply
    # starts with an empty configuration.
    self._config.read(self._settings_path)
def get_app_dirs():
    """
    Get the directories for the application

    Returns:
        :obj:`appdirs.AppDirs`: application directories
    """
    app_dirs = appdirs.AppDirs("BioSimulatorsUtils", "BioSimulatorsTeam")
    return app_dirs
def main():
    """Main entry point: parse arguments, prepare files, start the bot."""
    # Build default paths for files.
    dirs = appdirs.AppDirs('hangupsbot', 'hangupsbot')
    default_log_path = os.path.join(dirs.user_data_dir, 'hangupsbot.log')
    default_token_path = os.path.join(dirs.user_data_dir, 'refresh_token.txt')
    default_config_path = os.path.join(dirs.user_data_dir, 'config.json')

    # Configure argument parser
    parser = argparse.ArgumentParser(
        prog='hangupsbot',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-d', '--debug', action='store_true',
                        help=_('log detailed debugging messages'))
    parser.add_argument('--log', default=default_log_path,
                        help=_('log file path'))
    parser.add_argument('--token', default=default_token_path,
                        help=_('OAuth refresh token storage path'))
    parser.add_argument('--config', default=default_config_path,
                        help=_('config storage path'))
    parser.add_argument('--version', action='version',
                        version='%(prog)s {}'.format(__version__),
                        help=_('show program\'s version number and exit'))
    args = parser.parse_args()

    # Create all necessary directories.
    for path in [args.log, args.token, args.config]:
        directory = os.path.dirname(path)
        if directory and not os.path.isdir(directory):
            try:
                os.makedirs(directory)
            except OSError as e:
                sys.exit(_('Failed to create directory: {}').format(e))

    # If there is no config file in user data directory, copy default one there
    if not os.path.isfile(args.config):
        try:
            shutil.copy(
                os.path.abspath(
                    os.path.join(os.path.dirname(__file__), 'config.json')),
                args.config)
        except OSError as e:
            # IOError is an alias of OSError on Python 3, so a single
            # exception class suffices here.
            sys.exit(_('Failed to copy default config file: {}').format(e))

    # Configure logging
    log_level = logging.DEBUG if args.debug else logging.WARNING
    logging.basicConfig(filename=args.log, level=log_level, format=LOG_FORMAT)
    # asyncio's debugging logs are VERY noisy, so adjust the log level
    logging.getLogger('asyncio').setLevel(logging.WARNING)

    # Start Hangups bot
    bot = HangupsBot(args.token, args.config)
    bot.run()
def main(argv=None):
    """Mount the Stud.IP FUSE filesystem.

    Prepares per-user data/config/cache directories, configures logging,
    resolves credentials (OAuth or password file/prompt), then hands
    control to FUSE.  All failures after argument parsing are logged
    rather than propagated; STARTING/TERMINATED status lines bracket the
    run.
    """
    dirs = appdirs.AppDirs(appname="Stud.IP-Fuse", appauthor=False)  # disable author/company folder on windows
    os.makedirs(dirs.user_data_dir, exist_ok=True)  # must exist for log files
    os.makedirs(dirs.user_config_dir, exist_ok=True)  # must exist for oauth token storage
    configure_logging(dirs)
    args, fuse_args = parse_args(dirs, argv)
    os.makedirs(args.cache_dir, exist_ok=True)
    try:
        if not args.debug_logging:
            logging.root.setLevel(logging.INFO)
        if not args.debug_aio:
            logging.getLogger("asyncio").setLevel(logging.WARNING)
        log_status("STARTING", args=args, level=logging.DEBUG)
        log.info("Starting %s" % get_environment())
        if args.debug_fuse:
            from studip_fuse.studipfs.fuse_ops import log_ops
            log_ops.setLevel(logging.DEBUG)
        fuse_ops = FUSEView(log_args=args, loop_setup_fn=aioimpl_asyncio.setup_loop(args=args))
        if args.login_method == "oauth":
            login_oauth_args(args)
        else:
            # Password login: read from stdin prompt or from the given file.
            if args.pwfile == "-":
                from getpass import getpass
                password = getpass()
            else:
                try:
                    with open(args.pwfile, "rt") as f:
                        password = f.read().rstrip('\n')
                except FileNotFoundError as e:
                    log.warning("%s. Either specify a file from which your Stud.IP password can be read "
                                "or use `--pwfile -` to enter it using a prompt in the shell." % e)
                    return
            args.get_password = lambda: password  # wrap in lambda to prevent printing
        log.info("Going to mount at %s (uid=%s, gid=%s, pid=%s, python pid=%s)",
                 os.path.abspath(args.mount), *fuse_get_context(), os.getpid())
        try:
            # this calls fork if args.foreground == False (and breaks running asyncio loops due to https://bugs.python.org/issue21998)
            # XXX on windows args.mount may not exist, on Linux it must exist
            FUSE(fuse_ops, args.mount, debug=fuse_args.pop("debug_fuse"), **fuse_args)
        except RuntimeError as e:
            # Translate numeric libfuse error codes into readable messages.
            if more_itertools.first(e.args, None) in FUSE_ERROR_CODES:
                msg = FUSE_ERROR_CODES[e.args[0]]
                if e.args[0] == 1:
                    msg += ". Please check whether the mountpoint you specified is an empty directory or another instance of studip-fuse is using it"
                msg += ". Please check stderr for details."
                raise RuntimeError(msg) from e
            else:
                raise
    except SystemExit:
        pass
    except:
        log.error("main() function quit exceptionally", exc_info=True)
    finally:
        log_status("TERMINATED", args=args, level=logging.DEBUG)
        log.debug("Program terminated")
def init():
    """Initialise module-level state: appdirs paths and the recursion guard."""
    global app_dirs, recursion
    app_dirs = appdirs.AppDirs(appname='revenge', appauthor='bannsec')
    # Help watch for recursive loading
    recursion = set()
def finalize_options(self):
    """Validate --args by running them through the studip-fuse arg parser."""
    from studip_fuse.launcher.cmd_util import parse_args
    import appdirs
    import shlex
    app_dirs = appdirs.AppDirs(appname="Stud.IP-Fuse", appauthor=False)
    argv = shlex.split(self.args)
    parse_args(app_dirs, argv, prog="setup.py make_windows_shortcut --args=")
def test_from_module_spec(self) -> None:
    """AppPaths built from a module spec use the distribution's app name."""
    expected = appdirs.AppDirs("phile")
    module_spec = phile.phill.appdirs.__spec__
    assert module_spec is not None
    built = phile.phill.appdirs.AppPaths.from_module_spec(module_spec)
    assert built.appname == expected.appname
def parse_args(cls):
    """Build the laniakea argument parser and parse sys.argv.

    On first run, seeds the user config directory with the bundled example
    configuration files.

    Returns the parsed :class:`argparse.Namespace`.
    """
    parser = argparse.ArgumentParser(
        description='Laniakea Runtime',
        prog='laniakea',
        add_help=False,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        epilog='The exit status is 0 for non-failures and 1 for failures.')

    dirs = appdirs.AppDirs("laniakea", "Mozilla Security")
    if not os.path.isdir(dirs.user_config_dir):
        # First run: install the example configs as the user's defaults.
        shutil.copytree(os.path.join(cls.HOME, 'examples'), dirs.user_config_dir)

    m = parser.add_argument_group('Mandatory Arguments')
    g = m.add_mutually_exclusive_group(required=True)
    g.add_argument('-create-on-demand', action='store_true', help='Create on-demand instances')
    g.add_argument('-create-spot', action='store_true', help='Create spot instances')
    g.add_argument('-stop', nargs='?', const=-1, metavar='n', help='Stop active instances')
    g.add_argument('-terminate', nargs='?', const=-1, metavar='n', help='Terminate active instances')
    g.add_argument('-status', action='store_true', help='List current state of instances')
    g.add_argument('-run', metavar='cmd', type=str, default='', help='Execute commands via SSH')
    g.add_argument('-list-userdata-macros', action='store_true', help='List available macros')
    g.add_argument('-print-userdata', action='store_true', help='Print the UserData script to stdout')

    u = parser.add_argument_group('UserData Arguments')
    u.add_argument('-userdata', metavar='path', type=argparse.FileType(),
                   default=os.path.join(cls.HOME, 'userdata', 'default.sh'),
                   help='UserData script for cloud-init')
    u.add_argument('-userdata-macros', metavar='k=v', nargs='+', type=str, help='Custom macros')

    o = parser.add_argument_group('Optional Arguments')
    o.add_argument('-tags', metavar='k=v', nargs='+', type=str, help='Assign tags to instances')
    o.add_argument('-only', metavar='k=v', nargs='+', type=str, help='Filter instances')
    o.add_argument('-images', metavar='path', type=argparse.FileType(),
                   default=os.path.join(dirs.user_config_dir, 'images.json'),
                   help='EC2 image definitions')
    o.add_argument('-image-name', metavar='str', type=str, default='default',
                   help='Name of image definition')
    o.add_argument('-image-args', metavar='k=v', nargs='+', type=str, help='Custom image arguments')
    o.add_argument('-profile', metavar='str', type=str, default='laniakea',
                   help='AWS profile name in .boto')
    o.add_argument('-max-spot-price', metavar='#', type=float, default=0.05,
                   help='Max price for spot instances')
    o.add_argument('-region', type=str, default='us-west-2', help='EC2 region')
    o.add_argument('-zone', type=str, default=None, help='EC2 placement zone')
    # BUG FIX: help text previously said 'EC2 placement zone' here too
    # (copy-paste from -zone above).
    o.add_argument('-root-device-type', type=str, default='ebs',
                   choices=['ebs', 'instance_store'],
                   help='EC2 root device type')
    o.add_argument('-ebs-size', type=int, default=None,
                   help='Sets the root disk space size. If unset, the EC2 default is used.')
    o.add_argument('-ebs-volume-type', type=str, default='gp2',
                   choices=['gp2', 'io1', 'standard'],
                   help='Sets the root disk volume type.')
    o.add_argument('-ebs-volume-delete-on-termination', action='store_true', default=False,
                   help='Set this to delete the root EBS volume on termination.')
    o.add_argument('-verbosity', default=2, type=int, choices=list(range(1, 6, 1)),
                   help='Log level for the logging module')
    o.add_argument('-focus', action='store_true', default=False, help=argparse.SUPPRESS)
    o.add_argument('-settings', metavar='path', type=argparse.FileType(),
                   default=os.path.join(dirs.user_config_dir, 'laniakea.json'),
                   help='Laniakea settings')
    o.add_argument('-h', '-help', '--help', action='help', help=argparse.SUPPRESS)
    o.add_argument('-version', action='version',
                   version='%(prog)s {}'.format(cls.VERSION),
                   help=argparse.SUPPRESS)

    return parser.parse_args()
def main():
    """Main entry point: parse arguments, prepare files, start the bot."""
    # Build default paths for files.
    dirs = appdirs.AppDirs("hangupsbot", "hangupsbot")
    default_log_path = os.path.join(dirs.user_data_dir, "hangupsbot.log")
    default_cookies_path = os.path.join(dirs.user_data_dir, "cookies.json")
    default_config_path = os.path.join(dirs.user_data_dir, "config.json")
    default_memory_path = os.path.join(dirs.user_data_dir, "memory.json")

    # Configure argument parser
    parser = argparse.ArgumentParser(
        prog="hangupsbot",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument("-d", "--debug", action="store_true",
                        help=_("log detailed debugging messages"))
    parser.add_argument("--log", default=default_log_path,
                        help=_("log file path"))
    parser.add_argument("--cookies", default=default_cookies_path,
                        help=_("cookie storage path"))
    parser.add_argument("--memory", default=default_memory_path,
                        help=_("memory storage path"))
    parser.add_argument("--config", default=default_config_path,
                        help=_("config storage path"))
    parser.add_argument("--retries", default=5, type=int,
                        help=_("Maximum disconnect / reconnect retries before "
                               "quitting"))
    parser.add_argument("--version", action="version",
                        version="%(prog)s {}".format(version.__version__),
                        help=_("show program\"s version number and exit"))
    args = parser.parse_args()

    # Create all necessary directories.
    for path in [args.log, args.cookies, args.config, args.memory]:
        directory = os.path.dirname(path)
        if directory and not os.path.isdir(directory):
            try:
                os.makedirs(directory)
            except OSError as err:
                # BUG FIX: sys.exit takes a single argument; the message was
                # previously passed unformatted with err as a spurious
                # second argument (TypeError).
                sys.exit(_("Failed to create directory: %s") % err)

    # If there is no config file in user data directory, copy default one there
    if not os.path.isfile(args.config):
        try:
            shutil.copy(
                os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]),
                                             "config.json")),
                args.config)
        except OSError as err:
            # BUG FIX: same two-argument sys.exit call as above; also,
            # IOError is an alias of OSError on Python 3.
            sys.exit(_("Failed to copy default config file: %s") % err)

    configure_logging(args)
    # initialise the bot
    bot = HangupsBot(args.cookies, args.config, args.memory, args.retries)
    # start the bot
    bot.run()
def uri(self) -> str:
    """
    Returns the URI for MLFlow to store data.
    """
    configured = self._config_dict['uri']
    if configured != 'default':
        return configured
    # 'default' means: use a file store under the per-user delta data dir.
    data_dir = appdirs.AppDirs('delta', 'nasa').user_data_dir
    return 'file://' + os.path.join(data_dir, 'mlflow')
def dir(self) -> str:
    """
    Returns the directory for tensorboard to store to.
    """
    configured = self._config_dict['dir']
    if configured != 'default':
        return configured
    # 'default' means: use a tensorboard folder under the per-user delta
    # data directory.
    base = appdirs.AppDirs('delta', 'nasa').user_data_dir
    return os.path.join(base, 'tensorboard')