:synopsis: Modul ini mengandung implementasi dari modul kbbi. .. moduleauthor:: sage <*****@*****.**> """ import argparse import json import re import sys from pathlib import Path from urllib.parse import quote import requests from appdirs import AppDirs from bs4 import BeautifulSoup APPDIR = AppDirs("kbbi", "laymonage") DATA_DIR = Path(APPDIR.user_data_dir) DATA_DIR.mkdir(parents=True, exist_ok=True) class KBBI: """Sebuah laman dalam KBBI daring.""" host = "https://kbbi.kemdikbud.go.id" def __init__(self, kueri, auth=None): """Membuat objek KBBI baru berdasarkan kueri yang diberikan. :param kueri: Kata kunci pencarian :type kueri: str :param auth: objek AutentikasiKBBI
import os
import platform

from PyQt5.QtCore import QSettings
from appdirs import AppDirs

# Application identity, used for the platform-specific per-user data dir.
APP_NAME = "Power Hour Creator"
APP_AUTHOR = "jac241"
APP_DIRS = AppDirs(APP_NAME, APP_AUTHOR)

# Filesystem layout relative to this module.
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
EXT_DIR = os.path.abspath(os.path.join(ROOT_DIR, "../ext"))
MIGRATIONS_PATH = os.path.join(ROOT_DIR, "db", "migrations")

# Media output settings.
AUDIO_FORMAT = "m4a"
VIDEO_FORMAT = "mp4"
track_length = 60

# Runtime environment.
phc_env = os.environ.get("PHC_ENV", "production")
OS = platform.system().lower()
DEFAULT_TRACKLIST_DIR = os.path.expanduser("~/Documents")
DB_CONN_NAME = "phc_db_conn"


def db_path():
    """Return the path of the environment-specific SQLite database file."""
    db_filename = "{}.db".format(phc_env)
    return os.path.join(APP_DIRS.user_data_dir, db_filename)
def main(**kwargs):
    """Ciphey - Automated Decryption Tool

    Documentation:
    https://github.com/Ciphey/Ciphey/wiki\n
    Discord (support here, we're online most of the day):
    https://discord.ciphey.online/\n
    GitHub:
    https://github.com/ciphey/ciphey\n

    Ciphey is an automated decryption tool using smart artificial intelligence
    and natural language processing. Input encrypted text, get the decrypted
    text back.

    Examples:\n
        Basic Usage: ciphey -t "aGVsbG8gbXkgbmFtZSBpcyBiZWU="
    """

    """Function to deal with arguments. Either calls with args or not. Makes Pytest work.
    It gets the arguments in the function definition using locals()
    if withArgs is True, that means this is being called with command line args
    so go to arg_parsing() to get those args
    we then update locals() with the new command line args and remove "withArgs"
    This function then calls call_encryption(**result) which passes our dict of args
    to the function as its own arguments using dict unpacking.
    Returns:
        The output of the decryption.
    """

    # if user wants to know where appdirs is
    # print and exit
    if "appdirs" in kwargs and kwargs["appdirs"]:
        dirs = AppDirs("Ciphey", "Ciphey")
        path_to_config = dirs.user_config_dir
        print(
            f"The settings.yml file should be at {os.path.join(path_to_config, 'settings.yml')}"
        )
        return None

    # Now we create the config object
    config = iface.Config()

    # Load the settings file into the config
    load_msg: str
    cfg_arg = kwargs["config"]
    if cfg_arg is None:
        # Make sure that the config dir actually exists
        os.makedirs(iface.Config.get_default_dir(), exist_ok=True)
        config.load_file(create=True)
        load_msg = f"Opened config file at {os.path.join(iface.Config.get_default_dir(), 'config.yml')}"
    else:
        config.load_file(cfg_arg)
        load_msg = f"Opened config file at {cfg_arg}"

    # Load the verbosity, so that we can start logging
    # NOTE(review): if both 'verbose' and 'quiet' are None, verbosity stays
    # None and the arithmetic below raises TypeError — presumably the CLI
    # layer (click) always supplies defaults; confirm against the decorators.
    verbosity = kwargs["verbose"]
    quiet = kwargs["quiet"]
    if verbosity is None:
        if quiet is not None:
            verbosity = -quiet
    elif quiet is not None:
        verbosity -= quiet
    # Greppable output suppresses all logging by driving verbosity far below
    # any real level.
    if kwargs["greppable"] is not None:
        verbosity -= 999
    # Use the existing value as a base
    config.verbosity += verbosity
    config.update_log_level(config.verbosity)
    logger.debug(load_msg)
    logger.trace(f"Got cmdline args {kwargs}")

    # Now we load the modules
    module_arg = kwargs["module"]
    if module_arg is not None:
        config.modules += list(module_arg)

    # We need to load formats BEFORE we instantiate objects
    if kwargs["bytes"] is not None:
        config.update_format("bytes")

    # Next, load the objects
    # Each --param is "parent.name=value"; split on the first '=' and '.' so
    # values and names may themselves contain those characters afterwards.
    params = kwargs["param"]
    if params is not None:
        for i in params:
            key, value = i.split("=", 1)
            parent, name = key.split(".", 1)
            config.update_param(parent, name, value)
    config.update("checker", kwargs["checker"])
    config.update("searcher", kwargs["searcher"])
    config.update("default_dist", kwargs["default_dist"])

    config.complete_config()

    logger.trace(f"Command line opts: {kwargs}")
    logger.trace(f"Config finalised: {config}")

    # Finally, we load the plaintext
    # Precedence: explicit --text, then --file contents, then stdin.
    if kwargs["text"] is None:
        if kwargs["file"] is not None:
            kwargs["text"] = kwargs["file"].read()
        elif kwargs["text_stdin"] is not None:
            kwargs["text"] = kwargs["text_stdin"]
        else:
            # else print help menu
            # NOTE(review): the trailing "[bold red]" looks like it should be
            # the closing tag "[/bold red]" (rich markup) — confirm.
            print("[bold red]Error. No inputs were given to Ciphey. [bold red]")

            @click.pass_context
            def all_procedure(ctx):
                print_help(ctx)

            all_procedure()

            return None

    # Coerce the input text to the format (str/bytes) the pipeline expects.
    if issubclass(config.objs["format"], type(kwargs["text"])):
        pass
    elif config.objs["format"] == str and type(kwargs["text"]) is bytes:
        kwargs["text"] = kwargs["text"].decode("utf-8")
    elif config.objs["format"] == bytes and type(kwargs["text"]) is str:
        kwargs["text"] = kwargs["text"].encode("utf-8")
    else:
        raise TypeError(f"Cannot load type {config.format} from {type(kwargs['text'])}")

    result: Optional[str]

    # if debug or quiet mode is on, run without spinner
    if config.verbosity != 0:
        result = decrypt(config, kwargs["text"])
    else:
        # else, run with spinner if verbosity is 0
        with yaspin(Spinners.earth, "Thinking") as sp:
            config.set_spinner(sp)
            result = decrypt(config, kwargs["text"])

    if result is None:
        result = "Could not find any solutions."

    print(result)
from appdirs import AppDirs
from pathlib import Path
import os


class FakeDirs:
    """Minimal AppDirs substitute rooting all directories under one base path."""

    def __init__(self, data_dir):
        self.user_data_dir = data_dir
        self.user_log_dir = data_dir + "/logs"
        self.user_cache_dir = data_dir + "/cache"


# Honour an explicit override via SUWAKO_DATA_DIR; otherwise fall back to the
# platform-appropriate per-user directories from appdirs.
if os.environ.get('SUWAKO_DATA_DIR'):
    suwako_data_dir = os.environ.get('SUWAKO_DATA_DIR')
    dirs = FakeDirs(suwako_data_dir)
else:
    dirs = AppDirs("Suwako", "Kyuunex")

exports_directory = dirs.user_data_dir + "/exports"

# Make sure every directory exists before the rest of the app touches it.
for _required_dir in (dirs.user_data_dir, dirs.user_cache_dir,
                      dirs.user_log_dir, exports_directory):
    Path(_required_dir).mkdir(parents=True, exist_ok=True)

database_file = dirs.user_data_dir + "/maindb.sqlite3"
"""Module handling the configuration of the Data Manager.""" from six.moves import configparser, input from six import print_ from appdirs import AppDirs import os from os import path # The software can be modified for use with other experiments # The "branding" is used to make this easier _branding = "t2kdm" app_dirs = AppDirs(_branding, _branding) default_values = { 'backend': 'dirac', 'basedir': '/t2k.org', 'location': '/', 'maid_config': path.join(app_dirs.user_config_dir, 'maid.conf'), 'blacklist': '-', } descriptions = { 'backend': "Which backend should be used?\n"\ "Supported backends: dirac\n"\ "Legacy backends: gfal, lcg", 'basedir': "What base directory should be assumed for all files on the grid?", 'location': "What is your location?\n"\ "It must follow the general pattern of '/continent/country/site'.\n"\ "This is used to determine the closest storage element (SE) when downloading files.\n"\ "Examples: /europe/uk/ral\n"\ " /americas/ca/triumf\n"\
# NOTE(review): 'logging', 'get_versions', 'os' and 'Configuration' are not
# imported in this chunk — presumably imported earlier in the file; confirm.
logger = logging.getLogger(__name__)

# Version information comes from versioneer-style get_versions(); the name is
# deleted afterwards to keep the package namespace clean.
VERSIONS = get_versions()
del get_versions

# Package metadata.
__descr__ = "Asynchronous [black-box] Optimization"
__version__ = VERSIONS["version"]
__license__ = "BSD-3-Clause"
__author__ = u"Epistímio"
__author_short__ = u"Epistímio"
__author_email__ = "*****@*****.**"
__copyright__ = u"2017-2020, Epistímio"
__url__ = "https://github.com/epistimio/orion"

# Platform-specific directories for this package; AppDirs itself is deleted
# so it does not leak into the public namespace.
DIRS = AppDirs(__name__, __author_short__)
del AppDirs

# Candidate configuration files, from site-wide example to per-user config.
# NOTE(review): later entries presumably override earlier ones — confirm in
# the config loader.
DEF_CONFIG_FILES_PATHS = [
    os.path.join(DIRS.site_data_dir, "orion_config.yaml.example"),
    os.path.join(DIRS.site_config_dir, "orion_config.yaml"),
    os.path.join(DIRS.user_config_dir, "orion_config.yaml"),
]


def define_config():
    """Create and define the fields of the configuration object."""
    # Builds the nested configuration by delegating each section to its own
    # definition helper. (This chunk may be truncated; the function does not
    # return 'config' here — confirm against the full file.)
    config = Configuration()
    define_storage_config(config)
    define_experiment_config(config)
    define_worker_config(config)
import os
import re
import json
import logging
from time import time
from io import open
from contextlib import contextmanager
import fnmatch
import shutil
from collections import UserDict
import sqlite3

from appdirs import AppDirs

from steamctl import __appname__

_LOG = logging.getLogger(__name__)
_appdirs = AppDirs(__appname__)


def ensure_dir(path, mode=0o750):
    """Create the parent directory of *path* if it does not exist yet.

    :param path: file path whose containing directory should exist
    :param mode: permission bits applied when the directory is created
    """
    dirpath = os.path.dirname(path)
    # Guard against bare filenames: os.path.dirname() returns '' for them and
    # os.makedirs('') raises FileNotFoundError.
    if dirpath and not os.path.exists(dirpath):
        _LOG.debug("Making dirs: %s", dirpath)
        # exist_ok=True closes the check-then-create race: if another process
        # creates the directory between the exists() check and here, we no
        # longer crash with FileExistsError.
        os.makedirs(dirpath, mode, exist_ok=True)


def normpath(path):
    """Normalize *path*; on POSIX systems backslashes are first converted to
    forward slashes so Windows-style input normalizes consistently."""
    if os.sep == '/':
        path = path.replace('\\', '/')
    return os.path.normpath(path)
import configargparse
import logging
import os  # BUGFIX: 'os' was used below (os.path.exists/join) but never imported
import sys

from appdirs import AppDirs

from pynYNAB.ClientFactory import clientfromargs
from pynYNAB.scripts.csvimport import do_csvimport, verify_csvimport
from pynYNAB.scripts.ofximport import do_ofximport, verify_ofximport

logging.basicConfig()

# Look for ynab.conf in the working directory first; fall back to the
# per-user config directory provided by appdirs.
configfile = 'ynab.conf'
if not os.path.exists(configfile):
    myAppdir = AppDirs('pynYNAB').user_config_dir
    configfile = os.path.join(myAppdir, configfile)

LOG = logging.getLogger(__name__)

# Arguments may come from the command line, the config file, or NYNAB_*
# environment variables.
parser = configargparse.getArgumentParser('pynYNAB',
                                          default_config_files=[configfile],
                                          add_env_var_help=True,
                                          add_config_file_help=True,
                                          auto_env_var_prefix='NYNAB_')
parser.add_argument('--email', metavar='Email', type=str, required=False,
                    help='The Email User ID for nYNAB')
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF # THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # File and folder paths import logging import os.path import signal import sys from appdirs import AppDirs pkgname = 'urlwatch' urlwatch_dir = os.path.expanduser(os.path.join('~', '.' + pkgname)) urlwatch_cache_dir = AppDirs(pkgname).user_cache_dir if not os.path.exists(urlwatch_dir): urlwatch_dir = AppDirs(pkgname).user_config_dir # Check if we are installed in the system already (prefix, bindir) = os.path.split(os.path.dirname(os.path.abspath(sys.argv[0]))) if bindir != 'bin': sys.path.insert(0, os.path.join(prefix, bindir, 'lib')) from urlwatch.command import UrlwatchCommand from urlwatch.config import CommandConfig from urlwatch.main import Urlwatch from urlwatch.storage import YamlConfigStorage, CacheMiniDBStorage, CacheRedisStorage, UrlsYaml
if class_ is None: # detect repo type: try: return AnnexRepo(dir_, create=False) except RuntimeError as e: pass try: return GitRepo(dir_, create=False) except InvalidGitRepositoryError as e: raise RuntimeError("No reproman repository found in %s" % abspath_) else: try: return class_(dir_, create=False) except (RuntimeError, InvalidGitRepositoryError) as e: raise RuntimeError("No %s repository found in %s." % (type_, abspath_)) else: dir_ = normpath(opj(dir_, "..")) if class_ is not None: raise RuntimeError("No %s repository found in %s" % (type_, abspath_)) else: raise RuntimeError("No reproman repository found in %s" % abspath_) from appdirs import AppDirs from os.path import join as opj dirs = AppDirs("reproman", "reproman.org")
-n <dataset_name>, --name <dataset_name> --expire <expiry date> the permission expiry date in format YYYY-mm-dd """ import os from docopt import docopt from appdirs import AppDirs from modules.AccessManager import AccessManager from modules.VaultClient import VaultClient from util import constants from util.util import Util import click import webbrowser from urllib.parse import urljoin __version__ = constants.VERSION dirs = AppDirs(constants.APP_NAME, constants.APP_AUTHOR) os.makedirs(dirs.user_data_dir, exist_ok=True) tokenfile = os.path.join(dirs.user_data_dir, "vault_token") if __name__ == "__main__": arguments = docopt(__doc__, version=__version__) if arguments["--username"]: vault_client = VaultClient(vault_addr=arguments["--vault"], vault_username=arguments["--username"], vault_passowrd=arguments["--password"], tokenfile=tokenfile) elif arguments["--oauth"]: vault_ui_url = urljoin(arguments["--vault"], "/ui/vault/auth?with=oidc") webbrowser.open_new_tab(vault_ui_url) token = input('Please input your vault client token: ')
from appdirs import AppDirs
from pathlib import Path
import os


class FakeDirs:
    """Minimal AppDirs substitute rooting all directories under one base path."""

    def __init__(self, data_dir):
        self.user_data_dir = data_dir
        self.user_log_dir = data_dir + "/logs"
        self.user_cache_dir = data_dir + "/cache"


# Honour an explicit override via MOMIJI_DATA_DIR; otherwise fall back to the
# platform-appropriate per-user directories from appdirs.
if os.environ.get('MOMIJI_DATA_DIR'):
    momiji_data_dir = os.environ.get('MOMIJI_DATA_DIR')
    dirs = FakeDirs(momiji_data_dir)
else:
    dirs = AppDirs("Momiji", "Kyuunex")

exports_directory = dirs.user_data_dir + "/exports"
art_directory = dirs.user_data_dir + "/art"

# Create every directory except the art directory, whose creation was
# deliberately commented out in the original source.
for _required_dir in (dirs.user_data_dir, dirs.user_cache_dir,
                      dirs.user_log_dir, exports_directory):
    Path(_required_dir).mkdir(parents=True, exist_ok=True)

database_file = dirs.user_data_dir + "/maindb.sqlite3"
import platform from watchdog.observers import Observer from watchdog.observers.polling import PollingObserver from watchdog.events import FileSystemEventHandler from appdirs import AppDirs import json import pickle import signal import yaml import uuid from radiam_api import RadiamAPI import radiam_extract from requests import exceptions import re dirs = AppDirs("radiam-agent", "Compute Canada") os.makedirs(dirs.user_data_dir, exist_ok=True) tokenfile = os.path.join(dirs.user_data_dir, "token") os.environ['TIKA_LOG_PATH'] = dirs.user_data_dir from tika import parser as tikaParser post_data_limit = 1000000 # only available on non-Windows, and optional try: import grp except: pass if platform.system() == 'Windows': import win32security else:
def setup(self, args: Namespace, db_path: Path):
    """Wire up the data store, OAuth session and sync/download workers from
    parsed command-line *args*.

    :param args: parsed command-line arguments (argparse Namespace)
    :param db_path: directory holding the local index database and token file
    """
    # Resolve the folder layout requested on the command line.
    root_folder = Path(args.root_folder).absolute()
    photos_folder = Path(args.photos_path)
    albums_folder = Path(args.albums_path)
    compare_folder = None
    if args.compare_folder:
        compare_folder = Path(args.compare_folder).absolute()
    app_dirs = AppDirs(APP_NAME)

    self.data_store = LocalData(db_path, args.flush_index)

    # OAuth credentials live next to the database; the client secret defaults
    # to the per-user config directory unless --secret overrides it.
    credentials_file = db_path / ".gphotos.token"
    if args.secret:
        secret_file = Path(args.secret)
    else:
        secret_file = Path(app_dirs.user_config_dir) / "client_secret.json"
    # --new-token forces a fresh OAuth flow by discarding the cached token.
    if args.new_token and credentials_file.exists():
        credentials_file.unlink()

    scope = [
        'https://www.googleapis.com/auth/photoslibrary.readonly',
        'https://www.googleapis.com/auth/photoslibrary.sharing',
    ]
    photos_api_url = 'https://photoslibrary.googleapis.com/$discovery' \
                     '/rest?version=v1'

    # Authorization must complete before the REST client is built, since the
    # client reuses the authorized session.
    self.auth = Authorize(scope, credentials_file, secret_file,
                          int(args.max_retries))
    self.auth.authorize()

    self.google_photos_client = RestClient(photos_api_url, self.auth.session)
    self.google_photos_idx = GooglePhotosIndex(self.google_photos_client,
                                               root_folder, self.data_store,
                                               args.photos_path,
                                               args.use_flat_path)
    self.google_photos_down = GooglePhotosDownload(self.google_photos_client,
                                                   root_folder,
                                                   self.data_store,
                                                   int(args.max_retries),
                                                   int(args.max_threads))
    self.google_albums_sync = GoogleAlbumsSync(self.google_photos_client,
                                               root_folder, self.data_store,
                                               args.flush_index or
                                               args.retry_download or
                                               args.rescan,
                                               photos_folder, albums_folder,
                                               args.use_flat_path,
                                               args.omit_album_date,
                                               args.use_hardlinks)
    # Local comparison scanning is optional and only set up when requested.
    if args.compare_folder:
        self.local_files_scan = LocalFilesScan(root_folder, compare_folder,
                                               self.data_store)

    self._start_date = Utils.string_to_date(args.start_date)
    self._end_date = Utils.string_to_date(args.end_date)

    # Push the remaining flags down into the workers constructed above.
    self.google_albums_sync.shared_albums = not args.skip_shared_albums
    self.google_albums_sync.album_index = not args.no_album_index
    self.google_albums_sync.use_start_date = args.album_date_by_first_photo
    self.google_albums_sync.album = args.album
    self.google_albums_sync.favourites = args.favourites_only

    self.google_photos_down.start_date = self._start_date
    self.google_photos_down.end_date = self._end_date
    self.google_photos_down.retry_download = args.retry_download
    self.google_photos_down.case_insensitive_fs = args.case_insensitive_fs

    self.google_photos_idx.start_date = self._start_date
    self.google_photos_idx.end_date = self._end_date
    self.google_photos_idx.include_video = not args.skip_video
    self.google_photos_idx.rescan = args.rescan
    self.google_photos_idx.favourites = args.favourites_only
    self.google_photos_idx.case_insensitive_fs = args.case_insensitive_fs
    self.google_photos_idx.archived = args.archived
from getpass import getuser
import csv
import gzip
import logging
import os
import sys
import tarfile
import zipfile

import wget
from appdirs import AppDirs

logger = logging.getLogger(__name__)

DIRS = AppDirs('iepy', getuser())
if not os.path.exists(DIRS.user_data_dir):
    # making sure that user_data_dir exists
    # NOTE(review): os.mkdir (not makedirs) assumes the parent directory
    # already exists, and the check-then-create pair is racy — confirm
    # whether os.makedirs(..., exist_ok=True) was intended.
    os.mkdir(DIRS.user_data_dir)


def unzip(zipped_list, n):
    """Return n lists with the elements of zipped_list un-zipped.

    The general case could be solved with zip(*zipped_list), but here we are
    also dealing with:
      - un-zipping an empty list to n empty lists
      - ensuring that all zipped items in zipped_list have length n,
        raising ValueError if not.
    """
    if not zipped_list:
        # NOTE(review): [[]] * n produces n references to the SAME list — if a
        # caller mutates any returned list, all of them change. A safer form
        # would be tuple([] for _ in range(n)); confirm callers' expectations.
        return tuple([[]] * n)
    else:
import os
from pkg_resources import resource_filename, resource_stream
import re
import shutil
import signal
import subprocess
import sys
import textwrap

from appdirs import AppDirs
from jinja2 import Environment, FileSystemLoader
from jsmin import jsmin

from malboxes._version import __version__

# Per-user config/cache locations for malboxes.
DIRS = AppDirs("malboxes")
DEBUG = False


def initialize():
    """Ensure the per-user appdata directories exist, then build and return
    the command-line parser."""
    # create appdata directories if they don't exist
    # NOTE(review): exists()+makedirs() is racy; os.makedirs(..., exist_ok=True)
    # would avoid a crash if another process creates the dir first — confirm.
    if not os.path.exists(DIRS.user_config_dir):
        os.makedirs(DIRS.user_config_dir)
    if not os.path.exists(DIRS.user_cache_dir):
        os.makedirs(DIRS.user_cache_dir)

    return init_parser()


def init_parser():
import time from datetime import datetime # Import PyPi libraries from appdirs import AppDirs import click import pyotp import pyperclip # Set app information appname = 'iotp' appauthor = 'Dan Mills' appversion = '1.0.2' # Setup appdirs dirs = AppDirs(appname, appauthor) keyFile = os.path.join(dirs.user_data_dir, 'keys.json') def setup_keys(): """ Check for data file and directory and create if is doesn't exist Returns a dictionary of saved keys """ if not os.path.isdir(dirs.user_data_dir): os.makedirs(dirs.user_data_dir) try: with open(keyFile, 'r') as f: keys = json.load(f)
except OSError as exc: # Python >2.5 if exc.errno == errno.EEXIST and os.path.isdir(path): pass else: raise def getIcon(name): return QIcon(respath("icons/" + name)) def respath(filename): return os.path.join(os.path.dirname(__file__), filename) dirs = AppDirs("PRE-Workbench", "Weller IT", roaming=True) mkdir_p(dirs.user_config_dir) configFilespec = os.path.join(dirs.user_config_dir, "config.xdr") configWatchers = dict() configDict = dict() try: with open(configFilespec, "rb") as f: configDict = xdrm.loads(f.read()) except: pass if __name__ == "__main__": import sys, json, binascii def configSerializer(obj):
import texttable
import socket
from queue import Queue
import re
import netaddr
import errno
from os.path import realpath, basename, isdir, isfile
import netifaces
import requests
import threading
import yaml
from appdirs import AppDirs
import json

APP_NAME = 'lanscan'
APPDIRS = AppDirs(APP_NAME)
# NOTE(review): 'os', 'nmap' and 'logging' are not imported in this chunk —
# presumably imported earlier in the file; confirm.
LOGFILE = os.path.join(APPDIRS.user_log_dir, 'lanscan.log')
VENDOR_CACHE = os.path.join(APPDIRS.user_cache_dir, 'vendors')
NMAP_SCANNER = nmap.PortScanner()

# logging.basicConfig(format='%(asctime)s %(levelname)-5s %(message)s', datefmt='%Y-%m-%d %H:%M:%S', level=logging.DEBUG)
logger = logging.getLogger(__name__)


def initialize_directories():
    """Create the cache and log directories used by lanscan."""
    # os.makedirs(..., exist_ok=True) already tolerates an existing directory,
    # so the previous isdir() pre-checks were redundant and racy; a single
    # call per directory is both simpler and safe under concurrency.
    os.makedirs(APPDIRS.user_cache_dir, mode=0o755, exist_ok=True)
    os.makedirs(APPDIRS.user_log_dir, mode=0o755, exist_ok=True)
# SteamFastLogin - Login manager for Steam, allowing fast switching between accounts
# Copyright (C) 2017 Matthew Gamble <*****@*****.**>
#
# This project is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License Version 3 as published by the Free
# Software Foundation. No other version currently applies to this project. This
# project is distributed without any warranty. Please see LICENSE.txt for the
# full text of the license.

from appdirs import AppDirs
from pathlib import Path

dirs = AppDirs("steam-fast-login")


def confDir() -> Path:
    """Return the per-user config directory, creating it (mode 0o750) on demand."""
    config_dir = Path(dirs.user_config_dir)
    config_dir.mkdir(mode=0o750, parents=True, exist_ok=True)
    return config_dir


def usersConfFile() -> Path:
    """Return the path of the saved-accounts file inside the config directory."""
    return confDir() / "users.json"


def settingsConfFile() -> Path:
    """Return the path of the application settings file inside the config directory."""
    return confDir() / "settings.json"
DEPLOY_DIR = BASE_DIR / 'deploy' # Test Filepaths try: import tests except ImportError: raise DevelopmentInstallationRequired(importable_name='tests') else: # TODO: Another way to handle this situation? # __file__ can be None, especially with namespace packages on # Python 3.7 or when using apidoc and sphinx-build. file_path = tests.__file__ NUCYPHER_TEST_DIR = dirname(file_path) if file_path is not None else str() # User Application Filepaths APP_DIR = AppDirs(nucypher.__title__, nucypher.__author__) DEFAULT_CONFIG_ROOT = os.getenv('NUCYPHER_CONFIG_ROOT', default=APP_DIR.user_data_dir) USER_LOG_DIR = os.getenv('NUCYPHER_USER_LOG_DIR', default=APP_DIR.user_log_dir) DEFAULT_LOG_FILENAME = "nucypher.log" DEFAULT_JSON_LOG_FILENAME = "nucypher.json" # Static Seednodes SeednodeMetadata = namedtuple('seednode', ['checksum_address', 'rest_host', 'rest_port']) SEEDNODES = tuple() # Sentry (Add your public key and user ID below) NUCYPHER_SENTRY_PUBLIC_KEY = "" NUCYPHER_SENTRY_USER_ID = "" NUCYPHER_SENTRY_ENDPOINT = f"https://{NUCYPHER_SENTRY_PUBLIC_KEY}@sentry.io/{NUCYPHER_SENTRY_USER_ID}"
from pathlib import Path

from appdirs import AppDirs

loc = AppDirs("conflict", "CytoidCommunity")

# Platform-appropriate per-user directories for this application.
CACHE_PATH = Path(loc.user_cache_dir)
CONFIG_PATH = Path(loc.user_config_dir)
DATA_PATH = Path(loc.user_data_dir)
LOG_PATH = Path(loc.user_log_dir)

# Create every directory up front so later code can assume they all exist.
for _directory in (CACHE_PATH, CONFIG_PATH, DATA_PATH, LOG_PATH):
    _directory.mkdir(parents=True, exist_ok=True)

# The TOML config file is created empty on first run.
CONFIG_FILE = CONFIG_PATH / 'config.toml'
CONFIG_FILE.touch(exist_ok=True)
"""Several utility functions. """ # standard imports from typing import Any import os import pickle import logging # third party imports try: from appdirs import AppDirs APPNAME = "deepvis" # FIXME[hack]: not the right place to define here! APPAUTHOR = "krumnack" _appdirs = AppDirs(APPNAME, APPAUTHOR) except ImportError: _appdirs = None # logging LOG = logging.getLogger(__name__) def cache_path(cache: str = None) -> str: """Get th path the cache directory or a cache file. """ cache_dir = 'cache' if _appdirs is None else _appdirs.user_cache_dir return cache_dir if cache is None else os.path.join(cache_dir, cache) def read_cache(cache: str) -> Any: """Load data from a cache file.
from anyblok.blok import BlokManager
from .common import preload_databases
import sys
from anyblok_pyramid.pyramid_config import Configurator
from anyblok.config import Configuration
from os import environ, path
from appdirs import AppDirs
from anyblok import load_init_function_from_entry_points

if BlokManager.bloks:
    # AnyBlok is already loaded, so its state is uncertain; safest to stop here.
    sys.exit(1)

load_init_function_from_entry_points()
# Configuration files are parsed in increasing precedence:
# site-wide, then per-user, then the file named by ANYBLOK_CONFIGFILE.
# load default files
ad = AppDirs('AnyBlok')
# load the global configuration file
Configuration.parse_configfile(path.join(ad.site_config_dir, 'conf.cfg'), ())
# load the user configuration file
Configuration.parse_configfile(path.join(ad.user_config_dir, 'conf.cfg'), ())
# load config file in environment variable
configfile = environ.get('ANYBLOK_CONFIGFILE')
if configfile:
    Configuration.parse_configfile(configfile, ())

# Logging is only initialized when a level was configured somewhere above.
if 'logging_level' in Configuration.configuration:
    Configuration.initialize_logging()

BlokManager.load()
preload_databases()
config = Configurator()
else: try: return class_(dir_, create=False) except (RuntimeError, InvalidGitRepositoryError) as e: raise RuntimeError("No %s repository found in %s." % (type_, abspath_)) else: dir_ = normpath(opj(dir_, "..")) if class_ is not None: raise RuntimeError("No %s repository found in %s" % (type_, abspath_)) else: raise RuntimeError("No datalad repository found in %s" % abspath_) # Do some centralizing of things needed by the datalad API: # TODO: May be there should be a dedicated class for the master collection. # For now just use helper functions to clean up the implementations of the API. # Design decision about this also depends on redesigning the handle/collection # classes (Metadata class => Backends => Repos). # The local master used by datalad is not a technically special # collection, but a collection with a special purpose for its "user", # who is datalad. So, deriving a class from Collection(Repo) and make common # tasks methods of this class might be an option either way. Also might become # handy, once we decide to have several "masters" (user-level, sys-level, etc.) from appdirs import AppDirs from os.path import join as opj dirs = AppDirs("datalad", "datalad.org")
[REDIS] host = 127.0.0.1 port = 6379 db = 1 encoding = utf-8 [MYSQL] host = 127.0.0.1 port = 3306 db = test user = pydatacoll password = pydatacoll [LOG] level = INFO format = %(asctime)s %(name)s [%(levelname)s] %(message)s """ app_dir = AppDirs('PyDataColl', False, version=app_ver) config_file = os.path.join(app_dir.user_config_dir, 'config.ini') if not os.path.exists(config_file): if not os.path.exists(app_dir.user_config_dir): os.makedirs(app_dir.user_config_dir) with open(config_file, 'wt') as f: f.write(config_example) print('create config file:', config_file) config = configparser.ConfigParser(interpolation=None) config.read(config_file)
name = 'age' def convert(self, value, param, ctx): match = re.match(r'^(?:(?P<weeks>\d+?)w)?(?:(?P<days>\d+?)d)?(?:(?P<hours>\d+?)h)?$', value) if not match: self.fail('%s is not a valid age' % value) return timedelta(**{k: int(v) for k, v in match.groupdict(0).items()}) AGE = AgeParamType() PROVIDER = click.Choice(sorted(provider_manager.names())) REFINER = click.Choice(sorted(refiner_manager.names())) dirs = AppDirs('subliminal') cache_file = 'subliminal.dbm' config_file = 'config.ini' @click.group(context_settings={'max_content_width': 100}, epilog='Suggestions and bug reports are greatly appreciated: ' 'https://github.com/Diaoul/subliminal/') @click.option('--addic7ed', type=click.STRING, nargs=2, metavar='USERNAME PASSWORD', help='Addic7ed configuration.') @click.option('--legendastv', type=click.STRING, nargs=2, metavar='USERNAME PASSWORD', help='LegendasTV configuration.') @click.option('--opensubtitles', type=click.STRING, nargs=2, metavar='USERNAME PASSWORD', help='OpenSubtitles configuration.') @click.option('--cache-dir', type=click.Path(writable=True, file_okay=False), default=dirs.user_cache_dir, show_default=True, expose_value=True, help='Path to the cache directory.') @click.option('--debug', is_flag=True, help='Print useful information for debugging subliminal and for reporting bugs.') @click.version_option(__version__) @click.pass_context
Configuration settings for TinyDecred. """ import argparse import logging import os import sys from urllib.parse import urlparse from appdirs import AppDirs from decred.dcr import nets from decred.util import helpers # Set the data directory in a OS-appropriate location. _ad = AppDirs("TinyWallet", False) DATA_DIR = _ad.user_data_dir helpers.mkdir(DATA_DIR) # The master configuration file name. CONFIG_NAME = "tinywallet.conf" CONFIG_PATH = os.path.join(DATA_DIR, CONFIG_NAME) # Some decred constants. MAINNET = nets.mainnet.Name TESTNET = nets.testnet.Name SIMNET = nets.simnet.Name logLevelMap = { "critical": logging.CRITICAL,
def test_appDataDir(monkeypatch):
    """
    Tests appDataDir to ensure it gives expected results for various
    operating systems.

    Test adapted from dcrd TestAppDataDir.
    """
    # App name plus upper and lowercase variants.
    appName = "myapp"
    appNameUpper = appName.capitalize()
    appNameLower = appName
    # Get the home directory to use for testing expected results.
    homeDir = Path.home()
    # When we're on Windows, set the expected local and roaming directories
    # per the environment vars. When we aren't on Windows, the function
    # should return the current directory when forced to provide the
    # Windows path since the environment variables won't exist.
    winLocal = "."
    currentOS = platform.system()
    if currentOS == "Windows":
        localAppData = os.getenv("LOCALAPPDATA")
        winLocal = Path(localAppData, appNameUpper)
    else:
        # This is kinda cheap, since this is exactly what the function does.
        # But it's all I got to pass testing when testing OS is not Windows.
        winLocal = AppDirs(appNameUpper, "").user_data_dir
    # Mac app data directory.
    macAppData = homeDir / "Library" / "Application Support"
    posixPath = Path(homeDir, "." + appNameLower)
    macPath = Path(macAppData, appNameUpper)
    """
    Tests are 3-tuples:
        opSys (str): Operating system.
        appName (str): The appDataDir argument.
        want (str): The expected result
    """
    tests = [
        # Various combinations of application name casing, leading
        # period, operating system, and roaming flags.
        ("Windows", appNameLower, winLocal),
        ("Windows", appNameUpper, winLocal),
        ("Windows", "." + appNameLower, winLocal),
        ("Windows", "." + appNameUpper, winLocal),
        ("Linux", appNameLower, posixPath),
        ("Linux", appNameUpper, posixPath),
        ("Linux", "." + appNameLower, posixPath),
        ("Linux", "." + appNameUpper, posixPath),
        ("Darwin", appNameLower, macPath),
        ("Darwin", appNameUpper, macPath),
        ("Darwin", "." + appNameLower, macPath),
        ("Darwin", "." + appNameUpper, macPath),
        ("OpenBSD", appNameLower, posixPath),
        ("OpenBSD", appNameUpper, posixPath),
        ("OpenBSD", "." + appNameLower, posixPath),
        ("OpenBSD", "." + appNameUpper, posixPath),
        ("FreeBSD", appNameLower, posixPath),
        ("FreeBSD", appNameUpper, posixPath),
        ("FreeBSD", "." + appNameLower, posixPath),
        ("FreeBSD", "." + appNameUpper, posixPath),
        ("NetBSD", appNameLower, posixPath),
        ("NetBSD", appNameUpper, posixPath),
        ("NetBSD", "." + appNameLower, posixPath),
        ("NetBSD", "." + appNameUpper, posixPath),
        ("unrecognized", appNameLower, posixPath),
        ("unrecognized", appNameUpper, posixPath),
        ("unrecognized", "." + appNameLower, posixPath),
        ("unrecognized", "." + appNameUpper, posixPath),
        # No application name provided, so expect current directory.
        ("Windows", "", "."),
        ("Linux", "", "."),
        ("Darwin", "", "."),
        ("OpenBSD", "", "."),
        ("FreeBSD", "", "."),
        ("NetBSD", "", "."),
        ("unrecognized", "", "."),
        # Single dot provided for application name, so expect current
        # directory.
        ("Windows", ".", "."),
        ("Linux", ".", "."),
        ("Darwin", ".", "."),
        ("OpenBSD", ".", "."),
        ("FreeBSD", ".", "."),
        ("NetBSD", ".", "."),
        ("unrecognized", ".", "."),
    ]

    # testplatform reads opSys via late binding: re-assigning opSys in the
    # loop below changes what the patched platform.system() reports.
    def testplatform():
        return opSys

    monkeypatch.setattr(platform, "system", testplatform)
    for opSys, name, want in tests:
        ret = helpers.appDataDir(name)
        assert str(want) == str(ret), (opSys, name, want)

    # With expanduser and getenv both neutered, no home directory can be
    # resolved at all, so the fallback result is the current directory.
    def testexpanduser(s):
        return ""

    def testgetenv(s):
        return ""

    opSys = "Linux"
    monkeypatch.setattr(os.path, "expanduser", testexpanduser)
    monkeypatch.setattr(os, "getenv", testgetenv)
    assert helpers.appDataDir(appName) == "."
def main():
    """Start game and main loop."""
    # Read configuration file
    # Site-wide config dirs come first, then the user config dir, so the
    # later (user) settings.ini takes precedence in ConfigReader.
    dirs = AppDirs(appname='brutalmaze', appauthor=False, multipath=True)
    parents = dirs.site_config_dir.split(pathsep)
    parents.append(dirs.user_config_dir)
    filenames = [pathjoin(parent, 'settings.ini') for parent in parents]
    config = ConfigReader(filenames)
    config.parse()

    # Parse command-line arguments
    # Every option's help text shows the value that will be used if the
    # option is omitted ("fallback"), taken from the parsed config files.
    parser = ArgumentParser(usage='%(prog)s [options]',
                            formatter_class=RawTextHelpFormatter)
    parser.add_argument('-v', '--version', action='version',
                        version='Brutal Maze {}'.format(__version__))
    parser.add_argument(
        '--write-config', nargs='?', const=stdout, type=FileType('w'),
        metavar='PATH', dest='defaultcfg',
        help='write default config and exit, if PATH not specified use stdout')
    parser.add_argument(
        '-c', '--config', metavar='PATH',
        help='location of the configuration file (fallback: {})'.format(
            pathsep.join(filenames)))
    parser.add_argument(
        '-s', '--size', type=int, nargs=2, metavar=('X', 'Y'),
        help='the desired screen size (fallback: {}x{})'.format(*config.size))
    parser.add_argument('-f', '--max-fps', type=int, metavar='FPS',
                        help='the desired maximum FPS (fallback: {})'.format(
                            config.max_fps))
    # --mute/--unmute share dest='muted'; default=None lets the config file
    # value win when neither flag is given.
    parser.add_argument('--mute', '-m', action='store_true', default=None,
                        dest='muted',
                        help='mute all sounds (fallback: {})'.format(
                            config.muted))
    parser.add_argument('--unmute', action='store_false', dest='muted',
                        help='unmute sound')
    parser.add_argument('--music-volume', type=float, metavar='VOL',
                        dest='musicvol',
                        help='between 0.0 and 1.0 (fallback: {})'.format(
                            config.musicvol))
    parser.add_argument(
        '--touch', action='store_true', default=None,
        help='enable touch-friendly control (fallback: {})'.format(
            config.touch))
    parser.add_argument('--no-touch', action='store_false', dest='touch',
                        help='disable touch-friendly control')
    parser.add_argument(
        '--record-dir', metavar='DIR', dest='export_dir',
        help='directory to write game records (fallback: {})'.format(
            config.export_dir or '*disabled*'))
    parser.add_argument(
        '--record-rate', metavar='SPF', dest='export_rate',
        help='snapshots of game state per second (fallback: {})'.format(
            config.export_rate))
    parser.add_argument('--server', action='store_true', default=None,
                        help='enable server (fallback: {})'.format(
                            config.server))
    parser.add_argument('--no-server', action='store_false', dest='server',
                        help='disable server')
    parser.add_argument('--host',
                        help='host to bind server to (fallback: {})'.format(
                            config.host))
    parser.add_argument(
        '--port', type=int,
        help='port for server to listen on (fallback: {})'.format(config.port))
    parser.add_argument(
        '-t', '--timeout', type=float,
        help='socket operations timeout in seconds (fallback: {})'.format(
            config.timeout))
    parser.add_argument(
        '--head', action='store_false', default=None, dest='headless',
        help='run server with graphics and sound (fallback: {})'.format(
            not config.headless))
    parser.add_argument('--headless', action='store_true',
                        help='run server without graphics or sound')
    args = parser.parse_args()

    # --write-config: dump the bundled default settings to the chosen
    # destination (stdout or a file) and exit without starting the game.
    if args.defaultcfg is not None:
        with open(SETTINGS) as settings:
            args.defaultcfg.write(settings.read())
        args.defaultcfg.close()
        exit()

    # Manipulate config
    # An explicit --config file is layered on top of the defaults, then any
    # remaining command-line overrides are applied last.
    if args.config:
        config.config.read(args.config)
        config.parse()
    config.read_args(args)

    # Main loop
    # Three mutually exclusive control modes: remote (server), touch, or
    # keyboard/mouse; each spins the update loop until the game ends.
    with Game(config) as game:
        if config.server:
            socket_thread = Thread(target=game.remote_control)
            socket_thread.daemon = True     # make it disposable
            socket_thread.start()
            while game.update():
                game.control(*game.sockinp)
        elif config.touch:
            while game.update():
                game.touch_control()
        else:
            while game.update():
                game.user_control()