示例#1
0
文件: base.py 项目: baoshan/sacad
 def __init__(self, target_size, size_tolerance_prct, min_delay_between_accesses=2 / 3):
   """Initialize a cover source.

   Args:
     target_size: desired cover image size, in pixels.
     size_tolerance_prct: tolerated deviation from target_size, in percent.
     min_delay_between_accesses: minimum delay between two API accesses,
       in seconds (rate limiting), default 2/3 s.
   """
   self.target_size = target_size
   self.size_tolerance_prct = size_tolerance_prct
   # per-subclass sqlite file used by the rate limiter to track access times
   db_filepath = os.path.join(appdirs.user_cache_dir(appname="sacad",
                                                     appauthor=False),
                              "api_watcher_%s.sqlite" % (self.__class__.__name__.lower()))
   os.makedirs(os.path.dirname(db_filepath), exist_ok=True)
   self.api_watcher = api_watcher.ApiAccessRateWatcher(logging.getLogger(),
                                                       db_filepath=db_filepath,
                                                       min_delay_between_accesses=min_delay_between_accesses)
   self.http_session = http.session()
   # the API cache is a class-level singleton shared by all instances,
   # so it is only created on first instantiation
   if not hasattr(__class__, "api_cache"):
     db_filepath = os.path.join(appdirs.user_cache_dir(appname="sacad",
                                                       appauthor=False),
                                "sacad-cache.sqlite")
     os.makedirs(os.path.dirname(db_filepath), exist_ok=True)
     cache_name = "cover_source_api_data"
     __class__.api_cache = web_cache.WebCache(db_filepath,
                                              cache_name,
                                              caching_strategy=web_cache.CachingStrategy.FIFO,
                                              expiration=60 * 60 * 24 * 90,  # ~3 months
                                              compression=web_cache.Compression.DEFLATE)
     logging.getLogger().debug("Total size of file '%s': %s" % (db_filepath,
                                                                __class__.api_cache.getDatabaseFileSize()))
     # drop expired entries and log cache statistics
     purged_count = __class__.api_cache.purge()
     logging.getLogger().debug("%u obsolete entries have been removed from cache '%s'" % (purged_count, cache_name))
     row_count = len(__class__.api_cache)
     logging.getLogger().debug("Cache '%s' contains %u entries" % (cache_name, row_count))
示例#2
0
    def setup_sync_dir(self):
        """Determine the sync directory and prepare the .studip dot-directory.

        The sync directory comes from (in order of precedence): the command
        line, the most recent entry of the history file, or an interactive
        prompt. The chosen directory is moved to the top of the history file,
        and config/database file paths under `<sync_dir>/.studip` are set.
        Raises ApplicationExit when the history file cannot be written.
        """
        self.cache_dir = appdirs.user_cache_dir("studip", "fknorr")
        self.create_path(self.cache_dir)
        history_file_name = os.path.join(appdirs.user_cache_dir("studip", "fknorr"), "history")
        history = []
        try:
            with open(history_file_name, "r", encoding="utf-8") as file:
                # one directory per line; drop empty lines
                history = list(filter(None, file.read().splitlines()))
        except Exception:
            # a missing or unreadable history file just means an empty history
            pass

        skipped_history = 0
        if "sync_dir" in self.command_line:
            sync_dir = self.command_line["sync_dir"]
        else:
            if history and os.path.isdir(history[0]):
                sync_dir = history[0]
                print("Using last sync directory {} ...".format(sync_dir))
            else:
                skipped_history = 1
                default_dir = "~/StudIP"
                # fall back to the last still-existing history entry as the
                # prompt default, counting how many entries were examined
                for entry in history[1:]:
                    skipped_history += 1
                    if os.path.isdir(entry):
                        default_dir = entry

                sync_dir = input("Sync directory [{}]: ".format(default_dir))
                if not sync_dir:
                    sync_dir = default_dir

        sync_dir = os.path.abspath(os.path.expanduser(sync_dir))
        # drop the examined (stale) entries, remove duplicates of the chosen
        # directory, then put it at the front of the history
        history = history[skipped_history:]
        while sync_dir in history:
            history.remove(sync_dir)

        history.insert(0, sync_dir)
        self.sync_dir = sync_dir

        try:
            with open(history_file_name, "w", encoding="utf-8") as file:
                file.write("\n".join(history) + "\n")
        except Exception as e:
            self.print_io_error("Unable to write to", history_file_name, e)
            raise ApplicationExit()

        self.dot_dir = os.path.join(self.sync_dir, ".studip")
        self.create_path(self.dot_dir)

        self.config_file_name = os.path.join(self.dot_dir, "studip.conf")
        self.db_file_name = os.path.join(self.dot_dir, "cache.sqlite")
 def user_cache_dir(self):
     """Return the per-user cache directory, creating it first when
     ``self.create`` is set."""
     path = appdirs.user_cache_dir(self.appname, self.appauthor,
                                   version=self.version)
     if self.create:
         self._ensure_directory_exists(path)
     return path
def add_census_options(parser, module):
    """Add common census-data arguments to `parser` for the given `module`.

    Adds --states, --year and --no_model options, and registers `data_dir`
    plus the module's states mapping as parser defaults. Returns the parser
    so calls can be chained.
    """

    states_mapping, states_kw = get_states_mapping(module)

    parser.add_argument(
        '-s', '--states',
        nargs='+',
        required=True,
        choices=sorted(states_mapping.keys()),
        help='states for which {} data is to be include in database, '
             'indicate states with two letter postal codes'.format(module)
    )
    parser.add_argument(
        '-y', '--year',
        required=True,
        type=int,
        dest='{}_year'.format(module.lower()),
        help='year of the desired {} data product'.format(module)
    )
    parser.add_argument(
        '-nm', '--no_model',
        default=True,
        dest='model',
        action='store_false',
        help='by default a sqlalchemy model of the produced schema is '
             'created, use this flag to opt out of that functionality'
    )

    # data_dir is not user configurable; it is convenient to store it
    # alongside similar settings that live in the global argparse namespace
    data_dir = join(user_cache_dir(__package__), module)
    parser.set_defaults(data_dir=data_dir, **{states_kw: states_mapping})

    return parser
示例#5
0
文件: dirnamer.py 项目: Zomojo/Cake
def user_cache_dir(appname='ct', appauthor=None, version=None, opinion=True, args=None, argv=None, exedir=None):
    """Resolve the ct cache directory (CTCACHE).

    Priority: CTCACHE on the command line, then the CTCACHE environment
    variable, then config files, then the python-appdirs default location.
    Verbosity for the diagnostic messages is taken from `args.verbose`
    when `args` is given.
    """
    if args is None:
        verbose = 0
    else:
        verbose = args.verbose
    # command line > environment variables > config file values > defaults

    cachedir = ct.configutils.extract_value_from_argv(key='CTCACHE', argv=argv)
    if cachedir:
        _verbose_write("Highest priority CTCACHE is the command line.", verbose=verbose, newline=True)
        _verbose_write_found(cachedir, verbose=verbose)
        return cachedir

    _verbose_write('CTCACHE not on commandline. Falling back to environment variables.', verbose=verbose, newline=True)
    try:
        cachedir = os.environ['CTCACHE']
        _verbose_write_found(cachedir, verbose=verbose)
        return cachedir

    except KeyError:
        pass

    _verbose_write('CTCACHE not in environment variables. Falling back to config files.', verbose=verbose, newline=True)

    cachedir = ct.configutils.extract_item_from_ct_conf('CTCACHE', exedir=exedir, verbose=verbose)
    if cachedir:
        _verbose_write_found(cachedir, verbose=verbose)
        return cachedir

    _verbose_write("CTCACHE not in config files.  Falling back to python-appdirs (which on linux wraps XDG variables).", verbose=verbose, newline=True)
    cachedir = appdirs.user_cache_dir(appname, appauthor, version, opinion)
    _verbose_write_found(cachedir, verbose=verbose)
    return cachedir
示例#6
0
文件: util.py 项目: hiveeyes/kotori
def setup_h2m_structs_pyclibrary():
    """Build a StructRegistryByID for the h2m C structs, compiling/caching
    the library under the per-user kotori cache directory."""
    cache_dir = os.path.join(user_cache_dir('kotori'), 'lst')
    # exist_ok avoids the TOCTOU race of the old isdir-then-makedirs check
    os.makedirs(cache_dir, exist_ok=True)
    lib_dir = os.path.join(os.path.dirname(__file__), 'cpp')
    library = LibraryAdapter(u'h2m_structs.h', u'h2m_structs.so', include_path=lib_dir, library_path=lib_dir, cache_path=cache_dir)
    struct_registry = StructRegistryByID(library)
    return struct_registry
示例#7
0
    def __init__(self):
        """Resolve the per-user cache directory and create it if needed."""
        appname = "testapp"
        appauthor = "testauthor"
        self.dir = user_cache_dir(appname, appauthor)
        # exist_ok avoids the race between os.path.exists and os.makedirs
        os.makedirs(self.dir, exist_ok=True)
示例#8
0
    def __init__(self, session=None, cache_dir=None, auto_download=True, lang=None):  # pylint: disable=too-many-arguments
        """Set up the anidb client: cache directory, HTTP session, defaults.

        Args:
            session: optional requests.Session to reuse; created otherwise.
            cache_dir: optional existing cache directory; defaults to the
                per-user simpleanidb cache location.
            auto_download: whether data files may be fetched automatically.
            lang: titles language code; defaults to 'en'.
        Raises:
            ValueError: if the cache directory does not exist.
            IOError: if the cache directory is not writable.
        """
        if not cache_dir:
            # appauthor is required on Windows
            self._cache_dir = user_cache_dir('simpleanidb', appauthor='simpleanidb')
            # exist_ok avoids the race between the isdir check and makedirs
            os.makedirs(self._cache_dir, exist_ok=True)
        else:
            self._cache_dir = cache_dir
        if not os.path.isdir(self._cache_dir):
            raise ValueError('{0} does not exist'.format(self._cache_dir))
        elif not os.access(self._cache_dir, os.W_OK):
            raise IOError('{0} is not writable'.format(self._cache_dir))

        self.session = session or requests.Session()
        self.session.headers.setdefault('user-agent', 'simpleanidb/{0}.{1}.{2}'.format(*__version__))

        self.anime_titles_path = os.path.join(
            self._cache_dir, 'anime-titles.xml.gz')
        self.anime_list_path = os.path.join(
            self._cache_dir, 'anime-list.xml.gz')
        self.auto_download = auto_download
        self._xml_titles = self._xml = None
        self._xml_list = None
        # default to English titles when no language was requested
        self.lang = lang or 'en'
示例#9
0
文件: base.py 项目: desbma/sacad
 def __init__(self, target_size, size_tolerance_prct, *, min_delay_between_accesses=0, jitter_range_ms=None,
              allow_cookies=False):
   """Initialize a cover source.

   Args:
     target_size: desired cover size, in pixels.
     size_tolerance_prct: tolerated deviation from target_size, in percent.
     min_delay_between_accesses: rate limit between HTTP accesses, in seconds.
     jitter_range_ms: optional random delay jitter passed to the HTTP helper
       (presumably a (min, max) range in milliseconds — TODO confirm).
     allow_cookies: whether the HTTP session may keep cookies.
   """
   self.target_size = target_size
   self.size_tolerance_prct = size_tolerance_prct
   self.logger = logging.getLogger(self.__class__.__name__)
   self.http = http_helpers.Http(allow_session_cookies=allow_cookies,
                                 min_delay_between_accesses=min_delay_between_accesses,
                                 jitter_range_ms=jitter_range_ms,
                                 logger=self.logger)
   # both caches are class-level singletons shared by all instances
   if not hasattr(__class__, "api_cache"):
     db_filepath = os.path.join(appdirs.user_cache_dir(appname="sacad",
                                                       appauthor=False),
                                "sacad-cache.sqlite")
     os.makedirs(os.path.dirname(db_filepath), exist_ok=True)
     day_s = 60 * 60 * 24
     # expiration is randomized so entries do not all expire at once
     __class__.api_cache = web_cache.WebCache(db_filepath,
                                              "cover_source_api_data",
                                              caching_strategy=web_cache.CachingStrategy.FIFO,
                                              expiration=random.randint(day_s * 7, day_s * 14),  # 1-2 weeks
                                              compression=web_cache.Compression.DEFLATE)
     __class__.probe_cache = web_cache.WebCache(db_filepath,
                                                "cover_source_probe_data",
                                                caching_strategy=web_cache.CachingStrategy.FIFO,
                                                expiration=day_s * 30 * 6)  # 6 months
     logging.getLogger("Cache").debug("Total size of file '%s': %s" % (db_filepath,
                                                                       __class__.api_cache.getDatabaseFileSize()))
     # purge expired entries from both caches and log their statistics
     for cache, cache_name in zip((__class__.api_cache, __class__.probe_cache),
                                  ("cover_source_api_data", "cover_source_probe_data")):
       purged_count = cache.purge()
       logging.getLogger("Cache").debug("%u obsolete entries have been removed from cache '%s'" % (purged_count,
                                                                                                   cache_name))
       row_count = len(cache)
       logging.getLogger("Cache").debug("Cache '%s' contains %u entries" % (cache_name, row_count))
示例#10
0
文件: cache.py 项目: bmerry/pyopencl
def _create_built_program_from_source_cached(ctx, src, options, devices, cache_dir):
    """Build an OpenCL program from `src`, using an on-disk compiler cache.

    Parses -I//I include options out of `options`, resolves a default
    cache directory when none is given, and ensures that directory exists.
    """
    from os.path import join

    include_path = ["."]

    # Collect include directories: both "-Idir"/"/Idir" and the separated
    # "-I dir"/"/I dir" forms (which consume the following option).
    option_idx = 0
    while option_idx < len(options):
        option = options[option_idx].strip()
        if option.startswith("-I") or option.startswith("/I"):
            if len(option) == 2:
                if option_idx+1 < len(options):
                    include_path.append(options[option_idx+1])
                option_idx += 2
            else:
                include_path.append(option[2:].lstrip())
                option_idx += 1
        else:
            option_idx += 1

    if cache_dir is None:
        import appdirs
        # version-qualified so caches of different interpreters do not mix
        cache_dir = join(appdirs.user_cache_dir("pyopencl", "pyopencl"),
                "pyopencl-compiler-cache-v2-py%s" % (
                    ".".join(str(i) for i in sys.version_info),))

    # {{{ ensure cache directory exists

    try:
        os.makedirs(cache_dir)
    except OSError as e:  # Python 3 syntax (was Python-2-only `except OSError, e`)
        from errno import EEXIST
        if e.errno != EEXIST:
            raise
def get_profitbricks_client(username=None, password=None, api_version=None, endpoint=None,
                            config=None, store_endpoint=True, timeout=90):
    # pylint: disable=R0913
    """Connect to the API and return a ProfitBricks client object.

    If `username` is not specified, :func:`get_username()` is used to
    retrieve the username. If `password` is not specified,
    :func:`get_password()` is used for determining the password.
    `api_version`, `endpoint`, and `store_endpoint` are passed to a
    :func:`get_endpoint()` call to calculate the endpoint.

    A connection to the ProfitBricks public API is made and ProfitBricks
    client object is created. All available API calls will become methods
    of the returned client object.

    An :class:`urllib2.URLError` will be raised when the connection to
    the API failed. No error will be raised when the credentials are
    wrong, but method calls will raise a
    :class:`WrongCredentialsException`.
    """

    # fill in missing credentials from the config
    if config is None:
        config = get_config()
    if username is None:
        username = get_username(config)
    if password is None:
        password = get_password(username, config)
    endpoint = get_endpoint(api_version, endpoint, config, store_endpoint)

    # cache the WSDL in the per-user cache directory
    # NOTE(review): cachingpolicy=1 should mean "cache parsed objects" —
    # confirm against the suds documentation
    cachedir = appdirs.user_cache_dir(_SCRIPT_NAME, _COMPANY)
    cache = suds.cache.ObjectCache(cachedir)
    soap_client = suds.client.Client(endpoint, username=username, cache=cache,
                                     password=password, timeout=timeout, cachingpolicy=1)
    return _ProfitbricksClient(soap_client)
def cache_dir():
    '''Background cache directory'''
    try:
        from appdirs import user_cache_dir
        return user_cache_dir(__app_name__, __author__)
    except ImportError:
        return './'
示例#13
0
    def _get_pid_dir(self, app_name, arg_pid_dir):

        # first figure out what the directory is:
        if arg_pid_dir:
            pid_dir  = arg_pid_dir
            self.logger.debug('pid_dir will be based on arg: %s' % arg_pid_dir)
        else:
            if app_name:
                pid_dir  = os.path.join(appdirs.user_cache_dir(app_name), 'jobs')
                self.logger.debug('pid_dir will be based on user_cache_dir: %s' % pid_dir)
            else:
                err_msg = 'app_name must be provided if pid_dir is not'
                self.logger.critical(err_msg)
                raise ValueError, err_msg

        # next try to create it, just in case it isn't there
        try:
            os.makedirs(pid_dir)
        except OSError as exception:
            if exception.errno != errno.EEXIST:
                self.logger.critical('Error trying to access pid_dir: %s' % pid_dir)
                raise

        # finally, return it
        return pid_dir
示例#14
0
 def calc_user_cache_dir_var(self, make_dir=True):
     """Compute USER_CACHE_DIR into var_stack (once) and optionally create it.

     Mac/Linux combine company and executable name into one appdirs path
     parameter; Windows passes them separately.
     """
     if "USER_CACHE_DIR" not in var_stack:
         os_family_name = var_stack.resolve("$(__CURRENT_OS__)")
         if os_family_name == "Mac":
             user_cache_dir_param = "$(COMPANY_NAME)/$(INSTL_EXEC_DISPLAY_NAME)"
             user_cache_dir = appdirs.user_cache_dir(user_cache_dir_param)
         elif os_family_name == "Win":
             user_cache_dir = appdirs.user_cache_dir("$(INSTL_EXEC_DISPLAY_NAME)", "$(COMPANY_NAME)")
         elif os_family_name == "Linux":
             user_cache_dir_param = "$(COMPANY_NAME)/$(INSTL_EXEC_DISPLAY_NAME)"
             user_cache_dir = appdirs.user_cache_dir(user_cache_dir_param)
         else:
             # previously `user_cache_dir` stayed unbound on an unknown OS,
             # producing a confusing NameError below; fail explicitly
             # (matches the sibling implementation of this method)
             raise RuntimeError("Unknown operating system "+os_family_name)
         var_description = "from InstlInstanceBase.get_user_cache_dir"
         var_stack.set_var("USER_CACHE_DIR", var_description).append(user_cache_dir)
     if make_dir:
         user_cache_dir_resolved = var_stack.resolve("$(USER_CACHE_DIR)", raise_on_fail=True)
         safe_makedirs(user_cache_dir_resolved)
示例#15
0
文件: compiler.py 项目: drufat/pycuda
def compile(source, nvcc="nvcc", options=None, keep=False,
        no_extern_c=False, arch=None, code=None, cache_dir=None,
        include_dirs=[], target="cubin"):
    """Compile CUDA `source` with nvcc and return the `target` binary.

    :arg target: one of "cubin", "ptx" or "fatbin".
    :arg arch: e.g. "sm_35"; autodetected from the current context if None.
    :arg cache_dir: compiler cache directory; None picks a per-user default,
        False (set under CUDA_DEBUGGING) disables caching.
    """
    assert target in ["cubin", "ptx", "fatbin"]

    if not no_extern_c:
        source = 'extern "C" {\n%s\n}\n' % source

    if options is None:
        options = DEFAULT_NVCC_FLAGS

    options = options[:]  # copy, so the caller's list is never mutated
    if arch is None:
        from pycuda.driver import Error
        try:
            from pycuda.driver import Context
            arch = "sm_%d%d" % Context.get_device().compute_capability()
        except Error:
            # no active context: let nvcc use its default architecture
            pass

    from pycuda.driver import CUDA_DEBUGGING
    if CUDA_DEBUGGING:
        cache_dir = False
        keep = True
        options.extend(["-g", "-G"])

    if cache_dir is None:
        from os.path import join
        import appdirs
        cache_dir = os.path.join(appdirs.user_cache_dir("pycuda", "pycuda"),
                "compiler-cache-v1")

        from os import makedirs
        try:
            makedirs(cache_dir)
        except OSError as e:
            from errno import EEXIST
            if e.errno != EEXIST:
                raise

    if arch is not None:
        options.extend(["-arch", arch])

    if code is not None:
        options.extend(["-code", code])

    # pass -m64/-m32 matching the host pointer size;
    # sys.maxsize replaces the Python-2-only sys.maxint
    if 'darwin' in sys.platform and sys.maxsize == 9223372036854775807:
        options.append('-m64')
    elif 'win32' in sys.platform and sys.maxsize == 9223372036854775807:
        options.append('-m64')
    elif 'win32' in sys.platform and sys.maxsize == 2147483647:
        options.append('-m32')

    # note: the include_dirs=[] default is safe here because the list is
    # only concatenated, never mutated
    include_dirs = include_dirs + [_find_pycuda_include_path()]

    for i in include_dirs:
        options.append("-I"+i)

    return compile_plain(source, options, keep, nvcc, cache_dir, target)
示例#16
0
def parse_args():
    """Parse and validate himawaripy's command line arguments.

    Exits with an error message when --offset or --deadline is out of range.
    """
    parser = argparse.ArgumentParser(description="set (near-realtime) picture of Earth as your desktop background",
                                     epilog="http://labs.boramalper.org/himawaripy")

    parser.add_argument("--version", action="version", version="%(prog)s {}.{}.{}".format(*HIMAWARIPY_VERSION))

    # --auto-offset and --offset are mutually exclusive
    group = parser.add_mutually_exclusive_group()

    group.add_argument("--auto-offset", action="store_true", dest="auto_offset", default=False,
                       help="determine offset automatically")
    group.add_argument("-o", "--offset", type=int, dest="offset", default=10,
                       help="UTC time offset in hours, must be less than or equal to +10")

    parser.add_argument("-l", "--level", type=int, choices=[4, 8, 16, 20], dest="level", default=4,
                        help="increases the quality (and the size) of each tile. possible values are 4, 8, 16, 20")
    parser.add_argument("-d", "--deadline", type=int, dest="deadline", default=6,
                        help="deadline in minutes to download all the tiles, set 0 to cancel")
    parser.add_argument("--save-battery", action="store_true", dest="save_battery", default=False,
                        help="stop refreshing on battery")
    parser.add_argument("--output-dir", type=str, dest="output_dir",
                        help="directory to save the temporary background image",
                        default=appdirs.user_cache_dir(appname="himawaripy", appauthor=False))

    args = parser.parse_args()

    if not -12 <= args.offset <= 10:
        sys.exit("OFFSET has to be between -12 and +10!\n")

    if not args.deadline >= 0:
        sys.exit("DEADLINE has to be greater than (or equal to if you want to disable) zero!\n")

    return args
示例#17
0
    def _build_session(cls, cache, cache_expire_after, user_agent):
        """Build and install `cls.session`.

        `cache` may be a path string (used as the cache directory) or a
        truthy/falsy flag; when truthy a requests_cache CachedSession backed
        by sqlite is used, otherwise a plain requests.Session.
        """
        # Retrieve cache directory
        if isinstance(cache, compat.string_types):
            cache_dir = cache
        else:
            cache_dir = user_cache_dir('tmdbsimple')

        # Ensure cache directory exists
        if not os.path.exists(cache_dir):
            os.makedirs(cache_dir)

        if cache:
            # Construct cached requests session
            import requests_cache

            cls.session = requests_cache.CachedSession(
                allowable_codes=(200, 404),  # cache "not found" replies too
                expire_after=cache_expire_after,
                backend='sqlite',
                cache_name=os.path.join(cache_dir, 'tmdbsimple'),
            )
        else:
            # Construct simple requests session
            cls.session = requests.Session()

        # Set user agent
        cls.session.headers.update({
            'Accept': 'application/json',
            'Content-Type': 'application/json',
            'Connection': 'close',
            'User-Agent': user_agent
        })
    def run(self, cache=True):
        """Run application.

        Executes the query (optionally through an on-disk requests cache),
        times it, and prints price statistics when any data was found.
        """

        self._query()

        # configure `requests` cache
        if cache:
            cache_dir = appdirs.user_cache_dir('craigslist')
            os.makedirs(cache_dir, exist_ok=True)
            requests_cache.install_cache(
                cache_name=os.path.join(cache_dir, 'craigslist'),
                expire_after=timedelta(hours=0.5))

        print('Running query...\n')

        # record the start time
        start = time.time()

        self.prices = self._getprices()

        # determine elapsed time of queries
        self.duration = time.time() - start

        # remove expired cache entries
        # NOTE(review): `requests_cache.core` was removed in newer
        # requests_cache releases — confirm the pinned version exposes it
        if cache:
            requests_cache.core.remove_expired_responses()

        # print statistics (if any price data exists)
        if self.prices:
            self._print()
        else:
            print('Nothing found for that search.')
示例#19
0
    def __init__(self, identifier, key_builder=None, container_dir=None):
        """
        :arg identifier: a file-name-compatible string identifying this
            dictionary
        :arg key_builder: a :class:`KeyBuilder` instance (a plain
            ``KeyBuilder()`` is used when omitted)
        :arg container_dir: directory holding the on-disk dictionary;
            defaults to a per-user, per-Python-version cache location
        """

        self.identifier = identifier

        if key_builder is None:
            key_builder = KeyBuilder()

        self.key_builder = key_builder

        from os.path import join
        if container_dir is None:
            import appdirs
            # version-qualified so caches written by different interpreter
            # versions do not collide
            container_dir = join(
                    appdirs.user_cache_dir("pytools", "pytools"),
                    "pdict-v2-%s-py%s" % (
                        identifier,
                        ".".join(str(i) for i in sys.version_info),))

        self.container_dir = container_dir

        self._make_container_dir()
示例#20
0
def get_data_dir(subdir=None, envkey=None):
    """Resolve a data directory.

    An environment override named `envkey` wins (with `subdir` appended
    when given); otherwise the appdirs per-user cache dir for `subdir`
    (default "datacache") is returned.
    """
    override = environ.get(envkey) if envkey else None
    if override:
        return join(override, subdir) if subdir else override
    return appdirs.user_cache_dir(subdir if subdir else "datacache")
示例#21
0
def get_maps_cache_dir():
    """Return (and create) the visbio map cache directory.

    The returned path ends with a separator (empty last join component).
    """
    cache_dir = appdirs.user_cache_dir('visbio', appauthor="Zachary King")
    map_cache_dir = join(cache_dir, "map_cache", "")
    # exist_ok replaces the old blanket `except OSError: pass`, which also
    # silently hid real failures such as permission errors
    os.makedirs(map_cache_dir, exist_ok=True)
    return map_cache_dir
示例#22
0
文件: c.py 项目: hiveeyes/kotori
 def from_header(cls, include_path=None, header_files=None):
     """Create a LibraryAdapter by compiling the given header files,
     caching build artifacts under the per-user cache directory."""
     cache_dir = user_cache_dir('lst', 'kotori')
     # exist_ok avoids the TOCTOU race of the old isdir-then-makedirs check
     os.makedirs(cache_dir, exist_ok=True)
     library = LibraryAdapter(
         header_files, cls.compile(include_path, header_files),
         include_path=include_path, library_path=include_path,
         cache_path=cache_dir)
     return library
示例#23
0
 def test_helpers(self):
     """Each appdirs helper must return a string path."""
     helpers = (appdirs.user_data_dir, appdirs.site_data_dir,
                appdirs.user_cache_dir, appdirs.user_log_dir)
     for helper in helpers:
         self.assertIsInstance(helper('MyApp', 'MyCompany'), STRING_TYPE)
示例#24
0
 def calc_user_cache_dir_var(self, make_dir=True):
     """Set USER_CACHE_DIR in var_stack (computed once) and, when make_dir
     is True, create the resolved directory.

     Mac/Linux combine company and executable name into one appdirs path
     parameter; Windows passes them separately. Raises RuntimeError on an
     unrecognized OS name.
     """
     if "USER_CACHE_DIR" not in var_stack:
         os_family_name = var_stack.ResolveVarToStr("__CURRENT_OS__")
         if os_family_name == "Mac":
             user_cache_dir_param = "$(COMPANY_NAME)/$(INSTL_EXEC_DISPLAY_NAME)"
             user_cache_dir = appdirs.user_cache_dir(user_cache_dir_param)
         elif os_family_name == "Win":
             user_cache_dir = appdirs.user_cache_dir("$(INSTL_EXEC_DISPLAY_NAME)", "$(COMPANY_NAME)")
         elif os_family_name == "Linux":
             user_cache_dir_param = "$(COMPANY_NAME)/$(INSTL_EXEC_DISPLAY_NAME)"
             user_cache_dir = appdirs.user_cache_dir(user_cache_dir_param)
         else:
             raise RuntimeError("Unknown operating system "+os_family_name)
         var_description = "from InstlInstanceBase.get_user_cache_dir"
         var_stack.set_var("USER_CACHE_DIR", var_description).append(user_cache_dir)
     if make_dir:
         user_cache_dir_resolved = var_stack.ResolveVarToStr("USER_CACHE_DIR")
         os.makedirs(user_cache_dir_resolved, exist_ok=True)
示例#25
0
def mktmpdir():
    """
    Due to OSX and boot2docker, I can't use the tempdir module as /tmp cannot
    be mounted in boot2docker (only /Users/<user> is available)
    """
    # unique leaf directory under the per-user cache location
    root = appdirs.user_cache_dir('ansible_role_test', 'aeriscloud')
    path = os.path.join(root, uuid.uuid4().hex)
    os.makedirs(path)
    return path
示例#26
0
文件: cache.py 项目: clld/clldclient
 def __init__(self):
     """Open (or create) the sqlite cache database in the user cache dir."""
     cache_dir = user_cache_dir(clldclient.__name__)
     self.path = os.path.join(cache_dir, 'db.sqlite')
     if not os.path.exists(cache_dir):
         try:
             os.makedirs(cache_dir)
         except OSError:  # pragma: no cover
             # cannot create the cache dir; presumably init_db treats a
             # None path as "no on-disk cache" — TODO confirm
             self.path = None
     self.db = self.init_db()
示例#27
0
文件: test_api.py 项目: eddyp/appdirs
 def test_helpers(self):
     """All four appdirs helper functions must return plain strings."""
     for fn in (appdirs.user_data_dir, appdirs.site_data_dir,
                appdirs.user_cache_dir, appdirs.user_log_dir):
         self.assertTrue(isinstance(fn('MyApp', 'MyCompany'), str))
示例#28
0
def get_data_dir(subdir=None, envkey=None):
    """Return the data directory for `subdir` (default "epitopes").

    For the default "epitopes" subdir the EPITOPES_data_dir environment
    variable takes precedence over the appdirs per-user cache location.
    `envkey` is accepted for interface compatibility but (as before) unused.
    """
    if subdir is None:
        subdir = "epitopes"

    # Check the override first, so appdirs is not consulted needlessly;
    # also avoids shadowing the `dir` builtin as the old code did.
    if subdir == "epitopes" and "EPITOPES_data_dir" in environ:
        return environ["EPITOPES_data_dir"]
    return appdirs.user_cache_dir(subdir)
示例#29
0
def fm_index_path(genome):
    """
    Returns a path for cached reference peptides, for the given genome.

    The filename encodes species, release and the major Python version so
    indexes built under Python 2 and 3 do not collide.
    """
    cache_dir = user_cache_dir('vaxrank')
    # exist_ok avoids the race between os.path.exists and os.makedirs
    os.makedirs(cache_dir, exist_ok=True)

    return os.path.join(cache_dir, '%s_%d_%d.fm' % (
        genome.species.latin_name, genome.release, 2 if six.PY2 else 3))
示例#30
0
def _get_default_cache_path():
    """Return the default zeep cache DB path, creating its directory."""
    path = appdirs.user_cache_dir('zeep', False)
    # exist_ok=True reproduces the old EEXIST-and-isdir check: an existing
    # directory is fine, an existing regular file still raises.
    os.makedirs(path, exist_ok=True)
    return os.path.join(path, 'cache.db')
示例#31
0
from miio.device import UpdateState
from miio.miioprotocol import MiIOProtocol
from miio.updater import OneShotServer

_LOGGER = logging.getLogger(__name__)
pass_dev = click.make_pass_decorator(miio.Device, ensure=True)


@click.group(invoke_without_command=True, cls=ExceptionHandlerGroup)
@click.option("--ip", envvar="MIROBO_IP", callback=validate_ip)
@click.option("--token", envvar="MIROBO_TOKEN", callback=validate_token)
@click.option("-d", "--debug", default=False, count=True)
@click.option(
    "--id-file",
    type=click.Path(dir_okay=False, writable=True),
    default=user_cache_dir("python-miio") + "/python-mirobo.seq",
)
@click.version_option()
@click.pass_context
def cli(ctx, ip: str, token: str, debug: int, id_file: str):
    """A tool to command Xiaomi Vacuum robot."""
    # -d/--debug is a counter: any occurrence enables DEBUG logging
    if debug:
        logging.basicConfig(level=logging.DEBUG)
        _LOGGER.info("Debug mode active")
    else:
        logging.basicConfig(level=logging.INFO)

    # if we are scanning, we do not try to connect.
    if ctx.invoked_subcommand == "discover":
        ctx.obj = "discover"
        return
示例#32
0
文件: utils.py 项目: cwinpy/cwinpy
from numba import jit, njit
from numba.extending import get_cython_function_address

from .parfile import PulsarParameters

#: exit code to return when checkpointing
CHECKPOINT_EXIT_CODE = 77

#: URL for LALSuite solar system ephemeris files
LAL_EPHEMERIS_URL = "https://git.ligo.org/lscsoft/lalsuite/raw/master/lalpulsar/lib/{}"

#: the current solar system ephemeris types in LALSuite
LAL_EPHEMERIS_TYPES = ["DE200", "DE405", "DE421", "DE430"]

#: the location for caching ephemeris files
EPHEMERIS_CACHE_DIR = appdirs.user_cache_dir(appname="cwinpy", appauthor=False)

#: the current TEMPO-compatible binary system model types provided in LALSuite
LAL_BINARY_MODELS = [
    "BT",
    "BT1P",
    "BT2P",
    "BTX",
    "ELL1",
    "DD",
    "DDS",
    "MSS",
    "T2",
]

#: aliases between GW detector prefixes and TEMPO2 observatory names
示例#33
0
    validate_token,
)
from .device import UpdateState
from .updater import OneShotServer

_LOGGER = logging.getLogger(__name__)
pass_dev = click.make_pass_decorator(miio.Device, ensure=True)


@click.group(invoke_without_command=True, cls=ExceptionHandlerGroup)
@click.option('--ip', envvar="MIROBO_IP", callback=validate_ip)
@click.option('--token', envvar="MIROBO_TOKEN", callback=validate_token)
@click.option('-d', '--debug', default=False, count=True)
@click.option('--id-file',
              type=click.Path(dir_okay=False, writable=True),
              default=user_cache_dir('python-miio') + '/python-mirobo.seq')
@click.version_option()
@click.pass_context
def cli(ctx, ip: str, token: str, debug: int, id_file: str):
    """A tool to command Xiaomi Vacuum robot."""
    # -d/--debug is a counter: any occurrence enables DEBUG logging
    if debug:
        logging.basicConfig(level=logging.DEBUG)
        _LOGGER.info("Debug mode active")
    else:
        logging.basicConfig(level=logging.INFO)

    # if we are scanning, we do not try to connect.
    if ctx.invoked_subcommand == "discover":
        ctx.obj = "discover"
        return
示例#34
0
import os
import time
import webbrowser
from http.server import BaseHTTPRequestHandler, HTTPServer
from queue import Queue
from threading import Thread

import requests
from appdirs import user_cache_dir
from dotenv import load_dotenv

load_dotenv()
APP_ID = os.environ.get("APP_ID")
APP_SECRET = os.environ.get("APP_SECRET")
USER_ID = os.environ.get("USER_ID")
PLAYLIST_DIR = os.path.join(user_cache_dir("spotify-to-mp3"), "playlists")


class Worker(Thread):
    """ Thread executing tasks from a given task queue """
    def __init__(self, tasks):
        """Remember the shared task queue and start as a daemon thread."""
        Thread.__init__(self)
        self.tasks = tasks
        # daemon thread: does not keep the interpreter alive on exit
        self.daemon = True
        self.start()

    def run(self):
        while True:
            func, args = self.tasks.get()
            try:
                func(*args)
示例#35
0
import os.path

import appdirs

# Increases the quality and the size. Possible values: 4, 8, 16, 20
level = 4

# Define an hourly offset or let the script calculate it depending on your timezone
# If auto_offset is True, then script will calculate your hour offset automatically depending on your location.
# If hour_offset is greater than 0, then script will use it.
# If both of the variables are set different than their default values below, then script will raise an error. Here,
# using the default values, script will put the realtime picture of Earth.
auto_offset = True
hour_offset = 0

# Path to the output file
output_file = os.path.join(
    appdirs.user_cache_dir(appname="himawaripy", appauthor=False),
    "latest.png")

# Xfce4 displays to change the background of
xfce_displays = [
    "/backdrop/screen0/monitor0/image-path",
    "/backdrop/screen0/monitor0/workspace0/last-image"
]
示例#36
0
# Debug/trace flags — all disabled by default.
print_input_ir = False

print_line_numbers = False

print_function_source = False

print_commands = False
print_command_elapsed_time = False

# Generate a .c file or a .cpp?
pure_c = True

# Throw away .c and .o files
delete_temp_files = True

# Directory to use for caching generated modules.
# Set to None to disable caching.
from appdirs import user_cache_dir
cache_dir = user_cache_dir('parakeet')

# if compiling C or OpenMP we can skip some of the craziness and
# have distutils figure out the system config and compiler for us
use_distutils = True

# Show all compiler warnings?  (False here means output is NOT suppressed.)
suppress_compiler_output = False

# when compiling with NVCC, other headers get implicitly included
# and cause warnings since Python redefines _POSIX_C_SOURCE
# we can undefine it before including Python.h to get rid of those warnings
undef_posix_c_source = True
示例#37
0
文件: srtm.py 项目: DFEvans/bother
import requests
import appdirs
import rasterio
import rasterio.merge
from rasterio.io import MemoryFile


# Remote layout of the CGIAR SRTM 5x5-degree GeoTIFF tiles.
ZIP_BASE_URL = 'http://srtm.csi.cgiar.org/wp-content/uploads/files/srtm_5x5/TIFF/'
ZIP_FNAME = 'srtm_{x:02d}_{y:02d}.zip'
TIF_FNAME = 'srtm_{x:02d}_{y:02d}.tif'

# Valid tile indices (inclusive) and per-tile raster size in pixels.
TILE_X_BOUNDS = (1, 72)
TILE_Y_BOUNDS = (1, 24)
TILE_SHAPE = (6000, 6000)
# Sentinel elevation value marking missing data in SRTM rasters.
SRTM_NODATA = -32768
# Downloaded tiles are cached per-user (see is_cached below).
CACHE_DIR = appdirs.user_cache_dir('bother', appauthor=False)


def wrap_range(start: int, end: int, min_val: int = 1, max_val: int = 72) -> Iterable[int]:
    i = start
    if not ((min_val <= start <= max_val) and (min_val <= end <= max_val)):
        raise ValueError('start and end must each be between min_val and max_val.')
    while i != end:
        if i > max_val:
            i = min_val
        yield i
        i += 1

def is_cached(fname: str, cache_dir: str) -> bool:
    try:
        return fname in os.listdir(cache_dir)
示例#38
0
    logging.basicConfig(level=log_level, filename=args.log_file)
else:
    logging.basicConfig(level=log_level)


# find path for tasks queue
if args.queue_path is not None:
    # FIX: was `args.qeueu` — argparse defines `queue`, so the misspelled
    # attribute raised AttributeError whenever --queue-path was given.
    if args.queue is not None:
        raise ValueError("cannot specify --queue and --queue-path at the same time")

    # FIX: was `args.qeueu_path` — same misspelling; use the path we just checked.
    queue_path = Path(args.queue_path)

elif args.queue == "<temp>":
    # Throw-away queue in the system temp directory.
    queue_path = Path(tempfile.gettempdir()).joinpath("qop-temp.sqlite3")

elif args.queue is not None:
    # Named queue in the per-user cache directory.
    queue_path = Path(appdirs.user_cache_dir("qop")).joinpath(f"{args.queue}.sqlite3")

else:
    queue_path = Path(appdirs.user_cache_dir("qop")).joinpath("default.sqlite3")

# Ensure the directory holding the queue database exists.
if not queue_path.parent.exists():
    queue_path.parent.mkdir(parents=True)
    lg.info(f"created default directory for qop queues: '{queue_path.parent}'")


# launch daemon; the queue is only persisted for non-temporary queues
with daemon.QopDaemon(port=9393, queue_path=queue_path, persist_queue=(args.queue != "<temp>")) as qopd:
    qopd.listen()

示例#39
0
import os
import sys
import git
import json
import appdirs
import requests
import datetime

# Gearbox hotfix verification endpoint (epic/pc/oak = Borderlands 3).
hotfix_url = 'https://discovery.services.gearboxsoftware.com/v2/client/epic/pc/oak/verification'
output_dir = '/home/pez/git/b2patching/bl3hotfixes'
point_in_time_base = 'point_in_time'
point_in_time_dir = os.path.join(output_dir, point_in_time_base)
cumulative_file = 'hotfixes_current.json'

# Get our cache dir, and create if it doesn't exist
cache_dir = appdirs.user_cache_dir('bl3hotfixes', 'Apocalyptech')
if not os.path.isdir(cache_dir):
    os.makedirs(cache_dir)
# Re-check rather than trusting makedirs, failing loudly if creation failed.
if not os.path.isdir(cache_dir):
    raise Exception('Couldn\'t create cache dir: {}'.format(cache_dir))

# Get our current hotfix data, if we can
# (raw JSON text from the previous run — presumably compared against the
# fresh server data later in the script; TODO confirm against the full file)
hotfix_cache = os.path.join(cache_dir, 'hotfixes.json')
cur_hotfixes = None
if os.path.exists(hotfix_cache):
    with open(hotfix_cache) as df:
        cur_hotfixes = df.read()

# Grab hotfixes (and other data) from server.
# NOTE: network I/O happens right here at module/script level.
r = requests.get(hotfix_url)
verification = json.loads(r.text)
示例#40
0
import argparse
import tarfile
import io
from urllib.request import urlopen
import re

from IPython.display import display, HTML, Audio, update_display
import ipywidgets as widgets
import appdirs

from basismixer.utils import pair_files

REPO_NAME = 'vienna4x22_rematched'
# GitLab tarball endpoint for the dataset repository.
DATASET_URL = 'https://jobim.ofai.at/gitlab/accompanion/{}/repository/archive'.format(REPO_NAME)
OGG_URL_BASE = 'https://spocs.duckdns.org/vienna_4x22/'
TMP_DIR = appdirs.user_cache_dir('basismixer')
# this is where our data set will be
DATASET_DIR = os.path.join(TMP_DIR, '{}.git'.format(REPO_NAME))
# Placeholders; populated by init() at runtime (declared global there).
PIECES = ()
PERFORMERS = ()
SCORE_PERFORMANCE_PAIRS = None

def init():
    global DATASET_DIR, PIECES, PERFORMERS, SCORE_PERFORMANCE_PAIRS

    status = widgets.Output()
    display(status)
    status.clear_output()

    # # assume we have the data to avoid download
    # DATASET_DIR = '/tmp/vienna4x22_rematched.git'
示例#41
0
}

# Substrings that identify a Google "unusual traffic"/captcha block page.
BLOCK_INDICATORS = (
    'form id="captcha-form"',
    'This page appears when Google automatically detects requests coming from your computer '
    'network which appear to be in violation of the <a href="//www.google.com/policies/terms/">Terms of Service'
)

# Question URLs containing these fragments are treated as blocked.
BLOCKED_QUESTION_FRAGMENTS = ('webcache.googleusercontent.com', )

STAR_HEADER = u('\u2605')  # "black star" character used to decorate answer headers
ANSWER_HEADER = u('{2}  Answer from {0} {2}\n{1}')
NO_ANSWER_MSG = '< no answer given >'

# Placeholder value cached for queries known to have no answer.
CACHE_EMPTY_VAL = "NULL"
CACHE_DIR = appdirs.user_cache_dir('howdoi')
CACHE_ENTRY_MAX = 128

# Caching is opt-out via environment variable.
if os.getenv('HOWDOI_DISABLE_CACHE'):
    cache = NullCache()  # works like an always empty cache
else:
    cache = FileSystemCache(CACHE_DIR, CACHE_ENTRY_MAX, default_timeout=0)

# Shared HTTP session so repeated requests reuse connections.
howdoi_session = requests.session()


class BlockError(RuntimeError):
    """Raised when the search provider serves a block/captcha page."""
    pass


def _random_int(width):
示例#42
0
def _create_built_program_from_source_cached(ctx, src, options_bytes,
        devices, cache_dir, include_path):
    """Build *src* for *devices* in *ctx*, caching per-device binaries on disk.

    Looks up each device's binary in *cache_dir* (keyed by device, build
    options and source), builds only the missing ones in a single compiler
    run, and writes freshly built binaries (plus the source and dependency
    info) back to the cache under a lock.

    Returns ``(program, already_built)`` where *already_built* is True when
    the returned program was compiled in this call rather than reconstructed
    from cached binaries.
    """
    from os.path import join

    if cache_dir is None:
        import appdirs
        # Cache is keyed by Python version so binaries from different
        # interpreters do not collide.
        cache_dir = join(appdirs.user_cache_dir("pyopencl", "pyopencl"),
                "pyopencl-compiler-cache-v2-py%s" % (
                    ".".join(str(i) for i in sys.version_info),))

    # {{{ ensure cache directory exists

    try:
        os.makedirs(cache_dir)
    except OSError as e:
        from errno import EEXIST
        if e.errno != EEXIST:
            raise

    # }}}

    if devices is None:
        devices = ctx.devices

    cache_keys = [get_cache_key(device, options_bytes, src) for device in devices]

    # One slot per device; None marks a cache miss that needs building.
    binaries = []
    to_be_built_indices = []
    logs = []
    for i, (device, cache_key) in enumerate(zip(devices, cache_keys)):
        cache_result = retrieve_from_cache(cache_dir, cache_key)

        if cache_result is None:
            to_be_built_indices.append(i)
            binaries.append(None)
            logs.append(None)
        else:
            binary, log = cache_result
            binaries.append(binary)
            logs.append(log)

    # Surface build warnings that were recorded with the cached binaries.
    message = (75*"="+"\n").join(
            "Build on %s succeeded, but said:\n\n%s" % (dev, log)
            for dev, log in zip(devices, logs)
            if log is not None and log.strip())

    if message:
        from pyopencl import compiler_output
        compiler_output(
                "Built kernel retrieved from cache. Original from-source "
                "build had warnings:\n"+message)

    # {{{ build on the build-needing devices, in one go

    result = None
    already_built = False

    if to_be_built_indices:
        # defeat implementation caches:
        from uuid import uuid4
        src = src + "\n\n__constant int pyopencl_defeat_cache_%s = 0;" % (
                uuid4().hex)

        prg = _cl._Program(ctx, src)
        prg.build(options_bytes, [devices[i] for i in to_be_built_indices])

        prg_devs = prg.get_info(_cl.program_info.DEVICES)
        prg_bins = prg.get_info(_cl.program_info.BINARIES)
        prg_logs = prg._get_build_logs()

        for dest_index in to_be_built_indices:
            dev = devices[dest_index]
            src_index = prg_devs.index(dev)
            binaries[dest_index] = prg_bins[src_index]
            _, logs[dest_index] = prg_logs[src_index]

        if len(to_be_built_indices) == len(devices):
            # Important special case: if code for all devices was built,
            # then we may simply use the program that we just built as the
            # final result.

            result = prg
            already_built = True

    if result is None:
        result = _cl._Program(ctx, devices, binaries)

    # }}}

    # {{{ save binaries to cache

    if to_be_built_indices:
        cleanup_m = CleanupManager()
        try:
            try:
                CacheLockManager(cleanup_m, cache_dir)

                for i in to_be_built_indices:
                    cache_key = cache_keys[i]
                    binary = binaries[i]

                    mod_cache_dir_m = ModuleCacheDirManager(cleanup_m,
                            join(cache_dir, cache_key))
                    info_path = mod_cache_dir_m.sub("info")
                    binary_path = mod_cache_dir_m.sub("binary")
                    source_path = mod_cache_dir_m.sub("source.cl")

                    # FIX: use context managers so the handles are closed
                    # even if a write fails (the originals leaked handles
                    # on error).
                    with open(source_path, "wt") as outf:
                        outf.write(src)

                    with open(binary_path, "wb") as outf:
                        outf.write(binary)

                    from six.moves.cPickle import dump
                    with open(info_path, "wb") as info_file:
                        dump(_SourceInfo(
                            dependencies=get_dependencies(src, include_path),
                            log=logs[i]), info_file)

            except BaseException:
                # Roll back partially written cache entries, then re-raise.
                cleanup_m.error_clean_up()
                raise
        finally:
            cleanup_m.clean_up()

    # }}}

    return result, already_built
示例#43
0
            Path("targets") / target_name
        ],
        encoding="utf-8",
        stderr=subprocess.STDOUT if stderr else subprocess.PIPE,
    )

    if output_format in {"json", "sarif"} and not stderr:
        output = _clean_output_json(output)

    return output


# Local clone cache for test repositories; override with GITHUB_REPO_CACHE.
REPO_CACHE = Path(
    os.path.expanduser(
        os.environ.get("GITHUB_REPO_CACHE",
                       appdirs.user_cache_dir("semgrep-tests"))))


@pytest.fixture()
def clone_github_repo():
    """
    Fixture to clone a github repo. Usage:
    ```
    def my_test_function(clone_github_repo):
        repo_path = clone_github_repo(url="https://github.com/returntocorp/semgrep", sha="abdfe")
        subprocess.run(["ls", repo_path])
    ```

    :returns: A path to the repo, guaranteed to live at least until the end of the test
    """
    # Yields the retry wrapper (defined elsewhere in this module) as the callable.
    yield _github_repo_retry_wrapper
示例#44
0
from .config import (
    NgdConfig,
)
from .jobs import DownloadJob
from . import metadata
from .summary import SummaryReader

# Python < 2.7.9 hack: fix ssl support
# (injects pyOpenSSL into urllib3 on these old interpreters)
if sys.version_info < (2, 7, 9):  # pragma: no cover
    from requests.packages.urllib3.contrib import pyopenssl
    pyopenssl.inject_into_urllib3()


# Get the user's cache dir in a system-independent manner
CACHE_DIR = user_cache_dir(appname="ncbi-genome-download", appauthor="kblin")


def argument_parser(version=None):
    """Create the argument parser for ncbi-genome-download."""
    parser = argparse.ArgumentParser()
    parser.add_argument('group',
                        default=NgdConfig.get_default('group'),
                        help='The NCBI taxonomic group to download (default: %(default)s). '
                        'A comma-separated list of taxonomic groups is also possible. For example: "bacteria,viral"'
                        'Choose from: {choices}'.format(choices=NgdConfig.get_choices('group')))
    parser.add_argument('-s', '--section', dest='section',
                        choices=NgdConfig.get_choices('section'),
                        default=NgdConfig.get_default('section'),
                        help='NCBI section to download (default: %(default)s)')
    parser.add_argument('-F', '--format', dest='file_format',
示例#45
0
import os

import appdirs
import distlib.database
import distlib.scripts
import distlib.wheel
import packaging.utils
import pip_shims
import setuptools.dist
import six
import vistir

from ._pip_shims import VCS_SUPPORT, build_wheel as _build_wheel, unpack_url

# Cache location; overridable via the PACKAGEBUILDER_CACHE_DIR env var.
CACHE_DIR = os.environ.get("PACKAGEBUILDER_CACHE_DIR",
                           appdirs.user_cache_dir("packagebuilder"))


def filter_sources(requirement, sources):
    """Return the sources matching *requirement*'s index, if any.

    When *sources* is empty, the requirement pins no index, or no source
    entry's "name" matches the pinned index, the original *sources* list
    is returned unchanged.
    """
    if not sources or not requirement.index:
        return sources
    matching = [
        entry for entry in sources if entry.get("name") == requirement.index
    ]
    return matching or sources
示例#46
0
def main(argv=None):
    """Interactive entry point for mounting Dropbox as a userspace filesystem.

    Loads (or interactively creates) the JSON config and a Dropbox access
    token, optionally persists credentials, then hands off to
    ``userspacefs.simple_main``.  Returns a process exit code.
    """
    # Protect access token and potentially encryption keys
    block_tracing()

    if argv is None:
        argv = sys.argv

    parser = argparse.ArgumentParser()
    userspacefs.add_cli_arguments(parser)
    parser.add_argument("-c", "--config-file", help="config file path")
    parser.add_argument(
        "-e",
        "--encrypted-folder",
        dest='encrypted_folders',
        type=parse_encrypted_folder_arg,
        default=[],
        action='append',
        help=
        "relative paths of encrypted folders, can be used multiple times. requires safefs"
    )
    parser.add_argument(
        "--print-default-config-file",
        action='store_true',
        help="print default config file path to standard out and quit")
    parser.add_argument("mount_point", nargs='?')
    args = parser.parse_args(argv[1:])

    # Resolve the config file (CLI flag overrides the per-user default).
    config_dir = appdirs.user_config_dir(APP_NAME)

    if args.config_file is not None:
        config_file = args.config_file
    else:
        config_file = os.path.join(config_dir, "config.json")

    if args.print_default_config_file:
        print(config_file)
        return 0

    if not args.smb_no_mount and args.mount_point is None:
        parser.print_usage()
        print("%s: error: please provide the mount_point argument" %
              (os.path.basename(argv[0]), ))
        return 1

    os.makedirs(config_dir, exist_ok=True)

    # Missing config file is fine (fresh install); invalid JSON is fatal.
    config = {}
    try:
        f = open(config_file)
    except IOError as e:
        if e.errno != errno.ENOENT: raise
    else:
        try:
            with f:
                config = json.load(f)
        except ValueError as e:
            print("Config file %r is not valid json: %s" % (config_file, e))
            return -1

    # Token acquisition, tried in order: external command, OS keyring,
    # passphrase-encrypted blob, then interactive OAuth below.
    access_token = None
    save_access_token = False
    save_config = False

    access_token_command = config.get("access_token_command", None)
    if access_token_command is not None:
        print("Running %r for access token" %
              (' '.join(access_token_command), ))
        try:
            access_token = subprocess.check_output(
                access_token_command).decode("utf-8")
        except TypeError:
            print("Bad access token command: %r, " % (access_token_command, ))
            return -1

    if access_token is None:
        keyring_user = config.get("keyring_user", None)

        if keyring_user is not None:
            try:
                access_token = keyring.get_password(APP_NAME, keyring_user)
            except KeyringError as e:
                print("Failed to get access token from keyring: %s" % (e, ))

    if access_token is None:
        access_token_privy = config.get("access_token_privy", None)
        if access_token_privy is not None:
            passwd = None
            while True:
                passwd = getpass.getpass(
                    "Enter access token passphrase (not your Dropbox password) (Ctrl-C to quit): "
                )
                try:
                    access_token = privy.peek(access_token_privy,
                                              passwd).decode('utf-8')
                except ValueError:
                    if not yes_no_input(
                            "Incorrect password, create new access token?"):
                        continue
                break
            del passwd

    # Keep prompting until a token actually validates against the API.
    try_directly = False
    while True:
        if access_token is None:
            save_access_token = True

        if (access_token is None and try_directly and yes_no_input(
                "Want to try entering the access token directly?")):
            print("Go to https://dropbox.com/developers/apps to "
                  "create an app and generate a personal access token.")

            while True:
                access_token = getpass.getpass(
                    "Enter Access token (Ctrl-C to quit): ")
                if not access_token:
                    print("Access tokens cannot be empty")
                    continue
                break

        if access_token is None:
            auth_flow = dropbox.DropboxOAuth2FlowNoRedirect(
                APP_KEY, APP_SECRET)
            authorize_url = auth_flow.start()
            print("We need an access token. Perform the following steps:")
            print("1. Go to " + authorize_url)
            print("2. Click \"Allow\" (you may have to log in first)")
            print("3. Copy the authorization code.")

            while True:
                auth_code = input(
                    "Enter authoritization code (Ctrl-C to quit): ")
                if not auth_code:
                    print("Authorization code cannot be empty")
                    continue
                break

            try:
                oauth_result = auth_flow.finish(auth_code)
            except Exception as e:
                print("Authorization code was invalid!")
                try_directly = True
                continue

            access_token = oauth_result.access_token

        # test out access token
        try:
            dropbox.Dropbox(access_token).users_get_current_account()
        except (dropbox.exceptions.BadInputError,
                dropbox.exceptions.AuthError) as e:
            print("Error using access token: %s" % (e, ))
            access_token = None
            try_directly = True
        else:
            break

    # Persist the working token: keyring first, falling back to a
    # passphrase-encrypted blob stored in the config file.
    if save_access_token and yes_no_input(
            "We're all connected. Do you want to save your credentials for future runs?",
            default_yes=True):
        keyring_user = ''.join(
            [random.choice("asdfghjklzxcvbnmqwertyuiop") for _ in range(24)])
        try:
            keyring.set_password(APP_NAME, keyring_user, access_token)
        except (KeyringError, RuntimeError) as e:
            print(
                "We need a passphrase to encrypt your access token before we can save it."
            )
            print(
                "Warning: Your access token passphrase must contain enough randomness to be resistent to hacking. You can read this for more info: https://blogs.dropbox.com/tech/2012/04/zxcvbn-realistic-password-strength-estimation/"
            )
            while True:
                pass_ = getpass.getpass("Enter new access token passphrase: ")
                pass2_ = getpass.getpass(
                    "Enter new access token passphrase (again): ")
                if pass_ != pass2_:
                    print("Passphrases didn't match, please re-enter")
                else:
                    del pass2_
                    break
            config.pop('keyring_user', None)
            config['access_token_privy'] = privy.hide(
                access_token.encode('utf-8'), pass_, server=False)
            del pass_
            save_config = True
        else:
            config.pop('access_token_privy', None)
            config['keyring_user'] = keyring_user
            save_config = True

    # One-time opt-in prompt for anonymous error reporting.
    if not config.get("asked_send_error_reports", False):
        if yes_no_input(
                "Would you like to help us improve %s by providing anonymous error reports?"
                % (APP_NAME, ),
                default_yes=True):
            config['send_error_reports'] = True
        config['asked_send_error_reports'] = True
        save_config = True

    if save_config:
        with open(config_file, "w") as f:
            json.dump(config, f)

    log.info("Starting %s...", APP_NAME)

    wrap_fs_errors = True
    if config.get('send_error_reports', False):
        try:
            version = pkg_resources.require("dbxfs")[0].version
        except Exception:
            log.warning("Failed to get version", exc_info=True)
            version = ''

        try:
            sentry_sdk.init(
                "https://[email protected]/1293235",
                release='%s@%s' % (APP_NAME, version),
                with_locals=False)
            wrap_fs_errors = True
        except Exception:
            log.warning("Failed to initialize sentry", exc_info=True)

    # On-disk cache for file data, under the per-user cache directory.
    cache_folder = os.path.join(appdirs.user_cache_dir(APP_NAME), "file_cache")
    with contextlib.suppress(FileExistsError):
        os.makedirs(cache_folder)

    def create_fs():
        # Layered filesystem: caching around Dropbox, plus platform tweaks
        # and optional error wrapping (for sentry reporting).
        fs = CachingFileSystem(DropboxFileSystem(access_token),
                               cache_folder=cache_folder)
        if sys.platform == 'darwin':
            fs = DisableQuickLookFileSystem(fs)

        if wrap_fs_errors:
            fs = WrapErrorsFileSystem(fs)
        return fs

    encrypted_folders = config.get("encrypted_folders",
                                   []) + args.encrypted_folders

    create_fs = safefs_wrap_create_fs(create_fs, encrypted_folders)

    if not os.path.exists(args.mount_point):
        if yes_no_input(
                "Mount point \"%s\" doesn't exist, do you want to create it?" %
            (args.mount_point, ),
                default_yes=True):
            os.makedirs(args.mount_point, exist_ok=True)

    return userspacefs.simple_main(args.mount_point, "dbxfs", create_fs, args)
示例#47
0
#!/usr/bin/env python3.7

from __future__ import print_function

import subprocess, sys, os, difflib, hashlib, shutil
import appdirs

from io import StringIO

# Branches whose worktrees this script operates on.
worktrees_to_look_at = "master", "develop", "factory"

# Local cache layout: <user cache>/Nuitka-Speedcenter/git/Nuitka.git
cache_dir = appdirs.user_cache_dir("Nuitka-Speedcenter", None)
git_dir = os.path.join(cache_dir, "git")
clone_dir = os.path.join(git_dir, "Nuitka.git")


def executeCommand(command):
    """Run *command* through the shell, aborting on non-zero exit status.

    Raises:
        RuntimeError: if the command exits with a non-zero status.
    """
    print("Execute: ", command)

    # FIX: the original used `assert 0 == os.system(command)`; asserts are
    # stripped under `python -O`, silently ignoring failures.  Check the
    # status explicitly and raise instead.
    status = os.system(command)
    if status != 0:
        raise RuntimeError("Command failed with exit status %r: %s" % (status, command))


def getNuitkaWorktreeDir(worktree):
    """Return the checkout directory for *worktree* under the cached git dir."""
    return os.path.join(git_dir, worktree)


def makedirs(path, mode=0o755):
    """Create *path* (and missing parents) if it does not already exist.

    FIX: the original guarded ``os.makedirs`` with ``os.path.isdir`` — a
    check-then-create race if two processes set up the cache concurrently.
    ``exist_ok=True`` makes the existing-directory case atomic and safe.
    """
    os.makedirs(path, mode, exist_ok=True)

示例#48
0
import re

from appdirs import user_cache_dir
from lxml import etree, objectify
from dogpile.cache import make_region

from . import namespaces
from .version import __version__

# On-disk dogpile cache, keyed per library version so a new release never
# reads entries written by an older one.
region = make_region().configure(
    'dogpile.cache.dbm',
    expiration_time=1209600,  # 14 days
    arguments={
        "filename":
        "{dir}/{version}.dbm".format(
            dir=user_cache_dir('anentropic', 'dirtyebay'),
            version='dirtyebay_{}'.format(__version__))
    })

# Matches "Version <n>" comment text and captures the number
# (presumably from WSDL/XSD comments — TODO confirm against callers).
VERSION_COMMENT = re.compile(r'\s*Version\s*(\d+)\s*')

# Prefix -> namespace URI map for lxml/objectify lookups.
NS_MAP = {
    'wsdl': namespaces.WSDL,
    'ebay': namespaces.EBAY,
    'xs': namespaces.XSD,
}


class VersionNotFound(Exception):
    """Raised when an expected version number cannot be located."""
    pass
示例#49
0
文件: acd_cli.py 项目: nabcos/acd_cli
    import requests.utils
    old_dau = requests.utils.default_user_agent

    def new_dau():
        return _app_name + '/' + __version__ + ' ' + old_dau()

    requests.utils.default_user_agent = new_dau
except:
    pass

# path settings

# Environment overrides for the cache/settings locations.
cp = os.environ.get('ACD_CLI_CACHE_PATH')
sp = os.environ.get('ACD_CLI_SETTINGS_PATH')

CACHE_PATH = cp if cp else appdirs.user_cache_dir(_app_name)
SETTINGS_PATH = sp if sp else appdirs.user_config_dir(_app_name)

# Cache creation is fatal: the tool cannot run without its cache directory.
if not os.path.isdir(CACHE_PATH):
    try:
        os.makedirs(CACHE_PATH, mode=0o0700)  # private data
    except OSError:
        logger.critical('Error creating cache directory "%s"' % CACHE_PATH)
        sys.exit(1)

# return values (process exit codes)

INVALID_ARG_RETVAL = 2  # doubles as flag
INIT_FAILED_RETVAL = 3
KEYB_INTERR_RETVAL = 4
示例#50
0
    ConsolidatedSortFilterDialog)
from GuiV2.GSMatch2_Core.utils import create_button
# from GSMatch.GSMatch_Core.ChromatogramDisplay import Display
from GuiV2.icons import get_icon
"""
# begin wxGlade: dependencies
import wx.propgrid
# end wxGlade
"""

# begin wxGlade: extracode
# end wxGlade

# TODO: Import this from a common location here and in data_viewer_server
# Determine cache directory for data_viewer_server
cache_dir = pathlib.Path(appdirs.user_cache_dir("GunShotMatch"))
# FIX: the original exists()/mkdir() pairs raced with concurrent creation
# and failed if the appdirs parent directory did not exist yet;
# parents/exist_ok handle both cases atomically.
cache_dir.mkdir(parents=True, exist_ok=True)
cache_dir = cache_dir / "data_viewer_cache"
cache_dir.mkdir(exist_ok=True)


class DataViewer(wx.Panel, Base.NotebookToolsMixin):
    def __init__(self,
                 parent,
                 id=wx.ID_ANY,
                 pos=wx.DefaultPosition,
                 size=wx.DefaultSize,
                 style=0,
                 name="DataViewer"):
示例#51
0
import time
from tempfile import mkdtemp, NamedTemporaryFile
try:
    from urllib.request import urlopen, urlretrieve, urlcleanup
except:
    from urllib import urlopen, urlretrieve, urlcleanup

from bucketcache import Bucket
from pyfaidx import Fasta
from appdirs import user_cache_dir

from genomepy import exceptions
from genomepy.utils import filter_fasta
from genomepy.__about__ import __version__

# Cache is namespaced per genomepy version — presumably so data written by
# older releases is never reused (TODO confirm).
my_cache_dir = os.path.join(user_cache_dir("genomepy"), __version__)
# Create .cache dir if it does not exist
if not os.path.exists(my_cache_dir):
    os.makedirs(my_cache_dir)

# bucketcache store; entries expire after 7 days.
cached = Bucket(my_cache_dir, days=7)


class ProviderBase(object):
    """Provider base class.

    Use to get a list of available providers:
    >>> ProviderBase.list_providers()
    ['UCSC', 'NCBI', 'Ensembl']

    Create a provider:
示例#52
0
def compile(source,
            nvcc="nvcc",
            options=None,
            keep=False,
            no_extern_c=False,
            arch=None,
            code=None,
            cache_dir=None,
            include_dirs=[],
            target="cubin"):
    """Compile CUDA *source* with nvcc and return the built module bytes.

    *target* selects the output container ("cubin", "ptx" or "fatbin").
    *arch*/*code* pass through to nvcc's -arch/-code; when *arch* is None it
    is derived from the current CUDA context, if any.  Results are cached in
    *cache_dir* (default: per-user cache; disabled by PYCUDA_DISABLE_CACHE
    or while CUDA debugging is on).

    NOTE: the mutable default for *include_dirs* is kept for interface
    compatibility; it is safe because the list is only rebound, never
    mutated, below.
    """
    assert target in ["cubin", "ptx", "fatbin"]

    if not no_extern_c:
        source = 'extern "C" {\n%s\n}\n' % source

    if options is None:
        options = DEFAULT_NVCC_FLAGS

    # Copy so the caller's list (or the shared default) is never mutated.
    options = options[:]
    if arch is None:
        from pycuda.driver import Error
        try:
            from pycuda.driver import Context
            arch = "sm_%d%d" % Context.get_device().compute_capability()
        except Error:
            # No active context: let nvcc pick its default architecture.
            pass

    from pycuda.driver import CUDA_DEBUGGING
    if CUDA_DEBUGGING:
        cache_dir = False
        keep = True
        options.extend(["-g", "-G"])

    if "PYCUDA_CACHE_DIR" in os.environ and cache_dir is None:
        cache_dir = os.environ["PYCUDA_CACHE_DIR"]

    if "PYCUDA_DISABLE_CACHE" in os.environ:
        cache_dir = False

    if cache_dir is None:
        from os.path import join
        import appdirs
        cache_dir = os.path.join(appdirs.user_cache_dir("pycuda", "pycuda"),
                                 "compiler-cache-v1")

        from os import makedirs
        try:
            makedirs(cache_dir)
        except OSError as e:
            from errno import EEXIST
            if e.errno != EEXIST:
                raise

    if arch is not None:
        options.extend(["-arch", arch])

    if code is not None:
        options.extend(["-code", code])

    # Select 32/64-bit code generation to match the host interpreter.
    # FIX: the darwin branch used `sys.maxint`, which was removed in
    # Python 3 and raised AttributeError there; `sys.maxsize` is the
    # portable equivalent (the win32 branches already used it).
    if 'darwin' in sys.platform and sys.maxsize == 9223372036854775807:
        options.append('-m64')
    elif 'win32' in sys.platform and sys.maxsize == 9223372036854775807:
        options.append('-m64')
    elif 'win32' in sys.platform and sys.maxsize == 2147483647:
        options.append('-m32')

    include_dirs = include_dirs + [_find_pycuda_include_path()]

    for i in include_dirs:
        options.append("-I" + i)

    return compile_plain(source, options, keep, nvcc, cache_dir, target)
示例#53
0
# Optionally enable on-the-fly Cython compilation; missing pyximport is fine.
try:
    import pyximport

    pyximport.install(language_level=3)
    del pyximport
except ImportError:
    pass

import appdirs
import configparser  # noqa
import pathlib  # noqa

# Per-user cache directory for generated output; creation failures are
# tolerated here and presumably surface later when the path is used.
output_path = pathlib.Path(appdirs.user_cache_dir("supriya", "supriya"))
if not output_path.exists():
    try:
        output_path.mkdir(parents=True, exist_ok=True)
    except IOError:
        pass

# Default configuration, written to the per-user config dir on first run.
config = configparser.ConfigParser()
config.read_dict({"core": {"editor": "vim", "scsynth_path": "scsynth"}})
config_path = pathlib.Path(appdirs.user_config_dir("supriya", "supriya"))
config_path = config_path / "supriya.cfg"
if not config_path.exists():
    try:
        config_path.parent.mkdir(parents=True, exist_ok=True)
        with config_path.open("w") as file_pointer:
            config.write(file_pointer, True)
    except IOError:
        pass
with config_path.open() as file_pointer:
示例#54
0
import os
import appdirs

# Package install directory; static site data ships beside the code.
lib_dir = os.path.abspath(os.path.dirname(__file__))
# Per-user data directory for downloaded and derived data.
data_dir = appdirs.user_data_dir(appname='calval', appauthor='satellogic')
shapes_dir = lib_dir + '/site_data'
scenes_dir = data_dir + '/scenes'
dl_dir = data_dir + '/downloads'
normalized_dir = data_dir
cache_dir = appdirs.user_cache_dir(appname='calval', appauthor='satellogic')
示例#55
0
def get_cache_dir():
    """Return the application's per-user cache directory, creating it on demand."""
    path = user_cache_dir(appname=_DIR_APP_NAME)
    os.makedirs(path, exist_ok=True)
    return path
示例#56
0
def cache_dir():
    """Resolve the cache directory.

    A non-empty GIT_AUTOSHARE_CACHE_DIR environment variable wins;
    otherwise fall back to the platform's per-user cache location.
    """
    override = os.environ.get("GIT_AUTOSHARE_CACHE_DIR")
    if override:
        return override
    return appdirs.user_cache_dir(APP_NAME)
示例#57
0
import appdirs
import prompt_toolkit as pt
from prompt_toolkit.auto_suggest import AutoSuggestFromHistory
from prompt_toolkit.completion import WordCompleter, NestedCompleter
from prompt_toolkit.history import FileHistory
from prompt_toolkit.styles import style_from_pygments_cls
from pygments.styles import get_style_by_name

from .. import langref
from .base import BaseRepl
from ..extra.pygments import RollitLexer
from ..runtime import context as curent_context

# Nothing is re-exported from this module.
__all__ = []

# REPL cache files (e.g. prompt history) live under the per-user cache dir.
cachedir = pathlib.Path(appdirs.user_cache_dir('rollit')) / 'repl'
cachedir.mkdir(parents=True, exist_ok=True)


class ScopeCompleter(WordCompleter):
    """
    """
    def __init__(self, runner, scope_attr):
        # No static word list: completions are produced dynamically from the
        # runner's current scope (see the `words` property below).
        super().__init__(None)
        self.runner = runner
        self.scope_attr = scope_attr

    @property
    # pylint: disable=missing-function-docstring
    def words(self):
        if curent_context:
示例#58
0
import subprocess
import sys
import re
import shutil

import prompter
import yaml

from ocdeployer.utils import object_merge, oc, load_cfg_file, get_routes, switch_to_project
from ocdeployer.secrets import SecretImporter
from ocdeployer.deploy import DeployRunner

# Module-level logging setup; the noisy `sh` library is silenced entirely.
log = logging.getLogger("ocdeployer")
logging.basicConfig(level=logging.INFO)
logging.getLogger("sh").setLevel(logging.CRITICAL)
# Per-user cache directory for this tool.
appdirs_path = pathlib.Path(appdirs.user_cache_dir(appname="ocdeployer"))

# click-style CLI settings: respond to both -h and --help.
CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"])


def wipe(no_confirm, project, label):
    extra_msg = ""
    if label:
        extra_msg = " with label '{}'".format(label)

    if not no_confirm and prompter.yesno(
            "I'm about to delete everything in project '{}'{}.  Continue?".
            format(project, extra_msg),
            default="no",
    ):
        sys.exit(0)
    """
    if name not in os.environ:
        return False
    return os.environ.get(name).lower() not in ("0", "false", "no", "off")


PIPENV_IS_CI = bool("CI" in os.environ or "TF_BUILD" in os.environ)

# HACK: Prevent invalid shebangs with Homebrew-installed Python:
# https://bugs.python.org/issue22490
os.environ.pop("__PYVENV_LAUNCHER__", None)

# Load patched pip instead of system pip
os.environ["PIP_SHIMS_BASE_MODULE"] = fs_str("pipenv.patched.notpip")

PIPENV_CACHE_DIR = os.environ.get("PIPENV_CACHE_DIR", user_cache_dir("pipenv"))
"""Location for Pipenv to store it's package cache.

Default is to use appdir's user cache directory.
"""

PIPENV_COLORBLIND = bool(os.environ.get("PIPENV_COLORBLIND"))
"""If set, disable terminal colors.

Some people don't like colors in their terminals, for some reason. Default is
to show colors.
"""

# Tells Pipenv which Python to default to, when none is provided.
PIPENV_DEFAULT_PYTHON_VERSION = os.environ.get("PIPENV_DEFAULT_PYTHON_VERSION")
"""Use this Python version when creating new virtual environments by default.
示例#60
0
from mnamer.__version__ import VERSION

# Public surface of this constants module.
__all__ = [
    "CACHE_PATH",
    "CURRENT_YEAR",
    "DEPRECATED",
    "IS_DEBUG",
    "SUBTITLE_CONTAINERS",
    "SYSTEM",
    "USAGE",
    "VERSION",
    "VERSION_MAJOR",
]

# Cache path is versioned per interpreter minor version (e.g. "mnamer-py3.8").
CACHE_PATH = Path(
    user_cache_dir(),
    f"mnamer-py{version_info.major}.{version_info.minor}").absolute()

CURRENT_YEAR = datetime.now().year

# Setting names that are deprecated — presumably ignored or warned about
# elsewhere (TODO confirm against the settings handler).
DEPRECATED = {"no_replace", "replacements"}

# True when running under a debugger/tracer (sys.gettrace is active).
IS_DEBUG = gettrace() is not None

SUBTITLE_CONTAINERS = [".srt", ".idx", ".sub"]

SYSTEM = {
    "date": date.today(),
    "platform": platform(),
    "arguments": argv[1:],
    "cache location": f"{CACHE_PATH}.sql",