Example #1
def get_github_api():
    """Create a GitHub API instance.

    Returns:
        httplib2.Http: GitHub HTTP session
    """
    cache_dir = user_cache('hubugs')
    # Ensure the cache directory exists before writing the tag file
    os.makedirs(cache_dir, exist_ok=True)
    with open('{}/CACHEDIR.TAG'.format(cache_dir), 'w') as f:
        f.writelines([
            'Signature: 8a477f597d28d172789f06886806bc55\n',
            '# This file is a cache directory tag created by hubugs.\n',
            '# For information about cache directory tags, see:\n',
            '#   http://www.brynosaurus.com/cachedir/\n',
            ])
    return httplib2.Http(cache_dir, ca_certs=CA_CERTS)
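Both this snippet and the next one write the same CACHEDIR.TAG marker inline. If you reuse the pattern, the tag-writing is easy to factor out; the helper below is a minimal sketch of that refactoring (the write_cachedir_tag name and the app_name parameter are invented here, and are not part of hubugs or rdial):

import os


def write_cachedir_tag(cache_dir, app_name):
    """Tag cache_dir per the spec at http://www.brynosaurus.com/cachedir/.

    Hypothetical helper; the snippets on this page inline this logic.
    """
    os.makedirs(cache_dir, exist_ok=True)
    with open(os.path.join(cache_dir, 'CACHEDIR.TAG'), 'w') as f:
        f.writelines([
            'Signature: 8a477f597d28d172789f06886806bc55\n',
            f'# This file is a cache directory tag created by {app_name}.\n',
            '# For information about cache directory tags, see:\n',
            '#   http://www.brynosaurus.com/cachedir/\n',
        ])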
Example #2
    def read(
        __directory: str, backup: bool = True, write_cache: bool = True
    ) -> 'Events':
        """Read and parse database.

        .. note::

            Assumes a new :obj:`Events` object should be created if the
            directory is missing.

        Args:
            __directory: Location to read database files from
            backup: Whether to create backup files
            write_cache: Whether to write cache files

        Returns:
            Parsed events database

        """
        if not os.path.exists(__directory):
            return Events(backup=backup)
        events = []
        xdg_cache_dir = xdg_basedir.user_cache('rdial')
        cache_dir = os.path.join(xdg_cache_dir, __directory.replace('/', '_'))
        if write_cache and not os.path.isdir(cache_dir):
            os.makedirs(cache_dir)
            with click.open_file(f'{xdg_cache_dir}/CACHEDIR.TAG', 'w') as f:
                f.writelines([
                    'Signature: 8a477f597d28d172789f06886806bc55\n',
                    '# This file is a cache directory tag created by rdial.\n',
                    '# For information about cache directory tags, see:\n',
                    '#   http://www.brynosaurus.com/cachedir/\n',
                ])
        for fname in glob.glob(f'{__directory}/*.csv'):
            task = os.path.basename(fname)[:-4]
            cache_file = os.path.join(cache_dir, task) + '.pkl'
            evs = None
            if os.path.exists(cache_file) and utils.newer(cache_file, fname):
                try:
                    # UnicodeDecodeError must be caught for the Python 2 to
                    # 3 upgrade path.
                    with click.open_file(cache_file, 'rb') as f:
                        cache = pickle.load(f)
                except (
                    pickle.UnpicklingError, EOFError, ImportError,
                    UnicodeDecodeError
                ):
                    pass
                else:
                    if isinstance(cache, dict) and cache['version'] == 1:
                        evs = cache['events']
                    else:
                        os.unlink(cache_file)
            if evs is None:
                with click.open_file(fname, encoding='utf-8') as f:
                    # We're not using the prettier DictReader here as it is
                    # *significantly* slower for large data files (~5x).
                    reader = csv.reader(f, dialect=RdialDialect)
                    if next(reader) != FIELDS:
                        raise ValueError(
                            'Invalid data {!r}'.format(
                                click.format_filename(fname)
                            )
                        )
                    evs = [
                        Event(task, *row)  # pylint: disable=star-args
                        for row in reader
                    ]
                if write_cache:
                    with click.open_file(cache_file, 'wb', atomic=True) as f:
                        pickle.dump({
                            'version': 1,
                            'events': evs
                        }, f, pickle.HIGHEST_PROTOCOL)
            events.extend(evs)
        return Events(sorted(events, key=operator.attrgetter('start')))
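The cache-validity check in this example leans on rdial's utils.newer, which is not shown on this page. Judging from the call site, a plain modification-time comparison would satisfy it; the following is a sketch of that assumption, not rdial's actual implementation:

import os


def newer(fname, reference):
    """Return True if fname was modified more recently than reference.

    Sketch of what the utils.newer() call above appears to need;
    rdial's real helper may differ.
    """
    return os.stat(fname).st_mtime > os.stat(reference).st_mtime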
Example #3
def test_cache_no_home(monkeypatch):
    monkeypatch.setattr('os.environ', {})
    assert xdg_basedir.user_cache('jnrbase') == '/.cache/jnrbase'
Example #4
def test_cache_macos(monkeypatch):
    monkeypatch.setattr('sys.platform', 'darwin')
    assert '/Caches' in xdg_basedir.user_cache('jnrbase')
Example #5
def test_cache_no_args(monkeypatch):
    monkeypatch.setenv('XDG_CACHE_HOME', '~/.xdg/cache')
    assert '/.xdg/cache/jnrbase' in xdg_basedir.user_cache('jnrbase')
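Read together, these three tests pin down the contract of xdg_basedir.user_cache: honour $XDG_CACHE_HOME when set, fall back to $HOME/.cache (or /.cache with an empty environment), and use ~/Library/Caches on macOS. A function along the following lines would pass all three; this is a sketch inferred from the assertions, not jnrbase's actual source:

import os
import sys


def user_cache(pkg):
    """Return a per-package cache directory path.

    Inferred from the tests above; jnrbase's implementation
    may differ in detail.
    """
    if sys.platform == 'darwin':
        # macOS convention, as test_cache_macos expects
        user_dir = os.path.expanduser('~/Library/Caches')
    else:
        # $XDG_CACHE_HOME, else $HOME/.cache, else /.cache
        user_dir = os.environ.get(
            'XDG_CACHE_HOME',
            os.path.join(os.environ.get('HOME', '/'), '.cache'))
    return os.path.join(user_dir, pkg)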
Example #6
def test_cache_macos():
    expect(xdg_basedir.user_cache('jnrbase')).contains('/Caches')
Example #7
def test_cache_no_home():
    with patch_env(clear=True):
        expect(xdg_basedir.user_cache('jnrbase')) == '/.cache/jnrbase'
Example #8
def test_cache_no_args():
    with patch_env({'XDG_CACHE_HOME': '~/.xdg/cache'}):
        expect(xdg_basedir.user_cache('jnrbase')).contains(
            '/.xdg/cache/jnrbase'
        )
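Examples #6 through #8 use expect, presumably from the expecter package, in place of bare assert statements. The patch_env helper they rely on is not shown on this page; one common way to provide it, matching both the patch_env(clear=True) and patch_env({...}) call sites above, is as a thin wrapper over unittest.mock.patch.dict (an assumption, not necessarily jnrbase's own helper):

import os
from functools import partial
from unittest import mock

# Hypothetical definition of the patch_env helper used above:
# a context manager that temporarily patches os.environ
patch_env = partial(mock.patch.dict, os.environ)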