Code example #1
File: parts.py Project: canonical/charmcraft
    def __init__(
        self,
        all_parts: Dict[str, Any],
        *,
        work_dir: pathlib.Path,
        project_dir: pathlib.Path,
        project_name: str,
        ignore_local_sources: List[str],
    ):
        self._all_parts = all_parts.copy()
        self._project_dir = project_dir

        # set the cache dir for parts package management
        cache_dir = BaseDirectory.save_cache_path("charmcraft")

        try:
            self._lcm = LifecycleManager(
                {"parts": all_parts},
                application_name="charmcraft",
                work_dir=work_dir,
                cache_dir=cache_dir,
                ignore_local_sources=ignore_local_sources,
                project_name=project_name,
            )
        except PartsError as err:
            raise CraftError(f"Error bootstrapping lifecycle manager: {err}") from err
Code example #2
File: mdx_dot.py Project: jrd/markdown-dot
 def run(self, lines):
     """ Match and store Fenced Code Blocks in the HtmlStash. """
     print("text reading")
     text = "\n".join(lines)
     print("text read")
     while 1:
         m = FENCED_BLOCK_RE.search(text)
         if m:
             out_file = m.group('out')
             code = m.group('code')
             show = True
             if out_file[0] == '!':
                 show = False
                 out_file = out_file[1:]
             ext = os.path.splitext(out_file)[1][1:].strip()
             h_path = md5.new(out_file.encode('utf8')).hexdigest()
             h_code = md5.new(code.encode('utf8')).hexdigest()
             cache = BaseDirectory.save_cache_path('markdown-dot') + h_path
             if self.should_generate(out_file, cache, h_code):
                 self.ensure_dir_exists(out_file)
                 print("generate " + out_file)
                 dot = subprocess.Popen(['dot', '-T', ext, '-o', out_file], bufsize=1, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                 print("".join(dot.communicate(input=code.encode('utf8'))))
             else:
                 print("pass " + out_file)
             if show:
                 img = "![%s](%s)" % (os.path.basename(out_file), out_file)
                 text = '%s\n%s\n%s' % (text[:m.start()], img, text[m.end():])
             else:
                 text = '%s\n%s' % (text[:m.start()], text[m.end():])
         else:
             break
     return text.split("\n")
Code example #3
    def __init__(self, release):
        self.release = release
        self.name = 'apt-venv'
        self.config = self._load_config_from_files()

        self.distro = None
        for distro in self.config['distributions']:
            if self.release in self.config['distributions'][distro]['releases']:
                self.distro = distro
        if not self.distro:
            base = "Release \"{}\" not valid. ".format(self.release)
            if not self.release:
                base = "No release declared. "
            all_releases = []
            for distro in sorted(self.config['distributions'].keys()):
                releases = self.config['distributions'][distro]['releases']
                all_releases.append(" [%s] %s" % (distro, ' - '.join(releases)))
            raise ValueError(base +
                             "Please specify one of:\n%s" %
                             '\n'.join(all_releases))
        self.config_path = _BaseDirectory.save_config_path(self.name)
        self.cache_path = _BaseDirectory.save_cache_path(self.name)
        self.data_path = _BaseDirectory.save_data_path(self.name)
        self.config_path = _os.path.join(self.config_path, self.release)
        self.cache_path = _os.path.join(self.cache_path, self.release)
        self.data_path = _os.path.join(self.data_path, self.release)

        self.bashrc = _os.path.join(self.config_path, "bash.rc")
        self.sourceslist = _os.path.join(self.config_path, "sources.list")
        self.aptconf = _os.path.join(self.config_path, "apt.conf")
Code example #4
    def _show_captcha(self):
        xdg_captcha_cache = './cap.png' if (self.platform == 'win32') \
            else BaseDirectory.save_cache_path('slt-usage') + '/cap.png'
        if self.platform != 'win32':
            self.spinner.start()
        self.browser.get(_SLT_URL)
        elem = self.browser.find_element_by_css_selector('tr > td > img')
        with open(xdg_captcha_cache, 'w+b') as f:
            f.write(elem.screenshot_as_png)

        if self.platform != 'win32':
            self.spinner.stop()

        try:
            call([
                'termpix', xdg_captcha_cache, '--true-colour', '--width', '97',
                '--height', '19'
            ])
        except FileNotFoundError:
            init()
            print(
                Fore.RED + Style.BRIGHT +
                '\nInstall termpix (https://github.com/hopey-dishwasher/termpix)'
                ' to view captcha inline on the terminal!')
            Image.open(xdg_captcha_cache).show()
Code example #5
File: __init__.py Project: alessio/apt-venv
    def __init__(self, release):
        self.release = release
        self.name = 'apt-venv'
        self.config = _loadJSON(open('/etc/apt-venv.conf'))

        self.distro = None
        for distro in self.config['distributions']:
            if self.release in self.config['distributions'][distro]['releases']:
                self.distro = distro
        if not self.distro:
            base = "Release \"{}\" not valid. ".format(self.release)
            if not self.release:
                base = "No release declared. "
            all_releases = []
            for distro in sorted(self.config['distributions'].keys()):
                releases = self.config['distributions'][distro]['releases']
                all_releases.append(" [%s] %s" % (distro, ' - '.join(releases)))
            raise ValueError(base +
                             "Please specify one of:\n%s" %
                             '\n'.join(all_releases))
        self.config_path = _BaseDirectory.save_config_path(self.name)
        self.cache_path = _BaseDirectory.save_cache_path(self.name)
        self.data_path = _BaseDirectory.save_data_path(self.name)
        self.config_path = _os.path.join(self.config_path, self.release)
        self.cache_path = _os.path.join(self.cache_path, self.release)
        self.data_path = _os.path.join(self.data_path, self.release)

        self.bashrc = _os.path.join(self.config_path, "bash.rc")
        self.sourceslist = _os.path.join(self.config_path, "sources.list")
        self.aptconf = _os.path.join(self.config_path, "apt.conf")
Code example #6
def main():
    cache_directory = BaseDirectory.save_cache_path('ob_xdg_apps')
    xml_file = os.path.join(cache_directory, 'menu.xml')

    appdirs = (os.path.join(datadir, 'applications') for datadir in
               BaseDirectory.xdg_data_dirs)

    if os.path.isfile(xml_file):
        updated = False
        for appdir in appdirs:
            if os.path.isdir(appdir):
                if os.stat(appdir).st_ctime > os.stat(xml_file).st_ctime:
                    updated = True
                    break

        if not updated:
            with open(xml_file) as f:
                print f.read()
            return

    icon_theme = gtk.icon_theme_get_default()

    menu = etree.Element('openbox_pipe_menu')
    menu_accumulator = MenuAccumulator()
    for desktop_entry in get_desktop_entries():
        menu_accumulator.add_entry(desktop_entry)
    menu_accumulator.finalize()

    categories = sorted(menu_accumulator.structure.keys())

    for category in categories:
        submenu_id = '{}-submenu'.format(category)
        submenu = etree.SubElement(menu, 'menu',
                                   {'id': submenu_id, 'label': category})

        for desktop_entry in menu_accumulator.structure[category]:
            name = desktop_entry.getName()
            item_attributes = {'label': name.decode('utf-8')}
            entry_icon = desktop_entry.getIcon()
            if os.path.isfile(entry_icon):
                item_attributes['icon'] = entry_icon
            else:
                icon_name = os.path.splitext(entry_icon)[0]
                icon_info = icon_theme.lookup_icon(icon_name, 48, 0)
                if icon_info is not None:
                    item_attributes['icon'] = icon_info.get_filename()
            item = etree.SubElement(submenu, 'item', item_attributes)
            action = etree.SubElement(item, 'action', {'name': 'Execute'})
            command = etree.SubElement(action, 'command')
            command.text = desktop_entry.getExec()

            if desktop_entry.getStartupNotify():
                startup_notify = etree.SubElement(action, 'startupnotify')
                enabled = etree.SubElement(startup_notify, 'enabled')
                enabled.text = 'yes'

    xml = etree.tostring(menu, pretty_print=True)
    with open(xml_file, 'w') as f:
        f.write(xml)
    print xml
Code example #7
def main(steam_path=None, mountpoint=None):

    # Setup XDG directories
    config_dir = BaseDirectory.save_config_path('steamfuse')
    data_dir = BaseDirectory.save_data_path('steamfuse')
    cache_dir = BaseDirectory.save_cache_path('steamfuse')

    # Check/Set path to steam installation
    if steam_path is None:
        steam_path = os.path.expanduser('~/.local/share/Steam')
        if not os.path.exists(steam_path):
            steam_path = os.path.expanduser('~/.var/app/com.valvesoftware.Steam/data/Steam/')
            if not os.path.exists(steam_path):
                print('Could not find Steam install dir. Specify as argument.')
                return -1

    # Find libraries and installed games
    main_library = os.path.join(steam_path, 'steamapps')
    libraryfolders_vdf = vdf.load(open(os.path.join(main_library, 'libraryfolders.vdf'), 'r'))
    more_libraries = [
        os.path.join(folder['path'], 'steamapps') for key, folder in libraryfolders_vdf['libraryfolders'].items()
        if key.isdigit() and int(key) > 0
    ]

    # Setup mergerfs mount
    mergerfs_path = os.path.join(data_dir, 'mergerfs')
    if not os.path.exists(mergerfs_path):
        os.mkdir(mergerfs_path)
    proc = subprocess.Popen(
        ['mergerfs', f'{main_library}:{":".join(more_libraries)}', f'{mergerfs_path}'],
        stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False, text=True)
    out, err = proc.communicate()
    if err:
        print(err)
        return -1

    # Download applist from Steam
    applist = os.path.join(cache_dir, 'applist.json')
    if not os.path.exists(applist):
        url = 'https://api.steampowered.com/ISteamApps/GetAppList/v2/'
        res = requests.get(url, allow_redirects=True)
        open(applist, 'wb').write(res.content)

    if mountpoint is None:
        mountpoint = os.path.join(data_dir, 'SteamFuse')
    if not os.path.exists(mountpoint):
        os.mkdir(mountpoint)
    try:
        FUSE(SteamFuseTree(mergerfs_path, applist), mountpoint=mountpoint, nothreads=True, foreground=True)
    except RuntimeError:
        pass

    proc = subprocess.Popen(
        ['fusermount', '-u', f'{mergerfs_path}'],
        stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False, text=True)
    out, err = proc.communicate()
    if err:
        print(err)
        return -1
Code example #8
File: __init__.py Project: Kienyew/Skimmed-Wudao
def get_default_database() -> DatabaseManager:
    global __running_database

    if __running_database is None:
        db_path = Path(BaseDirectory.save_cache_path(
            'skimmed-wudao')) / 'wudao-database.sqlite3'
        __running_database = DatabaseManager(db_path)

    return __running_database
Code example #9
 def test_save_cache_path(self):
     tmpdir = tempfile.mkdtemp()
     try:
         environ['XDG_CACHE_HOME'] = tmpdir
         reload(BaseDirectory)
         datapath = BaseDirectory.save_cache_path("foo")
         self.assertEqual(datapath, os.path.join(tmpdir, "foo"))
     finally:
         shutil.rmtree(tmpdir)
Code example #10
File: test-basedirectory.py Project: zester/pyxdg
 def test_save_cache_path(self):
     tmpdir = tempfile.mkdtemp()
     try:
         environ['XDG_CACHE_HOME'] = tmpdir
         reload(BaseDirectory)
         datapath = BaseDirectory.save_cache_path("foo")
         self.assertEqual(datapath, os.path.join(tmpdir, "foo"))
     finally:
         shutil.rmtree(tmpdir)
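The two tests above (examples #9 and #10) capture the contract that the rest of these snippets rely on: save_cache_path joins the resource name onto $XDG_CACHE_HOME (falling back to ~/.cache) and creates that directory if it is missing. A minimal standalone sketch of the same behaviour, using "myapp" as a purely illustrative resource name:

import os
from xdg import BaseDirectory

# save_cache_path("myapp") returns $XDG_CACHE_HOME/myapp
# (usually ~/.cache/myapp) and creates the directory if needed.
cache_dir = BaseDirectory.save_cache_path("myapp")  # "myapp" is a placeholder name
assert os.path.isdir(cache_dir)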
Code example #11
File: directory.py Project: nsanegit/morituri
 def getCache(self):
     try:
         from xdg import BaseDirectory
         path = BaseDirectory.save_cache_path('morituri')
         self.info('Using XDG, cache directory is %s' % path)
     except ImportError:
         path = os.path.expanduser('~/.morituri/cache')
         if not os.path.exists(path):
             os.makedirs(path)
         self.info('Not using XDG, cache directory is %s' % path)
     return path
Code example #12
File: utils.py Project: cherti/blinky
def get_cache_dir():
    from xdg import BaseDirectory

    if os.path.isdir(
            os.path.expanduser('~/.blinky')):  # backward compatibility
        print(
            " DEPRECATION WARNING: support for ~/.blinky will be removed in future versions, call migrate-blinky-dirs.py to migrate and silence this message"
        )
        return os.path.expanduser('~/.blinky/cache')

    return BaseDirectory.save_cache_path('blinky')
Code example #13
File: logger.py Project: rickard-von-essen/goopg
class GoopgLogger(object):
    """
    A simple class wich configure the basic logger
    """
    filelog = os.path.join(BaseDirectory.save_cache_path('goopg'), 'log')
    logging.basicConfig(
        filename=filelog,
        filemode='a',
        level=logging.ERROR,
        format='%(asctime)s:%(levelname)s:%(name)s:%(message)s')
    # redirect stderr to logger
    sys.stderr = StreamToLogger(logging.getLogger('STDERR'), logging.ERROR)
Code example #14
File: parts.py Project: snapcore/snapcraft
    def __init__(
        self,
        all_parts: Dict[str, Any],
        *,
        work_dir: pathlib.Path,
        assets_dir: pathlib.Path,
        base: str,
        package_repositories: List[Dict[str, Any]],
        parallel_build_count: int,
        part_names: Optional[List[str]],
        adopt_info: Optional[str],
        parse_info: Dict[str, List[str]],
        project_name: str,
        project_vars: Dict[str, str],
        extra_build_snaps: Optional[List[str]] = None,
        target_arch: str,
    ):
        self._work_dir = work_dir
        self._assets_dir = assets_dir
        self._package_repositories = package_repositories
        self._part_names = part_names
        self._adopt_info = adopt_info
        self._parse_info = parse_info
        self._all_part_names = [*all_parts]

        emit.progress("Initializing parts lifecycle")

        # set the cache dir for parts package management
        cache_dir = BaseDirectory.save_cache_path("snapcraft")

        if target_arch == "all":
            target_arch = get_host_architecture()

        platform_arch = convert_architecture_deb_to_platform(target_arch)

        try:
            self._lcm = craft_parts.LifecycleManager(
                {"parts": all_parts},
                application_name="snapcraft",
                work_dir=work_dir,
                cache_dir=cache_dir,
                arch=platform_arch,
                base=base,
                ignore_local_sources=["*.snap"],
                extra_build_snaps=extra_build_snaps,
                parallel_build_count=parallel_build_count,
                project_name=project_name,
                project_vars_part_name=adopt_info,
                project_vars=project_vars,
            )
        except craft_parts.PartsError as err:
            raise errors.PartsLifecycleError(str(err)) from err
Code example #15
File: mutt_ldap.py Project: jpalus/mutt-ldap
    def _get_cache_path(self):
        "Get the cache file path"

        # Some versions of pyxdg don't have save_cache_path (0.20 and older)
        # See: https://bugs.freedesktop.org/show_bug.cgi?id=26458
        if _xdg_basedirectory and "save_cache_path" in dir(_xdg_basedirectory):
            path = _xdg_basedirectory.save_cache_path("")
        else:
            self._log_xdg_import_error()
            path = _os_path.expanduser(_os_path.join("~", ".cache"))
            if not _os_path.isdir(path):
                _os.makedirs(path)
        return _os_path.join(path, "mutt-ldap.json")
Code example #16
File: mutt_ldap.py Project: mvk/mutt-ldap
    def _get_cache_path(self):
        "Get the cache file path"

        # Some versions of pyxdg don't have save_cache_path (0.20 and older)
        # See: https://bugs.freedesktop.org/show_bug.cgi?id=26458
        if _xdg_basedirectory and 'save_cache_path' in dir(_xdg_basedirectory):
            path = _xdg_basedirectory.save_cache_path('')
        else:
            self._log_xdg_import_error()
            path = _os_path.expanduser(_os_path.join('~', '.cache'))
            if not _os_path.isdir(path):
                _os.makedirs(path)
        return _os_path.join(path, 'mutt-ldap.json')
Code example #17
File: directory.py Project: DoomHammer/morituri
    def getCache(self, name=None):
        try:
            from xdg import BaseDirectory
            path = BaseDirectory.save_cache_path('morituri')
            self.info('Using XDG, cache directory is %s' % path)
        except (ImportError, AttributeError):
            # save_cache_path was added in pyxdg 0.25
            path = os.path.join(os.path.expanduser('~'), '.morituri', 'cache')
            if not os.path.exists(path):
                os.makedirs(path)
            self.info('Not using XDG, cache directory is %s' % path)

        if name:
            path = os.path.join(path, name)
            if not os.path.exists(path):
                os.makedirs(path)

        return path
Code example #18
    def getCache(self, name=None):
        try:
            from xdg import BaseDirectory
            path = BaseDirectory.save_cache_path('morituri')
            self.info('Using XDG, cache directory is %s' % path)
        except (ImportError, AttributeError):
            # save_cache_path was added in pyxdg 0.25
            path = os.path.join(os.path.expanduser('~'), '.morituri', 'cache')
            if not os.path.exists(path):
                os.makedirs(path)
            self.info('Not using XDG, cache directory is %s' % path)

        if name:
            path = os.path.join(path, name)
            if not os.path.exists(path):
                os.makedirs(path)

        return path
Code example #19
    def _ensure_xdg_dirs(self):
        """Make sure we have the default resource.

        :return The path to the XDG resource.
        """
        cache_dir = None
        try:
            cache_dir = xdgBaseDir.save_cache_path(DEFAULT_XDG_RESOURCE,
                                                   DEFAULT_LAVA_TOOL_RESOURCE)
        except AttributeError:
            # python-xdg 0.19 (Ubuntu Precise)
            # FIXME this duplicates the semantics from the newer python-xdg
            cache_dir = os.path.join(xdgBaseDir.xdg_cache_home,
                                     DEFAULT_XDG_RESOURCE,
                                     DEFAULT_LAVA_TOOL_RESOURCE)
            if not os.path.exists(cache_dir):
                os.makedirs(cache_dir)

        return cache_dir
Code example #20
File: directory.py Project: dioltas/morituri
    def getReadCaches(self, name=None):
        paths = []

        try:
            from xdg import BaseDirectory
            path = BaseDirectory.save_cache_path('morituri')
            self.info('For XDG, read cache directory is %s' % path)
            paths.append(path)
        except ImportError:
            pass

        path = os.path.join(os.path.expanduser('~'), '.morituri', 'cache')
        if os.path.exists(path):
            self.info('From before XDG, read cache directory is %s' % path)
            paths.append(path)

        if name:
            paths = [os.path.join(p, name) for p in paths]

        return paths
Code example #21
    def download(self):
        """Download this dataset into the application cache directory.

        Short circuit if the dataset is "fixture," or if the dataset is already
        downloaded.

        :return: Nothing.
        """
        if self.name == 'fixture':
            return
        cache_dir = BaseDirectory.save_cache_path(XDG_RESOURCE)
        archive_url = DATASETS[self.name]
        archive_basename = os.path.basename(urlsplit(archive_url).path)
        archive_path = os.path.join(cache_dir, archive_basename)
        if os.path.exists(archive_path):
            return
        with open(archive_path, 'wb') as handle:
            # The chunk size of 256 bytes (2^8) is arbitrarily chosen.
            for chunk in requests.get(archive_url).iter_content(
                    chunk_size=256):
                handle.write(chunk)
Code example #22
    def getReadCaches(self, name=None):
        paths = []

        try:
            from xdg import BaseDirectory
            path = BaseDirectory.save_cache_path('morituri')
            self.info('For XDG, read cache directory is %s' % path)
            paths.append(path)
        except (ImportError, AttributeError):
            # save_cache_path was added in pyxdg 0.21
            pass

        path = os.path.join(os.path.expanduser('~'), '.morituri', 'cache')
        if os.path.exists(path):
            self.info('From before XDG, read cache directory is %s' % path)
            paths.append(path)

        if name:
            paths = [os.path.join(p, name) for p in paths]

        return paths
Code example #23
def cache_from_config(host, clear=False):
    """Setup caching from given host configuration.


    :param host: A dictionary with the host configuration.
    :type host: python:dict

    :param clear: Whether to clear already cached entries or not.
    :type clear: python:bool

    """
    log.debug("Enter: cache_from_config(host={!r}, clear={!r})".format(
        host, clear))

    if not host.get('use_cache'):
        return

    cache_d = BaseDirectory.save_cache_path(PROG_NAME)
    becmd.net.cache_setup(os.path.join(cache_d, host['host']), clear=clear)

    log.debug("Exit: cache_from_config(host={!r}, clear={!r}) -> None".format(
        host, clear))
Code example #24
File: mdx_dot.py Project: jrd/markdown-dot
 def run(self, lines):
     """ Match and store Fenced Code Blocks in the HtmlStash. """
     text = "\n".join(lines)
     while 1:
         m = FENCED_BLOCK_RE.search(text)
         if m:
             out_file = m.group('out')
             code = m.group('code')
             show = True
             if out_file[0] == '!':
                 show = False
                 out_file = out_file[1:]
             ext = os.path.splitext(out_file)[1][1:].strip()
             h_path = md5(out_file.encode('utf8')).hexdigest()
             h_code = md5(code.encode('utf8')).hexdigest()
             cache = os.path.join(
                 BaseDirectory.save_cache_path('markdown-dot'), h_path)
             if self.should_generate(out_file, cache, h_code):
                 self.ensure_dir_exists(out_file)
                 print("generate " + out_file)
                 dot = subprocess.Popen(['dot', '-T', ext, '-o', out_file],
                                        bufsize=1,
                                        stdin=subprocess.PIPE,
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.PIPE)
                 print(dot.communicate(input=code.encode('utf8'))[1])
                 with open(cache, 'w') as f:
                     f.write(h_code)
             else:
                 print("pass " + out_file)
             if show:
                 img = "![%s](%s)" % (os.path.basename(out_file), out_file)
                 text = '%s\n%s\n%s' % (text[:m.start()], img,
                                        text[m.end():])
             else:
                 text = '%s\n%s' % (text[:m.start()], text[m.end():])
         else:
             break
     return text.split("\n")
Code example #25
    def __init__(
        self,
        all_parts: Dict[str, Any],
        *,
        work_dir: pathlib.Path,
        ignore_local_sources: List[str],
    ):
        self._all_parts = all_parts.copy()

        # set the cache dir for parts package management
        cache_dir = BaseDirectory.save_cache_path("charmcraft")

        try:
            self._lcm = LifecycleManager(
                {"parts": all_parts},
                application_name="charmcraft",
                work_dir=work_dir,
                cache_dir=cache_dir,
                ignore_local_sources=ignore_local_sources,
            )
            self._lcm.refresh_packages_list()
        except PartsError as err:
            raise CommandError(err)
Code example #26
File: state.py Project: pedrosans/pocoy
You should have received a copy of the GNU General Public License
along with this program.  If not, see <https://www.gnu.org/licenses/>.
"""
from types import ModuleType
from xdg import BaseDirectory as Base
from xdg import DesktopEntry as Desktop
from typing import Dict, List
import os
import json

POCOY_DESKTOP = 'pocoy.desktop'
POCOY_PACKAGE = 'pocoy'
auto_start_dir = Base.save_config_path("autostart")
auto_start_file = os.path.join(auto_start_dir, POCOY_DESKTOP)
config_dir = Base.save_config_path(POCOY_PACKAGE)
cache_dir = Base.save_cache_path(POCOY_PACKAGE)
workspace_file = cache_dir + '/workspace.json'
decorations_file = cache_dir + '/decoration.json'
parameters_file = cache_dir + '/parameters.json'
loaded_parameters: Dict = None
loaded_workspaces: Dict = None
loaded_decorations: Dict = None
config_module: ModuleType = None
DEFAULT_PARAMETERS = {
    'position': 'bottom',
    'width': 800,
    'auto_hint': True,
    'auto_select_first_hint': False,
    'desktop_icon': 'light',
    'desktop_notifications': False,
    'window_manger_border': 0,
Code example #27
    url = full_repo_url
    param = {
        'path': path,
        'per_page': '100'
    }
    while url:
        r = session.get(
            url,
            params=param
        )
        yield r.json()
        url = r.links.get("next", {"url": False})["url"]
        param = {}

with closing(percache.Cache(
    os.path.join(BaseDirectory.save_cache_path("malucrawl_reportificate"), "cache")
)) as cache:

    @cache
    def get_commit_details(commit_url):
        return session.get(commit_url).json()

    @cache
    def count_words_in_tree(tree_url):
        return sum(
            map(
                lambda tree: blob_lacount(tree["url"]),
                itertools.ifilter(
                    lambda tree: tree["type"] == "blob" and fnmatchcase(tree["path"], valid_files),
                    session.get(tree_url, params={"recursive": 1}).json()["tree"]
                )
Code example #28
File: config.py Project: acerix/fatbot
import shutil
import pytoml

import ctypes
import platform
import sys

app_name = 'fatbot'

# define filesystem paths

from xdg import BaseDirectory
package_dir = os.path.dirname(os.path.realpath(__file__))
config_dir = BaseDirectory.save_config_path(app_name)
data_dir = BaseDirectory.save_data_path(app_name)
cache_dir = BaseDirectory.save_cache_path(app_name)
#runtime_dir = BaseDirectory.get_runtime_dir(app_name) # XDG_RUNTIME_DIR undefined in systemd?
runtime_dir = cache_dir

config_file = os.path.join(config_dir, 'config.toml')

# load config file

if not os.path.isfile(config_file):
    shutil.copyfile(os.path.join(package_dir, 'examples', 'config.toml'),
                    config_file)

with open(config_file) as config_file_object:
    settings = pytoml.load(config_file_object)

# copy version number to settings
Code example #29
File: reportificate.py Project: graingert/malucrawl
    url = full_repo_url
    param = {
        'path': path,
        'per_page': '100'
    }
    while url:
        r = session.get(
            url,
            params=param
        )
        yield r.json()
        url = r.links.get("next", {"url": False})["url"]
        param = {}

with closing(percache.Cache(
    os.path.join(BaseDirectory.save_cache_path("malucrawl_reportificate"), "cache")
)) as cache:

    @cache
    def get_commit_details(commit_url):
        return session.get(commit_url).json()

    @cache
    def count_words_in_tree(tree_url):
        return sum(
            map(
                lambda tree: blob_lacount(tree["url"]),
                itertools.ifilter(
                    lambda tree: tree["type"] == "blob" and fnmatchcase(tree["path"], valid_files),
                    session.get(tree_url, params={"recursive": 1}).json()["tree"]
                )
Code example #30
File: cache.py Project: tymofij/zapys
#!/usr/bin/env python
# -*- coding: UTF-8 -*-

import os
from xdg import BaseDirectory


cache_dir = BaseDirectory.save_cache_path('zapys')

def put(server, id, text):
    if not os.path.exists(os.path.join(cache_dir, server)):
        os.makedirs(os.path.join(cache_dir, server))
    f = open(os.path.join(cache_dir, server, str(id) + '.text'), 'w')
    f.write(text)
    f.close()

def get(server, id):
    return open(os.path.join(cache_dir, server, str(id) + '.text')).read()

def get_temp():
    return open(os.path.join(cache_dir, 'entry.html'), 'w')
Code example #31
def get_cover_path(uri, dim):
    if not get_cover_path.cache_path:
        get_cover_path.cache_path = BaseDirectory.save_cache_path(
            Config.applicationID + '/coverArt/'
        )
    return get_cover_path.cache_path + uri + ":" + str(dim)
Code example #32
File: common.py Project: vonshednob/pter
    BaseDirectory = None


PROGRAMNAME = 'pter'
QTPROGRAMNAME = 'qpter'
HERE = pathlib.Path(os.path.abspath(__file__)).parent
HOME = pathlib.Path.home()
CONFIGDIR = HOME / ".config" / PROGRAMNAME
CONFIGFILE = HOME / ".config" / PROGRAMNAME / (PROGRAMNAME + ".conf")
CACHEDIR = HOME / ".cache" / PROGRAMNAME
CACHEFILE = CACHEDIR / (PROGRAMNAME + ".settings")

if BaseDirectory is not None:
    CONFIGDIR = pathlib.Path(BaseDirectory.save_config_path(PROGRAMNAME) or CONFIGDIR)
    CONFIGFILE = CONFIGDIR / (PROGRAMNAME + ".conf")
    CACHEDIR = pathlib.Path(BaseDirectory.save_cache_path(PROGRAMNAME) or CACHEDIR)
    CACHEFILE = CACHEDIR / (PROGRAMNAME + ".settings")

SEARCHES_FILE = CONFIGDIR / "searches.txt"
TEMPLATES_FILE = CONFIGDIR / "templates.txt"

URL_RE = re.compile(r'([A-Za-z][A-Za-z0-9+\-.]*)://([^ ]+)')

DEFAULT_TASK_FORMAT = '{selection: >} {nr: >} {done} {tracking }{due }{(pri) }{description}'
ATTR_TRACKING = 'tracking'
ATTR_T = 't'
ATTR_DUE = 'due'
ATTR_PRI = 'pri'
ATTR_ID = 'id'

DELEGATE_ACTION_NONE = 'none'
Code example #33
import urllib2
from debian import debian_support
import subprocess
import re
import gzip
import hashlib
import shutil
import contextlib
import itertools
import tempfile
import logging
import make_overlay
LOGGER = logging.getLogger(__name__)

from xdg import BaseDirectory
CACHE_DIR = BaseDirectory.save_cache_path('rundeb')
if not os.path.exists(CACHE_DIR):
	os.makedirs(CACHE_DIR)

class VersionRestriction(object):
	LT = "<<"
	LTE = "<="
	EQ = "="
	GTE = ">="
	GT = ">>"
	OPS = (LT, LTE, EQ, GTE, GT, None)
	def __init__(self, op, version):
		assert op in self.OPS, "Invalid operator: %s" % (op,)
		self.op = op
		self.version = version
	def zi_xml(self):
Code example #34
File: __init__.py Project: c1sc0/PGPgram
class Db:
    """The data handling object for pgpgram.

    Args:
        verbose (int): level of
    """

    config_path = BaseDirectory.save_config_path(name)
    data_path = BaseDirectory.save_data_path(name)
    cache_path = BaseDirectory.save_cache_path(name)
    executable_path = dirname(realpath(__file__))

    def __init__(self, verbose=0):
        self.verbose = verbose

        # Load files list from disk into 'files' attribute
        try:
            self.files = load(path_join(self.config_path, "files.pkl"))
        except FileNotFoundError as e:
            if verbose > 0:
                pprint("files pickle not found in path, initializing")
            self.files = []

        # Load configuration from disk into 'config' attribute
        try:
            self.config = load(path_join(self.config_path, "config.pkl"))

        except FileNotFoundError as e:
            # Init configuration
            if verbose > 0:
                pprint("Config file not found in path, initializing")

            self.config = {"db key": random_id(20)}

            # Paths
            index_dir = path_join(self.data_path, "index")
            tdlib_dir = path_join(self.data_path, 'tdlib')
            tdlib_config_symlink = path_join(self.config_path, "tdlib")
            tdlib_documents_dir = path_join(self.cache_path, "documents")
            tdlib_documents_symlink = path_join(tdlib_dir, "documents")

            # Init paths
            if not exists(index_dir):
                mkdir(index_dir)

            if not exists(tdlib_dir):
                mkdir(tdlib_dir)
                mkdir(tdlib_documents_dir)
                symlink(tdlib_dir, tdlib_config_symlink)
                symlink(tdlib_documents_dir, tdlib_documents_symlink)

        # Load index
        try:
            self.index = load(path_join(self.data_path, "index.pkl"))
        except:
            if verbose > 0:
                print("index still not built")
        self.save()

    def save(self):
        """Save db

            Formats db in a format compatible with trovotutto,
            builds the trovotutto index and then save the following to disk:
            - search index
            - files list
            - configuration
        """
        pgpgram_db = PGPgramDb(self, filetype="any", exclude=[], update=True)
        self.index = Index(pgpgram_db, slb=3, verbose=self.verbose)
        save(self.index, path_join(self.data_path, "index.pkl"))
        save(self.files, path_join(self.config_path, "files.pkl"))
        save(self.config, path_join(self.config_path, "config.pkl"))

    def search(self,
               query,
               path=getcwd(),
               filetype="any",
               exclude=[],
               results_number=10,
               reverse=True,
               verbose=0):

        if filetype != "any" or path != getcwd():
            word_shortest = min([len(w) for w in query.split(" ")])
            pgpgram_db_kwargs = {
                'path': path,
                'filetype': filetype,
                'exclude': exclude,
                'update': True
            }
            pgpgram_db = PGPgramDb(self, **pgpgram_db_kwargs)
            self.index = Index(pgpgram_db, slb=word_shortest, verbose=verbose)

        results = self.index.search(query)

        self.display_results(results[:results_number], reverse=reverse)

        if results != []:
            choice = int(input("Select file to restore (number): "))
            f = next(d for d in self.files
                     if d['path'] == results[choice])["name"]
            restore = Restore(f, download_directory=getcwd(), verbose=verbose)

    def display_results(self, results, reverse=True):
        lines = []
        for i, f in enumerate(results):
            g = f.split("/")
            result = {
                "title":
                "{}{}. {}{}{}".format(color.GREEN + color.BOLD, i, color.BLUE,
                                      g[-1], color.END),
                "subtitle":
                "{}{}{}\n".format(color.GRAY, f, color.END)
            }
            lines.append(result)

        if reverse: lines.reverse()

        for result in lines:
            print(result['title'])
            print(result['subtitle'])

    def import_file(self, filename):
        files = load(filename)
        for f in files:
            if not f['hash'] in [g['hash'] for g in self.files]:
                self.files.append(f)
                print("adding {}".format(f['name']))
        self.save()
Code example #35
File: pappymenu.py Project: Kingdread/pappymenu
def cache_path():
    """Return the path of the cache file."""
    return os.path.join(BaseDirectory.save_cache_path(__appname__),
                        'menu-cache')
Code example #36
File: __init__.py Project: Artanis/icecrate
import os.path

from xdg import BaseDirectory
import redis

from icecrate.utils import keygen
from icecrate._version import __version__, __version_info__

__db_version__ = "1"

# TEMP CONFIG
HOST = "localhost"
PORT = 6379
DB   = 0
INDEXDIR = BaseDirectory.save_cache_path("icecrate")

database = redis.StrictRedis(host=HOST, port=PORT, db=DB, decode_responses=True)
database.set(keygen("icecrate", meta="version"), __version__)
database.set(keygen("icecrate", meta="dbversion"), __db_version__)

import icecrate.items
import icecrate.tags
import icecrate.search
Code example #37
File: cons.py Project: AlexPoilrouge/tvcmd
    },
    ACQUIRED: {
        "text": "ACQUIRED",
        "color": "\033[33m"
    },
    SEEN: {
        "text": "SEEN",
        "color": "\033[32m"
    },
    FUTURE: {
        "text": "FUTURE",
        "color": "\033[34m"
    },
    IGNORED: {
        "text": "IGNORED",
        "color": "\033[94m"
    }
}

BASE = "tvcmd"
if os.environ.get("DEBUG"):
    BASE = "tvcmd_debug"

CONFIG = xdg_dir.save_config_path(BASE)
CONFIG_MAIN = CONFIG + "/main.cfg"
CONFIG_STATUS = CONFIG + "/status.db"

CACHE = xdg_dir.save_cache_path(BASE)
CACHE_HTTP = CACHE + "/http"
CACHE_EPISODES = CACHE + "/episodes.db"
Code example #38
File: _deb.py Project: snapcore/snapcraft
from snapcraft_legacy import file_utils
from snapcraft_legacy.internal.indicators import is_dumb_terminal

from . import errors
from ._base import BaseRepo, get_pkg_name_parts
from .deb_package import DebPackage

if sys.platform == "linux":
    # Ensure importing works on non-Linux.
    from .apt_cache import AptCache

logger = logging.getLogger(__name__)

_DEB_CACHE_DIR: pathlib.Path = pathlib.Path(
    BaseDirectory.save_cache_path("snapcraft", "download")
)
_STAGE_CACHE_DIR: pathlib.Path = pathlib.Path(
    BaseDirectory.save_cache_path("snapcraft", "stage-packages")
)

_HASHSUM_MISMATCH_PATTERN = re.compile(r"(E:Failed to fetch.+Hash Sum mismatch)+")
_DEFAULT_FILTERED_STAGE_PACKAGES: List[str] = [
    "adduser",
    "apt",
    "apt-utils",
    "base-files",
    "base-passwd",
    "bash",
    "bsdutils",
    "coreutils",
Code example #39
File: cons.py Project: arabar/tvcmd
import xdg.BaseDirectory as xdg_dir, os

# episodes status
NEW = 0
ADQUIRED = 1
SEEN = 2
FUTURE = 3
IGNORED = 4
    
ENUM_EPISODE_STATUS = {
    NEW: {"text": "NEW", "color": "" },
    ADQUIRED: {"text": "ADQUIRED", "color": "\033[33m" },
    SEEN: {"text": "SEEN", "color": "\033[32m" },
    FUTURE: {"text": "FUTURE", "color": "\033[34m" },
    IGNORED: {"text": "IGNORED", "color": "\033[94m" }
}

BASE = "tvcmd"
if os.environ.get("DEBUG"):
    BASE = "tvcmd_debug"

CONFIG = xdg_dir.save_config_path(BASE)
CONFIG_MAIN = CONFIG + "/main.cfg"
CONFIG_STATUS = CONFIG + "/status.db"

CACHE = xdg_dir.save_cache_path(BASE)
CACHE_HTTP = CACHE + "/http"
CACHE_EPISODES = CACHE + "/episodes.db"
Code example #40
File: config.py Project: einaru/curry
    Configuration

    Copyright: (c) 2014 Einar Uvsløkk
    License: GNU General Public License (GPL) version 3 or later
"""
import os
import logging
import configparser

from curry import prog_name

try:
    from xdg import BaseDirectory
    config_path = BaseDirectory.save_config_path(prog_name)
    cache_path = BaseDirectory.save_cache_path(prog_name)
except:
    config_path = os.path.join(os.path.expanduser('~/.config'), prog_name)
    if not os.path.isdir(config_path):
        os.makedirs(config_path)
    cache_path = os.path.join(os.path.expanduser('~/.cache'), prog_name)
    if not os.path.isdir(cache_path):
        os.makedirs(cache_path)

__all__ = ['config', 'get_cache_file']

config_file = os.path.join(config_path, 'config.ini')

log = logging.getLogger(__name__)

Code example #41
File: env.py Project: Kekun/video-games
	def save_cache_path():
		return BaseDirectory.save_cache_path(Environement.resource)
Code example #42
File: _launchpad.py Project: snapcore/snapcraft
 def _create_cache_directory(self) -> str:
     cache_dir = BaseDirectory.save_cache_path("snapcraft", "provider",
                                               "launchpad")
     os.makedirs(cache_dir, mode=0o700, exist_ok=True)
     return cache_dir
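As this snippet (and examples #19 and #38) shows, save_cache_path also accepts multiple path components and joins them before creating the nested directory. A small illustrative sketch with hypothetical "myapp"/"plugins" names:

import os
from xdg import BaseDirectory

# Multiple arguments are joined, so this creates and returns
# $XDG_CACHE_HOME/myapp/plugins in one call.
nested = BaseDirectory.save_cache_path("myapp", "plugins")  # hypothetical names
assert nested.endswith(os.path.join("myapp", "plugins"))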
Code example #43
File: __init__.py Project: rickard-von-essen/goopg
from xdg import BaseDirectory

from logger import StreamToLogger


# Check https://developers.google.com/gmail/api/auth/scopes
# for all available scopes
OAUTH_SCOPE = 'https://mail.google.com/'

# Path to the client_secret.json, file comes from the Google Developer Console
CLIENT_SECRET_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                  'client_secret.json')

# Directory where the credentials storage files are placed
STORAGE_DIR = BaseDirectory.save_cache_path(os.path.join('goopg', 'storage'))


class Gmail():

    def __init__(self, username):
        # the main username
        self.username = username
        self.http = httplib2.Http()
        self.logger = logging.getLogger('Gmail')
        self.logger.setLevel(logging.DEBUG)

        # Start the OAuth flow to retrieve credentials
        flow = flow_from_clientsecrets(CLIENT_SECRET_FILE,
                                       scope=OAUTH_SCOPE,
                                       redirect_uri='urn:ietf:wg:oauth:2.0:oob:auto')
Code example #44
File: Libertine.py Project: attente/libertine
def get_libertine_container_path():
    return basedir.save_cache_path('libertine-container')
Code example #45
File: migrate-blinky-dirs.py Project: cherti/blinky
#!/usr/bin/python

from xdg import BaseDirectory
import os, shutil

data = BaseDirectory.save_data_path('blinky')
cache = BaseDirectory.save_cache_path('blinky')
blinkydir = os.path.abspath(os.path.expanduser('~/.blinky'))

if not os.path.isdir(blinkydir):  # backward compatibility
    print("No ~/.blinky found, nothing to migrate")
    exit()


def migrate(src, target, execute=False):
    if not execute:
        print("Moving ~/blinky/{} to {}".format(src, target))
    else:
        src = os.path.join(blinkydir, src)
        shutil.move(src, target)


print("Gonna start migrating ~/.blinky:")
migrate("build", cache)
migrate("logs", cache)
migrate("cache", os.path.join(cache, 'pkg'))
migrate("reviewed", data)

input("Proceed? (Ctrl+C if not)")

migrate("build", cache, execute=True)
Code example #46
File: __init__.py Project: jubayerarefin/goopg
from xdg import BaseDirectory

from logger import StreamToLogger


# Check https://developers.google.com/gmail/api/auth/scopes
# for all available scopes
OAUTH_SCOPE = 'https://mail.google.com/'

# Path to the client_secret.json, file comes from the Google Developer Console
CLIENT_SECRET_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                  'client_secret.json')

# Directory where the credentials storage files are placed
STORAGE_DIR = BaseDirectory.save_cache_path(os.path.join('goopg', 'storage'))


class Gmail():

    def __init__(self, username):
        # the main username
        self.username = username
        self.http = httplib2.Http()
        self.logger = logging.getLogger('Gmail')

        # Start the OAuth flow to retrieve credentials
        flow = flow_from_clientsecrets(CLIENT_SECRET_FILE,
                                       scope=OAUTH_SCOPE)

        # The storage for current user
Code example #47
File: config.py Project: acerix/fatbot
import pytoml

import ctypes
import platform
import sys

app_name = 'fatbot'


# define filesystem paths

from xdg import BaseDirectory
package_dir = os.path.dirname(os.path.realpath(__file__))
config_dir = BaseDirectory.save_config_path(app_name)
data_dir = BaseDirectory.save_data_path(app_name)
cache_dir = BaseDirectory.save_cache_path(app_name)
#runtime_dir = BaseDirectory.get_runtime_dir(app_name) # XDG_RUNTIME_DIR undefined in systemd?
runtime_dir = cache_dir

config_file = os.path.join(config_dir, 'config.toml')


# load config file

if not os.path.isfile(config_file):
    shutil.copyfile(os.path.join(package_dir, 'examples', 'config.toml'), config_file)

with open(config_file) as config_file_object:
    settings = pytoml.load(config_file_object)