def __init__(self):
    """Set up databases, the control FIFO, clipboard and settings.

    Creates (if missing) the ring/persistent history files under the XDG
    data dir and a named FIFO under the XDG runtime dir, then loads both
    databases, the user configuration, and (optionally) notifications.
    """
    # Init databases and fifo
    name = 'roficlip'
    self.ring_db = '{0}/{1}'.format(BaseDirectory.save_data_path(name), 'ring.db')
    self.persist_db = '{0}/{1}'.format(BaseDirectory.save_data_path(name), 'persistent.db')
    self.fifo_path = '{0}/{1}.fifo'.format(
        BaseDirectory.get_runtime_dir(strict=False), name)
    self.config_path = '{0}/settings'.format(
        BaseDirectory.save_config_path(name))
    # Touch the database files so later reads never fail on a fresh install.
    if not os.path.isfile(self.ring_db):
        open(self.ring_db, "a+").close()
    if not os.path.isfile(self.persist_db):
        open(self.persist_db, "a+").close()
    # (Re)create the FIFO if the path is missing or exists but is not a FIFO.
    if (not os.path.exists(self.fifo_path)
            or not stat.S_ISFIFO(os.stat(self.fifo_path).st_mode)):
        os.mkfifo(self.fifo_path)
    # Non-blocking read end: polling the FIFO must never stall the caller.
    self.fifo = os.open(self.fifo_path, os.O_RDONLY | os.O_NONBLOCK)
    # Init clipboard and read databases
    self.cb = gtk.Clipboard()
    self.ring = self.read(self.ring_db)
    self.persist = self.read(self.persist_db)
    # Load settings
    self.load_config()
    # Init notifications (only when enabled in the loaded settings)
    if self.cfg['notify']:
        pynotify.init(name)
def __init__(self):
    """Load hobo's configuration and open its data stores.

    Ensures the XDG config/data dirs and the images directory exist,
    touches the template file, opens the Db, and reads hobo.ini with a
    built-in default per option.
    """
    config_dir = xdg.save_config_path('hobo')
    data_dir = xdg.save_data_path('hobo')
    self.images_dir = os.path.join(data_dir, 'images')
    if not os.path.isdir(self.images_dir):
        os.mkdir(self.images_dir)
    self.template_file = os.path.join(self.images_dir, 'hobo.templates')
    touch(self.template_file)
    self.db = Db(os.path.join(data_dir, 'hobo.db'))
    config_file = os.path.join(config_dir, 'hobo.ini')
    self._cfg = ConfigParser()
    self._cfg.read(config_file)
    # Each option falls back to a default when absent from hobo.ini.
    self.bridge_device = self.get('config', 'bridge_device') or 'hob0'
    self.base_mem = self.get('config', 'base_mem') or '1024'
    self.base_cpu = self.get('config', 'base_cpu') or '1'
    # compression analysis:
    #   -1     256M
    #   -9     213M
    #   --best 223M
    # might as well use -1
    # libvirt docs recommend: --best --block-size=16777216
    # but it's sloooow
    self.compress_flags = self.get('config', 'compress_flags') or '-1 -T0 --block-size=16777216'
def get_state_path():
    """Get complete path for skill state file.

    Returns:
        (str) path to skills.json
    """
    data_dir = BaseDirectory.save_data_path('mycroft')
    return join(data_dir, 'skills.json')
def __init__(
    self,
    *,
    project,
    echoer,
    is_ephemeral: bool = False,
    build_provider_flags: Dict[str, str] = None,
) -> None:
    """Record build context and derive instance/snap naming.

    build_provider_flags defaults to an empty dict and is copied so the
    caller's mapping is never aliased.
    """
    self.project = project
    self.echoer = echoer
    self._is_ephemeral = is_ephemeral

    meta = project._snap_meta
    self.instance_name = "snapcraft-{}".format(meta.name)

    # Versioned snaps embed the version in the artifact name.
    if meta.version:
        self.snap_filename = "{}_{}_{}.snap".format(
            meta.name, meta.version, project.deb_arch
        )
    else:
        self.snap_filename = "{}_{}.snap".format(meta.name, project.deb_arch)

    self.provider_project_dir = os.path.join(
        BaseDirectory.save_data_path("snapcraft"),
        "projects",
        meta.name,
        self._get_provider_name(),
    )

    if build_provider_flags is None:
        build_provider_flags = {}
    self.build_provider_flags = build_provider_flags.copy()
def get_save_dir():
    '''Returns the path to the directory to save the maps to'''
    if os.name == 'nt':
        savedir = os.path.join(os.path.expanduser('~'), '.labyrinth')
        if not os.access(savedir, os.W_OK):
            os.makedirs(savedir)
        return savedir

    savedir = BaseDirectory.save_data_path("labyrinth")
    old_savedir = os.path.join(os.path.expanduser('~'), ".gnome2", "labyrinth")

    # Migrate maps from the legacy GNOME2 location into the XDG dir.
    if os.path.isdir(old_savedir) and os.path.exists(old_savedir):
        for entry in os.listdir(old_savedir):
            try:
                os.rename(os.path.join(old_savedir, entry),
                          os.path.join(savedir, entry))
            except Exception as exc:
                warnings.warn("Failed to migrate %s: %s" % (entry, exc))
        # Drop the now-empty legacy directory (best effort).
        try:
            os.rmdir(old_savedir)
        except Exception as exc:
            warnings.warn("Could not remove old map dir (%s): %s"
                          % (old_savedir, exc))
    return savedir
def __init__(self, release):
    """Resolve *release* to a distribution and build per-release paths.

    Raises:
        ValueError: if *release* is empty or not listed under any
            distribution in the loaded configuration.
    """
    self.release = release
    self.name = 'apt-venv'
    self.config = self._load_config_from_files()
    # Find which distribution (e.g. debian/ubuntu) owns this release.
    self.distro = None
    for distro in self.config['distributions']:
        if self.release in self.config['distributions'][distro]['releases']:
            self.distro = distro
    if not self.distro:
        base = "Release \"{}\" not valid. ".format(self.release)
        if not self.release:
            base = "No release declared. "
        # Build a per-distribution listing of valid releases for the error.
        all_releases = []
        for distro in sorted(self.config['distributions'].keys()):
            releases = self.config['distributions'][distro]['releases']
            all_releases.append(" [%s] %s" % (distro, ' - '.join(releases)))
        raise ValueError(base + "Please specify one of:\n%s" % '\n'.join(all_releases))
    # XDG base paths for this tool...
    self.config_path = _BaseDirectory.save_config_path(self.name)
    self.cache_path = _BaseDirectory.save_cache_path(self.name)
    self.data_path = _BaseDirectory.save_data_path(self.name)
    # ...narrowed to per-release subdirectories.
    self.config_path = _os.path.join(self.config_path, self.release)
    self.cache_path = _os.path.join(self.cache_path, self.release)
    self.data_path = _os.path.join(self.data_path, self.release)
    self.bashrc = _os.path.join(self.config_path, "bash.rc")
    self.sourceslist = _os.path.join(self.config_path, "sources.list")
    self.aptconf = _os.path.join(self.config_path, "apt.conf")
def set_last_end_date(workspace_id, date):
    """Set the last "end date" for a workspace (used in report queries).

    :param workspace_id: ID of workspace to set the last used end date for.
    :type workspace_id: int | str
    :param date: End date to store
    :type date: datetime.datetime
    :return: Nothing.
    :rtype: None
    :raises OSError: If the data file cannot be saved (or existing data
        cannot be loaded in order to preserve it).
    :raises json.JSONDecodeError: If existing data cannot be loaded (in
        order to preserve it) because the data file is corrupt.
    """
    import tempfile

    # JSON object keys are always strings.
    # See http://stackoverflow.com/q/1450957
    workspace_id = str(workspace_id)
    data_dir = BaseDirectory.save_data_path(APP_SHORTNAME)
    path = os.path.join(data_dir, END_DATES_FILENAME)
    if os.path.exists(path):
        # Load existing data so that we preserve it.
        with open(path, "r") as fh:
            data = json.load(fh)
    else:
        data = {}
    data[workspace_id] = date.isoformat()
    # Write safely (resolves the old XXX): dump to a temp file in the same
    # directory, then atomically replace the real file so a crash mid-write
    # never leaves readers with a truncated/corrupt JSON file.
    fd, tmp_path = tempfile.mkstemp(dir=data_dir, suffix=".tmp")
    try:
        with os.fdopen(fd, "w") as fh:
            json.dump(data, fh)
        os.replace(tmp_path, path)
    except BaseException:
        # Best-effort cleanup of the temp file before re-raising.
        try:
            os.remove(tmp_path)
        except OSError:
            pass
        raise
def get_save_dir ():
    '''Returns the path to the directory to save the maps to'''
    if os.name == 'nt':
        win_dir = os.path.join(os.path.expanduser('~'), '.labyrinth')
        if not os.access (win_dir, os.W_OK):
            os.makedirs (win_dir)
        return win_dir

    savedir = BaseDirectory.save_data_path("labyrinth")
    old_savedir = os.path.join (os.path.expanduser('~'), ".gnome2", "labyrinth")

    # Nothing to migrate unless the legacy GNOME2 dir exists.
    if not (os.path.exists(old_savedir) and os.path.isdir(old_savedir)):
        return savedir

    # Move each legacy map into the XDG directory (best effort).
    for entry in os.listdir(old_savedir):
        try:
            os.rename(os.path.join(old_savedir, entry),
                      os.path.join(savedir, entry))
        except Exception as exc:
            warnings.warn("Failed to migrate %s: %s" % (entry, exc))
    # remove old dir
    try:
        os.rmdir(old_savedir)
    except Exception as exc:
        warnings.warn("Could not remove old map dir (%s): %s" % (old_savedir, exc))
    return savedir
def __init__(self, release):
    """Resolve *release* to a distribution and build per-release paths.

    Raises:
        ValueError: if *release* is empty or not listed under any
            distribution in /etc/apt-venv.conf.
    """
    self.release = release
    self.name = 'apt-venv'
    # Use a context manager so the config file handle is always closed
    # (the original leaked the open file object).
    with open('/etc/apt-venv.conf') as conf_file:
        self.config = _loadJSON(conf_file)
    # Find which distribution (e.g. debian/ubuntu) owns this release.
    self.distro = None
    for distro in self.config['distributions']:
        if self.release in self.config['distributions'][distro]['releases']:
            self.distro = distro
    if not self.distro:
        base = "Release \"{}\" not valid. ".format(self.release)
        if not self.release:
            base = "No release declared. "
        # Build a per-distribution listing of valid releases for the error.
        all_releases = []
        for distro in sorted(self.config['distributions'].keys()):
            releases = self.config['distributions'][distro]['releases']
            all_releases.append(" [%s] %s" % (distro, ' - '.join(releases)))
        raise ValueError(base + "Please specify one of:\n%s" % '\n'.join(all_releases))
    # XDG base paths for this tool, narrowed to per-release subdirectories.
    self.config_path = _BaseDirectory.save_config_path(self.name)
    self.cache_path = _BaseDirectory.save_cache_path(self.name)
    self.data_path = _BaseDirectory.save_data_path(self.name)
    self.config_path = _os.path.join(self.config_path, self.release)
    self.cache_path = _os.path.join(self.cache_path, self.release)
    self.data_path = _os.path.join(self.data_path, self.release)
    self.bashrc = _os.path.join(self.config_path, "bash.rc")
    self.sourceslist = _os.path.join(self.config_path, "sources.list")
    self.aptconf = _os.path.join(self.config_path, "apt.conf")
def __init__(self, platform='default', old_skills_dir=None, skills_dir=None, repo=None, versioned=True): self.platform = platform # Keep this variable alive for a while, is used to move skills from the # old config based location to XDG self.old_skills_dir = path.expanduser(old_skills_dir or '') or None self.skills_dir = (skills_dir or BaseDirectory.save_data_path('mycroft/skills')) self.repo = repo or SkillRepo() self.versioned = versioned self.lock = MsmProcessLock() # Property placeholders self._all_skills = None self._default_skills = None self._local_skills = None self._device_skill_state = None self.saving_handled = False self.device_skill_state_hash = '' with self.lock: self._init_skills_data()
def main(steam_path=None, mountpoint=None):
    """Mount a FUSE view of all Steam libraries merged into one tree.

    Args:
        steam_path: Steam install dir; autodetected (native, then
            flatpak) when None.
        mountpoint: where to mount SteamFuse; defaults to a directory
            under the XDG data path.

    Returns:
        -1 on any setup failure (missing Steam dir, mergerfs or
        fusermount error); None otherwise.
    """
    # Setup XDG directories
    config_dir = BaseDirectory.save_config_path('steamfuse')
    data_dir = BaseDirectory.save_data_path('steamfuse')
    cache_dir = BaseDirectory.save_cache_path('steamfuse')
    # Check/Set path to steam installation: native install first, then flatpak.
    if steam_path is None:
        steam_path = os.path.expanduser('~/.local/share/Steam')
        if not os.path.exists(steam_path):
            steam_path = os.path.expanduser('~/.var/app/com.valvesoftware.Steam/data/Steam/')
            if not os.path.exists(steam_path):
                print('Could not find Steam install dir. Specify as argument.')
                return -1
    # Find libraries and installed games
    main_library = os.path.join(steam_path, 'steamapps')
    libraryfolders_vdf = vdf.load(open(os.path.join(main_library, 'libraryfolders.vdf'), 'r'))
    # Numeric keys > 0 in libraryfolders.vdf denote extra library folders.
    more_libraries = [
        os.path.join(folder['path'], 'steamapps')
        for key, folder in libraryfolders_vdf['libraryfolders'].items()
        if key.isdigit() and int(key) > 0
    ]
    # Setup mergerfs mount combining every library into one directory.
    mergerfs_path = os.path.join(data_dir, 'mergerfs')
    if not os.path.exists(mergerfs_path):
        os.mkdir(mergerfs_path)
    proc = subprocess.Popen(
        ['mergerfs', f'{main_library}:{":".join(more_libraries)}', f'{mergerfs_path}'],
        stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False, text=True)
    out, err = proc.communicate()
    if err:
        print(err)
        return -1
    # Download applist from Steam (cached across runs in the XDG cache dir).
    applist = os.path.join(cache_dir, 'applist.json')
    if not os.path.exists(applist):
        url = 'https://api.steampowered.com/ISteamApps/GetAppList/v2/'
        res = requests.get(url, allow_redirects=True)
        open(applist, 'wb').write(res.content)
    if mountpoint is None:
        mountpoint = os.path.join(data_dir, 'SteamFuse')
        if not os.path.exists(mountpoint):
            os.mkdir(mountpoint)
    # Run the FUSE loop in the foreground; returns when unmounted/interrupted.
    try:
        FUSE(SteamFuseTree(mergerfs_path, applist), mountpoint=mountpoint,
             nothreads=True, foreground=True)
    except RuntimeError:
        pass
    # Tear down the mergerfs mount.
    proc = subprocess.Popen(
        ['fusermount', '-u', f'{mergerfs_path}'],
        stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False, text=True)
    out, err = proc.communicate()
    if err:
        print(err)
        return -1
def test_save_data_path(self):
    """save_data_path("foo") must create/return $XDG_DATA_HOME/foo."""
    tmpdir = tempfile.mkdtemp()
    # Remember the original value so the environment can be restored —
    # the original test leaked the patched XDG_DATA_HOME to later tests.
    old_home = environ.get('XDG_DATA_HOME')
    try:
        environ['XDG_DATA_HOME'] = tmpdir
        # Reload so the module re-reads the patched environment.
        reload(BaseDirectory)
        datapath = BaseDirectory.save_data_path("foo")
        self.assertEqual(datapath, os.path.join(tmpdir, "foo"))
    finally:
        if old_home is None:
            environ.pop('XDG_DATA_HOME', None)
        else:
            environ['XDG_DATA_HOME'] = old_home
        shutil.rmtree(tmpdir)
def test_save_data_path(self):
    """save_data_path("foo") must create/return $XDG_DATA_HOME/foo."""
    tmpdir = tempfile.mkdtemp()
    # Remember the original value so the environment can be restored —
    # the original test leaked the patched XDG_DATA_HOME to later tests.
    old_home = environ.get('XDG_DATA_HOME')
    try:
        environ['XDG_DATA_HOME'] = tmpdir
        # Reload so the module re-reads the patched environment.
        reload(BaseDirectory)
        datapath = BaseDirectory.save_data_path("foo")
        self.assertEqual(datapath, os.path.join(tmpdir, "foo"))
    finally:
        if old_home is None:
            environ.pop('XDG_DATA_HOME', None)
        else:
            environ['XDG_DATA_HOME'] = old_home
        shutil.rmtree(tmpdir)
def __init__(self):
    """Build the Gtk application: paths, systems, database, actions, UI."""
    print (time.time()-start_time, "start init application")
    Gtk.Application.__init__(self, application_id = 'org.gnome.badnik',
                             flags = Gio.ApplicationFlags.FLAGS_NONE)
    GLib.threads_init()
    Gdk.threads_init()

    self.simplename = "badnik"
    self.fullname = "Video games"

    # Installation-relative resource directories (two levels above argv[0]).
    self.datadir = os.path.dirname(os.path.dirname(os.path.realpath(sys.argv[0])))
    self.savedatadir = BaseDirectory.save_data_path(self.simplename)
    self.iconsdir = self.datadir + "/data/icons"
    self.tosecdir = self.datadir + "/data/tosec"
    self.srcdir = self.datadir + "/src"

    # Register the game systems the app knows how to handle.
    self.systems = Badnik.SystemCollection ()
    self.systems.add (Badnik.MegaDrive ())
    self.systems.add (Badnik.Desktop ())

    self.gamesdb = BadnikLibrary(self, self.savedatadir)
    self.focused_game = None

    self.connect("activate", self.on_activate)
    self.register(None)

    self.settings = Gio.Settings.new('org.gnome.badnik')

    # Application menu built from the GtkBuilder resource file.
    self.builder = Gtk.Builder()
    self.builder.add_from_file(self.srcdir + "/ressources/app-menu.ui")
    self.builder.connect_signals(self)
    self.menumodel = self.builder.get_object("app-menu")
    self.set_app_menu(self.menumodel)

    # Declarative table of GActions; _add_actions() wires them up.
    self._action_entries = [
        { 'name': 'quit', 'callback': self.on_quit, 'accel': '<Primary>q' },
        { 'name': 'about', 'callback': self.on_about },
        { 'name': 'help', 'callback': self.on_help, 'accel': 'F1' },
        { 'name': 'fullscreen', 'callback': self.on_fullscreen, 'accel': 'F11' },
        { 'name': 'view-as', 'callback': self.on_view_as,
          'create_hook': self._view_as_create_hook, 'parameter_type': 's',
          'state': self.settings.get_value('view-as') },
        { 'name': 'add-games', 'callback': self.on_add_games },
        { 'name': 'download-metadata', 'callback': self.on_download_metadata,
          'accel': '<Primary>m' }
    ]
    self._add_actions()

    settings = Gtk.Settings.get_default()
    settings.set_property("gtk-application-prefer-dark-theme", True)
    settings.set_property("gtk-shell-shows-app-menu", True)

    print (time.time()-start_time, "end init application")

    self.running_games = {}

    self.systems.connect("game_found", self.on_game_found)
    self.gamesdb.connect("game_added", self.on_game_added)
def save_data_file(filename):
    """
    Return filename in the XDG data home directory, where the
    directory is guaranteed to exist
    """
    data_dir = base.save_data_path(PACKAGE_NAME)
    if not data_dir:
        return None
    return os.path.join(data_dir, filename)
def __init__(self):
    """Set up the control transport for the clearskies daemon.

    Chooses the Windows or Unix JSON transport based on the current
    platform; nothing is connected yet (self.connected starts False).
    """
    data_dir = xdgBaseDirectory.save_data_path("clearskies")
    control_path = os.path.join(data_dir, "control")
    self.connected = False
    transport_cls = (WindowsJsonTransport
                     if "Windows" in platform.platform()
                     else UnixJsonTransport)
    self.socket = transport_cls(control_path)
def get_log_data(self, lines=0):
    """Return the clearskies log contents.

    Args:
        lines (int): when > 0, only the last *lines* lines are returned.

    Raises:
        ProtocolException: wrapping any underlying error (missing file,
            permission problem, etc.).
    """
    try:
        data_dir = xdgBaseDirectory.save_data_path("clearskies")
        log_path = os.path.join(data_dir, "log")
        # Close the handle deterministically (the original leaked it).
        with open(log_path) as log_file:
            data = log_file.read()
        if lines:
            # Keep the last `lines` lines plus the trailing newline chunk.
            data = "\n".join(data.split("\n")[-(lines + 1):])
        return data
    except Exception as e:
        raise ProtocolException("Couldn't get log data: %s" % e)
def save_data_file(filename):
    """
    Return filename in the XDG data home directory, where the
    directory is guaranteed to exist
    """
    direc = base.save_data_path(PACKAGE_NAME)
    return os.path.join(direc, filename) if direc else None
def get_save_path() -> Path:
    """Return a path to where a database file may be created.

    Create intermediate directories as needed.
    """
    # Reload so BaseDirectory re-reads any environment changes.
    importlib.reload(BaseDirectory)
    data_dir = BaseDirectory.save_data_path(DB_FILE.parent)
    return Path(data_dir, DB_FILE.name)
def write_data(data, filename=None, folder=None):
    """Serialize *data* as indented UTF-8 JSON and return the path written.

    filename defaults to get_default_filename(); folder defaults to the
    XDG data path for this application.
    """
    target = os.path.join(
        folder if folder is not None else BaseDirectory.save_data_path(DATA_FOLDER_NAME),
        filename if filename is not None else get_default_filename(),
    )
    with codecs.open(target, 'w', encoding='utf-8') as handle:
        simplejson.dump(data, handle, ensure_ascii=False, indent=4)
    return target
def __init__(self): self.CACHE_HOME = os.path.join(BaseDirectory.xdg_cache_home, "puding") # creating cache home if it doesn't exist if not os.path.isdir(self.CACHE_HOME): os.makedirs(self.CACHE_HOME) self.CONFIG_HOME = BaseDirectory.save_config_path("puding") self.CONFIG_FILE = os.path.join(self.CONFIG_HOME, "settings.json") self.DATA_HOME = BaseDirectory.save_data_path("puding") self.DATA_PATH = map(self.append_app_name, BaseDirectory.xdg_data_dirs) self.DEV_HOME = os.path.abspath(os.path.dirname(__file__))
def write_data(data, filename=None, folder=None):
    """Write *data* to an indented UTF-8 JSON file and return its path."""
    if filename is None:
        filename = get_default_filename()
    if folder is None:
        folder = BaseDirectory.save_data_path(DATA_FOLDER_NAME)
    filepath = os.path.join(folder, filename)
    handle = codecs.open(filepath, 'w', encoding='utf-8')
    try:
        simplejson.dump(data, handle, ensure_ascii=False, indent=4)
    finally:
        handle.close()
    return filepath
def test_installed_skills_path_not_virtual_env(self):
    """Test the property representing the installed skill file path.

    With os.access patched to return False (write access denied, i.e.
    not a virtualenv-owned prefix), the skills file must resolve to the
    XDG data directory instead.
    """
    with patch(self.mock_package + 'os.access') as os_patch:
        os_patch.return_value = False
        updater = SkillUpdater(self.message_bus_mock)
        self.assertEqual(
            os.path.join(BaseDirectory.save_data_path('mycroft'),
                         '.mycroft-skills'),
            updater.installed_skills_file_path
        )
def of(args: object) -> "Profile":
    """Build a Profile from parsed CLI arguments.

    ``args`` is a namespace-like object (``vars()`` is applied) that
    must carry a string ``profile_name``; ``profile_dir`` falls back to
    the XDG data path for qutebrowser-profiles.
    """
    args = vars(args)
    name = args.get("profile_name")
    # profile_name is mandatory and must be a string.
    assert isinstance(name, str)
    return Profile(
        name,
        args.get(
            "profile_dir",
            Path(BaseDirectory.save_data_path("qutebrowser-profiles")),
        ),
    )
def __init__(self, debug=False):
    """Load configuration and the activity log from XDG locations.

    Args:
        debug (bool): enable debug behaviour in the tracker.
    """
    # (Removed the unused `homedir` local from the original.)
    self._conf_dir_name = BaseDirectory.save_config_path('actracker')
    self._log_dir_name = BaseDirectory.save_data_path('actracker')
    self._conf_fname = os.path.join(self._conf_dir_name, 'conf.json')
    self._load_conf()
    self._load_log()
    # Sentinel pair for the previously observed application — presumably
    # (app name, window title); confirm against the tracking loop.
    self._last_application = ('', '')
    self.activity_counter = {}
    # Day-of-month used to detect date rollover between samples.
    self._current_day = datetime.now().day
    self.debug = debug
def get_data_dir():
    """Return blinky's data directory, honouring the legacy ~/.blinky."""
    from xdg import BaseDirectory
    legacy_dir = os.path.expanduser('~/.blinky')
    if os.path.isdir(legacy_dir):  # backward compatibility
        print(
            " DEPRECATION WARNING: support for ~/.blinky will be removed in future versions, call migrate-blinky-dirs.py to migrate and silence this message"
        )
        return os.path.expanduser('~/.blinky/cache')
    return BaseDirectory.save_data_path('blinky')
def __init__(self):
    """Open (or create) the caldav cache database and define its schema."""
    # sqlite file lives in the XDG data dir for pyhttpserver.
    self.db = DAL('sqlite://caldav.db',
                  folder=BaseDirectory.save_data_path('pyhttpserver'))
    # One row per CalDAV collection we know about.
    self.db.define_table('collections',
                         Field('url', type='string', required=True, unique=True),
                         Field('displayname', type='string'),
                         Field('subscribed', type='boolean', default=True))
    # One row per item within a collection; `content` holds the raw payload.
    self.db.define_table('colitems',
                         Field('href', type='string', required=True),
                         Field('etag', type='string'),
                         Field('collection', type='reference collections'),
                         Field('content', type='blob'),
                         # local_status defaults to 0 — presumably "in sync";
                         # confirm other values against the sync code.
                         Field('local_status', type='integer', default=0))
    # Cache of DAV storage handles, keyed per collection (populated later).
    self.davStorages = {}
def parse_opts():
    '''
    This method parses the commandline options to next, if any, and it
    parses the configuration file
    '''
    t = TUI()
    parser = OptionParser(usage=constants.USAGE)
    # Every action option stores a bound TUI method in options.func.
    parser.add_option(u'-c', u'--conf', nargs=1, dest=u'new_path',
                      help=u'NEW_PATH specifies a different configuration file')
    parser.add_option(u'-r', u'--random', action="store_const", dest="func",
                      const=t.do_random, help=u'Start an ep for a random show')
    parser.add_option(u'-l', u'--list', action="store_const", dest="func",
                      const=t.do_list, help=u'List all your shows')
    parser.add_option(u'-n', u'--new', action="store_const", dest="func",
                      const=t.do_new,
                      help=u'List shows for which there are new eps on your system')
    parser.add_option(u'-u', u'--update', action="store_const", dest="func",
                      const=t.do_update,
                      help=u'Connect to the TVRage database and update your show information')
    parser.add_option(u'-a', u'--add', action="store_const", dest="func",
                      const=t.do_add_show, help=u'Add a show to the database')
    parser.add_option(u'--add_location', action="store_const", dest="func",
                      const=t.do_add_show_location,
                      help=u'Add a location for a show to the database')
    parser.add_option(u'--change', action="store_const", dest="func",
                      const=t.do_change_show,
                      help=u'Change the current season and ep for a show')
    parser.add_option(u'--scan', action="store_const", dest="func",
                      const=t.do_scan, help=u'Scan your series path for shows')
    (options, args) = parser.parse_args()

    # Load a default config
    config = ConfigParser.SafeConfigParser()
    config.add_section(u'general')
    config.set(u'general', constants.ConfKeys.PLAYER_CMD, u'mplayer')
    config.set(u'general', constants.ConfKeys.SHOW_PATH, u'~/downloads/series')
    db_path = BaseDirectory.save_data_path('next')
    config.set(u'general', constants.ConfKeys.DB_PATH, db_path)

    # Load the config override
    if options.new_path:
        path = options.new_path
        if not (os.path.exists(path) and os.access(path, os.F_OK) and
                os.access(path, os.W_OK)):
            # No usable config at the explicit path: write an example and exit.
            print u'No configfile found in "{0}", generating default configfile. Please modify, then start next again!'.format(path)
            gen_example(path)
            sys.exit(-1)
    else:
        path = BaseDirectory.load_first_config('next', 'next.conf')
    if path:
        config.read(path)
    result = dict(config.items(u'general'))
    for (k, v) in result.items():
        # make sure bools are parsed correct
        if 'false' == v.lower() or 'no' == v.lower() or '0' == v:
            result[k] = False
        if 'true' == v.lower() or 'yes' == v.lower() or '1' == v:
            result[k] = True
    t.conf = result
    return options, result, args
def _start_build(
    *, lp: LaunchpadClient, project: Project, build_id: str, package_all_sources: bool
) -> None:
    """Prepare the source worktree, push it to Launchpad and kick off a build."""
    # Pull/update sources for project.
    build_dir = BaseDirectory.save_data_path("snapcraft", "remote-build", build_id)
    worktree = WorkTree(build_dir, project, package_all_sources=package_all_sources)
    lp.push_source_tree(worktree.prepare_repository())

    # Start building.
    lp.start_build()
    echo.info("If interrupted, resume with: 'snapcraft remote-build --recover'")
def install_mime_info(application, package_file):
    """Copy 'package_file' as ``~/.local/share/mime/packages/<application>.xml.``
    If package_file is None, install ``<app_dir>/<application>.xml``.
    If already installed, does nothing. May overwrite an existing file with the same name (if the contents are different)"""
    application += '.xml'
    with open(package_file) as f:
        new_data = f.read()

    # See if the file is already installed
    package_dir = os.path.join('mime', 'packages')
    resource = os.path.join(package_dir, application)
    for x in BaseDirectory.load_data_paths(resource):
        try:
            with open(x) as f:
                old_data = f.read()
        except:
            # Unreadable candidate: keep scanning (best-effort check).
            continue

        if old_data == new_data:
            return  # Already installed

    # Mark this module's MIME cache stale so it gets re-read.
    global _cache_uptodate
    _cache_uptodate = False

    # Not already installed; add a new copy
    # Create the directory structure...
    new_file = os.path.join(BaseDirectory.save_data_path(package_dir), application)

    # Write the file...
    with open(new_file, 'w') as f:
        f.write(new_data)

    # Update the database... (non-zero spawn status means failure)
    command = 'update-mime-database'
    if os.spawnlp(os.P_WAIT, command, command, BaseDirectory.save_data_path('mime')):
        # Roll back our copy so no stale package is left behind.
        os.unlink(new_file)
        raise Exception("The '%s' command returned an error code!\n" \
                        "Make sure you have the freedesktop.org shared MIME package:\n" \
                        "http://standards.freedesktop.org/shared-mime-info/" % command)
def install(self, archive: Optional[Path] = None) -> None:
    """Install this dataset.

    :param archive: The path to a zip archive containing the dataset.
    :raise DatasetInstallError: If no ``archive`` is provided and none is
        found in TP's cache.
    """
    if self.name in installed():
        return
    # Reload so BaseDirectory re-reads environment changes (e.g. in tests).
    importlib.reload(BaseDirectory)
    dst = Path(BaseDirectory.save_data_path(DATASETS_DIR), self.name)
    assert not dst.exists()

    # Decide on an archive to extract.
    if not archive:
        archive = Path(
            BaseDirectory.xdg_cache_home,
            ARCHIVES_DIR,
            self.archive,
        )
        if not archive.exists():
            raise exceptions.DatasetInstallError(
                'No archive explicitly provided, and none found at '
                f"{archive}. Can't install this dataset.")

    # Stage everything in a temp dir and only move into dst at the very
    # end, so a failure never leaves a half-installed dataset behind.
    tmp = tempfile.mkdtemp()
    try:
        # Extract archive.
        with zipfile.ZipFile(archive, 'r') as handle:
            handle.extractall(tmp)

        # Fix TwitterHandles.csv.
        infile_path = Path(tmp, 'TwitterHandles.csv')
        outfile_path = Path(tmp, 'FixedTwitterHandles.csv')
        with open(infile_path) as infile:
            with open(outfile_path, 'w') as outfile:
                self.munge_twitter_handles(infile, outfile)
        shutil.move(outfile_path, infile_path)

        # Fix ExtractedTweets.csv.
        infile_path = Path(tmp, 'ExtractedTweets.csv')
        outfile_path = Path(tmp, 'FixedExtractedTweets.csv')
        with open(infile_path) as infile:
            with open(outfile_path, 'w') as outfile:
                self.munge_extracted_tweets(infile, outfile)
        shutil.move(outfile_path, infile_path)

        # Install fixed files.
        shutil.move(tmp, dst)
    except:
        shutil.rmtree(tmp)
        raise
def install_mime_info(application, package_file):
    """Copy 'package_file' as ``~/.local/share/mime/packages/<application>.xml.``
    If package_file is None, install ``<app_dir>/<application>.xml``.
    If already installed, does nothing. May overwrite an existing file with the same name (if the contents are different)"""
    application += '.xml'
    # Use context managers so file handles are always closed
    # (the original leaked every open file object).
    with open(package_file) as f:
        new_data = f.read()

    # See if the file is already installed
    package_dir = os.path.join('mime', 'packages')
    resource = os.path.join(package_dir, application)
    for x in BaseDirectory.load_data_paths(resource):
        try:
            with open(x) as f:
                old_data = f.read()
        except Exception:
            # Unreadable candidate: keep scanning (best-effort check,
            # narrowed from the original bare `except:`).
            continue

        if old_data == new_data:
            return  # Already installed

    # Mark this module's MIME cache stale so it gets re-read.
    global _cache_uptodate
    _cache_uptodate = False

    # Not already installed; add a new copy
    # Create the directory structure...
    new_file = os.path.join(BaseDirectory.save_data_path(package_dir), application)

    # Write the file...
    with open(new_file, 'w') as f:
        f.write(new_data)

    # Update the database... (non-zero spawn status means failure)
    command = 'update-mime-database'
    if os.spawnlp(os.P_WAIT, command, command, BaseDirectory.save_data_path('mime')):
        # Roll back our copy so no stale package is left behind.
        os.unlink(new_file)
        raise Exception("The '%s' command returned an error code!\n" \
                        "Make sure you have the freedesktop.org shared MIME package:\n" \
                        "http://standards.freedesktop.org/shared-mime-info/" % command)
def loadconfig(confpath, resource):
    '''load conf or create default if it doesn't exist'''
    conf_file = os.path.join(confpath, resource + '.ini')
    cfg = configparser.ConfigParser()
    parsed = cfg.read(conf_file)
    if not parsed:
        # Empty or missing conf file: synthesize and persist the defaults.
        cfg['base'] = {
            'dumpdir': xdgbasedir.save_data_path(resource),
            'port': '7777'
        }
        with open(conf_file, 'w') as handle:
            cfg.write(handle)
    return cfg
def _install_fixture(self):
    """Install the "fixture" dataset.

    Short circuit if this dataset is already installed.

    :return: Nothing.
    """
    if self.installed():
        return
    fixture_dir = os.path.join(
        BaseDirectory.save_data_path(XDG_RESOURCE), self.name)
    os.mkdir(fixture_dir)
    # Write each of the four CSV files that make up the fixture.
    writers = (
        (_write_links_csv, 'links.csv'),
        (_write_movies_csv, 'movies.csv'),
        (_write_ratings_csv, 'ratings.csv'),
        (_write_tags_csv, 'tags.csv'),
    )
    for writer, basename in writers:
        writer(os.path.join(fixture_dir, basename))
def install(self):
    """Install this dataset.

    Short circuit if this dataset is already installed.

    :return: Nothing.
    """
    # The fixture dataset has its own dedicated installer.
    if self.name == 'fixture':
        self._install_fixture()
        return
    if self.installed():
        return
    # Unpack the downloaded archive straight into the XDG data dir.
    archive_path = self.download_path()
    target_dir = BaseDirectory.save_data_path(XDG_RESOURCE)
    with zipfile.ZipFile(archive_path, 'r') as archive:
        archive.extractall(target_dir)
def __init__(self, *, project, echoer, is_ephemeral: bool = False) -> None:
    """Record build context and derive instance/snap naming."""
    self.project = project
    self.echoer = echoer
    self._is_ephemeral = is_ephemeral

    info = project.info
    self.instance_name = "snapcraft-{}".format(info.name)

    # Versioned snaps embed the version in the artifact name.
    if info.version:
        self.snap_filename = "{}_{}_{}.snap".format(
            info.name, info.version, project.deb_arch)
    else:
        self.snap_filename = "{}_{}.snap".format(info.name, project.deb_arch)

    self.provider_project_dir = os.path.join(
        BaseDirectory.save_data_path("snapcraft"),
        "projects",
        self._get_provider_name(),
        info.name,
    )
def __init__(self):
    """Create database if needed."""
    base_directory = BaseDirectory.save_data_path('pic2map')
    db_filename = os.path.join(base_directory, 'location.db')
    # NOTE(review): Database.__init__ runs before the isfile() check below —
    # if it creates the file on connect, the "create table" branch can be
    # skipped on first run; confirm against Database's behaviour.
    Database.__init__(self, db_filename)
    if os.path.isfile(db_filename):
        # Existing database: reuse its location table.
        self.location_table = self['location']
    else:
        logger.debug('Creating location database %r...', db_filename)
        self.location_table = Table(
            'location',
            self.metadata,
            Column('filename', String, unique=True),
            Column('latitude', Float),
            Column('longitude', Float),
            Column('datetime', DateTime),
        )
        self.location_table.create()
def __init__(self):
    """Create database if needed."""
    base_directory = BaseDirectory.save_data_path('pic2map')
    db_filename = os.path.join(base_directory, 'location.db')
    # NOTE(review): Database.__init__ runs before the isfile() check below —
    # if it creates the file on connect, the "create table" branch can be
    # skipped on first run; confirm against Database's behaviour.
    Database.__init__(self, db_filename)
    if os.path.isfile(db_filename):
        # Existing database: reuse its location table.
        self.location_table = self['location']
    else:
        logger.debug('Creating location database %r...', db_filename)
        self.location_table = Table(
            'location',
            self.metadata,
            Column('filename', String, unique=True),
            Column('latitude', Float),
            Column('longitude', Float),
            Column('datetime', DateTime),
        )
        self.location_table.create()
def __init__(self, *, project, echoer, is_ephemeral: bool = False) -> None:
    """Record build context, pick a VM name and derive snap naming."""
    self.project = project
    self.echoer = echoer
    self._is_ephemeral = is_ephemeral

    # Once https://github.com/CanonicalLtd/multipass/issues/220 is
    # closed we can prepend snapcraft- again.
    self.instance_name = petname.Generate(2, "-")

    info = project.info
    self.project_dir = shlex.quote(info.name)

    if info.version:
        self.snap_filename = "{}_{}_{}.snap".format(
            info.name, info.version, project.deb_arch
        )
    else:
        self.snap_filename = "{}_{}.snap".format(info.name, project.deb_arch)

    self.provider_project_dir = os.path.join(
        BaseDirectory.save_data_path("snapcraft"), "projects", info.name
    )
def __init__(self, platform='default', skills_dir=None, repo=None,
             versioned=True):
    """Prepare the skills manager.

    Args:
        platform (str): platform identifier used when selecting skills.
        skills_dir (str): skills directory; defaults to the XDG data
            path for mycroft/skills.
        repo (SkillRepo): repository abstraction; a default instance is
            created when omitted.
        versioned (bool): whether skills are pinned to versions.
    """
    self.platform = platform
    self.skills_dir = (path.expanduser(skills_dir or '')
                       or BaseDirectory.save_data_path('mycroft/skills'))
    self.repo = repo or SkillRepo()
    self.versioned = versioned
    self.lock = MsmProcessLock()

    # Property placeholders (lazily populated caches)
    self._all_skills = None
    self._default_skills = None
    self._local_skills = None
    self._device_skill_state = None

    self.saving_handled = False
    self.device_skill_state_hash = ''
    # Serialize skills-data initialization across msm processes.
    with self.lock:
        self._init_skills_data()
def main(args=None):
    """Demo entry point: authorize with Dropbox and upload this script.

    Args:
        args: optional argv-style list; args[0] names a Python config
            file that is exec'd to obtain APP_KEY and APP_SECRET.
    """
    from os.path import basename
    from pprint import pprint
    from xdg import BaseDirectory

    # NOTE(review): exec'ing the config file runs arbitrary code — only
    # acceptable for trusted, user-owned config paths.
    try:
        if args:
            config = {}
            exec(compile(open(args[0]).read(), args[0], 'exec'), {}, config)
            APP_KEY = config.get('APP_KEY')
            APP_SECRET = config.get('APP_SECRET')
    except:
        # NOTE(review): if this fires (or args is empty), APP_KEY/APP_SECRET
        # are never bound and the check below raises NameError instead of
        # the intended sys.exit message — confirm intended behaviour.
        log.error("Could not read config file.")

    if not APP_KEY or not APP_SECRET:
        sys.exit("You need to set your APP_KEY and APP_SECRET!")

    logging.basicConfig(level=logging.DEBUG)

    name = 'pydodo'
    token_file = join(BaseDirectory.save_data_path(name), 'dropbox_token')
    client = DropboxClient(APP_KEY, APP_SECRET, name, token_file=token_file)
    if not client.authorized:
        client.login()

    if client.authorized:
        print("Login successful!")
        print("\n".join("%s: %s" % i for i in client.account_info().items()))

        # Upload this very script as a smoke test of put_file.
        with open(__file__, 'rb') as f:
            response = client.put_file(basename(__file__), f)
        print("uploaded:", response)

        metadata = client.metadata('/')
        print("App folder contents:")
        print("--------------------\n")
        pprint(metadata['contents'])
def install(self, archive: Optional[Path] = None) -> None:
    """Install this dataset.

    :param archive: **Ignored.**
    """
    if self.name in installed():
        return
    # Reload so BaseDirectory re-reads environment changes (e.g. in tests).
    importlib.reload(BaseDirectory)
    dst = Path(BaseDirectory.save_data_path(DATASETS_DIR), self.name)
    assert not dst.exists()

    # Stage the bundled fixture CSVs in a temp dir, then copy the tree
    # into place so a failure never leaves a half-installed dataset.
    tmp = tempfile.mkdtemp()
    try:
        for name in ('ExtractedTweets.csv', 'TwitterHandles.csv'):
            in_path = str(PurePath('static', 'simple-fixture', name))
            out_path = str(PurePath(tmp, name))
            with pkg_resources.resource_stream('tp', in_path) as in_handle:
                with open(out_path, 'wb') as out_handle:
                    shutil.copyfileobj(in_handle, out_handle)
        shutil.copytree(tmp, dst)
    except:
        shutil.rmtree(tmp)
        raise
def save_data_path():
    """Return (creating if needed) the XDG data dir for this application."""
    resource = Environement.resource
    return BaseDirectory.save_data_path(resource)
def build_directory(*args):
    """Return a path under the configured builds directory, joined with *args."""
    base = BaseDirectory.save_data_path('buildhck')
    builds = config.get('builds_directory', 'builds')
    return path.join(base, builds, *args)
def _create_data_directory(self) -> str:
    """Create (mode 0700) and return the launchpad provider data dir."""
    data_dir = BaseDirectory.save_data_path(
        "snapcraft", "provider", "launchpad"
    )
    # Restrictive permissions: the dir may hold Launchpad credentials.
    os.makedirs(data_dir, mode=0o700, exist_ok=True)
    return data_dir
__doc__ = """
Usage: jay [-h] [--setup-bash | --version] [INPUT ...]
       jay --autocomplete <current-position> <params>...

-h --help        show this
--setup-bash     setup `j` function and autocomplete for bash
--version        print current version
--autocomplete   provides autocompletion instead of just one matching dir
"""

__version__ = '0.1'

# XDG data dir for jay; save_data_path() creates it if missing.
JAY_XDG_DATA_HOME = BaseDirectory.save_data_path('jay')
# File recording recently used directories.
RECENT_IDX_FILENAME = join(JAY_XDG_DATA_HOME, 'recent')
IDX_FILENAME = join(JAY_XDG_DATA_HOME, 'index')  # index filename
IDX_MAX_SIZE = 100  # max number of entries in the index


class Jay(object):
    """Singleton of directories index"""
    # The single shared instance, created lazily by __new__.
    _instance = None

    def __new__(cls, *args, **kwargs):
        # Classic lazy singleton: create once, then always return it.
        # NOTE(review): forwarding *args/**kwargs to object.__new__ raises
        # TypeError on Python 3 when __init__ is not overridden — confirm
        # the target Python version.
        if not cls._instance:
            cls._instance = super(Jay, cls).__new__(cls, *args, **kwargs)
        return cls._instance
def destroy(self):
    """FUSE teardown hook: persist the in-memory state to state.yaml."""
    print('pbfuse destroy')
    state_file = path.join(BaseDirectory.save_data_path('pbfuse'), 'state.yaml')
    with open(state_file, 'w') as handle:
        yaml.dump(self._state, handle)
def get_config_file():
    """Return the full path of the user config file.

    The containing XDG data directory is created if it does not exist.
    """
    config_dir = BaseDirectory.save_data_path('codecook-bash.config')
    return os.path.join(config_dir, 'user.config')
#!/usr/bin/env python3
"""
Manage a set of disposable firefox profiles
"""
import os
import sys
import shutil
import tempfile
import subprocess

import xdg.BaseDirectory as bd

_APP_NAME = 'cleanfox'
_BROWSER_CMD = 'firefox'
# XDG data dir for this app; created on import if missing.
_XDG_DATA = bd.save_data_path(_APP_NAME)
_BASE_PROFILE = os.path.join(_XDG_DATA, 'base-profile')


def _init_base_profile():
    """ Create the profile to use as a basis for all others """
    subprocess.call([_BROWSER_CMD, '-CreateProfile', 'cleanfox ' + _BASE_PROFILE])


def _spawn_base_profile():
    """ launch the base profile for setup purposes """
    subprocess.call([_BROWSER_CMD, '-no-remote', '-profile', _BASE_PROFILE])
def initialize_globals():
    """Populate the global ConfigSingleton from FLAGS.

    Mutates FLAGS in place to fill in defaults (dropout rates, checkpoint
    and summary dirs, load mode), builds the shared TF session config, and
    derives all geometric/audio constants. Exits the process on invalid
    audio-window settings or a bad --one_shot_infer path.
    """
    c = AttrDict()

    # Set default dropout rates
    # Negative values mean "inherit from the base dropout_rate flag".
    if FLAGS.dropout_rate2 < 0:
        FLAGS.dropout_rate2 = FLAGS.dropout_rate
    if FLAGS.dropout_rate3 < 0:
        FLAGS.dropout_rate3 = FLAGS.dropout_rate
    if FLAGS.dropout_rate6 < 0:
        FLAGS.dropout_rate6 = FLAGS.dropout_rate

    # Set default checkpoint dir
    if not FLAGS.checkpoint_dir:
        FLAGS.checkpoint_dir = xdg.save_data_path(
            os.path.join('deepspeech', 'checkpoints'))

    # Unknown load modes silently fall back to 'auto'.
    if FLAGS.load not in ['last', 'best', 'init', 'auto', 'transfer']:
        FLAGS.load = 'auto'

    # Set default summary dir
    if not FLAGS.summary_dir:
        FLAGS.summary_dir = xdg.save_data_path(
            os.path.join('deepspeech', 'summaries'))

    # Standard session configuration that'll be used for all new sessions.
    c.session_config = tfv1.ConfigProto(
        allow_soft_placement=True,
        log_device_placement=FLAGS.log_placement,
        inter_op_parallelism_threads=FLAGS.inter_op_parallelism_threads,
        intra_op_parallelism_threads=FLAGS.intra_op_parallelism_threads,
        gpu_options=tfv1.GPUOptions(allow_growth=FLAGS.use_allow_growth))

    # CPU device
    c.cpu_device = '/cpu:0'

    # Available GPU devices
    c.available_devices = get_available_gpus(c.session_config)

    # If there is no GPU available, we fall back to CPU based operation
    if not c.available_devices:
        c.available_devices = [c.cpu_device]

    # Output alphabet: byte-level UTF-8 or an explicit alphabet file.
    if FLAGS.utf8:
        c.alphabet = UTF8Alphabet()
    else:
        c.alphabet = Alphabet(os.path.abspath(FLAGS.alphabet_config_path))

    # Geometric Constants
    # ===================
    # For an explanation of the meaning of the geometric constants, please refer to
    # doc/Geometry.md

    # Number of MFCC features
    c.n_input = 26  # TODO: Determine this programmatically from the sample rate

    # The number of frames in the context
    c.n_context = 9  # TODO: Determine the optimal value using a validation data set

    # Number of units in hidden layers
    c.n_hidden = FLAGS.n_hidden

    c.n_hidden_1 = c.n_hidden

    c.n_hidden_2 = c.n_hidden

    c.n_hidden_5 = c.n_hidden

    # LSTM cell state dimension
    c.n_cell_dim = c.n_hidden

    # The number of units in the third layer, which feeds in to the LSTM
    c.n_hidden_3 = c.n_cell_dim

    # Units in the sixth layer = number of characters in the target language plus one
    c.n_hidden_6 = c.alphabet.size() + 1  # +1 for CTC blank label

    # Size of audio window in samples
    # Window length must yield an integer sample count at this sample rate.
    if (FLAGS.feature_win_len * FLAGS.audio_sample_rate) % 1000 != 0:
        log_error(
            '--feature_win_len value ({}) in milliseconds ({}) multiplied '
            'by --audio_sample_rate value ({}) must be an integer value. Adjust '
            'your --feature_win_len value or resample your audio accordingly.'
            ''.format(FLAGS.feature_win_len, FLAGS.feature_win_len / 1000,
                      FLAGS.audio_sample_rate))
        sys.exit(1)

    c.audio_window_samples = FLAGS.audio_sample_rate * (FLAGS.feature_win_len / 1000)

    # Stride for feature computations in samples
    if (FLAGS.feature_win_step * FLAGS.audio_sample_rate) % 1000 != 0:
        log_error(
            '--feature_win_step value ({}) in milliseconds ({}) multiplied '
            'by --audio_sample_rate value ({}) must be an integer value. Adjust '
            'your --feature_win_step value or resample your audio accordingly.'
            ''.format(FLAGS.feature_win_step, FLAGS.feature_win_step / 1000,
                      FLAGS.audio_sample_rate))
        sys.exit(1)

    c.audio_step_samples = FLAGS.audio_sample_rate * (FLAGS.feature_win_step / 1000)

    if FLAGS.one_shot_infer:
        if not os.path.exists(FLAGS.one_shot_infer):
            log_error(
                'Path specified in --one_shot_infer is not a valid file.')
            sys.exit(1)

    # Publish the finished config on the singleton.
    ConfigSingleton._config = c  # pylint: disable=protected-access
### MISC TEXT FIELD VALUES AND TEMPLATES ###
# FIXME: almost all of these should go somewhere else as I refactor Fity
AUTOSAVE_INTERVAL = int(60 * 1)  # minutes between autosaves of the data file
GTK_DATE_TEXT_TEMPLATE = "%B %d, %Y"
DEFAULT_TIME_EST = 10.0
UNRECOGNIZED_DATE_TEXT = "(date unrecognized)"
# represents "there is no AOF assigned to this project", i.e.: "unfiled"
NO_AOF_ASSIGNED = "No AOF Assigned"
ENGAGE_TOTALS_TEMPLATE = "Tasks shown: {0} Total time: {1}h:{2}m"
ARCHIVED_SINGLETONS_TIME_TMPLT = "-%Y-%m-%d-%H:%M"
# Characters allowed when sanitizing names. string.ascii_lowercase replaces
# the Python-2-only, locale-dependent string.lowercase; under the default
# C locale they are identical, and this also works on Python 3.
SANITARY_CHARS = string.ascii_lowercase + string.digits + " "

### PATHS ###
HOME_DIR = os.path.expanduser("~")
APP_DATA_PATH = _find_app_data_path()
USER_DATA_PATH = BaseDirectory.save_data_path("fluidity")
LOG_FILE_PATH = os.path.join(USER_DATA_PATH, "fluidity_debug.log")
RECURRENCE_DATA = os.path.join(USER_DATA_PATH, "recurring_tasks.yaml")
USER_DATA_MAIN_FNAME = "fluidity.pkl"
USER_DATA_MAIN_FILE = os.path.join(USER_DATA_PATH, USER_DATA_MAIN_FNAME)
PROCESSED_STUFF_FILE_NAME = "processed_stuff.pkl"
BACKUPS_PATH = os.path.join(USER_DATA_PATH, "backups")
ARCHIVED_SINGLETONS_FNAME = "archived_singletons{0}.pkl"
# PROJECT SUPPORT FILE PATHS
READ_REVIEW_PATH = _get_read_review_path()
INBOX_FOLDER = os.path.join(HOME_DIR, "Inbox")
NOTE_SLIDER_FOLDER = os.path.join(USER_DATA_PATH, "slider-inbox")
MAIN_PRJ_SUPPORT_FOLDER = os.path.join(HOME_DIR, "Projects")
ACTIVE_FOLDER = os.path.join(MAIN_PRJ_SUPPORT_FOLDER, "Active")
COMPLETED_FOLDER = os.path.join(MAIN_PRJ_SUPPORT_FOLDER, "Completed")
import shutil
import pytoml
import ctypes
import platform
import sys

app_name = 'fatbot'

# define filesystem paths
from xdg import BaseDirectory
# Directory this package lives in (used to locate bundled example config).
package_dir = os.path.dirname(os.path.realpath(__file__))
# XDG locations; save_*_path create the directories if they don't exist.
config_dir = BaseDirectory.save_config_path(app_name)
data_dir = BaseDirectory.save_data_path(app_name)
cache_dir = BaseDirectory.save_cache_path(app_name)
#runtime_dir = BaseDirectory.get_runtime_dir(app_name) # XDG_RUNTIME_DIR undefined in systemd?
# Fall back to the cache dir as the runtime dir.
runtime_dir = cache_dir
config_file = os.path.join(config_dir, 'config.toml')

# load config file
# Seed the user config from the bundled example on first run.
if not os.path.isfile(config_file):
    shutil.copyfile(os.path.join(package_dir, 'examples', 'config.toml'),
                    config_file)
with open(config_file) as config_file_object:
    # Parsed TOML settings, exposed module-wide.
    settings = pytoml.load(config_file_object)
from xdg import BaseDirectory
import configparser
import os
import sys
from pprint import pprint

# Configuring the config filename
# App name is the top-level package name (e.g. 'planer' from 'planer.daemon').
_appname = (__package__ or 'planer.daemon').split('.')[0]
_config_file = _appname + '.conf'
# Bundled defaults live next to this module under the same filename.
_defaults_file = os.path.join(os.path.dirname(__file__), _config_file)

# Read default and user configurations
config = configparser.ConfigParser()
with open(_defaults_file) as f:
    config.read_file(f, "defaults")
# User configs from every XDG config dir override the defaults; read()
# accepts the generator of candidate paths and skips missing files.
config.read(os.path.join(path, _config_file)
            for path in BaseDirectory.load_config_paths(_appname))

# Database is absolute or relative to the xdg data home.
_database_file = config['daemon']['database file']
_database_file = os.path.expanduser(_database_file)
if not os.path.isabs(_database_file):
    # Anchor relative paths in the XDG data dir (created if missing).
    config['daemon']['database file'] = os.path.join(
        BaseDirectory.save_data_path(_appname),
        config['daemon']['database file'])
def main():
    """CLI entry point for apt-venv.

    Parses arguments, then either lists installed venvs, deletes one,
    updates one, runs a command in one, or drops into its interactive
    shell. Exits non-zero on error or when nested inside a venv session.
    """
    parser = _ArgumentParser(prog=__appname__)
    parser.add_argument('-D', '--debug', type=int, help='set debug level')
    parser.add_argument('-v', '--version', action='version',
                        version='%(prog)s ' + _VERSION)
    parser.add_argument(
        '-c', '--command',
        help="exec the given command instead of entry the interactive shell")
    parser.add_argument(
        '-d', '--delete', action="store_true", default=False,
        help="delete venv for release")
    parser.add_argument(
        '-l', '--list', action="store_true",
        help="list all venv installed in your system")
    parser.add_argument(
        '-u', '--update', action="store_true",
        help="update the apt indexes")
    parser.add_argument(
        'release', nargs='?',
        help="the debian/ubuntu release")
    args = parser.parse_args()

    # Sessions export APT_VENV; refuse to nest.
    if 'APT_VENV' in _os.environ:
        print("You can't run apt-venv inside apt-venv session")
        exit(1)

    if args.debug:
        _utils.DEBUG_LEVEL = args.debug

    if args.list:
        # Each subdirectory of the XDG data dir is one installed venv.
        data_path = _BaseDirectory.save_data_path(__appname__)
        dirs = _os.listdir(data_path)
        if len(dirs) > 0:
            print("Installed apt-venv:\n %s" % "\n ".join(dirs))
            exit(0)
        else:
            print("There is no apt-venv on your system")
            exit(1)

    try:
        # May raise ValueError for an unknown release name.
        venv = _AptVenv(args.release)
        if args.delete:
            venv.delete()
        else:
            # Create on first use, then greet the user.
            if not venv.exists():
                venv.create()
                print(
                    "Welcome to apt virtual environment for {} release."
                    .format(venv.release))
                print(
                    "All the configuration is available in {}"
                    .format(venv.config_path))
                print("You may want run first \"apt-get update\"")
            if args.update:
                venv.update()
            else:
                venv.run(command=args.command)
    except ValueError as exception:
        print(str(exception))
        exit(1)
def get_state_dir(cls) -> str:
    """Return the XDG data directory for egt state, creating it if missing."""
    state_dir = BaseDirectory.save_data_path('egt')
    return state_dir
def initialize_globals(): c = AttrDict() # CPU device c.cpu_device = '/cpu:0' # Available GPU devices c.available_devices = get_available_gpus() # If there is no GPU available, we fall back to CPU based operation if not c.available_devices: c.available_devices = [c.cpu_device] # Set default dropout rates if FLAGS.dropout_rate2 < 0: FLAGS.dropout_rate2 = FLAGS.dropout_rate if FLAGS.dropout_rate3 < 0: FLAGS.dropout_rate3 = FLAGS.dropout_rate if FLAGS.dropout_rate6 < 0: FLAGS.dropout_rate6 = FLAGS.dropout_rate # Set default checkpoint dir if not FLAGS.checkpoint_dir: FLAGS.checkpoint_dir = xdg.save_data_path(os.path.join('deepspeech', 'checkpoints')) if FLAGS.load not in ['last', 'best', 'init', 'auto']: FLAGS.load = 'auto' # Set default summary dir if not FLAGS.summary_dir: FLAGS.summary_dir = xdg.save_data_path(os.path.join('deepspeech', 'summaries')) # Standard session configuration that'll be used for all new sessions. c.session_config = tf.ConfigProto(allow_soft_placement=True, log_device_placement=FLAGS.log_placement, inter_op_parallelism_threads=FLAGS.inter_op_parallelism_threads, intra_op_parallelism_threads=FLAGS.intra_op_parallelism_threads) c.alphabet = Alphabet(os.path.abspath(FLAGS.alphabet_config_path)) # Geometric Constants # =================== # For an explanation of the meaning of the geometric constants, please refer to # doc/Geometry.md # Number of MFCC features c.n_input = 26 # TODO: Determine this programmatically from the sample rate # The number of frames in the context c.n_context = 9 # TODO: Determine the optimal value using a validation data set # Number of units in hidden layers c.n_hidden = FLAGS.n_hidden c.n_hidden_1 = c.n_hidden c.n_hidden_2 = c.n_hidden c.n_hidden_5 = c.n_hidden # LSTM cell state dimension c.n_cell_dim = c.n_hidden # The number of units in the third layer, which feeds in to the LSTM c.n_hidden_3 = c.n_cell_dim # Units in the sixth layer = number of characters in the target language plus one c.n_hidden_6 = 
c.alphabet.size() + 1 # +1 for CTC blank label # Size of audio window in samples c.audio_window_samples = FLAGS.audio_sample_rate * (FLAGS.feature_win_len / 1000) # Stride for feature computations in samples c.audio_step_samples = FLAGS.audio_sample_rate * (FLAGS.feature_win_step / 1000) if FLAGS.one_shot_infer: if not os.path.exists(FLAGS.one_shot_infer): log_error('Path specified in --one_shot_infer is not a valid file.') exit(1) ConfigSingleton._config = c # pylint: disable=protected-access
def get_data_home():
    """
    Directory where data is to be saved

    Guaranteed to exist
    """
    data_home = base.save_data_path(PACKAGE_NAME)
    return data_home
def __init__(self, url=None, branch=None):
    """Set up the local skills-repo path and the remote to track.

    Falls back to the official MycroftAI repo and the 20.08 branch when
    url/branch are not given.
    """
    data_dir = BaseDirectory.save_data_path('mycroft')
    self.path = join(data_dir, 'skills-repo')
    if url:
        self.url = url
    else:
        self.url = "https://github.com/MycroftAI/mycroft-skills"
    self.branch = branch if branch else "20.08"
    self.repo_info = {}
def get_user_data_dir():
    """Return the gnome-clocks XDG user data directory, creating it if needed."""
    data_dir = BaseDirectory.save_data_path("gnome-clocks")
    return data_dir