Example no. 1
def _get_new_directories(platform_type):
    directories = {}
    if platform_type == ANDROID:
        directories['data'] = '%s/lbrynet' % android_app_internal_storage_dir()
        directories['lbryum'] = '%s/lbryum' % android_app_internal_storage_dir()
        directories['download'] = '%s/Download' % android_internal_storage_dir()
    elif platform_type == WINDOWS:
        directories['data'] = user_data_dir('lbrynet', 'lbry')
        directories['lbryum'] = user_data_dir('lbryum', 'lbry')
        directories['download'] = get_path(FOLDERID.Downloads, UserHandle.current)
    elif platform_type == DARWIN:
        directories = _get_old_directories(platform_type)
    elif platform_type == LINUX:
        directories['data'] = user_data_dir('lbry/lbrynet')
        directories['lbryum'] = user_data_dir('lbry/lbryum')
        try:
            with open(os.path.join(user_config_dir(), 'user-dirs.dirs'), 'r') as xdg:
                down_dir = re.search(r'XDG_DOWNLOAD_DIR=(.+)', xdg.read()).group(1)
                down_dir = re.sub(r'\$HOME', os.getenv('HOME'), down_dir)
                directories['download'] = re.sub(r'"', '', down_dir)
        except EnvironmentError:
            directories['download'] = os.getenv('XDG_DOWNLOAD_DIR')

        if not directories['download']:
            directories['download'] = os.path.expanduser('~/Downloads')
    else:
        raise ValueError('unknown platform value')
    return directories
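For context, the LINUX branch above parses the standard xdg-user-dirs configuration file; a typical entry in ~/.config/user-dirs.dirs looks like the line below, which is why the code strips the surrounding quotes and expands $HOME:

XDG_DOWNLOAD_DIR="$HOME/Downloads"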
Example no. 2
    def __init__(self):
        """Creates a new instance of PyGlassApplication."""
        QtCore.QObject.__init__(self)
        self._qApplication = None
        self._window       = None
        self._splashScreen = None

        # Sets a temporary standard out and error for deployed applications in a write allowed
        # location to prevent failed write results.
        if PyGlassEnvironment.isDeployed:
            if appdirs:
                userDir = appdirs.user_data_dir(self.appID, self.appGroupID)
            else:
                userDir = FileUtils.createPath(
                    os.path.expanduser('~'), '.pyglass', self.appGroupID, self.appID, isDir=True)

            path = FileUtils.createPath(
                userDir,
                self.appID + '_out.log', isFile=True)
            folder = FileUtils.getDirectoryOf(path, createIfMissing=True)
            sys.stdout = open(path, 'w+')

            path = FileUtils.createPath(
                userDir,
                self.appID + '_error.log',
                isFile=True)
            folder = FileUtils.getDirectoryOf(path, createIfMissing=True)
            sys.stderr = open(path, 'w+')

        PyGlassEnvironment.initializeAppSettings(self)
Example no. 3
    def set_data_to_stored(self, url, text, pwd):

        pwd_data = plistlib.Data(pwd)
        if not os.path.exists(user_data_dir("Drag&Press")):
            os.makedirs(user_data_dir("Drag&Press"))

        if sys.platform == 'darwin':
            plistlib.writePlist([url, text, pwd_data], user_data_dir("Drag&Press/Preferences", "ludovicl"))
        elif sys.platform == 'win32':
            if not os.path.exists(user_data_dir("Drag&Press")):
                os.makedirs(user_data_dir("Drag&Press"))
Example no. 4
 def __init__(self, account_id='default'):
     """init."""
     # Define the temp directory and file name format
     configDir = appdirs.user_data_dir('PyCMDS', 'WrightGroup')
     if not os.path.isdir(configDir):
         os.makedirs(configDir)
     prefix = 'google-drive-'
     suffix = '-' + account_id + '.txt'
     # Check for existing file
     lis = glob(os.path.join(configDir, prefix + "*" + suffix))
     self.mycreds_path = ''
     if len(lis) > 0:
         for f in lis:
             # Check for read and write access (the bitwise OR checks both)
             # Note this check is probably not needed with appdirs, but is not
             #     harmful and provides additional insurance against crashes.
             if os.access(f, os.W_OK | os.R_OK):
                 self.mycreds_path = f
                 break
     # Make a new file if one does not exist with sufficient permissions
     if self.mycreds_path == '':
         self.mycreds_path = tempfile.mkstemp(prefix=prefix,
                                              suffix=suffix,
                                              text=True,
                                              dir=configDir)[1]
     self._authenticate()
Example no. 5
    def __init__(self, stringToFind):
        self.stringToFind = stringToFind
        self.change_quotes()
        configDir = appdirs.user_data_dir('plover', 'plover')
        configFile = os.path.join(configDir, "plover.cfg")
        if self.dictNames == []:
            try:
                infile = open(configFile, 'r')
            except:
                print("No plover.cfg found")
                exit(1)

            for line in infile:
                if line.find('dictionary_file') == 0:
                    start_of_path = line.find(' = ') + 3
                    self.dictFileList.append(str(line[start_of_path:-1]))
            infile.close()

            if self.dictFileList == []:
                print("No dictionaries defined")
                exit(1)

            for dict in self.dictFileList:
                self.dictNames.append(dict[dict.rfind("/") + 1:-5])
                with open(dict, 'r') as fp:
                    self.dictList.append(json.load(fp))
Example no. 6
def datapath():
    '''Returns the path where app data is to be installed.'''
    import appdirs
    if iamroot():
        return appdirs.site_data_dir(appname, appauthor)
    else:
        return appdirs.user_data_dir(appname, appauthor)
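The iamroot() helper used above is not part of this snippet; a minimal sketch, assuming a POSIX system where an effective UID of 0 means root, could look like this:

import os

def iamroot():
    # True when the process runs with root privileges (assumed helper, not shown in the source).
    return hasattr(os, 'geteuid') and os.geteuid() == 0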
Example no. 7
def get_app_dir():
    d = appdirs.user_data_dir("sapphire", "SapphireOpenSystems")

    if not os.path.exists(d):
        os.makedirs(d)
    
    return d
Example no. 8
    def __init__(self, db_dir=None, db_name='metadata.db'):
        """ Gets datagristle config, and creates db objects if necessary.
        """
        logging.basicConfig(filename='/tmp/datagristle_metadata.log')
        logging.getLogger('sqlalchemy.engine').setLevel(logging.DEBUG)

        if db_dir is None:
            user_data_dir = appdirs.user_data_dir('datagristle')
        else:
            user_data_dir = db_dir
        if not os.path.exists(user_data_dir):
            print 'data dir (%s) missing - it will be created' % user_data_dir
            os.makedirs(user_data_dir)

        self.fqdb_name  = os.path.join(user_data_dir, db_name)
        self.engine     = create_engine('sqlite:////%s' % self.fqdb_name)
        def _fk_pragma_on_connect(dbapi_con, con_record):
            """ turns foreign key enforcement on"""
            dbapi_con.execute('pragma foreign_keys=ON')

        event.listen(self.engine, 'connect', _fk_pragma_on_connect)

        self.engine.echo    = False

        self.metadata = MetaData(self.engine)
        self.create_db_tables_declaratively()
Example no. 9
    def __init__(self, db_dir=None, db_name='metadata.db'):
        """ Gets datagristle config, and creates db objects if necessary.
        """
        if db_dir is None:
            user_data_dir = appdirs.user_data_dir('datagristle')
        else:
            user_data_dir = db_dir
        if not os.path.exists(user_data_dir):
            print 'data dir (%s) missing - it will be created' % user_data_dir
            os.makedirs(user_data_dir)

        #class FKListener(PoolListener):
        #    def connect(self, dbapi_con, con_record):
        #        db_cursor = dbapi_con.execute('pragma foreign_keys=ON')

        self.fqdb_name  = os.path.join(user_data_dir, db_name)
        self.db         = create_engine('sqlite:////%s' % self.fqdb_name, 
                                           logging_name='/tmp/gristle_sql.log')
        def _fk_pragma_on_connect(dbapi_con, con_record):
            """ turns foreign key enforcement on"""
            dbapi_con.execute('pragma foreign_keys=ON')

        event.listen(self.db, 'connect', _fk_pragma_on_connect)

        self.db.echo    = False

        self.metadata = MetaData(self.db)
        self.create_db_tables_declaratively()
Example no. 10
def get_glottolog_data(datatype, release):
    """
    Lookup or download data from Glottolog.

    :param datatype: 'newick'|'geo'
    :param release: Glottolog release number >= '2.4'
    :return: the path of the data file
    """
    path_spec = {
        'newick': ('glottolog-{0}.newick', 'tree-glottolog-newick.txt'),
        'geo': ('glottolog-{0}-geo.csv', 'languages-and-dialects-geo.csv'),
    }
    fname_pattern, fname_source = path_spec[datatype]
    fname = fname_pattern.format(release)
    path = os.path.join(os.path.dirname(__file__), 'data', fname)
    if not os.path.exists(path):
        data_dir = user_data_dir('beastling')
        if not os.path.exists(data_dir):
            os.makedirs(data_dir)
        path = os.path.join(data_dir, fname)
        if not os.path.exists(path):
            try:
                URLopener().retrieve(
                    'http://glottolog.org/static/download/{0}/{1}'.format(
                        release, fname_source),
                    path)
            except (IOError, ValueError):
                raise ValueError(
                    'Could not retrieve %s data for Glottolog %s' % (datatype, release))
    return path
Example no. 11
def determineHomeFolder(name):
  ''' Determine process's user's home directory.
      No need to run on every Configr object creation, as it is assumed to be static throughout one configuration state lifetime.
      If any of the environment variables have been modified by the same process, call the function again.
      name: application name to use
      returns: None
      Side effect: sets module-global "home" variable
  '''
  try:
    import appdirs  # optional dependency which already solves some problems for us
    home["value"] = appdirs.user_data_dir(name, "configr")  # app/author
  except:
    try:  # get user home regardless of currently set environment variables
      from win32com.shell import shell, shellcon
      home["value"] = shell.SHGetFolderPath(0, shellcon.CSIDL_PROFILE, None, 0)
    except:
      try:  # unix-like native solution ignoring environment variables
        from pwd import getpwuid
        home["value"] = getpwuid(os.getuid()).pw_dir
      except:  # now try standard approaches
        home["value"] = os.getenv("USERPROFILE")  # for windows only
        if home["value"] is None: home["value"] = os.expanduser("~")  # recommended cross-platform solution, but could refer to a mapped network drive on Windows
  if home["value"] is None: raise Exception("Cannot reliably determine user's home directory, please file a bug report at https://github.com/ArneBachmann/configr")
  debug("Determined home folder: %s" % home["value"])
  return home["value"]  # HINT this return is only for convenience and shouldn't be used by user code
Example no. 12
        def getui():
            def download_ui(dest_dir):
                url = urlopen("https://rawgit.com/lbryio/lbry-web-ui/master/dist.zip")
                z = ZipFile(StringIO(url.read()))
                z.extractall(dest_dir)
                return defer.succeed(dest_dir)

            data_dir = user_data_dir("LBRY")
            version_dir = os.path.join(data_dir, "ui_version_history")

            git_version = subprocess.check_output("git ls-remote https://github.com/lbryio/lbry-web-ui.git | grep HEAD | cut -f 1", shell=True)

            if not os.path.isdir(data_dir):
                os.mkdir(data_dir)

            if not os.path.isdir(os.path.join(data_dir, "ui_version_history")):
                os.mkdir(version_dir)

            if not os.path.isfile(os.path.join(version_dir, git_version)):
                try:
                    f = open(os.path.join(version_dir, git_version), "w")
                    version_message = "Updating UI " + str(datetime.now())
                    f.write(version_message)
                    f.close()
                except:
                    LBRYNotify("You should have been notified to install xcode command line tools, once it's installed you can start LBRY")
                    sys.exit(0)

                if os.path.isdir(os.path.join(data_dir, "lbry-web-ui")):
                    os.rmdir(os.path.join(data_dir, "lbry-web-ui"))

            if os.path.isdir(os.path.join(data_dir, "lbry-web-ui")):
                return defer.succeed(os.path.join(data_dir, "lbry-web-ui"))
            else:
                return download_ui((os.path.join(data_dir, "lbry-web-ui")))
Example no. 13
    def __init__(self):
        self.enabled = False
        # The main OTP directory, e.g. ~/.local/share/open-telemetry-project
        self.otp_path = None
        # The directory we use for storing things, e.g.
        #    ~/.local/share/open-telemetry-project/otp-python/$HOSTNAME/
        self.data_path = None

        # {project_id:
        #   {key:
        #     {value: counter}
        self.stats = defaultdict(lambda:
                                 defaultdict(lambda:
                                             defaultdict(float)))

        if DISABLE_ENVVAR in os.environ:
            return

        user_data_path = appdirs.user_data_dir()
        if not os.path.isdir(user_data_path):
            return

        self.otp_path = os.path.join(user_data_path, DIRECTORY_NAME)
        # If the otp_path doesn't even exist, then the user has definitely not
        # consented.
        if not os.path.isdir(self.otp_path):
            return

        # if we do not own the directory, then bail out
        # (the original leaves a placeholder here; a plausible ownership check follows)
        if os.stat(self.otp_path).st_uid != os.getuid():
            return

        self.otp_python_dir = os.path.join(self.otp_path,
                                           "otp-python",
                                           socket.gethostname())
Example no. 14
 def __init__(self, current={}):
     """ Generates a config file with necessary data for gitberg
     """
     self.answers = {}
     self.current = current
     if not self.current.get('library_path', ''):
         current['library_path'] = user_data_dir('gitberg', 'Free Ebook Foundation')
Example no. 15
    def __init__(self):
        data_dir = user_data_dir('sky3ds', 'Aperture Laboratories')
        template_txt = os.path.join(data_dir, 'template.txt')

        file_name = tkFileDialog.askopenfile( initialdir = os.path.expanduser('~/Desktop'), filetypes=[ ("Text files","*.txt")] )

        if file_name:
            try:
                new_template = file_name.read()
                write_template = open(template_txt, 'w')
                write_template.write(new_template)

                file_name.close()
                write_template.close()

                tkMessageBox.showinfo("Template Updated", "Template.txt updated successfully")

            except:
                raise Exception("Template.txt could not be updated")

            try:
                titles.convert_template_to_json()
            except:
                raise Exception("Template.txt could not converted to JSON. Please verify that your template.txt is not corrupt.")
        else:
            return
Example no. 16
def data_dir():
    '''Background storage location'''
    try:
        from appdirs import user_data_dir
        return user_data_dir(__app_name__, __author__)
    except ImportError:
        return './backgrounds'
Example no. 17
def main():
    args = docopt.docopt(__doc__)
    config_dir = appdirs.user_data_dir("RSS-filter", "U2Ft")
    config, filters = check_config(config_dir)
    logging.basicConfig(filename=os.path.join(config_dir, "RSS-filter.log"), level=logging.INFO,
                        datefmt="%Y-%m-%d %H:%M:%S", format="%(asctime)s: %(message)s")

    # silence requests.packages.urllib3's logging of every connection at level INFO
    requests_logger = logging.getLogger("requests.packages.urllib3")
    requests_logger.setLevel(logging.WARNING)

    if isinstance(filters, tuple) or args["--edit"]:
        edit_filters(filters, config_dir)
        exit(4)

    feedbin = Feedbin(config["username"], config["password"])

    if args["--list"]:
        list_feeds(feedbin)
        exit(0)

    feed_count, item_count = feedbin.apply_filters(filters, args["--starred"])
    msg = "{} matching {} {} found in {} matching {}."
    msg = msg.format(item_count,
                     "entry" if item_count == 1 else "entries",
                     "was" if item_count == 1 else "were",
                     feed_count,
                     "feed" if feed_count == 1 else "feeds")
    logging.info(msg)
    print "\n{}".format(msg)
Example no. 18
File: db.py Project: labordus/aquaf
def DB2JSON():
    path = appdirs.user_data_dir('aquaf', False, False, False)
    filepath = os.path.join(path, 'aquaf.json')
    dbpath = path_to_db()
    connection = sqlite3.connect(dbpath)
    cursor = connection.cursor()
    cursor.execute("PRAGMA foreign_keys = ON")
    cursor.execute("SELECT * FROM tblLink")
    rows = cursor.fetchall()
    if len(rows) == 0:  # No data? Return False
        return False

    rowarray_list = []
    for row in rows:
        t = str((row[1]))
        rowarray_list.append({"link": t})
    j = json.dumps({'items': rowarray_list}, indent=2, separators=(',', ': '))

    try:
        fp = open(filepath, "w")
    except IOError:
        # If not exists, create the file
        fp = open(filepath, "w+")
    fp.write(j)
    fp.close()
    connection.close()

    return True
Example no. 19
 def __enter__(self):
     if readline_loaded:
         if readline.__doc__ and 'libedit' in readline.__doc__:
             readline.parse_and_bind("bind '\t' rl_complete")  # Enable tab completions on MacOS
         else:
             readline.parse_and_bind("tab: complete")  # and on other OSs
         readline.parse_and_bind("set completion-ignore-case on")
         readline.parse_and_bind("set show-all-if-ambiguous on")
         readline.parse_and_bind("set completion-map-case on")
         readline.parse_and_bind("set show-all-if-unmodified on")
         readline.parse_and_bind("set expand-tilde on")
         history_file_dir = appdirs.user_data_dir(self.this_program_name, self.this_program_name)
         os.makedirs(history_file_dir, exist_ok=True)
         self.history_file_path = os.path.join(history_file_dir, "." + self.this_program_name + "_console_history")
         try:
             readline.read_history_file(self.history_file_path)
         except Exception:  # Corrupt or non-existent history file might raise an exception
             try:
                 os.remove(self.history_file_path)
             except Exception:
                 pass  # if removing the file also fails - just ignore it
     if colorama_loaded:
         colorama.init()
     self.prompt = self.this_program_name + ": "
     self.save_dir = os.getcwd()
     return self
Example no. 20
 def user_data_dir(self):
     """Return ``user_data_dir``."""
     directory = appdirs.user_data_dir(self.appname, self.appauthor,
                          version=self.version, roaming=self.roaming)
     if self.create:
         self._ensure_directory_exists(directory)
     return directory
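The _ensure_directory_exists() helper is referenced but not included in this excerpt; a plausible minimal implementation (an assumption, not the project's actual code) is:

import os

def _ensure_directory_exists(self, directory):
    # Create the directory tree if it is missing; do nothing if it already exists.
    os.makedirs(directory, exist_ok=True)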
Example no. 21
def get_dbhash_file_path(): 
    if default_dbhash_file: return default_dbhash_file
    try: 
        dbhash_file = os.path.join(config.DATA_DIR, "dbhashes.txt")
    except: 
        dbhash_file = os.path.join(appdirs.user_data_dir(appauthor='Counterparty', appname='counterblockd', roaming=True), "dbhashes.txt")
    return dbhash_file
Example no. 22
 def __init__(self, name):
     self.config = RawConfigParser()
     self.file_opts = {}
     if sys.version_info[0] >= 3:
         self.file_opts['encoding'] = 'utf-8'
     if hasattr(appdirs, 'user_config_dir'):
         data_dir = appdirs.user_config_dir('photini')
     else:
         data_dir = appdirs.user_data_dir('photini')
     if not os.path.isdir(data_dir):
         os.makedirs(data_dir, mode=0o700)
     self.file_name = os.path.join(data_dir, '%s.ini' % name)
     if name == 'editor':
         for old_file_name in (os.path.expanduser('~/photini.ini'),
                               os.path.join(data_dir, 'photini.ini')):
             if os.path.exists(old_file_name):
                 self.config.read(old_file_name, **self.file_opts)
                 self.save()
                 os.unlink(old_file_name)
     self.config.read(self.file_name, **self.file_opts)
     self.timer = QtCore.QTimer()
     self.timer.setSingleShot(True)
     self.timer.setInterval(3000)
     self.timer.timeout.connect(self.save)
     self.has_section = self.config.has_section
Example no. 23
def DB2JSONONGEBUIKT():
    path = appdirs.user_data_dir('aquaf', False, False, False)
    filepath = os.path.join(path, 'aquaf.json')
    connection = sqlite3.connect('/home/kelp/.local/share/aquaf/aquaftest.db')
    cursor = connection.cursor()
    cursor.execute("select linkURL from tblLink")
    rows = cursor.fetchall()
    voortext = '''{ "items": [
'''
    linktext = ""
    for row in rows:
        linktext = linktext + '''
        {
      "link":"%s"
    }
,''' % row[0]

    achtertext = '''
]}
'''
    text = voortext + linktext[:-1] + achtertext

    try:
        fp = open(filepath, "w")
    except IOError:
        # If not exists, create the file
        fp = open(filepath, "w+")
    fp.write(text)
    fp.close()
    connection.close()
Example no. 24
def main(argv=None, prog=None, **kwargs):
    """reflectme main entry point"""

    database_location = path.join(user_data_dir('reflectme', 'clavery'), 'database.db')

    parser = argparse.ArgumentParser(description='Create an HTTP server to record and\
                                     respond to requests.')
    parser.add_argument('host', type=str, default='0.0.0.0', nargs='?',
                        help='host to listen on (default: %(default)s)')
    parser.add_argument('port', type=int, default=5000, nargs='?',
                        help='host to listen on (default: %(default)s)')
    parser.add_argument('--database', dest='database', type=str, default=database_location,
                        help='sqlite database location (default: %(default)s)')
    parser.add_argument('--debug', dest='debug', action='store_true',
                        help='run web server in debug mode')
    parser.add_argument('--reset', dest='reset', action='store_true',
                        help='reset database (clear all records)',)
    parser.set_defaults(host='0.0.0.0', port=5000,
                        database=database_location,
                        debug=False,
                        reset=False)

    args = parser.parse_args(argv)

    norm_path = path.abspath(args.database)
    if not path.exists(path.dirname(norm_path)):
        os.makedirs(path.dirname(norm_path))

    if args.reset:
        os.remove(norm_path)

    app = create_app(database=norm_path)
    app.run(host=args.host, port=args.port, debug=args.debug)
Example no. 25
    def onclickselectjson(self, event):
        import appdirs
        if (sys.platform.startswith('win')):  # then win32 or win64
            standaarddir = "C:\Program Files\AquaforumUploader"
        else:  # posix
            standaarddir = "@HOME/.local/share"  # this does not work

        dlg = wx.FileDialog(
            self, message="Selecteer images.json",
            #            defaultDir=os.getcwd(),
            defaultDir=standaarddir,
            defaultFile="",
            wildcard='images.json',
            style=wx.OPEN
        )

        if dlg.ShowModal() == wx.ID_OK:
            oudejson = dlg.GetPath()
            # just to be safe, quickly check the chosen file..
            head, tail = os.path.split(oudejson)
            if tail != 'images.json':
                print 'verkeerd bestand gekozen'
            else:
                path = appdirs.user_data_dir('aquaf', False, False, False)
                #    check_path_exists(os.path.join(path, 'aquaf.db'))
                filepath = os.path.join(path, 'aquaf.json')
                with open(oudejson) as f:
                    with open(filepath, "w") as f1:
                        for line in f:
                            #                            if "]}" in line:
                            #                            f1.write(rstrip(line))
                            f1.write(line)
                self.m_staticText4.Label = 'Data is geimporteerd, je kunt dit venster nu afsluiten'
# else: # wx.ID_CANCEL
        dlg.Destroy()
Example no. 26
 def _storeConfig(self):
     data_dir = user_data_dir(appname, appauthor)
     f = os.path.join(data_dir, configFile)
     # print("Your configuration file is located at " + f)
     self.mkdir_p(data_dir)
     with open(f, 'w') as fp:
         json.dump(self.store, fp)
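The mkdir_p() helper used above is not shown in this excerpt; a common implementation of the `mkdir -p` pattern (assumed here, not taken from the source) is:

import errno
import os

def mkdir_p(self, path):
    # Create the directory tree, tolerating the case where it already exists.
    try:
        os.makedirs(path)
    except OSError as exc:
        if exc.errno != errno.EEXIST:
            raise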
Example no. 27
    def run(self):
        import appdirs

        old_data_dir = appdirs.user_config_dir(appauthor='Counterparty', appname='counterpartyd', roaming=True)
        old_database = os.path.join(old_data_dir, 'counterpartyd.9.db')
        old_database_testnet = os.path.join(old_data_dir, 'counterpartyd.9.testnet.db')

        new_data_dir = appdirs.user_data_dir(appauthor=config.XCP_NAME, appname=config.APP_NAME, roaming=True)
        new_database = os.path.join(new_data_dir, '{}.db'.format(config.APP_NAME))
        new_database_testnet = os.path.join(new_data_dir, '{}.testnet.db'.format(config.APP_NAME))

        # User has an old version of `counterpartyd`
        if os.path.exists(old_data_dir):
            # Move database
            if not os.path.exists(new_data_dir):
                os.makedirs(new_data_dir)
                files_to_copy = {
                    old_database: new_database,
                    old_database_testnet: new_database_testnet
                }
                for src_file in files_to_copy:
                    if os.path.exists(src_file):
                        dest_file = files_to_copy[src_file]
                        print('Copy {} to {}'.format(src_file, dest_file))
                        shutil.copy(src_file, dest_file)
Example no. 28
def _get_app_cfg_paths(appname, run_as_user):
    import appdirs #installed earlier
    cfg_path = os.path.join(
        appdirs.user_data_dir(appauthor='SFRDirect', appname=appname, roaming=True) \
            if os.name == "nt" else ("%s/.config/%s" % (os.path.expanduser("~%s" % run_as_user), appname)), 
        "%s.conf" % appname.replace('-testnet', ''))
    data_dir = os.path.dirname(cfg_path)
    return (data_dir, cfg_path)
Example no. 29
 def __init__(self):
     if not ConfigStore.store:
         print "ConfigStoreShelve.__init__"
         self.app_data_dir = user_data_dir('kotori', 'elmyra')
         if not os.path.exists(self.app_data_dir):
             os.makedirs(self.app_data_dir)
         self.config_file = os.path.join(self.app_data_dir, 'config')
         ConfigStore.store = shelve.open(self.config_file, writeback=True)
Example no. 30
 def dumpUser(self):
     # we don't use the message...
     with open(os.path.join(user_data_dir(self.appname),
                            self.data_fname), 'wb') as f:
         tmp_pw = self.user.password
         self.user.password = ''
         pickle.dump(self.user, f)
         self.user.password = tmp_pw
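A matching load routine is not part of this excerpt; assuming the same path and pickle format, a hypothetical counterpart might look like this (note the stored user has an empty password, since dumpUser blanks it before pickling):

import os
import pickle
from appdirs import user_data_dir

def loadUser(self):
    # Read the pickled user back from the same per-user data file (sketch only).
    with open(os.path.join(user_data_dir(self.appname),
                           self.data_fname), 'rb') as f:
        return pickle.load(f)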
Example no. 31
def get_swiss_data_dir():
    return appdirs.user_data_dir("swiss")
Example no. 32
def pytest_runtest_teardown(item, nextitem):
    path = os.path.join(appdirs.user_data_dir(nkms.db.CONFIG_APPNAME),
                        nkms.db.DB_NAME)
    if os.path.exists(path):
        shutil.rmtree(path)
Example no. 33
class Config:
    app_name = "lime-comb"

    data_dir = Path(user_data_dir(app_name))
    config_dir = Path(user_config_dir(app_name))

    client_lime_comb_url = "http://lime-comb.web.app/_client-lime-comb.json"
    config_file = config_dir / "config.yml"
    oauth_client_config = config_dir / "client-lime-comb.json"
    credentials_file = data_dir / "credentials"
    keyring_dir = data_dir / "keyring"

    config_dir.mkdir(exist_ok=True, parents=True)
    data_dir.mkdir(exist_ok=True, parents=True)
    keyring_dir.mkdir(exist_ok=True, parents=True, mode=0o700)

    comment = "lime comb"
    __raised = False

    @property
    def oauth_gcp_conf(self):
        path = self.oauth_client_config
        if not path.exists():
            try:
                logger.info(f"fetching {self.client_lime_comb_url}")
                response = requests.get(self.client_lime_comb_url)
                response.raise_for_status()
                with open(str(path), "w") as f:
                    f.write(response.content.decode("utf-8"))
            except Exception as e:
                logger.error(
                    f"Error {e} during fetching client-lime-comb.json")
        return path

    def get_configurable(self):
        return {
            "username": self.username,
            "email": self.email,
            "always_import": self.always_import,
            "password": self.password,
            "export_priv_key": self.export_priv_key,
            "export_password": self.export_password,
        }

    def __repr__(self):
        return f"Config: ({self.get_configurable()}) & constant values"

    @property
    def username(self):
        return self._read_property("username")

    @username.setter
    def username(self, username):
        self.__save_property("username", username)

    @property
    def password(self):
        return self._read_property("password")

    @password.setter
    def password(self, password):
        self.__save_property("password", password)

    @property
    def email(self):
        return self._read_property("email")

    @email.setter
    def email(self, email):
        self.__save_property("email", email, lc_validate_email)

    @property
    def export_password(self):
        return self._read_property("export_password", True)

    @export_password.setter
    def export_password(self, value):
        self.__save_property("export_password", convert_bool_string(value),
                             validate_bool)

    @property
    def export_priv_key(self):
        return self._read_property("export_priv_key", True)

    @export_priv_key.setter
    def export_priv_key(self, value):
        self.__save_property("export_priv_key", convert_bool_string(value),
                             validate_bool)

    @property
    def always_import(self):
        return self._read_property("always_import", True)

    @always_import.setter
    def always_import(self, value):
        self.__save_property("always_import", convert_bool_string(value),
                             validate_bool)

    def _read_config(self):
        try:
            with open(self.config_file, "r") as f:
                _config = yaml.safe_load(f.read())
                if _config:
                    return _config
            return {}
        except FileNotFoundError:
            self.config_file.touch(mode=0o600)
            return {}

    def __write_config(self, conf):
        with open(self.config_file, "w") as f:
            f.write(yaml.dump(dict(conf)))

    def _read_property(self, name, default=None):
        conf = self._read_config()
        if not conf and not self.__raised:
            logger.error(f"config is empty")
            self.__raised = True
            raise EmptyConfigError("Empty Config")
        if name in conf.keys():
            return conf[name]
        return default

    def __save_property(self, name, value, validator=None):
        if validator:
            validator(value)
        conf = self._read_config()
        conf[name] = value
        self.__write_config(conf)

    @classmethod
    def _config_input(cls, property_name, *, default=None):
        value = None
        while True:
            value = input(
                f"{property_name} (suggested {default}): ") or default
            if value:
                return value

    def _gen_config(self):
        print("-" * 44)
        print("Empty config detected. Setting up a new one")
        if not self.password:
            alphabet = string.ascii_letters + string.digits
            self.password = "".join(
                secrets.choice(alphabet) for i in range(32))
        self.password = self._config_input("password", default=self.password)
        self.username = self._config_input("username", default=self.username)
        while True:
            try:
                self._config_input("email", default=self.email)
                break
            except EmailSyntaxError as e:
                logger.error(e)
        self.export_password = self.get_bool(
            "export_password: (suggested true)", default=self.export_password)
        self.export_priv_key = self.get_bool(
            "export_priv_key (suggested true)", default=self.export_priv_key)
        self.always_import = self.get_bool("always_import (suggested true)",
                                           default=self.always_import)
        print("-" * 44)

    @classmethod
    def get_bool(cls, message, *, default=None):
        while True:
            my_bool = input(f"{message} [True/False]: ")
            if my_bool == "" and default != None:
                return default
            try:
                validate_bool(my_bool)
                break
            except ValueError:
                logger.warning(f"{my_bool} is not True or False")
        return convert_bool_string(my_bool)
Example no. 34

import os
import re
import json

from genx.models.utils import UserVars, fw, fp, bc, __bc_dict__  # @UnusedImport
from genx.gui_logging import iprint

# configuration file to store the known materials
try:
    import appdirs
except ImportError:
    config_path = os.path.expanduser(os.path.join('~', '.genx'))
else:
    config_path = appdirs.user_data_dir('GenX3', 'ArturGlavic')
if not os.path.exists(config_path):
    os.makedirs(config_path)
config_file = os.path.join(config_path, 'materials.cfg')

default_materials = [
    [[["Cr", 1.0]], "7.19*0.602214/51.9961"],
    [[["D", 2.0], ["O", 1.0]], "1.107*0.602214/20.0276"],
    [[["Fe", 1.0]], "7.874*0.602214/55.845"],
    [[["Fe", 2.0], ["O", 3.0]], "2.0/100.713"],
    [[["H", 2.0], ["O", 1.0]], "1*0.602214/18.0152"],
    [[["La", 0.7], ["Sr", 0.3], ["Mn", 1.0], ["O", 3.0]], "6.0/349.916"],
    [[["Ni", 1.0]], "8.908*0.602214/58.6934"],
    [[["Si", 1.0]], "2.329*0.602214/28.0855"],
    [[["Si", 1.0], ["O", 2.0]], "3.0/113.005"],
    [[["Sr", 1.0], ["Ti", 1.0], ["O", 3.0]], "1.0/(3.905**3)"],
Example no. 35
def get_data_dir():
    """ get the directory where to put some data """
    return ensure_exists(appdirs.user_data_dir(PYLDRAW))
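The ensure_exists() helper (and the PYLDRAW constant) come from elsewhere in the project; a minimal assumed version of the helper creates the directory if needed and returns the path unchanged:

import os

def ensure_exists(path):
    # Make sure the directory exists, then hand the path back to the caller.
    os.makedirs(path, exist_ok=True)
    return path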
Example no. 36
def user_data_dir():
    """Return the user Intake catalog directory"""
    return appdirs.user_data_dir(appname='intake', appauthor='intake')
Example no. 37
import os, json

import appdirs

name = "pydatasci"

app_dir_no_trailing_slash = appdirs.user_data_dir("pydatasci")
# Adds trailing slash or backslashes depending on OS.
app_dir = os.path.join(app_dir_no_trailing_slash, '')
default_config_path = app_dir + "config.json"
default_db_path = app_dir + "aidb.sqlite3"


def check_exists_folder():
    # If Windows does not have permission to read the folder, it will fail when trailing backslashes \\ are provided.
    app_dir_exists = os.path.exists(app_dir_no_trailing_slash)
    if app_dir_exists:
        print(
            f"\n=> Success - the following file path already exists on your system:\n{app_dir}\n"
        )
        return True
    else:
        print(
            f"\n=> Info - it appears the following folder does not exist on your system:\n{app_dir}\n"
        )
        print(
            "\n=> Fix - you can attempt to fix this by running `pds.create_folder()`.\n"
        )
        return False

Example no. 38
class DataDir(object):
    """ This class ensures that the user's data is stored in its OS
        preotected user directory:

         Furthermore, it offers an interface to generated backups
         in the `backups/` directory every now and then.
    """

    appname = "steem"
    appauthor = "Steemit Inc"
    storageDatabase = "steem.sqlite"

    data_dir = user_data_dir(appname, appauthor)
    sqlDataBaseFile = os.path.join(data_dir, storageDatabase)

    def __init__(self):
        #: Storage
        self.mkdir_p()

    def mkdir_p(self):
        """ Ensure that the directory in which the data is stored
            exists
        """
        if os.path.isdir(self.data_dir):
            return
        else:
            try:
                os.makedirs(self.data_dir)
            except FileExistsError:
                return
            except OSError:
                raise

    def sqlite3_backup(self, dbfile, backupdir):
        """ Create timestamped database copy
        """
        if not os.path.isdir(backupdir):
            os.mkdir(backupdir)
        backup_file = os.path.join(
            backupdir,
            os.path.basename(self.storageDatabase) +
            datetime.now().strftime("-" + timeformat))
        connection = sqlite3.connect(self.sqlDataBaseFile)
        cursor = connection.cursor()
        # Lock database before making a backup
        cursor.execute('BEGIN IMMEDIATE')
        # Make new backup file
        shutil.copyfile(dbfile, backup_file)
        log.info("Creating {}...".format(backup_file))
        # Unlock database
        connection.rollback()
        configStorage["lastBackup"] = datetime.now().strftime(timeformat)

    def clean_data(self):
        """ Delete files older than 70 days
        """
        log.info("Cleaning up old backups")
        for filename in os.listdir(self.data_dir):
            backup_file = os.path.join(self.data_dir, filename)
            if os.stat(backup_file).st_ctime < (time.time() - 70 * 86400):
                if os.path.isfile(backup_file):
                    os.remove(backup_file)
                    log.info("Deleting {}...".format(backup_file))

    def refreshBackup(self):
        """ Make a new backup
        """
        backupdir = os.path.join(self.data_dir, "backups")
        self.sqlite3_backup(self.sqlDataBaseFile, backupdir)
        self.clean_data()
Example no. 39
import sys
import time
from pathlib import Path

import appdirs
from loguru import logger
from tomlkit.toml_document import TOMLDocument
from tomlkit.toml_file import TOMLFile

from .__about__ import __author__, __title__

CONFIG_BASE_PATH = Path(appdirs.user_config_dir(__title__, __author__))

LOG_BASE_PATH = Path(appdirs.user_data_dir(__title__, __author__))
LOG_FORMAT = '<lvl>[{time:YYYY-MM-DD HH:mm:ss}]</lvl> {message}'
LOG_DEBUG_FORMAT = LOG_FORMAT

logger.level('NORMAL', no=25, color="<green>")
logger.level('INFO', no=20, color="<green><bold>")
logger.level('ACTION_FAILURE', no=16, color="<red>")
logger.level('ACTION_SUCCESS', no=15, color="<cyan>")

VERBOSITY_LOG_LEVELS = {
    0: 50,
    1: 40,
    2: 30,
    3: 25,
    4: 20,
    5: 16,
    6: 15,
    7: 10,
Example no. 40

def get_system_coafile(coalib_root):
    if os.path.isfile(os.path.join(coalib_root, 'system_coafile')):
        return os.path.join(coalib_root, 'system_coafile')
    else:
        logging.warning('Filename default_coafile has been deprecated. '
                        'Please use system_coafile instead.')
        return os.path.join(coalib_root, 'default_coafile')


system_coafile = get_system_coafile(coalib_root)

user_coafile = os.path.join(os.path.expanduser('~'), '.coarc')

default_coafile = '.coafile'

USER_DATA_DIR = appdirs.user_data_dir('coala', version=VERSION)

GLOBBING_SPECIAL_CHARS = '()[]|?*'

URL_REGEX = re.compile(
    r'^(?:(?:http|ftp)[s]?://)?'  # scheme
    r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+'  # domain name
    r'(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'
    r'localhost|'  # OR localhost
    r'(?:\d{1,3}\.){3}\d{1,3})'  # OR an ip
    r'(?::\d+)?'  # optional port number
    r'(?:/?|[/?]\S+)$',  # path
    re.IGNORECASE)
Example no. 41
        def __repr__(self):
            return f"<{self.__class__.__name__} {dict(self)!r}>"

        @staticmethod
        def _dump(obj):
            "Encode as msgpack using numpy-aware encoder."
            # See https://github.com/msgpack/msgpack-python#string-and-binary-type
            # for more on use_bin_type.
            return msgpack.packb(obj,
                                 default=msgpack_numpy.encode,
                                 use_bin_type=True)

        @staticmethod
        def _load(file):
            return msgpack.unpackb(file,
                                   object_hook=msgpack_numpy.decode,
                                   raw=False)


runengine_metadata_dir = appdirs.user_data_dir(
    appname="bluesky") / Path("runengine-metadata")

# PersistentDict will create the directory if it does not exist
RE.md = PersistentDict(runengine_metadata_dir)

# Turn down super-verbose logging for caproto
import logging
# logging.getLogger('caproto').setLevel('ERROR')
# logging.getLogger('caproto.ch').setLevel('ERROR')
logging.getLogger('ophyd').setLevel('WARNING')
Example no. 42
from io import open
from os import makedirs
from os.path import join, dirname, getmtime
from appdirs import user_data_dir
from fasteners import InterProcessLock

try:
    unicode
except NameError:
    unicode = str

import logging
logger = logging.getLogger("mbedls.platform_database")
del logging

LOCAL_PLATFORM_DATABASE = join(user_data_dir("mbedls"), "platforms.json")
LOCAL_MOCKS_DATABASE = join(user_data_dir("mbedls"), "mock.json")

DEFAULT_PLATFORM_DB = {
    u'0001': u'LPC2368',
    u'0002': u'LPC2368',
    u'0003': u'LPC2368',
    u'0004': u'LPC2368',
    u'0005': u'LPC2368',
    u'0006': u'LPC2368',
    u'0007': u'LPC2368',
    u'0100': u'LPC2368',
    u'0183': u'UBLOX_C027',
    u'0200': u'KL25Z',
    u'0201': u'KW41Z',
    u'0210': u'KL05Z',
Example no. 43
import http.client
import pathlib
import sys
import tempfile
import time
import urllib.parse
from typing import *
from typing.io import *

import appdirs
import bs4

import onlinejudge.__about__ as version
import onlinejudge._implementation.logging as log
from onlinejudge.type import *

config_dir = pathlib.Path(appdirs.user_config_dir(version.__package_name__))
data_dir = pathlib.Path(appdirs.user_data_dir(version.__package_name__))
cache_dir = pathlib.Path(appdirs.user_cache_dir(version.__package_name__))
html_parser = 'lxml'


def describe_status_code(status_code: int) -> str:
    return '{} {}'.format(status_code, http.client.responses[status_code])


def previous_sibling_tag(tag: bs4.Tag) -> bs4.Tag:
    tag = tag.previous_sibling
    while tag and not isinstance(tag, bs4.Tag):
        tag = tag.previous_sibling
    return tag

Example no. 44
class MusterModule(PypeModule, ITrayModule, IWebServerRoutes):
    """
    Module handling Muster Render credentials. This will display dialog
    asking for user credentials for Muster if not already specified.
    """
    cred_folder_path = os.path.normpath(
        appdirs.user_data_dir('pype-app', 'pype')
    )
    cred_filename = 'muster_cred.json'

    name = "muster"

    def initialize(self, modules_settings):
        muster_settings = modules_settings[self.name]
        self.enabled = muster_settings["enabled"]
        self.muster_url = muster_settings["MUSTER_REST_URL"]

        self.cred_path = os.path.join(
            self.cred_folder_path, self.cred_filename
        )
        # Tray attributes
        self.widget_login = None
        self.action_show_login = None
        self.rest_api_obj = None

    def get_global_environments(self):
        return {
            "MUSTER_REST_URL": self.muster_url
        }

    def tray_init(self):
        from .widget_login import MusterLogin
        self.widget_login = MusterLogin(self)

    def tray_start(self):
        """Show login dialog if credentials not found."""
        # This should be start of module in tray
        cred = self.load_credentials()
        if not cred:
            self.show_login()

    def tray_exit(self):
        """Nothing special for Muster."""
        return

    def connect_with_modules(self, *_a, **_kw):
        return

    # Definition of Tray menu
    def tray_menu(self, parent):
        """Add **change credentials** option to tray menu."""
        from Qt import QtWidgets

        # Menu for Tray App
        menu = QtWidgets.QMenu('Muster', parent)
        menu.setProperty('submenu', 'on')

        # Actions
        self.action_show_login = QtWidgets.QAction(
            "Change login", menu
        )

        menu.addAction(self.action_show_login)
        self.action_show_login.triggered.connect(self.show_login)

        parent.addMenu(menu)

    def webserver_initialization(self, server_manager):
        """Implementation of IWebServerRoutes interface."""
        if self.tray_initialized:
            from .rest_api import MusterModuleRestApi

            self.rest_api_obj = MusterModuleRestApi(self, server_manager)

    def load_credentials(self):
        """
        Get credentials from JSON file
        """
        credentials = {}
        try:
            file = open(self.cred_path, 'r')
            credentials = json.load(file)
        except Exception:
            file = open(self.cred_path, 'w+')
        file.close()

        return credentials

    def get_auth_token(self, username, password):
        """
        Authenticate user with Muster and get authToken from server.
        """
        if not self.muster_url:
            raise AttributeError("Muster REST API url not set")
        params = {
            'username': username,
            'password': password
        }
        api_entry = '/api/login'
        response = self._requests_post(
            self.muster_url + api_entry, params=params)
        if response.status_code != 200:
            self.log.error(
                'Cannot log into Muster: {}'.format(response.status_code))
            raise Exception('Cannot login into Muster.')

        try:
            token = response.json()['ResponseData']['authToken']
        except ValueError as e:
            self.log.error('Invalid response from Muster server {}'.format(e))
            raise Exception('Invalid response from Muster while logging in.')

        self.save_credentials(token)

    def save_credentials(self, token):
        """
        Save credentials to JSON file
        """
        data = {
            'token': token
        }

        file = open(self.cred_path, 'w')
        file.write(json.dumps(data))
        file.close()

    def show_login(self):
        """
        Show dialog to enter credentials
        """
        if self.widget_login:
            self.widget_login.show()

    def _requests_post(self, *args, **kwargs):
        """ Wrapper for requests, disabling SSL certificate validation if
            DONT_VERIFY_SSL environment variable is found. This is useful when
            Deadline or Muster server are running with self-signed certificates
            and their certificate is not added to trusted certificates on
            client machines.

            WARNING: disabling SSL certificate validation is defeating one line
            of defense SSL is providing and it is not recommended.
        """
        if 'verify' not in kwargs:
            # verify certificates unless PYPE_DONT_VERIFY_SSL is set in the environment
            kwargs['verify'] = not os.getenv("PYPE_DONT_VERIFY_SSL", False)  # noqa
        return requests.post(*args, **kwargs)
Example no. 45
    def package_folder(self):
        _dir = user_data_dir(self._li.lib_name, "HydrOffice")
        if not os.path.exists(_dir):  # create it if it does not exist
            os.makedirs(_dir)

        return _dir
Example no. 46
# Copyright (c) 2012 Hesky Fisher
# See LICENSE.txt for details.
"""Platform dependent configuration."""

import appdirs
import os
from os.path import realpath, join, dirname
import sys

# If plover is run from a pyinstaller binary.
if hasattr(sys, 'frozen') and hasattr(sys, '_MEIPASS'):
    ASSETS_DIR = sys._MEIPASS
# If plover is run from an app bundle on Mac.
elif (sys.platform.startswith('darwin') and '.app' in realpath(__file__)):
    ASSETS_DIR = os.getcwd()
else:
    ASSETS_DIR = join(dirname(dirname(realpath(__file__))), 'assets')

CONFIG_DIR = appdirs.user_data_dir('plover', 'plover')
Example no. 47
 def __init__(self):
     self.default_location = appdirs.user_data_dir("IdlePeriodCount", "BlackbeardSoftware")
     self.config_file = os.path.join(self.default_location, "config.json")
Example no. 48
# -*- coding: utf-8 -*-
from flask import Flask
from flask_restful import Resource, Api
import sqlite3
import json
from althea import __app__, __filename__
from althea.resources.base import dict_factory
import appdirs
import os
import sys

app = Flask(__name__)
database = os.path.join(appdirs.user_data_dir(__app__), __filename__)

app = Flask(__name__)
api = Api(app)


class Althea_Meta(Resource):
    def get(self):
        #Connect to databse
        conn = sqlite3.connect(database)
        conn.row_factory = dict_factory
        c = conn.cursor()
        sql = "select * from models"
        c.execute(sql)
        metadata = c.fetchall()
        return {'result': metadata}


class Model_Input(Resource):
Example no. 49
INITIALIZING_CODE = 'initializing'
DOWNLOAD_METADATA_CODE = 'downloading_metadata'
DOWNLOAD_TIMEOUT_CODE = 'timeout'
DOWNLOAD_RUNNING_CODE = 'running'
DOWNLOAD_STOPPED_CODE = 'stopped'
STREAM_STAGES = [(INITIALIZING_CODE, 'Initializing...'),
                 (DOWNLOAD_METADATA_CODE, 'Downloading metadata'),
                 (DOWNLOAD_RUNNING_CODE, 'Started stream'),
                 (DOWNLOAD_STOPPED_CODE, 'Paused stream'),
                 (DOWNLOAD_TIMEOUT_CODE, 'Stream timed out')]

if sys.platform != "darwin":
    log_dir = os.path.join(os.path.expanduser("~"), ".lbrynet")
else:
    log_dir = user_data_dir("LBRY")

if not os.path.isdir(log_dir):
    os.mkdir(log_dir)

lbrynet_log = os.path.join(log_dir, LOG_FILE_NAME)
log = logging.getLogger(__name__)


class GetStream(object):
    def __init__(self,
                 sd_identifier,
                 session,
                 wallet,
                 lbry_file_manager,
                 exchange_rate_manager,
Example no. 50
# A threshold for the max cells to compute a volume for when repr-ing
REPR_VOLUME_MAX_CELLS = 1e6

# Set where figures are saved
FIGURE_PATH = None

# allow user to override the examples path
if 'PYVISTA_USERDATA_PATH' in os.environ:
    USER_DATA_PATH = os.environ['PYVISTA_USERDATA_PATH']
    if not os.path.isdir(USER_DATA_PATH):
        raise FileNotFoundError(
            f'Invalid PYVISTA_USERDATA_PATH at {USER_DATA_PATH}')

else:
    # Set up data directory
    USER_DATA_PATH = appdirs.user_data_dir('pyvista')
    if not os.path.exists(USER_DATA_PATH):
        os.makedirs(USER_DATA_PATH)

try:
    EXAMPLES_PATH = os.path.join(USER_DATA_PATH, 'examples')
    if not os.path.exists(EXAMPLES_PATH):
        try:
            os.makedirs(EXAMPLES_PATH)
        except FileExistsError:  # Edge case due to IO race conditions
            pass
except Exception as e:
    warnings.warn(
        f'Unable to create `EXAMPLES_PATH` at "{EXAMPLES_PATH}"\n'
        f'Error: {e}\n\n'
        'Override the default path by setting the environmental variable '
Example no. 51
def load_config(args=None, check_with_data=True):
    """
    Load the configuration from file.

    :param args: An argparse args structure.
    :param check_with_data: Whether we should use the available OpenData to
        check the config values. Defaults to ``True``.
    :return: The loaded config dict.
    """
    LOGGER.info("Initializing configuration...")
    # Default configuration
    config_data = DEFAULT_CONFIG.copy()

    # Load config from specified JSON
    if args and getattr(args, "config", None):
        LOGGER.debug("Loading configuration from %s.", args.config)
        try:
            with open(args.config, "r") as fh:
                config_data.update(json.load(fh))
        except (IOError, ValueError) as exc:
            LOGGER.error(
                "Unable to load configuration from file, using default configuration: %s.",
                exc,
            )

    # Overload config with arguments
    if args and getattr(args, "passes", None) is not None:
        LOGGER.debug("Overloading number of passes from CLI arguments: %d.",
                     args.passes)
        config_data["passes"] = args.passes
    if args and getattr(args, "max_entries", None) is not None:
        LOGGER.debug(
            "Overloading maximum number of entries from CLI arguments: %d.",
            args.max_entries,
        )
        config_data["max_entries"] = args.max_entries
    if args and getattr(args, "port", None) is not None:
        LOGGER.debug("Overloading web app port: %d.", args.port)
        config_data["port"] = args.port
    if args and getattr(args, "host", None) is not None:
        LOGGER.debug("Overloading web app host: %s.", args.host)
        config_data["host"] = str(args.host)

    # Handle data_directory option
    if args and getattr(args, "data_dir", None) is not None:
        LOGGER.debug("Overloading data directory from CLI arguments.")
        config_data["data_directory"] = args.data_dir
    elif config_data["data_directory"] is None:
        config_data["data_directory"] = appdirs.user_data_dir(
            "flatisfy", "flatisfy")
        LOGGER.debug("Using default XDG data directory: %s.",
                     config_data["data_directory"])

    if not os.path.isdir(config_data["data_directory"]):
        LOGGER.info(
            "Creating data directory according to config: %s",
            config_data["data_directory"],
        )
        os.makedirs(config_data["data_directory"])
        os.makedirs(os.path.join(config_data["data_directory"], "images"))

    if config_data["database"] is None:
        config_data["database"] = "sqlite:///" + os.path.join(
            config_data["data_directory"], "flatisfy.db")

    if config_data["search_index"] is None:
        config_data["search_index"] = os.path.join(
            config_data["data_directory"], "search_index")

    # Handle constraints filtering
    if args and getattr(args, "constraints", None) is not None:
        LOGGER.info(
            ("Filtering constraints from config according to CLI argument. "
             "Using only the following constraints: %s."),
            args.constraints.replace(",", ", "),
        )
        constraints_filter = args.constraints.split(",")
        config_data["constraints"] = {
            k: v
            for k, v in config_data["constraints"].items()
            if k in constraints_filter
        }

    # Sanitize website url
    if config_data["website_url"] is not None:
        if config_data["website_url"][-1] != "/":
            config_data["website_url"] += "/"

    config_validation = validate_config(config_data, check_with_data)
    if config_validation is True:
        LOGGER.info("Config has been fully initialized.")
        return config_data
    LOGGER.error("Error in configuration: %s.", config_validation)
    return None
Example no. 52
def get_user_data_dir():
    user_dir = appdirs.user_data_dir('pysimpledlna', 'wx_c')
    if not os.path.exists(user_dir):
        os.makedirs(user_dir)
    return os.path.abspath(user_dir)
Example no. 53
def main():
    """
    Tautulli application entry point. Parses arguments, setups encoding and
    initializes the application.
    """

    # Fixed paths to Tautulli
    if hasattr(sys, 'frozen') and hasattr(sys, '_MEIPASS'):
        plexpy.FROZEN = True
        plexpy.FULL_PATH = os.path.abspath(sys.executable)
        plexpy.PROG_DIR = sys._MEIPASS
    else:
        plexpy.FULL_PATH = os.path.abspath(__file__)
        plexpy.PROG_DIR = os.path.dirname(plexpy.FULL_PATH)

    plexpy.ARGS = sys.argv[1:]

    # From sickbeard
    plexpy.SYS_PLATFORM = sys.platform
    plexpy.SYS_ENCODING = None

    try:
        locale.setlocale(locale.LC_ALL, "")
        plexpy.SYS_LANGUAGE, plexpy.SYS_ENCODING = locale.getdefaultlocale()
    except (locale.Error, IOError):
        pass

    # for OSes that are poorly configured I'll just force UTF-8
    if not plexpy.SYS_ENCODING or plexpy.SYS_ENCODING in ('ANSI_X3.4-1968',
                                                          'US-ASCII', 'ASCII'):
        plexpy.SYS_ENCODING = 'UTF-8'

    # Set up and gather command line arguments
    parser = argparse.ArgumentParser(
        description=
        'A Python based monitoring and tracking tool for Plex Media Server.')

    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='Increase console logging verbosity')
    parser.add_argument('-q',
                        '--quiet',
                        action='store_true',
                        help='Turn off console logging')
    parser.add_argument('-d',
                        '--daemon',
                        action='store_true',
                        help='Run as a daemon')
    parser.add_argument('-p',
                        '--port',
                        type=int,
                        help='Force Tautulli to run on a specified port')
    parser.add_argument('--dev',
                        action='store_true',
                        help='Start Tautulli in the development environment')
    parser.add_argument(
        '--datadir', help='Specify a directory where to store your data files')
    parser.add_argument('--config', help='Specify a config file to use')
    parser.add_argument('--nolaunch',
                        action='store_true',
                        help='Prevent browser from launching on startup')
    parser.add_argument(
        '--pidfile',
        help='Create a pid file (only relevant when running as a daemon)')
    parser.add_argument(
        '--nofork',
        action='store_true',
        help='Start Tautulli as a service, do not fork when restarting')

    args = parser.parse_args()

    if args.verbose:
        plexpy.VERBOSE = True
    if args.quiet:
        plexpy.QUIET = True

    # Do an initial setup of the logger.
    # Require verbose for pre-initialization to see critical errors
    logger.initLogger(console=not plexpy.QUIET, log_dir=False, verbose=True)

    try:
        plexpy.SYS_TIMEZONE = tzlocal.get_localzone()
    except (pytz.UnknownTimeZoneError, LookupError, ValueError) as e:
        logger.error("Could not determine system timezone: %s" % e)
        plexpy.SYS_TIMEZONE = pytz.UTC

    plexpy.SYS_UTC_OFFSET = datetime.datetime.now(
        plexpy.SYS_TIMEZONE).strftime('%z')

    if helpers.bool_true(os.getenv('TAUTULLI_DOCKER', False)):
        plexpy.DOCKER = True
    if helpers.bool_true(os.getenv('TAUTULLI_SNAP', False)):
        plexpy.SNAP = True

    if args.dev:
        plexpy.DEV = True
        logger.debug("Tautulli is running in the dev environment.")

    if args.daemon:
        if sys.platform == 'win32':
            logger.warn(
                "Daemonizing not supported under Windows, starting normally")
        else:
            plexpy.DAEMON = True
            plexpy.QUIET = True

    if args.nofork:
        plexpy.NOFORK = True
        logger.info(
            "Tautulli is running as a service, it will not fork when restarted."
        )

    if args.pidfile:
        plexpy.PIDFILE = str(args.pidfile)

        # If the pidfile already exists, plexpy may still be running, so
        # exit
        if os.path.exists(plexpy.PIDFILE):
            try:
                with open(plexpy.PIDFILE, 'r') as fp:
                    pid = int(fp.read())
            except IOError as e:
                raise SystemExit("Unable to read PID file: %s", e)

            try:
                os.kill(pid, 0)
            except OSError:
                logger.warn("PID file '%s' already exists, but PID %d is "
                            "not running. Ignoring PID file." %
                            (plexpy.PIDFILE, pid))
            else:
                # The pidfile exists and points to a live PID. plexpy may
                # still be running, so exit.
                raise SystemExit("PID file '%s' already exists. Exiting." %
                                 plexpy.PIDFILE)

        # The pidfile is only useful in daemon mode, make sure we can write the
        # file properly
        if plexpy.DAEMON:
            plexpy.CREATEPID = True

            try:
                with open(plexpy.PIDFILE, 'w') as fp:
                    fp.write("pid\n")
            except IOError as e:
                raise SystemExit("Unable to write PID file: %s", e)
        else:
            logger.warn("Not running in daemon mode. PID file creation " \
                        "disabled.")

    # Determine which data directory and config file to use
    if args.datadir:
        plexpy.DATA_DIR = args.datadir
    elif plexpy.FROZEN:
        plexpy.DATA_DIR = appdirs.user_data_dir("Tautulli", False)
    else:
        plexpy.DATA_DIR = plexpy.PROG_DIR

    # Migrate Snap data dir
    if plexpy.SNAP:
        snap_common = os.environ['SNAP_COMMON']
        old_data_dir = os.path.join(snap_common, 'Tautulli')
        if os.path.exists(old_data_dir) and os.listdir(old_data_dir):
            plexpy.SNAP_MIGRATE = True
            logger.info("Migrating Snap user data.")
            shutil.move(old_data_dir, plexpy.DATA_DIR)

    if args.config:
        config_file = args.config
    else:
        config_file = os.path.join(plexpy.DATA_DIR, config.FILENAME)

    # Try to create the DATA_DIR if it doesn't exist
    if not os.path.exists(plexpy.DATA_DIR):
        try:
            os.makedirs(plexpy.DATA_DIR)
        except OSError:
            raise SystemExit('Could not create data directory: ' +
                             plexpy.DATA_DIR + '. Exiting....')

    # Make sure the DATA_DIR is writeable
    if not os.access(plexpy.DATA_DIR, os.W_OK):
        raise SystemExit('Cannot write to the data directory: ' +
                         plexpy.DATA_DIR + '. Exiting...')

    # Put the database in the DATA_DIR
    plexpy.DB_FILE = os.path.join(plexpy.DATA_DIR, database.FILENAME)

    # Move 'plexpy.db' to 'tautulli.db'
    if os.path.isfile(os.path.join(plexpy.DATA_DIR, 'plexpy.db')) and \
            not os.path.isfile(os.path.join(plexpy.DATA_DIR, plexpy.DB_FILE)):
        try:
            os.rename(os.path.join(plexpy.DATA_DIR, 'plexpy.db'),
                      plexpy.DB_FILE)
        except OSError as e:
            raise SystemExit("Unable to rename plexpy.db to tautulli.db: %s",
                             e)

    if plexpy.DAEMON:
        plexpy.daemonize()

    # Read config and start logging
    plexpy.initialize(config_file)

    # Start the background threads
    plexpy.start()

    # Force the http port if necessary
    if args.port:
        plexpy.HTTP_PORT = args.port
        logger.info('Using forced web server port: %i', plexpy.HTTP_PORT)
    else:
        plexpy.HTTP_PORT = int(plexpy.CONFIG.HTTP_PORT)

    # Check if pyOpenSSL is installed. It is required for certificate generation
    # and for CherryPy.
    if plexpy.CONFIG.ENABLE_HTTPS:
        try:
            import OpenSSL
        except ImportError:
            logger.warn("The pyOpenSSL module is missing. Install this "
                        "module to enable HTTPS. HTTPS will be disabled.")
            plexpy.CONFIG.ENABLE_HTTPS = False

    # Try to start the server. Will exit here if the address is already in use.
    webstart.start()

    if common.PLATFORM == 'Windows':
        if plexpy.CONFIG.SYS_TRAY_ICON:
            plexpy.WIN_SYS_TRAY_ICON = windows.WindowsSystemTray()
            plexpy.WIN_SYS_TRAY_ICON.start()
        windows.set_startup()
    elif common.PLATFORM == 'Darwin':
        macos.set_startup()

    # Open webbrowser
    if plexpy.CONFIG.LAUNCH_BROWSER and not args.nolaunch and not plexpy.DEV:
        plexpy.launch_browser(plexpy.CONFIG.HTTP_HOST, plexpy.HTTP_PORT,
                              plexpy.HTTP_ROOT)

    if common.PLATFORM == 'Darwin' and plexpy.CONFIG.SYS_TRAY_ICON:
        if not macos.HAS_PYOBJC:
            logger.warn("The pyobjc module is missing. Install this "
                        "module to enable the MacOS menu bar icon.")
            plexpy.CONFIG.SYS_TRAY_ICON = False

        if plexpy.CONFIG.SYS_TRAY_ICON:
            # MacOS menu bar icon must be run on the main thread and is blocking
            # Start the rest of Tautulli on a new thread
            thread = threading.Thread(target=wait)
            thread.daemon = True
            thread.start()

            plexpy.MAC_SYS_TRAY_ICON = macos.MacOSSystemTray()
            plexpy.MAC_SYS_TRAY_ICON.start()
        else:
            wait()
    else:
        wait()
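The PID-file handling above relies on sending signal 0, which delivers nothing but raises OSError when the PID no longer exists. A hedged, standalone sketch of that liveness check (the helper name is an assumption for illustration, not Tautulli's API):

import os

def pid_is_running(pidfile):
    try:
        with open(pidfile, 'r') as fp:
            pid = int(fp.read().strip())
    except (IOError, ValueError):
        return False
    try:
        os.kill(pid, 0)  # signal 0: existence check only, nothing is delivered
    except OSError:
        return False
    return True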
Esempio n. 54
0
def get_data_dir():
    data_dir = user_data_dir(appname='label-studio')
    os.makedirs(data_dir, exist_ok=True)
    return data_dir
Esempio n. 55
0
 def _app_data_candidates():
     key = str("VIRTUALENV_OVERRIDE_APP_DATA")
     if key in os.environ:
         yield os.environ[key]
     else:
         yield user_data_dir(appname="virtualenv", appauthor="pypa")
Esempio n. 56
0
def main():
    PRESETS_DIR = appdirs.user_data_dir("wialon_ips", "Sergey Shevchik")
    if not os.path.exists(PRESETS_DIR):
        os.makedirs(PRESETS_DIR)

    PRESETS_PATH = os.path.join(PRESETS_DIR, 'wialon_ips_presets.conf')

    if len(sys.argv) > 1 and sys.argv[1] == 'clear':
        try:
            os.remove(PRESETS_PATH)
        except:
            pass
        sys.exit()

    def endpoint_filter(endpoint):
        return endpoint.split()[0]

    DEFAULT_TRACK_URL = 'http://89.223.93.46:8000/wialon_ips/sample_track.txt'

    ENDPOINTS = {
        'Wialon Hosting NL': '193.193.165.165',
        'Wialon Hosting MSK': '185.213.0.24',
        'Wialon Hosting USA': '64.120.108.24',
        'Wialon Hosting TRACE': '193.193.165.166',
        'Wialon Hosting TIG': '185.213.1.24',
        'Wialon Hosting OLD': '77.74.50.78',
        'Specify custom': 'Custom'
    }

    LAST_PRESETS = None
    LAST_CUSTOM_ENDPOINT = None
    LAST_UID = None
    LAST_SRC_URL = None
    LAST_SRC_PATH = None

    PRESET = None

    try:
        with open(PRESETS_PATH, 'r') as cf:
            conf = json.load(cf)
            if 'last_uid' in conf:
                LAST_UID = conf['last_uid']
            if 'last_src_url' in conf:
                LAST_SRC_URL = conf['last_src_url']
            if 'last_src_path' in conf:
                LAST_SRC_PATH = conf['last_src_path']
            if 'last_custom_endpoint' in conf:
                LAST_CUSTOM_ENDPOINT = conf['last_custom_endpoint']

            if 'presets' in conf and type(conf['presets']) is dict:
                LAST_PRESETS = conf['presets']
                if len(LAST_PRESETS):
                    load_preset = prompt(
                        dict(message='Load from preset',
                             type='list',
                             choices=['no', 'yes'],
                             name='load_preset'))['load_preset']
                    if load_preset == 'yes':
                        choosen_preset = prompt(
                            dict(message='Choose preset',
                                 type='list',
                                 choices=LAST_PRESETS,
                                 name='choosen_preset'))['choosen_preset']
                        if len(choosen_preset):
                            PRESET = LAST_PRESETS[choosen_preset]
    except:
        pass

    SETTINGS = {}
    if PRESET:
        SETTINGS = PRESET
    else:
        # ASKING PROTOCOL
        SETTINGS['protocol'] = prompt(
            dict(message='Protocol',
                 type='list',
                 choices=['TCP', 'UDP'],
                 name='protocol'))['protocol']

        # ASKING ENDPOINT
        endpoint = prompt(dict(message='Choose endpoint', type='list', choices=[ENDPOINTS[ep] + ' (' + ep + ')' for ep in ENDPOINTS], \
         name='ep', filter=endpoint_filter))['ep']
        if endpoint == 'Custom':
            ep_q = dict(message='Enter endpoint', type='input', name='ep')
            if LAST_CUSTOM_ENDPOINT:
                ep_q['default'] = LAST_CUSTOM_ENDPOINT
            SETTINGS['endpoint'] = prompt(ep_q)['ep']
            if len(SETTINGS['endpoint']):
                LAST_CUSTOM_ENDPOINT = SETTINGS['endpoint']
        else:
            SETTINGS['endpoint'] = endpoint

        # ASKING PORT
        SETTINGS['port'] = int(
            prompt(
                dict(message='Port',
                     type='input',
                     default='20332',
                     name='port'))['port'])

        # ASKING INTERVAL
        SETTINGS['interval'] = prompt(
            dict(message='Interval(seconds)',
                 type='input',
                 default='5',
                 name='ival'))['ival']

        # ASKING UID
        uid_q = dict(message='IMEI', type='input', name='uid')
        if LAST_UID:
            uid_q['default'] = LAST_UID
        SETTINGS['uid'] = prompt(uid_q)['uid']
        if len(SETTINGS['uid']):
            LAST_UID = SETTINGS['uid']

        # ASKING SRC
        SETTINGS['track_src_type'] = prompt(
            dict(message='Track source type',
                 type='list',
                 choices=['URL', 'File'],
                 name='track_src_type'))['track_src_type']
        src_q = dict(name='src', type='input')
        if SETTINGS['track_src_type'] == 'File':
            src_q['message'] = 'File path'
            if LAST_SRC_PATH:
                src_q['default'] = LAST_SRC_PATH
            SETTINGS['track_src'] = prompt(src_q)['src']
            if len(SETTINGS['track_src']):
                LAST_SRC_PATH = SETTINGS['track_src']
        else:
            src_q['message'] = 'Track URL'
            if LAST_SRC_URL:
                src_q['default'] = LAST_SRC_URL
            else:
                src_q['default'] = DEFAULT_TRACK_URL
            SETTINGS['track_src'] = prompt(src_q)['src']
            if len(SETTINGS['track_src']):
                LAST_SRC_URL = SETTINGS['track_src']

    try:
        PROTOCOL = SETTINGS['protocol']
        ENDPOINT = SETTINGS['endpoint']
        PORT = SETTINGS['port']
        UID = SETTINGS['uid']
        INTERVAL = SETTINGS['interval']
        TRACK_SRC_TYPE = SETTINGS['track_src_type']
        TRACK_SRC = SETTINGS['track_src']
    except Exception as e:
        print('Settings are invalid: ' + str(e))
        sys.exit()

    TRACK_DATA = None
    if TRACK_SRC_TYPE == 'File':
        try:
            with open(TRACK_SRC) as f:
                TRACK_DATA = f.readlines()
        except Exception as e:
            print(
                'Failed to get track data from specified source {0} ({1}): {2}'
                .format(TRACK_SRC, TRACK_SRC_TYPE, e))
    elif TRACK_SRC_TYPE == 'URL':
        try:
            r = requests.get(TRACK_SRC)
            TRACK_DATA = r.text.split()
        except Exception as e:
            print(
                'Failed to get track data from specified source {0} ({1}): {2}'
                .format(TRACK_SRC, TRACK_SRC_TYPE, e))

    if not TRACK_DATA:
        sys.exit()

    try:
        with open(PRESETS_PATH, 'w') as cf:
            new_config = dict()
            if LAST_UID:
                new_config['last_uid'] = LAST_UID
            if LAST_CUSTOM_ENDPOINT:
                new_config['last_custom_endpoint'] = LAST_CUSTOM_ENDPOINT
            if LAST_SRC_PATH:
                new_config['last_src_path'] = LAST_SRC_PATH
            if LAST_SRC_URL:
                new_config['last_src_url'] = LAST_SRC_URL

            new_presets = None
            if LAST_PRESETS:
                new_presets = LAST_PRESETS
            if not PRESET:
                save_to_preset = prompt(
                    dict(message='Save as preset',
                         type='list',
                         choices=['no', 'yes'],
                         name='answer'))['answer']
                if save_to_preset == 'yes':
                    preset_name = prompt(
                        dict(message='New preset name',
                             type='input',
                             name='preset_name'))['preset_name']
                    if len(preset_name):
                        new_presets = new_presets or dict()
                        new_presets[preset_name] = SETTINGS

            if new_presets:
                new_config["presets"] = new_presets

            json.dump(new_config, cf)
    except Exception as e:
        print('Failed to save updated config: ' + str(e))

    def parse_line(input_line):
        res = re.search(r'(\d+.\d+),(?!A)(\D),(\d+.\d+),(\D)', input_line)
        lat1 = res.group(1)
        lat2 = res.group(2)
        lon1 = res.group(3)
        lon2 = res.group(4)
        return (lat1, lat2, lon1, lon2)

    msgs = [parse_line(line) for line in TRACK_DATA]

    if PROTOCOL == 'TCP':
        LOGIN_MESSAGE = b'#L#'
        LOGIN_MESSAGE = b''.join([LOGIN_MESSAGE, bytearray(UID, 'utf-8')])
        LOGIN_MESSAGE = b''.join([LOGIN_MESSAGE, b';NA\r\n'])

        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect((ENDPOINT, PORT))

        print('Connected to {0}'.format(s.getpeername()))

        print('Sending login message')
        sent = s.send(LOGIN_MESSAGE)
        data = s.recv(1024)
        if data.decode('utf-8').startswith('#AL#1'):
            print('Login Success. Sending messages...')
        else:
            print('Login Failed: ' + data.decode('utf-8'))
            sys.exit()

        while True:
            for msg in msgs:
                request = '#SD#NA;NA;{};{};{};{};140;0;100;6'.format(
                    msg[0], msg[1], msg[2], msg[3]).encode('utf-8') + b'\r\n'

                bytes_sent = s.send(request)
                print(request)
                readen_data = s.recv(1024)
                print(readen_data)

                time.sleep(float(INTERVAL))

        s.close()
    elif PROTOCOL == 'UDP':
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        while True:
            for msg in msgs:
                request = '{}#SD#NA;NA;{};{};{};{};140;0;100;6'.format(
                    UID, msg[0], msg[1], msg[2], msg[3])
                r = b''.join([bytearray(request, 'utf-8'), b'\r\n'])
                print(r)
                sock.sendto(r, (ENDPOINT, PORT))
                # data = sock.recv(400)
                # print(data)
                time.sleep(float(INTERVAL))
Esempio n. 57
0
 def start(self, game_file_or_path: str) -> None:
     """Start the driver from a parsed set of arguments"""
     _check_required_libraries()
     gamepath = pathlib.Path(game_file_or_path)
     if gamepath.is_dir():
         # cd into the game directory (we can import it then), and load its config and zones
         os.chdir(str(gamepath))
         sys.path.insert(0, os.curdir)
     elif gamepath.is_file():
         # the game argument points to a file, assume it is a zipfile, add it to the import path
         sys.path.insert(0, str(gamepath))
     else:
         raise FileNotFoundError("Cannot find specified game")
     assert "story" not in sys.modules, "cannot start new story if it was already loaded before"
     cmds.clear_registered_commands()    # needed to allow stories to define their own custom commands after this
     import story
     if not hasattr(story, "Story"):
         raise AttributeError("Story class not found in the story file. It should be called 'Story'.")
     self.story = story.Story()
     self.story._verify(self)
     if self.game_mode not in self.story.config.supported_modes:
         raise ValueError("driver mode '%s' not supported by this story. Valid modes: %s" %
                          (self.game_mode, list(self.story.config.supported_modes)))
     self.story.config.mud_host = self.story.config.mud_host or "localhost"
     self.story.config.mud_port = self.story.config.mud_port or 8180
     self.story.config.server_mode = self.game_mode
     if self.game_mode != GameMode.IF and self.story.config.server_tick_method == TickMethod.COMMAND:
         raise ValueError("'command' tick method can only be used in 'if' game mode")
     # Register the driver and add some more stuff in the global context.
     self.resources = vfs.VirtualFileSystem(root_package="story")   # read-only story resources
     mud_context.config = self.story.config
     mud_context.resources = self.resources
     # check for existence of cmds package in the story root
     loader = pkgutil.get_loader("cmds")
     if loader:
         ld = pathlib.Path(loader.get_filename("cmds")).parent.parent.resolve()        # type: ignore
         sd = pathlib.Path(inspect.getabsfile(story)).parent       # type: ignore   # mypy doesn't recognise getabsfile?
         if ld == sd:   # only load them if the directory is the same as where the story was loaded from
             cmds.clear_registered_commands()   # making room for the story's commands
             # noinspection PyUnresolvedReferences
             import cmds as story_cmds      # import the cmd package from the story
             for verb, func, privilege in cmds.all_registered_commands():
                 try:
                     self.commands.add(verb, func, privilege)
                 except ValueError:
                     self.commands.override(verb, func, privilege)
             cmds.clear_registered_commands()
     self.commands.adjust_available_commands(self.story.config.server_mode)
     self.game_clock = util.GameDateTime(self.story.config.epoch or self.server_started, self.story.config.gametime_to_realtime)
     self.moneyfmt = None
     if self.story.config.money_type != MoneyType.NOTHING:
         self.moneyfmt = util.MoneyFormatter.create_for(self.story.config.money_type)
     user_data_dir = pathlib.Path(appdirs.user_data_dir("Tale-" + util.storyname_to_filename(self.story.config.name),
                                                        "Razorvine", roaming=True))
     user_data_dir.mkdir(mode=0o700, parents=True, exist_ok=True)
     self.user_resources = vfs.VirtualFileSystem(root_path=user_data_dir, readonly=False)  # r/w to the local 'user data' directory
     self.story.init(self)
     if self.story.config.playable_races:
         # story provides playable races. Check that every race is known.
         invalid = self.story.config.playable_races - playable_races
         if invalid:
             raise errors.StoryConfigError("invalid playable_races")
     else:
         # no particular races in story config, take the defaults
         self.story.config.playable_races = playable_races
     self.zones = self._load_zones(self.story.config.zones)
     if not self.story.config.startlocation_player:
         raise errors.StoryConfigError("player startlocation not configured in story")
     if not self.story.config.startlocation_wizard:
         self.story.config.startlocation_wizard = self.story.config.startlocation_player
     self.lookup_location(self.story.config.startlocation_player)
     self.lookup_location(self.story.config.startlocation_wizard)
     if self.story.config.server_tick_method == TickMethod.COMMAND:
         # If the server tick is synchronized with player commands, this factor needs to be 1,
         # because at every command entered the game time simply advances 1 x server_tick_time.
         self.story.config.gametime_to_realtime = 1
     assert self.story.config.server_tick_time > 0
     assert self.story.config.max_wait_hours >= 0
     self.game_clock = util.GameDateTime(self.story.config.epoch or self.server_started, self.story.config.gametime_to_realtime)
     # convert textual exit strings to actual exit object bindings
     for x in self.unbound_exits:
         x._bind_target(self.zones)
     self.unbound_exits = []
     sys.excepthook = util.excepthook  # install custom verbose crash reporter
     self.start_main_loop()   # doesn't exit! (unless game is killed)
     self._stop_driver()
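The user-data setup above asks appdirs for a roaming, per-story directory and creates it with restrictive permissions through pathlib. An illustrative sketch of just that step (the story name is a placeholder, not a real Tale story):

import pathlib
import appdirs

user_dir = pathlib.Path(appdirs.user_data_dir("Tale-mystory", "Razorvine", roaming=True))
user_dir.mkdir(mode=0o700, parents=True, exist_ok=True)  # only the owner may read the saved game data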
Esempio n. 58
0
}


class SQLiteHandler(logging.Handler):
    """
    Logging handler for SQLite.
    Based on Vinay Sajip's DBHandler class (http://www.red-dove.com/python_logging.html)
    """
    def emit(self, record):
        # Use default formatting:
        self.format(record)
        level = MAP_LEVELS.get(record.levelno, 0)
        notes = record.msg
        if record.exc_info:
            notes += " " + logging._defaultFormatter.formatException(
                record.exc_info)
        # Insert log record:
        worker.execute_noreturn(
            worker.save_log, record.botname, level, notes,
            datetime.datetime.fromtimestamp(record.created))


# Derive sqlite file directory
data_dir = user_data_dir(appname, appauthor)
sqlDataBaseFile = os.path.join(data_dir, storageDatabase)

# Create directory for sqlite file
mkdir_p(data_dir)

worker = DatabaseWorker()
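Like any logging.Handler subclass, the handler above would simply be attached to a logger; emit() reads record.botname, so the record needs that attribute, e.g. via the extra mapping. This wiring is an assumption for illustration, not part of the original module:

import logging

log = logging.getLogger(__name__)
log.addHandler(SQLiteHandler())
log.info("started", extra={"botname": "example-bot"})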
Esempio n. 59
0
#!/usr/bin/env python

import shutil
import os
from appdirs import user_data_dir
from pathlib import Path

from flix.version import __version__

data_path = Path(
    user_data_dir("FastFlix",
                  appauthor=False,
                  version=__version__,
                  roaming=True))
data_path.mkdir(parents=True, exist_ok=True)

dest = f'{data_path}{os.sep}plugins'

print(f"Copying plugins to {dest}")

shutil.rmtree(dest, ignore_errors=True)
shutil.copytree(f'flix{os.sep}plugins', dest)
Esempio n. 60
0
class DataDir(object):
    """ This class ensures that the user's data is stored in its OS
        protected user directory:

        **OSX:**

         * `~/Library/Application Support/<AppName>`

        **Windows:**

         * `C:\Documents and Settings\<User>\Application Data\Local Settings\<AppAuthor>\<AppName>`
         * `C:\Documents and Settings\<User>\Application Data\<AppAuthor>\<AppName>`

        **Linux:**

         * `~/.local/share/<AppName>`

         Furthermore, it offers an interface to generate backups
         in the `backups/` directory every now and then.
    """
    appname = "peerplays"
    appauthor = "Peerplays Blockchain Standards Association"
    storageDatabase = "peerplays.sqlite"

    data_dir = user_data_dir(appname, appauthor)
    sqlDataBaseFile = os.path.join(data_dir, storageDatabase)

    def __init__(self):
        #: Storage
        self.mkdir_p()

    def mkdir_p(self):
        """ Ensure that the directory in which the data is stored
            exists
        """
        if os.path.isdir(self.data_dir):
            return
        else:
            try:
                os.makedirs(self.data_dir)
            except FileExistsError:
                return
            except OSError:
                raise

    def sqlite3_backup(self, dbfile, backupdir):
        """ Create timestamped database copy
        """
        if not os.path.isdir(backupdir):
            os.mkdir(backupdir)
        backup_file = os.path.join(
            backupdir,
            os.path.basename(self.storageDatabase) +
            datetime.now().strftime("-" + timeformat))
        connection = sqlite3.connect(self.sqlDataBaseFile)
        cursor = connection.cursor()
        # Lock database before making a backup
        cursor.execute('begin immediate')
        # Make new backup file
        shutil.copyfile(dbfile, backup_file)
        log.info("Creating {}...".format(backup_file))
        # Unlock database
        connection.rollback()
        configStorage["lastBackup"] = datetime.now().strftime(timeformat)

    def clean_data(self):
        """ Delete files older than 70 days
        """
        log.info("Cleaning up old backups")
        for filename in os.listdir(self.data_dir):
            backup_file = os.path.join(self.data_dir, filename)
            if os.stat(backup_file).st_ctime < (time.time() - 70 * 86400):
                if os.path.isfile(backup_file):
                    os.remove(backup_file)
                    log.info("Deleting {}...".format(backup_file))

    def refreshBackup(self):
        """ Make a new backup
        """
        backupdir = os.path.join(self.data_dir, "backups")
        self.sqlite3_backup(self.sqlDataBaseFile, backupdir)
        self.clean_data()
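A small, hypothetical demo of what the docstring above describes: the same appname/appauthor pair resolves to a different OS-protected location depending on the platform.

from appdirs import user_data_dir

print(user_data_dir("peerplays", "Peerplays Blockchain Standards Association"))
# e.g. ~/Library/Application Support/peerplays  (macOS)
#      ~/.local/share/peerplays                 (Linux)
#      %LOCALAPPDATA%\<AppAuthor>\peerplays     (Windows)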