Beispiel #1
0
    def get_backend(self, backend_name):
        """
        Get options of backend.

        :returns: a tuple with the module name and the module options dict
        :rtype: tuple
        """
        config = RawConfigParser()
        config.read(self.confpath)
        if not config.has_section(backend_name):
            raise KeyError(u'Configured backend "%s" not found' % backend_name)

        options = dict(config.items(backend_name))
        module_name = options.pop('_module', None)
        if module_name is None:
            # Legacy configs used "_backend" instead of "_module";
            # migrate them on the fly via edit_backend().
            module_name = options.pop('_backend', None)
            if module_name is None:
                warning('Missing field "_module" for configured backend "%s"', backend_name)
                raise KeyError(u'Configured backend "%s" not found' % backend_name)
            self.edit_backend(backend_name, module_name, options)
        return module_name, options
Beispiel #2
0
class Config():

    """
    Load and access the carbonate configuration.
    """

    def __init__(self, config_file):
        self.config_file = config_file
        self.config = RawConfigParser()
        self.config.read(config_file)

    def _require_cluster(self, cluster):
        """Exit with an error when `cluster` has no section in the config."""
        if not self.config.has_section(cluster):
            raise SystemExit("Cluster '%s' not defined in %s"
                             % (cluster, self.config_file))

    def clusters(self):
        """Return the clusters defined in the config file."""
        return self.config.sections()

    def destinations(self, cluster='main'):
        """Return a list of destinations for a cluster."""
        self._require_cluster(cluster)
        destinations = self.config.get(cluster, 'destinations')
        return destinations.replace(' ', '').split(',')

    def replication_factor(self, cluster='main'):
        """Return the replication factor for a cluster as an integer."""
        self._require_cluster(cluster)
        return int(self.config.get(cluster, 'replication_factor'))

    def ssh_user(self, cluster='main'):
        """Return the ssh user for a cluster or current user if undefined."""
        self._require_cluster(cluster)
        try:
            return self.config.get(cluster, 'ssh_user')
        except NoOptionError:
            return pwd.getpwuid(os.getuid()).pw_name

    def whisper_lock_writes(self, cluster='main'):
        """Lock whisper files during carbon-sync. Defaults to False."""
        self._require_cluster(cluster)
        try:
            # getboolean() parses "true"/"false", "yes"/"no", "on"/"off",
            # "1"/"0".  The previous bool() on the raw string was True for
            # ANY non-empty value, including the literal string "false".
            return self.config.getboolean(cluster, 'whisper_lock_writes')
        except NoOptionError:
            return False

    def hashing_type(self, cluster='main'):
        """Hashing type of cluster. Defaults to 'carbon_ch'."""
        self._require_cluster(cluster)
        try:
            return self.config.get(cluster, 'hashing_type')
        except NoOptionError:
            return 'carbon_ch'
Beispiel #3
0
 def backend_exists(self, name):
     """
     Return True if the backend exists in config.
     """
     parser = RawConfigParser()
     parser.read(self.confpath)
     return parser.has_section(name)
Beispiel #4
0
    def iter_backends(self):
        """
        Iterate on backends.

        Yields one tuple per section of the config file at ``self.confpath``.
        Sections that still use the legacy "_backend" key are migrated to
        "_module" and the file is rewritten once, after the loop finishes.

        :returns: each tuple contains the backend name, module name and module options
        :rtype: :class:`tuple`
        """

        config = RawConfigParser()
        config.read(self.confpath)
        changed = False  # set when a legacy "_backend" key was migrated
        for backend_name in config.sections():
            params = dict(config.items(backend_name))
            try:
                module_name = params.pop('_module')
            except KeyError:
                # Fall back to the legacy key and rewrite it in the parser.
                try:
                    module_name = params.pop('_backend')
                    config.set(backend_name, '_module', module_name)
                    config.remove_option(backend_name, '_backend')
                    changed = True
                except KeyError:
                    # Neither key present: warn and skip this section.
                    warning('Missing field "_module" for configured backend "%s"', backend_name)
                    continue
            yield backend_name, module_name, params

        # Persist the migration, if any.  NOTE(review): the 'wb' mode
        # suggests this targets Python 2; on Python 3 config.write()
        # needs a text-mode file — confirm before porting.
        if changed:
            with open(self.confpath, 'wb') as f:
                config.write(f)
 def setUp(self):
     """Start a Firefox webdriver and load the test credentials from the
     private config file; fall back to INVALID_* placeholders when the
     expected sections are missing (Python 2 code: uses print statement)."""
     self.driver = webdriver.Firefox()
     self.driver.implicitly_wait(30)
     self.verificationErrors = []
     self.accept_next_alert = True
     parser = RawConfigParser()
     # Credentials are kept out of source control, under .private/.
     config_file = join(BASE_DIR, '.private/.config.txt')
     self.name = 'testcluster'
     parser.read(config_file)
     try:
         self.token = parser.get('cloud \"~okeanos\"', 'token')
         self.auth_url = parser.get('cloud \"~okeanos\"', 'url')
         self.base_url = parser.get('deploy', 'url')
         self.project_name = parser.get('project', 'name')
         auth = check_credentials(self.token)
         try:
             list_of_projects = auth.get_projects(state='active')
         except Exception:
             self.assertTrue(False,'Could not get list of projects')
         # Resolve the configured project name to its id.
         for project in list_of_projects:
             if project['name'] == self.project_name:
                 self.project_id = project['id']
     except NoSectionError:
         # Config file absent/incomplete: use sentinel values so later
         # failures are self-explanatory rather than parse errors.
         self.token = 'INVALID_TOKEN'
         self.auth_url = "INVALID_AUTH_URL"
         self.base_url = "INVALID_APP_URL"
         self.project_name = "INVALID_PROJECT_NAME"
         print 'Current authentication details are kept off source control. ' \
               '\nUpdate your .config.txt file in <projectroot>/.private/'
Beispiel #6
0
    def render_POST(self, request):
        """Queue `item` on the local SMPlayer instance over TCP and answer
        asynchronously with a JSON success/failure body."""

        def _renderResponse(response):
            # addBoth delivers either the result or a twisted Failure.
            if isinstance(response, Failure):
                request.write(json.dumps({'success': False, 'message': response.getErrorMessage()}))
            else:
                request.write(json.dumps({'success': True}))
            request.finish()

        content = parse_qs(request.content.read())

        # NOTE(review): `command` is parsed but never used below — confirm intent.
        command = content.get('playlist.add', [None])[0]
        item = content['item'][0]


        # SMPlayer records its auto-selected control port in its own ini file.
        config_parser = RawConfigParser()
        config_parser.read(os.path.expanduser("~/.config/smplayer/smplayer.ini"))
        port = config_parser.getint('instances', 'temp\\autoport')


        creator = ClientCreator(reactor, SMPlayer, item = item)
        creator.connectTCP('127.0.0.1', port).addBoth(_renderResponse)

        request.setHeader('Access-Control-Allow-Origin', '*')
        request.setHeader('Content-Type', 'application/json')
        # The body is written later by _renderResponse.
        return NOT_DONE_YET
def main():
    print "reading configuration"
    logging.basicConfig()
    config = RawConfigParser()
    config.read(['SystemInfoJabberBot.cfg',expanduser('~/.config/SystemInfoJabberBot.cfg')]) 
    username = config.get('systembot','username')
    password = config.get('systembot','password')
    auth_users_raw= config.get('systembot','auth_users')
    auth_users=auth_users_raw.replace(' ','').split(',')
    
    print "set config"
    bot = SystemInfoJabberBot(username,password,auth_users)
    
    # Transmission config
    if config.has_section('transmissionrpc'):
        host = config.get('transmissionrpc','host')
        port = config.getint('transmissionrpc','port')
        try:
            user = config.get('transmissionrpc','user')
            psw = config.get('transmissionrpc','password')
            bot.setTransmissionConfig(host,port=port,user=user,psw=psw)
        except NoOptionError:
            bot.setTransmissionConfig(host,port=port)
    
    if config.has_section('logs'):
        log_files=config.items('logs')
        
        bot.setLogFiles( dict(log_files) )
    print "start serve"
    bot.serve_forever()

    try:
        bot.quit()
    except Exception:
        pass
Beispiel #8
0
def get_firefox_home_file(needed_file):
    """Return the path of `needed_file` inside the default Firefox profile.

    Looks for a profile directory under ~/.mozilla (firefox-3.5 first, then
    firefox), reads its profiles.ini and picks the profile with Default=1,
    falling back to the first profile that has a Path.

    Returns None when no Firefox directory exists, "" when no profile path
    could be determined, and the joined path otherwise.
    """
    for firefox_dir in (os.path.expanduser(p) for p in
                        ("~/.mozilla/firefox-3.5/", "~/.mozilla/firefox/")):
        if os.path.exists(firefox_dir):
            break
    else:
        # Loop finished without break: no Firefox directory at all.
        return None

    # "Default" falls back to the int 0 for sections that do not set it, so
    # the string comparison below is only true for an explicit Default=1.
    config = RawConfigParser({"Default": 0})
    config.read(os.path.join(firefox_dir, "profiles.ini"))
    path = None

    for section in config.sections():
        if config.has_option(section, "Default") and config.get(section, "Default") == "1":
            path = config.get(section, "Path")
            break
        elif path is None and config.has_option(section, "Path"):
            # Remember the first profile with a path as a fallback.
            path = config.get(section, "Path")

    if path is None:
        return ""

    # Absolute profile paths are used as-is; relative ones are anchored
    # at the Firefox directory.
    if path.startswith("/"):
        return os.path.join(path, needed_file)

    return os.path.join(firefox_dir, path, needed_file)
    def login(self, config_file=None):
        """Login wikipedia using credential configuration file.

        If config_file argument is filled to the method, it override the one
        of the object (and save it). Other wise it uses the config_file
        attribute.

        Args:
            config_file (str, optional): Path to the credential configuration
                file.

        Raises:
            ValueError: when neither config_file is given to the method or to
                the objects.
        """
        if config_file is None:
            if self.config_file is None:
                raise ValueError('Trying to login without config_file')
        else:
            self.config_file = config_file
        parser = RawConfigParser()
        parser.read(self.config_file)
        user = parser.get('login', 'user')
        self.site.login(user, parser.get('login', 'password'))
        self.__is_logged__ = True
        LOG.info("Logged in as '%s'", user)
Beispiel #10
0
    def _parse_legacy_config_file(self):
        """
        Parse a legacy configuration file.
        """
        conf = RawConfigParser()
        conf.read(LEGACY_CONFIG_FILE)

        styles = self.styles.copy()

        # Template overrides from the [params] section.
        for option in ('dm_template', 'header_template'):
            if conf.has_option('params', option):
                styles[option] = conf.get('params', option)

        self.styles.update(styles)

        if conf.has_option('params', 'logging_level'):
            self.logging_level = conf.getint('params', 'logging_level')

        # Custom key bindings from the [keys] section.
        for binding in self.key_bindings:
            if conf.has_option('keys', binding):
                self._set_key_binding(binding, conf.get('keys', binding))

        # Custom foreground colors, keyed by palette label.
        for label in (color[0] for color in PALETTE):
            if conf.has_option('colors', label):
                self._set_color(label, conf.get('colors', label))
     def __init__(self, hashfile=LOCATION, storeKey="", configFile=CONFIGFILE):
          self.userDict = { }                       # Mapping of username -> salt:hash
          self.hashfile = hashfile                  # Encrypted file, contains lines of user:salt:hash 
          self.h = SHA256.new()
          self.crypto = crypto.TBCrypt()            # AES encryption/IV functions
          self.storeKey = storeKey                  # Key material to open encrypted hash store
          if configFile:
               try:
                   fp = open(configFile)
               except IOError as e:
                   error = 'IOError: can''t access file ''%s'' (%s).' % (configFile, os.strerror(e.errno))
                   raise HashStoreException(error)
          
               config = RawConfigParser()
               config.read(configFile)
               self.storeKey = config.get('hashstore', 'key')
               self.hashfile = config.get('hashstore', 'location')
          else:
               if os.path.exists(self.hashfile):
                    self.updateUserDict()
               else:
                    self.__saveHashstore()

          if self.storeKey == '':
               print "WARNING: hashstoremanager.py: no hashstore key defined!"

#          print "h.update"
          self.h.update(self.storeKey)
#          print "self.storeKey = self.h.digest()"
          self.storeKey = self.h.digest()
 def setUp(self):
     """
     Set up the arguments that every unit test for put/get will use.
     """
     parser = RawConfigParser()
     # Credentials are kept out of source control, under .private/.
     config_file = join(BASE_DIR, '.private/.config.txt')
     self.name = 'orkatest'
     parser.read(config_file)
     try:
         self.token = parser.get('cloud \"~okeanos\"', 'token')
         self.auth_url = parser.get('cloud \"~okeanos\"', 'url')
         self.base_url = parser.get('deploy', 'url')
         self.project_name = parser.get('project', 'name')
         self.master_IP = parser.get('cluster', 'master_ip')
         clusters = get_user_clusters(self.token)
         self.active_cluster = None
         # for/else: the else branch runs only when the loop finishes
         # without break, i.e. no configured cluster was found with
         # hadoop started.
         for cluster in clusters:
             if cluster['master_IP'] == self.master_IP:
                 if cluster['hadoop_status'] == const_hadoop_status_started:
                     self.active_cluster = cluster
                     break
         else:
             logging.error(' You can take file actions on active clusters with started hadoop only.')
             exit(error_fatal)
         self.opts = {'source': '', 'destination': '', 'token': self.token, 'cluster_id': self.active_cluster['id'],
                      'auth_url': self.auth_url, 'user': '', 'password': ''}
     except NoSectionError:
         # Missing config: fill sentinels and tell the developer how to fix.
         self.token = 'INVALID_TOKEN'
         self.auth_url = "INVALID_AUTH_URL"
         self.base_url = "INVALID_APP_URL"
         self.project_name = "INVALID_PROJECT_NAME"
         print 'Current authentication details are kept off source control. ' \
               '\nUpdate your .config.txt file in <projectroot>/.private/'
Beispiel #13
0
def getversioncfg():
    """Build and return a RawConfigParser holding the package version data.

    Version info is taken, in order of preference, from the git checkout
    (gitinfo()), from the expanded git-archive config file, or from
    FALLBACK_VERSION.  The active version file is rewritten when it is
    missing or stale.
    """
    import re
    from ConfigParser import RawConfigParser
    vd0 = dict(version=FALLBACK_VERSION, commit='', date='', timestamp=0)
    # first fetch data from gitarchivecfgfile, ignore if it is unexpanded
    g = vd0.copy()
    cp0 = RawConfigParser(vd0)
    cp0.read(gitarchivecfgfile)
    if '$Format:' not in cp0.get('DEFAULT', 'commit'):
        # The archive file was expanded by git-archive; look for a version tag.
        g = cp0.defaults()
        mx = re.search(r'\btag: v(\d[^,]*)', g.pop('refnames'))
        if mx:
            g['version'] = mx.group(1)
    # then try to obtain version data from git.
    gitdir = os.path.join(MYDIR, '.git')
    if os.path.exists(gitdir) or 'GIT_DIR' in os.environ:
        try:
            g = gitinfo()
        except OSError:
            # git binary missing or not runnable; keep the archive data.
            pass
    # finally, check and update the active version file
    cp = RawConfigParser()
    cp.read(versioncfgfile)
    d = cp.defaults()
    rewrite = not d or (g['commit'] and (
        g['version'] != d.get('version') or g['commit'] != d.get('commit')))
    if rewrite:
        cp.set('DEFAULT', 'version', g['version'])
        cp.set('DEFAULT', 'commit', g['commit'])
        cp.set('DEFAULT', 'date', g['date'])
        cp.set('DEFAULT', 'timestamp', g['timestamp'])
        # Close the file promptly instead of leaking the handle as the
        # previous cp.write(open(...)) did.
        with open(versioncfgfile, 'w') as fp:
            cp.write(fp)
    return cp
Beispiel #14
0
 def parseSettings(self):
     """Re-read settings.ini: join newly-listed channels, part removed
     ones, and refresh credentials/paths.

     Returns True on success, False when any section/option is missing
     or unreadable.
     """
     try:
         self.prnt("Reading in settings from settings.ini...")
         # Snapshot the current channel list.  The old code aliased the
         # same list object and removed elements while iterating it,
         # which skipped every other element and made the join/part
         # membership tests below compare the list against itself.
         oldchans = list(self.joinchans)
         settings = ConfigParser()
         settings.read("settings.ini")
         channels = settings.items("channels")
         del self.joinchans[:]
         for element in channels:
             self.joinchans.append(element)
             if element not in oldchans and not self.firstjoin == 1:
                 self.join("#%s" % element[0])
         # Part channels that disappeared from the config.
         for element in oldchans:
             if element not in self.joinchans and not self.firstjoin == 1:
                 self.part("#%s" % element[0])
         self.password = settings.get("info", "password")
         if not self.firstjoin == 1:
             self.sendmsg("nickserv", "IDENTIFY %s" % self.password)
         self.loginpass = settings.get("info", "loginpass")
         self.control_char = settings.get("info", "control_character")
         self.data_dir = settings.get("info", "data_folder")
         self.index_file = settings.get("info", "index_file")
         self.api_key = settings.get("mcbans", "api_key")
     except Exception:
         # Any parse failure aborts the reload (was a bare except, which
         # also swallowed KeyboardInterrupt/SystemExit).
         return False
     else:
         self.prnt("Done!")
         return True
Beispiel #15
0
def do_10_install_packages():
  """Install or remove packages (as per debfix/debfix-packages.conf)."""
  from ConfigParser import RawConfigParser
  config = RawConfigParser(allow_no_value=True)
  config.read(data_dir + 'debfix-packages.conf')
  sections = config.sections()
  run('apt-get -y -q update')
  run('apt-get -y -q --allow-unauthenticated install aptitude debian-archive-keyring deb-multimedia-keyring')
  run('apt-get -y -q update')
  if user_choice('Upgrade all upgradable packages'):
    run('aptitude -y -q full-upgrade')
  # Space-separated package lists marked for install/removal; 'sections'
  # records which config sections the user accepted.
  marked = {'install':'', 'remove':'', 'sections':''}
  for section in sections:
    # The section named 'remove' lists packages to purge; all others install.
    question = "{} packages from '{}' section".format('Install' if section != 'remove' else 'Remove', section)
    packages = ' '.join(i[0] for i in config.items(section))
    while True:
      # Answering '?' prints the section's packages, then re-asks.
      choice = user_choice(question, other_choices='?')
      if choice == '?':
        log.info("Section '{}' contains packages: {}".format(section, packages))
        continue
      if choice:
        marked['sections'] += section + ' '
        if section == 'remove':
          marked['remove'] += packages + ' '
        else:
          marked['install'] += packages + ' '
      break
  if user_choice('Install: {install}\nRemove: {remove}\nApply changes'.format(**marked)):
    _apt_install_packages(marked)
    # due to assume-yes-based decisions, some packages may not be successfully installed (yet), retry
    _apt_install_packages(marked, second_time_around=True)
  run('aptitude -y -q clean')
  log.info('Done installing packages')
Beispiel #16
0
    def from_config_file(self, config_file, allow_profile = False):
        """
        Get the settings from a configuration file.

        :param config_file: Configuration file.
        :type config_file: str

        :param allow_profile: True to allow reading the profile name
            from the config file, False to forbid it. Global config
            files should allow setting a default profile, but profile
            config files should not, as it wouldn't make sense.
        """
        parser = RawConfigParser()
        parser.read(config_file)
        if not parser.has_section("golismero"):
            return
        # Keep only the options that have a non-empty value.
        options = {}
        for key, value in parser.items("golismero"):
            if value:
                options[key] = value
        if "profile" in options:
            if allow_profile:
                self.profile = options["profile"]
                self.profile_file = get_profile(self.profile)
            else:
                del options["profile"]
        # Strip options that must never come from a config file.
        for forbidden in self._forbidden_:
            options.pop(forbidden, None)
        if options:
            self.from_dictionary(options)
Beispiel #17
0
def load():
    """Loads FileDirectives from ConfigFile into this module's attributes."""
    section = "*"
    module = sys.modules[__name__]
    parser = RawConfigParser()
    parser.optionxform = str # Force case-sensitivity on names
    try:
        parser.read(ConfigFile)
        for name in FileDirectives:
            try: # parser.get can throw an error if not found
                value_raw = parser.get(section, name)
                success = False
                # First, try to interpret as JSON
                try:
                    value = json.loads(value_raw)
                    success = True
                except:
                    pass
                if not success:
                    # JSON failed, try to eval it
                    # NOTE(review): eval() on config-file content executes
                    # arbitrary code — only acceptable if ConfigFile is
                    # fully trusted; confirm before reuse.
                    try:
                        value = eval(value_raw)
                        success = True
                    except:
                        # JSON and eval failed, fall back to string
                        value = value_raw
                        success = True
                if success:
                    setattr(module, name, value)
            except:
                pass
    except Exception, e:
        pass # Fail silently
Beispiel #18
0
 def __init__(self, config):
     """Parse the ini file at `config` into nested
     {sectiongroup: {section: options}} mappings on self, then run the
     '<' expansion and per-key massage_* hooks."""
     _config = RawConfigParser()
     _config.optionxform = lambda s: s  # keep option names case-sensitive
     _config.read(config)
     self.path = os.path.dirname(config)
     for section in _config.sections():
         # "group:name" sections are grouped; bare sections go to 'global'.
         if ':' in section:
             sectiongroupname, sectionname = section.split(':')
         else:
             sectiongroupname, sectionname = 'global', section
         items = dict(_config.items(section))
         sectiongroup = self.setdefault(sectiongroupname, {})
         sectiongroup.setdefault(sectionname, {}).update(items)
     seen = set()
     for sectiongroupname in self:
         sectiongroup = self[sectiongroupname]
         for sectionname in sectiongroup:
             section = sectiongroup[sectionname]
             # A '<' key marks inheritance from another section; 'seen' is
             # threaded through _expand, presumably to avoid re-expansion
             # cycles — confirm in _expand.
             if '<' in section:
                 self._expand(sectiongroupname, sectionname, section, seen)
             for key in section:
                 # Optional per-key hook, e.g. massage_global_somekey(value).
                 fname = 'massage_%s_%s' % (sectiongroupname, key)
                 massage = getattr(self, fname, None)
                 if callable(massage):
                     section[key] = massage(section[key])
Beispiel #19
0
def __config_to_dict(conf_fp):
	"""Parse the loris config file at `conf_fp` into a nested dict and
	coerce the boolean and list-valued options to proper Python types."""
	config_parser = RawConfigParser()
	config_parser.read(conf_fp)

	# Shortcut, but everything comes in as a str, keyed by section.
	config = {section: dict(config_parser.items(section))
		for section in config_parser.sections()}

	loris = config['loris.Loris']

	# Convert the '0'/'1' flags of the main section to real bools
	# (was five copy-pasted statements).
	for flag in ('enable_caching', 'redirect_conneg', 'redirect_base_uri',
			'redirect_cannonical_image_request', 'enable_cors'):
		loris[flag] = bool(int(loris[flag]))

	# Convert the comma-separated whitelist to a list.  Uses str.strip
	# instead of the deprecated string.strip module function.
	loris['cors_whitelist'] = [s.strip() for s in loris['cors_whitelist'].split(',')]

	# convert transforms.*.target_formats to lists
	for tf in __transform_sections_from_config(config):
		config[tf]['target_formats'] = [s.strip() for s in config[tf]['target_formats'].split(',')]

	return config
Beispiel #20
0
class Configuration(object):
    # NOTE(review): this block appears to have been mangled by a
    # credential-scrubbing tool: the prompts around the '******' markers
    # and the password-confirmation loop are no longer valid Python
    # (passwd/passwd2/passwd_confirmed are referenced but never set up).
    # Restore from the original source before using this class.
    def __init__(self, filename):
        """Load the jira config from `filename`, or interactively create
        it (prompting for credentials) when the file does not exist."""
        self.parser = RawConfigParser()
        filepath = os.path.expanduser(filename)
        if os.path.exists(filepath): self.parser.read(filepath)
        else:
            # Set up config file
            self.parser.add_section('jira_default')
            self.parser.set('jira_default', 'username', raw_input('username: '******'password: '******'confirm: ')
                if passwd != passwd2: print 'passwords do not match.'
                else: passwd_confirmed = True
            self.parser.set('jira_default', 'password', passwd)
            self.parser.set('jira_default', 'host', raw_input('host (e.g jira.atlassian.com): '))
            self.parser.set('jira_default', 'path', '/rest/api/latest')
            # Color-coded statuses
            #self.parser.set('colors', 'Resolved', 'green')
            #self.parser.set('colors', 'In Progress', 'magenta')
            f = open(filepath, 'w')
            self.parser.write(f)
            os.chmod(filepath, 0600) #Only user can read this
Beispiel #21
0
def _path_fixer(filepath, root=None):
    """Change all the relative paths in `filepath` to absolute ones.

    :param filepath: File to be changed
    :param root: Root path with which the relative paths are prefixed. If None
    (default), the directory with this file is the root.
    """
    if root is None:
        root = op.join(op.abspath(op.dirname(__file__)))
    if filepath.endswith((".yaml", ".yml")):
        # YAML specs: prefix each entry's 'path' value with the root.
        # NOTE(review): data.itervalues() is Python 2 only.
        with open(filepath, "r") as fileobj:
            data = yaml.load(fileobj, Loader=Loader)
        for specs in data.itervalues():
            specs['path'] = op.join(root, specs['path'])
        with open(filepath, "w") as fileobj:
            yaml.dump(data, fileobj, Dumper=Dumper,
                      default_flow_style=False)
    elif filepath.endswith(".conf"):
        # INI specs: re-anchor each section's 'specfile' option.
        parser = RawConfigParser()
        parser.read(filepath)
        for section in parser.sections():
            path = parser.get(section, "specfile")
            parser.remove_option(section, "specfile")
            parser.set(section, "specfile", op.join(root, path))
        with open(filepath, "w") as fileobj:
            parser.write(fileobj)
Beispiel #22
0
    def create_new_conf_from_modifications(self):
        """
        Return a new RawConfigParser instance that has been created from the
        non-default modifications returned by the `modifications` property
        above.

        Raises RepositoryNotSet when no repository path is configured, and
        NoModificationsMade when there is nothing to write out.
        """
        # This is a bit hacky as the underlying config classes don't really
        # support the notion of "only write out sections/options that have
        # changed since we loaded the defaults".
        if not self.repo_path:
            raise RepositoryNotSet()

        mods = self.modifications
        if not mods:
            raise NoModificationsMade()

        filename = self.writable_repo_override_conf_filename
        conf = RawConfigParser()
        conf.read(filename)
        for (section, options) in mods.items():
            # add_section() raises DuplicateSectionError when the override
            # file already contains the section, so guard it.
            if not conf.has_section(section):
                conf.add_section(section)
            for (option, value) in options.items():
                conf.set(section, option, value)

        return conf
Beispiel #23
0
 def __init__(self, node_type, config_file):
     """Load daemon settings for `node_type` (VXRD or VXSND) from
     `config_file` plus an optional '<config_file>.override', falling
     back to each field's declared default."""
     self.__params = self.__get_fields(self.CommonConfig)
     self.node_type = node_type
     if node_type == NodeType.VXRD:
         self.__params.update(self.__get_fields(self.VxrdConfig))
         pidfile = DefaultPidFile.VXRD
         udsfile = DefaultUdsFile.VXRD
     elif node_type == NodeType.VXSND:
         self.__params.update(self.__get_fields(self.VxsndConfig))
         pidfile = DefaultPidFile.VXSND
         udsfile = DefaultUdsFile.VXSND
     else:
         raise RuntimeError('Invalid node type %s. Acceptable values are '
                            '%s' % (node_type, ', '.join(NodeType.VALUES)))
     # Field defaults double as parser defaults so a missing *option*
     # resolves without raising; only a missing *section* raises below.
     default_dict = {field.name: field.default for field in self.__params}
     config_parser = RawConfigParser(defaults=default_dict,
                                     allow_no_value=True)
     # The '.override' file, when present, wins over the base config.
     config_parser.read([config_file, config_file + '.override'])
     for field in self.__params:
         try:
             value = config_parser.get(field.section, field.name)
         except NoSectionError:
             value = field.default
         setattr(self, field.name, field(value))
     # Fall back to the node-type default paths when the config left
     # these attributes unset/empty.
     self.pidfile = self.pidfile or pidfile
     self.udsfile = self.udsfile or udsfile
Beispiel #24
0
class Configuration:
    """Thin wrapper around RawConfigParser that resolves the config path
    through the CONF_HOME environment variable and provides getters with
    defaults for missing options."""

    def __init__(self, configfile):
        # Anchor the file under $CONF_HOME when that variable is set.
        config_home = os.getenv("CONF_HOME")
        if config_home:
            self._configFile = "%s/%s" % (config_home, configfile)
        else:
            self._configFile = configfile
        self._genConf()

    def _setConfigFile(self, configFile=None):
        """Point the object at a new config file and reload it."""
        self._configFile = configFile
        if not self._configFile:
            raise Exception("配置文件不存在")
        self._genConf()

    def _genConf(self):
        """(Re)load self._configFile into the underlying parser."""
        if not self._configFile:
            raise Exception("没有配置文件")
        self._config = RawConfigParser()
        self._config.read(self._configFile)

    def get(self, sect, opt, default=None):
        """Return the option as a string, or `default` when missing."""
        if not self._config.has_option(sect, opt):
            return default
        return self._config.get(sect, opt)

    def getint(self, sect, opt, default=None):
        """Return the option as an int, or `default` when missing."""
        if not self._config.has_option(sect, opt):
            return default
        return self._config.getint(sect, opt)

    def items(self, sect):
        """Return the (name, value) pairs of section `sect`."""
        return self._config.items(sect)
Beispiel #25
0
class Config(object):

    """Read syncer.cfg from the working directory and expose its values."""

    def __init__(self):
        self._load()
        self._read()

    def _load(self):
        """Parse syncer.cfg from the current working directory."""
        self.config = RawConfigParser()
        self.config.read(os.path.join(os.getcwd(), 'syncer.cfg'))

    def _read(self):
        """Cache the settings used elsewhere in the application."""
        self.sync_crons = self._get_multiline('cron', 'sync', [])
        self.unison_executable = self._get(
            'unison', 'executable', '/usr/local/bin/unison')

    def _get(self, section, option, default):
        """Return the option's value, or `default` when absent or empty."""
        if not self.config.has_option(section, option):
            return default
        return self.config.get(section, option) or default

    def _get_multiline(self, section, option, default):
        """Like _get(), but split a present value into a list of lines."""
        value = self._get(section, option, default)
        if value is default:
            return value
        return value.strip().split('\n')
Beispiel #26
0
    def check_file(self, pkg, filename):
        """Validate a .desktop file inside `pkg`: run desktop-file-validate,
        check UTF-8 encoding, and verify the Exec binary ships in the
        package (absolute path or one of STANDARD_BIN_DIRS)."""
        root = pkg.dirName()
        f = root + filename
        st = getstatusoutput(('desktop-file-validate', f), True)
        if st[0]:
            # Non-zero exit: report each 'error:' line individually, or a
            # generic error when the output contains none.
            error_printed = False
            for line in st[1].splitlines():
                if 'error: ' in line:
                    printError(pkg, 'invalid-desktopfile', filename,
                               line.split('error: ')[1])
                    error_printed = True
            if not error_printed:
                printError(pkg, 'invalid-desktopfile', filename)
        if not is_utf8(f):
            printError(pkg, 'non-utf8-desktopfile', filename)

        cfp = RawConfigParser()
        cfp.read(f)
        binary = None
        if cfp.has_option('Desktop Entry', 'Exec'):
            # Only the command word matters, not its arguments.
            binary = cfp.get('Desktop Entry', 'Exec').split(' ', 1)[0]
        if binary:
            found = False
            if binary.startswith('/'):
                found = os.path.exists(root + binary)
            else:
                for i in STANDARD_BIN_DIRS:
                    if os.path.exists(root + i + binary):
                        # no need to check if the binary is +x, rpmlint does it
                        # in another place
                        found = True
                        break
            if not found:
                printWarning(pkg, 'desktopfile-without-binary', filename,
                             binary)
Beispiel #27
0
def _fetchAzureAccountKey(accountName):
    """
    Find the account key for a given Azure storage account.

    The account key is taken from the AZURE_ACCOUNT_KEY_<account> environment variable if it
    exists, then from plain AZURE_ACCOUNT_KEY, and then from looking in the file
    ~/.toilAzureCredentials. That file has format:

    [AzureStorageCredentials]
    accountName1=ACCOUNTKEY1==
    accountName2=ACCOUNTKEY2==
    """
    try:
        return os.environ['AZURE_ACCOUNT_KEY_' + accountName]
    except KeyError:
        try:
            return os.environ['AZURE_ACCOUNT_KEY']
        except KeyError:
            configParser = RawConfigParser()
            configParser.read(os.path.expanduser(credential_file_path))
            try:
                return configParser.get('AzureStorageCredentials', accountName)
            except NoOptionError:
                raise RuntimeError("No account key found for '%s', please provide it in '%s'" %
                                   (accountName, credential_file_path))
Beispiel #28
0
class OriginAuthStore(object):
    def __init__(self, config_file):
        self.config_file = config_file
        self.config = RawConfigParser()
        self.config.read(config_file)

    def origin(self, name):
        return OriginAuth(self, name)

    def __getitem__(self, origin_name):
        try:
            return dict(self.config.items(origin_name))
        except NoSectionError:
            return {}

    def __setitem__(self, origin_name, auth):
        try:
            self.config.remove_section(origin_name)
        except NoSectionError:
            pass

        if auth:
            self.config.add_section(origin_name)
            for key, val in auth.iteritems():
                self.config.set(origin_name, key, val)

        with open(self.config_file, 'w') as f:
            self.config.write(f)

        try:
            os.chmod(self.config_file, stat.S_IRUSR | stat.S_IWUSR)
        except OSError:
            print 'Unable to chmod 600 %s' % self.config_file  # TODO: Test
Beispiel #29
0
class Config(dict):
    """Dict-like application configuration loaded from an INI file.

    Values missing from the file keep the defaults supplied by
    ``get_defaults()`` (defined elsewhere on this class).
    """

    def __init__(self):
        dict.__init__(self)
        self.refresh()

    def __get_conf_val(self, section, param, var_name, p_type=str):
        """Read ``[section] param`` into ``self[var_name]``, cast via *p_type*.

        Missing sections/options are deliberately ignored so the default
        value stays in place.
        """
        try:
            val = self.__config.get(section, param)
            self[var_name] = p_type(val)
        except ConfigParser.Error:
            pass

    def refresh(self):
        """Reload configuration from disk.

        Creates the config file with defaults (via ``save()``) if it does
        not exist yet. Raises a generic Exception on parse errors.
        """
        try:
            self.update(self.get_defaults())
            config_file = self.get_config_file_path()

            if not os.path.exists(config_file):
                self.save()

            self.__config = RawConfigParser()
            self.__config.read(config_file)

            self.__get_conf_val('LOG', 'log_level', 'log_level')
            self.__get_conf_val('FABNET', 'fabnet_url', 'fabnet_hostname')
            self.__get_conf_val('FABNET', 'parallel_put_count', 'parallel_put_count', int)
            self.__get_conf_val('FABNET', 'parallel_get_count', 'parallel_get_count', int)
            self.__get_conf_val('CACHE', 'data_dir', 'data_dir')
            self.__get_conf_val('CACHE', 'cache_size', 'cache_size', int)
            self.__get_conf_val('WEBDAV', 'bind_hostname', 'webdav_bind_host')
            # NOTE(review): bind_port is kept as a string (no int cast) —
            # confirm consumers expect str before changing.
            self.__get_conf_val('WEBDAV', 'bind_port', 'webdav_bind_port')
            self.__get_conf_val('WEBDAV', 'mount_type', 'mount_type')
            self.__get_conf_val('CA', 'ca_address', 'ca_address')
        # 'as' syntax replaces the deprecated 'except X, e' form; it is
        # valid from Python 2.6 onward and required on Python 3.
        except ConfigParser.Error as msg:
            raise Exception('ConfigParser: %s' % msg)
Beispiel #30
0
def write(path, section, key, value):
    """Set *key* = *value* in *section* of the INI file at *path*.

    The file is read, updated in memory, and rewritten in full.
    Raises NoSectionError if *section* does not exist in the file.
    """
    conf = RawConfigParser()
    conf.read(path)
    conf.set(section, key, value)
    # 'with' guarantees the file handle is closed even if conf.write raises,
    # which the original open()/close() pair did not.
    with open(path, 'w') as configfile:
        conf.write(configfile)
Beispiel #31
0
# -*- coding: utf-8 -*-
# Django settings module: all values are sourced from a system-wide INI file.
from ConfigParser import RawConfigParser

config = RawConfigParser()
config.read('/usr/local/yats/config/web.ini')

DEBUG = config.getboolean('debug', 'DEBUG')
#DEBUG_PROPAGATE_EXCEPTIONS = DEBUG
XMLRPC_DEBUG = False
# NOTE(review): '*' allows any Host header — acceptable only behind a
# front-end that validates hosts; confirm for production.
ALLOWED_HOSTS = ['*']

USE_TZ = True
SITE_ID = 1

# NOTE(review): 'TESTSYTEM' (sic) matches the option name in web.ini —
# renaming it here would break config lookup; fix both together if ever.
TESTSYTEM = config.getboolean('debug', 'TESTSYTEM')

# Each entry in the [admins] section becomes a (name, value) pair;
# presumably (name, email) as Django's ADMINS expects — verify web.ini.
ADMINS = tuple(config.items('admins'))
MANAGERS = ADMINS

# Outgoing mail configuration, all from the [mail] section.
EMAIL_SUBJECT_PREFIX = 'yats-dev'
EMAIL_HOST = config.get('mail', 'EMAIL_HOST')
EMAIL_PORT = config.get('mail', 'EMAIL_PORT')
SERVER_EMAIL = config.get('mail', 'SERVER_EMAIL')
EMAIL_HOST_USER = config.get('mail', 'EMAIL_HOST_USER')
EMAIL_HOST_PASSWORD = config.get('mail', 'EMAIL_HOST_PASSWORD')

#DATABASE_ROUTERS = ['web.routers.ModelRouter']
DATABASES = {
    'default': {
        'ENGINE': config.get('database', 'DATABASE_ENGINE'),
        'NAME': config.get('database', 'DATABASE_NAME'),
Beispiel #32
0
    def process(self):
        """
        Process current directory for gems

        Note that this has to check each game project to know which gems are enabled
        and build a list of all enabled gems so that those are built.
        To debug gems output during build, use --zones=gems in your command line
        """

        this_path = self.ctx.path

        # The current directory is always part of the gem search path.
        append_to_unique_list(self.search_paths,
                              os.path.normpath(this_path.abspath()))

        # Parse Gems search path
        config = RawConfigParser()
        # config.read() returns the list of files successfully parsed; an
        # empty list (missing/unreadable ini) skips this whole section.
        if config.read(
                this_path.make_node(
                    'SetupAssistantUserPreferences.ini').abspath()):
            if config.has_section(GEMS_FOLDER) and config.has_option(
                    GEMS_FOLDER, 'SearchPaths\\size'):
                # Parse QSettings style array (i.e. read 'size' attribute, then 1-based-idx\Path)
                array_len = config.getint(GEMS_FOLDER, 'SearchPaths\\size')
                for i in range(0, array_len):
                    new_path = config.get(
                        GEMS_FOLDER, 'SearchPaths\\{}\\Path'.format(i + 1))
                    new_path = os.path.normpath(new_path)
                    Logs.debug('gems: Adding search path {}'.format(new_path))
                    append_to_unique_list(self.search_paths,
                                          os.path.normpath(new_path))

        # When the engine lives outside this root, search its path as well.
        if not self.ctx.is_engine_local():
            append_to_unique_list(self.search_paths,
                                  os.path.realpath(self.ctx.engine_path))

        # Load all the gems under the Gems folder to search for required gems
        self.required_gems = self.ctx.load_required_gems()

        game_projects = self.ctx.get_enabled_game_project_list()

        for game_project in game_projects:
            Logs.debug('gems: Game Project: %s' % game_project)

            gems_list_file = self.ctx.get_project_node(game_project).make_node(
                GEMS_LIST_FILE)

            # A missing gems list file is fatal unless gems are optional.
            if not os.path.isfile(gems_list_file.abspath()):
                if self.ctx.is_option_true('gems_optional'):
                    Logs.debug("gems: Game has no gems file, skipping [%s]" %
                               gems_list_file)
                    continue  # go to the next game
                else:
                    self.ctx.cry_error('Project {} is missing {} file.'.format(
                        game_project, GEMS_LIST_FILE))

            Logs.debug('gems: reading gems file at %s' % gems_list_file)

            gem_info_list = self.ctx.parse_json_file(gems_list_file)
            list_reader = _create_field_reader(
                self.ctx, gem_info_list,
                'Gems list for project ' + game_project)

            # Verify that the project file is an up-to-date format
            gem_format_version = list_reader.field_int('GemListFormatVersion')
            if gem_format_version != GEMS_FORMAT_VERSION:
                self.ctx.cry_error(
                    'Gems list file at {} is of version {}, not expected version {}. Please update your project file.'
                    .format(gems_list_file, gem_format_version,
                            GEMS_FORMAT_VERSION))

            for idx, gem_info_obj in enumerate(list_reader.field_req('Gems')):
                # String for error reporting.
                reader = _create_field_reader(
                    self.ctx, gem_info_obj,
                    'Gem {} in game project {}'.format(idx, game_project))

                gem_id = reader.uuid()
                version = reader.version()
                path = os.path.normpath(reader.field_req('Path'))

                # Reuse an already-loaded gem when id/version/path match.
                gem = self.get_gem_by_spec(gem_id, version, path)
                if not gem:
                    Logs.debug(
                        'gems: Gem not found in cache, attempting to load from disk: ({}, {}, {})'
                        .format(gem_id, version, path))

                    # Maps version-string -> first Gem seen with that version,
                    # used to detect the same gem found in multiple paths.
                    detected_gem_versions = {}

                    for search_path in self.search_paths:
                        def_file = os.path.join(search_path, path,
                                                GEMS_DEFINITION_FILE)
                        if not os.path.isfile(def_file):
                            continue  # Try again with the next path

                        gem = Gem(self.ctx)
                        gem.path = path
                        gem.abspath = os.path.join(search_path, path)
                        gem.load_from_json(
                            self.ctx.parse_json_file(
                                self.ctx.root.make_node(def_file)))

                        # Protect against loading duplicate gems from different locations, showing a warning if detected
                        dup_gem = detected_gem_versions.get(
                            gem.version.__str__(), None)
                        if dup_gem is not None:
                            Logs.warn(
                                '[WARN] Duplicate gem {} (version {}) found in multiple paths.  Accepting the one at {}'
                                .format(gem.name, gem.version,
                                        dup_gem.abspath))
                            gem = dup_gem
                            break
                        detected_gem_versions[gem.version.__str__()] = gem

                        # Validate that the Gem loaded from the path specified actually matches the id and version.
                        if gem.id != gem_id:
                            self.ctx.cry_error(
                                "Gem at path {} has ID {}, instead of ID {} specified in {}'s {}."
                                .format(path, gem.id, gem_id, game_project,
                                        GEMS_LIST_FILE))

                        if gem.version != version:
                            self.ctx.cry_error(
                                "Gem at path {} has version {}, instead of version {} specified in {}'s {}."
                                .format(path, gem.version, version,
                                        game_project, GEMS_LIST_FILE))

                        self.add_gem(gem)

                # Still unset means no search path contained the gem.
                if not gem:
                    self.ctx.cry_error(
                        'Failed to load from path "{}"'.format(path))

                gem.games_enabled_in.append(game_project)

        for gem in self.gems:
            Logs.debug("gems: gem %s is used by games: %s" %
                       (gem.name, gem.games_enabled_in))

        # Always add required gems to the gems manager
        for required_gem in self.required_gems:
            self.add_gem(required_gem)
Beispiel #33
0
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with PyoT.  If not, see <http://www.gnu.org/licenses/>.

@author: Andrea Azzara' <*****@*****.**>
'''
from ConfigParser import RawConfigParser
import os
import socket

# Directory containing this settings module; settings.ini lives beside it.
PROJECT_PATH = os.path.dirname(os.path.abspath(__file__))

CFG = RawConfigParser()
CFG.read(PROJECT_PATH + '/settings.ini')

LOCAL_DB = CFG.getboolean('database', 'DATABASE_LOCAL')

WEB_APPLICATION_SERVER = False

# Hostname-based override for a specific development machine.
if socket.gethostname() == 'andrea-lab':
    WEB_APPLICATION_SERVER = False
    LOCAL_DB = True

# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
# NOTE(review): '*' accepts any Host header — confirm this is acceptable
# outside development.
ALLOWED_HOSTS = ['*']

DEBUG = True
Beispiel #34
0
class FilerTest(unittest.TestCase):
    """
    Test the Filer class
    """
    def setUp(self):
        # Parse dummy command-line options so logging is configured the
        # same way as a real run.
        optparser = BaseOptions()
        optparser.parseOptions([
            'dummyfile.xml',
            '--debug=%s' % logging._levelNames[log.level].lower()
        ])

        # Defaults shared by all tests come from the test config file.
        self.defaults = RawConfigParser()
        configfiles = self.defaults.read(TESTCONF)

        self.site = Site()
        self.site.name = "testsite"
        self.site.type = "primary"
        # Fixed typo: was 'self.site.locaion', which set a misspelled
        # attribute and left Site.location undefined.
        self.site.location = "testlab"

    def test_create_filer_bare(self):
        """A filer node with no name must raise KeyError."""
        xmldata = """
<filer />
"""
        node = etree.fromstring(xmldata)
        filer = Filer()
        self.failUnlessRaises(KeyError, filer.configure_from_node, node,
                              self.defaults, self.site)

    def test_create_filer_named(self):
        """A named filer defaults to type 'filer'."""
        xmldata = """
<filer name="testfiler1" />
"""
        node = etree.fromstring(xmldata)
        filer = Filer()
        filer.configure_from_node(node, self.defaults, self.site)

        self.failUnlessEqual(filer.name, "testfiler1")
        self.failUnlessEqual(filer.type, "filer")

        #self.failUnlessRaises(KeyError, filer.configure_from_node, node, self.defaults, self.site)

    def test_create_filer_minimal(self):
        """Explicit name and type are both honoured."""
        xmldata = """
<filer name="testfiler1" type="filer" />
"""
        node = etree.fromstring(xmldata)
        filer = Filer()
        filer.configure_from_node(node, self.defaults, self.site)

        self.failUnlessEqual(filer.name, "testfiler1")
        self.failUnlessEqual(filer.type, "filer")

    def test_create_filer_nearstore(self):
        """'nearstore' is an accepted filer type."""
        xmldata = """
<filer name="testfiler1" type="nearstore" />
"""
        node = etree.fromstring(xmldata)
        filer = Filer()
        filer.configure_from_node(node, self.defaults, self.site)

        self.failUnlessEqual(filer.name, "testfiler1")
        self.failUnlessEqual(filer.type, "nearstore")

    def test_create_filer_bad_type(self):
        """An unknown filer type must raise ValueError."""
        xmldata = """
<filer name="testfiler1" type="secondary" />
"""
        node = etree.fromstring(xmldata)
        filer = Filer()
        self.failUnlessRaises(ValueError, filer.configure_from_node, node,
                              self.defaults, self.site)

    def test_filer_site_correct(self):
        """
        Test the filer's site is set correctly
        """
        xmldata = """
<filer name="testfiler1" type="filer" />
"""
        node = etree.fromstring(xmldata)
        filer = Filer()
        filer.configure_from_node(node, self.defaults, self.site)

        self.failUnlessEqual(filer.site.name, 'testsite')
Beispiel #35
0
from ConfigParser import RawConfigParser, NoSectionError
from django.core.exceptions import ImproperlyConfigured

# Django settings for wlcg project.

PROJECT_NAME = 'gstat'

# Set up some useful paths for later
from os import path as os_path

APP_PATH = os_path.abspath(os_path.split(__file__)[0])
PROJECT_PATH = os_path.abspath(os_path.join(APP_PATH, '..', '..'))
config = RawConfigParser()
config_file = os_path.join(PROJECT_PATH, 'config', '%s.ini' % PROJECT_NAME)
# Later files in the list override earlier ones; the system-wide file is
# read first, then the in-tree config.
read_files = config.read(['/etc/gstat/gstat.ini', config_file])

# NOTE: checking read_files here would mask the underlying errors; missing
# options are caught below instead.
#if not read_files:
#raise ImproperlyConfigured("Could not read config file : %s"%config_file)

try:
    DEBUG = config.getboolean('debug', 'DEBUG')
    TEMPLATE_DEBUG = config.getboolean('debug', 'TEMPLATE_DEBUG')

    PREFIX = config.get('server', 'PREFIX')

    VIEW_TEST = config.getboolean('debug', 'VIEW_TEST')
    INTERNAL_IPS = tuple(config.get('debug', 'INTERNAL_IPS').split())

    SERVER_EMAIL = config.get('email', 'SERVER_EMAIL')
    EMAIL_HOST = config.get('email', 'EMAIL_HOST')
Beispiel #36
0
def read_auto_rx_config(filename, no_sdr_test=False):
    """Read an Auto-RX v2 Station Configuration File.

    This function will attempt to parse a configuration file.
    It will also confirm the accessibility of any SDRs specified in the config file.

    Args:
            filename (str): Filename of the configuration file to read.
            no_sdr_test (bool): Skip testing the SDRs (used for some unit tests)

    Returns:
            auto_rx_config (dict): The configuration dictionary.
            sdr_config (dict): A dictionary with SDR parameters.
    """
    global global_config, web_password
    # Configuration Defaults:
    auto_rx_config = {
        # Log Settings
        "per_sonde_log": True,
        # Email Settings
        "email_enabled": False,
        #'email_error_notifications': False,
        "email_smtp_server": "localhost",
        "email_smtp_port": 25,
        "email_smtp_authentication": "None",
        "email_smtp_login": "******",
        "email_smtp_password": "******",
        "email_from": "sonde@localhost",
        "email_to": None,
        "email_subject": "<type> Sonde launch detected on <freq>: <id>",
        # SDR Settings
        "sdr_fm": "rtl_fm",
        "sdr_power": "rtl_power",
        "sdr_quantity": 1,
        # Search Parameters
        "min_freq": 400.4,
        "max_freq": 404.0,
        "rx_timeout": 120,
        "only_scan": [],
        "never_scan": [],
        "always_scan": [],
        # Location Settings
        "station_lat": 0.0,
        "station_lon": 0.0,
        "station_alt": 0.0,
        "station_code": "SONDE",  # NOTE: This will not be read from the config file, but will be left in place for now
        # as a default setting.
        "gpsd_enabled": False,
        "gpsd_host": "localhost",
        "gpsd_port": 2947,
        # Position Filter Settings
        "max_altitude": 50000,
        "max_radius_km": 1000,
        "min_radius_km": 0,
        "radius_temporary_block": False,
        # "sonde_time_threshold": 3, # Commented out to ensure warning message is shown.
        # Habitat Settings
        "habitat_enabled": False,
        "habitat_upload_rate": 30,
        "habitat_uploader_callsign": "SONDE_AUTO_RX",
        "habitat_uploader_antenna": "1/4-wave",
        "habitat_upload_listener_position": False,
        "habitat_payload_callsign": "<id>",
        # APRS Settings
        "aprs_enabled": False,
        "aprs_upload_rate": 30,
        "aprs_user": "******",
        "aprs_pass": "******",
        "aprs_server": "rotate.aprs2.net",
        "aprs_object_id": "<id>",
        #'aprs_use_custom_object_id': False,
        "aprs_custom_comment": "Radiosonde Auto-RX <freq>",
        "aprs_position_report": False,
        "station_beacon_enabled": False,
        "station_beacon_rate": 30,
        "station_beacon_comment": "radiosonde_auto_rx SondeGate v<version>",
        "station_beacon_icon": "/r",
        # Web Settings,
        "web_host": "0.0.0.0",
        "web_port": 5000,
        "web_archive_age": 120,
        "web_control": False,
        # "web_password": "******",  # Commented out to ensure warning message is shown
        #'kml_refresh_rate': 10,
        # Advanced Parameters
        "search_step": 800,
        "snr_threshold": 10,
        "min_distance": 1000,
        "dwell_time": 10,
        "max_peaks": 10,
        "quantization": 10000,
        "decoder_spacing_limit": 15000,
        "synchronous_upload": False,
        "scan_dwell_time": 20,
        "detect_dwell_time": 5,
        "scan_delay": 10,
        "payload_id_valid": 5,
        "temporary_block_time": 60,
        "rs41_drift_tweak": False,
        "decoder_stats": False,
        "ngp_tweak": False,
        # Rotator Settings
        "enable_rotator": False,
        "rotator_update_rate": 30,
        "rotator_hostname": "127.0.0.1",
        "rotator_port": 4533,
        "rotation_threshold": 5.0,
        "rotator_homing_enabled": False,
        "rotator_homing_delay": 10,
        "rotator_home_azimuth": 0,
        "rotator_home_elevation": 0,
        # OziExplorer Settings
        "ozi_enabled": False,
        "ozi_update_rate": 5,
        "ozi_port": 55681,
        "payload_summary_enabled": False,
        "payload_summary_port": 55672,
        # Debugging settings
        "save_detection_audio": False,
        "save_decode_audio": False,
        "save_decode_iq": False,
        "save_raw_hex": False,
        'send_raw_frame_in_telemetry': False,
        # URL for the Habitat DB Server.
        # As of July 2018 we send via sondehub.org, which will allow us to eventually transition away
        # from using the habhub.org tracker, and leave it for use by High-Altitude Balloon Hobbyists.
        # For now, sondehub.org just acts as a proxy to habhub.org.
        # This setting is not exposed to users as it's only used for unit/int testing
        "habitat_url": "https://habitat.sondehub.org/",
        # New Sondehub DB Settings
        "sondehub_enabled": True,
        "sondehub_upload_rate": 30,
        # "sondehub_contact_email": "*****@*****.**" # Commented out to ensure a warning message is shown on startup
    }

    try:

        # Check the file exists.
        if not os.path.isfile(filename):
            logging.critical("Config file %s does not exist!" % filename)
            return None

        config = RawConfigParser(auto_rx_config)
        config.read(filename)

        # Log Settings
        auto_rx_config["per_sonde_log"] = config.getboolean("logging", "per_sonde_log")

        # Email Settings
        if config.has_option("email", "email_enabled"):
            try:
                auto_rx_config["email_enabled"] = config.getboolean(
                    "email", "email_enabled"
                )
                auto_rx_config["email_smtp_server"] = config.get("email", "smtp_server")
                auto_rx_config["email_smtp_port"] = config.get("email", "smtp_port")
                auto_rx_config["email_smtp_authentication"] = config.get(
                    "email", "smtp_authentication"
                )
                auto_rx_config["email_smtp_login"] = config.get("email", "smtp_login")
                auto_rx_config["email_smtp_password"] = config.get(
                    "email", "smtp_password"
                )
                auto_rx_config["email_from"] = config.get("email", "from")
                auto_rx_config["email_to"] = config.get("email", "to")
                auto_rx_config["email_subject"] = config.get("email", "subject")

                if auto_rx_config["email_smtp_authentication"] not in [
                    "None",
                    "TLS",
                    "SSL",
                ]:
                    logging.error(
                        "Config - Invalid email authentication setting. Must be None, TLS or SSL."
                    )
                    return None

            except:
                logging.error("Config - Invalid or missing email settings. Disabling.")
                auto_rx_config["email_enabled"] = False

        # SDR Settings
        auto_rx_config["sdr_fm"] = config.get("advanced", "sdr_fm_path")
        auto_rx_config["sdr_power"] = config.get("advanced", "sdr_power_path")
        auto_rx_config["sdr_quantity"] = config.getint("sdr", "sdr_quantity")

        # Search Parameters
        auto_rx_config["min_freq"] = config.getfloat("search_params", "min_freq")
        auto_rx_config["max_freq"] = config.getfloat("search_params", "max_freq")
        auto_rx_config["rx_timeout"] = config.getint("search_params", "rx_timeout")

        if (
            config.has_option("search_params", "only_scan")
            and config.get("search_params", "only_scan") != ""
        ):  # check if user has new name for scan lists
            auto_rx_config["only_scan"] = json.loads(
                config.get("search_params", "only_scan")
            )
        else:
            logging.warning(
                "Config - whitelist configuration has been deprecated and replaced with only_scan list"
            )
            auto_rx_config["only_scan"] = json.loads(
                config.get("search_params", "whitelist")
            )

        if (
            config.has_option("search_params", "never_scan")
            and config.get("search_params", "never_scan") != ""
        ):  # check if user has new name for scan lists
            auto_rx_config["never_scan"] = json.loads(
                config.get("search_params", "never_scan")
            )
        else:
            logging.warning(
                "Config - blacklist configuration has been deprecated and replaced with never_scan list"
            )
            auto_rx_config["never_scan"] = json.loads(
                config.get("search_params", "blacklist")
            )

        if (
            config.has_option("search_params", "always_scan")
            and config.get("search_params", "always_scan") != ""
        ):  # check if user has new name for scan lists
            auto_rx_config["always_scan"] = json.loads(
                config.get("search_params", "always_scan")
            )
        else:
            logging.warning(
                "Config - greylist configuration has been deprecated and replaced with always_scan list"
            )
            auto_rx_config["always_scan"] = json.loads(
                config.get("search_params", "greylist")
            )

        # Location Settings
        auto_rx_config["station_lat"] = config.getfloat("location", "station_lat")
        auto_rx_config["station_lon"] = config.getfloat("location", "station_lon")
        auto_rx_config["station_alt"] = config.getfloat("location", "station_alt")

        if auto_rx_config["station_lat"] > 90.0 or auto_rx_config["station_lat"] < -90.0:
            logging.critical("Config - Invalid Station Latitude! (Outside +/- 90 degrees!)")
            return None
        
        if auto_rx_config["station_lon"] > 180.0 or auto_rx_config["station_lon"] < -180.0:
            logging.critical("Config - Invalid Station Longitude! (Outside +/- 180 degrees!)")
            return None


        # Position Filtering
        auto_rx_config["max_altitude"] = config.getint("filtering", "max_altitude")
        auto_rx_config["max_radius_km"] = config.getint("filtering", "max_radius_km")

        # Habitat Settings
        # Deprecated from v1.5.0
        # auto_rx_config["habitat_enabled"] = config.getboolean(
        #     "habitat", "habitat_enabled"
        # )
        # auto_rx_config["habitat_upload_rate"] = config.getint("habitat", "upload_rate")
        auto_rx_config["habitat_uploader_callsign"] = config.get(
            "habitat", "uploader_callsign"
        )
        auto_rx_config["habitat_upload_listener_position"] = config.getboolean(
            "habitat", "upload_listener_position"
        )
        auto_rx_config["habitat_uploader_antenna"] = config.get(
            "habitat", "uploader_antenna"
        ).strip()

        # try:  # Use the default configuration if not found
        #     auto_rx_config["habitat_url"] = config.get("habitat", "url")
        # except:
        #     pass

        # Deprecated from v1.5.0
        # if auto_rx_config["habitat_upload_rate"] < MINIMUM_HABITAT_UPDATE_RATE:
        #     logging.warning(
        #         "Config - Habitat Update Rate clipped to minimum of %d seconds. Please be respectful of other users of Habitat."
        #         % MINIMUM_HABITAT_UPDATE_RATE
        #     )
        #     auto_rx_config["habitat_upload_rate"] = MINIMUM_HABITAT_UPDATE_RATE

        # APRS Settings
        auto_rx_config["aprs_enabled"] = config.getboolean("aprs", "aprs_enabled")
        auto_rx_config["aprs_upload_rate"] = config.getint("aprs", "upload_rate")
        auto_rx_config["aprs_user"] = config.get("aprs", "aprs_user")
        auto_rx_config["aprs_pass"] = config.get("aprs", "aprs_pass")
        auto_rx_config["aprs_server"] = config.get("aprs", "aprs_server")
        auto_rx_config["aprs_object_id"] = config.get("aprs", "aprs_object_id")
        auto_rx_config["aprs_custom_comment"] = config.get(
            "aprs", "aprs_custom_comment"
        )
        # 2021-08-08 - Disable option for producing APRS position reports.
        #auto_rx_config["aprs_position_report"] = config.getboolean(
        #    "aprs", "aprs_position_report"
        #)
        auto_rx_config["aprs_position_report"] = False
        auto_rx_config["station_beacon_enabled"] = config.getboolean(
            "aprs", "station_beacon_enabled"
        )
        auto_rx_config["station_beacon_rate"] = config.getint(
            "aprs", "station_beacon_rate"
        )
        auto_rx_config["station_beacon_comment"] = config.get(
            "aprs", "station_beacon_comment"
        )
        auto_rx_config["station_beacon_icon"] = config.get(
            "aprs", "station_beacon_icon"
        )

        if auto_rx_config["aprs_upload_rate"] < MINIMUM_APRS_UPDATE_RATE:
            logging.warning(
                "Config - APRS Update Rate clipped to minimum of %d seconds."
                % MINIMUM_APRS_UPDATE_RATE
            )
            auto_rx_config["aprs_upload_rate"] = MINIMUM_APRS_UPDATE_RATE

        # OziPlotter Settings
        auto_rx_config["ozi_enabled"] = config.getboolean("oziplotter", "ozi_enabled")
        auto_rx_config["ozi_update_rate"] = config.getint(
            "oziplotter", "ozi_update_rate"
        )
        auto_rx_config["ozi_port"] = config.getint("oziplotter", "ozi_port")
        auto_rx_config["payload_summary_enabled"] = config.getboolean(
            "oziplotter", "payload_summary_enabled"
        )
        auto_rx_config["payload_summary_port"] = config.getint(
            "oziplotter", "payload_summary_port"
        )

        # Advanced Settings
        auto_rx_config["search_step"] = config.getfloat("advanced", "search_step")
        auto_rx_config["snr_threshold"] = config.getfloat("advanced", "snr_threshold")
        auto_rx_config["min_distance"] = config.getfloat("advanced", "min_distance")
        auto_rx_config["dwell_time"] = config.getint("advanced", "dwell_time")
        auto_rx_config["quantization"] = config.getint("advanced", "quantization")
        auto_rx_config["max_peaks"] = config.getint("advanced", "max_peaks")
        auto_rx_config["scan_dwell_time"] = config.getint("advanced", "scan_dwell_time")
        auto_rx_config["detect_dwell_time"] = config.getint(
            "advanced", "detect_dwell_time"
        )
        auto_rx_config["scan_delay"] = config.getint("advanced", "scan_delay")
        auto_rx_config["payload_id_valid"] = config.getint(
            "advanced", "payload_id_valid"
        )
        auto_rx_config["synchronous_upload"] = config.getboolean(
            "advanced", "synchronous_upload"
        )
        auto_rx_config['send_raw_frame_in_telemetry'] = config.getboolean(
            'advanced', 'send_raw_frame_in_telemetry'
        )

        # Rotator Settings
        auto_rx_config["rotator_enabled"] = config.getboolean(
            "rotator", "rotator_enabled"
        )
        auto_rx_config["rotator_update_rate"] = config.getint("rotator", "update_rate")
        auto_rx_config["rotator_hostname"] = config.get("rotator", "rotator_hostname")
        auto_rx_config["rotator_port"] = config.getint("rotator", "rotator_port")
        auto_rx_config["rotator_homing_enabled"] = config.getboolean(
            "rotator", "rotator_homing_enabled"
        )
        auto_rx_config["rotator_home_azimuth"] = config.getfloat(
            "rotator", "rotator_home_azimuth"
        )
        auto_rx_config["rotator_home_elevation"] = config.getfloat(
            "rotator", "rotator_home_elevation"
        )
        auto_rx_config["rotator_homing_delay"] = config.getint(
            "rotator", "rotator_homing_delay"
        )
        auto_rx_config["rotation_threshold"] = config.getfloat(
            "rotator", "rotation_threshold"
        )

        # Web interface settings.
        auto_rx_config["web_host"] = config.get("web", "web_host")
        auto_rx_config["web_port"] = config.getint("web", "web_port")
        auto_rx_config["web_archive_age"] = config.getint("web", "archive_age")

        auto_rx_config["save_detection_audio"] = config.getboolean(
            "debugging", "save_detection_audio"
        )
        auto_rx_config["save_decode_audio"] = config.getboolean(
            "debugging", "save_decode_audio"
        )
        auto_rx_config["save_decode_iq"] = config.getboolean(
            "debugging", "save_decode_iq"
        )

        # NOTE 2019-09-21: The station code will now be fixed at the default to avoid multiple iMet callsign issues.
        # auto_rx_config['station_code'] = config.get('location', 'station_code')
        # if len(auto_rx_config['station_code']) > 5:
        # 	auto_rx_config['station_code'] = auto_rx_config['station_code'][:5]
        # 	logging.warning("Config - Clipped station code to 5 digits: %s" % auto_rx_config['station_code'])

        auto_rx_config["temporary_block_time"] = config.getint(
            "advanced", "temporary_block_time"
        )

        # New demod tweaks - Added 2019-04-23
        # Default to experimental decoders on for FSK/GFSK sondes...
        auto_rx_config["experimental_decoders"] = {
            "RS41": True,
            "RS92": True,
            "DFM": True,
            "M10": True,
            "M20": True,
            "IMET": False,
            "IMET5": True,
            "LMS6": True,
            "MK2LMS": False,
            "MEISEI": False,
            "MRZ": False,  # .... except for the MRZ, until we know it works.
            "UDP": False,
        }

        auto_rx_config["decoder_spacing_limit"] = config.getint(
            "advanced", "decoder_spacing_limit"
        )
        auto_rx_config["experimental_decoders"]["RS41"] = config.getboolean(
            "advanced", "rs41_experimental"
        )
        auto_rx_config["experimental_decoders"]["RS92"] = config.getboolean(
            "advanced", "rs92_experimental"
        )
        auto_rx_config["experimental_decoders"]["M10"] = config.getboolean(
            "advanced", "m10_experimental"
        )
        auto_rx_config["experimental_decoders"]["DFM"] = config.getboolean(
            "advanced", "dfm_experimental"
        )
        auto_rx_config["experimental_decoders"]["LMS6"] = config.getboolean(
            "advanced", "lms6-400_experimental"
        )

        try:
            auto_rx_config["web_control"] = config.getboolean("web", "web_control")
            auto_rx_config["ngp_tweak"] = config.getboolean("advanced", "ngp_tweak")
            auto_rx_config["gpsd_enabled"] = config.getboolean(
                "location", "gpsd_enabled"
            )
            auto_rx_config["gpsd_host"] = config.get("location", "gpsd_host")
            auto_rx_config["gpsd_port"] = config.getint("location", "gpsd_port")
        except:
            logging.warning(
                "Config - Did not find web control / ngp_tweak / gpsd options, using defaults (disabled)"
            )
            auto_rx_config["web_control"] = False
            auto_rx_config["ngp_tweak"] = False
            auto_rx_config["gpsd_enabled"] = False

        try:
            auto_rx_config["min_radius_km"] = config.getint(
                "filtering", "min_radius_km"
            )
            auto_rx_config["radius_temporary_block"] = config.getboolean(
                "filtering", "radius_temporary_block"
            )
        except:
            logging.warning(
                "Config - Did not find minimum radius filter setting, using default (0km)."
            )
            auto_rx_config["min_radius_km"] = 0
            auto_rx_config["radius_temporary_block"] = False

        try:
            auto_rx_config["aprs_use_custom_object_id"] = config.getboolean(
                "aprs", "aprs_use_custom_object_id"
            )
        except:
            logging.warning(
                "Config - Did not find aprs_use_custom_object_id setting, using default (False)"
            )
            auto_rx_config["aprs_use_custom_object_id"] = False

        try:
            auto_rx_config["aprs_port"] = config.getint("aprs", "aprs_port")
        except:
            logging.warning(
                "Config - Did not find aprs_port setting - using default of 14590."
            )
            auto_rx_config["aprs_port"] = 14590

        try:
            auto_rx_config["email_error_notifications"] = config.getboolean(
                "email", "error_notifications"
            )
            auto_rx_config["email_launch_notifications"] = config.getboolean(
                "email", "launch_notifications"
            )
            auto_rx_config["email_landing_notifications"] = config.getboolean(
                "email", "landing_notifications"
            )
            auto_rx_config["email_landing_range_threshold"] = config.getfloat(
                "email", "landing_range_threshold"
            )
            auto_rx_config["email_landing_altitude_threshold"] = config.getfloat(
                "email", "landing_altitude_threshold"
            )
        except:
            logging.warning(
                "Config - Did not find new email settings (v1.3.3), using defaults"
            )
            auto_rx_config["email_error_notifications"] = False
            auto_rx_config["email_launch_notifications"] = True
            auto_rx_config["email_landing_notifications"] = True
            auto_rx_config["email_landing_range_threshold"] = 30
            auto_rx_config["email_landing_altitude_threshold"] = 1000

        try:
            auto_rx_config["kml_refresh_rate"] = config.getint(
                "web", "kml_refresh_rate"
            )
        except:
            logging.warning(
                "Config - Did not find kml_refresh_rate setting, using default (10 seconds)."
            )
            auto_rx_config["kml_refresh_rate"] = 11

        # New Sondehub db Settings
        try:
            auto_rx_config["sondehub_enabled"] = config.getboolean(
                "sondehub", "sondehub_enabled"
            )
            auto_rx_config["sondehub_upload_rate"] = config.getint(
                "sondehub", "sondehub_upload_rate"
            )
            if auto_rx_config["sondehub_upload_rate"] < 10:
                logging.warning(
                    "Config - Clipped Sondehub update rate to lower limit of 10 seconds"
                )
                auto_rx_config["sondehub_upload_rate"] = 10
        except:
            logging.warning(
                "Config - Did not find sondehub_enabled setting, using default (enabled / 15 seconds)."
            )
            auto_rx_config["sondehub_enabled"] = True
            auto_rx_config["sondehub_upload_rate"] = 15

        try:
            auto_rx_config["experimental_decoders"]["MRZ"] = config.getboolean(
                "advanced", "mrz_experimental"
            )
        except:
            logging.warning(
                "Config - Did not find MRZ decoder experimental decoder setting, using default (disabled)."
            )
            auto_rx_config["experimental_decoders"]["MRZ"] = False

        try:
            auto_rx_config["experimental_decoders"]["IMET5"] = config.getboolean(
                "advanced", "imet54_experimental"
            )
        except:
            logging.warning(
                "Config - Did not find iMet-54 decoder experimental decoder setting, using default (enabled)."
            )
            auto_rx_config["experimental_decoders"]["IMET5"] = True

        # Sondehub Contact email (1.5.1)
        try:
            auto_rx_config["sondehub_contact_email"] = config.get(
                "sondehub", "sondehub_contact_email"
            )
        except:
            logging.warning(
                "Config - Did not find Sondehub contact e-mail setting, using default (none)."
            )
            auto_rx_config["sondehub_contact_email"] = "*****@*****.**"

        # Sonde time threshold (1.5.1)
        try:
            auto_rx_config["sonde_time_threshold"] = config.getfloat(
                "filtering", "sonde_time_threshold"
            )
        except:
            logging.warning(
                "Config - Did not find Sonde Time Threshold, using default (3 hrs)."
            )
            auto_rx_config["sonde_time_threshold"] = 3

        # Web control password
        try:
            auto_rx_config["web_password"] = config.get("web", "web_password")
            if auto_rx_config["web_password"] == "none":
                logging.warning("Config - Web Password not set, disabling web control")
                auto_rx_config["web_control"] = True
        except:
            logging.warning(
                "Config - Did not find Web Password setting, using default (web control disabled)"
            )
            auto_rx_config["web_control"] = False
            auto_rx_config["web_password"] = "******"
        
        try:
            auto_rx_config["save_raw_hex"] = config.getboolean(
                "debugging", "save_raw_hex"
            )
        except:
            logging.warning(
                "Config - Did not find save_raw_hex setting, using default (disabled)"
            )
            auto_rx_config["save_raw_hex"] = False
        
        try:
            auto_rx_config["experimental_decoders"]["MK2LMS"] = config.getboolean(
                "advanced", "lms6-1680_experimental"
            )
        except:
            logging.warning(
                "Config - Did not find lms6-1680_experimental setting, using default (disabled)"
            )
            auto_rx_config["experimental_decoders"]["MK2LMS"] = False


        # As of auto_rx version 1.5.10, we are limiting APRS output to only radiosondy.info,
        # and only on the non-forwarding port. 
        # This decision was not made lightly, and is a result of the considerable amount of
        # non-amateur traffic that radiosonde flights are causing within the APRS-IS network.
        # Until some form of common format can be agreed to amongst the developers of *all* 
        # radiosonde tracking software to enable radiosonde telemetry to be de-duped, 
        # I have decided to help reduce the impact on the wider APRS-IS network by restricting 
        # the allowed servers and ports.
        # If you are using another APRS-IS server that *does not* forward to the wider APRS-IS
        # network and want it allowed, then please raise an issue at
        # https://github.com/projecthorus/radiosonde_auto_rx/issues
        #
        # You are of course free to fork and modify this codebase as you wish, but please be aware
        # that this goes against the wishes of the radiosonde_auto_rx developers to not be part
        # of the bigger problem of APRS-IS congestion. 

        ALLOWED_APRS_SERVERS = ["radiosondy.info"]
        ALLOWED_APRS_PORTS = [14590]

        if auto_rx_config["aprs_server"] not in ALLOWED_APRS_SERVERS:
            logging.warning(
                "Please do not upload to servers which forward to the wider APRS-IS network and cause network congestion. Switching to default server of radiosondy.info. If you believe this to be in error, please raise an issue at https://github.com/projecthorus/radiosonde_auto_rx/issues"
            )
            auto_rx_config["aprs_server"] = "radiosondy.info"
        
        if auto_rx_config["aprs_port"] not in ALLOWED_APRS_PORTS:
            logging.warning(
                "Please do not use APRS ports which forward data out to the wider APRS-IS network and cause network congestion. Switching to default port of 14590. If you believe this to be in error, please raise an issue at https://github.com/projecthorus/radiosonde_auto_rx/issues"
            )
            auto_rx_config["aprs_port"] = 14590


        # If we are being called as part of a unit test, just return the config now.
        if no_sdr_test:
            return auto_rx_config

        # Now we attempt to read in the individual SDR parameters.
        auto_rx_config["sdr_settings"] = {}

        for _n in range(1, auto_rx_config["sdr_quantity"] + 1):
            _section = "sdr_%d" % _n
            try:
                _device_idx = config.get(_section, "device_idx")
                _ppm = round(config.getfloat(_section, "ppm"))
                _gain = config.getfloat(_section, "gain")
                _bias = config.getboolean(_section, "bias")

                if (auto_rx_config["sdr_quantity"] > 1) and (_device_idx == "0"):
                    logging.critical(
                        "Config - SDR Device ID of 0 used with a multi-SDR configuration. Go read the warning in the config file!"
                    )
                    return None

                # See if the SDR exists.
                _sdr_valid = rtlsdr_test(_device_idx)
                if _sdr_valid:
                    auto_rx_config["sdr_settings"][_device_idx] = {
                        "ppm": _ppm,
                        "gain": _gain,
                        "bias": _bias,
                        "in_use": False,
                        "task": None,
                    }
                    logging.info("Config - Tested SDR #%s OK" % _device_idx)
                else:
                    logging.warning("Config - SDR #%s invalid." % _device_idx)
            except Exception as e:
                logging.error(
                    "Config - Error parsing SDR %d config - %s" % (_n, str(e))
                )
                continue

        # Sanity checks when using more than one SDR
        if (len(auto_rx_config["sdr_settings"].keys()) > 1) and (
            auto_rx_config["aprs_object_id"] != "<id>"
        ):
            logging.critical(
                "Fixed APRS object ID used in a multi-SDR configuration. Go read the warnings in the config file!"
            )
            return None

        if (len(auto_rx_config["sdr_settings"].keys()) > 1) and (
            auto_rx_config["rotator_enabled"]
        ):
            logging.critical(
                "Rotator enabled in a multi-SDR configuration. Go read the warnings in the config file!"
            )
            return None

        # TODO: Revisit this limitation once the OziPlotter output sub-module is complete.
        if (len(auto_rx_config["sdr_settings"].keys()) > 1) and auto_rx_config[
            "ozi_enabled"
        ]:
            logging.critical("Oziplotter output enabled in a multi-SDR configuration.")
            return None

        if len(auto_rx_config["sdr_settings"].keys()) == 0:
            # We have no SDRs to use!!
            logging.error("Config - No working SDRs! Cannot run...")
            return None
        else:
            # Create a global copy of the configuration file at this point
            global_config = copy.deepcopy(auto_rx_config)

            # Excise some sensitive parameters from the global config.
            global_config.pop("email_smtp_login")
            global_config.pop("email_smtp_password")
            global_config.pop("email_smtp_server")
            global_config.pop("email_smtp_port")
            global_config.pop("email_from")
            global_config.pop("email_to")
            global_config.pop("email_smtp_authentication")
            global_config.pop("sondehub_contact_email")
            global_config.pop("web_password")

            web_password = auto_rx_config["web_password"]

            return auto_rx_config

    except:
        traceback.print_exc()
        logging.error("Could not parse config file.")
        return None
Beispiel #37
0
class Session:
    """Persist and restore the user's timeline layout between runs."""

    def __init__(self, api):
        self.api = api
        self.factory = TimelineFactory(api)
        self.sessions_conf = RawConfigParser()

        # Built-in `default` session: home timeline visible, the rest
        # loaded in the background.
        default_buffers = ', '.join([
            MENTIONS_TIMELINE, FAVORITES_TIMELINE, MESSAGES_TIMELINE,
            OWN_TWEETS_TIMELINE
        ])
        self.sessions = {
            DEFAULT_SESSION: {
                VISIBLE: HOME_TIMELINE,
                BUFFERS: default_buffers,
            }
        }

        # Bootstrap the on-disk sessions file on first run.
        if not path.isfile(SESSIONS_FILE):
            logging.info(_('Sessions file created'))
            self.init_sessions_file()

    def init_sessions_file(self):
        """Write the `default` session out to `SESSIONS_FILE`."""
        default = self.sessions[DEFAULT_SESSION]
        self.sessions_conf.add_section(DEFAULT_SESSION)
        self.sessions_conf.set(DEFAULT_SESSION, VISIBLE, default[VISIBLE])
        self.sessions_conf.set(DEFAULT_SESSION, BUFFERS, default[BUFFERS])

        # create the file and write the `default` session
        with open(SESSIONS_FILE, 'w') as sessions_fp:
            self.sessions_conf.write(sessions_fp)

    def load_from_session_conf(self, session_name):
        """
        Copy the session `session_name` from :attr:session_conf into the
        :attr:sessions dictionary.
        """
        # `visible` is assumed to be present; `buffers` is optional and
        # defaults to an empty string when missing.
        has_buffers = self.sessions_conf.has_option(session_name, BUFFERS)
        self.sessions[session_name] = {
            VISIBLE: self.sessions_conf.get(session_name, VISIBLE),
            BUFFERS: (self.sessions_conf.get(session_name, BUFFERS)
                      if has_buffers else ''),
        }

    def populate(self, timeline_list, session=None):
        """Populate `timeline_list` with the session timelines."""
        # NOTE(review): the active session comes from the global
        # `configuration`; the `session` argument is accepted but unused.
        session_name = configuration.session

        # read the `SESSIONS_FILE`
        self.sessions_conf.read(SESSIONS_FILE)

        if not self.sessions_conf.has_section(session_name):
            # `configuration.session` does not exist, load default session
            session_dict = self.sessions[DEFAULT_SESSION]
        else:
            self.load_from_session_conf(session_name)
            session_dict = self.sessions[session_name]

        self.append_visible_timelines(session_dict[VISIBLE], timeline_list)
        self.append_background_timelines(session_dict[BUFFERS], timeline_list)

    def append_visible_timelines(self, visible_string, timeline_list):
        """
        Given a `visible_string` with the names of the visible timelines,
        append them to `timeline_list` and make them all visible.
        """
        names = clean_timeline_list_string(visible_string)

        # the first timeline is always visible
        timeline_list.append_timeline(self.factory(names[0]))

        # every further timeline is appended and the number of visible
        # columns grows so it is shown as well
        for name in names[1:]:
            timeline_list.append_timeline(self.factory(name))
            timeline_list.expand_visible_next()

    def append_background_timelines(self, buffers_string, timeline_list):
        """
        Given a `buffers_string` with the names of the timelines that should
        be loaded in the background, append them to `timeline_list`.
        """
        for name in clean_timeline_list_string(buffers_string):
            timeline_list.append_timeline(self.factory(name))
Beispiel #38
0
 def update_browser(self):
     """Persist the current browser checkbox state to the config file."""
     new_value = str(self.var.get())
     parser = RawConfigParser()
     parser.read('../config.ini')
     parser.set('main', 'browser', new_value)
     with open('../config.ini', 'w') as config_fp:
         parser.write(config_fp)
Beispiel #39
0
class Configuration(object):
    """
    Generate and parse configuration files.

    Has backwards compatibility with the Tyrs legacy configuration:
    loads defaults, migrates legacy config/token files, reads and writes
    the current config file and OAuth token file, and can authorize a
    new account.
    """

    def __init__(self, cli_args=None):
        """
        Create a `Configuration` taking into account the arguments
        from the command line interface (if any).
        """
        self.load_defaults()

        self.browser = BROWSER

        # create the config directory if it does not exist
        if not path.isdir(CONFIG_PATH):
            try:
                mkdir(CONFIG_PATH)
            except OSError:
                # narrowed from a bare `except:` — only filesystem errors
                # are expected from mkdir()
                print(encode(_('Error creating config directory in %s' % CONFIG_DIR)))
                exit(3)

        # generate config file and exit
        if cli_args and cli_args.generate_config:
            self.generate_config_file(config_file=cli_args.generate_config,)
            exit(0)

        if cli_args and cli_args.config:
            config_file = cli_args.config
        else:
            config_file = DEFAULT_CONFIG_FILE
        self.config_file = config_file

        if cli_args and cli_args.account:
            token_file = path.join(CONFIG_PATH, '%s.token' % cli_args.account)
        else:
            # loads the default `token' if no account was specified
            token_file = DEFAULT_TOKEN_FILE
        self.token_file = token_file

    def load(self):
        """Loads configuration from files."""
        self._init_config()
        self._init_token()

    def load_defaults(self):
        """Load default values into configuration."""
        self.key_bindings = KEY_BINDINGS
        self.palette = PALETTE
        self.styles = STYLES
        self.logging_level = LOGGING_LEVEL

    def _init_config(self):
        """Load the config file, migrating a legacy Tyrs file if present."""
        if path.isfile(LEGACY_CONFIG_FILE):
            self._parse_legacy_config_file()
            print_deprecation_notice()
            remove(LEGACY_CONFIG_FILE)
            self.generate_config_file(self.config_file)
        elif path.isfile(self.config_file):
            self.parse_config_file(self.config_file)
        else:
            self.generate_config_file(self.config_file)

    def _init_token(self):
        """Load the OAuth token file, migrating a legacy one if present."""
        if path.isfile(LEGACY_TOKEN_FILE):
            # NOTE(review): this uses `parse_token_file` on the legacy
            # file; `_parse_legacy_token_file` exists but is not called
            # here — confirm which is intended.
            self.parse_token_file(LEGACY_TOKEN_FILE)
            remove(LEGACY_TOKEN_FILE)
            if hasattr(self, 'oauth_token') and \
               hasattr(self, 'oauth_token_secret'):
                   self.generate_token_file(self.token_file,
                                            self.oauth_token,
                                            self.oauth_token_secret)
        elif not path.isfile(self.token_file):
            self.authorize_new_account()
        else:
            self.parse_token_file(self.token_file)

    def _parse_legacy_config_file(self):
        """
        Parse a legacy configuration file, merging its styles, logging
        level, key bindings and colors into the loaded defaults.
        """
        conf = RawConfigParser()
        conf.read(LEGACY_CONFIG_FILE)

        styles = self.styles.copy()

        if conf.has_option('params', 'dm_template'):
            styles['dm_template'] = conf.get('params', 'dm_template')

        if conf.has_option('params', 'header_template'):
            styles['header_template'] = conf.get('params', 'header_template')

        self.styles.update(styles)

        if conf.has_option('params', 'logging_level'):
            self.logging_level = conf.getint('params', 'logging_level')

        for binding in self.key_bindings:
            if conf.has_option('keys', binding):
                custom_key = conf.get('keys', binding)
                self._set_key_binding(binding, custom_key)

        palette_labels = [color[0] for color in PALETTE]
        for label in palette_labels:
            if conf.has_option('colors', label):
                custom_fg = conf.get('colors', label)
                self._set_color(label, custom_fg)

    def _parse_legacy_token_file(self):
        """Read OAuth credentials from the legacy Tyrs token file."""
        conf = RawConfigParser()
        conf.read(LEGACY_TOKEN_FILE)

        if conf.has_option(SECTION_TOKEN, 'oauth_token'):
            self.oauth_token = conf.get(SECTION_TOKEN, 'oauth_token')

        # Fixed copy-paste bug: previously this re-checked 'oauth_token'
        # before reading 'oauth_token_secret', which raised NoOptionError
        # whenever only the token was present (cf. parse_token_file).
        if conf.has_option(SECTION_TOKEN, 'oauth_token_secret'):
            self.oauth_token_secret = conf.get(SECTION_TOKEN, 'oauth_token_secret')

    def _set_color(self, color_label, custom_fg=None, custom_bg=None):
        """Override the fg/bg of palette entry `color_label` if valid."""
        for color in self.palette:
            label, fg, bg = color[0], color[1], color[2]
            if label == color_label:
                color[1] = custom_fg if validate_color(custom_fg) is not None else fg
                color[2] = custom_bg if validate_color(custom_bg) is not None else bg

    def _set_key_binding(self, binding, new_key):
        """Rebind `binding` to `new_key`, keeping its description."""
        # `in` instead of the removed-in-py3 dict.has_key()
        if binding not in self.key_bindings:
            return

        key, description = self.key_bindings[binding]
        new_key_binding = new_key, description
        self.key_bindings[binding] = new_key_binding

    def generate_config_file(self, config_file):
        """Generate `config_file`, reporting success or failure."""
        # NOTE(review): `_generate_config_file` only declares
        # `config_file`; the extra kwargs are presumably consumed by the
        # `wrap_exceptions` decorator — confirm.
        self._generate_config_file(config_file=config_file,
                                   on_error=self._config_generation_error,
                                   on_success=self._config_generation_success)

    @wrap_exceptions
    def _generate_config_file(self, config_file):
        """Write key bindings, palette, styles and debug sections."""
        conf = RawConfigParser()

        self.config_file = config_file

        # Key bindings
        conf.add_section(SECTION_KEY_BINDINGS)
        binding_lists = [MOTION_KEY_BINDINGS,
                         BUFFERS_KEY_BINDINGS,
                         TWEETS_KEY_BINDINGS,
                         TIMELINES_KEY_BINDINGS,
                         META_KEY_BINDINGS,
                         TURSES_KEY_BINDINGS,]
        for binding_list in binding_lists:
            for binding in binding_list:
                key = self.key_bindings[binding][0]
                conf.set(SECTION_KEY_BINDINGS, binding, key)

        # Color
        conf.add_section(SECTION_PALETTE)
        for label in self.palette:
            label_name, fg, bg = label[0], label[1], label[2]
            conf.set(SECTION_PALETTE, label_name, fg)
            conf.set(SECTION_PALETTE, label_name + '_bg', bg)

        # Styles
        conf.add_section(SECTION_STYLES)
        for style in self.styles:
            conf.set(SECTION_STYLES, style, self.styles[style])

        # Debug
        conf.add_section(SECTION_DEBUG)
        conf.set(SECTION_DEBUG, 'logging_level', LOGGING_LEVEL)

        # NOTE(review): binary mode is a Python 2 idiom; under Python 3
        # RawConfigParser.write() needs a text-mode file.
        with open(config_file, 'wb') as config:
            conf.write(config)

    def _config_generation_error(self):
        """Report a failed config generation and exit."""
        print(encode(_('Unable to generate configuration file in %s')) % self.config_file)
        exit(2)

    def _config_generation_success(self):
        """Report where the config file was generated."""
        print(encode(_('Generated configuration file in %s')) % self.config_file)

    def generate_token_file(self,
                            token_file,
                            oauth_token,
                            oauth_token_secret):
        """Save the OAuth credentials to `token_file`."""
        self.oauth_token = oauth_token
        self.oauth_token_secret = oauth_token_secret

        conf = RawConfigParser()
        conf.add_section(SECTION_TOKEN)
        conf.set(SECTION_TOKEN, 'oauth_token', oauth_token)
        conf.set(SECTION_TOKEN, 'oauth_token_secret', oauth_token_secret)

        # NOTE(review): binary mode is a Python 2 idiom; see above.
        with open(token_file, 'wb') as tokens:
            conf.write(tokens)

        print(encode(_('your account has been saved')))

    def parse_config_file(self, config_file):
        """Parse `config_file` into the current configuration."""
        self._conf = RawConfigParser()
        self._conf.read(config_file)

        self._parse_key_bindings()
        self._parse_palette()
        self._parse_styles()
        self._parse_debug()

    def _parse_key_bindings(self):
        """Apply any key bindings overridden in the config file."""
        for binding in self.key_bindings:
            if self._conf.has_option(SECTION_KEY_BINDINGS, binding):
                custom_key = self._conf.get(SECTION_KEY_BINDINGS, binding)
                self._set_key_binding(binding, custom_key)

    def _parse_palette(self):
        """Apply any colors overridden in the config file."""
        for label in self.palette:
            label_name, fg, bg = label[0], label[1], label[2]
            if self._conf.has_option(SECTION_PALETTE, label_name):
                fg = self._conf.get(SECTION_PALETTE, label_name)
            if self._conf.has_option(SECTION_PALETTE, label_name + '_bg'):
                bg = self._conf.get(SECTION_PALETTE, label_name + '_bg')
            self._set_color(label_name, fg, bg)

    def _parse_styles(self):
        """Apply any styles overridden in the config file."""
        for style in self.styles:
            if self._conf.has_option(SECTION_STYLES, style):
                self.styles[style] = self._conf.get(SECTION_STYLES, style)

    def _parse_debug(self):
        """Apply the logging level from the config file, if present."""
        if self._conf.has_option(SECTION_DEBUG, 'logging_level'):
            self.logging_level = self._conf.get(SECTION_DEBUG, 'logging_level')

    def parse_token_file(self, token_file):
        """Read OAuth credentials from `token_file`."""
        self._conf = RawConfigParser()
        self._conf.read(token_file)

        if self._conf.has_option(SECTION_TOKEN, 'oauth_token'):
            self.oauth_token = self._conf.get(SECTION_TOKEN, 'oauth_token')
        if self._conf.has_option(SECTION_TOKEN, 'oauth_token_secret'):
            self.oauth_token_secret = self._conf.get(SECTION_TOKEN, 'oauth_token_secret')

    def authorize_new_account(self):
        """Run the OAuth dance and persist the obtained credentials."""
        access_token = authorization()
        if access_token:
            self.oauth_token = access_token['oauth_token']
            self.generate_token_file(self.token_file,
                                   access_token['oauth_token'],
                                   access_token['oauth_token_secret'])
        else:
            # TODO: exit codes
            exit(2)

    def reload(self):
        """Re-parse the current config file."""
        self.parse_config_file(self.config_file)
Beispiel #40
0
    def __init__(self, root, frames):
        """Build the settings panel and pre-fill it from ``../config.ini``.

        Registers the panel frame under ``frames['settings']`` and places
        it at a fixed position inside `root`.

        :param root: parent Tk widget the panel is created in.
        :param frames: dict of named frames this panel registers itself in.
        """
        self.body = tk.Frame(root, bg='#e6e6e6')
        frames['settings'] = self.body

        self.body.place(x=0, y=340, width=700, height=260)

        # Login / password entry row pair.
        tk.Label(self.body, bg='#e6e6e6', text='Login ').grid(row=0,
                                                              column=0,
                                                              columnspan=2,
                                                              pady=5)
        self.email_entry = tk.Entry(self.body, width=50)
        self.email_entry.grid(row=0, column=3, columnspan=7)

        tk.Label(self.body, bg='#e6e6e6', text='Password ').grid(row=1,
                                                                 column=0,
                                                                 columnspan=2,
                                                                 pady=5)
        self.pass_entry = tk.Entry(self.body, width=50)
        self.pass_entry.grid(row=1, column=3, columnspan=7)

        # Clicking the URL labels clears the corresponding entry field.
        search_url = tk.Label(self.body, bg='#e6e6e6', text='Search URL ')
        search_url.grid(row=2, column=0, columnspan=2, pady=5)
        search_url.bind("<Button-1>", self.clear_url_field)
        self.url_entry = tk.Entry(self.body, width=50)
        self.url_entry.grid(row=2, column=3, columnspan=7)

        search_sales_url = tk.Label(self.body, bg='#e6e6e6', text='Sales URL ')
        search_sales_url.grid(row=3, column=0, columnspan=2, pady=5)
        search_sales_url.bind("<Button-1>", self.clear_sales_url_field)
        self.sales_url_entry = tk.Entry(self.body, width=50)
        self.sales_url_entry.grid(row=3, column=3, columnspan=7)

        # Daily limit entry.
        tk.Label(self.body, bg='#e6e6e6', text='Limit ').grid(row=4,
                                                              column=0,
                                                              columnspan=2,
                                                              pady=5)
        self.limit_entry = tk.Entry(self.body, width=50)
        self.limit_entry.grid(row=4, column=3, columnspan=7)

        # Checkbox toggling headless vs. visible browser; writes the
        # config immediately via update_browser.
        self.var = tk.IntVar()
        self.check_box = tk.Checkbutton(self.body,
                                        bg='#e6e6e6',
                                        text=" Run with browser ",
                                        variable=self.var,
                                        command=self.update_browser)
        self.check_box.grid(row=5, column=1, columnspan=5)

        # "Update info" button saves all fields via update_settings.
        self.results_btn = tk.Button(self.body,
                                     text='Update info',
                                     fg='#ffffff',
                                     bg='#214312',
                                     activebackground='#e6e6e6',
                                     borderwidth=0,
                                     highlightthickness=0,
                                     width=18,
                                     height=2)

        self.results_btn.bind("<Button-1>", self.update_settings)
        self.results_btn.place(x=250, y=200)

        # Logo
        # keep a reference on the label (b.image) so the PhotoImage is
        # not garbage-collected while displayed
        im = PIL.Image.open("logo.png")
        photo = PIL.ImageTk.PhotoImage(im)
        b = tk.Label(self.body, image=photo, bg='#e6e6e6')
        b.image = photo
        b.place(x=430, y=55)

        # Fill the fields with data from the config file.
        config = RawConfigParser()
        config.read('../config.ini')
        self.email_entry.insert(0, config.get('main', 'email'))
        self.pass_entry.insert(0, config.get('main', 'password'))
        self.url_entry.insert(0, config.get('main', 'search_link'))
        self.sales_url_entry.insert(0, config.get('main', 'sales_url'))
        self.limit_entry.insert(0, config.get('main', 'day_limit'))
        if config.get('main', 'browser') == '1':
            self.check_box.select()
0
def execute(*args, **kw):
    """Generate /etc/kolab-freebusy/config.ini from the Kolab configuration.

    Reads IMAP and LDAP connection settings from the global ``conf`` object,
    builds the Free/Busy directory definitions and merges them into the
    existing config.ini (promoting the shipped .sample file first if the
    live file does not exist yet).
    """
    # Bail out early when Free/Busy is not installed at all.
    if not os.path.isfile(
            '/etc/kolab-freebusy/config.ini') and not os.path.isfile(
                '/etc/kolab-freebusy/config.ini.sample'):
        log.error(_("Free/Busy is not installed on this system"))
        return

    # Promote the shipped sample to the live configuration file.
    if not os.path.isfile('/etc/kolab-freebusy/config.ini'):
        os.rename('/etc/kolab-freebusy/config.ini.sample',
                  '/etc/kolab-freebusy/config.ini')

    imap_backend = conf.get('kolab', 'imap_backend')
    admin_login = conf.get(imap_backend, 'admin_login')
    admin_password = conf.get(imap_backend, 'admin_password')
    imap_uri = conf.get(imap_backend, 'imap_uri')
    if imap_uri is None:
        # Fall back to the alternative option name used by some setups.
        imap_uri = conf.get(imap_backend, 'uri')

    scheme = None
    hostname = None
    port = None

    result = urlparse(imap_uri)

    if hasattr(result, 'hostname'):
        hostname = result.hostname

        if hasattr(result, 'port'):
            port = result.port

        if hasattr(result, 'scheme'):
            scheme = result.scheme

    else:
        # Manual parsing for urlparse results without named attributes
        # (very old Python versions).
        scheme = imap_uri.split(':')[0]
        (hostname, port) = imap_uri.split('/')[2].split(':')

    # Fill in protocol defaults where the URI did not specify them.
    if scheme == 'imaps' and (port is None or port == ''):
        port = 993

    if scheme is None or scheme == '':
        scheme = 'imap'

    if port is None or port == '':
        port = 143

    resources_imap_uri = '%s://%s:%s@%s:%s/%%kolabtargetfolder?acl=lrs' % (
        scheme, admin_login, admin_password, hostname, port)
    users_imap_uri = '%s://%%s:%s@%s:%s/?proxy_auth=%s' % (
        scheme, admin_password, hostname, port, admin_login)

    freebusy_settings = {
        'httpauth': {
            'type':
            'ldap',
            'host':
            conf.get('ldap', 'ldap_uri'),
            'base_dn':
            conf.get('ldap', 'base_dn'),
            'bind_dn':
            conf.get('ldap', 'service_bind_dn'),
            'bind_pw':
            conf.get('ldap', 'service_bind_pw'),
            'filter':
            '(&(objectClass=kolabInetOrgPerson)(|(mail=%s)(alias=%s)(uid=%s)))',
        },
        'trustednetworks': {
            'allow': ','.join(get_local_ips())
        },
        'directory "local"': {
            'type': 'static',
            'fbsource': 'file:/var/lib/kolab-freebusy/%s.ifb',
        },
        'directory "local-cache"': {
            'type': 'static',
            'fbsource': 'file:/var/cache/kolab-freebusy/%s.ifb',
            'expires': '15m'
        },
        'directory "kolab-people"': {
            'type': 'ldap',
            'host': conf.get('ldap', 'ldap_uri'),
            'base_dn': conf.get('ldap', 'base_dn'),
            'bind_dn': conf.get('ldap', 'service_bind_dn'),
            'bind_pw': conf.get('ldap', 'service_bind_pw'),
            'filter':
            '(&(objectClass=kolabInetOrgPerson)(|(mail=%s)(alias=%s)))',
            'attributes': 'mail',
            'lc_attributes': 'mail',
            'primary_domain': conf.get('kolab', 'primary_domain'),
            'fbsource': users_imap_uri,
            'cacheto': '/var/cache/kolab-freebusy/%s.ifb',
            'expires': '15m',
            'loglevel': 300,
        },
        'directory "kolab-resources"': {
            'type': 'ldap',
            'host': conf.get('ldap', 'ldap_uri'),
            'base_dn': conf.get('ldap', 'resource_base_dn'),
            'bind_dn': conf.get('ldap', 'service_bind_dn'),
            'bind_pw': conf.get('ldap', 'service_bind_pw'),
            'attributes': 'mail, kolabtargetfolder',
            'filter':
            '(&(objectClass=kolabsharedfolder)(kolabfoldertype=event)(mail=%s))',
            'primary_domain': conf.get('kolab', 'primary_domain'),
            'fbsource': resources_imap_uri,
            'cacheto': '/var/cache/kolab-freebusy/%s.ifb',
            'expires': '15m',
            'loglevel': 300,
        },
        'directory "kolab-resource-collections"': {
            'type': 'ldap',
            'host': conf.get('ldap', 'ldap_uri'),
            'base_dn': conf.get('ldap', 'resource_base_dn'),
            'bind_dn': conf.get('ldap', 'service_bind_dn'),
            'bind_pw': conf.get('ldap', 'service_bind_pw'),
            'filter': '(&(objectClass=kolabgroupofuniquenames)(mail=%s))',
            # NOTE(review): the original contained a stray 'mail' literal that
            # was implicitly concatenated with the following key, producing a
            # bogus 'mailresolve_dn' entry and losing 'resolve_dn'. 'mail' is
            # now part of the attributes list, mirroring the
            # "kolab-resources" section above -- confirm against upstream.
            'attributes': 'uniquemember, mail',
            'resolve_dn': 'uniquemember',
            'resolve_attribute': 'mail',
            'primary_domain': conf.get('kolab', 'primary_domain'),
            'fbsource': 'aggregate://%uniquemember',
            'directories': 'kolab-resources',
            'cacheto': '/var/cache/kolab-freebusy/%mail.ifb',
            'expires': '15m',
            'loglevel': 300,
        },
    }

    cfg_parser = RawConfigParser()
    cfg_parser.read('/etc/kolab-freebusy/config.ini')

    for section, settings in freebusy_settings.items():
        # Drop sections that ended up without any settings.
        if not settings:
            cfg_parser.remove_section(section)
            continue

        if not cfg_parser.has_section(section):
            cfg_parser.add_section(section)

        for key, value in settings.items():
            cfg_parser.set(section, key, value)

    # The context manager guarantees the handle is closed even if
    # write() raises.
    with open('/etc/kolab-freebusy/config.ini', "w+") as fp:
        cfg_parser.write(fp)
class DB(object):
    """Read-only access to the apt-xapian package index plus a small
    persistent state cache.

    NOTE(review): relies on the external names ``xapian``, ``axi`` and
    ``CACHEFILE``; everything documented here is inferred from this block
    alone -- confirm against the rest of the module.
    """

    class BasicFilter(xapian.ExpandDecider):
        # Decides which candidate terms are acceptable for query expansion.
        def __init__(self, stemmer=None, exclude=None, prefix=None):
            super(DB.BasicFilter, self).__init__()
            # Use the identity function when no stemmer is supplied.
            self.stem = stemmer if stemmer else lambda x: x
            # Pre-stem the exclusion list so comparisons match stemmed terms.
            self.exclude = set([self.stem(x)
                                for x in exclude]) if exclude else set()
            self.prefix = prefix

        def __call__(self, term):
            # Reject very short terms outright.
            if len(term) < 4: return False
            if self.prefix is not None:
                # Skip leading uppercase chars
                t = term
                while t and t[0].isupper():
                    t = t[1:]
                if not t.startswith(self.prefix):
                    return False
            if self.stem(term) in self.exclude: return False
            # Terms with the XT (tag) or XS (section) prefix always pass.
            if term.startswith("XT") or term.startswith("XS"): return True
            # Otherwise only accept plain lowercase-initial terms.
            return term[0].islower()

    class TermFilter(BasicFilter):
        # Like BasicFilter but without the prefix/XT/XS special cases:
        # accepts only sufficiently long, non-excluded, lowercase terms.
        def __call__(self, term):
            if len(term) < 4: return False
            if self.stem(term) in self.exclude: return False
            return term[0].islower()

    class TagFilter(xapian.ExpandDecider):
        # Accepts only tag terms (XT prefix).
        def __call__(self, term):
            return term.startswith("XT")

    def __init__(self):
        # Access the Xapian index
        self.db = xapian.Database(axi.XAPIANINDEX)

        self.stem = xapian.Stem("english")

        # Build query parser
        self.qp = xapian.QueryParser()
        self.qp.set_default_op(xapian.Query.OP_AND)
        self.qp.set_database(self.db)
        self.qp.set_stemmer(self.stem)
        self.qp.set_stemming_strategy(xapian.QueryParser.STEM_SOME)
        # Field prefixes usable in queries: pkg:, tag:, sec:.
        self.qp.add_prefix("pkg", "XP")
        self.qp.add_boolean_prefix("tag", "XT")
        self.qp.add_boolean_prefix("sec", "XS")

        #notmuch->value_range_processor = new Xapian::NumberValueRangeProcessor (NOTMUCH_VALUE_TIMESTAMP);
        #notmuch->query_parser->add_valuerangeprocessor (notmuch->value_range_processor);

        # Read state from previous runs
        # (Python 2 except syntax; a corrupt cache is reported and ignored.)
        self.cache = RawConfigParser()
        if os.path.exists(CACHEFILE):
            try:
                self.cache.read(CACHEFILE)
            except Error, e:
                print >> sys.stderr, e
                print >> sys.stderr, "ignoring %s which seems to be corrupted" % CACHEFILE

        # True once the cache has unsaved modifications.
        self.dirty = False
        # Lazily-computed result sets, populated elsewhere.
        self.facets = None
        self.tags = None
Beispiel #43
0
    def __init__(self):
        """Parse configuration and CLI options."""
        global config_file

        # look for an alternative configuration file
        alt_config_file = False
        # used to show errors before we actually start parsing stuff
        parser = OptionParser()
        for arg in sys.argv:
            if arg == '--config':
                try:
                    alt_config_file = sys.argv[sys.argv.index(arg) + 1]
                    config_file = alt_config_file
                except IndexError:
                    pass
            elif arg.startswith('--config='):
                _, alt_config_file = arg.split('=', 1)
                if alt_config_file == '':
                    parser.error("--config option requires an argument")
                config_file = alt_config_file

        config = RawConfigParser({
            'auth-url': None,
            'insecure': False,
            'host-key-file': None,
            'bind-address': "127.0.0.1",
            'port': 8022,
            'server-ident': 'sftpcloudfs_%s' % version,
            'memcache': None,
            'max-children': "20",
            'auth-timeout': "60",
            'negotiation-timeout': "0",
            'keepalive': "0",
            'ciphers': None,
            'digests': None,
            'log-file': None,
            'syslog': 'no',
            'verbose': 'no',
            'scp-support': 'yes',
            'pid-file': None,
            'uid': None,
            'gid': None,
            'split-large-files': "0",
            'hide-part-dir': "no",
            # keystone auth support
            'keystone-auth': False,
            'keystone-auth-version': '2.0',
            'keystone-region-name': None,
            'keystone-tenant-separator': default_ks_tenant_separator,
            'keystone-domain-separator': '@',
            'keystone-service-type': default_ks_service_type,
            'keystone-endpoint-type': default_ks_endpoint_type,
            'storage-policy': None,
            'proxy-protocol': 'no',
            'rsync-bin': None,
            'large-object-container': 'no',
            'large-object-container-suffix': '_segments',
            'fail2ban': False,
            'ban-time': 600,
            'find-time': 600,
            'max-retry': 3,
        })

        try:
            if not config.read(config_file) and alt_config_file:
                # the default conf file is optional
                parser.error("failed to read %s" % config_file)
        except ParsingError as ex:
            parser.error("failed to read %s: %s" % (config_file, ex.message))

        if not config.has_section('sftpcloudfs'):
            config.add_section('sftpcloudfs')

        parser = OptionParser(version="%prog " + version,
                              description="This is a SFTP interface to OpenStack " + \
                                    "Object Storage (Swift).",
                              epilog="Contact and support at: %s" % project_url)

        parser.add_option("-a",
                          "--auth-url",
                          dest="authurl",
                          default=config.get('sftpcloudfs', 'auth-url'),
                          help="Authentication URL")

        parser.add_option(
            "--insecure",
            dest="insecure",
            action="store_true",
            default=config.get('sftpcloudfs', 'insecure'),
            help="Allow to access servers without checking SSL certs")

        host_key = config.get('sftpcloudfs', 'host-key-file')
        if host_key:
            host_key = [x.strip() for x in host_key.split(',')]
        parser.add_option("-k",
                          "--host-key-file",
                          type="str",
                          dest="host_key",
                          action="append",
                          default=host_key,
                          help="Host key(s) used by the server")

        parser.add_option("-b",
                          "--bind-address",
                          dest="bind_address",
                          default=config.get('sftpcloudfs', 'bind-address'),
                          help="Address to bind (default: 127.0.0.1)")

        parser.add_option("-p",
                          "--port",
                          dest="port",
                          type="int",
                          default=config.get('sftpcloudfs', 'port'),
                          help="Port to bind (default: 8022)")

        parser.add_option("--server-ident", dest="server_ident",
                          type="str",
                          default=config.get('sftpcloudfs', 'server-ident'),
                          help="Server ident to use when sending the SSH banner to the " + \
                                  "client (default: sftpcloudfs_%s)" % version)

        memcache = config.get('sftpcloudfs', 'memcache')
        if memcache:
            memcache = [x.strip() for x in memcache.split(',')]
        parser.add_option(
            '--memcache',
            type="str",
            dest="memcache",
            action="append",
            default=memcache,
            help="Memcache server(s) to be used for cache (ip:port)")

        parser.add_option("-l",
                          "--log-file",
                          dest="log_file",
                          default=config.get('sftpcloudfs', 'log-file'),
                          help="Log into provided file")

        parser.add_option(
            "-f",
            "--foreground",
            dest="foreground",
            action="store_true",
            default=False,
            help="Run in the foreground (don't detach from terminal)")

        parser.add_option(
            "--disable-scp",
            dest="no_scp",
            action="store_true",
            default=not config.getboolean('sftpcloudfs', 'scp-support'),
            help="Disable SCP support (default: enabled)")

        parser.add_option(
            "--syslog",
            dest="syslog",
            action="store_true",
            default=config.getboolean('sftpcloudfs', 'syslog'),
            help="Enable logging to system logger (daemon facility)")

        parser.add_option("-v",
                          "--verbose",
                          dest="verbose",
                          action="store_true",
                          default=config.getboolean('sftpcloudfs', 'verbose'),
                          help="Show detailed information on logging")

        parser.add_option('--pid-file',
                          type="str",
                          dest="pid_file",
                          default=config.get('sftpcloudfs', 'pid-file'),
                          help="Full path to the pid file location")

        parser.add_option(
            '--uid',
            dest="uid",
            default=config.get('sftpcloudfs', 'uid'),
            help="UID to drop the privileges to when in daemon mode")

        parser.add_option(
            '--gid',
            dest="gid",
            default=config.get('sftpcloudfs', 'gid'),
            help="GID to drop the privileges to when in daemon mode")

        parser.add_option('--keystone-auth',
                          action="store_true",
                          dest="keystone",
                          default=config.get('sftpcloudfs', 'keystone-auth'),
                          help="Use Keystone auth (requires keystoneclient)")

        parser.add_option(
            '--keystone-auth-version',
            type="str",
            dest="auth_version",
            default=config.get('sftpcloudfs', 'keystone-auth-version'),
            help="Identity API version to be used (default: 2.0)")

        parser.add_option('--keystone-region-name',
                          type="str",
                          dest="region_name",
                          default=config.get('sftpcloudfs',
                                             'keystone-region-name'),
                          help="Region name to be used in Keystone auth")

        parser.add_option('--keystone-tenant-separator',
                          type="str",
                          dest="tenant_separator",
                          default=config.get('sftpcloudfs', 'keystone-tenant-separator'),
                          help="Character used to separate tenant_name/username in Keystone auth, " + \
                              "default: TENANT%sUSERNAME" % default_ks_tenant_separator)

        parser.add_option('--keystone-domain-separator',
                          type="str",
                          dest="domain_separator",
                          default=config.get('sftpcloudfs', 'keystone-domain-separator'),
                          help="Character used to separate project_name/project_domain_name " + \
                               "and username/user_domain_name in Keystone auth v3 (default: @)")

        parser.add_option(
            '--keystone-service-type',
            type="str",
            dest="service_type",
            default=config.get('sftpcloudfs', 'keystone-service-type'),
            help="Service type to be used in Keystone auth, default: %s" %
            default_ks_service_type)

        parser.add_option(
            '--keystone-endpoint-type',
            type="str",
            dest="endpoint_type",
            default=config.get('sftpcloudfs', 'keystone-endpoint-type'),
            help="Endpoint type to be used in Keystone auth, default: %s" %
            default_ks_endpoint_type)

        parser.add_option('--config',
                          type="str",
                          dest="config",
                          default=config_file,
                          help="Use an alternative configuration file")

        parser.add_option("--storage-policy",
                          type="str",
                          dest="storage_policy",
                          default=config.get('sftpcloudfs', 'storage-policy'),
                          help="Swift storage policy to be used")

        parser.add_option("--proxy-protocol",
                          action="store_true",
                          dest="proxy_protocol",
                          default=config.getboolean('sftpcloudfs',
                                                    'proxy-protocol'),
                          help="Enable the Proxy protocol header parser")

        parser.add_option("--rsync-bin",
                          type="str",
                          dest="rsync_bin",
                          default=config.get('sftpcloudfs', 'rsync-bin'),
                          help="Custom rsync binary to be used")

        parser.add_option('--large-object-container',
                          action="store_true",
                          dest="large_object_container",
                          default=config.getboolean('sftpcloudfs',
                                                    'large-object-container'),
                          help="Enable large object container support")

        parser.add_option(
            '--large-object-container-suffix',
            type="str",
            dest="large_object_container_suffix",
            default=config.get('sftpcloudfs', 'large-object-container-suffix'),
            help="Large object container suffix (default: '_segments'")

        parser.add_option('--fail2ban',
                          action="store_true",
                          dest="fail2ban",
                          default=config.get('sftpcloudfs', 'fail2ban'),
                          help="Enable fail2ban feature (requires memcache)")

        parser.add_option('--ban-time',
                          type="int",
                          dest="ban_time",
                          default=config.get('sftpcloudfs', 'ban-time'),
                          help="Ban duration in seconds (default: 600)")

        parser.add_option(
            '--find-time',
            type="int",
            dest="find_time",
            default=config.get('sftpcloudfs', 'find-time'),
            help=
            "Duration in seconds before counter reset if no match is found (default: 600)"
        )

        parser.add_option(
            '--max-retry',
            type="int",
            dest="max_retry",
            default=config.get('sftpcloudfs', 'max-retry'),
            help=
            "Number of matches before triggering the ban action (default: 3)")

        (options, args) = parser.parse_args()

        # required parameters
        if not options.authurl:
            parser.error("No auth-url provided")

        if not options.host_key:
            parser.error("No host-key-file provided")

        self.host_key = []
        try:
            [
                self.host_key.append(self._get_pkey_object(k))
                for k in options.host_key
            ]
        except (IOError, paramiko.SSHException), e:
            parser.error("host-key-file: %s" % e)
import email.utils
import GnuPG
import os
import re
import smtplib
import sys
import syslog
import traceback

# imports for S/MIME
from M2Crypto import BIO, Rand, SMIME, X509
from email.mime.message import MIMEMessage

# Load /etc/gpg-mailgate.conf and expose it as a plain nested dict:
# cfg[section][option] -> raw string value.
_cfg = RawConfigParser()
_cfg.read('/etc/gpg-mailgate.conf')
cfg = {
    section: dict(_cfg.items(section))
    for section in _cfg.sections()
}


def log(msg):
    """Append *msg* to the configured log target.

    Uses ``cfg['logging']['file']``: the special value ``"syslog"`` routes
    the message to the mail facility of the system logger; any other value
    is treated as a file path to append to.  Does nothing when no log file
    is configured.
    """
    if 'logging' in cfg and 'file' in cfg['logging']:
        target = cfg['logging']['file']
        if target == "syslog":
            syslog.syslog(syslog.LOG_INFO | syslog.LOG_MAIL, msg)
        else:
            # Context manager guarantees the handle is closed even if
            # write() raises (the original leaked the handle on error).
            with open(target, 'a') as logfile:
                logfile.write(msg + "\n")
Beispiel #45
0
def main(args):
    config = ConfigParser({"htrc": False, "sentences": "False"})
    config.read(args.config_file)

    if config.getboolean("main", "sentences"):
        from vsm.extensions.ldasentences import CorpusSent as Corpus
    else:
        from vsm.corpus import Corpus

    if args.lang is None:
        args.lang = []

    args.corpus_path = config.get("main", "corpus_file")
    c = Corpus.load(args.corpus_path)

    # check for htrc metadata
    if args.htrc or config.get("main", "htrc"):
        htrc_langs = get_htrc_langs(args)
        if htrc_langs:
            args.lang.extend(new_langs)

    # auto-guess a language
    """
    new_langs = [lang for lang in detect_langs(c) if lang in langs and lang not in args.lang]
    if new_langs:
        args.lang.extend(new_langs)
    """

    # add default locale if no other languages are specified
    # do not add if in quiet mode -- make everything explicit
    if not args.lang and not args.quiet:
        import locale
        locale = locale.getdefaultlocale()[0].split('_')[0].lower()
        if locale in langs.keys():
            args.lang.append(locale)

    # check for any new candidates
    args.lang = [lang for lang in args.lang if stop_language(c, langs[lang])]
    if args.lang and not args.quiet:
        args.lang = lang_prompt(args.lang)

    stoplist = set()
    # Apply stop words
    print " "
    for lang in args.lang:
        print "Applying", langs[lang], "stopwords"
        candidates = stop_language(c, langs[lang])
        if len(candidates):
            stoplist.update(candidates)

    # Apply custom stopwords file
    if args.stopword_file:
        with open(args.stopword_file, encoding='utf8') as swf:
            candidates = [unidecode(word.strip()) for word in swf]
            if len(candidates):
                print "Applying custom stopword file to remove {} word{}.".format(
                    len(candidates), 's' if len(candidates) > 1 else '')
                stoplist.update(candidates)

    if args.min_word_len:
        candidates = get_small_words(c, args.min_word_len)
        if len(candidates):
            print "Filtering {} small word{} with less than {} characters.".format(
                len(candidates), 's' if len(candidates) > 1 else '',
                args.min_word_len)
            stoplist.update(candidates)

    if not args.special_chars:
        candidates = get_special_chars(c)
        if len(candidates):
            print "Filtering {} word{} with special characters.".format(
                len(candidates), 's' if len(candidates) > 1 else '')
            stoplist.update(candidates)

    if args.high_filter is None and not args.quiet:
        args.high_filter, candidates = get_high_filter(args, c, words=stoplist)
        if len(candidates):
            print "Filtering {} high frequency word{}.".format(
                len(candidates), 's' if len(candidates) > 1 else '')
            stoplist.update(candidates)
    elif args.high_filter > 0:
        candidates = get_candidate_words(c, args.high_filter, sort=False)
        if len(candidates):
            print "Filtering {} high frequency word{}.".format(
                len(candidates), 's' if len(candidates) > 1 else '')
            stoplist.update(candidates)

    if args.low_filter is None and not args.quiet:
        args.low_filter, candidates = get_low_filter(args, c, words=stoplist)
        if len(candidates):
            print "Filtering {} low frequency word{}.".format(
                len(candidates), 's' if len(candidates) > 1 else '')
            stoplist.update(candidates)
    elif args.low_filter > 0:
        candidates = get_candidate_words(c, -1 * args.low_filter, sort=False)
        if len(candidates):
            print "Filtering {} low frequency words.".format(len(candidates))
            stoplist.update(candidates)

    if not stoplist:
        print "No stopwords applied.\n\n"

        sys.exit(0)
    else:
        print "\n\nApplying {} stopword{}".format(
            len(stoplist), 's' if len(stoplist) > 1 else '')
        c.in_place_stoplist(stoplist)
        print "\n"

    def name_corpus(dirname, languages, lowfreq=None, highfreq=None):
        items, counts = get_items_counts(c.corpus)

        corpus_name = [dirname]
        if args.lang:
            corpus_name.append('nltk')
            corpus_name.append(''.join(args.lang))
        if lowfreq > 0:
            corpus_name.append('freq%s' % lowfreq)
        else:
            corpus_name.append('freq%s' % min(counts))

        if highfreq > 0:
            corpus_name.append('N%s' % highfreq)
        else:
            corpus_name.append('freq%s' % max(counts))

        corpus_name = '-'.join(corpus_name)
        corpus_name += '.npz'
        return corpus_name

    dirname = os.path.basename(args.corpus_path).split('-nltk-')[0].replace(
        '.npz', '')
    corpus_name = name_corpus(dirname, ['en'], args.low_filter,
                              args.high_filter)

    model_path = os.path.dirname(args.corpus_path)
    args.corpus_path = os.path.join(model_path, corpus_name)
    c.save(args.corpus_path)

    config.set("main", "corpus_file", args.corpus_path)
    config.remove_option("main", "model_pattern")
    with open(args.config_file, 'wb') as configfh:
        config.write(configfh)
Beispiel #46
0
class Module(object):
    """Module instance base class

    Objects of this type represent the modules that
    the user configures. Concrete module implementations
    (e.g. CPU utilization, disk usage, etc.) derive from
    this base class.
    """
    def __init__(self, engine, config=None, widgets=None):
        """Initialize the module from its configuration.

        :param engine: engine driving this module (kept for subclass use;
            not referenced by the base class itself)
        :param config: per-module configuration dict; a fresh empty dict is
            used when omitted
        :param widgets: a widget or list of widgets rendered by the module
        """
        # None sentinels instead of mutable default arguments so instances
        # never share the same dict/list object (classic Python pitfall).
        if config is None:
            config = {}
        self.name = config.get("name", self.__module__.split(".")[-1])
        self._config = config
        self.id = self.name
        self.error = None
        # Next update is due immediately.
        self._next = int(time.time())
        self._default_interval = 0

        # Optional user configuration file; first existing path wins.
        self._configFile = None
        for cfg in [
                os.path.expanduser("~/.bumblebee-status.conf"),
                os.path.expanduser("~/.config/bumblebee-status.conf")
        ]:
            if os.path.exists(cfg):
                self._configFile = RawConfigParser()
                self._configFile.read(cfg)
                log.debug("reading configuration file {}".format(cfg))
                break

        if self._configFile is not None and self._configFile.has_section(
                "module-parameters"):
            log.debug(self._configFile.items("module-parameters"))
        self._widgets = []
        if widgets:
            self._widgets = widgets if isinstance(widgets, list) else [widgets]

    def widgets(self):
        """Return the widgets to draw for this module"""
        return self._widgets

    def hidden(self):
        """Return True if the module should not be drawn; default is False."""
        return False

    def widget(self, name):
        """Return the first widget with the given name, or None."""
        for widget in self._widgets:
            if widget.name == name:
                return widget

    def errorWidget(self):
        """Return a widget displaying this module's (truncated) error text."""
        msg = self.error
        if len(msg) > 10:
            msg = "{}...".format(msg[0:7])
        return bumblebee.output.Widget(full_text="error: {}".format(msg))

    def widget_by_id(self, uid):
        """Return the widget with the given id, or None."""
        for widget in self._widgets:
            if widget.id == uid:
                return widget
        return None

    def update(self, widgets):
        """By default, update() is a NOP"""
        pass

    def update_wrapper(self, widgets):
        """Run update() if due, capture errors and schedule the next run."""
        if self._next > int(time.time()):
            return
        try:
            self.error = None
            self.update(self._widgets)
        except Exception as e:
            log.error("error updating '{}': {}".format(self.name, str(e)))
            self.error = str(e)
        # The "interval" parameter is expressed in minutes.
        self._next += int(self.parameter("interval",
                                         self._default_interval)) * 60

    def interval(self, intvl):
        """Set the default update interval (minutes) and make an update due."""
        self._default_interval = intvl
        self._next = int(time.time())

    def update_all(self):
        """Force an update cycle over all of this module's widgets."""
        self.update_wrapper(self._widgets)

    def has_parameter(self, name):
        """Return True if the parameter is set on the CLI or in the config."""
        v = self.parameter(name)
        return v is not None

    def parameter(self, name, default=None):
        """Return the config parameter 'name' for this module"""
        name = "{}.{}".format(self.name, name)
        value = self._config["config"].get(name, default)
        log.debug("command line parameter {}={}".format(name, str(value)))
        if value == default:
            try:
                log.debug(
                    "trying to read {} from configuration file".format(name))
                value = self._configFile.get("module-parameters", name)
                log.debug("configuration file {}={}".format(name, str(value)))
            except Exception:
                # No config file loaded or the option is missing; keep the
                # default. (Narrowed from a bare 'except:' so SystemExit and
                # KeyboardInterrupt are no longer swallowed.)
                pass
        return value

    def threshold_state(self, value, warn, crit):
        """Map a value to 'critical'/'warning'/None using the thresholds."""
        if value > float(self.parameter("critical", crit)):
            return "critical"
        if value > float(self.parameter("warning", warn)):
            return "warning"
        return None
class Config():
    """Application configuration backed by ConfigParser.

    Seeds the parser with built-in defaults, then load() overlays
    whatever is stored on disk at CONFIG_FOLDER.
    """

    def __init__(self):
        self._config = ConfigParser()
        # Re-export the parser's accessors directly on this object.
        self.getboolean = self._config.getboolean
        self.getint = self._config.getint
        self.getfloat = self._config.getfloat
        self.options = self._config.options
        self.has_option = self._config.has_option
        self.remove_option = self._config.remove_option
        self.add_section = self._config.add_section

        # Fix: dict.iteritems() does not exist on Python 3 and this file
        # is used under Python 3; items() behaves the same on both.
        for section, values in self.__get_default().items():
            self._config.add_section(section)
            for key, value in values.items():
                self._config.set(section, key, value)

    def load(self):
        """Read the on-disk configuration and migrate it if necessary."""
        self._config.read(CONFIG_FOLDER)
        self.update_config()

    def update_config(self):
        """Upgrade an older configuration file and persist the current version."""
        version = self.get("setting", "version", "")
        if not version:
            logging.debug("not version")
        #self.set("wikipedia","lang", self.get("wikipedia","lang").replace("<###>",";"))
        self.set("setting", "version", VERSION_CONFIG)
        self.write()

    def get(self, section, option, default=None):
        """Return an option value; fall back to 'default' when one is given."""
        if default is None:
            return self._config.get(section, option)
        try:
            return self._config.get(section, option)
        except Exception:
            # Fix: narrowed from a bare "except:".  A missing section or
            # option still yields the caller-supplied default.
            return default

    def set(self, section, option, value):
        """Set an option, creating its section on demand."""
        if not self._config.has_section(section):
            logging.debug("Section \"%s\" not exist, create...", section)
            self._config.add_section(section)
        self._config.set(section, option, value)
        #Dispatcher.config_change(section,option,value)

    def write(self):
        """Persist the configuration to CONFIG_FOLDER."""
        # Fix: the Python 2 file() builtin is gone; open() works on both
        # versions, and the with-block guarantees the handle is closed.
        with open(CONFIG_FOLDER, "w") as f:
            self._config.write(f)

    def state(self, arg):
        """Return the boolean value of 'arg' from the [setting] section."""
        return self._config.getboolean("setting", arg)

    def __get_default(self):
        """Built-in default values used to seed a fresh configuration."""
        return {
            "plugins": {},
            "setting": {
                "username": "******",
                "password": "******",
            }
        }
Beispiel #48
0
    opts, args = parser.parse_args()

    if opts.debug:
        logging.getLogger().setLevel(logging.DEBUG)

    # Load the config from file if present
    # TODO: Refactor all this
    cfg_path = os.path.join(XDG_CONFIG_DIR, 'quicktile.cfg')
    first_run = not os.path.exists(cfg_path)

    config = RawConfigParser()
    config.optionxform = str  # Make keys case-sensitive
    #TODO: Maybe switch to two config files so I can have only the keys in the
    #      keymap case-sensitive?
    config.read(cfg_path)
    dirty = False  # set whenever the in-memory config diverges from disk

    if not config.has_section('general'):
        config.add_section('general')
        # Change this if you make backwards-incompatible changes to the
        # section and key naming in the config file.
        config.set('general', 'cfg_schema', 1)
        dirty = True

    # Fill in any missing keys with their (stringified) default values.
    for key, val in DEFAULTS['general'].items():
        if not config.has_option('general', key):
            config.set('general', key, str(val))
            dirty = True

    mk_raw = modkeys = config.get('general', 'ModMask')
Beispiel #49
0
log = logging.getLogger('docgen')

if __name__ == '__main__':

    usage = "create_document.py [options] <definition_file.xml>"

    optparser = BaseOptions(usage=usage)
    optparser.parseOptions()

    # Dynamic namespace information that is passed into document generators
    ns = {}

    # Load configuration file
    defaults = RawConfigParser()
    parsedfiles = defaults.read(optparser.options.configfile)
    # RawConfigParser.read() returns the list of files it actually parsed;
    # an empty list means the config file was missing or unreadable.
    if len(parsedfiles) == 0:
        raise ValueError("Cannot load configuration file: %s" %
                         optparser.options.configfile)

    # Load the document generation plugins
    doc_plugins = load_doc_plugins(defaults)

    try:
        # load the configuration from a config file
        proj = Project()
        tree = etree.parse(optparser.options.definitionfile)
        proj.configure_from_node(tree.getroot(), defaults, None)

    except:
        # NOTE(review): bare except hides the failure type; consider
        # logging the traceback (log.exception) or re-raising afterwards.
        log.critical("Cannot load configuration. Unhandled error condition:")
#!/usr/bin/env python

from datetime import datetime
import sys
import os
import shutil
import re
import argparse
from ConfigParser import RawConfigParser

# Collector settings are read once, at import time, from the working
# directory; a missing file leaves the parser empty and the get() calls
# below will raise NoSectionError.
config = RawConfigParser()
config.read("collect_stats.cfg")

# Build dir of the benchmarks repo
BUILDDIR = config.get("dirs", "builddir")
# Root dir for collected stats files
STATSDIR = config.get("dirs", "statsdir")
# Create subdir per date+time in stats dir?
CREATE_SUBDIRS = config.getboolean("dirs", "create_subdirs")
# Folder to collect final CSV files
if config.has_option("dirs", "csvdir"):
    COLLECT_CSV_DIR = config.get("dirs", "csvdir")
else:
    COLLECT_CSV_DIR = None


class StatsParser:
    def __init__(self, filename):
        # Map of instruction -> fetched, retired, discared
        self.instructions = {}
        # Map of method -> hits, misses
Beispiel #51
0
    def __init__(self,
                 host="localhost",
                 user=None,
                 passwd="",
                 db=None,
                 port=3306,
                 unix_socket=None,
                 charset='',
                 sql_mode=None,
                 read_default_file=None,
                 use_unicode=None,
                 client_flag=0,
                 cursorclass=Cursor,
                 init_command=None,
                 connect_timeout=None,
                 ssl=None,
                 read_default_group=None,
                 compress=None,
                 named_pipe=None,
                 conv=decoders,
                 encoders=encoders):
        """
        Establish a connection to the MySQL database. Accepts several
        arguments:

        host: Host where the database server is located
        user: Username to log in as
        passwd: Password to use.
        db: Database to use, None to not use a particular one.
        port: MySQL port to use, default is usually OK.
        unix_socket: Optionally, you can use a unix socket rather than TCP/IP.
        charset: Charset you want to use.
        sql_mode: Default SQL_MODE to use.
        read_default_file: Specifies  my.cnf file to read these parameters from under the [client] section.
        conv: Decoders dictionary to use instead of the default one. This is used to provide custom marshalling of types. See converters.
        use_unicode: Whether or not to default to unicode strings. This option defaults to true for Py3k.
        client_flag: Custom flags to send to MySQL. Find potential values in constants.CLIENT.
        cursorclass: Custom cursor class to use.
        init_command: Initial SQL statement to run when connection is established.
        connect_timeout: Timeout before throwing an exception when connecting.
        ssl: A dict of arguments similar to mysql_ssl_set()'s parameters. For now the capath and cipher arguments are not supported.
        read_default_group: Group to read from in the configuration file.
        compress: Not supported
        named_pipe: Not supported
        """

        # Unicode strings are the default on Python 3 unless the caller
        # explicitly disabled them.
        if use_unicode is None and sys.version_info[0] > 2:
            use_unicode = True

        if compress or named_pipe:
            raise NotImplementedError(
                "compress and named_pipe arguments are not supported")

        if ssl and ('capath' in ssl or 'cipher' in ssl):
            raise NotImplementedError(
                'ssl options capath and cipher are not supported')

        self.socket = None
        self.ssl = False
        if ssl:
            # Record the SSL key/cert/CA paths (None when absent) and
            # request an SSL session from the server.
            self.ssl = True
            client_flag |= CLIENT.SSL
            for k in ('key', 'cert', 'ca'):
                v = None
                if k in ssl:
                    v = ssl[k]
                setattr(self, k, v)

        # A defaults group without an explicit file implies the platform's
        # conventional my.cnf / my.ini location.
        if read_default_group and not read_default_file:
            if sys.platform.startswith("win"):
                read_default_file = "c:\\my.ini"
            else:
                for f in ('~/.my.cnf', '/etc/my.cnf', '/etc/mysql/my.cnf'):
                    if os.path.isfile(os.path.expanduser(f)):
                        read_default_file = f
                        break

        if read_default_file:
            if not read_default_group:
                read_default_group = "client"

            cfg = RawConfigParser()
            cfg.read(os.path.expanduser(read_default_file))

            # Option-file values only fill in parameters the caller left
            # at their defaults; any read failure keeps the default.
            def _config(key, default):
                try:
                    return cfg.get(read_default_group, key)
                except:
                    return default

            user = _config("user", user)
            passwd = _config("password", passwd)
            host = _config("host", host)
            db = _config("db", db)
            unix_socket = _config("socket", unix_socket)
            port = _config("port", port)
            charset = _config("default-character-set", charset)

        # For the default host/port on non-Windows platforms, prefer a
        # local unix socket if one exists at a well-known path.
        if (host == 'localhost' and port == 3306
                and not sys.platform.startswith('win')
                and (unix_socket is None or not os.path.exists(unix_socket))):
            for f in ('/var/lib/mysql/mysql.sock', '/var/run/mysql/mysql.sock',
                      '/var/run/mysql.sock', '/var/mysql/mysql.sock'):
                if os.path.exists(f) and stat.S_ISSOCK(os.stat(f).st_mode):
                    unix_socket = f
                    break
        self.host = host
        self.port = port
        self.user = user or DEFAULT_USER
        self.password = passwd
        self.db = db
        self.unix_socket = unix_socket
        self.conv = conv
        self.encoders = encoders
        # An explicit charset implies unicode mode; otherwise both fall
        # back to the library defaults (possibly overridden just below).
        if charset:
            self.charset = charset
            self.use_unicode = True
        else:
            self.charset = DEFAULT_CHARSET
            self.use_unicode = False

        if use_unicode is not None:
            self.use_unicode = use_unicode

        self.encoding = encoding_by_charset(self.charset)

        client_flag |= CLIENT.CAPABILITIES
        client_flag |= CLIENT.MULTI_STATEMENTS
        if self.db:
            client_flag |= CLIENT.CONNECT_WITH_DB
        # self.client_flag |= CLIENT.CLIENT_DEPRECATE_EOF
        self.client_flag = client_flag

        self.cursorclass = cursorclass
        self.connect_timeout = connect_timeout

        self._connect()

        self.messages = []
        self.set_charset(charset)

        self._result = None
        self.host_info = "Not connected"

        # Explicitly disable autocommit on the fresh connection.
        self.autocommit(False)

        if sql_mode is not None:
            c = self.cursor()
            c.execute("SET sql_mode=%s", (sql_mode, ))

        # Run the caller's initial statement, if any, and commit it.
        if init_command is not None:
            c = self.cursor()
            c.execute(init_command)

            self.commit()
Beispiel #52
0
    'http://creativecommons.org/licenses/by/2.0/',
    'http://creativecommons.org/licenses/by-sa/2.0/',
    'http://creativecommons.org/licenses/by/2.5/',
    'http://creativecommons.org/licenses/by-sa/2.5/',
    'http://creativecommons.org/licenses/by/3.0/',
    'http://creativecommons.org/licenses/by-sa/3.0/',
    'http://creativecommons.org/licenses/by/4.0/',
    'http://creativecommons.org/licenses/by-sa/4.0/',
    'http://creativecommons.org/publicdomain/zero/1.0/'
]

USERCONFIG_FILENAME = "userconfig"
userconfig_file = path.join(config_path, USERCONFIG_FILENAME)
userconfig = RawConfigParser()
# Fix: the parser attribute is spelled "optionxform"; the previous
# "optionsxform" assignment created an unused attribute and was silently
# ignored, leaving option keys case-insensitive.
userconfig.optionxform = str  # case sensitivity
userconfig.read(userconfig_file)


def get_userconfig(section, option):
    """Look up 'option' in 'section' of the user configuration.

    A missing section or option is reported on stderr and terminates the
    process with exit status 127.
    """
    try:
        value = userconfig.get(section, option)
    except NoSectionError:
        stderr.write("“%s” does not contain a “%s” section.\n" % \
                         (userconfig_file, section))
        exit(127)
    except NoOptionError:
        stderr.write("“%s” does not contain a “%s” option in the “%s” section.\n" % \
                         (userconfig_file, option, section))
        exit(127)
    else:
        return value

Beispiel #53
0
from geopy.geocoders import Nominatim

import teslajson
import string
import shelve
import time
import sys


def now():
    """Return the current local time formatted as 'YYYY-MM-DD HH:MM:SS'."""
    timestamp_format = "%Y-%m-%d %H:%M:%S"
    return time.strftime(timestamp_format)


# Load the configuration
config = RawConfigParser()
config.read('teslacal.cfg')

# Connect to the Tesla cloud
c = teslajson.Connection(config.get('tesla', 'username'), config.get('tesla', 'password'))

# Filter the vehicles list based on VIN if specified
va = c.vehicles
vin = config.get('tesla', 'vin')
# NOTE(review): RawConfigParser.get() raises NoOptionError rather than
# returning None when 'vin' is absent, so this None check can only be
# False if the option is set to a literal None-like value — confirm.
if vin is not None:
    va = [ v for v in va if v['vin'] == vin ]

# Make sure we did find a vehicle and then select the first
if not len(va):
    raise Exception("Tesla not found!")
v = va[0]
Beispiel #54
0
    def __init__(self, configfiles):
        """Parse 'configfiles' into self.conf and build the voluptuous
        validation schemas used to check each configuration section type.
        """
        self.configfiles = configfiles

        configparser = RawConfigParser()
        config_tmp = configparser.read(self.configfiles)
        # Flatten the parsed configuration into a plain dict-of-dicts
        # keyed by section name.
        self.conf = dict()
        for section in configparser.sections():
            self.conf[section] = dict(configparser.items(section))

        #self.conf = ConfigObj(self.configfile, interpolation=False)

        # Validator: the value must name an existing file; returns the
        # expanded path.
        # NOTE(review): expanduser is applied twice here — the inner call
        # was probably meant to be expandvars; confirm.
        @message("file could not be found")
        def check_file(v):
            f = os.path.expanduser(os.path.expanduser(v))
            if os.path.exists(f):
                return f
            else:
                raise Invalid("file could not be found `%s`" % v)

        # Validator: the value must be an API version that novaclient
        # accepts (import deferred so novaclient is only needed when used).
        @message("Unsupported nova API version")
        def nova_api_version(version):
            try:
                from novaclient import client, exceptions
                client.get_client_class(version)
                return version
            except exceptions.UnsupportedVersion as ex:
                raise Invalid("Invalid option for `nova_api_version`: %s" % ex)

        # One voluptuous schema per section type.
        self.schemas = {
            "storage":
            Schema({
                Optional("storage_path"): All(str),
                Optional("storage_type"): Any('yaml', 'json', 'pickle'),
            }),
            "cloud":
            Schema(
                {
                    "provider": Any('ec2_boto', 'google', 'openstack'),
                    "ec2_url": Url(str),
                    Optional("ec2_access_key"): All(str, Length(min=1)),
                    Optional("ec2_secret_key"): All(str, Length(min=1)),
                    "ec2_region": All(str, Length(min=1)),
                    "auth_url": All(str, Length(min=1)),
                    "username": All(str, Length(min=1)),
                    "password": All(str, Length(min=1)),
                    "tenant_name": All(str, Length(min=1)),
                    Optional("region_name"): All(str, Length(min=1)),
                    "gce_project_id": All(str, Length(min=1)),
                    "gce_client_id": All(str, Length(min=1)),
                    "gce_client_secret": All(str, Length(min=1)),
                    "nova_client_api": nova_api_version()
                },
                extra=True),
            "cluster":
            Schema(
                {
                    "cloud": All(str, Length(min=1)),
                    "setup_provider": All(str, Length(min=1)),
                    "login": All(str, Length(min=1)),
                },
                required=True,
                extra=True),
            "setup":
            Schema({
                "provider": All(str, Length(min=1)),
            },
                   required=True,
                   extra=True),
            "login":
            Schema(
                {
                    "image_user": All(str, Length(min=1)),
                    "image_user_sudo": All(str, Length(min=1)),
                    "image_sudo": Boolean(str),
                    "user_key_name": All(str, Length(min=1)),
                    "user_key_private": check_file(),
                    "user_key_public": check_file()
                },
                required=True)
        }
Beispiel #55
0
    parser.add_option("--jar_keystore",
                      dest="jar_keystore",
                      help="keystore for signing jar_")
    parser.add_option("--jar_keyname",
                      dest="jar_keyname",
                      help="which key to use from jar_keystore")
    parser.add_option("-v",
                      action="store_const",
                      dest="loglevel",
                      const=logging.DEBUG)

    options, args = parser.parse_args()

    # Config-file values only fill in options not already set on the
    # command line (optparse ensure_value leaves set values untouched).
    if options.configfile:
        config = RawConfigParser()
        config.read(options.configfile)
        for option, value in config.items('signscript'):
            if option == "signcode_timestamp":
                value = config.getboolean('signscript', option)
            options.ensure_value(option, value)

    # Reset to default if this wasn't set in the config file
    if options.signcode_timestamp is None:
        options.signcode_timestamp = True

    logging.basicConfig(level=options.loglevel,
                        format="%(asctime)s - %(message)s")

    if len(args) != 4:
        parser.error("Incorrect number of arguments")
Beispiel #56
0
#  The script is built to use the Global API Key for your account
#
#**************************************************************************

import sys
if sys.version_info[0] == 2:
    from ConfigParser import RawConfigParser
if sys.version_info[0] >= 3:
    from configparser import RawConfigParser
import json
import requests

# read configuration file
# Fix: "sys.argv[1] or default" raises IndexError when the script is run
# without an argument, so the fallback could never trigger.  Check the
# argument count; an empty-string argument still falls back as before.
config_file_name = (sys.argv[1] if len(sys.argv) > 1 else "") or "cloudflare.config"
config = RawConfigParser()
config.read(config_file_name)

# Parse parameters
# Cloudflare
zone_id = config.get("cloudflare", "zone_id")
email = config.get("cloudflare", "email")
token = config.get("cloudflare", "global_token")

# Record parameters
record_id = config.get("record", "record_id")
record_name = config.get("record", "record_name")
proxied = config.getboolean("record", "proxied")
ttl = config.get("record", "ttl")
# Simple request to get IP
ip = requests.get('https://api.ipify.org').text
Beispiel #57
0
from datetime import datetime
import logging
import dbus
from threading import Event
import time

from component import Component, ComponentWithThread
from uptime import Uptime
from ip import get_ip_address

# Global debug switch for this module.
DEBUG = False

logger = logging.getLogger('fancontrol')

# Fan-control settings are read once, at import time.
config = RawConfigParser()
config.read('fancontrol.cfg')

# Polling interval for the network check (unit defined by consumers of
# this value — confirm against the check_network users).
measure_interval = config.getint('check_network', 'interval')
# NOTE(review): assert is stripped under "python -O"; raise ValueError
# here if this validation must always run.
assert measure_interval >= 1


class RestartWLAN(Component):
    """Component that reacts to 'RestartWLAN' messages on the messageboard."""

    def __init__(self):
        Component.__init__(self, 'restartWLAN')

    def __enter__(self):
        # Subscribe for 'RestartWLAN' messages while holding the component
        # lock, then defer to the base class context entry.
        # NOTE(review): the onResetWLAN handler is defined outside this
        # excerpt — confirm it exists on this class.
        with self.lock:
            self.messageboard.subscribe('RestartWLAN', self,
                                        RestartWLAN.onResetWLAN)
        return Component.__enter__(self)
Beispiel #58
0
    def GetConfig(self, reload=False):
        """Read genmon.conf and populate the matching attributes.

        Options absent from the file leave the current attribute values
        untouched.  Returns True on success.  On failure the initial load
        raises; a reload (reload=True) only logs the error and returns
        False.
        """
        ConfigSection = "GenMon"
        try:
            # read config file
            config = RawConfigParser()
            # config parser reads from current directory, when running form a cron tab this is
            # not defined so we specify the full path
            config.read(self.ConfigFilePath + 'genmon.conf')

            # getfloat() raises an exception if the value is not a float
            # getint() and getboolean() also do this for their respective types

            if config.has_option(ConfigSection, 'sitename'):
                self.SiteName = config.get(ConfigSection, 'sitename')

            if config.has_option(ConfigSection, 'incoming_mail_folder'):
                self.IncomingEmailFolder = config.get(
                    ConfigSection,
                    'incoming_mail_folder')  # imap folder for incoming mail

            if config.has_option(ConfigSection, 'processed_mail_folder'):
                self.ProcessedEmailFolder = config.get(
                    ConfigSection,
                    'processed_mail_folder')  # imap folder for processed mail
            #  server_port, must match value in myclient.py and check_monitor_system.py and any calling client apps
            if config.has_option(ConfigSection, 'server_port'):
                self.ServerSocketPort = config.getint(ConfigSection,
                                                      'server_port')

            if config.has_option(ConfigSection, 'loglocation'):
                self.LogLocation = config.get(ConfigSection, 'loglocation')

            if config.has_option(ConfigSection, 'syncdst'):
                self.bSyncDST = config.getboolean(ConfigSection, 'syncdst')
            if config.has_option(ConfigSection, 'synctime'):
                self.bSyncTime = config.getboolean(ConfigSection, 'synctime')

            if config.has_option(ConfigSection, 'disableplatformstats'):
                self.bDisablePlatformStats = config.getboolean(
                    ConfigSection, 'disableplatformstats')

            if config.has_option(ConfigSection, 'simulation'):
                self.Simulation = config.getboolean(ConfigSection,
                                                    'simulation')

            if config.has_option(ConfigSection, 'simulationfile'):
                self.SimulationFile = config.get(ConfigSection,
                                                 'simulationfile')

            if config.has_option(ConfigSection, 'controllertype'):
                self.ControllerSelected = config.get(ConfigSection,
                                                     'controllertype')

            if config.has_option(ConfigSection, 'weatherkey'):
                self.WeatherAPIKey = config.get(ConfigSection, 'weatherkey')

            if config.has_option(ConfigSection, 'weatherlocation'):
                self.WeatherLocation = config.get(ConfigSection,
                                                  'weatherlocation')

            if config.has_option(ConfigSection, 'metricweather'):
                self.UseMetric = config.getboolean(ConfigSection,
                                                   'metricweather')

            if config.has_option(ConfigSection, 'minimumweatherinfo'):
                self.WeatherMinimum = config.getboolean(
                    ConfigSection, 'minimumweatherinfo')

            if config.has_option(ConfigSection, 'readonlyemailcommands'):
                self.ReadOnlyEmailCommands = config.getboolean(
                    ConfigSection, 'readonlyemailcommands')

            # A missing or mismatched version marks this as a new install
            # and writes the current version back to the config file.
            if config.has_option(ConfigSection, 'version'):
                self.Version = config.get(ConfigSection, 'version')
                if not self.Version == GENMON_VERSION:
                    self.AddItemToConfFile('version', GENMON_VERSION)
                    self.NewInstall = True
            else:
                self.AddItemToConfFile('version', GENMON_VERSION)
                self.NewInstall = True
                self.Version = GENMON_VERSION
            if config.has_option(ConfigSection, "autofeedback"):
                self.FeedbackEnabled = config.getboolean(
                    ConfigSection, 'autofeedback')
            else:
                self.AddItemToConfFile('autofeedback', "False")
                self.FeedbackEnabled = False
            # Load saved feedback log if log is present
            if os.path.isfile(self.FeedbackLogFile):
                try:
                    with open(self.FeedbackLogFile) as infile:
                        self.FeedbackMessages = json.load(infile)
                except Exception as e1:
                    # Unparseable log: discard it rather than fail startup.
                    os.remove(self.FeedbackLogFile)
        except Exception as e1:
            if not reload:
                raise Exception(
                    "Missing config file or config file entries: " + str(e1))
            else:
                self.LogErrorLine("Error reloading config file" + str(e1))
            return False

        return True
Beispiel #59
0
import json
import os

from ConfigParser import RawConfigParser

from raspberryio.settings.staging import *

# import secrets
try:
    # Secrets live in settings.ini one directory above the project root.
    SECRETS_ROOT = os.path.abspath(os.path.join(PROJECT_ROOT, os.pardir))
    config = RawConfigParser()
    config.read(os.path.join(SECRETS_ROOT, 'settings.ini'))
    SUPERFEEDR_CREDS = json.loads(config.get('secrets', 'SUPERFEEDR_CREDS'))
    SECRET_KEY = json.loads(config.get('secrets', 'SECRET_KEY'))
    DATABASES['default']['NAME'] = config.get('database', 'DATABASE_NAME')
    DATABASES['default']['HOST'] = config.get('database', 'DATABASE_HOST')
    DATABASES['default']['USER'] = config.get('database', 'DATABASE_USER')
    DATABASES['default']['PASSWORD'] = config.get('database',
                                                  'DATABASE_PASSWORD')

except Exception:
    # Fix: narrowed from a bare "except:" so SystemExit/KeyboardInterrupt
    # are no longer swallowed.  A missing or partial settings.ini still
    # deliberately falls back to the staging defaults imported above.
    pass

EMAIL_SUBJECT_PREFIX = '[Raspberryio Prod] '
Beispiel #60
0
import redis
import subprocess
from datetime import datetime
from ConfigParser import RawConfigParser

# Project root (two levels above this settings module) and log directory.
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
BASE_DIR_LOGS = os.path.join(BASE_DIR, 'logs')

# Ensure the log directory exists; ignore "already exists" errors.
try:
    os.makedirs(BASE_DIR_LOGS)
except OSError:
    pass

# Config
config = RawConfigParser()
config.read(
    os.path.join(os.path.join(BASE_DIR, 'bitcoinjackpot'), 'settings.ini'))

# Environment Settings
SECRET_KEY = config.get('secrets', 'SECRET_KEY')
# Debug flags are enabled for every environment except PRODUCTION.
DEBUG = False if config.get('environment',
                            'ENVIRONMENT') == "PRODUCTION" else True
TEMPLATE_DEBUG = False if config.get('environment',
                                     'ENVIRONMENT') == "PRODUCTION" else True
BUILD = "DEVELOPMENT"
BASE_DOMAIN = config.get('environment', 'BASE_DOMAIN')
IS_EDUCATIONAL = True
IS_EDUCATIONAL_AMOUNT = 25

# Bitcoin
# 1 BTC == 100,000,000 satoshi.
SATOSHI_RATIO = 100000000
MINIMUM_BITCOIN_BET = float(0.0001)